diff --git a/CMakeLists.txt b/CMakeLists.txt
index b57689aa8c143d79b0272a2d9080066b2d1b71e8..7ba4412b38e22775b382d5145ff40d5db147cd6b 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -102,21 +102,20 @@ add_list_string_option(CMAKE_BUILD_TYPE "RelWithDebInfo" "Choose the type of bui
 # if /proc/cpuinfo exists, we want to inspect the available intrinsics
 # - so as not to always go through SIMDE emulation
 # - so as to avoid AVX512 instruction generation by gcc
-execute_process(COMMAND uname -m OUTPUT_VARIABLE CPUARCH OUTPUT_STRIP_TRAILING_WHITESPACE)
-message(STATUS "CPUARCH ${CPUARCH}")
 if(EXISTS "/proc/cpuinfo")
   file(STRINGS "/proc/cpuinfo" CPUFLAGS REGEX flags LIMIT_COUNT 1)
 else()
   message(WARNING "did not find /proc/cpuinfo -- not setting any x86-specific compilation variables")
 endif()
-
+
 eval_boolean(AUTODETECT_AVX512 DEFINED CPUFLAGS AND CPUFLAGS MATCHES "avx512")
 add_boolean_option(AVX512 ${AUTODETECT_AVX512} "Whether AVX512 intrinsics are available on the host processor" ON)
 
 eval_boolean(AUTODETECT_AVX2 DEFINED CPUFLAGS AND CPUFLAGS MATCHES "avx2")
 add_boolean_option(AVX2 ${AUTODETECT_AVX2} "Whether AVX2 intrinsics are available on the host processor" ON)
 
-if(${CPUARCH} STREQUAL "x86_64" AND DEFINED CPUFLAGS)
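+# CMAKE_SYSTEM_PROCESSOR equals `uname -m` for native builds, but unlike invoking uname it is also correct when cross-compiling.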
+message(STATUS "CPU architecture is ${CMAKE_SYSTEM_PROCESSOR}")
+if(CMAKE_SYSTEM_PROCESSOR STREQUAL "x86_64")
   # The following intrinsics are assumed to be available on any x86 system
   # (avx, f16c, fma, gfni, mmx, pclmul, sse, sse2, sse3, xop)
   set(C_FLAGS_PROCESSOR "${C_FLAGS_PROCESSOR} -DSIMDE_X86_AVX_NATIVE -DSIMDE_X86_F16C_NATIVE -DSIMDE_X86_FMA_NATIVE -DSIMDE_X86_GFNI_NATIVE -DSIMDE_X86_MMX_NATIVE -DSIMDE_X86_PCLMUL_NATIVE -DSIMDE_X86_SSE2_NATIVE -DSIMDE_X86_SSE3_NATIVE -DSIMDE_X86_SSE_NATIVE -DSIMDE_X86_XOP_HAVE_COM_ -DSIMDE_X86_XOP_NATIVE")
@@ -139,8 +138,12 @@ if(${CPUARCH} STREQUAL "x86_64" AND DEFINED CPUFLAGS)
   if(CPUINFO MATCHES "ssse3")
     set(C_FLAGS_PROCESSOR "${C_FLAGS_PROCESSOR} -DSIMDE_X86_SSSE3_NATIVE")
   endif()
-elseif(${CPUARCH} NOT STREQUAL "x86_64")
-  message(FATAL_ERROR "Cannot compile for CPU architecture ${CPUARCH}")
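+# 32-bit Arm needs the hard-float ABI and NEON enabled for the NEON-backed SIMDE paths; AArch64 has NEON unconditionally.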
+elseif(CMAKE_SYSTEM_PROCESSOR STREQUAL "armv7l")
+  set(C_FLAGS_PROCESSOR "${C_FLAGS_PROCESSOR} -gdwarf-2 -mfloat-abi=hard -mfpu=neon -lgcc -lrt")
+elseif(CMAKE_SYSTEM_PROCESSOR STREQUAL "aarch64")
+  set(C_FLAGS_PROCESSOR "${C_FLAGS_PROCESSOR} -gdwarf-2 -lgcc -lrt")
+else()
+  message(FATAL_ERROR "Cannot compile for CPU architecture ${CMAKE_SYSTEM_PROCESSOR}")
 endif()
 
 set(C_FLAGS_PROCESSOR "${C_FLAGS_PROCESSOR} -march=native")
@@ -856,7 +859,7 @@ target_link_libraries(ldpc_parityCheck  PRIVATE ldpc_gen_HEADERS)
 
 add_library(coding MODULE ${PHY_TURBOSRC} )
 
-add_library(dfts MODULE ${OPENAIR1_DIR}/PHY/TOOLS/oai_dfts.c )
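+# oai_dfts_neon.c presumably holds the NEON variants of the DFT kernels, split out so Arm builds link them alongside oai_dfts.c.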
+add_library(dfts MODULE ${OPENAIR1_DIR}/PHY/TOOLS/oai_dfts.c ${OPENAIR1_DIR}/PHY/TOOLS/oai_dfts_neon.c)
 
 
 set(PHY_SRC_COMMON
diff --git a/common/utils/LOG/log.c b/common/utils/LOG/log.c
index 8b0e1da6badba3d432c8a14be8867366729c3b73..9d47256bb4ef20c1caa029d9a866e92bc5eb7888 100644
--- a/common/utils/LOG/log.c
+++ b/common/utils/LOG/log.c
@@ -52,7 +52,11 @@
 
 // Fixme: find a better place to be sure it is called
 void read_cpu_hardware (void) __attribute__ ((constructor));
-void read_cpu_hardware (void) {__builtin_cpu_init(); }
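+// __builtin_cpu_init() is an x86-only GCC builtin, so Arm builds install an empty constructor instead.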
+#if !defined(__arm__) && !defined(__aarch64__)
+  void read_cpu_hardware (void) {__builtin_cpu_init(); }
+#else
+  void read_cpu_hardware (void) {}
+#endif
 
 log_mem_cnt_t log_mem_d[2];
 int log_mem_flag = 0;
diff --git a/common/utils/time_meas.c b/common/utils/time_meas.c
index 9577968bc47f0d3d55b0b4ffd88721617b27f78b..ade06acac7caeb606ca2eec054fe65d0c9641148 100644
--- a/common/utils/time_meas.c
+++ b/common/utils/time_meas.c
@@ -38,7 +38,7 @@ static time_stats_t  **measur_table;
 notifiedFIFO_t measur_fifo;
 double get_cpu_freq_GHz(void)
 {
-  if (cpu_freq_GHz <1 ) {
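+  // On Arm, rdtsc_oai() reads the generic timer, which ticks well below 1 GHz; a "< 1" guard would therefore
+  // re-measure on every call. 0.01 still treats the initial value of 0 as "not yet measured".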
+  if (cpu_freq_GHz < 0.01) {
     time_stats_t ts = {0};
     reset_meas(&ts);
     ts.trials++;
@@ -46,8 +46,7 @@ double get_cpu_freq_GHz(void)
     sleep(1);
     ts.diff = (rdtsc_oai()-ts.in);
     cpu_freq_GHz = (double)ts.diff/1000000000;
-    printf("CPU Freq is %f \n", cpu_freq_GHz);
   }
   return cpu_freq_GHz;
 }
 
diff --git a/common/utils/time_meas.h b/common/utils/time_meas.h
index 3b5e194c2cafa338293c33f1e2127179aed62998..0735597633c82420539cc58180d29cd4ef5c4cbc 100644
--- a/common/utils/time_meas.h
+++ b/common/utils/time_meas.h
@@ -106,8 +106,16 @@ static inline unsigned long long rdtsc_oai(void) {
   __asm__ volatile ("rdtsc" : "=a" (a), "=d" (d));
   return (d<<32) | a;
 }
+#elif defined(__aarch64__)
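+// cntvct_el0 is the Armv8 generic-timer virtual counter; it advances at the fixed cntfrq_el0 rate, not at the CPU clock.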
+static inline uint64_t rdtsc_oai(void) __attribute__((always_inline));
+static inline uint64_t rdtsc_oai(void)
+{
+  uint64_t r = 0;
+  asm volatile("mrs %0, cntvct_el0" : "=r"(r));
+  return r;
+}
 
-#elif defined(__arm__) || defined(__aarch64__)
+#elif defined(__arm__)
 static inline uint32_t rdtsc_oai(void) __attribute__((always_inline));
 static inline uint32_t rdtsc_oai(void) {
   uint32_t r = 0;
diff --git a/common/utils/utils.c b/common/utils/utils.c
index 16402c1057fc052a628c6c61187fdcf456ed2f6d..831b4afbd343f86d6e0efc980f12ccaef6c86909 100644
--- a/common/utils/utils.c
+++ b/common/utils/utils.c
@@ -118,12 +118,6 @@ char *itoa(int i) {
   return strdup(buffer);
 }
 
-void *memcpy1(void *dst,const void *src,size_t n) {
-
-  void *ret=dst;
-  asm volatile("rep movsb" : "+D" (dst) : "c"(n), "S"(src) : "cc","memory");
-  return(ret);
-}
 
 void set_priority(int priority)
 {
diff --git a/common/utils/utils.h b/common/utils/utils.h
index af90adc8678ab488fd26d5aa49572e736f788830..3f618b61f64956942adbe68a688b6f970f21f0c7 100644
--- a/common/utils/utils.h
+++ b/common/utils/utils.h
@@ -102,8 +102,6 @@ int hex_char_to_hex_value (char c);
 // Converts an hexadecimal ASCII coded string into its value.**
 int hex_string_to_hex_value (uint8_t *hex_value, const char *hex_string, int size);
 
-void *memcpy1(void *dst,const void *src,size_t n);
-
 void set_priority(int priority);
 
 char *itoa(int i);
diff --git a/openair1/PHY/CODING/3gpplte_sse.c b/openair1/PHY/CODING/3gpplte_sse.c
index 731f3710b459daec501e7326ff1ababe1531123e..865ffef2d9e372d4ad39da3b83faffa81cfa7a4b 100644
--- a/openair1/PHY/CODING/3gpplte_sse.c
+++ b/openair1/PHY/CODING/3gpplte_sse.c
@@ -45,34 +45,18 @@
 //#define DEBUG_TURBO_ENCODER 1
 //#define CALLGRIND 1
 
-#if defined(__x86_64__) || defined(__i386__)
 struct treillis {
   union {
-    __m64 systematic_andp1_64[3];
+    simde__m64 systematic_andp1_64[3];
     uint8_t systematic_andp1_8[24];
   };
   union {
-    __m64 parity2_64[3];
+    simde__m64 parity2_64[3];
     uint8_t parity2_8[24];
   };
   int exit_state;
 }  __attribute__ ((aligned(64)));
 
-#elif defined(__arm__) || defined(__aarch64__)
-
-struct treillis {
-  union {
-    uint8x8_t systematic_andp1_64[3];
-    char systematic_andp1_8[24];
-  } __attribute__((aligned(64)));
-  union {
-    uint8x8_t parity2_64[3];
-    char parity2_8[24];
-  } __attribute__((aligned(64)));
-  int exit_state;
-};
-#endif
-
 struct treillis all_treillis[8][256];
 
 int all_treillis_initialized=0;
@@ -125,66 +109,41 @@ static void treillis_table_init(void) {
 
 char interleave_compact_byte(short *base_interleaver,unsigned char *input, unsigned char *output, int n) {
   char expandInput[768*8] __attribute__((aligned(32)));
-  int i,loop=n>>4;
-#if defined(__x86_64__) || defined(__i386__)
-  __m256i *i_256=(__m256i *)input, *o_256=(__m256i *)expandInput;
-  __m256i tmp1, tmp2, tmp3, tmp4, tmp5, tmp6, tmp7;
-  __m256i BIT_MASK = simde_mm256_set_epi8(  0b00000001,
-                                       0b00000010,
-                                       0b00000100,
-                                       0b00001000,
-                                       0b00010000,
-                                       0b00100000,
-                                       0b01000000,
-                                       0b10000000,
-                                       0b00000001,
-                                       0b00000010,
-                                       0b00000100,
-                                       0b00001000,
-                                       0b00010000,
-                                       0b00100000,
-                                       0b01000000,
-                                       0b10000000,
-                                       0b00000001,
-                                       0b00000010,
-                                       0b00000100,
-                                       0b00001000,
-                                       0b00010000,
-                                       0b00100000,
-                                       0b01000000,
-                                       0b10000000,
-                                       0b00000001,
-                                       0b00000010,
-                                       0b00000100,
-                                       0b00001000,
-                                       0b00010000,
-                                       0b00100000,
-                                       0b01000000,
-                                       0b10000000);
-#elif defined(__arm__) || defined(__aarch64__)
-  uint8x16_t *i_128=(uint8x16_t *)input, *o_128=(uint8x16_t *)expandInput;
-  uint8x16_t tmp1,tmp2;
-  uint16x8_t tmp3;
-  uint32x4_t tmp4;
-  uint8x16_t and_tmp;
-  uint8x16_t BIT_MASK = {       0b10000000,
-                                0b01000000,
-                                0b00100000,
-                                0b00010000,
-                                0b00001000,
-                                0b00000100,
-                                0b00000010,
-                                0b00000001,
-                                0b10000000,
-                                0b01000000,
-                                0b00100000,
-                                0b00010000,
-                                0b00001000,
-                                0b00000100,
-                                0b00000010,
-                                0b00000001
-                        };
-#endif
+  int i, loop = n >> 4;
+  simde__m256i *i_256 = (simde__m256i *)input, *o_256 = (simde__m256i *)expandInput;
+  simde__m256i tmp1, tmp2, tmp3, tmp4, tmp5, tmp6, tmp7;
+  simde__m256i BIT_MASK = simde_mm256_set_epi8(0b00000001,
+                                               0b00000010,
+                                               0b00000100,
+                                               0b00001000,
+                                               0b00010000,
+                                               0b00100000,
+                                               0b01000000,
+                                               0b10000000,
+                                               0b00000001,
+                                               0b00000010,
+                                               0b00000100,
+                                               0b00001000,
+                                               0b00010000,
+                                               0b00100000,
+                                               0b01000000,
+                                               0b10000000,
+                                               0b00000001,
+                                               0b00000010,
+                                               0b00000100,
+                                               0b00001000,
+                                               0b00010000,
+                                               0b00100000,
+                                               0b01000000,
+                                               0b10000000,
+                                               0b00000001,
+                                               0b00000010,
+                                               0b00000100,
+                                               0b00001000,
+                                               0b00010000,
+                                               0b00100000,
+                                               0b01000000,
+                                               0b10000000);
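+  // The unpack ladder below duplicates each input byte eight times; comparing against BIT_MASK then turns every
+  // input bit into a 0x00/0xFF byte of expandInput.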
   loop=n>>5;
 
   if ((n&31) > 0)
@@ -194,7 +153,6 @@ char interleave_compact_byte(short *base_interleaver,unsigned char *input, unsig
     // int cur_byte=i<<3;
     // for (b=0;b<8;b++)
     //   expandInput[cur_byte+b] = (input[i]&(1<<(7-b)))>>(7-b);
-#if defined(__x86_64__) || defined(__i386__)
     tmp1=simde_mm256_load_si256(i_256++);       // tmp1 = B0,B1,...,B15,...,B31
     //print_bytes2("in",(uint8_t*)&tmp1);
     tmp2=simde_mm256_unpacklo_epi8(tmp1,tmp1);  // tmp2 = B0,B0,B1,B1,...,B7,B7,B16,B16,B17,B17,...,B23,B23
@@ -259,78 +217,15 @@ char interleave_compact_byte(short *base_interleaver,unsigned char *input, unsig
     //print_bytes2("out",(uint8_t*)(o_256+3));
     o_256[7]=simde_mm256_cmpeq_epi8(simde_mm256_and_si256(tmp7,BIT_MASK),BIT_MASK);;
     //print_bytes2("out",(uint8_t*)(o_256+7));
-    o_256+=8;
-#elif defined(__arm__) || defined(__aarch64__)
-    tmp1=vld1q_u8((uint8_t *)i_128);
-    //print_bytes("tmp1:",(uint8_t*)&tmp1);
-    uint8x16x2_t temp1 =  vzipq_u8(tmp1,tmp1);
-    tmp2 = temp1.val[0];
-    uint16x8x2_t temp2 =  vzipq_u16((uint16x8_t)tmp2,(uint16x8_t)tmp2);
-    tmp3 = temp2.val[0];
-    uint32x4x2_t temp3 =  vzipq_u32((uint32x4_t)tmp3,(uint32x4_t)tmp3);
-    tmp4 = temp3.val[0];
-    //print_bytes("tmp4:",(uint8_t*)&tmp4);
-    *o_128++=vceqq_u8(vandq_u8((uint8x16_t)tmp4,BIT_MASK),BIT_MASK);    //1
-    //print_bytes("o:",(uint8_t*)(o_128-1));
-    tmp4 = temp3.val[1];
-    //print_bytes("tmp4:",(uint8_t*)&tmp4);
-    *o_128++=vceqq_u8(vandq_u8((uint8x16_t)tmp4,BIT_MASK),BIT_MASK);    //2
-    //print_bytes("o:",(uint8_t*)(o_128-1));
-    tmp3 = temp2.val[1];
-    temp3 =  vzipq_u32((uint32x4_t)tmp3,(uint32x4_t)tmp3);
-    tmp4 = temp3.val[0];
-    //print_bytes("tmp4:",(uint8_t*)&tmp4);
-    *o_128++=vceqq_u8(vandq_u8((uint8x16_t)tmp4,BIT_MASK),BIT_MASK);    //3
-    //print_bytes("o:",(uint8_t*)(o_128-1));
-    tmp4 = temp3.val[1];
-    //print_bytes("tmp4:",(uint8_t*)&tmp4);
-    *o_128++=vceqq_u8(vandq_u8((uint8x16_t)tmp4,BIT_MASK),BIT_MASK);    //4
-    //and_tmp = vandq_u8((uint8x16_t)tmp4,BIT_MASK); print_bytes("and:",and_tmp);
-    //print_bytes("o:",(uint8_t*)(o_128-1));
-    temp1 =  vzipq_u8(tmp1,tmp1);
-    tmp2 = temp1.val[1];
-    temp2 =  vzipq_u16((uint16x8_t)tmp2,(uint16x8_t)tmp2);
-    tmp3 = temp2.val[0];
-    temp3 =  vzipq_u32((uint32x4_t)tmp3,(uint32x4_t)tmp3);
-    tmp4 = temp3.val[0];
-    //print_bytes("tmp4:",(uint8_t*)&tmp4);
-    *o_128++=vceqq_u8(vandq_u8((uint8x16_t)tmp4,BIT_MASK),BIT_MASK);    //5
-    //print_bytes("o:",(uint8_t*)(o_128-1));
-    tmp4 = temp3.val[1];
-    //print_bytes("tmp4:",(uint8_t*)&tmp4);
-    *o_128++=vceqq_u8(vandq_u8((uint8x16_t)tmp4,BIT_MASK),BIT_MASK);    //6
-    //print_bytes("o:",(uint8_t*)(o_128-1));
-    temp2 =  vzipq_u16((uint16x8_t)tmp2,(uint16x8_t)tmp2);
-    tmp3 = temp2.val[1];
-    temp3 =  vzipq_u32((uint32x4_t)tmp3,(uint32x4_t)tmp3);
-    tmp4 = temp3.val[0];
-    //print_bytes("tmp4:",(uint8_t*)&tmp4);
-    *o_128++=vceqq_u8(vandq_u8((uint8x16_t)tmp4,BIT_MASK),BIT_MASK);    //7
-    //print_bytes("o:",(uint8_t*)(o_128-1));
-    tmp4 = temp3.val[1];
-    //print_bytes("tmp4:",(uint8_t*)&tmp4);
-    *o_128++=vceqq_u8(vandq_u8((uint8x16_t)tmp4,BIT_MASK),BIT_MASK);    //7
-    //print_bytes("o:",(uint8_t*)(o_128-1));
-    i_128++;
-#endif
+    o_256 += 8;
   }
 
-  short *ptr_intl=base_interleaver;
-#if defined(__x86_64) || defined(__i386__)
-  __m256i tmp={0};
-  uint32_t *systematic2_ptr=(uint32_t *) output;
-#elif defined(__arm__) || defined(__aarch64__)
-  uint8x16_t tmp;
-  const uint8_t __attribute__ ((aligned (16))) _Powers[16]=
-  { 1, 2, 4, 8, 16, 32, 64, 128, 1, 2, 4, 8, 16, 32, 64, 128 };
-  // Set the powers of 2 (do it once for all, if applicable)
-  uint8x16_t Powers= vld1q_u8(_Powers);
-  uint8_t *systematic2_ptr=(uint8_t *) output;
-#endif
+  short *ptr_intl = base_interleaver;
+  simde__m256i tmp = {0};
+  uint32_t *systematic2_ptr = (uint32_t *)output;
   int input_length_words=1+((n-1)>>2);
 
-  for ( i=0; i<  input_length_words ; i ++ ) {
-#if defined(__x86_64__) || defined(__i386__)
+  for (i = 0; i < input_length_words; i++) {
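+    // Gather 32 interleaved bytes (each 0x00 or 0xFF), then movemask packs their sign bits back into one 32-bit word.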
     tmp=simde_mm256_insert_epi8(tmp,expandInput[*ptr_intl++],7);
     tmp=simde_mm256_insert_epi8(tmp,expandInput[*ptr_intl++],6);
     tmp=simde_mm256_insert_epi8(tmp,expandInput[*ptr_intl++],5);
@@ -363,42 +258,19 @@ char interleave_compact_byte(short *base_interleaver,unsigned char *input, unsig
     tmp=simde_mm256_insert_epi8(tmp,expandInput[*ptr_intl++],24+2);
     tmp=simde_mm256_insert_epi8(tmp,expandInput[*ptr_intl++],24+1);
     tmp=simde_mm256_insert_epi8(tmp,expandInput[*ptr_intl++],24+0);
-    *systematic2_ptr++=(unsigned int)simde_mm256_movemask_epi8(tmp);
-#elif defined(__arm__) || defined(__aarch64__)
-    tmp=vsetq_lane_u8(expandInput[*ptr_intl++],tmp,7);
-    tmp=vsetq_lane_u8(expandInput[*ptr_intl++],tmp,6);
-    tmp=vsetq_lane_u8(expandInput[*ptr_intl++],tmp,5);
-    tmp=vsetq_lane_u8(expandInput[*ptr_intl++],tmp,4);
-    tmp=vsetq_lane_u8(expandInput[*ptr_intl++],tmp,3);
-    tmp=vsetq_lane_u8(expandInput[*ptr_intl++],tmp,2);
-    tmp=vsetq_lane_u8(expandInput[*ptr_intl++],tmp,1);
-    tmp=vsetq_lane_u8(expandInput[*ptr_intl++],tmp,0);
-    tmp=vsetq_lane_u8(expandInput[*ptr_intl++],tmp,8+7);
-    tmp=vsetq_lane_u8(expandInput[*ptr_intl++],tmp,8+6);
-    tmp=vsetq_lane_u8(expandInput[*ptr_intl++],tmp,8+5);
-    tmp=vsetq_lane_u8(expandInput[*ptr_intl++],tmp,8+4);
-    tmp=vsetq_lane_u8(expandInput[*ptr_intl++],tmp,8+3);
-    tmp=vsetq_lane_u8(expandInput[*ptr_intl++],tmp,8+2);
-    tmp=vsetq_lane_u8(expandInput[*ptr_intl++],tmp,8+1);
-    tmp=vsetq_lane_u8(expandInput[*ptr_intl++],tmp,8+0);
-    // Compute the mask from the input
-    uint64x2_t Mask= vpaddlq_u32(vpaddlq_u16(vpaddlq_u8(vandq_u8(tmp, Powers))));
-    vst1q_lane_u8(systematic2_ptr++, (uint8x16_t)Mask, 0);
-    vst1q_lane_u8(systematic2_ptr++, (uint8x16_t)Mask, 8);
-#endif
+    *systematic2_ptr++ = (unsigned int)simde_mm256_movemask_epi8(tmp);
   }
 
   return n;
 }
 
-
 /*
-#define _mm_expand_si128(xmmx, out, bit_mask)   \
+#define simde_mm_expand_si128(xmmx, out, bit_mask)   \
   {             \
-   __m128i loc_mm;          \
+   simde__m128i loc_mm;          \
    loc_mm=(xmmx);         \
-   loc_mm=_mm_and_si128(loc_mm,bit_mask);   \
-   out=_mm_cmpeq_epi8(loc_mm,bit_mask);   \
+   loc_mm=simde_mm_and_si128(loc_mm,bit_mask);   \
+   out=simde_mm_cmpeq_epi8(loc_mm,bit_mask);   \
   }
 */
 
@@ -427,12 +299,8 @@ void threegpplte_turbo_encoder_sse(unsigned char *input,
   }
 
   unsigned char systematic2[768] __attribute__((aligned(32)));
-  interleave_compact_byte(base_interleaver,input,systematic2,input_length_bytes);
-#if defined(__x86_64__) || defined(__i386__)
-  __m64 *ptr_output=(__m64 *) output;
-#elif defined(__arm__) || defined(__aarch64__)
-  uint8x8_t *ptr_output=(uint8x8_t *)output;
-#endif
+  interleave_compact_byte(base_interleaver, input, systematic2, input_length_bytes);
+  simde__m64 *ptr_output = (simde__m64 *)output;
   unsigned char cur_s1, cur_s2;
   int code_rate;
 
@@ -440,19 +308,14 @@ void threegpplte_turbo_encoder_sse(unsigned char *input,
     cur_s1=input[i];
     cur_s2=systematic2[i];
 
-    for ( code_rate=0; code_rate<3; code_rate++) {
-#if defined(__x86_64__) || defined(__i386__)
+    for (code_rate = 0; code_rate < 3; code_rate++) {
       /*
-       *ptr_output++ = _mm_add_pi8(all_treillis[state0][cur_s1].systematic_64[code_rate],
-       _mm_add_pi8(all_treillis[state0][cur_s1].parity1_64[code_rate],
+       *ptr_output++ = simde_mm_add_pi8(all_treillis[state0][cur_s1].systematic_64[code_rate],
+       simde_mm_add_pi8(all_treillis[state0][cur_s1].parity1_64[code_rate],
       all_treillis[state1][cur_s2].parity2_64[code_rate]));
       */
-      *ptr_output++ = _mm_add_pi8(all_treillis[state0][cur_s1].systematic_andp1_64[code_rate],
-                                  all_treillis[state1][cur_s2].parity2_64[code_rate]);
-#elif defined(__arm__) || defined(__aarch64__)
-      *ptr_output++ = vadd_u8(all_treillis[state0][cur_s1].systematic_andp1_64[code_rate],
-                              all_treillis[state0][cur_s1].parity2_64[code_rate]);
-#endif
+      *ptr_output++ = simde_mm_add_pi8(all_treillis[state0][cur_s1].systematic_andp1_64[code_rate],
+                                       all_treillis[state1][cur_s2].parity2_64[code_rate]);
     }
 
     state0=all_treillis[state0][cur_s1].exit_state;
@@ -484,11 +347,9 @@ void threegpplte_turbo_encoder_sse(unsigned char *input,
   threegpplte_rsc_termination(&x[10],&x[11],&state1);
 #ifdef DEBUG_TURBO_ENCODER
   printf("term: x0 %u, x1 %u, state1 %d\n",x[10],x[11],state1);
-#endif //DEBUG_TURBO_ENCODER
-#if defined(__x86_64__) || defined(__i386__)
-  _mm_empty();
-  _m_empty();
-#endif
+#endif // DEBUG_TURBO_ENCODER
+  simde_mm_empty();
+  simde_m_empty();
 }
 
 void init_encoder_sse (void) {
diff --git a/openair1/PHY/CODING/3gpplte_turbo_decoder_avx2_16bit.c b/openair1/PHY/CODING/3gpplte_turbo_decoder_avx2_16bit.c
index 3941b94d5ec6871e00fe7d5d9cac402b284f9ffc..77708f4489a7fc284c023f697760f3178ed13cee 100644
--- a/openair1/PHY/CODING/3gpplte_turbo_decoder_avx2_16bit.c
+++ b/openair1/PHY/CODING/3gpplte_turbo_decoder_avx2_16bit.c
@@ -125,10 +125,10 @@ void compute_gamma16avx2(llr_t* m11,llr_t* m10,llr_t* systematic,channel_t* y_pa
 {
   int k,K1;
 
-  __m256i *systematic128 = (__m256i *)systematic;
-  __m256i *y_parity128   = (__m256i *)y_parity;
-  __m256i *m10_128        = (__m256i *)m10;
-  __m256i *m11_128        = (__m256i *)m11;
+  simde__m256i *systematic128 = (simde__m256i *)systematic;
+  simde__m256i *y_parity128 = (simde__m256i *)y_parity;
+  simde__m256i *m10_128 = (simde__m256i *)m10;
+  simde__m256i *m11_128 = (simde__m256i *)m11;
 
 #ifdef DEBUG_LOGMAP
   fprintf(fdavx2,"compute_gamma (avx2_16bit), %p,%p,%p,%p,framelength %d\n",m11,m10,systematic,y_parity,frame_length);
@@ -170,11 +170,11 @@ void compute_alpha16avx2(llr_t* alpha,llr_t* beta,llr_t* m_11,llr_t* m_10,uint16
 {
   int k,l,l2,K1,rerun_flag=0;
 
-  __m256i *alpha128=(__m256i *)alpha,*alpha_ptr;
-  __m256i a0,a1,a2,a3,a4,a5,a6,a7,*m11p,*m10p;
-  __m256i m_b0,m_b1,m_b2,m_b3,m_b4,m_b5,m_b6,m_b7;
-  __m256i new0,new1,new2,new3,new4,new5,new6,new7;
-  __m256i alpha_max;
+  simde__m256i *alpha128 = (simde__m256i *)alpha, *alpha_ptr;
+  simde__m256i a0, a1, a2, a3, a4, a5, a6, a7, *m11p, *m10p;
+  simde__m256i m_b0, m_b1, m_b2, m_b3, m_b4, m_b5, m_b6, m_b7;
+  simde__m256i new0, new1, new2, new3, new4, new5, new6, new7;
+  simde__m256i alpha_max;
 
   unsigned long long timein,timeout;
 
@@ -187,7 +187,7 @@ void compute_alpha16avx2(llr_t* alpha,llr_t* beta,llr_t* m_11,llr_t* m_10,uint16
   timein = rdtsc_oai();
 
   for (l=K1;; l=l2,rerun_flag=1) {
-    alpha128 = (__m256i *)alpha;
+    alpha128 = (simde__m256i *)alpha;
 
     if (rerun_flag == 0) {
 
@@ -253,8 +253,8 @@ void compute_alpha16avx2(llr_t* alpha,llr_t* beta,llr_t* m_11,llr_t* m_10,uint16
     }
 
     alpha_ptr = &alpha128[0];
-    m11p = (__m256i*)m_11;
-    m10p = (__m256i*)m_10;
+    m11p = (simde__m256i *)m_11;
+    m10p = (simde__m256i *)m_10;
 
     for (k=0;
          k<l;
@@ -383,13 +383,13 @@ void compute_beta16avx2(llr_t* alpha,llr_t* beta,llr_t *m_11,llr_t* m_10,uint16_
 
   int k,rerun_flag=0;
 
-  __m256i *m11p,*m10p;
-  register __m256i b0,b1,b2,b3,b4,b5,b6,b7;
-  register __m256i m_b0,m_b1,m_b2,m_b3,m_b4,m_b5,m_b6,m_b7;
-  register __m256i new0,new1,new2,new3,new4,new5,new6,new7;
+  simde__m256i *m11p, *m10p;
+  register simde__m256i b0, b1, b2, b3, b4, b5, b6, b7;
+  register simde__m256i m_b0, m_b1, m_b2, m_b3, m_b4, m_b5, m_b6, m_b7;
+  register simde__m256i new0, new1, new2, new3, new4, new5, new6, new7;
 
-  __m256i *beta128,*alpha128,*beta_ptr;
-  __m256i beta_max;
+  simde__m256i *beta128, *alpha128, *beta_ptr;
+  simde__m256i beta_max;
 
   llr_t m11,m10,beta0_16,beta1_16,beta2_16,beta3_16,beta4_16,beta5_16,beta6_16,beta7_16,beta0_2,beta1_2,beta2_2,beta3_2,beta_m;
   llr_t m11_cw2,m10_cw2,beta0_cw2_16,beta1_cw2_16,beta2_cw2_16,beta3_cw2_16,beta4_cw2_16,beta5_cw2_16,beta6_cw2_16,beta7_cw2_16,beta0_2_cw2,beta1_2_cw2,beta2_2_cw2,beta3_2_cw2,beta_m_cw2;
@@ -506,9 +506,8 @@ void compute_beta16avx2(llr_t* alpha,llr_t* beta,llr_t *m_11,llr_t* m_10,uint16_
   beta7_cw2_16=beta7_cw2_16-beta_m_cw2;
 
   for (rerun_flag=0;; rerun_flag=1) {
-
-    beta_ptr   = (__m256i*)&beta[frame_length<<4];
-    alpha128   = (__m256i*)&alpha[0];
+    beta_ptr = (simde__m256i *)&beta[frame_length << 4];
+    alpha128 = (simde__m256i *)&alpha[0];
 
     if (rerun_flag == 0) {
       beta_ptr[0] = alpha128[(frame_length)];
@@ -533,8 +532,7 @@ void compute_beta16avx2(llr_t* alpha,llr_t* beta,llr_t *m_11,llr_t* m_10,uint16_
       print_shorts("b7",(int16_t*)&beta_ptr[7]);
 #endif
     } else {
-
-      beta128 = (__m256i*)&beta[0];
+      beta128 = (simde__m256i *)&beta[0];
       beta_ptr[0] = simde_mm256_srli_si256(beta128[0],2);
       beta_ptr[1] = simde_mm256_srli_si256(beta128[1],2);
       beta_ptr[2] = simde_mm256_srli_si256(beta128[2],2);
@@ -594,8 +592,8 @@ void compute_beta16avx2(llr_t* alpha,llr_t* beta,llr_t *m_11,llr_t* m_10,uint16_
 
     timein = rdtsc_oai();
 
-    m11p = (frame_length>>3)-1+(__m256i*)m_11;
-    m10p = (frame_length>>3)-1+(__m256i*)m_10;
+    m11p = (frame_length >> 3) - 1 + (simde__m256i *)m_11;
+    m10p = (frame_length >> 3) - 1 + (simde__m256i *)m_10;
 
     for (k=(frame_length>>3)-1; k>=loopval; k--) {
 
@@ -686,15 +684,14 @@ void compute_beta16avx2(llr_t* alpha,llr_t* beta,llr_t *m_11,llr_t* m_10,uint16_
 
 void compute_ext16avx2(llr_t* alpha,llr_t* beta,llr_t* m_11,llr_t* m_10,llr_t* ext, llr_t* systematic,uint16_t frame_length)
 {
-
-  __m256i *alpha128=(__m256i *)alpha;
-  __m256i *beta128=(__m256i *)beta;
-  __m256i *m11_128,*m10_128,*ext_128;
-  __m256i *alpha_ptr,*beta_ptr;
-  __m256i m00_1,m00_2,m00_3,m00_4;
-  __m256i m01_1,m01_2,m01_3,m01_4;
-  __m256i m10_1,m10_2,m10_3,m10_4;
-  __m256i m11_1,m11_2,m11_3,m11_4;
+  simde__m256i *alpha128 = (simde__m256i *)alpha;
+  simde__m256i *beta128 = (simde__m256i *)beta;
+  simde__m256i *m11_128, *m10_128, *ext_128;
+  simde__m256i *alpha_ptr, *beta_ptr;
+  simde__m256i m00_1, m00_2, m00_3, m00_4;
+  simde__m256i m01_1, m01_2, m01_3, m01_4;
+  simde__m256i m10_1, m10_2, m10_3, m10_4;
+  simde__m256i m11_1, m11_2, m11_3, m11_4;
 
   int k;
 
@@ -712,11 +709,9 @@ void compute_ext16avx2(llr_t* alpha,llr_t* beta,llr_t* m_11,llr_t* m_10,llr_t* e
 
 
   for (k=0; k<(frame_length>>3); k++) {
-
-
-    m11_128        = (__m256i*)&m_11[k<<4];
-    m10_128        = (__m256i*)&m_10[k<<4];
-    ext_128        = (__m256i*)&ext[k<<4];
+    m11_128 = (simde__m256i *)&m_11[k << 4];
+    m10_128 = (simde__m256i *)&m_10[k << 4];
+    ext_128 = (simde__m256i *)&ext[k << 4];
 
     /*
       fprintf(fdavx2,"EXT %03d\n",k);
@@ -927,9 +922,7 @@ unsigned char phy_threegpplte_turbo_decoder16avx2(int16_t *y,
   uint8_t temp;
   uint32_t db;
 
-
-  __m256i tmp={0}, zeros=simde_mm256_setzero_si256();
-
+  simde__m256i tmp = {0}, zeros = simde_mm256_setzero_si256();
 
   int offset8_flag=0;
 
@@ -1058,24 +1051,24 @@ unsigned char phy_threegpplte_turbo_decoder16avx2(int16_t *y,
 
     for (i=0; i<(n>>3); i++) { // steady-state portion
 
-      ((__m256i *)systematic2)[i]=simde_mm256_insert_epi16(((__m256i *)systematic2)[i],ext[*pi4_p],0);
-      ((__m256i *)systematic2)[i]=simde_mm256_insert_epi16(((__m256i *)systematic2)[i],ext[8+*pi4_p++],8);
-      ((__m256i *)systematic2)[i]=simde_mm256_insert_epi16(((__m256i *)systematic2)[i],ext[*pi4_p],1);
-      ((__m256i *)systematic2)[i]=simde_mm256_insert_epi16(((__m256i *)systematic2)[i],ext[8+*pi4_p++],9);
-      ((__m256i *)systematic2)[i]=simde_mm256_insert_epi16(((__m256i *)systematic2)[i],ext[*pi4_p],2);
-      ((__m256i *)systematic2)[i]=simde_mm256_insert_epi16(((__m256i *)systematic2)[i],ext[8+*pi4_p++],10);
-      ((__m256i *)systematic2)[i]=simde_mm256_insert_epi16(((__m256i *)systematic2)[i],ext[*pi4_p],3);
-      ((__m256i *)systematic2)[i]=simde_mm256_insert_epi16(((__m256i *)systematic2)[i],ext[8+*pi4_p++],11);
-      ((__m256i *)systematic2)[i]=simde_mm256_insert_epi16(((__m256i *)systematic2)[i],ext[*pi4_p],4);
-      ((__m256i *)systematic2)[i]=simde_mm256_insert_epi16(((__m256i *)systematic2)[i],ext[8+*pi4_p++],12);
-      ((__m256i *)systematic2)[i]=simde_mm256_insert_epi16(((__m256i *)systematic2)[i],ext[*pi4_p],5);
-      ((__m256i *)systematic2)[i]=simde_mm256_insert_epi16(((__m256i *)systematic2)[i],ext[8+*pi4_p++],13);
-      ((__m256i *)systematic2)[i]=simde_mm256_insert_epi16(((__m256i *)systematic2)[i],ext[*pi4_p],6);
-      ((__m256i *)systematic2)[i]=simde_mm256_insert_epi16(((__m256i *)systematic2)[i],ext[8+*pi4_p++],14);
-      ((__m256i *)systematic2)[i]=simde_mm256_insert_epi16(((__m256i *)systematic2)[i],ext[*pi4_p],7);
-      ((__m256i *)systematic2)[i]=simde_mm256_insert_epi16(((__m256i *)systematic2)[i],ext[8+*pi4_p++],15);
+      ((simde__m256i *)systematic2)[i] = simde_mm256_insert_epi16(((simde__m256i *)systematic2)[i], ext[*pi4_p], 0);
+      ((simde__m256i *)systematic2)[i] = simde_mm256_insert_epi16(((simde__m256i *)systematic2)[i], ext[8 + *pi4_p++], 8);
+      ((simde__m256i *)systematic2)[i] = simde_mm256_insert_epi16(((simde__m256i *)systematic2)[i], ext[*pi4_p], 1);
+      ((simde__m256i *)systematic2)[i] = simde_mm256_insert_epi16(((simde__m256i *)systematic2)[i], ext[8 + *pi4_p++], 9);
+      ((simde__m256i *)systematic2)[i] = simde_mm256_insert_epi16(((simde__m256i *)systematic2)[i], ext[*pi4_p], 2);
+      ((simde__m256i *)systematic2)[i] = simde_mm256_insert_epi16(((simde__m256i *)systematic2)[i], ext[8 + *pi4_p++], 10);
+      ((simde__m256i *)systematic2)[i] = simde_mm256_insert_epi16(((simde__m256i *)systematic2)[i], ext[*pi4_p], 3);
+      ((simde__m256i *)systematic2)[i] = simde_mm256_insert_epi16(((simde__m256i *)systematic2)[i], ext[8 + *pi4_p++], 11);
+      ((simde__m256i *)systematic2)[i] = simde_mm256_insert_epi16(((simde__m256i *)systematic2)[i], ext[*pi4_p], 4);
+      ((simde__m256i *)systematic2)[i] = simde_mm256_insert_epi16(((simde__m256i *)systematic2)[i], ext[8 + *pi4_p++], 12);
+      ((simde__m256i *)systematic2)[i] = simde_mm256_insert_epi16(((simde__m256i *)systematic2)[i], ext[*pi4_p], 5);
+      ((simde__m256i *)systematic2)[i] = simde_mm256_insert_epi16(((simde__m256i *)systematic2)[i], ext[8 + *pi4_p++], 13);
+      ((simde__m256i *)systematic2)[i] = simde_mm256_insert_epi16(((simde__m256i *)systematic2)[i], ext[*pi4_p], 6);
+      ((simde__m256i *)systematic2)[i] = simde_mm256_insert_epi16(((simde__m256i *)systematic2)[i], ext[8 + *pi4_p++], 14);
+      ((simde__m256i *)systematic2)[i] = simde_mm256_insert_epi16(((simde__m256i *)systematic2)[i], ext[*pi4_p], 7);
+      ((simde__m256i *)systematic2)[i] = simde_mm256_insert_epi16(((simde__m256i *)systematic2)[i], ext[8 + *pi4_p++], 15);
 #ifdef DEBUG_LOGMAP
-      print_shorts("syst2",(int16_t*)&((__m256i *)systematic2)[i]);
+      print_shorts("syst2", (int16_t *)&((simde__m256i *)systematic2)[i]);
 #endif
     }
 
@@ -1107,9 +1100,10 @@ unsigned char phy_threegpplte_turbo_decoder16avx2(int16_t *y,
       tmp=simde_mm256_insert_epi16(tmp,ext2[8+*pi5_p++],14);
       tmp=simde_mm256_insert_epi16(tmp,ext2[*pi5_p],7);
       tmp=simde_mm256_insert_epi16(tmp,ext2[8+*pi5_p++],15);
-      ((__m256i *)systematic1)[i] = simde_mm256_adds_epi16(simde_mm256_subs_epi16(tmp,((__m256i*)ext)[i]),((__m256i *)systematic0)[i]);
+      ((simde__m256i *)systematic1)[i] =
+          simde_mm256_adds_epi16(simde_mm256_subs_epi16(tmp, ((simde__m256i *)ext)[i]), ((simde__m256i *)systematic0)[i]);
 #ifdef DEBUG_LOGMAP
-      print_shorts("syst1",(int16_t*)&((__m256i *)systematic1)[i]);
+      print_shorts("syst1", (int16_t *)&((simde__m256i *)systematic1)[i]);
 #endif
     }
 
@@ -1249,9 +1243,9 @@ unsigned char phy_threegpplte_turbo_decoder16avx2(int16_t *y,
     if (iteration_cnt < max_iterations) {
       log_map16avx2(systematic1,yparity1,m11,m10,alpha,beta,ext,n,0,F,offset8_flag,alpha_stats,beta_stats,gamma_stats,ext_stats);
 
-      __m256i* ext_128=(__m256i*) ext;
-      __m256i* s1_128=(__m256i*) systematic1;
-      __m256i* s0_128=(__m256i*) systematic0;
+      simde__m256i *ext_128 = (simde__m256i *)ext;
+      simde__m256i *s1_128 = (simde__m256i *)systematic1;
+      simde__m256i *s0_128 = (simde__m256i *)systematic0;
       int myloop=n>>3;
 
       for (i=0; i<myloop; i++) {
@@ -1264,9 +1258,8 @@ unsigned char phy_threegpplte_turbo_decoder16avx2(int16_t *y,
 
   //  fprintf(fdavx2,"crc %x, oldcrc %x\n",crc,oldcrc);
 
-
-  _mm_empty();
-  _m_empty();
+  simde_mm_empty();
+  simde_m_empty();
 
 #ifdef DEBUG_LOGMAP
   fclose(fdavx2);
diff --git a/openair1/PHY/CODING/3gpplte_turbo_decoder_sse.c b/openair1/PHY/CODING/3gpplte_turbo_decoder_sse.c
index 547ea11db7555433db8bd4c86add8d3931df5bcb..fa1b55b45eee59d90e1094b58416b092f4fe970d 100644
--- a/openair1/PHY/CODING/3gpplte_turbo_decoder_sse.c
+++ b/openair1/PHY/CODING/3gpplte_turbo_decoder_sse.c
@@ -49,26 +49,23 @@
   #include <string.h>
 #endif
 
-#define SHUFFLE16(a,b,c,d,e,f,g,h) _mm_set_epi8(h==-1?-1:h*2+1, \
-    h==-1?-1:h*2, \
-    g==-1?-1:g*2+1, \
-    g==-1?-1:g*2, \
-    f==-1?-1:f*2+1, \
-    f==-1?-1:f*2, \
-    e==-1?-1:e*2+1, \
-    e==-1?-1:e*2, \
-    d==-1?-1:d*2+1, \
-    d==-1?-1:d*2, \
-    c==-1?-1:c*2+1, \
-    c==-1?-1:c*2, \
-    b==-1?-1:b*2+1, \
-    b==-1?-1:b*2, \
-    a==-1?-1:a*2+1, \
-    a==-1?-1:a*2);
-
-
-
-
+#define SHUFFLE16(a, b, c, d, e, f, g, h)     \
+  simde_mm_set_epi8(h == -1 ? -1 : h * 2 + 1, \
+                    h == -1 ? -1 : h * 2,     \
+                    g == -1 ? -1 : g * 2 + 1, \
+                    g == -1 ? -1 : g * 2,     \
+                    f == -1 ? -1 : f * 2 + 1, \
+                    f == -1 ? -1 : f * 2,     \
+                    e == -1 ? -1 : e * 2 + 1, \
+                    e == -1 ? -1 : e * 2,     \
+                    d == -1 ? -1 : d * 2 + 1, \
+                    d == -1 ? -1 : d * 2,     \
+                    c == -1 ? -1 : c * 2 + 1, \
+                    c == -1 ? -1 : c * 2,     \
+                    b == -1 ? -1 : b * 2 + 1, \
+                    b == -1 ? -1 : b * 2,     \
+                    a == -1 ? -1 : a * 2 + 1, \
+                    a == -1 ? -1 : a * 2);
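+// Builds a byte-level shuffle mask that permutes eight 16-bit lanes (an index of -1 zeroes the lane), presumably for
+// consumption by simde_mm_shuffle_epi8.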
 
 //#define DEBUG_LOGMAP
 
@@ -93,15 +90,14 @@ void compute_alpha(llr_t *alpha,llr_t *beta, llr_t *m11,llr_t *m10, unsigned sho
 void compute_beta(llr_t *alpha, llr_t *beta,llr_t *m11,llr_t *m10, unsigned short frame_length,unsigned char F,int offset8_flag);
 void compute_ext(llr_t *alpha,llr_t *beta,llr_t *m11,llr_t *m10,llr_t *extrinsic, llr_t *ap, unsigned short frame_length);
 
-
-void print_bytes(char *s, __m128i *x) {
+void print_bytes(char *s, simde__m128i *x)
+{
   int8_t *tempb = (int8_t *)x;
   printf("%s  : %d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d\n",s,
          tempb[0],tempb[1],tempb[2],tempb[3],tempb[4],tempb[5],tempb[6],tempb[7],
          tempb[8],tempb[9],tempb[10],tempb[11],tempb[12],tempb[13],tempb[14],tempb[15]);
 }
 
-
 void log_map(llr_t *systematic,
              channel_t *y_parity,
              llr_t *m11,
@@ -137,10 +133,10 @@ void log_map(llr_t *systematic,
 void compute_gamma(llr_t *m11,llr_t *m10,llr_t *systematic,channel_t *y_parity,
                    unsigned short frame_length,unsigned char term_flag) {
   int k,K1;
-  __m128i *systematic128 = (__m128i *)systematic;
-  __m128i *y_parity128   = (__m128i *)y_parity;
-  __m128i *m10_128        = (__m128i *)m10;
-  __m128i *m11_128        = (__m128i *)m11;
+  simde__m128i *systematic128 = (simde__m128i *)systematic;
+  simde__m128i *y_parity128 = (simde__m128i *)y_parity;
+  simde__m128i *m10_128 = (simde__m128i *)m10;
+  simde__m128i *m11_128 = (simde__m128i *)m11;
 #ifdef DEBUG_LOGMAP
   msg("compute_gamma, %p,%p,%p,%p,framelength %d\n",m11,m10,systematic,y_parity,frame_length);
 #endif
@@ -148,235 +144,239 @@ void compute_gamma(llr_t *m11,llr_t *m10,llr_t *systematic,channel_t *y_parity,
   K1=frame_length>>3;
 
   for (k=0; k<K1; k++) {
-    m11_128[k] = _mm_srai_epi16(_mm_adds_epi16(systematic128[k],y_parity128[k]),1);
-    m10_128[k] = _mm_srai_epi16(_mm_subs_epi16(systematic128[k],y_parity128[k]),1);
+    m11_128[k] = simde_mm_srai_epi16(simde_mm_adds_epi16(systematic128[k], y_parity128[k]), 1);
+    m10_128[k] = simde_mm_srai_epi16(simde_mm_subs_epi16(systematic128[k], y_parity128[k]), 1);
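+    // Branch metrics: m11 = (systematic + parity) / 2 and m10 = (systematic - parity) / 2, computed with
+    // saturating adds and an arithmetic right shift.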
     /*
     printf("gamma %d: s %d,%d,%d,%d,%d,%d,%d,%d\n",
      k,
-     (int16_t)_mm_extract_epi16(systematic128[k],0),
-     (int16_t)_mm_extract_epi16(systematic128[k],1),
-     (int16_t)_mm_extract_epi16(systematic128[k],2),
-     (int16_t)_mm_extract_epi16(systematic128[k],3),
-     (int16_t)_mm_extract_epi16(systematic128[k],4),
-     (int16_t)_mm_extract_epi16(systematic128[k],5),
-     (int16_t)_mm_extract_epi16(systematic128[k],6),
-     (int16_t)_mm_extract_epi16(systematic128[k],7));
+     (int16_t)simde_mm_extract_epi16(systematic128[k],0),
+     (int16_t)simde_mm_extract_epi16(systematic128[k],1),
+     (int16_t)simde_mm_extract_epi16(systematic128[k],2),
+     (int16_t)simde_mm_extract_epi16(systematic128[k],3),
+     (int16_t)simde_mm_extract_epi16(systematic128[k],4),
+     (int16_t)simde_mm_extract_epi16(systematic128[k],5),
+     (int16_t)simde_mm_extract_epi16(systematic128[k],6),
+     (int16_t)simde_mm_extract_epi16(systematic128[k],7));
 
     printf("gamma %d: yp %d,%d,%d,%d,%d,%d,%d,%d\n",
      k,
-     (int16_t)_mm_extract_epi16(y_parity128[k],0),
-     (int16_t)_mm_extract_epi16(y_parity128[k],1),
-     (int16_t)_mm_extract_epi16(y_parity128[k],2),
-     (int16_t)_mm_extract_epi16(y_parity128[k],3),
-     (int16_t)_mm_extract_epi16(y_parity128[k],4),
-     (int16_t)_mm_extract_epi16(y_parity128[k],5),
-     (int16_t)_mm_extract_epi16(y_parity128[k],6),
-     (int16_t)_mm_extract_epi16(y_parity128[k],7));
+     (int16_t)simde_mm_extract_epi16(y_parity128[k],0),
+     (int16_t)simde_mm_extract_epi16(y_parity128[k],1),
+     (int16_t)simde_mm_extract_epi16(y_parity128[k],2),
+     (int16_t)simde_mm_extract_epi16(y_parity128[k],3),
+     (int16_t)simde_mm_extract_epi16(y_parity128[k],4),
+     (int16_t)simde_mm_extract_epi16(y_parity128[k],5),
+     (int16_t)simde_mm_extract_epi16(y_parity128[k],6),
+     (int16_t)simde_mm_extract_epi16(y_parity128[k],7));
 
     printf("gamma %d: m11 %d,%d,%d,%d,%d,%d,%d,%d\n",
      k,
-     (int16_t)_mm_extract_epi16(m11_128[k],0),
-     (int16_t)_mm_extract_epi16(m11_128[k],1),
-     (int16_t)_mm_extract_epi16(m11_128[k],2),
-     (int16_t)_mm_extract_epi16(m11_128[k],3),
-     (int16_t)_mm_extract_epi16(m11_128[k],4),
-     (int16_t)_mm_extract_epi16(m11_128[k],5),
-     (int16_t)_mm_extract_epi16(m11_128[k],6),
-     (int16_t)_mm_extract_epi16(m11_128[k],7));
+     (int16_t)simde_mm_extract_epi16(m11_128[k],0),
+     (int16_t)simde_mm_extract_epi16(m11_128[k],1),
+     (int16_t)simde_mm_extract_epi16(m11_128[k],2),
+     (int16_t)simde_mm_extract_epi16(m11_128[k],3),
+     (int16_t)simde_mm_extract_epi16(m11_128[k],4),
+     (int16_t)simde_mm_extract_epi16(m11_128[k],5),
+     (int16_t)simde_mm_extract_epi16(m11_128[k],6),
+     (int16_t)simde_mm_extract_epi16(m11_128[k],7));
     printf("gamma %d: m10 %d,%d,%d,%d,%d,%d,%d,%d\n",
      k,
-     (int16_t)_mm_extract_epi16(m10_128[k],0),
-     (int16_t)_mm_extract_epi16(m10_128[k],1),
-     (int16_t)_mm_extract_epi16(m10_128[k],2),
-     (int16_t)_mm_extract_epi16(m10_128[k],3),
-     (int16_t)_mm_extract_epi16(m10_128[k],4),
-     (int16_t)_mm_extract_epi16(m10_128[k],5),
-     (int16_t)_mm_extract_epi16(m10_128[k],6),
-     (int16_t)_mm_extract_epi16(m10_128[k],7));
+     (int16_t)simde_mm_extract_epi16(m10_128[k],0),
+     (int16_t)simde_mm_extract_epi16(m10_128[k],1),
+     (int16_t)simde_mm_extract_epi16(m10_128[k],2),
+     (int16_t)simde_mm_extract_epi16(m10_128[k],3),
+     (int16_t)simde_mm_extract_epi16(m10_128[k],4),
+     (int16_t)simde_mm_extract_epi16(m10_128[k],5),
+     (int16_t)simde_mm_extract_epi16(m10_128[k],6),
+     (int16_t)simde_mm_extract_epi16(m10_128[k],7));
     */
   }
 
   // Termination
-  m11_128[k] = _mm_srai_epi16(_mm_adds_epi16(systematic128[k+term_flag],y_parity128[k]),1);
-  m10_128[k] = _mm_srai_epi16(_mm_subs_epi16(systematic128[k+term_flag],y_parity128[k]),1);
+  m11_128[k] = simde_mm_srai_epi16(simde_mm_adds_epi16(systematic128[k + term_flag], y_parity128[k]), 1);
+  m10_128[k] = simde_mm_srai_epi16(simde_mm_subs_epi16(systematic128[k + term_flag], y_parity128[k]), 1);
   //  printf("gamma (term): %d,%d, %d,%d, %d,%d\n",m11[k<<3],m10[k<<3],m11[1+(k<<3)],m10[1+(k<<3)],m11[2+(k<<3)],m10[2+(k<<3)]);
 #else
-  register __m128i sl,sh,ypl,yph; //K128=_mm_set1_epi8(-128);
+  register simde__m128i sl, sh, ypl, yph; // K128=simde_mm_set1_epi8(-128);
   K1 = (frame_length>>4);
 
   for (k=0; k<K1; k++) {
-    sl  = _mm_cvtepi8_epi16(systematic128[k]);
-    sh = _mm_cvtepi8_epi16(_mm_srli_si128(systematic128[k],8));
-    ypl = _mm_cvtepi8_epi16(y_parity128[k]);
-    yph = _mm_cvtepi8_epi16(_mm_srli_si128(y_parity128[k],8));
-    m11_128[k] = _mm_packs_epi16(_mm_srai_epi16(_mm_adds_epi16(sl,ypl),1),
-                                 _mm_srai_epi16(_mm_adds_epi16(sh,yph),1));
-    m10_128[k] = _mm_packs_epi16(_mm_srai_epi16(_mm_subs_epi16(sl,ypl),1),
-                                 _mm_srai_epi16(_mm_subs_epi16(sh,yph),1));
-    //    m11_128[k] = _mm_adds_epi8(systematic128[k],y_parity128[k]);
-    //    m10_128[k] = _mm_subs_epi8(systematic128[k],y_parity128[k]);
-    //    m11_128[k] = _mm_sub_epi8(_mm_avg_epu8(_mm_add_epi8(systematic128[k],K128),_mm_add_epi8(y_parity128[k],K128)),K128);
-    //    m10_128[k] = _mm_sub_epi8(_mm_avg_epu8(_mm_add_epi8(systematic128[k],K128),_mm_add_epi8(_mm_sign_epi8(y_parity128[k],K128),K128)),K128);
+    sl = simde_mm_cvtepi8_epi16(systematic128[k]);
+    sh = simde_mm_cvtepi8_epi16(simde_mm_srli_si128(systematic128[k], 8));
+    ypl = simde_mm_cvtepi8_epi16(y_parity128[k]);
+    yph = simde_mm_cvtepi8_epi16(simde_mm_srli_si128(y_parity128[k], 8));
+    m11_128[k] = simde_mm_packs_epi16(simde_mm_srai_epi16(simde_mm_adds_epi16(sl, ypl), 1),
+                                      simde_mm_srai_epi16(simde_mm_adds_epi16(sh, yph), 1));
+    m10_128[k] = simde_mm_packs_epi16(simde_mm_srai_epi16(simde_mm_subs_epi16(sl, ypl), 1),
+                                      simde_mm_srai_epi16(simde_mm_subs_epi16(sh, yph), 1));
+    //    m11_128[k] = simde_mm_adds_epi8(systematic128[k],y_parity128[k]);
+    //    m10_128[k] = simde_mm_subs_epi8(systematic128[k],y_parity128[k]);
+    //    m11_128[k] =
+    //    simde_mm_sub_epi8(simde_mm_avg_epu8(simde_mm_add_epi8(systematic128[k],K128),simde_mm_add_epi8(y_parity128[k],K128)),K128);
+    //    m10_128[k] =
+    //    simde_mm_sub_epi8(simde_mm_avg_epu8(simde_mm_add_epi8(systematic128[k],K128),simde_mm_add_epi8(simde_mm_sign_epi8(y_parity128[k],K128),K128)),K128);
     /*
     printf("gamma %d: s %d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d\n",
      k,
-     (int8_t)_mm_extract_epi8(systematic128[k],0),
-     (int8_t)_mm_extract_epi8(systematic128[k],1),
-     (int8_t)_mm_extract_epi8(systematic128[k],2),
-     (int8_t)_mm_extract_epi8(systematic128[k],3),
-     (int8_t)_mm_extract_epi8(systematic128[k],4),
-     (int8_t)_mm_extract_epi8(systematic128[k],5),
-     (int8_t)_mm_extract_epi8(systematic128[k],6),
-     (int8_t)_mm_extract_epi8(systematic128[k],7),
-     (int8_t)_mm_extract_epi8(systematic128[k],8),
-     (int8_t)_mm_extract_epi8(systematic128[k],9),
-     (int8_t)_mm_extract_epi8(systematic128[k],10),
-     (int8_t)_mm_extract_epi8(systematic128[k],11),
-     (int8_t)_mm_extract_epi8(systematic128[k],12),
-     (int8_t)_mm_extract_epi8(systematic128[k],13),
-     (int8_t)_mm_extract_epi8(systematic128[k],14),
-     (int8_t)_mm_extract_epi8(systematic128[k],15));
+     (int8_t)simde_mm_extract_epi8(systematic128[k],0),
+     (int8_t)simde_mm_extract_epi8(systematic128[k],1),
+     (int8_t)simde_mm_extract_epi8(systematic128[k],2),
+     (int8_t)simde_mm_extract_epi8(systematic128[k],3),
+     (int8_t)simde_mm_extract_epi8(systematic128[k],4),
+     (int8_t)simde_mm_extract_epi8(systematic128[k],5),
+     (int8_t)simde_mm_extract_epi8(systematic128[k],6),
+     (int8_t)simde_mm_extract_epi8(systematic128[k],7),
+     (int8_t)simde_mm_extract_epi8(systematic128[k],8),
+     (int8_t)simde_mm_extract_epi8(systematic128[k],9),
+     (int8_t)simde_mm_extract_epi8(systematic128[k],10),
+     (int8_t)simde_mm_extract_epi8(systematic128[k],11),
+     (int8_t)simde_mm_extract_epi8(systematic128[k],12),
+     (int8_t)simde_mm_extract_epi8(systematic128[k],13),
+     (int8_t)simde_mm_extract_epi8(systematic128[k],14),
+     (int8_t)simde_mm_extract_epi8(systematic128[k],15));
     printf("gamma %d: yp %d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d\n",
      k,
-     (int8_t)_mm_extract_epi8(y_parity128[k],0),
-     (int8_t)_mm_extract_epi8(y_parity128[k],1),
-     (int8_t)_mm_extract_epi8(y_parity128[k],2),
-     (int8_t)_mm_extract_epi8(y_parity128[k],3),
-     (int8_t)_mm_extract_epi8(y_parity128[k],4),
-     (int8_t)_mm_extract_epi8(y_parity128[k],5),
-     (int8_t)_mm_extract_epi8(y_parity128[k],6),
-     (int8_t)_mm_extract_epi8(y_parity128[k],7),
-     (int8_t)_mm_extract_epi8(y_parity128[k],8),
-     (int8_t)_mm_extract_epi8(y_parity128[k],9),
-     (int8_t)_mm_extract_epi8(y_parity128[k],10),
-     (int8_t)_mm_extract_epi8(y_parity128[k],11),
-     (int8_t)_mm_extract_epi8(y_parity128[k],12),
-     (int8_t)_mm_extract_epi8(y_parity128[k],13),
-     (int8_t)_mm_extract_epi8(y_parity128[k],14),
-     (int8_t)_mm_extract_epi8(y_parity128[k],15));
+     (int8_t)simde_mm_extract_epi8(y_parity128[k],0),
+     (int8_t)simde_mm_extract_epi8(y_parity128[k],1),
+     (int8_t)simde_mm_extract_epi8(y_parity128[k],2),
+     (int8_t)simde_mm_extract_epi8(y_parity128[k],3),
+     (int8_t)simde_mm_extract_epi8(y_parity128[k],4),
+     (int8_t)simde_mm_extract_epi8(y_parity128[k],5),
+     (int8_t)simde_mm_extract_epi8(y_parity128[k],6),
+     (int8_t)simde_mm_extract_epi8(y_parity128[k],7),
+     (int8_t)simde_mm_extract_epi8(y_parity128[k],8),
+     (int8_t)simde_mm_extract_epi8(y_parity128[k],9),
+     (int8_t)simde_mm_extract_epi8(y_parity128[k],10),
+     (int8_t)simde_mm_extract_epi8(y_parity128[k],11),
+     (int8_t)simde_mm_extract_epi8(y_parity128[k],12),
+     (int8_t)simde_mm_extract_epi8(y_parity128[k],13),
+     (int8_t)simde_mm_extract_epi8(y_parity128[k],14),
+     (int8_t)simde_mm_extract_epi8(y_parity128[k],15));
     printf("gamma %d: m11 %d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d\n",
      k,
-     (int8_t)_mm_extract_epi8(m11_128[k],0),
-     (int8_t)_mm_extract_epi8(m11_128[k],1),
-     (int8_t)_mm_extract_epi8(m11_128[k],2),
-     (int8_t)_mm_extract_epi8(m11_128[k],3),
-     (int8_t)_mm_extract_epi8(m11_128[k],4),
-     (int8_t)_mm_extract_epi8(m11_128[k],5),
-     (int8_t)_mm_extract_epi8(m11_128[k],6),
-     (int8_t)_mm_extract_epi8(m11_128[k],7),
-     (int8_t)_mm_extract_epi8(m11_128[k],8),
-     (int8_t)_mm_extract_epi8(m11_128[k],9),
-     (int8_t)_mm_extract_epi8(m11_128[k],10),
-     (int8_t)_mm_extract_epi8(m11_128[k],11),
-     (int8_t)_mm_extract_epi8(m11_128[k],12),
-     (int8_t)_mm_extract_epi8(m11_128[k],13),
-     (int8_t)_mm_extract_epi8(m11_128[k],14),
-     (int8_t)_mm_extract_epi8(m11_128[k],15));
+     (int8_t)simde_mm_extract_epi8(m11_128[k],0),
+     (int8_t)simde_mm_extract_epi8(m11_128[k],1),
+     (int8_t)simde_mm_extract_epi8(m11_128[k],2),
+     (int8_t)simde_mm_extract_epi8(m11_128[k],3),
+     (int8_t)simde_mm_extract_epi8(m11_128[k],4),
+     (int8_t)simde_mm_extract_epi8(m11_128[k],5),
+     (int8_t)simde_mm_extract_epi8(m11_128[k],6),
+     (int8_t)simde_mm_extract_epi8(m11_128[k],7),
+     (int8_t)simde_mm_extract_epi8(m11_128[k],8),
+     (int8_t)simde_mm_extract_epi8(m11_128[k],9),
+     (int8_t)simde_mm_extract_epi8(m11_128[k],10),
+     (int8_t)simde_mm_extract_epi8(m11_128[k],11),
+     (int8_t)simde_mm_extract_epi8(m11_128[k],12),
+     (int8_t)simde_mm_extract_epi8(m11_128[k],13),
+     (int8_t)simde_mm_extract_epi8(m11_128[k],14),
+     (int8_t)simde_mm_extract_epi8(m11_128[k],15));
     printf("gamma %d: m10 %d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d\n",
      k,
-     (int8_t)_mm_extract_epi8(m10_128[k],0),
-     (int8_t)_mm_extract_epi8(m10_128[k],1),
-     (int8_t)_mm_extract_epi8(m10_128[k],2),
-     (int8_t)_mm_extract_epi8(m10_128[k],3),
-     (int8_t)_mm_extract_epi8(m10_128[k],4),
-     (int8_t)_mm_extract_epi8(m10_128[k],5),
-     (int8_t)_mm_extract_epi8(m10_128[k],6),
-     (int8_t)_mm_extract_epi8(m10_128[k],7),
-     (int8_t)_mm_extract_epi8(m10_128[k],8),
-     (int8_t)_mm_extract_epi8(m10_128[k],9),
-     (int8_t)_mm_extract_epi8(m10_128[k],10),
-     (int8_t)_mm_extract_epi8(m10_128[k],11),
-     (int8_t)_mm_extract_epi8(m10_128[k],12),
-     (int8_t)_mm_extract_epi8(m10_128[k],13),
-     (int8_t)_mm_extract_epi8(m10_128[k],14),
-     (int8_t)_mm_extract_epi8(m10_128[k],15));
+     (int8_t)simde_mm_extract_epi8(m10_128[k],0),
+     (int8_t)simde_mm_extract_epi8(m10_128[k],1),
+     (int8_t)simde_mm_extract_epi8(m10_128[k],2),
+     (int8_t)simde_mm_extract_epi8(m10_128[k],3),
+     (int8_t)simde_mm_extract_epi8(m10_128[k],4),
+     (int8_t)simde_mm_extract_epi8(m10_128[k],5),
+     (int8_t)simde_mm_extract_epi8(m10_128[k],6),
+     (int8_t)simde_mm_extract_epi8(m10_128[k],7),
+     (int8_t)simde_mm_extract_epi8(m10_128[k],8),
+     (int8_t)simde_mm_extract_epi8(m10_128[k],9),
+     (int8_t)simde_mm_extract_epi8(m10_128[k],10),
+     (int8_t)simde_mm_extract_epi8(m10_128[k],11),
+     (int8_t)simde_mm_extract_epi8(m10_128[k],12),
+     (int8_t)simde_mm_extract_epi8(m10_128[k],13),
+     (int8_t)simde_mm_extract_epi8(m10_128[k],14),
+     (int8_t)simde_mm_extract_epi8(m10_128[k],15));
     */
   }
 
   // Termination
-  sl  = _mm_cvtepi8_epi16(systematic128[k+term_flag]);
-  sh = _mm_cvtepi8_epi16(_mm_srli_si128(systematic128[k],8));
-  ypl = _mm_cvtepi8_epi16(y_parity128[k+term_flag]);
-  yph = _mm_cvtepi8_epi16(_mm_srli_si128(y_parity128[k],8));
-  m11_128[k] = _mm_packs_epi16(_mm_srai_epi16(_mm_adds_epi16(sl,ypl),1),
-                               _mm_srai_epi16(_mm_adds_epi16(sh,yph),1));
-  m10_128[k] = _mm_packs_epi16(_mm_srai_epi16(_mm_subs_epi16(sl,ypl),1),
-                               _mm_srai_epi16(_mm_subs_epi16(sh,yph),1));
-  //    m11_128[k] = _mm_adds_epi8(systematic128[k+term_flag],y_parity128[k]);
-  //    m10_128[k] = _mm_subs_epi8(systematic128[k+term_flag],y_parity128[k]);
-  //  m11_128[k] = _mm_sub_epi8(_mm_avg_epu8(_mm_add_epi8(systematic128[k+term_flag],K128),_mm_add_epi8(y_parity128[k],K128)),K128);
-  //  m10_128[k] = _mm_sub_epi8(_mm_avg_epu8(_mm_add_epi8(systematic128[k+term_flag],K128),_mm_add_epi8(_mm_sign_epi8(y_parity128[k],K128),K128)),K128);
+  sl = simde_mm_cvtepi8_epi16(systematic128[k + term_flag]);
+  sh = simde_mm_cvtepi8_epi16(simde_mm_srli_si128(systematic128[k], 8));
+  ypl = simde_mm_cvtepi8_epi16(y_parity128[k + term_flag]);
+  yph = simde_mm_cvtepi8_epi16(simde_mm_srli_si128(y_parity128[k], 8));
+  m11_128[k] = simde_mm_packs_epi16(simde_mm_srai_epi16(simde_mm_adds_epi16(sl, ypl), 1),
+                                    simde_mm_srai_epi16(simde_mm_adds_epi16(sh, yph), 1));
+  m10_128[k] = simde_mm_packs_epi16(simde_mm_srai_epi16(simde_mm_subs_epi16(sl, ypl), 1),
+                                    simde_mm_srai_epi16(simde_mm_subs_epi16(sh, yph), 1));
+  //    m11_128[k] = simde_mm_adds_epi8(systematic128[k+term_flag],y_parity128[k]);
+  //    m10_128[k] = simde_mm_subs_epi8(systematic128[k+term_flag],y_parity128[k]);
+  //  m11_128[k] =
+  //  simde_mm_sub_epi8(simde_mm_avg_epu8(simde_mm_add_epi8(systematic128[k+term_flag],K128),simde_mm_add_epi8(y_parity128[k],K128)),K128);
+  //  m10_128[k] =
+  //  simde_mm_sub_epi8(simde_mm_avg_epu8(simde_mm_add_epi8(systematic128[k+term_flag],K128),simde_mm_add_epi8(simde_mm_sign_epi8(y_parity128[k],K128),K128)),K128);
   /*
   printf("m11 = %p (K1 %d)\n",&m11_128[k],K1);
     printf("gamma %d: s %d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d\n",
      k+term_flag,
-     _mm_extract_epi8(systematic128[k+term_flag],0),
-     _mm_extract_epi8(systematic128[k+term_flag],1),
-     _mm_extract_epi8(systematic128[k+term_flag],2),
-     _mm_extract_epi8(systematic128[k+term_flag],3),
-     _mm_extract_epi8(systematic128[k+term_flag],4),
-     _mm_extract_epi8(systematic128[k+term_flag],5),
-     _mm_extract_epi8(systematic128[k+term_flag],6),
-     _mm_extract_epi8(systematic128[k+term_flag],7),
-     _mm_extract_epi8(systematic128[k+term_flag],8),
-     _mm_extract_epi8(systematic128[k+term_flag],9),
-     _mm_extract_epi8(systematic128[k+term_flag],10),
-     _mm_extract_epi8(systematic128[k+term_flag],11),
-     _mm_extract_epi8(systematic128[k+term_flag],12),
-     _mm_extract_epi8(systematic128[k+term_flag],13),
-     _mm_extract_epi8(systematic128[k+term_flag],14),
-     _mm_extract_epi8(systematic128[k+term_flag],15));
+     simde_mm_extract_epi8(systematic128[k+term_flag],0),
+     simde_mm_extract_epi8(systematic128[k+term_flag],1),
+     simde_mm_extract_epi8(systematic128[k+term_flag],2),
+     simde_mm_extract_epi8(systematic128[k+term_flag],3),
+     simde_mm_extract_epi8(systematic128[k+term_flag],4),
+     simde_mm_extract_epi8(systematic128[k+term_flag],5),
+     simde_mm_extract_epi8(systematic128[k+term_flag],6),
+     simde_mm_extract_epi8(systematic128[k+term_flag],7),
+     simde_mm_extract_epi8(systematic128[k+term_flag],8),
+     simde_mm_extract_epi8(systematic128[k+term_flag],9),
+     simde_mm_extract_epi8(systematic128[k+term_flag],10),
+     simde_mm_extract_epi8(systematic128[k+term_flag],11),
+     simde_mm_extract_epi8(systematic128[k+term_flag],12),
+     simde_mm_extract_epi8(systematic128[k+term_flag],13),
+     simde_mm_extract_epi8(systematic128[k+term_flag],14),
+     simde_mm_extract_epi8(systematic128[k+term_flag],15));
     printf("gamma %d: yp %d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d\n",
      k,
-     _mm_extract_epi8(y_parity128[k],0),
-     _mm_extract_epi8(y_parity128[k],1),
-     _mm_extract_epi8(y_parity128[k],2),
-     _mm_extract_epi8(y_parity128[k],3),
-     _mm_extract_epi8(y_parity128[k],4),
-     _mm_extract_epi8(y_parity128[k],5),
-     _mm_extract_epi8(y_parity128[k],6),
-     _mm_extract_epi8(y_parity128[k],7),
-     _mm_extract_epi8(y_parity128[k],8),
-     _mm_extract_epi8(y_parity128[k],9),
-     _mm_extract_epi8(y_parity128[k],10),
-     _mm_extract_epi8(y_parity128[k],11),
-     _mm_extract_epi8(y_parity128[k],12),
-     _mm_extract_epi8(y_parity128[k],13),
-     _mm_extract_epi8(y_parity128[k],14),
-     _mm_extract_epi8(y_parity128[k],15));
+     simde_mm_extract_epi8(y_parity128[k],0),
+     simde_mm_extract_epi8(y_parity128[k],1),
+     simde_mm_extract_epi8(y_parity128[k],2),
+     simde_mm_extract_epi8(y_parity128[k],3),
+     simde_mm_extract_epi8(y_parity128[k],4),
+     simde_mm_extract_epi8(y_parity128[k],5),
+     simde_mm_extract_epi8(y_parity128[k],6),
+     simde_mm_extract_epi8(y_parity128[k],7),
+     simde_mm_extract_epi8(y_parity128[k],8),
+     simde_mm_extract_epi8(y_parity128[k],9),
+     simde_mm_extract_epi8(y_parity128[k],10),
+     simde_mm_extract_epi8(y_parity128[k],11),
+     simde_mm_extract_epi8(y_parity128[k],12),
+     simde_mm_extract_epi8(y_parity128[k],13),
+     simde_mm_extract_epi8(y_parity128[k],14),
+     simde_mm_extract_epi8(y_parity128[k],15));
     printf("gamma %d: m11 %d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d\n",
      k,
-     _mm_extract_epi8(m11_128[k],0),
-     _mm_extract_epi8(m11_128[k],1),
-     _mm_extract_epi8(m11_128[k],2),
-     _mm_extract_epi8(m11_128[k],3),
-     _mm_extract_epi8(m11_128[k],4),
-     _mm_extract_epi8(m11_128[k],5),
-     _mm_extract_epi8(m11_128[k],6),
-     _mm_extract_epi8(m11_128[k],7),
-     _mm_extract_epi8(m11_128[k],8),
-     _mm_extract_epi8(m11_128[k],9),
-     _mm_extract_epi8(m11_128[k],10),
-     _mm_extract_epi8(m11_128[k],11),
-     _mm_extract_epi8(m11_128[k],12),
-     _mm_extract_epi8(m11_128[k],13),
-     _mm_extract_epi8(m11_128[k],14),
-     _mm_extract_epi8(m11_128[k],15));
+     simde_mm_extract_epi8(m11_128[k],0),
+     simde_mm_extract_epi8(m11_128[k],1),
+     simde_mm_extract_epi8(m11_128[k],2),
+     simde_mm_extract_epi8(m11_128[k],3),
+     simde_mm_extract_epi8(m11_128[k],4),
+     simde_mm_extract_epi8(m11_128[k],5),
+     simde_mm_extract_epi8(m11_128[k],6),
+     simde_mm_extract_epi8(m11_128[k],7),
+     simde_mm_extract_epi8(m11_128[k],8),
+     simde_mm_extract_epi8(m11_128[k],9),
+     simde_mm_extract_epi8(m11_128[k],10),
+     simde_mm_extract_epi8(m11_128[k],11),
+     simde_mm_extract_epi8(m11_128[k],12),
+     simde_mm_extract_epi8(m11_128[k],13),
+     simde_mm_extract_epi8(m11_128[k],14),
+     simde_mm_extract_epi8(m11_128[k],15));
   */
 #endif
-  _mm_empty();
-  _m_empty();
+  simde_mm_empty();
+  simde_m_empty();
 }
 
 #define L 40
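+/* L sets the length of the short second pass of the recursions below: after
+   a full pass over the frame, the first L>>3 vector steps are rerun with
+   starting metrics carried over from the end of the first pass
+   (the rerun_flag == 1 branches). */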
 
 void compute_alpha(llr_t *alpha,llr_t *beta,llr_t *m_11,llr_t *m_10,unsigned short frame_length,unsigned char F) {
   int k,l,l2,K1,rerun_flag=0;
-  __m128i *alpha128=(__m128i *)alpha,*alpha_ptr;
-  __m128i a0,a1,a2,a3,a4,a5,a6,a7,*m11p,*m10p;
-  __m128i m_b0,m_b1,m_b2,m_b3,m_b4,m_b5,m_b6,m_b7;
-  __m128i new0,new1,new2,new3,new4,new5,new6,new7;
-  __m128i alpha_max;
+  simde__m128i *alpha128 = (simde__m128i *)alpha, *alpha_ptr;
+  simde__m128i a0, a1, a2, a3, a4, a5, a6, a7, *m11p, *m10p;
+  simde__m128i m_b0, m_b1, m_b2, m_b3, m_b4, m_b5, m_b6, m_b7;
+  simde__m128i new0, new1, new2, new3, new4, new5, new6, new7;
+  simde__m128i alpha_max;
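+  /* Metric layout: alpha128[0..7] hold the forward metrics of the 8 trellis
+     states, one simde__m128i per state; each register carries the metrics of
+     8 parallel sub-block windows as 16-bit lanes (16 8-bit lanes in the LLR8
+     variant), so each loop iteration advances every window by one step. */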
 #ifndef LLR8
   l2 = L>>3;
   K1 = (frame_length>>3);
@@ -387,27 +387,27 @@ void compute_alpha(llr_t *alpha,llr_t *beta,llr_t *m_11,llr_t *m_10,unsigned sho
 
   for (l=K1;; l=l2,rerun_flag=1) {
 #ifndef LLR8
-    alpha128 = (__m128i *)alpha;
+    alpha128 = (simde__m128i *)alpha;
 
     if (rerun_flag == 0) {
-      alpha128[0] = _mm_set_epi16(-MAX/2,-MAX/2,-MAX/2,-MAX/2,-MAX/2,-MAX/2,-MAX/2,0);
-      alpha128[1] = _mm_set_epi16(-MAX/2,-MAX/2,-MAX/2,-MAX/2,-MAX/2,-MAX/2,-MAX/2,-MAX/2);
-      alpha128[2] = _mm_set_epi16(-MAX/2,-MAX/2,-MAX/2,-MAX/2,-MAX/2,-MAX/2,-MAX/2,-MAX/2);
-      alpha128[3] = _mm_set_epi16(-MAX/2,-MAX/2,-MAX/2,-MAX/2,-MAX/2,-MAX/2,-MAX/2,-MAX/2);
-      alpha128[4] = _mm_set_epi16(-MAX/2,-MAX/2,-MAX/2,-MAX/2,-MAX/2,-MAX/2,-MAX/2,-MAX/2);
-      alpha128[5] = _mm_set_epi16(-MAX/2,-MAX/2,-MAX/2,-MAX/2,-MAX/2,-MAX/2,-MAX/2,-MAX/2);
-      alpha128[6] = _mm_set_epi16(-MAX/2,-MAX/2,-MAX/2,-MAX/2,-MAX/2,-MAX/2,-MAX/2,-MAX/2);
-      alpha128[7] = _mm_set_epi16(-MAX/2,-MAX/2,-MAX/2,-MAX/2,-MAX/2,-MAX/2,-MAX/2,-MAX/2);
+      alpha128[0] = simde_mm_set_epi16(-MAX / 2, -MAX / 2, -MAX / 2, -MAX / 2, -MAX / 2, -MAX / 2, -MAX / 2, 0);
+      alpha128[1] = simde_mm_set_epi16(-MAX / 2, -MAX / 2, -MAX / 2, -MAX / 2, -MAX / 2, -MAX / 2, -MAX / 2, -MAX / 2);
+      alpha128[2] = simde_mm_set_epi16(-MAX / 2, -MAX / 2, -MAX / 2, -MAX / 2, -MAX / 2, -MAX / 2, -MAX / 2, -MAX / 2);
+      alpha128[3] = simde_mm_set_epi16(-MAX / 2, -MAX / 2, -MAX / 2, -MAX / 2, -MAX / 2, -MAX / 2, -MAX / 2, -MAX / 2);
+      alpha128[4] = simde_mm_set_epi16(-MAX / 2, -MAX / 2, -MAX / 2, -MAX / 2, -MAX / 2, -MAX / 2, -MAX / 2, -MAX / 2);
+      alpha128[5] = simde_mm_set_epi16(-MAX / 2, -MAX / 2, -MAX / 2, -MAX / 2, -MAX / 2, -MAX / 2, -MAX / 2, -MAX / 2);
+      alpha128[6] = simde_mm_set_epi16(-MAX / 2, -MAX / 2, -MAX / 2, -MAX / 2, -MAX / 2, -MAX / 2, -MAX / 2, -MAX / 2);
+      alpha128[7] = simde_mm_set_epi16(-MAX / 2, -MAX / 2, -MAX / 2, -MAX / 2, -MAX / 2, -MAX / 2, -MAX / 2, -MAX / 2);
     } else {
       //set initial alpha in columns 1-7 from final alpha from last run in columns 0-6
-      alpha128[0] = _mm_slli_si128(alpha128[frame_length],2);
-      alpha128[1] = _mm_slli_si128(alpha128[1+frame_length],2);
-      alpha128[2] = _mm_slli_si128(alpha128[2+frame_length],2);
-      alpha128[3] = _mm_slli_si128(alpha128[3+frame_length],2);
-      alpha128[4] = _mm_slli_si128(alpha128[4+frame_length],2);
-      alpha128[5] = _mm_slli_si128(alpha128[5+frame_length],2);
-      alpha128[6] = _mm_slli_si128(alpha128[6+frame_length],2);
-      alpha128[7] = _mm_slli_si128(alpha128[7+frame_length],2);
+      alpha128[0] = simde_mm_slli_si128(alpha128[frame_length], 2);
+      alpha128[1] = simde_mm_slli_si128(alpha128[1 + frame_length], 2);
+      alpha128[2] = simde_mm_slli_si128(alpha128[2 + frame_length], 2);
+      alpha128[3] = simde_mm_slli_si128(alpha128[3 + frame_length], 2);
+      alpha128[4] = simde_mm_slli_si128(alpha128[4 + frame_length], 2);
+      alpha128[5] = simde_mm_slli_si128(alpha128[5 + frame_length], 2);
+      alpha128[6] = simde_mm_slli_si128(alpha128[6 + frame_length], 2);
+      alpha128[7] = simde_mm_slli_si128(alpha128[7 + frame_length], 2);
       // set initial alpha in column 0 to (0,-MAX/2,...,-MAX/2)
       alpha[8] = -MAX/2;
       alpha[16] = -MAX/2;
@@ -419,62 +419,62 @@ void compute_alpha(llr_t *alpha,llr_t *beta,llr_t *m_11,llr_t *m_10,unsigned sho
     }
 
     alpha_ptr = &alpha128[0];
-    m11p = (__m128i *)m_11;
-    m10p = (__m128i *)m_10;
+    m11p = (simde__m128i *)m_11;
+    m10p = (simde__m128i *)m_10;
 
     for (k=0;
          k<l;
          k++) {
-      a1=_mm_load_si128(&alpha_ptr[1]);
-      a3=_mm_load_si128(&alpha_ptr[3]);
-      a5=_mm_load_si128(&alpha_ptr[5]);
-      a7=_mm_load_si128(&alpha_ptr[7]);
-      m_b0 = _mm_adds_epi16(a1,*m11p);  // m11
-      m_b4 = _mm_subs_epi16(a1,*m11p);  // m00=-m11
-      m_b1 = _mm_subs_epi16(a3,*m10p);  // m01=-m10
-      m_b5 = _mm_adds_epi16(a3,*m10p);  // m10
-      m_b2 = _mm_adds_epi16(a5,*m10p);  // m10
-      m_b6 = _mm_subs_epi16(a5,*m10p);  // m01=-m10
-      m_b3 = _mm_subs_epi16(a7,*m11p);  // m00=-m11
-      m_b7 = _mm_adds_epi16(a7,*m11p);  // m11
-      a0=_mm_load_si128(&alpha_ptr[0]);
-      a2=_mm_load_si128(&alpha_ptr[2]);
-      a4=_mm_load_si128(&alpha_ptr[4]);
-      a6=_mm_load_si128(&alpha_ptr[6]);
-      new0 = _mm_subs_epi16(a0,*m11p);  // m00=-m11
-      new4 = _mm_adds_epi16(a0,*m11p);  // m11
-      new1 = _mm_adds_epi16(a2,*m10p);  // m10
-      new5 = _mm_subs_epi16(a2,*m10p);  // m01=-m10
-      new2 = _mm_subs_epi16(a4,*m10p);  // m01=-m10
-      new6 = _mm_adds_epi16(a4,*m10p);  // m10
-      new3 = _mm_adds_epi16(a6,*m11p);  // m11
-      new7 = _mm_subs_epi16(a6,*m11p);  // m00=-m11
-      a0 = _mm_max_epi16(m_b0,new0);
-      a1 = _mm_max_epi16(m_b1,new1);
-      a2 = _mm_max_epi16(m_b2,new2);
-      a3 = _mm_max_epi16(m_b3,new3);
-      a4 = _mm_max_epi16(m_b4,new4);
-      a5 = _mm_max_epi16(m_b5,new5);
-      a6 = _mm_max_epi16(m_b6,new6);
-      a7 = _mm_max_epi16(m_b7,new7);
-      alpha_max = _mm_max_epi16(a0,a1);
-      alpha_max = _mm_max_epi16(alpha_max,a2);
-      alpha_max = _mm_max_epi16(alpha_max,a3);
-      alpha_max = _mm_max_epi16(alpha_max,a4);
-      alpha_max = _mm_max_epi16(alpha_max,a5);
-      alpha_max = _mm_max_epi16(alpha_max,a6);
-      alpha_max = _mm_max_epi16(alpha_max,a7);
+      a1 = simde_mm_load_si128(&alpha_ptr[1]);
+      a3 = simde_mm_load_si128(&alpha_ptr[3]);
+      a5 = simde_mm_load_si128(&alpha_ptr[5]);
+      a7 = simde_mm_load_si128(&alpha_ptr[7]);
+      m_b0 = simde_mm_adds_epi16(a1, *m11p); // m11
+      m_b4 = simde_mm_subs_epi16(a1, *m11p); // m00=-m11
+      m_b1 = simde_mm_subs_epi16(a3, *m10p); // m01=-m10
+      m_b5 = simde_mm_adds_epi16(a3, *m10p); // m10
+      m_b2 = simde_mm_adds_epi16(a5, *m10p); // m10
+      m_b6 = simde_mm_subs_epi16(a5, *m10p); // m01=-m10
+      m_b3 = simde_mm_subs_epi16(a7, *m11p); // m00=-m11
+      m_b7 = simde_mm_adds_epi16(a7, *m11p); // m11
+      a0 = simde_mm_load_si128(&alpha_ptr[0]);
+      a2 = simde_mm_load_si128(&alpha_ptr[2]);
+      a4 = simde_mm_load_si128(&alpha_ptr[4]);
+      a6 = simde_mm_load_si128(&alpha_ptr[6]);
+      new0 = simde_mm_subs_epi16(a0, *m11p); // m00=-m11
+      new4 = simde_mm_adds_epi16(a0, *m11p); // m11
+      new1 = simde_mm_adds_epi16(a2, *m10p); // m10
+      new5 = simde_mm_subs_epi16(a2, *m10p); // m01=-m10
+      new2 = simde_mm_subs_epi16(a4, *m10p); // m01=-m10
+      new6 = simde_mm_adds_epi16(a4, *m10p); // m10
+      new3 = simde_mm_adds_epi16(a6, *m11p); // m11
+      new7 = simde_mm_subs_epi16(a6, *m11p); // m00=-m11
+      a0 = simde_mm_max_epi16(m_b0, new0);
+      a1 = simde_mm_max_epi16(m_b1, new1);
+      a2 = simde_mm_max_epi16(m_b2, new2);
+      a3 = simde_mm_max_epi16(m_b3, new3);
+      a4 = simde_mm_max_epi16(m_b4, new4);
+      a5 = simde_mm_max_epi16(m_b5, new5);
+      a6 = simde_mm_max_epi16(m_b6, new6);
+      a7 = simde_mm_max_epi16(m_b7, new7);
+      alpha_max = simde_mm_max_epi16(a0, a1);
+      alpha_max = simde_mm_max_epi16(alpha_max, a2);
+      alpha_max = simde_mm_max_epi16(alpha_max, a3);
+      alpha_max = simde_mm_max_epi16(alpha_max, a4);
+      alpha_max = simde_mm_max_epi16(alpha_max, a5);
+      alpha_max = simde_mm_max_epi16(alpha_max, a6);
+      alpha_max = simde_mm_max_epi16(alpha_max, a7);
       alpha_ptr+=8;
       m11p++;
       m10p++;
-      alpha_ptr[0] = _mm_subs_epi16(a0,alpha_max);
-      alpha_ptr[1] = _mm_subs_epi16(a1,alpha_max);
-      alpha_ptr[2] = _mm_subs_epi16(a2,alpha_max);
-      alpha_ptr[3] = _mm_subs_epi16(a3,alpha_max);
-      alpha_ptr[4] = _mm_subs_epi16(a4,alpha_max);
-      alpha_ptr[5] = _mm_subs_epi16(a5,alpha_max);
-      alpha_ptr[6] = _mm_subs_epi16(a6,alpha_max);
-      alpha_ptr[7] = _mm_subs_epi16(a7,alpha_max);
+      alpha_ptr[0] = simde_mm_subs_epi16(a0, alpha_max);
+      alpha_ptr[1] = simde_mm_subs_epi16(a1, alpha_max);
+      alpha_ptr[2] = simde_mm_subs_epi16(a2, alpha_max);
+      alpha_ptr[3] = simde_mm_subs_epi16(a3, alpha_max);
+      alpha_ptr[4] = simde_mm_subs_epi16(a4, alpha_max);
+      alpha_ptr[5] = simde_mm_subs_epi16(a5, alpha_max);
+      alpha_ptr[6] = simde_mm_subs_epi16(a6, alpha_max);
+      alpha_ptr[7] = simde_mm_subs_epi16(a7, alpha_max);
     }
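+    /* Each iteration above is one max-log-MAP butterfly step: candidate path
+       metrics are formed by adding/subtracting the branch metrics m11/m10
+       (with m00 = -m11 and m01 = -m10) and taking the per-state maximum,
+       roughly (scalar sketch, names illustrative):
+
+         new_alpha[s] = max(alpha[p] + gamma(p,s), alpha[q] + gamma(q,s));
+         new_alpha[s] -= max_over_states(new_alpha);  // renormalize
+
+       The renormalization keeps the saturating adds/subs_epi16 arithmetic
+       away from its 16-bit limits. */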
 
     /*
@@ -484,46 +484,46 @@ void compute_alpha(llr_t *alpha,llr_t *beta,llr_t *m_11,llr_t *m_10,unsigned sho
     k+=8){
 
 
-      //      m11_0=((__m128i*)m_11)[k];
-      //      m10_0=((__m128i*)m_10)[k];
+      //      m11_0=((simde__m128i*)m_11)[k];
+      //      m10_0=((simde__m128i*)m_10)[k];
 
       // 0/7
-      a1=_mm_load_si128(&alpha_ptr[1]);
-      a3=_mm_load_si128(&alpha_ptr[3]);
-      a5=_mm_load_si128(&alpha_ptr[5]);
-      a7=_mm_load_si128(&alpha_ptr[7]);
-
-      m_b0 = _mm_adds_epi16(a1,*m11p);  // m11
-      m_b4 = _mm_subs_epi16(a1,*m11p);  // m00=-m11
-      m_b1 = _mm_subs_epi16(a3,*m10p);  // m01=-m10
-      m_b5 = _mm_adds_epi16(a3,*m10p);  // m10
-      m_b2 = _mm_adds_epi16(a5,*m10p);  // m10
-      m_b6 = _mm_subs_epi16(a5,*m10p);  // m01=-m10
-      m_b3 = _mm_subs_epi16(a7,*m11p);  // m00=-m11
-      m_b7 = _mm_adds_epi16(a7,*m11p);  // m11
-
-      a0=_mm_load_si128(&alpha_ptr[0]);
-      a2=_mm_load_si128(&alpha_ptr[2]);
-      a4=_mm_load_si128(&alpha_ptr[4]);
-      a6=_mm_load_si128(&alpha_ptr[6]);
-
-      new0 = _mm_subs_epi16(a0,*m11p);  // m00=-m11
-      new4 = _mm_adds_epi16(a0,*m11p);  // m11
-      new1 = _mm_adds_epi16(a2,*m10p);  // m10
-      new5 = _mm_subs_epi16(a2,*m10p);  // m01=-m10
-      new2 = _mm_subs_epi16(a4,*m10p);  // m01=-m10
-      new6 = _mm_adds_epi16(a4,*m10p);  // m10
-      new3 = _mm_adds_epi16(a6,*m11p);  // m11
-      new7 = _mm_subs_epi16(a6,*m11p);  // m00=-m11
-
-      a0 = _mm_max_epi16(m_b0,new0);
-      a1 = _mm_max_epi16(m_b1,new1);
-      a2 = _mm_max_epi16(m_b2,new2);
-      a3 = _mm_max_epi16(m_b3,new3);
-      a4 = _mm_max_epi16(m_b4,new4);
-      a5 = _mm_max_epi16(m_b5,new5);
-      a6 = _mm_max_epi16(m_b6,new6);
-      a7 = _mm_max_epi16(m_b7,new7);
+      a1=simde_mm_load_si128(&alpha_ptr[1]);
+      a3=simde_mm_load_si128(&alpha_ptr[3]);
+      a5=simde_mm_load_si128(&alpha_ptr[5]);
+      a7=simde_mm_load_si128(&alpha_ptr[7]);
+
+      m_b0 = simde_mm_adds_epi16(a1,*m11p);  // m11
+      m_b4 = simde_mm_subs_epi16(a1,*m11p);  // m00=-m11
+      m_b1 = simde_mm_subs_epi16(a3,*m10p);  // m01=-m10
+      m_b5 = simde_mm_adds_epi16(a3,*m10p);  // m10
+      m_b2 = simde_mm_adds_epi16(a5,*m10p);  // m10
+      m_b6 = simde_mm_subs_epi16(a5,*m10p);  // m01=-m10
+      m_b3 = simde_mm_subs_epi16(a7,*m11p);  // m00=-m11
+      m_b7 = simde_mm_adds_epi16(a7,*m11p);  // m11
+
+      a0=simde_mm_load_si128(&alpha_ptr[0]);
+      a2=simde_mm_load_si128(&alpha_ptr[2]);
+      a4=simde_mm_load_si128(&alpha_ptr[4]);
+      a6=simde_mm_load_si128(&alpha_ptr[6]);
+
+      new0 = simde_mm_subs_epi16(a0,*m11p);  // m00=-m11
+      new4 = simde_mm_adds_epi16(a0,*m11p);  // m11
+      new1 = simde_mm_adds_epi16(a2,*m10p);  // m10
+      new5 = simde_mm_subs_epi16(a2,*m10p);  // m01=-m10
+      new2 = simde_mm_subs_epi16(a4,*m10p);  // m01=-m10
+      new6 = simde_mm_adds_epi16(a4,*m10p);  // m10
+      new3 = simde_mm_adds_epi16(a6,*m11p);  // m11
+      new7 = simde_mm_subs_epi16(a6,*m11p);  // m00=-m11
+
+      a0 = simde_mm_max_epi16(m_b0,new0);
+      a1 = simde_mm_max_epi16(m_b1,new1);
+      a2 = simde_mm_max_epi16(m_b2,new2);
+      a3 = simde_mm_max_epi16(m_b3,new3);
+      a4 = simde_mm_max_epi16(m_b4,new4);
+      a5 = simde_mm_max_epi16(m_b5,new5);
+      a6 = simde_mm_max_epi16(m_b6,new6);
+      a7 = simde_mm_max_epi16(m_b7,new7);
 
       alpha_ptr += 8;
       m11p++;
@@ -538,42 +538,42 @@ void compute_alpha(llr_t *alpha,llr_t *beta,llr_t *m_11,llr_t *m_10,unsigned sho
       alpha_ptr[7] = a7;
 
       // 1/7
-      a1=_mm_load_si128(&alpha_ptr[1]);
-      a3=_mm_load_si128(&alpha_ptr[3]);
-      a5=_mm_load_si128(&alpha_ptr[5]);
-      a7=_mm_load_si128(&alpha_ptr[7]);
-
-      m_b0 = _mm_adds_epi16(a1,*m11p);  // m11
-      m_b4 = _mm_subs_epi16(a1,*m11p);  // m00=-m11
-      m_b1 = _mm_subs_epi16(a3,*m10p);  // m01=-m10
-      m_b5 = _mm_adds_epi16(a3,*m10p);  // m10
-      m_b2 = _mm_adds_epi16(a5,*m10p);  // m10
-      m_b6 = _mm_subs_epi16(a5,*m10p);  // m01=-m10
-      m_b3 = _mm_subs_epi16(a7,*m11p);  // m00=-m11
-      m_b7 = _mm_adds_epi16(a7,*m11p);  // m11
-
-      a0=_mm_load_si128(&alpha_ptr[0]);
-      a2=_mm_load_si128(&alpha_ptr[2]);
-      a4=_mm_load_si128(&alpha_ptr[4]);
-      a6=_mm_load_si128(&alpha_ptr[6]);
-
-      new0 = _mm_subs_epi16(a0,*m11p);  // m00=-m11
-      new4 = _mm_adds_epi16(a0,*m11p);  // m11
-      new1 = _mm_adds_epi16(a2,*m10p);  // m10
-      new5 = _mm_subs_epi16(a2,*m10p);  // m01=-m10
-      new2 = _mm_subs_epi16(a4,*m10p);  // m01=-m10
-      new6 = _mm_adds_epi16(a4,*m10p);  // m10
-      new3 = _mm_adds_epi16(a6,*m11p);  // m11
-      new7 = _mm_subs_epi16(a6,*m11p);  // m00=-m11
-
-      a0 = _mm_max_epi16(m_b0,new0);
-      a1 = _mm_max_epi16(m_b1,new1);
-      a2 = _mm_max_epi16(m_b2,new2);
-      a3 = _mm_max_epi16(m_b3,new3);
-      a4 = _mm_max_epi16(m_b4,new4);
-      a5 = _mm_max_epi16(m_b5,new5);
-      a6 = _mm_max_epi16(m_b6,new6);
-      a7 = _mm_max_epi16(m_b7,new7);
+      a1=simde_mm_load_si128(&alpha_ptr[1]);
+      a3=simde_mm_load_si128(&alpha_ptr[3]);
+      a5=simde_mm_load_si128(&alpha_ptr[5]);
+      a7=simde_mm_load_si128(&alpha_ptr[7]);
+
+      m_b0 = simde_mm_adds_epi16(a1,*m11p);  // m11
+      m_b4 = simde_mm_subs_epi16(a1,*m11p);  // m00=-m11
+      m_b1 = simde_mm_subs_epi16(a3,*m10p);  // m01=-m10
+      m_b5 = simde_mm_adds_epi16(a3,*m10p);  // m10
+      m_b2 = simde_mm_adds_epi16(a5,*m10p);  // m10
+      m_b6 = simde_mm_subs_epi16(a5,*m10p);  // m01=-m10
+      m_b3 = simde_mm_subs_epi16(a7,*m11p);  // m00=-m11
+      m_b7 = simde_mm_adds_epi16(a7,*m11p);  // m11
+
+      a0=simde_mm_load_si128(&alpha_ptr[0]);
+      a2=simde_mm_load_si128(&alpha_ptr[2]);
+      a4=simde_mm_load_si128(&alpha_ptr[4]);
+      a6=simde_mm_load_si128(&alpha_ptr[6]);
+
+      new0 = simde_mm_subs_epi16(a0,*m11p);  // m00=-m11
+      new4 = simde_mm_adds_epi16(a0,*m11p);  // m11
+      new1 = simde_mm_adds_epi16(a2,*m10p);  // m10
+      new5 = simde_mm_subs_epi16(a2,*m10p);  // m01=-m10
+      new2 = simde_mm_subs_epi16(a4,*m10p);  // m01=-m10
+      new6 = simde_mm_adds_epi16(a4,*m10p);  // m10
+      new3 = simde_mm_adds_epi16(a6,*m11p);  // m11
+      new7 = simde_mm_subs_epi16(a6,*m11p);  // m00=-m11
+
+      a0 = simde_mm_max_epi16(m_b0,new0);
+      a1 = simde_mm_max_epi16(m_b1,new1);
+      a2 = simde_mm_max_epi16(m_b2,new2);
+      a3 = simde_mm_max_epi16(m_b3,new3);
+      a4 = simde_mm_max_epi16(m_b4,new4);
+      a5 = simde_mm_max_epi16(m_b5,new5);
+      a6 = simde_mm_max_epi16(m_b6,new6);
+      a7 = simde_mm_max_epi16(m_b7,new7);
 
       alpha_ptr += 8;
       m11p++;
@@ -588,42 +588,42 @@ void compute_alpha(llr_t *alpha,llr_t *beta,llr_t *m_11,llr_t *m_10,unsigned sho
       alpha_ptr[7] = a7;
 
       // 2/7
-      a1=_mm_load_si128(&alpha_ptr[1]);
-      a3=_mm_load_si128(&alpha_ptr[3]);
-      a5=_mm_load_si128(&alpha_ptr[5]);
-      a7=_mm_load_si128(&alpha_ptr[7]);
-
-      m_b0 = _mm_adds_epi16(a1,*m11p);  // m11
-      m_b4 = _mm_subs_epi16(a1,*m11p);  // m00=-m11
-      m_b1 = _mm_subs_epi16(a3,*m10p);  // m01=-m10
-      m_b5 = _mm_adds_epi16(a3,*m10p);  // m10
-      m_b2 = _mm_adds_epi16(a5,*m10p);  // m10
-      m_b6 = _mm_subs_epi16(a5,*m10p);  // m01=-m10
-      m_b3 = _mm_subs_epi16(a7,*m11p);  // m00=-m11
-      m_b7 = _mm_adds_epi16(a7,*m11p);  // m11
-
-      a0=_mm_load_si128(&alpha_ptr[0]);
-      a2=_mm_load_si128(&alpha_ptr[2]);
-      a4=_mm_load_si128(&alpha_ptr[4]);
-      a6=_mm_load_si128(&alpha_ptr[6]);
-
-      new0 = _mm_subs_epi16(a0,*m11p);  // m00=-m11
-      new4 = _mm_adds_epi16(a0,*m11p);  // m11
-      new1 = _mm_adds_epi16(a2,*m10p);  // m10
-      new5 = _mm_subs_epi16(a2,*m10p);  // m01=-m10
-      new2 = _mm_subs_epi16(a4,*m10p);  // m01=-m10
-      new6 = _mm_adds_epi16(a4,*m10p);  // m10
-      new3 = _mm_adds_epi16(a6,*m11p);  // m11
-      new7 = _mm_subs_epi16(a6,*m11p);  // m00=-m11
-
-      a0 = _mm_max_epi16(m_b0,new0);
-      a1 = _mm_max_epi16(m_b1,new1);
-      a2 = _mm_max_epi16(m_b2,new2);
-      a3 = _mm_max_epi16(m_b3,new3);
-      a4 = _mm_max_epi16(m_b4,new4);
-      a5 = _mm_max_epi16(m_b5,new5);
-      a6 = _mm_max_epi16(m_b6,new6);
-      a7 = _mm_max_epi16(m_b7,new7);
+      a1=simde_mm_load_si128(&alpha_ptr[1]);
+      a3=simde_mm_load_si128(&alpha_ptr[3]);
+      a5=simde_mm_load_si128(&alpha_ptr[5]);
+      a7=simde_mm_load_si128(&alpha_ptr[7]);
+
+      m_b0 = simde_mm_adds_epi16(a1,*m11p);  // m11
+      m_b4 = simde_mm_subs_epi16(a1,*m11p);  // m00=-m11
+      m_b1 = simde_mm_subs_epi16(a3,*m10p);  // m01=-m10
+      m_b5 = simde_mm_adds_epi16(a3,*m10p);  // m10
+      m_b2 = simde_mm_adds_epi16(a5,*m10p);  // m10
+      m_b6 = simde_mm_subs_epi16(a5,*m10p);  // m01=-m10
+      m_b3 = simde_mm_subs_epi16(a7,*m11p);  // m00=-m11
+      m_b7 = simde_mm_adds_epi16(a7,*m11p);  // m11
+
+      a0=simde_mm_load_si128(&alpha_ptr[0]);
+      a2=simde_mm_load_si128(&alpha_ptr[2]);
+      a4=simde_mm_load_si128(&alpha_ptr[4]);
+      a6=simde_mm_load_si128(&alpha_ptr[6]);
+
+      new0 = simde_mm_subs_epi16(a0,*m11p);  // m00=-m11
+      new4 = simde_mm_adds_epi16(a0,*m11p);  // m11
+      new1 = simde_mm_adds_epi16(a2,*m10p);  // m10
+      new5 = simde_mm_subs_epi16(a2,*m10p);  // m01=-m10
+      new2 = simde_mm_subs_epi16(a4,*m10p);  // m01=-m10
+      new6 = simde_mm_adds_epi16(a4,*m10p);  // m10
+      new3 = simde_mm_adds_epi16(a6,*m11p);  // m11
+      new7 = simde_mm_subs_epi16(a6,*m11p);  // m00=-m11
+
+      a0 = simde_mm_max_epi16(m_b0,new0);
+      a1 = simde_mm_max_epi16(m_b1,new1);
+      a2 = simde_mm_max_epi16(m_b2,new2);
+      a3 = simde_mm_max_epi16(m_b3,new3);
+      a4 = simde_mm_max_epi16(m_b4,new4);
+      a5 = simde_mm_max_epi16(m_b5,new5);
+      a6 = simde_mm_max_epi16(m_b6,new6);
+      a7 = simde_mm_max_epi16(m_b7,new7);
 
       alpha_ptr += 8;
       m11p++;
@@ -638,42 +638,42 @@ void compute_alpha(llr_t *alpha,llr_t *beta,llr_t *m_11,llr_t *m_10,unsigned sho
       alpha_ptr[7] = a7;
 
       // 3/7
-      a1=_mm_load_si128(&alpha_ptr[1]);
-      a3=_mm_load_si128(&alpha_ptr[3]);
-      a5=_mm_load_si128(&alpha_ptr[5]);
-      a7=_mm_load_si128(&alpha_ptr[7]);
-
-      m_b0 = _mm_adds_epi16(a1,*m11p);  // m11
-      m_b4 = _mm_subs_epi16(a1,*m11p);  // m00=-m11
-      m_b1 = _mm_subs_epi16(a3,*m10p);  // m01=-m10
-      m_b5 = _mm_adds_epi16(a3,*m10p);  // m10
-      m_b2 = _mm_adds_epi16(a5,*m10p);  // m10
-      m_b6 = _mm_subs_epi16(a5,*m10p);  // m01=-m10
-      m_b3 = _mm_subs_epi16(a7,*m11p);  // m00=-m11
-      m_b7 = _mm_adds_epi16(a7,*m11p);  // m11
-
-      a0=_mm_load_si128(&alpha_ptr[0]);
-      a2=_mm_load_si128(&alpha_ptr[2]);
-      a4=_mm_load_si128(&alpha_ptr[4]);
-      a6=_mm_load_si128(&alpha_ptr[6]);
-
-      new0 = _mm_subs_epi16(a0,*m11p);  // m00=-m11
-      new4 = _mm_adds_epi16(a0,*m11p);  // m11
-      new1 = _mm_adds_epi16(a2,*m10p);  // m10
-      new5 = _mm_subs_epi16(a2,*m10p);  // m01=-m10
-      new2 = _mm_subs_epi16(a4,*m10p);  // m01=-m10
-      new6 = _mm_adds_epi16(a4,*m10p);  // m10
-      new3 = _mm_adds_epi16(a6,*m11p);  // m11
-      new7 = _mm_subs_epi16(a6,*m11p);  // m00=-m11
-
-      a0 = _mm_max_epi16(m_b0,new0);
-      a1 = _mm_max_epi16(m_b1,new1);
-      a2 = _mm_max_epi16(m_b2,new2);
-      a3 = _mm_max_epi16(m_b3,new3);
-      a4 = _mm_max_epi16(m_b4,new4);
-      a5 = _mm_max_epi16(m_b5,new5);
-      a6 = _mm_max_epi16(m_b6,new6);
-      a7 = _mm_max_epi16(m_b7,new7);
+      a1=simde_mm_load_si128(&alpha_ptr[1]);
+      a3=simde_mm_load_si128(&alpha_ptr[3]);
+      a5=simde_mm_load_si128(&alpha_ptr[5]);
+      a7=simde_mm_load_si128(&alpha_ptr[7]);
+
+      m_b0 = simde_mm_adds_epi16(a1,*m11p);  // m11
+      m_b4 = simde_mm_subs_epi16(a1,*m11p);  // m00=-m11
+      m_b1 = simde_mm_subs_epi16(a3,*m10p);  // m01=-m10
+      m_b5 = simde_mm_adds_epi16(a3,*m10p);  // m10
+      m_b2 = simde_mm_adds_epi16(a5,*m10p);  // m10
+      m_b6 = simde_mm_subs_epi16(a5,*m10p);  // m01=-m10
+      m_b3 = simde_mm_subs_epi16(a7,*m11p);  // m00=-m11
+      m_b7 = simde_mm_adds_epi16(a7,*m11p);  // m11
+
+      a0=simde_mm_load_si128(&alpha_ptr[0]);
+      a2=simde_mm_load_si128(&alpha_ptr[2]);
+      a4=simde_mm_load_si128(&alpha_ptr[4]);
+      a6=simde_mm_load_si128(&alpha_ptr[6]);
+
+      new0 = simde_mm_subs_epi16(a0,*m11p);  // m00=-m11
+      new4 = simde_mm_adds_epi16(a0,*m11p);  // m11
+      new1 = simde_mm_adds_epi16(a2,*m10p);  // m10
+      new5 = simde_mm_subs_epi16(a2,*m10p);  // m01=-m10
+      new2 = simde_mm_subs_epi16(a4,*m10p);  // m01=-m10
+      new6 = simde_mm_adds_epi16(a4,*m10p);  // m10
+      new3 = simde_mm_adds_epi16(a6,*m11p);  // m11
+      new7 = simde_mm_subs_epi16(a6,*m11p);  // m00=-m11
+
+      a0 = simde_mm_max_epi16(m_b0,new0);
+      a1 = simde_mm_max_epi16(m_b1,new1);
+      a2 = simde_mm_max_epi16(m_b2,new2);
+      a3 = simde_mm_max_epi16(m_b3,new3);
+      a4 = simde_mm_max_epi16(m_b4,new4);
+      a5 = simde_mm_max_epi16(m_b5,new5);
+      a6 = simde_mm_max_epi16(m_b6,new6);
+      a7 = simde_mm_max_epi16(m_b7,new7);
 
       alpha_ptr += 8;
       m11p++;
@@ -688,42 +688,42 @@ void compute_alpha(llr_t *alpha,llr_t *beta,llr_t *m_11,llr_t *m_10,unsigned sho
       alpha_ptr[7] = a7;
 
       // 4/7
-      a1=_mm_load_si128(&alpha_ptr[1]);
-      a3=_mm_load_si128(&alpha_ptr[3]);
-      a5=_mm_load_si128(&alpha_ptr[5]);
-      a7=_mm_load_si128(&alpha_ptr[7]);
-
-      m_b0 = _mm_adds_epi16(a1,*m11p);  // m11
-      m_b4 = _mm_subs_epi16(a1,*m11p);  // m00=-m11
-      m_b1 = _mm_subs_epi16(a3,*m10p);  // m01=-m10
-      m_b5 = _mm_adds_epi16(a3,*m10p);  // m10
-      m_b2 = _mm_adds_epi16(a5,*m10p);  // m10
-      m_b6 = _mm_subs_epi16(a5,*m10p);  // m01=-m10
-      m_b3 = _mm_subs_epi16(a7,*m11p);  // m00=-m11
-      m_b7 = _mm_adds_epi16(a7,*m11p);  // m11
-
-      a0=_mm_load_si128(&alpha_ptr[0]);
-      a2=_mm_load_si128(&alpha_ptr[2]);
-      a4=_mm_load_si128(&alpha_ptr[4]);
-      a6=_mm_load_si128(&alpha_ptr[6]);
-
-      new0 = _mm_subs_epi16(a0,*m11p);  // m00=-m11
-      new4 = _mm_adds_epi16(a0,*m11p);  // m11
-      new1 = _mm_adds_epi16(a2,*m10p);  // m10
-      new5 = _mm_subs_epi16(a2,*m10p);  // m01=-m10
-      new2 = _mm_subs_epi16(a4,*m10p);  // m01=-m10
-      new6 = _mm_adds_epi16(a4,*m10p);  // m10
-      new3 = _mm_adds_epi16(a6,*m11p);  // m11
-      new7 = _mm_subs_epi16(a6,*m11p);  // m00=-m11
-
-      a0 = _mm_max_epi16(m_b0,new0);
-      a1 = _mm_max_epi16(m_b1,new1);
-      a2 = _mm_max_epi16(m_b2,new2);
-      a3 = _mm_max_epi16(m_b3,new3);
-      a4 = _mm_max_epi16(m_b4,new4);
-      a5 = _mm_max_epi16(m_b5,new5);
-      a6 = _mm_max_epi16(m_b6,new6);
-      a7 = _mm_max_epi16(m_b7,new7);
+      a1=simde_mm_load_si128(&alpha_ptr[1]);
+      a3=simde_mm_load_si128(&alpha_ptr[3]);
+      a5=simde_mm_load_si128(&alpha_ptr[5]);
+      a7=simde_mm_load_si128(&alpha_ptr[7]);
+
+      m_b0 = simde_mm_adds_epi16(a1,*m11p);  // m11
+      m_b4 = simde_mm_subs_epi16(a1,*m11p);  // m00=-m11
+      m_b1 = simde_mm_subs_epi16(a3,*m10p);  // m01=-m10
+      m_b5 = simde_mm_adds_epi16(a3,*m10p);  // m10
+      m_b2 = simde_mm_adds_epi16(a5,*m10p);  // m10
+      m_b6 = simde_mm_subs_epi16(a5,*m10p);  // m01=-m10
+      m_b3 = simde_mm_subs_epi16(a7,*m11p);  // m00=-m11
+      m_b7 = simde_mm_adds_epi16(a7,*m11p);  // m11
+
+      a0=simde_mm_load_si128(&alpha_ptr[0]);
+      a2=simde_mm_load_si128(&alpha_ptr[2]);
+      a4=simde_mm_load_si128(&alpha_ptr[4]);
+      a6=simde_mm_load_si128(&alpha_ptr[6]);
+
+      new0 = simde_mm_subs_epi16(a0,*m11p);  // m00=-m11
+      new4 = simde_mm_adds_epi16(a0,*m11p);  // m11
+      new1 = simde_mm_adds_epi16(a2,*m10p);  // m10
+      new5 = simde_mm_subs_epi16(a2,*m10p);  // m01=-m10
+      new2 = simde_mm_subs_epi16(a4,*m10p);  // m01=-m10
+      new6 = simde_mm_adds_epi16(a4,*m10p);  // m10
+      new3 = simde_mm_adds_epi16(a6,*m11p);  // m11
+      new7 = simde_mm_subs_epi16(a6,*m11p);  // m00=-m11
+
+      a0 = simde_mm_max_epi16(m_b0,new0);
+      a1 = simde_mm_max_epi16(m_b1,new1);
+      a2 = simde_mm_max_epi16(m_b2,new2);
+      a3 = simde_mm_max_epi16(m_b3,new3);
+      a4 = simde_mm_max_epi16(m_b4,new4);
+      a5 = simde_mm_max_epi16(m_b5,new5);
+      a6 = simde_mm_max_epi16(m_b6,new6);
+      a7 = simde_mm_max_epi16(m_b7,new7);
 
       alpha_ptr += 8;
       m11p++;
@@ -738,42 +738,42 @@ void compute_alpha(llr_t *alpha,llr_t *beta,llr_t *m_11,llr_t *m_10,unsigned sho
       alpha_ptr[7] = a7;
 
       // 5/7
-      a1=_mm_load_si128(&alpha_ptr[1]);
-      a3=_mm_load_si128(&alpha_ptr[3]);
-      a5=_mm_load_si128(&alpha_ptr[5]);
-      a7=_mm_load_si128(&alpha_ptr[7]);
-
-      m_b0 = _mm_adds_epi16(a1,*m11p);  // m11
-      m_b4 = _mm_subs_epi16(a1,*m11p);  // m00=-m11
-      m_b1 = _mm_subs_epi16(a3,*m10p);  // m01=-m10
-      m_b5 = _mm_adds_epi16(a3,*m10p);  // m10
-      m_b2 = _mm_adds_epi16(a5,*m10p);  // m10
-      m_b6 = _mm_subs_epi16(a5,*m10p);  // m01=-m10
-      m_b3 = _mm_subs_epi16(a7,*m11p);  // m00=-m11
-      m_b7 = _mm_adds_epi16(a7,*m11p);  // m11
-
-      a0=_mm_load_si128(&alpha_ptr[0]);
-      a2=_mm_load_si128(&alpha_ptr[2]);
-      a4=_mm_load_si128(&alpha_ptr[4]);
-      a6=_mm_load_si128(&alpha_ptr[6]);
-
-      new0 = _mm_subs_epi16(a0,*m11p);  // m00=-m11
-      new4 = _mm_adds_epi16(a0,*m11p);  // m11
-      new1 = _mm_adds_epi16(a2,*m10p);  // m10
-      new5 = _mm_subs_epi16(a2,*m10p);  // m01=-m10
-      new2 = _mm_subs_epi16(a4,*m10p);  // m01=-m10
-      new6 = _mm_adds_epi16(a4,*m10p);  // m10
-      new3 = _mm_adds_epi16(a6,*m11p);  // m11
-      new7 = _mm_subs_epi16(a6,*m11p);  // m00=-m11
-
-      a0 = _mm_max_epi16(m_b0,new0);
-      a1 = _mm_max_epi16(m_b1,new1);
-      a2 = _mm_max_epi16(m_b2,new2);
-      a3 = _mm_max_epi16(m_b3,new3);
-      a4 = _mm_max_epi16(m_b4,new4);
-      a5 = _mm_max_epi16(m_b5,new5);
-      a6 = _mm_max_epi16(m_b6,new6);
-      a7 = _mm_max_epi16(m_b7,new7);
+      a1=simde_mm_load_si128(&alpha_ptr[1]);
+      a3=simde_mm_load_si128(&alpha_ptr[3]);
+      a5=simde_mm_load_si128(&alpha_ptr[5]);
+      a7=simde_mm_load_si128(&alpha_ptr[7]);
+
+      m_b0 = simde_mm_adds_epi16(a1,*m11p);  // m11
+      m_b4 = simde_mm_subs_epi16(a1,*m11p);  // m00=-m11
+      m_b1 = simde_mm_subs_epi16(a3,*m10p);  // m01=-m10
+      m_b5 = simde_mm_adds_epi16(a3,*m10p);  // m10
+      m_b2 = simde_mm_adds_epi16(a5,*m10p);  // m10
+      m_b6 = simde_mm_subs_epi16(a5,*m10p);  // m01=-m10
+      m_b3 = simde_mm_subs_epi16(a7,*m11p);  // m00=-m11
+      m_b7 = simde_mm_adds_epi16(a7,*m11p);  // m11
+
+      a0=simde_mm_load_si128(&alpha_ptr[0]);
+      a2=simde_mm_load_si128(&alpha_ptr[2]);
+      a4=simde_mm_load_si128(&alpha_ptr[4]);
+      a6=simde_mm_load_si128(&alpha_ptr[6]);
+
+      new0 = simde_mm_subs_epi16(a0,*m11p);  // m00=-m11
+      new4 = simde_mm_adds_epi16(a0,*m11p);  // m11
+      new1 = simde_mm_adds_epi16(a2,*m10p);  // m10
+      new5 = simde_mm_subs_epi16(a2,*m10p);  // m01=-m10
+      new2 = simde_mm_subs_epi16(a4,*m10p);  // m01=-m10
+      new6 = simde_mm_adds_epi16(a4,*m10p);  // m10
+      new3 = simde_mm_adds_epi16(a6,*m11p);  // m11
+      new7 = simde_mm_subs_epi16(a6,*m11p);  // m00=-m11
+
+      a0 = simde_mm_max_epi16(m_b0,new0);
+      a1 = simde_mm_max_epi16(m_b1,new1);
+      a2 = simde_mm_max_epi16(m_b2,new2);
+      a3 = simde_mm_max_epi16(m_b3,new3);
+      a4 = simde_mm_max_epi16(m_b4,new4);
+      a5 = simde_mm_max_epi16(m_b5,new5);
+      a6 = simde_mm_max_epi16(m_b6,new6);
+      a7 = simde_mm_max_epi16(m_b7,new7);
 
       alpha_ptr += 8;
       m11p++;
@@ -788,42 +788,42 @@ void compute_alpha(llr_t *alpha,llr_t *beta,llr_t *m_11,llr_t *m_10,unsigned sho
       alpha_ptr[7] = a7;
 
       // 6/7
-      a1=_mm_load_si128(&alpha_ptr[1]);
-      a3=_mm_load_si128(&alpha_ptr[3]);
-      a5=_mm_load_si128(&alpha_ptr[5]);
-      a7=_mm_load_si128(&alpha_ptr[7]);
-
-      m_b0 = _mm_adds_epi16(a1,*m11p);  // m11
-      m_b4 = _mm_subs_epi16(a1,*m11p);  // m00=-m11
-      m_b1 = _mm_subs_epi16(a3,*m10p);  // m01=-m10
-      m_b5 = _mm_adds_epi16(a3,*m10p);  // m10
-      m_b2 = _mm_adds_epi16(a5,*m10p);  // m10
-      m_b6 = _mm_subs_epi16(a5,*m10p);  // m01=-m10
-      m_b3 = _mm_subs_epi16(a7,*m11p);  // m00=-m11
-      m_b7 = _mm_adds_epi16(a7,*m11p);  // m11
-
-      a0=_mm_load_si128(&alpha_ptr[0]);
-      a2=_mm_load_si128(&alpha_ptr[2]);
-      a4=_mm_load_si128(&alpha_ptr[4]);
-      a6=_mm_load_si128(&alpha_ptr[6]);
-
-      new0 = _mm_subs_epi16(a0,*m11p);  // m00=-m11
-      new4 = _mm_adds_epi16(a0,*m11p);  // m11
-      new1 = _mm_adds_epi16(a2,*m10p);  // m10
-      new5 = _mm_subs_epi16(a2,*m10p);  // m01=-m10
-      new2 = _mm_subs_epi16(a4,*m10p);  // m01=-m10
-      new6 = _mm_adds_epi16(a4,*m10p);  // m10
-      new3 = _mm_adds_epi16(a6,*m11p);  // m11
-      new7 = _mm_subs_epi16(a6,*m11p);  // m00=-m11
-
-      a0 = _mm_max_epi16(m_b0,new0);
-      a1 = _mm_max_epi16(m_b1,new1);
-      a2 = _mm_max_epi16(m_b2,new2);
-      a3 = _mm_max_epi16(m_b3,new3);
-      a4 = _mm_max_epi16(m_b4,new4);
-      a5 = _mm_max_epi16(m_b5,new5);
-      a6 = _mm_max_epi16(m_b6,new6);
-      a7 = _mm_max_epi16(m_b7,new7);
+      a1=simde_mm_load_si128(&alpha_ptr[1]);
+      a3=simde_mm_load_si128(&alpha_ptr[3]);
+      a5=simde_mm_load_si128(&alpha_ptr[5]);
+      a7=simde_mm_load_si128(&alpha_ptr[7]);
+
+      m_b0 = simde_mm_adds_epi16(a1,*m11p);  // m11
+      m_b4 = simde_mm_subs_epi16(a1,*m11p);  // m00=-m11
+      m_b1 = simde_mm_subs_epi16(a3,*m10p);  // m01=-m10
+      m_b5 = simde_mm_adds_epi16(a3,*m10p);  // m10
+      m_b2 = simde_mm_adds_epi16(a5,*m10p);  // m10
+      m_b6 = simde_mm_subs_epi16(a5,*m10p);  // m01=-m10
+      m_b3 = simde_mm_subs_epi16(a7,*m11p);  // m00=-m11
+      m_b7 = simde_mm_adds_epi16(a7,*m11p);  // m11
+
+      a0=simde_mm_load_si128(&alpha_ptr[0]);
+      a2=simde_mm_load_si128(&alpha_ptr[2]);
+      a4=simde_mm_load_si128(&alpha_ptr[4]);
+      a6=simde_mm_load_si128(&alpha_ptr[6]);
+
+      new0 = simde_mm_subs_epi16(a0,*m11p);  // m00=-m11
+      new4 = simde_mm_adds_epi16(a0,*m11p);  // m11
+      new1 = simde_mm_adds_epi16(a2,*m10p);  // m10
+      new5 = simde_mm_subs_epi16(a2,*m10p);  // m01=-m10
+      new2 = simde_mm_subs_epi16(a4,*m10p);  // m01=-m10
+      new6 = simde_mm_adds_epi16(a4,*m10p);  // m10
+      new3 = simde_mm_adds_epi16(a6,*m11p);  // m11
+      new7 = simde_mm_subs_epi16(a6,*m11p);  // m00=-m11
+
+      a0 = simde_mm_max_epi16(m_b0,new0);
+      a1 = simde_mm_max_epi16(m_b1,new1);
+      a2 = simde_mm_max_epi16(m_b2,new2);
+      a3 = simde_mm_max_epi16(m_b3,new3);
+      a4 = simde_mm_max_epi16(m_b4,new4);
+      a5 = simde_mm_max_epi16(m_b5,new5);
+      a6 = simde_mm_max_epi16(m_b6,new6);
+      a7 = simde_mm_max_epi16(m_b7,new7);
 
       alpha_ptr += 8;
       m11p++;
@@ -838,61 +838,61 @@ void compute_alpha(llr_t *alpha,llr_t *beta,llr_t *m_11,llr_t *m_10,unsigned sho
       alpha_ptr[7] = a7;
 
     // 7/7
-      a1=_mm_load_si128(&alpha_ptr[1]);
-      a3=_mm_load_si128(&alpha_ptr[3]);
-      a5=_mm_load_si128(&alpha_ptr[5]);
-      a7=_mm_load_si128(&alpha_ptr[7]);
-
-      m_b0 = _mm_adds_epi16(a1,*m11p);  // m11
-      m_b4 = _mm_subs_epi16(a1,*m11p);  // m00=-m11
-      m_b1 = _mm_subs_epi16(a3,*m10p);  // m01=-m10
-      m_b5 = _mm_adds_epi16(a3,*m10p);  // m10
-      m_b2 = _mm_adds_epi16(a5,*m10p);  // m10
-      m_b6 = _mm_subs_epi16(a5,*m10p);  // m01=-m10
-      m_b3 = _mm_subs_epi16(a7,*m11p);  // m00=-m11
-      m_b7 = _mm_adds_epi16(a7,*m11p);  // m11
-
-      a0=_mm_load_si128(&alpha_ptr[0]);
-      a2=_mm_load_si128(&alpha_ptr[2]);
-      a4=_mm_load_si128(&alpha_ptr[4]);
-      a6=_mm_load_si128(&alpha_ptr[6]);
-
-      new0 = _mm_subs_epi16(a0,*m11p);  // m00=-m11
-      new4 = _mm_adds_epi16(a0,*m11p);  // m11
-      new1 = _mm_adds_epi16(a2,*m10p);  // m10
-      new5 = _mm_subs_epi16(a2,*m10p);  // m01=-m10
-      new2 = _mm_subs_epi16(a4,*m10p);  // m01=-m10
-      new6 = _mm_adds_epi16(a4,*m10p);  // m10
-      new3 = _mm_adds_epi16(a6,*m11p);  // m11
-      new7 = _mm_subs_epi16(a6,*m11p);  // m00=-m11
-
-      a0 = _mm_max_epi16(m_b0,new0);
-      a1 = _mm_max_epi16(m_b1,new1);
-      a2 = _mm_max_epi16(m_b2,new2);
-      a3 = _mm_max_epi16(m_b3,new3);
-      a4 = _mm_max_epi16(m_b4,new4);
-      a5 = _mm_max_epi16(m_b5,new5);
-      a6 = _mm_max_epi16(m_b6,new6);
-      a7 = _mm_max_epi16(m_b7,new7);
+      a1=simde_mm_load_si128(&alpha_ptr[1]);
+      a3=simde_mm_load_si128(&alpha_ptr[3]);
+      a5=simde_mm_load_si128(&alpha_ptr[5]);
+      a7=simde_mm_load_si128(&alpha_ptr[7]);
+
+      m_b0 = simde_mm_adds_epi16(a1,*m11p);  // m11
+      m_b4 = simde_mm_subs_epi16(a1,*m11p);  // m00=-m11
+      m_b1 = simde_mm_subs_epi16(a3,*m10p);  // m01=-m10
+      m_b5 = simde_mm_adds_epi16(a3,*m10p);  // m10
+      m_b2 = simde_mm_adds_epi16(a5,*m10p);  // m10
+      m_b6 = simde_mm_subs_epi16(a5,*m10p);  // m01=-m10
+      m_b3 = simde_mm_subs_epi16(a7,*m11p);  // m00=-m11
+      m_b7 = simde_mm_adds_epi16(a7,*m11p);  // m11
+
+      a0=simde_mm_load_si128(&alpha_ptr[0]);
+      a2=simde_mm_load_si128(&alpha_ptr[2]);
+      a4=simde_mm_load_si128(&alpha_ptr[4]);
+      a6=simde_mm_load_si128(&alpha_ptr[6]);
+
+      new0 = simde_mm_subs_epi16(a0,*m11p);  // m00=-m11
+      new4 = simde_mm_adds_epi16(a0,*m11p);  // m11
+      new1 = simde_mm_adds_epi16(a2,*m10p);  // m10
+      new5 = simde_mm_subs_epi16(a2,*m10p);  // m01=-m10
+      new2 = simde_mm_subs_epi16(a4,*m10p);  // m01=-m10
+      new6 = simde_mm_adds_epi16(a4,*m10p);  // m10
+      new3 = simde_mm_adds_epi16(a6,*m11p);  // m11
+      new7 = simde_mm_subs_epi16(a6,*m11p);  // m00=-m11
+
+      a0 = simde_mm_max_epi16(m_b0,new0);
+      a1 = simde_mm_max_epi16(m_b1,new1);
+      a2 = simde_mm_max_epi16(m_b2,new2);
+      a3 = simde_mm_max_epi16(m_b3,new3);
+      a4 = simde_mm_max_epi16(m_b4,new4);
+      a5 = simde_mm_max_epi16(m_b5,new5);
+      a6 = simde_mm_max_epi16(m_b6,new6);
+      a7 = simde_mm_max_epi16(m_b7,new7);
     // compute and subtract maxima
-      alpha_max = _mm_max_epi16(a0,a1);
-      alpha_max = _mm_max_epi16(alpha_max,a2);
-      alpha_max = _mm_max_epi16(alpha_max,a3);
-      alpha_max = _mm_max_epi16(alpha_max,a4);
-      alpha_max = _mm_max_epi16(alpha_max,a5);
-      alpha_max = _mm_max_epi16(alpha_max,a6);
-      alpha_max = _mm_max_epi16(alpha_max,a7);
+      alpha_max = simde_mm_max_epi16(a0,a1);
+      alpha_max = simde_mm_max_epi16(alpha_max,a2);
+      alpha_max = simde_mm_max_epi16(alpha_max,a3);
+      alpha_max = simde_mm_max_epi16(alpha_max,a4);
+      alpha_max = simde_mm_max_epi16(alpha_max,a5);
+      alpha_max = simde_mm_max_epi16(alpha_max,a6);
+      alpha_max = simde_mm_max_epi16(alpha_max,a7);
       alpha_ptr += 8;
       m11p++;
       m10p++;
-      alpha_ptr[0] = _mm_subs_epi16(a0,alpha_max);
-      alpha_ptr[1] = _mm_subs_epi16(a1,alpha_max);
-      alpha_ptr[2] = _mm_subs_epi16(a2,alpha_max);
-      alpha_ptr[3] = _mm_subs_epi16(a3,alpha_max);
-      alpha_ptr[4] = _mm_subs_epi16(a4,alpha_max);
-      alpha_ptr[5] = _mm_subs_epi16(a5,alpha_max);
-      alpha_ptr[6] = _mm_subs_epi16(a6,alpha_max);
-      alpha_ptr[7] = _mm_subs_epi16(a7,alpha_max);
+      alpha_ptr[0] = simde_mm_subs_epi16(a0,alpha_max);
+      alpha_ptr[1] = simde_mm_subs_epi16(a1,alpha_max);
+      alpha_ptr[2] = simde_mm_subs_epi16(a2,alpha_max);
+      alpha_ptr[3] = simde_mm_subs_epi16(a3,alpha_max);
+      alpha_ptr[4] = simde_mm_subs_epi16(a4,alpha_max);
+      alpha_ptr[5] = simde_mm_subs_epi16(a5,alpha_max);
+      alpha_ptr[6] = simde_mm_subs_epi16(a6,alpha_max);
+      alpha_ptr[7] = simde_mm_subs_epi16(a7,alpha_max);
     }
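+    /* The 8-way unrolled loop above skips the renormalization in steps 0/7
+       through 6/7 and computes/subtracts the maxima only at step 7/7, i.e.
+       once every 8 trellis steps, trading metric headroom for speed. */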
     // fill in remainder
     for (k=0;
@@ -900,42 +900,42 @@ void compute_alpha(llr_t *alpha,llr_t *beta,llr_t *m_11,llr_t *m_10,unsigned sho
     k++){
 
 
-      a1=_mm_load_si128(&alpha_ptr[1]);
-      a3=_mm_load_si128(&alpha_ptr[3]);
-      a5=_mm_load_si128(&alpha_ptr[5]);
-      a7=_mm_load_si128(&alpha_ptr[7]);
-
-      m_b0 = _mm_adds_epi16(a1,*m11p);  // m11
-      m_b4 = _mm_subs_epi16(a1,*m11p);  // m00=-m11
-      m_b1 = _mm_subs_epi16(a3,*m10p);  // m01=-m10
-      m_b5 = _mm_adds_epi16(a3,*m10p);  // m10
-      m_b2 = _mm_adds_epi16(a5,*m10p);  // m10
-      m_b6 = _mm_subs_epi16(a5,*m10p);  // m01=-m10
-      m_b3 = _mm_subs_epi16(a7,*m11p);  // m00=-m11
-      m_b7 = _mm_adds_epi16(a7,*m11p);  // m11
-
-      a0=_mm_load_si128(&alpha_ptr[0]);
-      a2=_mm_load_si128(&alpha_ptr[2]);
-      a4=_mm_load_si128(&alpha_ptr[4]);
-      a6=_mm_load_si128(&alpha_ptr[6]);
-
-      new0 = _mm_subs_epi16(a0,*m11p);  // m00=-m11
-      new4 = _mm_adds_epi16(a0,*m11p);  // m11
-      new1 = _mm_adds_epi16(a2,*m10p);  // m10
-      new5 = _mm_subs_epi16(a2,*m10p);  // m01=-m10
-      new2 = _mm_subs_epi16(a4,*m10p);  // m01=-m10
-      new6 = _mm_adds_epi16(a4,*m10p);  // m10
-      new3 = _mm_adds_epi16(a6,*m11p);  // m11
-      new7 = _mm_subs_epi16(a6,*m11p);  // m00=-m11
-
-      a0 = _mm_max_epi16(m_b0,new0);
-      a1 = _mm_max_epi16(m_b1,new1);
-      a2 = _mm_max_epi16(m_b2,new2);
-      a3 = _mm_max_epi16(m_b3,new3);
-      a4 = _mm_max_epi16(m_b4,new4);
-      a5 = _mm_max_epi16(m_b5,new5);
-      a6 = _mm_max_epi16(m_b6,new6);
-      a7 = _mm_max_epi16(m_b7,new7);
+      a1=simde_mm_load_si128(&alpha_ptr[1]);
+      a3=simde_mm_load_si128(&alpha_ptr[3]);
+      a5=simde_mm_load_si128(&alpha_ptr[5]);
+      a7=simde_mm_load_si128(&alpha_ptr[7]);
+
+      m_b0 = simde_mm_adds_epi16(a1,*m11p);  // m11
+      m_b4 = simde_mm_subs_epi16(a1,*m11p);  // m00=-m11
+      m_b1 = simde_mm_subs_epi16(a3,*m10p);  // m01=-m10
+      m_b5 = simde_mm_adds_epi16(a3,*m10p);  // m10
+      m_b2 = simde_mm_adds_epi16(a5,*m10p);  // m10
+      m_b6 = simde_mm_subs_epi16(a5,*m10p);  // m01=-m10
+      m_b3 = simde_mm_subs_epi16(a7,*m11p);  // m00=-m11
+      m_b7 = simde_mm_adds_epi16(a7,*m11p);  // m11
+
+      a0=simde_mm_load_si128(&alpha_ptr[0]);
+      a2=simde_mm_load_si128(&alpha_ptr[2]);
+      a4=simde_mm_load_si128(&alpha_ptr[4]);
+      a6=simde_mm_load_si128(&alpha_ptr[6]);
+
+      new0 = simde_mm_subs_epi16(a0,*m11p);  // m00=-m11
+      new4 = simde_mm_adds_epi16(a0,*m11p);  // m11
+      new1 = simde_mm_adds_epi16(a2,*m10p);  // m10
+      new5 = simde_mm_subs_epi16(a2,*m10p);  // m01=-m10
+      new2 = simde_mm_subs_epi16(a4,*m10p);  // m01=-m10
+      new6 = simde_mm_adds_epi16(a4,*m10p);  // m10
+      new3 = simde_mm_adds_epi16(a6,*m11p);  // m11
+      new7 = simde_mm_subs_epi16(a6,*m11p);  // m00=-m11
+
+      a0 = simde_mm_max_epi16(m_b0,new0);
+      a1 = simde_mm_max_epi16(m_b1,new1);
+      a2 = simde_mm_max_epi16(m_b2,new2);
+      a3 = simde_mm_max_epi16(m_b3,new3);
+      a4 = simde_mm_max_epi16(m_b4,new4);
+      a5 = simde_mm_max_epi16(m_b5,new5);
+      a6 = simde_mm_max_epi16(m_b6,new6);
+      a7 = simde_mm_max_epi16(m_b7,new7);
 
       alpha_ptr += 8;
       m11p++;
@@ -953,23 +953,143 @@ void compute_alpha(llr_t *alpha,llr_t *beta,llr_t *m_11,llr_t *m_10,unsigned sho
 #else
 
     if (rerun_flag == 0) {
-      alpha128[0] = _mm_set_epi8(-MAX/2,-MAX/2,-MAX/2,-MAX/2,-MAX/2,-MAX/2,-MAX/2,-MAX/2,-MAX/2,-MAX/2,-MAX/2,-MAX/2,-MAX/2,-MAX/2,-MAX/2,0);
-      alpha128[1] = _mm_set_epi8(-MAX/2,-MAX/2,-MAX/2,-MAX/2,-MAX/2,-MAX/2,-MAX/2,-MAX/2,-MAX/2,-MAX/2,-MAX/2,-MAX/2,-MAX/2,-MAX/2,-MAX/2,-MAX/2);
-      alpha128[2] = _mm_set_epi8(-MAX/2,-MAX/2,-MAX/2,-MAX/2,-MAX/2,-MAX/2,-MAX/2,-MAX/2,-MAX/2,-MAX/2,-MAX/2,-MAX/2,-MAX/2,-MAX/2,-MAX/2,-MAX/2);
-      alpha128[3] = _mm_set_epi8(-MAX/2,-MAX/2,-MAX/2,-MAX/2,-MAX/2,-MAX/2,-MAX/2,-MAX/2,-MAX/2,-MAX/2,-MAX/2,-MAX/2,-MAX/2,-MAX/2,-MAX/2,-MAX/2);
-      alpha128[4] = _mm_set_epi8(-MAX/2,-MAX/2,-MAX/2,-MAX/2,-MAX/2,-MAX/2,-MAX/2,-MAX/2,-MAX/2,-MAX/2,-MAX/2,-MAX/2,-MAX/2,-MAX/2,-MAX/2,-MAX/2);
-      alpha128[5] = _mm_set_epi8(-MAX/2,-MAX/2,-MAX/2,-MAX/2,-MAX/2,-MAX/2,-MAX/2,-MAX/2,-MAX/2,-MAX/2,-MAX/2,-MAX/2,-MAX/2,-MAX/2,-MAX/2,-MAX/2);
-      alpha128[6] = _mm_set_epi8(-MAX/2,-MAX/2,-MAX/2,-MAX/2,-MAX/2,-MAX/2,-MAX/2,-MAX/2,-MAX/2,-MAX/2,-MAX/2,-MAX/2,-MAX/2,-MAX/2,-MAX/2,-MAX/2);
-      alpha128[7] = _mm_set_epi8(-MAX/2,-MAX/2,-MAX/2,-MAX/2,-MAX/2,-MAX/2,-MAX/2,-MAX/2,-MAX/2,-MAX/2,-MAX/2,-MAX/2,-MAX/2,-MAX/2,-MAX/2,-MAX/2);
+      alpha128[0] = simde_mm_set_epi8(-MAX / 2, -MAX / 2, -MAX / 2, -MAX / 2, -MAX / 2, -MAX / 2, -MAX / 2, -MAX / 2, -MAX / 2, -MAX / 2, -MAX / 2, -MAX / 2, -MAX / 2, -MAX / 2, -MAX / 2, 0);
+      alpha128[1] = simde_mm_set_epi8(-MAX / 2, -MAX / 2, -MAX / 2, -MAX / 2, -MAX / 2, -MAX / 2, -MAX / 2, -MAX / 2, -MAX / 2, -MAX / 2, -MAX / 2, -MAX / 2, -MAX / 2, -MAX / 2, -MAX / 2, -MAX / 2);
+      alpha128[2] = simde_mm_set_epi8(-MAX / 2, -MAX / 2, -MAX / 2, -MAX / 2, -MAX / 2, -MAX / 2, -MAX / 2, -MAX / 2, -MAX / 2, -MAX / 2, -MAX / 2, -MAX / 2, -MAX / 2, -MAX / 2, -MAX / 2, -MAX / 2);
+      alpha128[3] = simde_mm_set_epi8(-MAX / 2, -MAX / 2, -MAX / 2, -MAX / 2, -MAX / 2, -MAX / 2, -MAX / 2, -MAX / 2, -MAX / 2, -MAX / 2, -MAX / 2, -MAX / 2, -MAX / 2, -MAX / 2, -MAX / 2, -MAX / 2);
+      alpha128[4] = simde_mm_set_epi8(-MAX / 2, -MAX / 2, -MAX / 2, -MAX / 2, -MAX / 2, -MAX / 2, -MAX / 2, -MAX / 2, -MAX / 2, -MAX / 2, -MAX / 2, -MAX / 2, -MAX / 2, -MAX / 2, -MAX / 2, -MAX / 2);
+      alpha128[5] = simde_mm_set_epi8(-MAX / 2, -MAX / 2, -MAX / 2, -MAX / 2, -MAX / 2, -MAX / 2, -MAX / 2, -MAX / 2, -MAX / 2, -MAX / 2, -MAX / 2, -MAX / 2, -MAX / 2, -MAX / 2, -MAX / 2, -MAX / 2);
+      alpha128[6] = simde_mm_set_epi8(-MAX / 2, -MAX / 2, -MAX / 2, -MAX / 2, -MAX / 2, -MAX / 2, -MAX / 2, -MAX / 2, -MAX / 2, -MAX / 2, -MAX / 2, -MAX / 2, -MAX / 2, -MAX / 2, -MAX / 2, -MAX / 2);
+      alpha128[7] = simde_mm_set_epi8(-MAX / 2, -MAX / 2, -MAX / 2, -MAX / 2, -MAX / 2, -MAX / 2, -MAX / 2, -MAX / 2, -MAX / 2, -MAX / 2, -MAX / 2, -MAX / 2, -MAX / 2, -MAX / 2, -MAX / 2, -MAX / 2);
     } else {
-      alpha128[0] = _mm_slli_si128(alpha128[(K1<<3)],1);
-      alpha128[1] = _mm_slli_si128(alpha128[1+(K1<<3)],1);
-      alpha128[2] = _mm_slli_si128(alpha128[2+(K1<<3)],1);
-      alpha128[3] = _mm_slli_si128(alpha128[3+(K1<<3)],1);
-      alpha128[4] = _mm_slli_si128(alpha128[4+(K1<<3)],1);
-      alpha128[5] = _mm_slli_si128(alpha128[5+(K1<<3)],1);
-      alpha128[6] = _mm_slli_si128(alpha128[6+(K1<<3)],1);
-      alpha128[7] = _mm_slli_si128(alpha128[7+(K1<<3)],1);
+      alpha128[0] = simde_mm_slli_si128(alpha128[(K1 << 3)], 1);
+      alpha128[1] = simde_mm_slli_si128(alpha128[1 + (K1 << 3)], 1);
+      alpha128[2] = simde_mm_slli_si128(alpha128[2 + (K1 << 3)], 1);
+      alpha128[3] = simde_mm_slli_si128(alpha128[3 + (K1 << 3)], 1);
+      alpha128[4] = simde_mm_slli_si128(alpha128[4 + (K1 << 3)], 1);
+      alpha128[5] = simde_mm_slli_si128(alpha128[5 + (K1 << 3)], 1);
+      alpha128[6] = simde_mm_slli_si128(alpha128[6 + (K1 << 3)], 1);
+      alpha128[7] = simde_mm_slli_si128(alpha128[7 + (K1 << 3)], 1);
       alpha[16] =  -MAX/2;
       alpha[32] = -MAX/2;
       alpha[48] = -MAX/2;
@@ -992,55 +1112,55 @@ void compute_alpha(llr_t *alpha,llr_t *beta,llr_t *m_11,llr_t *m_10,unsigned sho
     print_bytes("a6:",&alpha_ptr[6]);
     print_bytes("a7:",&alpha_ptr[7]);
     */
-    m11p = (__m128i *)m_11;
-    m10p = (__m128i *)m_10;
+    m11p = (simde__m128i *)m_11;
+    m10p = (simde__m128i *)m_10;
 
     for (k=0;
          k<l;
          k++) {
-      m_b0 = _mm_adds_epi8(alpha_ptr[1],*m11p);  // m11
-      m_b4 = _mm_subs_epi8(alpha_ptr[1],*m11p);  // m00=-m11
-      m_b1 = _mm_subs_epi8(alpha_ptr[3],*m10p);  // m01=-m10
-      m_b5 = _mm_adds_epi8(alpha_ptr[3],*m10p);  // m10
-      m_b2 = _mm_adds_epi8(alpha_ptr[5],*m10p);  // m10
-      m_b6 = _mm_subs_epi8(alpha_ptr[5],*m10p);  // m01=-m10
-      m_b3 = _mm_subs_epi8(alpha_ptr[7],*m11p);  // m00=-m11
-      m_b7 = _mm_adds_epi8(alpha_ptr[7],*m11p);  // m11
-      new0 = _mm_subs_epi8(alpha_ptr[0],*m11p);  // m00=-m11
-      new4 = _mm_adds_epi8(alpha_ptr[0],*m11p);  // m11
-      new1 = _mm_adds_epi8(alpha_ptr[2],*m10p);  // m10
-      new5 = _mm_subs_epi8(alpha_ptr[2],*m10p);  // m01=-m10
-      new2 = _mm_subs_epi8(alpha_ptr[4],*m10p);  // m01=-m10
-      new6 = _mm_adds_epi8(alpha_ptr[4],*m10p);  // m10
-      new3 = _mm_adds_epi8(alpha_ptr[6],*m11p);  // m11
-      new7 = _mm_subs_epi8(alpha_ptr[6],*m11p);  // m00=-m11
+      m_b0 = simde_mm_adds_epi8(alpha_ptr[1], *m11p); // m11
+      m_b4 = simde_mm_subs_epi8(alpha_ptr[1], *m11p); // m00=-m11
+      m_b1 = simde_mm_subs_epi8(alpha_ptr[3], *m10p); // m01=-m10
+      m_b5 = simde_mm_adds_epi8(alpha_ptr[3], *m10p); // m10
+      m_b2 = simde_mm_adds_epi8(alpha_ptr[5], *m10p); // m10
+      m_b6 = simde_mm_subs_epi8(alpha_ptr[5], *m10p); // m01=-m10
+      m_b3 = simde_mm_subs_epi8(alpha_ptr[7], *m11p); // m00=-m11
+      m_b7 = simde_mm_adds_epi8(alpha_ptr[7], *m11p); // m11
+      new0 = simde_mm_subs_epi8(alpha_ptr[0], *m11p); // m00=-m11
+      new4 = simde_mm_adds_epi8(alpha_ptr[0], *m11p); // m11
+      new1 = simde_mm_adds_epi8(alpha_ptr[2], *m10p); // m10
+      new5 = simde_mm_subs_epi8(alpha_ptr[2], *m10p); // m01=-m10
+      new2 = simde_mm_subs_epi8(alpha_ptr[4], *m10p); // m01=-m10
+      new6 = simde_mm_adds_epi8(alpha_ptr[4], *m10p); // m10
+      new3 = simde_mm_adds_epi8(alpha_ptr[6], *m11p); // m11
+      new7 = simde_mm_subs_epi8(alpha_ptr[6], *m11p); // m00=-m11
       alpha_ptr += 8;
       m11p++;
       m10p++;
-      alpha_ptr[0] = _mm_max_epi8(m_b0,new0);
-      alpha_ptr[1] = _mm_max_epi8(m_b1,new1);
-      alpha_ptr[2] = _mm_max_epi8(m_b2,new2);
-      alpha_ptr[3] = _mm_max_epi8(m_b3,new3);
-      alpha_ptr[4] = _mm_max_epi8(m_b4,new4);
-      alpha_ptr[5] = _mm_max_epi8(m_b5,new5);
-      alpha_ptr[6] = _mm_max_epi8(m_b6,new6);
-      alpha_ptr[7] = _mm_max_epi8(m_b7,new7);
+      alpha_ptr[0] = simde_mm_max_epi8(m_b0, new0);
+      alpha_ptr[1] = simde_mm_max_epi8(m_b1, new1);
+      alpha_ptr[2] = simde_mm_max_epi8(m_b2, new2);
+      alpha_ptr[3] = simde_mm_max_epi8(m_b3, new3);
+      alpha_ptr[4] = simde_mm_max_epi8(m_b4, new4);
+      alpha_ptr[5] = simde_mm_max_epi8(m_b5, new5);
+      alpha_ptr[6] = simde_mm_max_epi8(m_b6, new6);
+      alpha_ptr[7] = simde_mm_max_epi8(m_b7, new7);
       // compute and subtract maxima
-      alpha_max = _mm_max_epi8(alpha_ptr[0],alpha_ptr[1]);
-      alpha_max = _mm_max_epi8(alpha_max,alpha_ptr[2]);
-      alpha_max = _mm_max_epi8(alpha_max,alpha_ptr[3]);
-      alpha_max = _mm_max_epi8(alpha_max,alpha_ptr[4]);
-      alpha_max = _mm_max_epi8(alpha_max,alpha_ptr[5]);
-      alpha_max = _mm_max_epi8(alpha_max,alpha_ptr[6]);
-      alpha_max = _mm_max_epi8(alpha_max,alpha_ptr[7]);
-      alpha_ptr[0] = _mm_subs_epi8(alpha_ptr[0],alpha_max);
-      alpha_ptr[1] = _mm_subs_epi8(alpha_ptr[1],alpha_max);
-      alpha_ptr[2] = _mm_subs_epi8(alpha_ptr[2],alpha_max);
-      alpha_ptr[3] = _mm_subs_epi8(alpha_ptr[3],alpha_max);
-      alpha_ptr[4] = _mm_subs_epi8(alpha_ptr[4],alpha_max);
-      alpha_ptr[5] = _mm_subs_epi8(alpha_ptr[5],alpha_max);
-      alpha_ptr[6] = _mm_subs_epi8(alpha_ptr[6],alpha_max);
-      alpha_ptr[7] = _mm_subs_epi8(alpha_ptr[7],alpha_max);
+      alpha_max = simde_mm_max_epi8(alpha_ptr[0], alpha_ptr[1]);
+      alpha_max = simde_mm_max_epi8(alpha_max, alpha_ptr[2]);
+      alpha_max = simde_mm_max_epi8(alpha_max, alpha_ptr[3]);
+      alpha_max = simde_mm_max_epi8(alpha_max, alpha_ptr[4]);
+      alpha_max = simde_mm_max_epi8(alpha_max, alpha_ptr[5]);
+      alpha_max = simde_mm_max_epi8(alpha_max, alpha_ptr[6]);
+      alpha_max = simde_mm_max_epi8(alpha_max, alpha_ptr[7]);
+      alpha_ptr[0] = simde_mm_subs_epi8(alpha_ptr[0], alpha_max);
+      alpha_ptr[1] = simde_mm_subs_epi8(alpha_ptr[1], alpha_max);
+      alpha_ptr[2] = simde_mm_subs_epi8(alpha_ptr[2], alpha_max);
+      alpha_ptr[3] = simde_mm_subs_epi8(alpha_ptr[3], alpha_max);
+      alpha_ptr[4] = simde_mm_subs_epi8(alpha_ptr[4], alpha_max);
+      alpha_ptr[5] = simde_mm_subs_epi8(alpha_ptr[5], alpha_max);
+      alpha_ptr[6] = simde_mm_subs_epi8(alpha_ptr[6], alpha_max);
+      alpha_ptr[7] = simde_mm_subs_epi8(alpha_ptr[7], alpha_max);
       /*
       printf("alpha k %d (%d) (%p)\n",k+1,(k+1)<<4,alpha_ptr);
 
@@ -1063,23 +1183,23 @@ void compute_alpha(llr_t *alpha,llr_t *beta,llr_t *m_11,llr_t *m_10,unsigned sho
       break;
   }
 
-  _mm_empty();
-  _m_empty();
+  simde_mm_empty();
+  simde_m_empty();
 }
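+/* compute_beta mirrors compute_alpha: the same butterfly/renormalize steps,
+   run backwards from beta[frame_length<<3], with the final-state metrics
+   seeded from the tail (beta0_16..beta7_16 below). */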
 
 
 void compute_beta(llr_t *alpha,llr_t *beta,llr_t *m_11,llr_t *m_10,unsigned short frame_length,unsigned char F,int offset8_flag) {
   int k,rerun_flag=0;
-  __m128i m11_128,m10_128;
-  __m128i m_b0,m_b1,m_b2,m_b3,m_b4,m_b5,m_b6,m_b7;
-  __m128i new0,new1,new2,new3,new4,new5,new6,new7;
-  __m128i *beta128,*alpha128,*beta_ptr;
-  __m128i beta_max;
+  simde__m128i m11_128, m10_128;
+  simde__m128i m_b0, m_b1, m_b2, m_b3, m_b4, m_b5, m_b6, m_b7;
+  simde__m128i new0, new1, new2, new3, new4, new5, new6, new7;
+  simde__m128i *beta128, *alpha128, *beta_ptr;
+  simde__m128i beta_max;
   int16_t m11,m10,beta0_16,beta1_16,beta2_16,beta3_16,beta4_16,beta5_16,beta6_16,beta7_16,beta0_2,beta1_2,beta2_2,beta3_2,beta_m;
   llr_t beta0,beta1;
 #ifdef LLR8
   llr_t beta2,beta3,beta4,beta5,beta6,beta7;
-  __m128i beta_16;
+  simde__m128i beta_16;
 #endif
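+  /* LLR8 selects the 8-bit metric variant: the epi8 saturating ops process
+     16 lanes per register instead of 8, at reduced dynamic range. */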
 #ifdef DEBUG_LOGMAP
   msg("compute_beta, %p,%p,%p,%p,framelength %d,F %d\n",
@@ -1126,22 +1246,22 @@ void compute_beta(llr_t *alpha,llr_t *beta,llr_t *m_11,llr_t *m_10,unsigned shor
   beta6_16=beta6_16-beta_m;
   beta7_16=beta7_16-beta_m;
 #ifdef LLR8
-  beta_16 = _mm_set_epi16(beta7_16,beta6_16,beta5_16,beta4_16,beta3_16,beta2_16,beta1_16,beta0_16);
-  beta_16 = _mm_packs_epi16(beta_16,beta_16);
-  beta0 = _mm_extract_epi8(beta_16,0);
-  beta1 = _mm_extract_epi8(beta_16,1);
-  beta2 = _mm_extract_epi8(beta_16,2);
-  beta3 = _mm_extract_epi8(beta_16,3);
-  beta4 = _mm_extract_epi8(beta_16,4);
-  beta5 = _mm_extract_epi8(beta_16,5);
-  beta6 = _mm_extract_epi8(beta_16,6);
-  beta7 = _mm_extract_epi8(beta_16,7);
+  beta_16 = simde_mm_set_epi16(beta7_16, beta6_16, beta5_16, beta4_16, beta3_16, beta2_16, beta1_16, beta0_16);
+  beta_16 = simde_mm_packs_epi16(beta_16, beta_16);
+  beta0 = simde_mm_extract_epi8(beta_16, 0);
+  beta1 = simde_mm_extract_epi8(beta_16, 1);
+  beta2 = simde_mm_extract_epi8(beta_16, 2);
+  beta3 = simde_mm_extract_epi8(beta_16, 3);
+  beta4 = simde_mm_extract_epi8(beta_16, 4);
+  beta5 = simde_mm_extract_epi8(beta_16, 5);
+  beta6 = simde_mm_extract_epi8(beta_16, 6);
+  beta7 = simde_mm_extract_epi8(beta_16, 7);
   //  printf("beta (init)    : %d,%d,%d,%d,%d,%d,%d,%d\n",(uint8_t)beta0,(uint8_t)beta1,(uint8_t)beta2,(uint8_t)beta3,(uint8_t)beta4,(uint8_t)beta5,(uint8_t)beta6,(uint8_t)beta7);
 #endif
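+  /* In the LLR8 case the eight 16-bit tail metrics are saturate-packed to
+     8 bits (simde_mm_packs_epi16) so they can be inserted into the 8-bit
+     metric registers used by the rest of the recursion. */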
 
   for (rerun_flag=0;; rerun_flag=1) {
-    beta_ptr   = (__m128i *)&beta[frame_length<<3];
-    alpha128   = (__m128i *)&alpha[0];
+    beta_ptr = (simde__m128i *)&beta[frame_length << 3];
+    alpha128 = (simde__m128i *)&alpha[0];
 
     if (rerun_flag == 0) {
 #ifndef LLR8
@@ -1164,37 +1284,37 @@ void compute_beta(llr_t *alpha,llr_t *beta,llr_t *m_11,llr_t *m_10,unsigned shor
       beta_ptr[7] = alpha128[7+(frame_length>>1)];
 #endif
     } else {
-      beta128 = (__m128i *)&beta[0];
+      beta128 = (simde__m128i *)&beta[0];
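+      // rerun pass: seed the backward recursion with the first-pass betas from the head of the block, shifted by one lane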
 #ifndef LLR8
-      beta_ptr[0] = _mm_srli_si128(beta128[0],2);
-      beta_ptr[1] = _mm_srli_si128(beta128[1],2);
-      beta_ptr[2] = _mm_srli_si128(beta128[2],2);
-      beta_ptr[3] = _mm_srli_si128(beta128[3],2);
-      beta_ptr[4] = _mm_srli_si128(beta128[4],2);
-      beta_ptr[5] = _mm_srli_si128(beta128[5],2);
-      beta_ptr[6] = _mm_srli_si128(beta128[6],2);
-      beta_ptr[7] = _mm_srli_si128(beta128[7],2);
+      beta_ptr[0] = simde_mm_srli_si128(beta128[0], 2);
+      beta_ptr[1] = simde_mm_srli_si128(beta128[1], 2);
+      beta_ptr[2] = simde_mm_srli_si128(beta128[2], 2);
+      beta_ptr[3] = simde_mm_srli_si128(beta128[3], 2);
+      beta_ptr[4] = simde_mm_srli_si128(beta128[4], 2);
+      beta_ptr[5] = simde_mm_srli_si128(beta128[5], 2);
+      beta_ptr[6] = simde_mm_srli_si128(beta128[6], 2);
+      beta_ptr[7] = simde_mm_srli_si128(beta128[7], 2);
 #else
-      beta_ptr[0] = _mm_srli_si128(beta128[0],1);
-      beta_ptr[1] = _mm_srli_si128(beta128[1],1);
-      beta_ptr[2] = _mm_srli_si128(beta128[2],1);
-      beta_ptr[3] = _mm_srli_si128(beta128[3],1);
-      beta_ptr[4] = _mm_srli_si128(beta128[4],1);
-      beta_ptr[5] = _mm_srli_si128(beta128[5],1);
-      beta_ptr[6] = _mm_srli_si128(beta128[6],1);
-      beta_ptr[7] = _mm_srli_si128(beta128[7],1);
+      beta_ptr[0] = simde_mm_srli_si128(beta128[0], 1);
+      beta_ptr[1] = simde_mm_srli_si128(beta128[1], 1);
+      beta_ptr[2] = simde_mm_srli_si128(beta128[2], 1);
+      beta_ptr[3] = simde_mm_srli_si128(beta128[3], 1);
+      beta_ptr[4] = simde_mm_srli_si128(beta128[4], 1);
+      beta_ptr[5] = simde_mm_srli_si128(beta128[5], 1);
+      beta_ptr[6] = simde_mm_srli_si128(beta128[6], 1);
+      beta_ptr[7] = simde_mm_srli_si128(beta128[7], 1);
 #endif
     }
 
 #ifndef LLR8
-    beta_ptr[0] = _mm_insert_epi16(beta_ptr[0],beta0_16,7);
-    beta_ptr[1] = _mm_insert_epi16(beta_ptr[1],beta1_16,7);
-    beta_ptr[2] = _mm_insert_epi16(beta_ptr[2],beta2_16,7);
-    beta_ptr[3] = _mm_insert_epi16(beta_ptr[3],beta3_16,7);
-    beta_ptr[4] = _mm_insert_epi16(beta_ptr[4],beta4_16,7);
-    beta_ptr[5] = _mm_insert_epi16(beta_ptr[5],beta5_16,7);
-    beta_ptr[6] = _mm_insert_epi16(beta_ptr[6],beta6_16,7);
-    beta_ptr[7] = _mm_insert_epi16(beta_ptr[7],beta7_16,7);
+    beta_ptr[0] = simde_mm_insert_epi16(beta_ptr[0], beta0_16, 7);
+    beta_ptr[1] = simde_mm_insert_epi16(beta_ptr[1], beta1_16, 7);
+    beta_ptr[2] = simde_mm_insert_epi16(beta_ptr[2], beta2_16, 7);
+    beta_ptr[3] = simde_mm_insert_epi16(beta_ptr[3], beta3_16, 7);
+    beta_ptr[4] = simde_mm_insert_epi16(beta_ptr[4], beta4_16, 7);
+    beta_ptr[5] = simde_mm_insert_epi16(beta_ptr[5], beta5_16, 7);
+    beta_ptr[6] = simde_mm_insert_epi16(beta_ptr[6], beta6_16, 7);
+    beta_ptr[7] = simde_mm_insert_epi16(beta_ptr[7], beta7_16, 7);
     /*
       beta[7+(frame_length<<3)] = beta0_16;
       beta[15+(frame_length<<3)] = beta1_16;
@@ -1207,14 +1327,14 @@ void compute_beta(llr_t *alpha,llr_t *beta,llr_t *m_11,llr_t *m_10,unsigned shor
 #else
 
     if (offset8_flag==0) {
-      beta_ptr[0] = _mm_insert_epi8(beta_ptr[0],beta0,15);
-      beta_ptr[1] = _mm_insert_epi8(beta_ptr[1],beta1,15);
-      beta_ptr[2] = _mm_insert_epi8(beta_ptr[2],beta2,15);
-      beta_ptr[3] = _mm_insert_epi8(beta_ptr[3],beta3,15);
-      beta_ptr[4] = _mm_insert_epi8(beta_ptr[4],beta4,15);
-      beta_ptr[5] = _mm_insert_epi8(beta_ptr[5],beta5,15);
-      beta_ptr[6] = _mm_insert_epi8(beta_ptr[6],beta6,15);
-      beta_ptr[7] = _mm_insert_epi8(beta_ptr[7],beta7,15);
+      beta_ptr[0] = simde_mm_insert_epi8(beta_ptr[0], beta0, 15);
+      beta_ptr[1] = simde_mm_insert_epi8(beta_ptr[1], beta1, 15);
+      beta_ptr[2] = simde_mm_insert_epi8(beta_ptr[2], beta2, 15);
+      beta_ptr[3] = simde_mm_insert_epi8(beta_ptr[3], beta3, 15);
+      beta_ptr[4] = simde_mm_insert_epi8(beta_ptr[4], beta4, 15);
+      beta_ptr[5] = simde_mm_insert_epi8(beta_ptr[5], beta5, 15);
+      beta_ptr[6] = simde_mm_insert_epi8(beta_ptr[6], beta6, 15);
+      beta_ptr[7] = simde_mm_insert_epi8(beta_ptr[7], beta7, 15);
     } else {
     }
 
@@ -1223,401 +1343,401 @@ void compute_beta(llr_t *alpha,llr_t *beta,llr_t *m_11,llr_t *m_10,unsigned shor
     int loopval=((rerun_flag==0)?0:((frame_length-L)>>3));
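+    // the rerun pass only revisits the top (L>>3) indices of the recursion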
 
     for (k=(frame_length>>3)-1; k>=loopval; k--) {
-      m11_128=((__m128i *)m_11)[k];
-      m10_128=((__m128i *)m_10)[k];
-      m_b0 = _mm_adds_epi16(beta_ptr[4],m11_128);  //m11
-      m_b1 = _mm_subs_epi16(beta_ptr[4],m11_128);  //m00
-      m_b2 = _mm_subs_epi16(beta_ptr[5],m10_128);  //m01
-      m_b3 = _mm_adds_epi16(beta_ptr[5],m10_128);  //m10
-      m_b4 = _mm_adds_epi16(beta_ptr[6],m10_128);  //m10
-      m_b5 = _mm_subs_epi16(beta_ptr[6],m10_128);  //m01
-      m_b6 = _mm_subs_epi16(beta_ptr[7],m11_128);  //m00
-      m_b7 = _mm_adds_epi16(beta_ptr[7],m11_128);  //m11
-      new0 = _mm_subs_epi16(beta_ptr[0],m11_128);  //m00
-      new1 = _mm_adds_epi16(beta_ptr[0],m11_128);  //m11
-      new2 = _mm_adds_epi16(beta_ptr[1],m10_128);  //m10
-      new3 = _mm_subs_epi16(beta_ptr[1],m10_128);  //m01
-      new4 = _mm_subs_epi16(beta_ptr[2],m10_128);  //m01
-      new5 = _mm_adds_epi16(beta_ptr[2],m10_128);  //m10
-      new6 = _mm_adds_epi16(beta_ptr[3],m11_128);  //m11
-      new7 = _mm_subs_epi16(beta_ptr[3],m11_128);  //m00
+      m11_128 = ((simde__m128i *)m_11)[k];
+      m10_128 = ((simde__m128i *)m_10)[k];
+      m_b0 = simde_mm_adds_epi16(beta_ptr[4], m11_128); // m11
+      m_b1 = simde_mm_subs_epi16(beta_ptr[4], m11_128); // m00
+      m_b2 = simde_mm_subs_epi16(beta_ptr[5], m10_128); // m01
+      m_b3 = simde_mm_adds_epi16(beta_ptr[5], m10_128); // m10
+      m_b4 = simde_mm_adds_epi16(beta_ptr[6], m10_128); // m10
+      m_b5 = simde_mm_subs_epi16(beta_ptr[6], m10_128); // m01
+      m_b6 = simde_mm_subs_epi16(beta_ptr[7], m11_128); // m00
+      m_b7 = simde_mm_adds_epi16(beta_ptr[7], m11_128); // m11
+      new0 = simde_mm_subs_epi16(beta_ptr[0], m11_128); // m00
+      new1 = simde_mm_adds_epi16(beta_ptr[0], m11_128); // m11
+      new2 = simde_mm_adds_epi16(beta_ptr[1], m10_128); // m10
+      new3 = simde_mm_subs_epi16(beta_ptr[1], m10_128); // m01
+      new4 = simde_mm_subs_epi16(beta_ptr[2], m10_128); // m01
+      new5 = simde_mm_adds_epi16(beta_ptr[2], m10_128); // m10
+      new6 = simde_mm_adds_epi16(beta_ptr[3], m11_128); // m11
+      new7 = simde_mm_subs_epi16(beta_ptr[3], m11_128); // m00
       beta_ptr-=8;
-      beta_ptr[0] = _mm_max_epi16(m_b0,new0);
-      beta_ptr[1] = _mm_max_epi16(m_b1,new1);
-      beta_ptr[2] = _mm_max_epi16(m_b2,new2);
-      beta_ptr[3] = _mm_max_epi16(m_b3,new3);
-      beta_ptr[4] = _mm_max_epi16(m_b4,new4);
-      beta_ptr[5] = _mm_max_epi16(m_b5,new5);
-      beta_ptr[6] = _mm_max_epi16(m_b6,new6);
-      beta_ptr[7] = _mm_max_epi16(m_b7,new7);
-      beta_max = _mm_max_epi16(beta_ptr[0],beta_ptr[1]);
-      beta_max = _mm_max_epi16(beta_max   ,beta_ptr[2]);
-      beta_max = _mm_max_epi16(beta_max   ,beta_ptr[3]);
-      beta_max = _mm_max_epi16(beta_max   ,beta_ptr[4]);
-      beta_max = _mm_max_epi16(beta_max   ,beta_ptr[5]);
-      beta_max = _mm_max_epi16(beta_max   ,beta_ptr[6]);
-      beta_max = _mm_max_epi16(beta_max   ,beta_ptr[7]);
-      beta_ptr[0] = _mm_subs_epi16(beta_ptr[0],beta_max);
-      beta_ptr[1] = _mm_subs_epi16(beta_ptr[1],beta_max);
-      beta_ptr[2] = _mm_subs_epi16(beta_ptr[2],beta_max);
-      beta_ptr[3] = _mm_subs_epi16(beta_ptr[3],beta_max);
-      beta_ptr[4] = _mm_subs_epi16(beta_ptr[4],beta_max);
-      beta_ptr[5] = _mm_subs_epi16(beta_ptr[5],beta_max);
-      beta_ptr[6] = _mm_subs_epi16(beta_ptr[6],beta_max);
-      beta_ptr[7] = _mm_subs_epi16(beta_ptr[7],beta_max);
+      beta_ptr[0] = simde_mm_max_epi16(m_b0, new0);
+      beta_ptr[1] = simde_mm_max_epi16(m_b1, new1);
+      beta_ptr[2] = simde_mm_max_epi16(m_b2, new2);
+      beta_ptr[3] = simde_mm_max_epi16(m_b3, new3);
+      beta_ptr[4] = simde_mm_max_epi16(m_b4, new4);
+      beta_ptr[5] = simde_mm_max_epi16(m_b5, new5);
+      beta_ptr[6] = simde_mm_max_epi16(m_b6, new6);
+      beta_ptr[7] = simde_mm_max_epi16(m_b7, new7);
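+      // normalize: subtract the element-wise max over the eight states to keep the 16-bit metrics in range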
+      beta_max = simde_mm_max_epi16(beta_ptr[0], beta_ptr[1]);
+      beta_max = simde_mm_max_epi16(beta_max, beta_ptr[2]);
+      beta_max = simde_mm_max_epi16(beta_max, beta_ptr[3]);
+      beta_max = simde_mm_max_epi16(beta_max, beta_ptr[4]);
+      beta_max = simde_mm_max_epi16(beta_max, beta_ptr[5]);
+      beta_max = simde_mm_max_epi16(beta_max, beta_ptr[6]);
+      beta_max = simde_mm_max_epi16(beta_max, beta_ptr[7]);
+      beta_ptr[0] = simde_mm_subs_epi16(beta_ptr[0], beta_max);
+      beta_ptr[1] = simde_mm_subs_epi16(beta_ptr[1], beta_max);
+      beta_ptr[2] = simde_mm_subs_epi16(beta_ptr[2], beta_max);
+      beta_ptr[3] = simde_mm_subs_epi16(beta_ptr[3], beta_max);
+      beta_ptr[4] = simde_mm_subs_epi16(beta_ptr[4], beta_max);
+      beta_ptr[5] = simde_mm_subs_epi16(beta_ptr[5], beta_max);
+      beta_ptr[6] = simde_mm_subs_epi16(beta_ptr[6], beta_max);
+      beta_ptr[7] = simde_mm_subs_epi16(beta_ptr[7], beta_max);
     }
 
 #else
 #ifdef DEBUG_LOGMAP
     printf("beta0 %u:  %03d,%03d,%03d,%03d,%03d,%03d,%03d,%03d,%03d,%03d,%03d,%03d,%03d,%03d,%03d,%03d\n",
-           (frame_length>>4),
-           _mm_extract_epi8(beta_ptr[0],0),
-           _mm_extract_epi8(beta_ptr[0],1),
-           _mm_extract_epi8(beta_ptr[0],2),
-           _mm_extract_epi8(beta_ptr[0],3),
-           _mm_extract_epi8(beta_ptr[0],4),
-           _mm_extract_epi8(beta_ptr[0],5),
-           _mm_extract_epi8(beta_ptr[0],6),
-           _mm_extract_epi8(beta_ptr[0],7),
-           _mm_extract_epi8(beta_ptr[0],8),
-           _mm_extract_epi8(beta_ptr[0],9),
-           _mm_extract_epi8(beta_ptr[0],10),
-           _mm_extract_epi8(beta_ptr[0],11),
-           _mm_extract_epi8(beta_ptr[0],12),
-           _mm_extract_epi8(beta_ptr[0],13),
-           _mm_extract_epi8(beta_ptr[0],14),
-           _mm_extract_epi8(beta_ptr[0],15));
+           (frame_length >> 4),
+           simde_mm_extract_epi8(beta_ptr[0], 0),
+           simde_mm_extract_epi8(beta_ptr[0], 1),
+           simde_mm_extract_epi8(beta_ptr[0], 2),
+           simde_mm_extract_epi8(beta_ptr[0], 3),
+           simde_mm_extract_epi8(beta_ptr[0], 4),
+           simde_mm_extract_epi8(beta_ptr[0], 5),
+           simde_mm_extract_epi8(beta_ptr[0], 6),
+           simde_mm_extract_epi8(beta_ptr[0], 7),
+           simde_mm_extract_epi8(beta_ptr[0], 8),
+           simde_mm_extract_epi8(beta_ptr[0], 9),
+           simde_mm_extract_epi8(beta_ptr[0], 10),
+           simde_mm_extract_epi8(beta_ptr[0], 11),
+           simde_mm_extract_epi8(beta_ptr[0], 12),
+           simde_mm_extract_epi8(beta_ptr[0], 13),
+           simde_mm_extract_epi8(beta_ptr[0], 14),
+           simde_mm_extract_epi8(beta_ptr[0], 15));
     printf("beta1 %u:  %03d,%03d,%03d,%03d,%03d,%03d,%03d,%03d,%03d,%03d,%03d,%03d,%03d,%03d,%03d,%03d\n",
-           (frame_length>>4),
-           _mm_extract_epi8(beta_ptr[1],0),
-           _mm_extract_epi8(beta_ptr[1],1),
-           _mm_extract_epi8(beta_ptr[1],2),
-           _mm_extract_epi8(beta_ptr[1],3),
-           _mm_extract_epi8(beta_ptr[1],4),
-           _mm_extract_epi8(beta_ptr[1],5),
-           _mm_extract_epi8(beta_ptr[1],6),
-           _mm_extract_epi8(beta_ptr[1],7),
-           _mm_extract_epi8(beta_ptr[1],8),
-           _mm_extract_epi8(beta_ptr[1],9),
-           _mm_extract_epi8(beta_ptr[1],10),
-           _mm_extract_epi8(beta_ptr[1],11),
-           _mm_extract_epi8(beta_ptr[1],12),
-           _mm_extract_epi8(beta_ptr[1],13),
-           _mm_extract_epi8(beta_ptr[1],14),
-           _mm_extract_epi8(beta_ptr[1],15));
+           (frame_length >> 4),
+           simde_mm_extract_epi8(beta_ptr[1], 0),
+           simde_mm_extract_epi8(beta_ptr[1], 1),
+           simde_mm_extract_epi8(beta_ptr[1], 2),
+           simde_mm_extract_epi8(beta_ptr[1], 3),
+           simde_mm_extract_epi8(beta_ptr[1], 4),
+           simde_mm_extract_epi8(beta_ptr[1], 5),
+           simde_mm_extract_epi8(beta_ptr[1], 6),
+           simde_mm_extract_epi8(beta_ptr[1], 7),
+           simde_mm_extract_epi8(beta_ptr[1], 8),
+           simde_mm_extract_epi8(beta_ptr[1], 9),
+           simde_mm_extract_epi8(beta_ptr[1], 10),
+           simde_mm_extract_epi8(beta_ptr[1], 11),
+           simde_mm_extract_epi8(beta_ptr[1], 12),
+           simde_mm_extract_epi8(beta_ptr[1], 13),
+           simde_mm_extract_epi8(beta_ptr[1], 14),
+           simde_mm_extract_epi8(beta_ptr[1], 15));
     printf("beta2 %u:  %03d,%03d,%03d,%03d,%03d,%03d,%03d,%03d,%03d,%03d,%03d,%03d,%03d,%03d,%03d,%03d\n",
-           (frame_length>>4),
-           _mm_extract_epi8(beta_ptr[2],0),
-           _mm_extract_epi8(beta_ptr[2],1),
-           _mm_extract_epi8(beta_ptr[2],2),
-           _mm_extract_epi8(beta_ptr[2],3),
-           _mm_extract_epi8(beta_ptr[2],4),
-           _mm_extract_epi8(beta_ptr[2],5),
-           _mm_extract_epi8(beta_ptr[2],6),
-           _mm_extract_epi8(beta_ptr[2],7),
-           _mm_extract_epi8(beta_ptr[2],8),
-           _mm_extract_epi8(beta_ptr[2],9),
-           _mm_extract_epi8(beta_ptr[2],10),
-           _mm_extract_epi8(beta_ptr[2],11),
-           _mm_extract_epi8(beta_ptr[2],12),
-           _mm_extract_epi8(beta_ptr[2],13),
-           _mm_extract_epi8(beta_ptr[2],14),
-           _mm_extract_epi8(beta_ptr[2],15));
+           (frame_length >> 4),
+           simde_mm_extract_epi8(beta_ptr[2], 0),
+           simde_mm_extract_epi8(beta_ptr[2], 1),
+           simde_mm_extract_epi8(beta_ptr[2], 2),
+           simde_mm_extract_epi8(beta_ptr[2], 3),
+           simde_mm_extract_epi8(beta_ptr[2], 4),
+           simde_mm_extract_epi8(beta_ptr[2], 5),
+           simde_mm_extract_epi8(beta_ptr[2], 6),
+           simde_mm_extract_epi8(beta_ptr[2], 7),
+           simde_mm_extract_epi8(beta_ptr[2], 8),
+           simde_mm_extract_epi8(beta_ptr[2], 9),
+           simde_mm_extract_epi8(beta_ptr[2], 10),
+           simde_mm_extract_epi8(beta_ptr[2], 11),
+           simde_mm_extract_epi8(beta_ptr[2], 12),
+           simde_mm_extract_epi8(beta_ptr[2], 13),
+           simde_mm_extract_epi8(beta_ptr[2], 14),
+           simde_mm_extract_epi8(beta_ptr[2], 15));
     printf("beta3 %u:  %03d,%03d,%03d,%03d,%03d,%03d,%03d,%03d,%03d,%03d,%03d,%03d,%03d,%03d,%03d,%03d\n",
-           (frame_length>>4),
-           _mm_extract_epi8(beta_ptr[3],0),
-           _mm_extract_epi8(beta_ptr[3],1),
-           _mm_extract_epi8(beta_ptr[3],2),
-           _mm_extract_epi8(beta_ptr[3],3),
-           _mm_extract_epi8(beta_ptr[3],4),
-           _mm_extract_epi8(beta_ptr[3],5),
-           _mm_extract_epi8(beta_ptr[3],6),
-           _mm_extract_epi8(beta_ptr[3],7),
-           _mm_extract_epi8(beta_ptr[3],8),
-           _mm_extract_epi8(beta_ptr[3],9),
-           _mm_extract_epi8(beta_ptr[3],10),
-           _mm_extract_epi8(beta_ptr[3],11),
-           _mm_extract_epi8(beta_ptr[3],12),
-           _mm_extract_epi8(beta_ptr[3],13),
-           _mm_extract_epi8(beta_ptr[3],14),
-           _mm_extract_epi8(beta_ptr[3],15));
+           (frame_length >> 4),
+           simde_mm_extract_epi8(beta_ptr[3], 0),
+           simde_mm_extract_epi8(beta_ptr[3], 1),
+           simde_mm_extract_epi8(beta_ptr[3], 2),
+           simde_mm_extract_epi8(beta_ptr[3], 3),
+           simde_mm_extract_epi8(beta_ptr[3], 4),
+           simde_mm_extract_epi8(beta_ptr[3], 5),
+           simde_mm_extract_epi8(beta_ptr[3], 6),
+           simde_mm_extract_epi8(beta_ptr[3], 7),
+           simde_mm_extract_epi8(beta_ptr[3], 8),
+           simde_mm_extract_epi8(beta_ptr[3], 9),
+           simde_mm_extract_epi8(beta_ptr[3], 10),
+           simde_mm_extract_epi8(beta_ptr[3], 11),
+           simde_mm_extract_epi8(beta_ptr[3], 12),
+           simde_mm_extract_epi8(beta_ptr[3], 13),
+           simde_mm_extract_epi8(beta_ptr[3], 14),
+           simde_mm_extract_epi8(beta_ptr[3], 15));
     printf("beta4 %u:  %03d,%03d,%03d,%03d,%03d,%03d,%03d,%03d,%03d,%03d,%03d,%03d,%03d,%03d,%03d,%03d\n",
-           (frame_length>>4),
-           _mm_extract_epi8(beta_ptr[4],0),
-           _mm_extract_epi8(beta_ptr[4],1),
-           _mm_extract_epi8(beta_ptr[4],2),
-           _mm_extract_epi8(beta_ptr[4],3),
-           _mm_extract_epi8(beta_ptr[4],4),
-           _mm_extract_epi8(beta_ptr[4],5),
-           _mm_extract_epi8(beta_ptr[4],6),
-           _mm_extract_epi8(beta_ptr[4],7),
-           _mm_extract_epi8(beta_ptr[4],8),
-           _mm_extract_epi8(beta_ptr[4],9),
-           _mm_extract_epi8(beta_ptr[4],10),
-           _mm_extract_epi8(beta_ptr[4],11),
-           _mm_extract_epi8(beta_ptr[4],12),
-           _mm_extract_epi8(beta_ptr[4],13),
-           _mm_extract_epi8(beta_ptr[4],14),
-           _mm_extract_epi8(beta_ptr[4],15));
+           (frame_length >> 4),
+           simde_mm_extract_epi8(beta_ptr[4], 0),
+           simde_mm_extract_epi8(beta_ptr[4], 1),
+           simde_mm_extract_epi8(beta_ptr[4], 2),
+           simde_mm_extract_epi8(beta_ptr[4], 3),
+           simde_mm_extract_epi8(beta_ptr[4], 4),
+           simde_mm_extract_epi8(beta_ptr[4], 5),
+           simde_mm_extract_epi8(beta_ptr[4], 6),
+           simde_mm_extract_epi8(beta_ptr[4], 7),
+           simde_mm_extract_epi8(beta_ptr[4], 8),
+           simde_mm_extract_epi8(beta_ptr[4], 9),
+           simde_mm_extract_epi8(beta_ptr[4], 10),
+           simde_mm_extract_epi8(beta_ptr[4], 11),
+           simde_mm_extract_epi8(beta_ptr[4], 12),
+           simde_mm_extract_epi8(beta_ptr[4], 13),
+           simde_mm_extract_epi8(beta_ptr[4], 14),
+           simde_mm_extract_epi8(beta_ptr[4], 15));
     printf("beta5 %u:  %03d,%03d,%03d,%03d,%03d,%03d,%03d,%03d,%03d,%03d,%03d,%03d,%03d,%03d,%03d,%03d\n",
-           (frame_length>>4),
-           _mm_extract_epi8(beta_ptr[5],0),
-           _mm_extract_epi8(beta_ptr[5],1),
-           _mm_extract_epi8(beta_ptr[5],2),
-           _mm_extract_epi8(beta_ptr[5],3),
-           _mm_extract_epi8(beta_ptr[5],4),
-           _mm_extract_epi8(beta_ptr[5],5),
-           _mm_extract_epi8(beta_ptr[5],6),
-           _mm_extract_epi8(beta_ptr[5],7),
-           _mm_extract_epi8(beta_ptr[5],8),
-           _mm_extract_epi8(beta_ptr[5],9),
-           _mm_extract_epi8(beta_ptr[5],10),
-           _mm_extract_epi8(beta_ptr[5],11),
-           _mm_extract_epi8(beta_ptr[5],12),
-           _mm_extract_epi8(beta_ptr[5],13),
-           _mm_extract_epi8(beta_ptr[5],14),
-           _mm_extract_epi8(beta_ptr[5],15));
+           (frame_length >> 4),
+           simde_mm_extract_epi8(beta_ptr[5], 0),
+           simde_mm_extract_epi8(beta_ptr[5], 1),
+           simde_mm_extract_epi8(beta_ptr[5], 2),
+           simde_mm_extract_epi8(beta_ptr[5], 3),
+           simde_mm_extract_epi8(beta_ptr[5], 4),
+           simde_mm_extract_epi8(beta_ptr[5], 5),
+           simde_mm_extract_epi8(beta_ptr[5], 6),
+           simde_mm_extract_epi8(beta_ptr[5], 7),
+           simde_mm_extract_epi8(beta_ptr[5], 8),
+           simde_mm_extract_epi8(beta_ptr[5], 9),
+           simde_mm_extract_epi8(beta_ptr[5], 10),
+           simde_mm_extract_epi8(beta_ptr[5], 11),
+           simde_mm_extract_epi8(beta_ptr[5], 12),
+           simde_mm_extract_epi8(beta_ptr[5], 13),
+           simde_mm_extract_epi8(beta_ptr[5], 14),
+           simde_mm_extract_epi8(beta_ptr[5], 15));
     printf("beta6 %u:  %03d,%03d,%03d,%03d,%03d,%03d,%03d,%03d,%03d,%03d,%03d,%03d,%03d,%03d,%03d,%03d\n",
-           (frame_length>>4),
-           _mm_extract_epi8(beta_ptr[6],0),
-           _mm_extract_epi8(beta_ptr[6],1),
-           _mm_extract_epi8(beta_ptr[6],2),
-           _mm_extract_epi8(beta_ptr[6],3),
-           _mm_extract_epi8(beta_ptr[6],4),
-           _mm_extract_epi8(beta_ptr[6],5),
-           _mm_extract_epi8(beta_ptr[6],6),
-           _mm_extract_epi8(beta_ptr[6],7),
-           _mm_extract_epi8(beta_ptr[6],8),
-           _mm_extract_epi8(beta_ptr[6],9),
-           _mm_extract_epi8(beta_ptr[6],10),
-           _mm_extract_epi8(beta_ptr[6],11),
-           _mm_extract_epi8(beta_ptr[6],12),
-           _mm_extract_epi8(beta_ptr[6],13),
-           _mm_extract_epi8(beta_ptr[6],14),
-           _mm_extract_epi8(beta_ptr[6],15));
+           (frame_length >> 4),
+           simde_mm_extract_epi8(beta_ptr[6], 0),
+           simde_mm_extract_epi8(beta_ptr[6], 1),
+           simde_mm_extract_epi8(beta_ptr[6], 2),
+           simde_mm_extract_epi8(beta_ptr[6], 3),
+           simde_mm_extract_epi8(beta_ptr[6], 4),
+           simde_mm_extract_epi8(beta_ptr[6], 5),
+           simde_mm_extract_epi8(beta_ptr[6], 6),
+           simde_mm_extract_epi8(beta_ptr[6], 7),
+           simde_mm_extract_epi8(beta_ptr[6], 8),
+           simde_mm_extract_epi8(beta_ptr[6], 9),
+           simde_mm_extract_epi8(beta_ptr[6], 10),
+           simde_mm_extract_epi8(beta_ptr[6], 11),
+           simde_mm_extract_epi8(beta_ptr[6], 12),
+           simde_mm_extract_epi8(beta_ptr[6], 13),
+           simde_mm_extract_epi8(beta_ptr[6], 14),
+           simde_mm_extract_epi8(beta_ptr[6], 15));
     printf("beta7 %u:  %03d,%03d,%03d,%03d,%03d,%03d,%03d,%03d,%03d,%03d,%03d,%03d,%03d,%03d,%03d,%03d\n",
-           (frame_length>>4),
-           _mm_extract_epi8(beta_ptr[7],0),
-           _mm_extract_epi8(beta_ptr[7],1),
-           _mm_extract_epi8(beta_ptr[7],2),
-           _mm_extract_epi8(beta_ptr[7],3),
-           _mm_extract_epi8(beta_ptr[7],4),
-           _mm_extract_epi8(beta_ptr[7],5),
-           _mm_extract_epi8(beta_ptr[7],6),
-           _mm_extract_epi8(beta_ptr[7],7),
-           _mm_extract_epi8(beta_ptr[7],8),
-           _mm_extract_epi8(beta_ptr[7],9),
-           _mm_extract_epi8(beta_ptr[7],10),
-           _mm_extract_epi8(beta_ptr[7],11),
-           _mm_extract_epi8(beta_ptr[7],12),
-           _mm_extract_epi8(beta_ptr[7],13),
-           _mm_extract_epi8(beta_ptr[7],14),
-           _mm_extract_epi8(beta_ptr[7],15));
+           (frame_length >> 4),
+           simde_mm_extract_epi8(beta_ptr[7], 0),
+           simde_mm_extract_epi8(beta_ptr[7], 1),
+           simde_mm_extract_epi8(beta_ptr[7], 2),
+           simde_mm_extract_epi8(beta_ptr[7], 3),
+           simde_mm_extract_epi8(beta_ptr[7], 4),
+           simde_mm_extract_epi8(beta_ptr[7], 5),
+           simde_mm_extract_epi8(beta_ptr[7], 6),
+           simde_mm_extract_epi8(beta_ptr[7], 7),
+           simde_mm_extract_epi8(beta_ptr[7], 8),
+           simde_mm_extract_epi8(beta_ptr[7], 9),
+           simde_mm_extract_epi8(beta_ptr[7], 10),
+           simde_mm_extract_epi8(beta_ptr[7], 11),
+           simde_mm_extract_epi8(beta_ptr[7], 12),
+           simde_mm_extract_epi8(beta_ptr[7], 13),
+           simde_mm_extract_epi8(beta_ptr[7], 14),
+           simde_mm_extract_epi8(beta_ptr[7], 15));
 #endif
     int loopval=(rerun_flag==0)?0:((frame_length-L)>>4);
-    __m128i max_val=_mm_set1_epi8(32);
-    __m128i zeros=_mm_set1_epi8(0);
+    simde__m128i max_val = simde_mm_set1_epi8(32);
+    simde__m128i zeros = simde_mm_set1_epi8(0);
 
     for (k=(frame_length>>4)-1; k>=loopval; k--) {
-      m11_128=((__m128i *)m_11)[k];
-      m10_128=((__m128i *)m_10)[k];
+      m11_128 = ((simde__m128i *)m_11)[k];
+      m10_128 = ((simde__m128i *)m_10)[k];
       /*
       if ((offset8_flag==1) && (k==((frame_length>>4)-9))) {
-      beta_ptr[0] = _mm_insert_epi8(beta_ptr[0],beta0,15);
-      beta_ptr[1] = _mm_insert_epi8(beta_ptr[1],beta1,15);
-      beta_ptr[2] = _mm_insert_epi8(beta_ptr[2],beta2,15);
-      beta_ptr[3] = _mm_insert_epi8(beta_ptr[3],beta3,15);
-      beta_ptr[4] = _mm_insert_epi8(beta_ptr[4],beta4,15);
-      beta_ptr[5] = _mm_insert_epi8(beta_ptr[5],beta5,15);
-      beta_ptr[6] = _mm_insert_epi8(beta_ptr[6],beta6,15);
-      beta_ptr[7] = _mm_insert_epi8(beta_ptr[7],beta7,15);
+      beta_ptr[0] = simde_mm_insert_epi8(beta_ptr[0],beta0,15);
+      beta_ptr[1] = simde_mm_insert_epi8(beta_ptr[1],beta1,15);
+      beta_ptr[2] = simde_mm_insert_epi8(beta_ptr[2],beta2,15);
+      beta_ptr[3] = simde_mm_insert_epi8(beta_ptr[3],beta3,15);
+      beta_ptr[4] = simde_mm_insert_epi8(beta_ptr[4],beta4,15);
+      beta_ptr[5] = simde_mm_insert_epi8(beta_ptr[5],beta5,15);
+      beta_ptr[6] = simde_mm_insert_epi8(beta_ptr[6],beta6,15);
+      beta_ptr[7] = simde_mm_insert_epi8(beta_ptr[7],beta7,15);
       }*/
       //      print_bytes("m11:",&m11_128);
-      m_b0 = _mm_adds_epi8(beta_ptr[4],m11_128);  //m11
-      m_b1 = _mm_subs_epi8(beta_ptr[4],m11_128);  //m00
-      m_b2 = _mm_subs_epi8(beta_ptr[5],m10_128);  //m01
-      m_b3 = _mm_adds_epi8(beta_ptr[5],m10_128);  //m10
-      m_b4 = _mm_adds_epi8(beta_ptr[6],m10_128);  //m10
-      m_b5 = _mm_subs_epi8(beta_ptr[6],m10_128);  //m01
-      m_b6 = _mm_subs_epi8(beta_ptr[7],m11_128);  //m00
-      m_b7 = _mm_adds_epi8(beta_ptr[7],m11_128);  //m11
-      new0 = _mm_subs_epi8(beta_ptr[0],m11_128);  //m00
-      new1 = _mm_adds_epi8(beta_ptr[0],m11_128);  //m11
-      new2 = _mm_adds_epi8(beta_ptr[1],m10_128);  //m10
-      new3 = _mm_subs_epi8(beta_ptr[1],m10_128);  //m01
-      new4 = _mm_subs_epi8(beta_ptr[2],m10_128);  //m01
-      new5 = _mm_adds_epi8(beta_ptr[2],m10_128);  //m10
-      new6 = _mm_adds_epi8(beta_ptr[3],m11_128);  //m11
-      new7 = _mm_subs_epi8(beta_ptr[3],m11_128);  //m00
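+      // 8-bit butterfly: same max-log-MAP update as the 16-bit path, using saturated byte arithmetic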
+      m_b0 = simde_mm_adds_epi8(beta_ptr[4], m11_128); // m11
+      m_b1 = simde_mm_subs_epi8(beta_ptr[4], m11_128); // m00
+      m_b2 = simde_mm_subs_epi8(beta_ptr[5], m10_128); // m01
+      m_b3 = simde_mm_adds_epi8(beta_ptr[5], m10_128); // m10
+      m_b4 = simde_mm_adds_epi8(beta_ptr[6], m10_128); // m10
+      m_b5 = simde_mm_subs_epi8(beta_ptr[6], m10_128); // m01
+      m_b6 = simde_mm_subs_epi8(beta_ptr[7], m11_128); // m00
+      m_b7 = simde_mm_adds_epi8(beta_ptr[7], m11_128); // m11
+      new0 = simde_mm_subs_epi8(beta_ptr[0], m11_128); // m00
+      new1 = simde_mm_adds_epi8(beta_ptr[0], m11_128); // m11
+      new2 = simde_mm_adds_epi8(beta_ptr[1], m10_128); // m10
+      new3 = simde_mm_subs_epi8(beta_ptr[1], m10_128); // m01
+      new4 = simde_mm_subs_epi8(beta_ptr[2], m10_128); // m01
+      new5 = simde_mm_adds_epi8(beta_ptr[2], m10_128); // m10
+      new6 = simde_mm_adds_epi8(beta_ptr[3], m11_128); // m11
+      new7 = simde_mm_subs_epi8(beta_ptr[3], m11_128); // m00
       beta_ptr-=8;
-      beta_ptr[0] = _mm_max_epi8(m_b0,new0);
-      beta_ptr[1] = _mm_max_epi8(m_b1,new1);
-      beta_ptr[2] = _mm_max_epi8(m_b2,new2);
-      beta_ptr[3] = _mm_max_epi8(m_b3,new3);
-      beta_ptr[4] = _mm_max_epi8(m_b4,new4);
-      beta_ptr[5] = _mm_max_epi8(m_b5,new5);
-      beta_ptr[6] = _mm_max_epi8(m_b6,new6);
-      beta_ptr[7] = _mm_max_epi8(m_b7,new7);
-      beta_max = _mm_max_epi8(beta_ptr[0],beta_ptr[1]);
-      beta_max = _mm_max_epi8(beta_max   ,beta_ptr[2]);
-      beta_max = _mm_max_epi8(beta_max   ,beta_ptr[3]);
-      beta_max = _mm_max_epi8(beta_max   ,beta_ptr[4]);
-      beta_max = _mm_max_epi8(beta_max   ,beta_ptr[5]);
-      beta_max = _mm_max_epi8(beta_max   ,beta_ptr[6]);
-      beta_max = _mm_max_epi8(beta_max   ,beta_ptr[7]);
-      beta_ptr[0] = _mm_subs_epi8(beta_ptr[0],beta_max);
-      beta_ptr[1] = _mm_subs_epi8(beta_ptr[1],beta_max);
-      beta_ptr[2] = _mm_subs_epi8(beta_ptr[2],beta_max);
-      beta_ptr[3] = _mm_subs_epi8(beta_ptr[3],beta_max);
-      beta_ptr[4] = _mm_subs_epi8(beta_ptr[4],beta_max);
-      beta_ptr[5] = _mm_subs_epi8(beta_ptr[5],beta_max);
-      beta_ptr[6] = _mm_subs_epi8(beta_ptr[6],beta_max);
-      beta_ptr[7] = _mm_subs_epi8(beta_ptr[7],beta_max);
+      beta_ptr[0] = simde_mm_max_epi8(m_b0, new0);
+      beta_ptr[1] = simde_mm_max_epi8(m_b1, new1);
+      beta_ptr[2] = simde_mm_max_epi8(m_b2, new2);
+      beta_ptr[3] = simde_mm_max_epi8(m_b3, new3);
+      beta_ptr[4] = simde_mm_max_epi8(m_b4, new4);
+      beta_ptr[5] = simde_mm_max_epi8(m_b5, new5);
+      beta_ptr[6] = simde_mm_max_epi8(m_b6, new6);
+      beta_ptr[7] = simde_mm_max_epi8(m_b7, new7);
+      beta_max = simde_mm_max_epi8(beta_ptr[0], beta_ptr[1]);
+      beta_max = simde_mm_max_epi8(beta_max, beta_ptr[2]);
+      beta_max = simde_mm_max_epi8(beta_max, beta_ptr[3]);
+      beta_max = simde_mm_max_epi8(beta_max, beta_ptr[4]);
+      beta_max = simde_mm_max_epi8(beta_max, beta_ptr[5]);
+      beta_max = simde_mm_max_epi8(beta_max, beta_ptr[6]);
+      beta_max = simde_mm_max_epi8(beta_max, beta_ptr[7]);
+      beta_ptr[0] = simde_mm_subs_epi8(beta_ptr[0], beta_max);
+      beta_ptr[1] = simde_mm_subs_epi8(beta_ptr[1], beta_max);
+      beta_ptr[2] = simde_mm_subs_epi8(beta_ptr[2], beta_max);
+      beta_ptr[3] = simde_mm_subs_epi8(beta_ptr[3], beta_max);
+      beta_ptr[4] = simde_mm_subs_epi8(beta_ptr[4], beta_max);
+      beta_ptr[5] = simde_mm_subs_epi8(beta_ptr[5], beta_max);
+      beta_ptr[6] = simde_mm_subs_epi8(beta_ptr[6], beta_max);
+      beta_ptr[7] = simde_mm_subs_epi8(beta_ptr[7], beta_max);
       /*
       printf("beta0 %d:  %03d,%03d,%03d,%03d,%03d,%03d,%03d,%03d,%03d,%03d,%03d,%03d,%03d,%03d,%03d,%03d\n",
       k,
-      _mm_extract_epi8(beta_ptr[0],0),
-      _mm_extract_epi8(beta_ptr[0],1),
-      _mm_extract_epi8(beta_ptr[0],2),
-      _mm_extract_epi8(beta_ptr[0],3),
-      _mm_extract_epi8(beta_ptr[0],4),
-      _mm_extract_epi8(beta_ptr[0],5),
-      _mm_extract_epi8(beta_ptr[0],6),
-      _mm_extract_epi8(beta_ptr[0],7),
-      _mm_extract_epi8(beta_ptr[0],8),
-      _mm_extract_epi8(beta_ptr[0],9),
-      _mm_extract_epi8(beta_ptr[0],10),
-      _mm_extract_epi8(beta_ptr[0],11),
-      _mm_extract_epi8(beta_ptr[0],12),
-      _mm_extract_epi8(beta_ptr[0],13),
-      _mm_extract_epi8(beta_ptr[0],14),
-      _mm_extract_epi8(beta_ptr[0],15));
+      simde_mm_extract_epi8(beta_ptr[0],0),
+      simde_mm_extract_epi8(beta_ptr[0],1),
+      simde_mm_extract_epi8(beta_ptr[0],2),
+      simde_mm_extract_epi8(beta_ptr[0],3),
+      simde_mm_extract_epi8(beta_ptr[0],4),
+      simde_mm_extract_epi8(beta_ptr[0],5),
+      simde_mm_extract_epi8(beta_ptr[0],6),
+      simde_mm_extract_epi8(beta_ptr[0],7),
+      simde_mm_extract_epi8(beta_ptr[0],8),
+      simde_mm_extract_epi8(beta_ptr[0],9),
+      simde_mm_extract_epi8(beta_ptr[0],10),
+      simde_mm_extract_epi8(beta_ptr[0],11),
+      simde_mm_extract_epi8(beta_ptr[0],12),
+      simde_mm_extract_epi8(beta_ptr[0],13),
+      simde_mm_extract_epi8(beta_ptr[0],14),
+      simde_mm_extract_epi8(beta_ptr[0],15));
       printf("beta1 %d:  %03d,%03d,%03d,%03d,%03d,%03d,%03d,%03d,%03d,%03d,%03d,%03d,%03d,%03d,%03d,%03d\n",
       k,
-      _mm_extract_epi8(beta_ptr[1],0),
-      _mm_extract_epi8(beta_ptr[1],1),
-      _mm_extract_epi8(beta_ptr[1],2),
-      _mm_extract_epi8(beta_ptr[1],3),
-      _mm_extract_epi8(beta_ptr[1],4),
-      _mm_extract_epi8(beta_ptr[1],5),
-      _mm_extract_epi8(beta_ptr[1],6),
-      _mm_extract_epi8(beta_ptr[1],7),
-      _mm_extract_epi8(beta_ptr[1],8),
-      _mm_extract_epi8(beta_ptr[1],9),
-      _mm_extract_epi8(beta_ptr[1],10),
-      _mm_extract_epi8(beta_ptr[1],11),
-      _mm_extract_epi8(beta_ptr[1],12),
-      _mm_extract_epi8(beta_ptr[1],13),
-      _mm_extract_epi8(beta_ptr[1],14),
-      _mm_extract_epi8(beta_ptr[1],15));
+      simde_mm_extract_epi8(beta_ptr[1],0),
+      simde_mm_extract_epi8(beta_ptr[1],1),
+      simde_mm_extract_epi8(beta_ptr[1],2),
+      simde_mm_extract_epi8(beta_ptr[1],3),
+      simde_mm_extract_epi8(beta_ptr[1],4),
+      simde_mm_extract_epi8(beta_ptr[1],5),
+      simde_mm_extract_epi8(beta_ptr[1],6),
+      simde_mm_extract_epi8(beta_ptr[1],7),
+      simde_mm_extract_epi8(beta_ptr[1],8),
+      simde_mm_extract_epi8(beta_ptr[1],9),
+      simde_mm_extract_epi8(beta_ptr[1],10),
+      simde_mm_extract_epi8(beta_ptr[1],11),
+      simde_mm_extract_epi8(beta_ptr[1],12),
+      simde_mm_extract_epi8(beta_ptr[1],13),
+      simde_mm_extract_epi8(beta_ptr[1],14),
+      simde_mm_extract_epi8(beta_ptr[1],15));
       printf("beta2 %d:  %03d,%03d,%03d,%03d,%03d,%03d,%03d,%03d,%03d,%03d,%03d,%03d,%03d,%03d,%03d,%03d\n",
       k,
-      _mm_extract_epi8(beta_ptr[2],0),
-      _mm_extract_epi8(beta_ptr[2],1),
-      _mm_extract_epi8(beta_ptr[2],2),
-      _mm_extract_epi8(beta_ptr[2],3),
-      _mm_extract_epi8(beta_ptr[2],4),
-      _mm_extract_epi8(beta_ptr[2],5),
-      _mm_extract_epi8(beta_ptr[2],6),
-      _mm_extract_epi8(beta_ptr[2],7),
-      _mm_extract_epi8(beta_ptr[2],8),
-      _mm_extract_epi8(beta_ptr[2],9),
-      _mm_extract_epi8(beta_ptr[2],10),
-      _mm_extract_epi8(beta_ptr[2],11),
-      _mm_extract_epi8(beta_ptr[2],12),
-      _mm_extract_epi8(beta_ptr[2],13),
-      _mm_extract_epi8(beta_ptr[2],14),
-      _mm_extract_epi8(beta_ptr[2],15));
+      simde_mm_extract_epi8(beta_ptr[2],0),
+      simde_mm_extract_epi8(beta_ptr[2],1),
+      simde_mm_extract_epi8(beta_ptr[2],2),
+      simde_mm_extract_epi8(beta_ptr[2],3),
+      simde_mm_extract_epi8(beta_ptr[2],4),
+      simde_mm_extract_epi8(beta_ptr[2],5),
+      simde_mm_extract_epi8(beta_ptr[2],6),
+      simde_mm_extract_epi8(beta_ptr[2],7),
+      simde_mm_extract_epi8(beta_ptr[2],8),
+      simde_mm_extract_epi8(beta_ptr[2],9),
+      simde_mm_extract_epi8(beta_ptr[2],10),
+      simde_mm_extract_epi8(beta_ptr[2],11),
+      simde_mm_extract_epi8(beta_ptr[2],12),
+      simde_mm_extract_epi8(beta_ptr[2],13),
+      simde_mm_extract_epi8(beta_ptr[2],14),
+      simde_mm_extract_epi8(beta_ptr[2],15));
       printf("beta3 %d:  %03d,%03d,%03d,%03d,%03d,%03d,%03d,%03d,%03d,%03d,%03d,%03d,%03d,%03d,%03d,%03d\n",
       k,
-      _mm_extract_epi8(beta_ptr[3],0),
-      _mm_extract_epi8(beta_ptr[3],1),
-      _mm_extract_epi8(beta_ptr[3],2),
-      _mm_extract_epi8(beta_ptr[3],3),
-      _mm_extract_epi8(beta_ptr[3],4),
-      _mm_extract_epi8(beta_ptr[3],5),
-      _mm_extract_epi8(beta_ptr[3],6),
-      _mm_extract_epi8(beta_ptr[3],7),
-      _mm_extract_epi8(beta_ptr[3],8),
-      _mm_extract_epi8(beta_ptr[3],9),
-      _mm_extract_epi8(beta_ptr[3],10),
-      _mm_extract_epi8(beta_ptr[3],11),
-      _mm_extract_epi8(beta_ptr[3],12),
-      _mm_extract_epi8(beta_ptr[3],13),
-      _mm_extract_epi8(beta_ptr[3],14),
-      _mm_extract_epi8(beta_ptr[3],15));
+      simde_mm_extract_epi8(beta_ptr[3],0),
+      simde_mm_extract_epi8(beta_ptr[3],1),
+      simde_mm_extract_epi8(beta_ptr[3],2),
+      simde_mm_extract_epi8(beta_ptr[3],3),
+      simde_mm_extract_epi8(beta_ptr[3],4),
+      simde_mm_extract_epi8(beta_ptr[3],5),
+      simde_mm_extract_epi8(beta_ptr[3],6),
+      simde_mm_extract_epi8(beta_ptr[3],7),
+      simde_mm_extract_epi8(beta_ptr[3],8),
+      simde_mm_extract_epi8(beta_ptr[3],9),
+      simde_mm_extract_epi8(beta_ptr[3],10),
+      simde_mm_extract_epi8(beta_ptr[3],11),
+      simde_mm_extract_epi8(beta_ptr[3],12),
+      simde_mm_extract_epi8(beta_ptr[3],13),
+      simde_mm_extract_epi8(beta_ptr[3],14),
+      simde_mm_extract_epi8(beta_ptr[3],15));
       printf("beta4 %d:  %03d,%03d,%03d,%03d,%03d,%03d,%03d,%03d,%03d,%03d,%03d,%03d,%03d,%03d,%03d,%03d\n",
       k,
-      _mm_extract_epi8(beta_ptr[4],0),
-      _mm_extract_epi8(beta_ptr[4],1),
-      _mm_extract_epi8(beta_ptr[4],2),
-      _mm_extract_epi8(beta_ptr[4],3),
-      _mm_extract_epi8(beta_ptr[4],4),
-      _mm_extract_epi8(beta_ptr[4],5),
-      _mm_extract_epi8(beta_ptr[4],6),
-      _mm_extract_epi8(beta_ptr[4],7),
-      _mm_extract_epi8(beta_ptr[4],8),
-      _mm_extract_epi8(beta_ptr[4],9),
-      _mm_extract_epi8(beta_ptr[4],10),
-      _mm_extract_epi8(beta_ptr[4],11),
-      _mm_extract_epi8(beta_ptr[4],12),
-      _mm_extract_epi8(beta_ptr[4],13),
-      _mm_extract_epi8(beta_ptr[4],14),
-      _mm_extract_epi8(beta_ptr[4],15));
+      simde_mm_extract_epi8(beta_ptr[4],0),
+      simde_mm_extract_epi8(beta_ptr[4],1),
+      simde_mm_extract_epi8(beta_ptr[4],2),
+      simde_mm_extract_epi8(beta_ptr[4],3),
+      simde_mm_extract_epi8(beta_ptr[4],4),
+      simde_mm_extract_epi8(beta_ptr[4],5),
+      simde_mm_extract_epi8(beta_ptr[4],6),
+      simde_mm_extract_epi8(beta_ptr[4],7),
+      simde_mm_extract_epi8(beta_ptr[4],8),
+      simde_mm_extract_epi8(beta_ptr[4],9),
+      simde_mm_extract_epi8(beta_ptr[4],10),
+      simde_mm_extract_epi8(beta_ptr[4],11),
+      simde_mm_extract_epi8(beta_ptr[4],12),
+      simde_mm_extract_epi8(beta_ptr[4],13),
+      simde_mm_extract_epi8(beta_ptr[4],14),
+      simde_mm_extract_epi8(beta_ptr[4],15));
       printf("beta5 %d:  %03d,%03d,%03d,%03d,%03d,%03d,%03d,%03d,%03d,%03d,%03d,%03d,%03d,%03d,%03d,%03d\n",
       k,
-      _mm_extract_epi8(beta_ptr[5],0),
-      _mm_extract_epi8(beta_ptr[5],1),
-      _mm_extract_epi8(beta_ptr[5],2),
-      _mm_extract_epi8(beta_ptr[5],3),
-      _mm_extract_epi8(beta_ptr[5],4),
-      _mm_extract_epi8(beta_ptr[5],5),
-      _mm_extract_epi8(beta_ptr[5],6),
-      _mm_extract_epi8(beta_ptr[5],7),
-      _mm_extract_epi8(beta_ptr[5],8),
-      _mm_extract_epi8(beta_ptr[5],9),
-      _mm_extract_epi8(beta_ptr[5],10),
-      _mm_extract_epi8(beta_ptr[5],11),
-      _mm_extract_epi8(beta_ptr[5],12),
-      _mm_extract_epi8(beta_ptr[5],13),
-      _mm_extract_epi8(beta_ptr[5],14),
-      _mm_extract_epi8(beta_ptr[5],15));
+      simde_mm_extract_epi8(beta_ptr[5],0),
+      simde_mm_extract_epi8(beta_ptr[5],1),
+      simde_mm_extract_epi8(beta_ptr[5],2),
+      simde_mm_extract_epi8(beta_ptr[5],3),
+      simde_mm_extract_epi8(beta_ptr[5],4),
+      simde_mm_extract_epi8(beta_ptr[5],5),
+      simde_mm_extract_epi8(beta_ptr[5],6),
+      simde_mm_extract_epi8(beta_ptr[5],7),
+      simde_mm_extract_epi8(beta_ptr[5],8),
+      simde_mm_extract_epi8(beta_ptr[5],9),
+      simde_mm_extract_epi8(beta_ptr[5],10),
+      simde_mm_extract_epi8(beta_ptr[5],11),
+      simde_mm_extract_epi8(beta_ptr[5],12),
+      simde_mm_extract_epi8(beta_ptr[5],13),
+      simde_mm_extract_epi8(beta_ptr[5],14),
+      simde_mm_extract_epi8(beta_ptr[5],15));
       printf("beta6 %d:  %03d,%03d,%03d,%03d,%03d,%03d,%03d,%03d,%03d,%03d,%03d,%03d,%03d,%03d,%03d,%03d\n",
       k,
-      _mm_extract_epi8(beta_ptr[6],0),
-      _mm_extract_epi8(beta_ptr[6],1),
-      _mm_extract_epi8(beta_ptr[6],2),
-      _mm_extract_epi8(beta_ptr[6],3),
-      _mm_extract_epi8(beta_ptr[6],4),
-      _mm_extract_epi8(beta_ptr[6],5),
-      _mm_extract_epi8(beta_ptr[6],6),
-      _mm_extract_epi8(beta_ptr[6],7),
-      _mm_extract_epi8(beta_ptr[6],8),
-      _mm_extract_epi8(beta_ptr[6],9),
-      _mm_extract_epi8(beta_ptr[6],10),
-      _mm_extract_epi8(beta_ptr[6],11),
-      _mm_extract_epi8(beta_ptr[6],12),
-      _mm_extract_epi8(beta_ptr[6],13),
-      _mm_extract_epi8(beta_ptr[6],14),
-      _mm_extract_epi8(beta_ptr[6],15));
+      simde_mm_extract_epi8(beta_ptr[6],0),
+      simde_mm_extract_epi8(beta_ptr[6],1),
+      simde_mm_extract_epi8(beta_ptr[6],2),
+      simde_mm_extract_epi8(beta_ptr[6],3),
+      simde_mm_extract_epi8(beta_ptr[6],4),
+      simde_mm_extract_epi8(beta_ptr[6],5),
+      simde_mm_extract_epi8(beta_ptr[6],6),
+      simde_mm_extract_epi8(beta_ptr[6],7),
+      simde_mm_extract_epi8(beta_ptr[6],8),
+      simde_mm_extract_epi8(beta_ptr[6],9),
+      simde_mm_extract_epi8(beta_ptr[6],10),
+      simde_mm_extract_epi8(beta_ptr[6],11),
+      simde_mm_extract_epi8(beta_ptr[6],12),
+      simde_mm_extract_epi8(beta_ptr[6],13),
+      simde_mm_extract_epi8(beta_ptr[6],14),
+      simde_mm_extract_epi8(beta_ptr[6],15));
       printf("beta7 %d:  %03d,%03d,%03d,%03d,%03d,%03d,%03d,%03d,%03d,%03d,%03d,%03d,%03d,%03d,%03d,%03d\n",
       k,
-      _mm_extract_epi8(beta_ptr[7],0),
-      _mm_extract_epi8(beta_ptr[7],1),
-      _mm_extract_epi8(beta_ptr[7],2),
-      _mm_extract_epi8(beta_ptr[7],3),
-      _mm_extract_epi8(beta_ptr[7],4),
-      _mm_extract_epi8(beta_ptr[7],5),
-      _mm_extract_epi8(beta_ptr[7],6),
-      _mm_extract_epi8(beta_ptr[7],7),
-      _mm_extract_epi8(beta_ptr[7],8),
-      _mm_extract_epi8(beta_ptr[7],9),
-      _mm_extract_epi8(beta_ptr[7],10),
-      _mm_extract_epi8(beta_ptr[7],11),
-      _mm_extract_epi8(beta_ptr[7],12),
-      _mm_extract_epi8(beta_ptr[7],13),
-      _mm_extract_epi8(beta_ptr[7],14),
-      _mm_extract_epi8(beta_ptr[7],15));
+      simde_mm_extract_epi8(beta_ptr[7],0),
+      simde_mm_extract_epi8(beta_ptr[7],1),
+      simde_mm_extract_epi8(beta_ptr[7],2),
+      simde_mm_extract_epi8(beta_ptr[7],3),
+      simde_mm_extract_epi8(beta_ptr[7],4),
+      simde_mm_extract_epi8(beta_ptr[7],5),
+      simde_mm_extract_epi8(beta_ptr[7],6),
+      simde_mm_extract_epi8(beta_ptr[7],7),
+      simde_mm_extract_epi8(beta_ptr[7],8),
+      simde_mm_extract_epi8(beta_ptr[7],9),
+      simde_mm_extract_epi8(beta_ptr[7],10),
+      simde_mm_extract_epi8(beta_ptr[7],11),
+      simde_mm_extract_epi8(beta_ptr[7],12),
+      simde_mm_extract_epi8(beta_ptr[7],13),
+      simde_mm_extract_epi8(beta_ptr[7],14),
+      simde_mm_extract_epi8(beta_ptr[7],15));
       */
     }
 
@@ -1627,19 +1747,19 @@ void compute_beta(llr_t *alpha,llr_t *beta,llr_t *m_11,llr_t *m_10,unsigned shor
       break;
   }
 
-  _mm_empty();
-  _m_empty();
+  simde_mm_empty();
+  simde_m_empty();
 }
 
 void compute_ext(llr_t *alpha,llr_t *beta,llr_t *m_11,llr_t *m_10,llr_t *ext, llr_t *systematic,unsigned short frame_length) {
-  __m128i *alpha128=(__m128i *)alpha;
-  __m128i *beta128=(__m128i *)beta;
-  __m128i *m11_128,*m10_128,*ext_128;
-  __m128i *alpha_ptr,*beta_ptr;
-  __m128i m00_1,m00_2,m00_3,m00_4;
-  __m128i m01_1,m01_2,m01_3,m01_4;
-  __m128i m10_1,m10_2,m10_3,m10_4;
-  __m128i m11_1,m11_2,m11_3,m11_4;
+  simde__m128i *alpha128 = (simde__m128i *)alpha;
+  simde__m128i *beta128 = (simde__m128i *)beta;
+  simde__m128i *m11_128, *m10_128, *ext_128;
+  simde__m128i *alpha_ptr, *beta_ptr;
+  simde__m128i m00_1, m00_2, m00_3, m00_4;
+  simde__m128i m01_1, m01_2, m01_3, m01_4;
+  simde__m128i m10_1, m10_2, m10_3, m10_4;
+  simde__m128i m11_1, m11_2, m11_3, m11_4;
   int k;
   //
  // LLR computation, 8 consecutive bits per loop
@@ -1652,9 +1772,9 @@ void compute_ext(llr_t *alpha,llr_t *beta,llr_t *m_11,llr_t *m_10,llr_t *ext, ll
 #ifndef LLR8
 
   for (k=0; k<(frame_length>>3); k++) {
-    m11_128        = (__m128i *)&m_11[k<<3];
-    m10_128        = (__m128i *)&m_10[k<<3];
-    ext_128        = (__m128i *)&ext[k<<3];
+    m11_128 = (simde__m128i *)&m_11[k << 3];
+    m10_128 = (simde__m128i *)&m_10[k << 3];
+    ext_128 = (simde__m128i *)&ext[k << 3];
     /*
       printf("EXT %03d\n",k);
       print_shorts("a0:",&alpha_ptr[0]);
@@ -1674,22 +1794,22 @@ void compute_ext(llr_t *alpha,llr_t *beta,llr_t *m_11,llr_t *m_10,llr_t *ext, ll
       print_shorts("b6:",&beta_ptr[6]);
       print_shorts("b7:",&beta_ptr[7]);
     */
-    m00_4 = _mm_adds_epi16(alpha_ptr[7],beta_ptr[3]); //ALPHA_BETA_4m00;
-    m11_4 = _mm_adds_epi16(alpha_ptr[7],beta_ptr[7]); //ALPHA_BETA_4m11;
-    m00_3 = _mm_adds_epi16(alpha_ptr[6],beta_ptr[7]); //ALPHA_BETA_3m00;
-    m11_3 = _mm_adds_epi16(alpha_ptr[6],beta_ptr[3]); //ALPHA_BETA_3m11;
-    m00_2 = _mm_adds_epi16(alpha_ptr[1],beta_ptr[4]); //ALPHA_BETA_2m00;
-    m11_2 = _mm_adds_epi16(alpha_ptr[1],beta_ptr[0]); //ALPHA_BETA_2m11;
-    m11_1 = _mm_adds_epi16(alpha_ptr[0],beta_ptr[4]); //ALPHA_BETA_1m11;
-    m00_1 = _mm_adds_epi16(alpha_ptr[0],beta_ptr[0]); //ALPHA_BETA_1m00;
-    m01_4 = _mm_adds_epi16(alpha_ptr[5],beta_ptr[6]); //ALPHA_BETA_4m01;
-    m10_4 = _mm_adds_epi16(alpha_ptr[5],beta_ptr[2]); //ALPHA_BETA_4m10;
-    m01_3 = _mm_adds_epi16(alpha_ptr[4],beta_ptr[2]); //ALPHA_BETA_3m01;
-    m10_3 = _mm_adds_epi16(alpha_ptr[4],beta_ptr[6]); //ALPHA_BETA_3m10;
-    m01_2 = _mm_adds_epi16(alpha_ptr[3],beta_ptr[1]); //ALPHA_BETA_2m01;
-    m10_2 = _mm_adds_epi16(alpha_ptr[3],beta_ptr[5]); //ALPHA_BETA_2m10;
-    m10_1 = _mm_adds_epi16(alpha_ptr[2],beta_ptr[1]); //ALPHA_BETA_1m10;
-    m01_1 = _mm_adds_epi16(alpha_ptr[2],beta_ptr[5]); //ALPHA_BETA_1m01;
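+    // candidate metrics: alpha(state) + beta(next state) for every (systematic, parity) branch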
+    m00_4 = simde_mm_adds_epi16(alpha_ptr[7], beta_ptr[3]); // ALPHA_BETA_4m00;
+    m11_4 = simde_mm_adds_epi16(alpha_ptr[7], beta_ptr[7]); // ALPHA_BETA_4m11;
+    m00_3 = simde_mm_adds_epi16(alpha_ptr[6], beta_ptr[7]); // ALPHA_BETA_3m00;
+    m11_3 = simde_mm_adds_epi16(alpha_ptr[6], beta_ptr[3]); // ALPHA_BETA_3m11;
+    m00_2 = simde_mm_adds_epi16(alpha_ptr[1], beta_ptr[4]); // ALPHA_BETA_2m00;
+    m11_2 = simde_mm_adds_epi16(alpha_ptr[1], beta_ptr[0]); // ALPHA_BETA_2m11;
+    m11_1 = simde_mm_adds_epi16(alpha_ptr[0], beta_ptr[4]); // ALPHA_BETA_1m11;
+    m00_1 = simde_mm_adds_epi16(alpha_ptr[0], beta_ptr[0]); // ALPHA_BETA_1m00;
+    m01_4 = simde_mm_adds_epi16(alpha_ptr[5], beta_ptr[6]); // ALPHA_BETA_4m01;
+    m10_4 = simde_mm_adds_epi16(alpha_ptr[5], beta_ptr[2]); // ALPHA_BETA_4m10;
+    m01_3 = simde_mm_adds_epi16(alpha_ptr[4], beta_ptr[2]); // ALPHA_BETA_3m01;
+    m10_3 = simde_mm_adds_epi16(alpha_ptr[4], beta_ptr[6]); // ALPHA_BETA_3m10;
+    m01_2 = simde_mm_adds_epi16(alpha_ptr[3], beta_ptr[1]); // ALPHA_BETA_2m01;
+    m10_2 = simde_mm_adds_epi16(alpha_ptr[3], beta_ptr[5]); // ALPHA_BETA_2m10;
+    m10_1 = simde_mm_adds_epi16(alpha_ptr[2], beta_ptr[1]); // ALPHA_BETA_1m10;
+    m01_1 = simde_mm_adds_epi16(alpha_ptr[2], beta_ptr[5]); // ALPHA_BETA_1m01;
     /*
       print_shorts("m11_1:",&m11_1);
       print_shorts("m11_2:",&m11_2);
@@ -1708,30 +1828,30 @@ void compute_ext(llr_t *alpha,llr_t *beta,llr_t *m_11,llr_t *m_10,llr_t *ext, ll
       print_shorts("m01_3:",&m01_3);
       print_shorts("m01_4:",&m01_4);
     */
-    m01_1 = _mm_max_epi16(m01_1,m01_2);
-    m01_1 = _mm_max_epi16(m01_1,m01_3);
-    m01_1 = _mm_max_epi16(m01_1,m01_4);
-    m00_1 = _mm_max_epi16(m00_1,m00_2);
-    m00_1 = _mm_max_epi16(m00_1,m00_3);
-    m00_1 = _mm_max_epi16(m00_1,m00_4);
-    m10_1 = _mm_max_epi16(m10_1,m10_2);
-    m10_1 = _mm_max_epi16(m10_1,m10_3);
-    m10_1 = _mm_max_epi16(m10_1,m10_4);
-    m11_1 = _mm_max_epi16(m11_1,m11_2);
-    m11_1 = _mm_max_epi16(m11_1,m11_3);
-    m11_1 = _mm_max_epi16(m11_1,m11_4);
+    m01_1 = simde_mm_max_epi16(m01_1, m01_2);
+    m01_1 = simde_mm_max_epi16(m01_1, m01_3);
+    m01_1 = simde_mm_max_epi16(m01_1, m01_4);
+    m00_1 = simde_mm_max_epi16(m00_1, m00_2);
+    m00_1 = simde_mm_max_epi16(m00_1, m00_3);
+    m00_1 = simde_mm_max_epi16(m00_1, m00_4);
+    m10_1 = simde_mm_max_epi16(m10_1, m10_2);
+    m10_1 = simde_mm_max_epi16(m10_1, m10_3);
+    m10_1 = simde_mm_max_epi16(m10_1, m10_4);
+    m11_1 = simde_mm_max_epi16(m11_1, m11_2);
+    m11_1 = simde_mm_max_epi16(m11_1, m11_3);
+    m11_1 = simde_mm_max_epi16(m11_1, m11_4);
     //      print_shorts("m11_1:",&m11_1);
-    m01_1 = _mm_subs_epi16(m01_1,*m10_128);
-    m00_1 = _mm_subs_epi16(m00_1,*m11_128);
-    m10_1 = _mm_adds_epi16(m10_1,*m10_128);
-    m11_1 = _mm_adds_epi16(m11_1,*m11_128);
+    m01_1 = simde_mm_subs_epi16(m01_1, *m10_128);
+    m00_1 = simde_mm_subs_epi16(m00_1, *m11_128);
+    m10_1 = simde_mm_adds_epi16(m10_1, *m10_128);
+    m11_1 = simde_mm_adds_epi16(m11_1, *m11_128);
     //      print_shorts("m10_1:",&m10_1);
     //      print_shorts("m11_1:",&m11_1);
-    m01_1 = _mm_max_epi16(m01_1,m00_1);
-    m10_1 = _mm_max_epi16(m10_1,m11_1);
+    m01_1 = simde_mm_max_epi16(m01_1, m00_1);
+    m10_1 = simde_mm_max_epi16(m10_1, m11_1);
     //      print_shorts("m01_1:",&m01_1);
     //      print_shorts("m10_1:",&m10_1);
-    *ext_128 = _mm_subs_epi16(m10_1,m01_1);
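+    // extrinsic LLR: best bit-1 metric minus best bit-0 metric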
+    *ext_128 = simde_mm_subs_epi16(m10_1, m01_1);
     /*
       print_shorts("ext:",ext_128);
       print_shorts("m11:",m11_128);
@@ -1747,51 +1867,51 @@ void compute_ext(llr_t *alpha,llr_t *beta,llr_t *m_11,llr_t *m_10,llr_t *ext, ll
 #else
 
   for (k=0; k<(frame_length>>4); k++) {
-    m11_128        = (__m128i *)&m_11[k<<4];
-    m10_128        = (__m128i *)&m_10[k<<4];
-    ext_128        = (__m128i *)&ext[k<<4];
-    m00_4 = _mm_adds_epi8(alpha_ptr[7],beta_ptr[3]); //ALPHA_BETA_4m00;
-    m11_4 = _mm_adds_epi8(alpha_ptr[7],beta_ptr[7]); //ALPHA_BETA_4m11;
-    m00_3 = _mm_adds_epi8(alpha_ptr[6],beta_ptr[7]); //ALPHA_BETA_3m00;
-    m11_3 = _mm_adds_epi8(alpha_ptr[6],beta_ptr[3]); //ALPHA_BETA_3m11;
-    m00_2 = _mm_adds_epi8(alpha_ptr[1],beta_ptr[4]); //ALPHA_BETA_2m00;
-    m11_2 = _mm_adds_epi8(alpha_ptr[1],beta_ptr[0]); //ALPHA_BETA_2m11;
-    m11_1 = _mm_adds_epi8(alpha_ptr[0],beta_ptr[4]); //ALPHA_BETA_1m11;
-    m00_1 = _mm_adds_epi8(alpha_ptr[0],beta_ptr[0]); //ALPHA_BETA_1m00;
-    m01_4 = _mm_adds_epi8(alpha_ptr[5],beta_ptr[6]); //ALPHA_BETA_4m01;
-    m10_4 = _mm_adds_epi8(alpha_ptr[5],beta_ptr[2]); //ALPHA_BETA_4m10;
-    m01_3 = _mm_adds_epi8(alpha_ptr[4],beta_ptr[2]); //ALPHA_BETA_3m01;
-    m10_3 = _mm_adds_epi8(alpha_ptr[4],beta_ptr[6]); //ALPHA_BETA_3m10;
-    m01_2 = _mm_adds_epi8(alpha_ptr[3],beta_ptr[1]); //ALPHA_BETA_2m01;
-    m10_2 = _mm_adds_epi8(alpha_ptr[3],beta_ptr[5]); //ALPHA_BETA_2m10;
-    m10_1 = _mm_adds_epi8(alpha_ptr[2],beta_ptr[1]); //ALPHA_BETA_1m10;
-    m01_1 = _mm_adds_epi8(alpha_ptr[2],beta_ptr[5]); //ALPHA_BETA_1m01;
-    m01_1 = _mm_max_epi8(m01_1,m01_2);
-    m01_1 = _mm_max_epi8(m01_1,m01_3);
-    m01_1 = _mm_max_epi8(m01_1,m01_4);
-    m00_1 = _mm_max_epi8(m00_1,m00_2);
-    m00_1 = _mm_max_epi8(m00_1,m00_3);
-    m00_1 = _mm_max_epi8(m00_1,m00_4);
-    m10_1 = _mm_max_epi8(m10_1,m10_2);
-    m10_1 = _mm_max_epi8(m10_1,m10_3);
-    m10_1 = _mm_max_epi8(m10_1,m10_4);
-    m11_1 = _mm_max_epi8(m11_1,m11_2);
-    m11_1 = _mm_max_epi8(m11_1,m11_3);
-    m11_1 = _mm_max_epi8(m11_1,m11_4);
-    m01_1 = _mm_subs_epi8(m01_1,*m10_128);
-    m00_1 = _mm_subs_epi8(m00_1,*m11_128);
-    m10_1 = _mm_adds_epi8(m10_1,*m10_128);
-    m11_1 = _mm_adds_epi8(m11_1,*m11_128);
-    m01_1 = _mm_max_epi8(m01_1,m00_1);
-    m10_1 = _mm_max_epi8(m10_1,m11_1);
-    *ext_128 = _mm_subs_epi8(m10_1,m01_1);
+    m11_128 = (simde__m128i *)&m_11[k << 4];
+    m10_128 = (simde__m128i *)&m_10[k << 4];
+    ext_128 = (simde__m128i *)&ext[k << 4];
+    m00_4 = simde_mm_adds_epi8(alpha_ptr[7], beta_ptr[3]); // ALPHA_BETA_4m00;
+    m11_4 = simde_mm_adds_epi8(alpha_ptr[7], beta_ptr[7]); // ALPHA_BETA_4m11;
+    m00_3 = simde_mm_adds_epi8(alpha_ptr[6], beta_ptr[7]); // ALPHA_BETA_3m00;
+    m11_3 = simde_mm_adds_epi8(alpha_ptr[6], beta_ptr[3]); // ALPHA_BETA_3m11;
+    m00_2 = simde_mm_adds_epi8(alpha_ptr[1], beta_ptr[4]); // ALPHA_BETA_2m00;
+    m11_2 = simde_mm_adds_epi8(alpha_ptr[1], beta_ptr[0]); // ALPHA_BETA_2m11;
+    m11_1 = simde_mm_adds_epi8(alpha_ptr[0], beta_ptr[4]); // ALPHA_BETA_1m11;
+    m00_1 = simde_mm_adds_epi8(alpha_ptr[0], beta_ptr[0]); // ALPHA_BETA_1m00;
+    m01_4 = simde_mm_adds_epi8(alpha_ptr[5], beta_ptr[6]); // ALPHA_BETA_4m01;
+    m10_4 = simde_mm_adds_epi8(alpha_ptr[5], beta_ptr[2]); // ALPHA_BETA_4m10;
+    m01_3 = simde_mm_adds_epi8(alpha_ptr[4], beta_ptr[2]); // ALPHA_BETA_3m01;
+    m10_3 = simde_mm_adds_epi8(alpha_ptr[4], beta_ptr[6]); // ALPHA_BETA_3m10;
+    m01_2 = simde_mm_adds_epi8(alpha_ptr[3], beta_ptr[1]); // ALPHA_BETA_2m01;
+    m10_2 = simde_mm_adds_epi8(alpha_ptr[3], beta_ptr[5]); // ALPHA_BETA_2m10;
+    m10_1 = simde_mm_adds_epi8(alpha_ptr[2], beta_ptr[1]); // ALPHA_BETA_1m10;
+    m01_1 = simde_mm_adds_epi8(alpha_ptr[2], beta_ptr[5]); // ALPHA_BETA_1m01;
+    m01_1 = simde_mm_max_epi8(m01_1, m01_2);
+    m01_1 = simde_mm_max_epi8(m01_1, m01_3);
+    m01_1 = simde_mm_max_epi8(m01_1, m01_4);
+    m00_1 = simde_mm_max_epi8(m00_1, m00_2);
+    m00_1 = simde_mm_max_epi8(m00_1, m00_3);
+    m00_1 = simde_mm_max_epi8(m00_1, m00_4);
+    m10_1 = simde_mm_max_epi8(m10_1, m10_2);
+    m10_1 = simde_mm_max_epi8(m10_1, m10_3);
+    m10_1 = simde_mm_max_epi8(m10_1, m10_4);
+    m11_1 = simde_mm_max_epi8(m11_1, m11_2);
+    m11_1 = simde_mm_max_epi8(m11_1, m11_3);
+    m11_1 = simde_mm_max_epi8(m11_1, m11_4);
+    m01_1 = simde_mm_subs_epi8(m01_1, *m10_128);
+    m00_1 = simde_mm_subs_epi8(m00_1, *m11_128);
+    m10_1 = simde_mm_adds_epi8(m10_1, *m10_128);
+    m11_1 = simde_mm_adds_epi8(m11_1, *m11_128);
+    m01_1 = simde_mm_max_epi8(m01_1, m00_1);
+    m10_1 = simde_mm_max_epi8(m10_1, m11_1);
+    *ext_128 = simde_mm_subs_epi8(m10_1, m01_1);
     alpha_ptr+=8;
     beta_ptr+=8;
   }
 
 #endif
-  _mm_empty();
-  _m_empty();
+  simde_mm_empty();
+  simde_m_empty();
 }
 
 
@@ -1895,17 +2015,17 @@ unsigned char phy_threegpplte_turbo_decoder(short *y,
   llr_t m10[n+16] __attribute__ ((aligned(16)));
   int *pi2_p,*pi4_p,*pi5_p,*pi6_p;
   llr_t *s,*s1,*s2,*yp1,*yp2,*yp;
-  __m128i *yp128;
+  simde__m128i *yp128;
   unsigned int i,j,iind;//,pi;
   unsigned char iteration_cnt=0;
   uint32_t crc, oldcrc, crc_len;
   uint8_t temp;
-  __m128i tmp128[(n+8)>>3];
-  __m128i tmp, zeros=_mm_setzero_si128();
+  simde__m128i tmp128[(n + 8) >> 3];
+  simde__m128i tmp, zeros = simde_mm_setzero_si128();
 #ifdef LLR8
-  __m128i MAX128=_mm_set1_epi16(MAX/2);
+  simde__m128i MAX128 = simde_mm_set1_epi16(MAX / 2);
 #endif
-  register __m128i tmpe;
+  register simde__m128i tmpe;
   int offset8_flag=0;
 
   if (crc_type > 3) {
@@ -1954,13 +2074,14 @@ unsigned char phy_threegpplte_turbo_decoder(short *y,
 #ifdef LLR8
 
   for (i=0,j=0; i<(3*(n2>>4))+1; i++,j+=2) {
-    ((__m128i *)y8)[i] = _mm_packs_epi16(_mm_srai_epi16(((__m128i *)y)[j],1),_mm_srai_epi16(((__m128i *)y)[j+1],1));
-    //((__m128i *)y8)[i] = _mm_packs_epi16(((__m128i *)y)[j],((__m128i *)y)[j+1]);
+    ((simde__m128i *)y8)[i] =
+        simde_mm_packs_epi16(simde_mm_srai_epi16(((simde__m128i *)y)[j], 1), simde_mm_srai_epi16(((simde__m128i *)y)[j + 1], 1));
+    //((simde__m128i *)y8)[i] = simde_mm_packs_epi16(((simde__m128i *)y)[j],((simde__m128i *)y)[j+1]);
   }
 
-  yp128 = (__m128i *)y8;
+  yp128 = (simde__m128i *)y8;
 #else
-  yp128 = (__m128i *)y;
+  yp128 = (simde__m128i *)y;
 #endif
   s = systematic0;
   s1 = systematic1;
@@ -1972,47 +2093,47 @@ unsigned char phy_threegpplte_turbo_decoder(short *y,
   for (i=0; i<n2; i+=8) {
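+    // de-interleave 8 (systematic, parity1, parity2) triplets per pass -- three 128-bit loads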
     pi2_p = &pi2tab[iind][i];
     j=pi2_p[0];
-    tmpe = _mm_load_si128(yp128);
-    s[j]   = _mm_extract_epi16(tmpe,0);
-    yp1[j] = _mm_extract_epi16(tmpe,1);
-    yp2[j] = _mm_extract_epi16(tmpe,2);
+    tmpe = simde_mm_load_si128(yp128);
+    s[j] = simde_mm_extract_epi16(tmpe, 0);
+    yp1[j] = simde_mm_extract_epi16(tmpe, 1);
+    yp2[j] = simde_mm_extract_epi16(tmpe, 2);
     //    printf("init: j %d, s[j] %d yp1[j] %d yp2[j] %d\n",j,s[j],yp1[j],yp2[j]);
     j=pi2_p[1];
-    s[j]   = _mm_extract_epi16(tmpe,3);
-    yp1[j] = _mm_extract_epi16(tmpe,4);
-    yp2[j] = _mm_extract_epi16(tmpe,5);
+    s[j] = simde_mm_extract_epi16(tmpe, 3);
+    yp1[j] = simde_mm_extract_epi16(tmpe, 4);
+    yp2[j] = simde_mm_extract_epi16(tmpe, 5);
     //    printf("init: j %d, s[j] %d yp1[j] %d yp2[j] %d\n",j,s[j],yp1[j],yp2[j]);
     j=pi2_p[2];
-    s[j]   = _mm_extract_epi16(tmpe,6);
-    yp1[j] = _mm_extract_epi16(tmpe,7);
-    tmpe = _mm_load_si128(&yp128[1]);
-    yp2[j] = _mm_extract_epi16(tmpe,0);
+    s[j] = simde_mm_extract_epi16(tmpe, 6);
+    yp1[j] = simde_mm_extract_epi16(tmpe, 7);
+    tmpe = simde_mm_load_si128(&yp128[1]);
+    yp2[j] = simde_mm_extract_epi16(tmpe, 0);
     //    printf("init: j %d, s[j] %d yp1[j] %d yp2[j] %d\n",j,s[j],yp1[j],yp2[j]);
     j=pi2_p[3];
-    s[j]   = _mm_extract_epi16(tmpe,1);
-    yp1[j] = _mm_extract_epi16(tmpe,2);
-    yp2[j] = _mm_extract_epi16(tmpe,3);
+    s[j] = simde_mm_extract_epi16(tmpe, 1);
+    yp1[j] = simde_mm_extract_epi16(tmpe, 2);
+    yp2[j] = simde_mm_extract_epi16(tmpe, 3);
     //    printf("init: j %d, s[j] %d yp1[j] %d yp2[j] %d\n",j,s[j],yp1[j],yp2[j]);
     j=pi2_p[4];
-    s[j]   = _mm_extract_epi16(tmpe,4);
-    yp1[j] = _mm_extract_epi16(tmpe,5);
-    yp2[j] = _mm_extract_epi16(tmpe,6);
+    s[j] = simde_mm_extract_epi16(tmpe, 4);
+    yp1[j] = simde_mm_extract_epi16(tmpe, 5);
+    yp2[j] = simde_mm_extract_epi16(tmpe, 6);
     //    printf("init: j %d, s[j] %d yp1[j] %d yp2[j] %d\n",j,s[j],yp1[j],yp2[j]);
     j=pi2_p[5];
-    s[j]   = _mm_extract_epi16(tmpe,7);
-    tmpe = _mm_load_si128(&yp128[2]);
-    yp1[j] = _mm_extract_epi16(tmpe,0);
-    yp2[j] = _mm_extract_epi16(tmpe,1);
+    s[j] = simde_mm_extract_epi16(tmpe, 7);
+    tmpe = simde_mm_load_si128(&yp128[2]);
+    yp1[j] = simde_mm_extract_epi16(tmpe, 0);
+    yp2[j] = simde_mm_extract_epi16(tmpe, 1);
     //    printf("init: j %d, s[j] %d yp1[j] %d yp2[j] %d\n",j,s[j],yp1[j],yp2[j]);
     j=pi2_p[6];
-    s[j]   = _mm_extract_epi16(tmpe,2);
-    yp1[j] = _mm_extract_epi16(tmpe,3);
-    yp2[j] = _mm_extract_epi16(tmpe,4);
+    s[j] = simde_mm_extract_epi16(tmpe, 2);
+    yp1[j] = simde_mm_extract_epi16(tmpe, 3);
+    yp2[j] = simde_mm_extract_epi16(tmpe, 4);
     //    printf("init: j %d, s[j] %d yp1[j] %d yp2[j] %d\n",j,s[j],yp1[j],yp2[j]);
     j=pi2_p[7];
-    s[j]   = _mm_extract_epi16(tmpe,5);
-    yp1[j] = _mm_extract_epi16(tmpe,6);
-    yp2[j] = _mm_extract_epi16(tmpe,7);
+    s[j] = simde_mm_extract_epi16(tmpe, 5);
+    yp1[j] = simde_mm_extract_epi16(tmpe, 6);
+    yp2[j] = simde_mm_extract_epi16(tmpe, 7);
     //    printf("init: j %d, s[j] %d yp1[j] %d yp2[j] %d\n",j,s[j],yp1[j],yp2[j]);
     yp128+=3;
   }
@@ -2022,84 +2143,84 @@ unsigned char phy_threegpplte_turbo_decoder(short *y,
   for (i=0; i<n2; i+=16) {
     pi2_p = &pi2tab[iind][i];
     j=pi2_p[0];
-    s[j]   = _mm_extract_epi8(yp128[0],0);
-    yp1[j] = _mm_extract_epi8(yp128[0],1);
-    yp2[j] = _mm_extract_epi8(yp128[0],2);
+    s[j] = simde_mm_extract_epi8(yp128[0], 0);
+    yp1[j] = simde_mm_extract_epi8(yp128[0], 1);
+    yp2[j] = simde_mm_extract_epi8(yp128[0], 2);
     //    printf("init: j %d, s[j] %d yp1[j] %d yp2[j] %d\n",j,s[j],yp1[j],yp2[j]);
     j=pi2_p[1];
-    s[j]   = _mm_extract_epi8(yp128[0],3);
-    yp1[j] = _mm_extract_epi8(yp128[0],4);
-    yp2[j] = _mm_extract_epi8(yp128[0],5);
+    s[j] = simde_mm_extract_epi8(yp128[0], 3);
+    yp1[j] = simde_mm_extract_epi8(yp128[0], 4);
+    yp2[j] = simde_mm_extract_epi8(yp128[0], 5);
     //    printf("init: j %d, s[j] %d yp1[j] %d yp2[j] %d\n",j,s[j],yp1[j],yp2[j]);
     j=pi2_p[2];
-    s[j]   = _mm_extract_epi8(yp128[0],6);
-    yp1[j] = _mm_extract_epi8(yp128[0],7);
-    yp2[j] = _mm_extract_epi8(yp128[0],8);
+    s[j] = simde_mm_extract_epi8(yp128[0], 6);
+    yp1[j] = simde_mm_extract_epi8(yp128[0], 7);
+    yp2[j] = simde_mm_extract_epi8(yp128[0], 8);
     //    printf("init: j %d, s[j] %d yp1[j] %d yp2[j] %d\n",j,s[j],yp1[j],yp2[j]);
     j=pi2_p[3];
-    s[j]   = _mm_extract_epi8(yp128[0],9);
-    yp1[j] = _mm_extract_epi8(yp128[0],10);
-    yp2[j] = _mm_extract_epi8(yp128[0],11);
+    s[j] = simde_mm_extract_epi8(yp128[0], 9);
+    yp1[j] = simde_mm_extract_epi8(yp128[0], 10);
+    yp2[j] = simde_mm_extract_epi8(yp128[0], 11);
     //    printf("init: j %d, s[j] %d yp1[j] %d yp2[j] %d\n",j,s[j],yp1[j],yp2[j]);
     j=pi2_p[4];
-    s[j]   = _mm_extract_epi8(yp128[0],12);
-    yp1[j] = _mm_extract_epi8(yp128[0],13);
-    yp2[j] = _mm_extract_epi8(yp128[0],14);
+    s[j] = simde_mm_extract_epi8(yp128[0], 12);
+    yp1[j] = simde_mm_extract_epi8(yp128[0], 13);
+    yp2[j] = simde_mm_extract_epi8(yp128[0], 14);
     //    printf("init: j %d, s[j] %d yp1[j] %d yp2[j] %d\n",j,s[j],yp1[j],yp2[j]);
     j=pi2_p[5];
-    s[j]   = _mm_extract_epi8(yp128[0],15);
-    yp1[j] = _mm_extract_epi8(yp128[1],0);
-    yp2[j] = _mm_extract_epi8(yp128[1],1);
+    s[j] = simde_mm_extract_epi8(yp128[0], 15);
+    yp1[j] = simde_mm_extract_epi8(yp128[1], 0);
+    yp2[j] = simde_mm_extract_epi8(yp128[1], 1);
     //    printf("init: j %d, s[j] %d yp1[j] %d yp2[j] %d\n",j,s[j],yp1[j],yp2[j]);
     j=pi2_p[6];
-    s[j]   = _mm_extract_epi8(yp128[1],2);
-    yp1[j] = _mm_extract_epi8(yp128[1],3);
-    yp2[j] = _mm_extract_epi8(yp128[1],4);
+    s[j] = simde_mm_extract_epi8(yp128[1], 2);
+    yp1[j] = simde_mm_extract_epi8(yp128[1], 3);
+    yp2[j] = simde_mm_extract_epi8(yp128[1], 4);
     //    printf("init: j %d, s[j] %d yp1[j] %d yp2[j] %d\n",j,s[j],yp1[j],yp2[j]);
     j=pi2_p[7];
-    s[j]   = _mm_extract_epi8(yp128[1],5);
-    yp1[j] = _mm_extract_epi8(yp128[1],6);
-    yp2[j] = _mm_extract_epi8(yp128[1],7);
+    s[j] = simde_mm_extract_epi8(yp128[1], 5);
+    yp1[j] = simde_mm_extract_epi8(yp128[1], 6);
+    yp2[j] = simde_mm_extract_epi8(yp128[1], 7);
     //    printf("init: j %d, s[j] %d yp1[j] %d yp2[j] %d\n",j,s[j],yp1[j],yp2[j]);
     j=pi2_p[8];
-    s[j]   = _mm_extract_epi8(yp128[1],8);
-    yp1[j] = _mm_extract_epi8(yp128[1],9);
-    yp2[j] = _mm_extract_epi8(yp128[1],10);
+    s[j] = simde_mm_extract_epi8(yp128[1], 8);
+    yp1[j] = simde_mm_extract_epi8(yp128[1], 9);
+    yp2[j] = simde_mm_extract_epi8(yp128[1], 10);
     //    printf("init: j %d, s[j] %d yp1[j] %d yp2[j] %d\n",j,s[j],yp1[j],yp2[j]);
     j=pi2_p[9];
-    s[j]   = _mm_extract_epi8(yp128[1],11);
-    yp1[j] = _mm_extract_epi8(yp128[1],12);
-    yp2[j] = _mm_extract_epi8(yp128[1],13);
+    s[j] = simde_mm_extract_epi8(yp128[1], 11);
+    yp1[j] = simde_mm_extract_epi8(yp128[1], 12);
+    yp2[j] = simde_mm_extract_epi8(yp128[1], 13);
     //    printf("init: j %d, s[j] %d yp1[j] %d yp2[j] %d\n",j,s[j],yp1[j],yp2[j]);
     j=pi2_p[10];
-    s[j]   = _mm_extract_epi8(yp128[1],14);
-    yp1[j] = _mm_extract_epi8(yp128[1],15);
-    yp2[j] = _mm_extract_epi8(yp128[2],0);
+    s[j] = simde_mm_extract_epi8(yp128[1], 14);
+    yp1[j] = simde_mm_extract_epi8(yp128[1], 15);
+    yp2[j] = simde_mm_extract_epi8(yp128[2], 0);
     //    printf("init: j %d, s[j] %d yp1[j] %d yp2[j] %d\n",j,s[j],yp1[j],yp2[j]);
     j=pi2_p[11];
-    s[j]   = _mm_extract_epi8(yp128[2],1);
-    yp1[j] = _mm_extract_epi8(yp128[2],2);
-    yp2[j] = _mm_extract_epi8(yp128[2],3);
+    s[j] = simde_mm_extract_epi8(yp128[2], 1);
+    yp1[j] = simde_mm_extract_epi8(yp128[2], 2);
+    yp2[j] = simde_mm_extract_epi8(yp128[2], 3);
     //    printf("init: j %d, s[j] %d yp1[j] %d yp2[j] %d\n",j,s[j],yp1[j],yp2[j]);
     j=pi2_p[12];
-    s[j]   = _mm_extract_epi8(yp128[2],4);
-    yp1[j] = _mm_extract_epi8(yp128[2],5);
-    yp2[j] = _mm_extract_epi8(yp128[2],6);
+    s[j] = simde_mm_extract_epi8(yp128[2], 4);
+    yp1[j] = simde_mm_extract_epi8(yp128[2], 5);
+    yp2[j] = simde_mm_extract_epi8(yp128[2], 6);
     //    printf("init: j %d, s[j] %d yp1[j] %d yp2[j] %d\n",j,s[j],yp1[j],yp2[j]);
     j=pi2_p[13];
-    s[j]   = _mm_extract_epi8(yp128[2],7);
-    yp1[j] = _mm_extract_epi8(yp128[2],8);
-    yp2[j] = _mm_extract_epi8(yp128[2],9);
+    s[j] = simde_mm_extract_epi8(yp128[2], 7);
+    yp1[j] = simde_mm_extract_epi8(yp128[2], 8);
+    yp2[j] = simde_mm_extract_epi8(yp128[2], 9);
     //    printf("init: j %d, s[j] %d yp1[j] %d yp2[j] %d\n",j,s[j],yp1[j],yp2[j]);
     j=pi2_p[14];
-    s[j]   = _mm_extract_epi8(yp128[2],10);
-    yp1[j] = _mm_extract_epi8(yp128[2],11);
-    yp2[j] = _mm_extract_epi8(yp128[2],12);
+    s[j] = simde_mm_extract_epi8(yp128[2], 10);
+    yp1[j] = simde_mm_extract_epi8(yp128[2], 11);
+    yp2[j] = simde_mm_extract_epi8(yp128[2], 12);
     //    printf("init: j %d, s[j] %d yp1[j] %d yp2[j] %d\n",j,s[j],yp1[j],yp2[j]);
     j=pi2_p[15];
-    s[j]   = _mm_extract_epi8(yp128[2],13);
-    yp1[j] = _mm_extract_epi8(yp128[2],14);
-    yp2[j] = _mm_extract_epi8(yp128[2],15);
+    s[j] = simde_mm_extract_epi8(yp128[2], 13);
+    yp1[j] = simde_mm_extract_epi8(yp128[2], 14);
+    yp2[j] = simde_mm_extract_epi8(yp128[2], 15);
     //    printf("init: j %d, s[j] %d yp1[j] %d yp2[j] %d\n",j,s[j],yp1[j],yp2[j]);
     yp128+=3;
   }
@@ -2180,36 +2301,36 @@ unsigned char phy_threegpplte_turbo_decoder(short *y,
     pi4_p=pi4tab[iind];
 
     for (i=0; i<(n2>>3); i++) { // steady-state portion
-      ((__m128i *)systematic2)[i]=_mm_insert_epi16(((__m128i *)systematic2)[i],((llr_t *)ext)[*pi4_p++],0);
-      ((__m128i *)systematic2)[i]=_mm_insert_epi16(((__m128i *)systematic2)[i],((llr_t *)ext)[*pi4_p++],1);
-      ((__m128i *)systematic2)[i]=_mm_insert_epi16(((__m128i *)systematic2)[i],((llr_t *)ext)[*pi4_p++],2);
-      ((__m128i *)systematic2)[i]=_mm_insert_epi16(((__m128i *)systematic2)[i],((llr_t *)ext)[*pi4_p++],3);
-      ((__m128i *)systematic2)[i]=_mm_insert_epi16(((__m128i *)systematic2)[i],((llr_t *)ext)[*pi4_p++],4);
-      ((__m128i *)systematic2)[i]=_mm_insert_epi16(((__m128i *)systematic2)[i],((llr_t *)ext)[*pi4_p++],5);
-      ((__m128i *)systematic2)[i]=_mm_insert_epi16(((__m128i *)systematic2)[i],((llr_t *)ext)[*pi4_p++],6);
-      ((__m128i *)systematic2)[i]=_mm_insert_epi16(((__m128i *)systematic2)[i],((llr_t *)ext)[*pi4_p++],7);
+      ((simde__m128i *)systematic2)[i] = simde_mm_insert_epi16(((simde__m128i *)systematic2)[i], ((llr_t *)ext)[*pi4_p++], 0);
+      ((simde__m128i *)systematic2)[i] = simde_mm_insert_epi16(((simde__m128i *)systematic2)[i], ((llr_t *)ext)[*pi4_p++], 1);
+      ((simde__m128i *)systematic2)[i] = simde_mm_insert_epi16(((simde__m128i *)systematic2)[i], ((llr_t *)ext)[*pi4_p++], 2);
+      ((simde__m128i *)systematic2)[i] = simde_mm_insert_epi16(((simde__m128i *)systematic2)[i], ((llr_t *)ext)[*pi4_p++], 3);
+      ((simde__m128i *)systematic2)[i] = simde_mm_insert_epi16(((simde__m128i *)systematic2)[i], ((llr_t *)ext)[*pi4_p++], 4);
+      ((simde__m128i *)systematic2)[i] = simde_mm_insert_epi16(((simde__m128i *)systematic2)[i], ((llr_t *)ext)[*pi4_p++], 5);
+      ((simde__m128i *)systematic2)[i] = simde_mm_insert_epi16(((simde__m128i *)systematic2)[i], ((llr_t *)ext)[*pi4_p++], 6);
+      ((simde__m128i *)systematic2)[i] = simde_mm_insert_epi16(((simde__m128i *)systematic2)[i], ((llr_t *)ext)[*pi4_p++], 7);
     }
 
 #else
     pi4_p=pi4tab[iind];
 
     for (i=0; i<(n2>>4); i++) { // steady-state portion
-      tmp=_mm_insert_epi8(tmp,((llr_t *)ext)[*pi4_p++],0);
-      tmp=_mm_insert_epi8(tmp,((llr_t *)ext)[*pi4_p++],1);
-      tmp=_mm_insert_epi8(tmp,((llr_t *)ext)[*pi4_p++],2);
-      tmp=_mm_insert_epi8(tmp,((llr_t *)ext)[*pi4_p++],3);
-      tmp=_mm_insert_epi8(tmp,((llr_t *)ext)[*pi4_p++],4);
-      tmp=_mm_insert_epi8(tmp,((llr_t *)ext)[*pi4_p++],5);
-      tmp=_mm_insert_epi8(tmp,((llr_t *)ext)[*pi4_p++],6);
-      tmp=_mm_insert_epi8(tmp,((llr_t *)ext)[*pi4_p++],7);
-      tmp=_mm_insert_epi8(tmp,((llr_t *)ext)[*pi4_p++],8);
-      tmp=_mm_insert_epi8(tmp,((llr_t *)ext)[*pi4_p++],9);
-      tmp=_mm_insert_epi8(tmp,((llr_t *)ext)[*pi4_p++],10);
-      tmp=_mm_insert_epi8(tmp,((llr_t *)ext)[*pi4_p++],11);
-      tmp=_mm_insert_epi8(tmp,((llr_t *)ext)[*pi4_p++],12);
-      tmp=_mm_insert_epi8(tmp,((llr_t *)ext)[*pi4_p++],13);
-      tmp=_mm_insert_epi8(tmp,((llr_t *)ext)[*pi4_p++],14);
-      ((__m128i *)systematic2)[i]=_mm_insert_epi8(tmp,((llr_t *)ext)[*pi4_p++],15);
+      tmp = simde_mm_insert_epi8(tmp, ((llr_t *)ext)[*pi4_p++], 0);
+      tmp = simde_mm_insert_epi8(tmp, ((llr_t *)ext)[*pi4_p++], 1);
+      tmp = simde_mm_insert_epi8(tmp, ((llr_t *)ext)[*pi4_p++], 2);
+      tmp = simde_mm_insert_epi8(tmp, ((llr_t *)ext)[*pi4_p++], 3);
+      tmp = simde_mm_insert_epi8(tmp, ((llr_t *)ext)[*pi4_p++], 4);
+      tmp = simde_mm_insert_epi8(tmp, ((llr_t *)ext)[*pi4_p++], 5);
+      tmp = simde_mm_insert_epi8(tmp, ((llr_t *)ext)[*pi4_p++], 6);
+      tmp = simde_mm_insert_epi8(tmp, ((llr_t *)ext)[*pi4_p++], 7);
+      tmp = simde_mm_insert_epi8(tmp, ((llr_t *)ext)[*pi4_p++], 8);
+      tmp = simde_mm_insert_epi8(tmp, ((llr_t *)ext)[*pi4_p++], 9);
+      tmp = simde_mm_insert_epi8(tmp, ((llr_t *)ext)[*pi4_p++], 10);
+      tmp = simde_mm_insert_epi8(tmp, ((llr_t *)ext)[*pi4_p++], 11);
+      tmp = simde_mm_insert_epi8(tmp, ((llr_t *)ext)[*pi4_p++], 12);
+      tmp = simde_mm_insert_epi8(tmp, ((llr_t *)ext)[*pi4_p++], 13);
+      tmp = simde_mm_insert_epi8(tmp, ((llr_t *)ext)[*pi4_p++], 14);
+      ((simde__m128i *)systematic2)[i] = simde_mm_insert_epi8(tmp, ((llr_t *)ext)[*pi4_p++], 15);
     }
 
 #endif
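
The two loops above are the interleaver gather: each output vector is built one lane at a time with insert_epi16/insert_epi8, reading LLRs at the permuted indices in pi4_p. A sketch of the 16-bit case, assuming a perm index array shaped like pi4tab; the real loop skips the zeroing because every lane is overwritten:

    #include <stdint.h>
    #include <simde/x86/sse2.h>

    /* Gather eight 16-bit LLRs at permuted indices into one vector;
     * lane k receives llr[perm[k]]. */
    static inline simde__m128i gather8_epi16(const int16_t *llr,
                                             const unsigned short *perm)
    {
      simde__m128i v = simde_mm_setzero_si128();
      v = simde_mm_insert_epi16(v, llr[perm[0]], 0);
      v = simde_mm_insert_epi16(v, llr[perm[1]], 1);
      v = simde_mm_insert_epi16(v, llr[perm[2]], 2);
      v = simde_mm_insert_epi16(v, llr[perm[3]], 3);
      v = simde_mm_insert_epi16(v, llr[perm[4]], 4);
      v = simde_mm_insert_epi16(v, llr[perm[5]], 5);
      v = simde_mm_insert_epi16(v, llr[perm[6]], 6);
      v = simde_mm_insert_epi16(v, llr[perm[7]], 7);
      return v;
    }
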
@@ -2220,15 +2341,16 @@ unsigned char phy_threegpplte_turbo_decoder(short *y,
     pi5_p=pi5tab[iind];
 
     for (i=0; i<(n>>3); i++) {
-      tmp=_mm_insert_epi16(tmp,ext2[*pi5_p++],0);
-      tmp=_mm_insert_epi16(tmp,ext2[*pi5_p++],1);
-      tmp=_mm_insert_epi16(tmp,ext2[*pi5_p++],2);
-      tmp=_mm_insert_epi16(tmp,ext2[*pi5_p++],3);
-      tmp=_mm_insert_epi16(tmp,ext2[*pi5_p++],4);
-      tmp=_mm_insert_epi16(tmp,ext2[*pi5_p++],5);
-      tmp=_mm_insert_epi16(tmp,ext2[*pi5_p++],6);
-      tmp=_mm_insert_epi16(tmp,ext2[*pi5_p++],7);
-      ((__m128i *)systematic1)[i] = _mm_adds_epi16(_mm_subs_epi16(tmp,((__m128i *)ext)[i]),((__m128i *)systematic0)[i]);
+      tmp = simde_mm_insert_epi16(tmp, ext2[*pi5_p++], 0);
+      tmp = simde_mm_insert_epi16(tmp, ext2[*pi5_p++], 1);
+      tmp = simde_mm_insert_epi16(tmp, ext2[*pi5_p++], 2);
+      tmp = simde_mm_insert_epi16(tmp, ext2[*pi5_p++], 3);
+      tmp = simde_mm_insert_epi16(tmp, ext2[*pi5_p++], 4);
+      tmp = simde_mm_insert_epi16(tmp, ext2[*pi5_p++], 5);
+      tmp = simde_mm_insert_epi16(tmp, ext2[*pi5_p++], 6);
+      tmp = simde_mm_insert_epi16(tmp, ext2[*pi5_p++], 7);
+      ((simde__m128i *)systematic1)[i] =
+          simde_mm_adds_epi16(simde_mm_subs_epi16(tmp, ((simde__m128i *)ext)[i]), ((simde__m128i *)systematic0)[i]);
     }
 
     if (iteration_cnt>1) {
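
The combine closing that loop is the extrinsic exchange between the two constituent decoders: the gathered second-decoder output, minus the first decoder's own extrinsic, plus the channel systematic, all in saturating 16-bit arithmetic. Restated as a sketch:

    #include <simde/x86/sse2.h>

    /* A-priori input for the next half-iteration, as computed above:
     * (gathered decoder-2 output) - (own extrinsic) + (channel systematic),
     * saturating at each step. */
    static inline simde__m128i next_apriori(simde__m128i gathered,
                                            simde__m128i own_ext,
                                            simde__m128i chan_sys)
    {
      return simde_mm_adds_epi16(simde_mm_subs_epi16(gathered, own_ext), chan_sys);
    }
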
@@ -2236,16 +2358,16 @@ unsigned char phy_threegpplte_turbo_decoder(short *y,
       pi6_p=pi6tab[iind];
 
       for (i=0; i<(n2>>3); i++) {
-        tmp=_mm_insert_epi16(tmp, ((llr_t *)ext2)[*pi6_p++],7);
-        tmp=_mm_insert_epi16(tmp, ((llr_t *)ext2)[*pi6_p++],6);
-        tmp=_mm_insert_epi16(tmp, ((llr_t *)ext2)[*pi6_p++],5);
-        tmp=_mm_insert_epi16(tmp, ((llr_t *)ext2)[*pi6_p++],4);
-        tmp=_mm_insert_epi16(tmp, ((llr_t *)ext2)[*pi6_p++],3);
-        tmp=_mm_insert_epi16(tmp, ((llr_t *)ext2)[*pi6_p++],2);
-        tmp=_mm_insert_epi16(tmp, ((llr_t *)ext2)[*pi6_p++],1);
-        tmp=_mm_insert_epi16(tmp, ((llr_t *)ext2)[*pi6_p++],0);
-        tmp=_mm_cmpgt_epi8(_mm_packs_epi16(tmp,zeros),zeros);
-        decoded_bytes[i]=(unsigned char)_mm_movemask_epi8(tmp);
+        tmp = simde_mm_insert_epi16(tmp, ((llr_t *)ext2)[*pi6_p++], 7);
+        tmp = simde_mm_insert_epi16(tmp, ((llr_t *)ext2)[*pi6_p++], 6);
+        tmp = simde_mm_insert_epi16(tmp, ((llr_t *)ext2)[*pi6_p++], 5);
+        tmp = simde_mm_insert_epi16(tmp, ((llr_t *)ext2)[*pi6_p++], 4);
+        tmp = simde_mm_insert_epi16(tmp, ((llr_t *)ext2)[*pi6_p++], 3);
+        tmp = simde_mm_insert_epi16(tmp, ((llr_t *)ext2)[*pi6_p++], 2);
+        tmp = simde_mm_insert_epi16(tmp, ((llr_t *)ext2)[*pi6_p++], 1);
+        tmp = simde_mm_insert_epi16(tmp, ((llr_t *)ext2)[*pi6_p++], 0);
+        tmp = simde_mm_cmpgt_epi8(simde_mm_packs_epi16(tmp, zeros), zeros);
+        decoded_bytes[i] = (unsigned char)simde_mm_movemask_epi8(tmp);
       }
     }
 
@@ -2254,51 +2376,52 @@ unsigned char phy_threegpplte_turbo_decoder(short *y,
     uint16_t decoded_bytes_interl[6144/16];
 
     for (i=0; i<(n2>>4); i++) {
-      tmp=_mm_insert_epi8(tmp,ext2[*pi5_p++],0);
-      tmp=_mm_insert_epi8(tmp,ext2[*pi5_p++],1);
-      tmp=_mm_insert_epi8(tmp,ext2[*pi5_p++],2);
-      tmp=_mm_insert_epi8(tmp,ext2[*pi5_p++],3);
-      tmp=_mm_insert_epi8(tmp,ext2[*pi5_p++],4);
-      tmp=_mm_insert_epi8(tmp,ext2[*pi5_p++],5);
-      tmp=_mm_insert_epi8(tmp,ext2[*pi5_p++],6);
-      tmp=_mm_insert_epi8(tmp,ext2[*pi5_p++],7);
-      tmp=_mm_insert_epi8(tmp,ext2[*pi5_p++],8);
-      tmp=_mm_insert_epi8(tmp,ext2[*pi5_p++],9);
-      tmp=_mm_insert_epi8(tmp,ext2[*pi5_p++],10);
-      tmp=_mm_insert_epi8(tmp,ext2[*pi5_p++],11);
-      tmp=_mm_insert_epi8(tmp,ext2[*pi5_p++],12);
-      tmp=_mm_insert_epi8(tmp,ext2[*pi5_p++],13);
-      tmp=_mm_insert_epi8(tmp,ext2[*pi5_p++],14);
-      tmp=_mm_insert_epi8(tmp,ext2[*pi5_p++],15);
-      //decoded_bytes_interl[i]=(uint16_t) _mm_movemask_epi8(_mm_cmpgt_epi8(tmp,zeros));
-      tmp128[i] = _mm_adds_epi8(((__m128i *)ext2)[i],((__m128i *)systematic2)[i]);
-      ((__m128i *)systematic1)[i] = _mm_adds_epi8(_mm_subs_epi8(tmp,((__m128i *)ext)[i]),((__m128i *)systematic0)[i]);
+      tmp = simde_mm_insert_epi8(tmp, ext2[*pi5_p++], 0);
+      tmp = simde_mm_insert_epi8(tmp, ext2[*pi5_p++], 1);
+      tmp = simde_mm_insert_epi8(tmp, ext2[*pi5_p++], 2);
+      tmp = simde_mm_insert_epi8(tmp, ext2[*pi5_p++], 3);
+      tmp = simde_mm_insert_epi8(tmp, ext2[*pi5_p++], 4);
+      tmp = simde_mm_insert_epi8(tmp, ext2[*pi5_p++], 5);
+      tmp = simde_mm_insert_epi8(tmp, ext2[*pi5_p++], 6);
+      tmp = simde_mm_insert_epi8(tmp, ext2[*pi5_p++], 7);
+      tmp = simde_mm_insert_epi8(tmp, ext2[*pi5_p++], 8);
+      tmp = simde_mm_insert_epi8(tmp, ext2[*pi5_p++], 9);
+      tmp = simde_mm_insert_epi8(tmp, ext2[*pi5_p++], 10);
+      tmp = simde_mm_insert_epi8(tmp, ext2[*pi5_p++], 11);
+      tmp = simde_mm_insert_epi8(tmp, ext2[*pi5_p++], 12);
+      tmp = simde_mm_insert_epi8(tmp, ext2[*pi5_p++], 13);
+      tmp = simde_mm_insert_epi8(tmp, ext2[*pi5_p++], 14);
+      tmp = simde_mm_insert_epi8(tmp, ext2[*pi5_p++], 15);
+      // decoded_bytes_interl[i]=(uint16_t) simde_mm_movemask_epi8(simde_mm_cmpgt_epi8(tmp,zeros));
+      tmp128[i] = simde_mm_adds_epi8(((simde__m128i *)ext2)[i], ((simde__m128i *)systematic2)[i]);
+      ((simde__m128i *)systematic1)[i] =
+          simde_mm_adds_epi8(simde_mm_subs_epi8(tmp, ((simde__m128i *)ext)[i]), ((simde__m128i *)systematic0)[i]);
     }
 
     /* LT modification, something wrong here
     if (iteration_cnt>1) {
       start_meas(intl2_stats);
       pi6_p=pi6tab[iind];
-      __m128i* dbytes=(__m128i*)decoded_bytes_interl;
-      __m128i shuffle=SHUFFLE16(7,6,5,4,3,2,1,0);
-      __m128i mask  __attribute__((aligned(16)));
+      simde__m128i* dbytes=(simde__m128i*)decoded_bytes_interl;
+      simde__m128i shuffle=SHUFFLE16(7,6,5,4,3,2,1,0);
+      simde__m128i mask  __attribute__((aligned(16)));
       int n_128=n2>>7;
       for (i=0;i<n_128;i++) {
-        mask=_mm_set1_epi16(1);
-        __m128i tmp __attribute__((aligned(16)));
-        tmp=_mm_shuffle_epi8(dbytes[i],shuffle);
-        __m128i tmp2 __attribute__((aligned(16))) ;
+        mask=simde_mm_set1_epi16(1);
+        simde__m128i tmp __attribute__((aligned(16)));
+        tmp=simde_mm_shuffle_epi8(dbytes[i],shuffle);
+        simde__m128i tmp2 __attribute__((aligned(16)));
 
-        tmp2=_mm_and_si128(tmp,mask);
-        tmp2=_mm_cmpeq_epi16(tmp2,mask);
-        decoded_bytes[n_128*0+i]=(uint8_t) _mm_movemask_epi8(_mm_packs_epi16(tmp2,zeros));
+        tmp2=simde_mm_and_si128(tmp,mask);
+        tmp2=simde_mm_cmpeq_epi16(tmp2,mask);
+        decoded_bytes[n_128*0+i]=(uint8_t) simde_mm_movemask_epi8(simde_mm_packs_epi16(tmp2,zeros));
 
         int j;
         for (j=1; j<16; j++) {
-    mask=_mm_slli_epi16(mask,1);
-    tmp2=_mm_and_si128(tmp,mask);
-    tmp2=_mm_cmpeq_epi16(tmp2,mask);
-    decoded_bytes[n_128*j +i]=(uint8_t) _mm_movemask_epi8(_mm_packs_epi16(tmp2,zeros));
+          mask=simde_mm_slli_epi16(mask,1);
+          tmp2=simde_mm_and_si128(tmp,mask);
+          tmp2=simde_mm_cmpeq_epi16(tmp2,mask);
+          decoded_bytes[n_128*j+i]=(uint8_t) simde_mm_movemask_epi8(simde_mm_packs_epi16(tmp2,zeros));
         }
       }
     }
@@ -2309,24 +2432,24 @@ unsigned char phy_threegpplte_turbo_decoder(short *y,
       pi6_p=pi6tab[iind];
 
       for (i=0; i<(n2>>4); i++) {
-        tmp=_mm_insert_epi8(tmp, ((llr_t *)tmp128)[*pi6_p++],7);
-        tmp=_mm_insert_epi8(tmp, ((llr_t *)tmp128)[*pi6_p++],6);
-        tmp=_mm_insert_epi8(tmp, ((llr_t *)tmp128)[*pi6_p++],5);
-        tmp=_mm_insert_epi8(tmp, ((llr_t *)tmp128)[*pi6_p++],4);
-        tmp=_mm_insert_epi8(tmp, ((llr_t *)tmp128)[*pi6_p++],3);
-        tmp=_mm_insert_epi8(tmp, ((llr_t *)tmp128)[*pi6_p++],2);
-        tmp=_mm_insert_epi8(tmp, ((llr_t *)tmp128)[*pi6_p++],1);
-        tmp=_mm_insert_epi8(tmp, ((llr_t *)tmp128)[*pi6_p++],0);
-        tmp=_mm_insert_epi8(tmp, ((llr_t *)tmp128)[*pi6_p++],15);
-        tmp=_mm_insert_epi8(tmp, ((llr_t *)tmp128)[*pi6_p++],14);
-        tmp=_mm_insert_epi8(tmp, ((llr_t *)tmp128)[*pi6_p++],13);
-        tmp=_mm_insert_epi8(tmp, ((llr_t *)tmp128)[*pi6_p++],12);
-        tmp=_mm_insert_epi8(tmp, ((llr_t *)tmp128)[*pi6_p++],11);
-        tmp=_mm_insert_epi8(tmp, ((llr_t *)tmp128)[*pi6_p++],10);
-        tmp=_mm_insert_epi8(tmp, ((llr_t *)tmp128)[*pi6_p++],9);
-        tmp=_mm_insert_epi8(tmp, ((llr_t *)tmp128)[*pi6_p++],8);
-        tmp=_mm_cmpgt_epi8(tmp,zeros);
-        ((uint16_t *)decoded_bytes)[i]=(uint16_t)_mm_movemask_epi8(tmp);
+        tmp = simde_mm_insert_epi8(tmp, ((llr_t *)tmp128)[*pi6_p++], 7);
+        tmp = simde_mm_insert_epi8(tmp, ((llr_t *)tmp128)[*pi6_p++], 6);
+        tmp = simde_mm_insert_epi8(tmp, ((llr_t *)tmp128)[*pi6_p++], 5);
+        tmp = simde_mm_insert_epi8(tmp, ((llr_t *)tmp128)[*pi6_p++], 4);
+        tmp = simde_mm_insert_epi8(tmp, ((llr_t *)tmp128)[*pi6_p++], 3);
+        tmp = simde_mm_insert_epi8(tmp, ((llr_t *)tmp128)[*pi6_p++], 2);
+        tmp = simde_mm_insert_epi8(tmp, ((llr_t *)tmp128)[*pi6_p++], 1);
+        tmp = simde_mm_insert_epi8(tmp, ((llr_t *)tmp128)[*pi6_p++], 0);
+        tmp = simde_mm_insert_epi8(tmp, ((llr_t *)tmp128)[*pi6_p++], 15);
+        tmp = simde_mm_insert_epi8(tmp, ((llr_t *)tmp128)[*pi6_p++], 14);
+        tmp = simde_mm_insert_epi8(tmp, ((llr_t *)tmp128)[*pi6_p++], 13);
+        tmp = simde_mm_insert_epi8(tmp, ((llr_t *)tmp128)[*pi6_p++], 12);
+        tmp = simde_mm_insert_epi8(tmp, ((llr_t *)tmp128)[*pi6_p++], 11);
+        tmp = simde_mm_insert_epi8(tmp, ((llr_t *)tmp128)[*pi6_p++], 10);
+        tmp = simde_mm_insert_epi8(tmp, ((llr_t *)tmp128)[*pi6_p++], 9);
+        tmp = simde_mm_insert_epi8(tmp, ((llr_t *)tmp128)[*pi6_p++], 8);
+        tmp = simde_mm_cmpgt_epi8(tmp, zeros);
+        ((uint16_t *)decoded_bytes)[i] = (uint16_t)simde_mm_movemask_epi8(tmp);
       }
     }
 
@@ -2383,14 +2506,14 @@ unsigned char phy_threegpplte_turbo_decoder(short *y,
     // do log_map from first parity bit
     if (iteration_cnt < max_iterations) {
       log_map(systematic1,yparity1,m11,m10,alpha,beta,ext,n2,0,F,offset8_flag,alpha_stats,beta_stats,gamma_stats,ext_stats);
-      __m128i *ext_128=(__m128i *) ext;
-      __m128i *s1_128=(__m128i *) systematic1;
-      __m128i *s0_128=(__m128i *) systematic0;
+      simde__m128i *ext_128 = (simde__m128i *)ext;
+      simde__m128i *s1_128 = (simde__m128i *)systematic1;
+      simde__m128i *s0_128 = (simde__m128i *)systematic0;
 #ifndef LLR8
       int myloop=n2>>3;
 
       for (i=0; i<myloop; i++) {
-        *ext_128=_mm_adds_epi16(_mm_subs_epi16(*ext_128,*s1_128++),*s0_128++);
+        *ext_128 = simde_mm_adds_epi16(simde_mm_subs_epi16(*ext_128, *s1_128++), *s0_128++);
         ext_128++;
       }
 
@@ -2398,7 +2521,7 @@ unsigned char phy_threegpplte_turbo_decoder(short *y,
       int myloop=n2>>4;
 
       for (i=0; i<myloop; i++) {
-        *ext_128=_mm_adds_epi8(_mm_subs_epi8(*ext_128,*s1_128++),*s0_128++);
+        *ext_128 = simde_mm_adds_epi8(simde_mm_subs_epi8(*ext_128, *s1_128++), *s0_128++);
         ext_128++;
       }
 
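Everything in the file above is the same mechanical substitution: __m128i becomes simde__m128i and each _mm_* intrinsic its simde_mm_* twin, so the single source path compiles natively on x86 and through NEON (or a scalar fallback) elsewhere. A minimal sketch of the pattern, assuming the simde headers are on the include path:

    #include <stdint.h>
    #include <simde/x86/sse2.h>

    /* Same semantics on x86 and aarch64: SIMDE lowers the call to the
     * native instruction where available and emulates it otherwise. */
    static inline void sat_add_8x16(int16_t *dst, const int16_t *a, const int16_t *b)
    {
      simde__m128i va = simde_mm_loadu_si128((const simde__m128i *)a);
      simde__m128i vb = simde_mm_loadu_si128((const simde__m128i *)b);
      simde_mm_storeu_si128((simde__m128i *)dst, simde_mm_adds_epi16(va, vb));
    }
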
diff --git a/openair1/PHY/CODING/3gpplte_turbo_decoder_sse_16bit.c b/openair1/PHY/CODING/3gpplte_turbo_decoder_sse_16bit.c
index 110a659000c69e5076935827b847273b01ea7e4b..c7f2c7663a3aff742658eae8c365cdd7af1d39b7 100644
--- a/openair1/PHY/CODING/3gpplte_turbo_decoder_sse_16bit.c
+++ b/openair1/PHY/CODING/3gpplte_turbo_decoder_sse_16bit.c
@@ -114,10 +114,10 @@ void compute_gamma16(llr_t *m11,llr_t *m10,llr_t *systematic,channel_t *y_parity
                      unsigned short frame_length,unsigned char term_flag) {
   int k,K1;
 #if defined(__x86_64__)||defined(__i386__)
-  __m128i *systematic128 = (__m128i *)systematic;
-  __m128i *y_parity128   = (__m128i *)y_parity;
-  __m128i *m10_128        = (__m128i *)m10;
-  __m128i *m11_128        = (__m128i *)m11;
+  simde__m128i *systematic128 = (simde__m128i *)systematic;
+  simde__m128i *y_parity128 = (simde__m128i *)y_parity;
+  simde__m128i *m10_128 = (simde__m128i *)m10;
+  simde__m128i *m11_128 = (simde__m128i *)m11;
 #elif defined(__arm__) || defined(__aarch64__)
   int16x8_t *systematic128  = (int16x8_t *)systematic;
   int16x8_t *y_parity128    = (int16x8_t *)y_parity;
@@ -131,8 +131,8 @@ void compute_gamma16(llr_t *m11,llr_t *m10,llr_t *systematic,channel_t *y_parity
 
   for (k=0; k<K1; k++) {
 #if defined(__x86_64__) || defined(__i386__)
-    m11_128[k] = _mm_srai_epi16(_mm_adds_epi16(systematic128[k],y_parity128[k]),1);
-    m10_128[k] = _mm_srai_epi16(_mm_subs_epi16(systematic128[k],y_parity128[k]),1);
+    m11_128[k] = simde_mm_srai_epi16(simde_mm_adds_epi16(systematic128[k], y_parity128[k]), 1);
+    m10_128[k] = simde_mm_srai_epi16(simde_mm_subs_epi16(systematic128[k], y_parity128[k]), 1);
 #elif defined(__arm__) || defined(__aarch64__)
     m11_128[k] = vhaddq_s16(systematic128[k],y_parity128[k]);
     m10_128[k] = vhsubq_s16(systematic128[k],y_parity128[k]);
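
compute_gamma16 builds the max-log-MAP branch metrics as half-sums: m11 = (sys + parity)/2 and m10 = (sys - parity)/2, saturating the add/sub before the arithmetic halving shift. The NEON branch reaches the same values through the fused halving ops vhaddq_s16/vhsubq_s16, which differ only when the 16-bit add or subtract would saturate. The x86/SIMDE form as a sketch:

    #include <simde/x86/sse2.h>

    /* Branch-metric pair for one vector of eight trellis columns:
     * half-sum and half-difference of systematic and parity LLRs. */
    static inline void gamma_pair(simde__m128i sys, simde__m128i par,
                                  simde__m128i *m11, simde__m128i *m10)
    {
      *m11 = simde_mm_srai_epi16(simde_mm_adds_epi16(sys, par), 1);
      *m10 = simde_mm_srai_epi16(simde_mm_subs_epi16(sys, par), 1);
    }
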
@@ -149,11 +149,11 @@ void compute_gamma16(llr_t *m11,llr_t *m10,llr_t *systematic,channel_t *y_parity
   k=frame_length>>3;
   // Termination
 #if defined(__x86_64__) || defined(__i386__)
-  m11_128[k] = _mm_srai_epi16(_mm_adds_epi16(systematic128[k+term_flag],y_parity128[k]),1);
+  m11_128[k] = simde_mm_srai_epi16(simde_mm_adds_epi16(systematic128[k + term_flag], y_parity128[k]), 1);
 #if 1
-  m10_128[k] = _mm_srai_epi16(_mm_subs_epi16(systematic128[k+term_flag],y_parity128[k]),1);
+  m10_128[k] = simde_mm_srai_epi16(simde_mm_subs_epi16(systematic128[k + term_flag], y_parity128[k]), 1);
 #else
-  m10_128[k] = _mm_srai_epi16(_mm_subs_epi16(y_parity128[k],systematic128[k+term_flag]),1);
+  m10_128[k] = simde_mm_srai_epi16(simde_mm_subs_epi16(y_parity128[k], systematic128[k + term_flag]), 1);
 #endif
 #elif defined(__arm__) || defined(__aarch64__)
   m11_128[k] = vhaddq_s16(systematic128[k+term_flag],y_parity128[k]);
@@ -173,18 +173,18 @@ void compute_gamma16(llr_t *m11,llr_t *m10,llr_t *systematic,channel_t *y_parity
 void compute_alpha16(llr_t *alpha,llr_t *beta,llr_t *m_11,llr_t *m_10,unsigned short frame_length,unsigned char F) {
   int k,l,l2,K1,rerun_flag=0;
 #if defined(__x86_64__) || defined(__i386__)
-  __m128i *alpha128=(__m128i *)alpha,*alpha_ptr,*m11p,*m10p;
+  simde__m128i *alpha128 = (simde__m128i *)alpha, *alpha_ptr, *m11p, *m10p;
 #if 1
-  __m128i a0,a1,a2,a3,a4,a5,a6,a7;
-  __m128i m_b0,m_b1,m_b2,m_b3,m_b4,m_b5,m_b6,m_b7;
-  __m128i new0,new1,new2,new3,new4,new5,new6,new7;
-  __m128i alpha_max;
+  simde__m128i a0, a1, a2, a3, a4, a5, a6, a7;
+  simde__m128i m_b0, m_b1, m_b2, m_b3, m_b4, m_b5, m_b6, m_b7;
+  simde__m128i new0, new1, new2, new3, new4, new5, new6, new7;
+  simde__m128i alpha_max;
 #else
-  __m256i *alpha256=(__m256i *)alpha,*alpha_ptr256,m11,m10;
-  __m256i a01,a23,a45,a67,a02,a13,a64,a75;
-  __m256i m_b01,m_b23,m_b45,m_b67,new01,new23,new45,new67;
-  __m256i m11m10_256;
-  __m256i alpha_max;
+  simde__m256i *alpha256 = (simde__m256i *)alpha, *alpha_ptr256, m11, m10;
+  simde__m256i a01, a23, a45, a67, a02, a13, a64, a75;
+  simde__m256i m_b01, m_b23, m_b45, m_b67, new01, new23, new45, new67;
+  simde__m256i m11m10_256;
+  simde__m256i alpha_max;
 #endif
 #elif defined(__arm__) || defined(__aarch64__)
   int16x8_t *alpha128=(int16x8_t *)alpha,*alpha_ptr;
@@ -201,21 +201,21 @@ void compute_alpha16(llr_t *alpha,llr_t *beta,llr_t *m_11,llr_t *m_10,unsigned s
 
   for (l=K1;; l=l2,rerun_flag=1) {
 #if defined(__x86_64__) || defined(__i386__)
-    alpha128 = (__m128i *)alpha;
+    alpha128 = (simde__m128i *)alpha;
 #elif defined(__arm__) || defined(__aarch64__)
     alpha128 = (int16x8_t *)alpha;
 #endif
 
     if (rerun_flag == 0) {
 #if defined(__x86_64__) || defined(__i386__)
-      alpha128[0] = _mm_set_epi16(-MAX/2,-MAX/2,-MAX/2,-MAX/2,-MAX/2,-MAX/2,-MAX/2,0);
-      alpha128[1] = _mm_set_epi16(-MAX/2,-MAX/2,-MAX/2,-MAX/2,-MAX/2,-MAX/2,-MAX/2,-MAX/2);
-      alpha128[2] = _mm_set_epi16(-MAX/2,-MAX/2,-MAX/2,-MAX/2,-MAX/2,-MAX/2,-MAX/2,-MAX/2);
-      alpha128[3] = _mm_set_epi16(-MAX/2,-MAX/2,-MAX/2,-MAX/2,-MAX/2,-MAX/2,-MAX/2,-MAX/2);
-      alpha128[4] = _mm_set_epi16(-MAX/2,-MAX/2,-MAX/2,-MAX/2,-MAX/2,-MAX/2,-MAX/2,-MAX/2);
-      alpha128[5] = _mm_set_epi16(-MAX/2,-MAX/2,-MAX/2,-MAX/2,-MAX/2,-MAX/2,-MAX/2,-MAX/2);
-      alpha128[6] = _mm_set_epi16(-MAX/2,-MAX/2,-MAX/2,-MAX/2,-MAX/2,-MAX/2,-MAX/2,-MAX/2);
-      alpha128[7] = _mm_set_epi16(-MAX/2,-MAX/2,-MAX/2,-MAX/2,-MAX/2,-MAX/2,-MAX/2,-MAX/2);
+      alpha128[0] = simde_mm_set_epi16(-MAX / 2, -MAX / 2, -MAX / 2, -MAX / 2, -MAX / 2, -MAX / 2, -MAX / 2, 0);
+      alpha128[1] = simde_mm_set_epi16(-MAX / 2, -MAX / 2, -MAX / 2, -MAX / 2, -MAX / 2, -MAX / 2, -MAX / 2, -MAX / 2);
+      alpha128[2] = simde_mm_set_epi16(-MAX / 2, -MAX / 2, -MAX / 2, -MAX / 2, -MAX / 2, -MAX / 2, -MAX / 2, -MAX / 2);
+      alpha128[3] = simde_mm_set_epi16(-MAX / 2, -MAX / 2, -MAX / 2, -MAX / 2, -MAX / 2, -MAX / 2, -MAX / 2, -MAX / 2);
+      alpha128[4] = simde_mm_set_epi16(-MAX / 2, -MAX / 2, -MAX / 2, -MAX / 2, -MAX / 2, -MAX / 2, -MAX / 2, -MAX / 2);
+      alpha128[5] = simde_mm_set_epi16(-MAX / 2, -MAX / 2, -MAX / 2, -MAX / 2, -MAX / 2, -MAX / 2, -MAX / 2, -MAX / 2);
+      alpha128[6] = simde_mm_set_epi16(-MAX / 2, -MAX / 2, -MAX / 2, -MAX / 2, -MAX / 2, -MAX / 2, -MAX / 2, -MAX / 2);
+      alpha128[7] = simde_mm_set_epi16(-MAX / 2, -MAX / 2, -MAX / 2, -MAX / 2, -MAX / 2, -MAX / 2, -MAX / 2, -MAX / 2);
 #elif defined(__arm__) || defined(__aarch64__)
       alpha128[0] = vdupq_n_s16(-MAX/2);
       alpha128[0] = vsetq_lane_s16(0,alpha128[0],0);
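
Note the argument order in the initialization above: simde_mm_set_epi16 lists lanes from 7 down to 0, so the trailing 0 in alpha128[0] lands in lane 0. Each lane is one of the eight interleaved sub-blocks ("columns"), so only the first column starts from the known zero state; the others start flat and get their real seed from the wrap-around rerun. Illustrative only:

    #include <stdint.h>
    #include <simde/x86/sse2.h>

    /* set_epi16 takes lane 7 first and lane 0 last, so the final argument
     * is the metric of the column that begins in the known zero state. */
    static inline simde__m128i alpha_state0_init(int16_t floor_metric)
    {
      return simde_mm_set_epi16(floor_metric, floor_metric, floor_metric, floor_metric,
                                floor_metric, floor_metric, floor_metric, 0);
    }
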
@@ -241,14 +241,14 @@ void compute_alpha16(llr_t *alpha,llr_t *beta,llr_t *m_11,llr_t *m_10,unsigned s
     } else {
       //set initial alpha in columns 1-7 from final alpha from last run in columns 0-6
 #if defined(__x86_64__) || defined(__i386__)
-      alpha128[0] = _mm_slli_si128(alpha128[frame_length],2);
-      alpha128[1] = _mm_slli_si128(alpha128[1+frame_length],2);
-      alpha128[2] = _mm_slli_si128(alpha128[2+frame_length],2);
-      alpha128[3] = _mm_slli_si128(alpha128[3+frame_length],2);
-      alpha128[4] = _mm_slli_si128(alpha128[4+frame_length],2);
-      alpha128[5] = _mm_slli_si128(alpha128[5+frame_length],2);
-      alpha128[6] = _mm_slli_si128(alpha128[6+frame_length],2);
-      alpha128[7] = _mm_slli_si128(alpha128[7+frame_length],2);
+      alpha128[0] = simde_mm_slli_si128(alpha128[frame_length], 2);
+      alpha128[1] = simde_mm_slli_si128(alpha128[1 + frame_length], 2);
+      alpha128[2] = simde_mm_slli_si128(alpha128[2 + frame_length], 2);
+      alpha128[3] = simde_mm_slli_si128(alpha128[3 + frame_length], 2);
+      alpha128[4] = simde_mm_slli_si128(alpha128[4 + frame_length], 2);
+      alpha128[5] = simde_mm_slli_si128(alpha128[5 + frame_length], 2);
+      alpha128[6] = simde_mm_slli_si128(alpha128[6 + frame_length], 2);
+      alpha128[7] = simde_mm_slli_si128(alpha128[7 + frame_length], 2);
 #elif defined(__arm__) || defined(__aarch64__)
       alpha128[0] = (int16x8_t)vshlq_n_s64((int64x2_t)alpha128[frame_length],16);
       alpha128[0] = vsetq_lane_s16(alpha[8],alpha128[0],3);
@@ -290,8 +290,8 @@ void compute_alpha16(llr_t *alpha,llr_t *beta,llr_t *m_11,llr_t *m_10,unsigned s
 
     alpha_ptr = &alpha128[0];
 #if defined(__x86_64__) || defined(__i386__)
-    m11p = (__m128i *)m_11;
-    m10p = (__m128i *)m_10;
+    m11p = (simde__m128i *)m_11;
+    m10p = (simde__m128i *)m_10;
 #elif defined(__arm__) || defined(__aarch64__)
     m11p = (int16x8_t *)m_11;
     m10p = (int16x8_t *)m_10;
@@ -301,45 +301,45 @@ void compute_alpha16(llr_t *alpha,llr_t *beta,llr_t *m_11,llr_t *m_10,unsigned s
          k<l;
          k++) {
 #if defined(__x86_64__) || defined(__i386__)
-      a1=_mm_load_si128(&alpha_ptr[1]);
-      a3=_mm_load_si128(&alpha_ptr[3]);
-      a5=_mm_load_si128(&alpha_ptr[5]);
-      a7=_mm_load_si128(&alpha_ptr[7]);
-      m_b0 = _mm_adds_epi16(a1,*m11p);  // m11
-      m_b4 = _mm_subs_epi16(a1,*m11p);  // m00=-m11
-      m_b1 = _mm_subs_epi16(a3,*m10p);  // m01=-m10
-      m_b5 = _mm_adds_epi16(a3,*m10p);  // m10
-      m_b2 = _mm_adds_epi16(a5,*m10p);  // m10
-      m_b6 = _mm_subs_epi16(a5,*m10p);  // m01=-m10
-      m_b3 = _mm_subs_epi16(a7,*m11p);  // m00=-m11
-      m_b7 = _mm_adds_epi16(a7,*m11p);  // m11
-      a0=_mm_load_si128(&alpha_ptr[0]);
-      a2=_mm_load_si128(&alpha_ptr[2]);
-      a4=_mm_load_si128(&alpha_ptr[4]);
-      a6=_mm_load_si128(&alpha_ptr[6]);
-      new0 = _mm_subs_epi16(a0,*m11p);  // m00=-m11
-      new4 = _mm_adds_epi16(a0,*m11p);  // m11
-      new1 = _mm_adds_epi16(a2,*m10p);  // m10
-      new5 = _mm_subs_epi16(a2,*m10p);  // m01=-m10
-      new2 = _mm_subs_epi16(a4,*m10p);  // m01=-m10
-      new6 = _mm_adds_epi16(a4,*m10p);  // m10
-      new3 = _mm_adds_epi16(a6,*m11p);  // m11
-      new7 = _mm_subs_epi16(a6,*m11p);  // m00=-m11
-      a0 = _mm_max_epi16(m_b0,new0);
-      a1 = _mm_max_epi16(m_b1,new1);
-      a2 = _mm_max_epi16(m_b2,new2);
-      a3 = _mm_max_epi16(m_b3,new3);
-      a4 = _mm_max_epi16(m_b4,new4);
-      a5 = _mm_max_epi16(m_b5,new5);
-      a6 = _mm_max_epi16(m_b6,new6);
-      a7 = _mm_max_epi16(m_b7,new7);
-      alpha_max = _mm_max_epi16(a0,a1);
-      alpha_max = _mm_max_epi16(alpha_max,a2);
-      alpha_max = _mm_max_epi16(alpha_max,a3);
-      alpha_max = _mm_max_epi16(alpha_max,a4);
-      alpha_max = _mm_max_epi16(alpha_max,a5);
-      alpha_max = _mm_max_epi16(alpha_max,a6);
-      alpha_max = _mm_max_epi16(alpha_max,a7);
+      a1 = simde_mm_load_si128(&alpha_ptr[1]);
+      a3 = simde_mm_load_si128(&alpha_ptr[3]);
+      a5 = simde_mm_load_si128(&alpha_ptr[5]);
+      a7 = simde_mm_load_si128(&alpha_ptr[7]);
+      m_b0 = simde_mm_adds_epi16(a1, *m11p); // m11
+      m_b4 = simde_mm_subs_epi16(a1, *m11p); // m00=-m11
+      m_b1 = simde_mm_subs_epi16(a3, *m10p); // m01=-m10
+      m_b5 = simde_mm_adds_epi16(a3, *m10p); // m10
+      m_b2 = simde_mm_adds_epi16(a5, *m10p); // m10
+      m_b6 = simde_mm_subs_epi16(a5, *m10p); // m01=-m10
+      m_b3 = simde_mm_subs_epi16(a7, *m11p); // m00=-m11
+      m_b7 = simde_mm_adds_epi16(a7, *m11p); // m11
+      a0 = simde_mm_load_si128(&alpha_ptr[0]);
+      a2 = simde_mm_load_si128(&alpha_ptr[2]);
+      a4 = simde_mm_load_si128(&alpha_ptr[4]);
+      a6 = simde_mm_load_si128(&alpha_ptr[6]);
+      new0 = simde_mm_subs_epi16(a0, *m11p); // m00=-m11
+      new4 = simde_mm_adds_epi16(a0, *m11p); // m11
+      new1 = simde_mm_adds_epi16(a2, *m10p); // m10
+      new5 = simde_mm_subs_epi16(a2, *m10p); // m01=-m10
+      new2 = simde_mm_subs_epi16(a4, *m10p); // m01=-m10
+      new6 = simde_mm_adds_epi16(a4, *m10p); // m10
+      new3 = simde_mm_adds_epi16(a6, *m11p); // m11
+      new7 = simde_mm_subs_epi16(a6, *m11p); // m00=-m11
+      a0 = simde_mm_max_epi16(m_b0, new0);
+      a1 = simde_mm_max_epi16(m_b1, new1);
+      a2 = simde_mm_max_epi16(m_b2, new2);
+      a3 = simde_mm_max_epi16(m_b3, new3);
+      a4 = simde_mm_max_epi16(m_b4, new4);
+      a5 = simde_mm_max_epi16(m_b5, new5);
+      a6 = simde_mm_max_epi16(m_b6, new6);
+      a7 = simde_mm_max_epi16(m_b7, new7);
+      alpha_max = simde_mm_max_epi16(a0, a1);
+      alpha_max = simde_mm_max_epi16(alpha_max, a2);
+      alpha_max = simde_mm_max_epi16(alpha_max, a3);
+      alpha_max = simde_mm_max_epi16(alpha_max, a4);
+      alpha_max = simde_mm_max_epi16(alpha_max, a5);
+      alpha_max = simde_mm_max_epi16(alpha_max, a6);
+      alpha_max = simde_mm_max_epi16(alpha_max, a7);
 #elif defined(__arm__) || defined(__aarch64__)
       m_b0 = vqaddq_s16(alpha_ptr[1],*m11p);  // m11
       m_b4 = vqsubq_s16(alpha_ptr[1],*m11p);  // m00=-m11
@@ -378,14 +378,14 @@ void compute_alpha16(llr_t *alpha,llr_t *beta,llr_t *m_11,llr_t *m_10,unsigned s
       m11p++;
       m10p++;
 #if defined(__x86_64__) || defined(__i386__)
-      alpha_ptr[0] = _mm_subs_epi16(a0,alpha_max);
-      alpha_ptr[1] = _mm_subs_epi16(a1,alpha_max);
-      alpha_ptr[2] = _mm_subs_epi16(a2,alpha_max);
-      alpha_ptr[3] = _mm_subs_epi16(a3,alpha_max);
-      alpha_ptr[4] = _mm_subs_epi16(a4,alpha_max);
-      alpha_ptr[5] = _mm_subs_epi16(a5,alpha_max);
-      alpha_ptr[6] = _mm_subs_epi16(a6,alpha_max);
-      alpha_ptr[7] = _mm_subs_epi16(a7,alpha_max);
+      alpha_ptr[0] = simde_mm_subs_epi16(a0, alpha_max);
+      alpha_ptr[1] = simde_mm_subs_epi16(a1, alpha_max);
+      alpha_ptr[2] = simde_mm_subs_epi16(a2, alpha_max);
+      alpha_ptr[3] = simde_mm_subs_epi16(a3, alpha_max);
+      alpha_ptr[4] = simde_mm_subs_epi16(a4, alpha_max);
+      alpha_ptr[5] = simde_mm_subs_epi16(a5, alpha_max);
+      alpha_ptr[6] = simde_mm_subs_epi16(a6, alpha_max);
+      alpha_ptr[7] = simde_mm_subs_epi16(a7, alpha_max);
 #elif defined(__arm__) || defined(__aarch64__)
       alpha_ptr[0] = vqsubq_s16(a0,alpha_max);
       alpha_ptr[1] = vqsubq_s16(a1,alpha_max);
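
The butterfly above is the forward recursion of the max-log-MAP: for each of the eight states, two candidate path metrics (predecessor plus or minus the branch metric) are formed with saturating adds, the larger one survives, and the running maximum over all states is subtracted so the metrics stay inside int16 range. One state update, schematically; the real loop interleaves all eight updates and computes alpha_max across them first:

    #include <simde/x86/sse2.h>

    /* max-log-MAP state update: keep the better of the two incoming path
     * metrics, then rebase by the per-stage maximum to prevent overflow. */
    static inline simde__m128i alpha_update(simde__m128i a_pred0, simde__m128i a_pred1,
                                            simde__m128i gamma, simde__m128i alpha_max)
    {
      simde__m128i via0 = simde_mm_adds_epi16(a_pred0, gamma);
      simde__m128i via1 = simde_mm_subs_epi16(a_pred1, gamma);
      return simde_mm_subs_epi16(simde_mm_max_epi16(via0, via1), alpha_max);
    }
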
@@ -445,11 +445,11 @@ void compute_alpha16(llr_t *alpha,llr_t *beta,llr_t *m_11,llr_t *m_10,unsigned s
 void compute_beta16(llr_t *alpha,llr_t *beta,llr_t *m_11,llr_t *m_10,unsigned short frame_length,unsigned char F,int offset8_flag) {
   int k,rerun_flag=0;
 #if defined(__x86_64__) || defined(__i386__)
-  __m128i m11_128,m10_128;
-  __m128i m_b0,m_b1,m_b2,m_b3,m_b4,m_b5,m_b6,m_b7;
-  __m128i new0,new1,new2,new3,new4,new5,new6,new7;
-  __m128i *beta128,*alpha128,*beta_ptr;
-  __m128i beta_max;
+  simde__m128i m11_128, m10_128;
+  simde__m128i m_b0, m_b1, m_b2, m_b3, m_b4, m_b5, m_b6, m_b7;
+  simde__m128i new0, new1, new2, new3, new4, new5, new6, new7;
+  simde__m128i *beta128, *alpha128, *beta_ptr;
+  simde__m128i beta_max;
 #elif defined(__arm__) || defined(__aarch64__)
   int16x8_t m11_128,m10_128;
   int16x8_t m_b0,m_b1,m_b2,m_b3,m_b4,m_b5,m_b6,m_b7;
@@ -516,8 +516,8 @@ void compute_beta16(llr_t *alpha,llr_t *beta,llr_t *m_11,llr_t *m_10,unsigned sh
 
   for (rerun_flag=0;; rerun_flag=1) {
 #if defined(__x86_64__) || defined(__i386__)
-    beta_ptr   = (__m128i *)&beta[frame_length<<3];
-    alpha128   = (__m128i *)&alpha[0];
+    beta_ptr = (simde__m128i *)&beta[frame_length << 3];
+    alpha128 = (simde__m128i *)&alpha[0];
 #elif defined(__arm__) || defined(__aarch64__)
     beta_ptr   = (int16x8_t *)&beta[frame_length<<3];
     alpha128   = (int16x8_t *)&alpha[0];
@@ -545,15 +545,15 @@ void compute_beta16(llr_t *alpha,llr_t *beta,llr_t *m_11,llr_t *m_10,unsigned sh
 #endif
     } else {
 #if defined(__x86_64__) || defined(__i386__)
-      beta128 = (__m128i *)&beta[0];
-      beta_ptr[0] = _mm_srli_si128(beta128[0],2);
-      beta_ptr[1] = _mm_srli_si128(beta128[1],2);
-      beta_ptr[2] = _mm_srli_si128(beta128[2],2);
-      beta_ptr[3] = _mm_srli_si128(beta128[3],2);
-      beta_ptr[4] = _mm_srli_si128(beta128[4],2);
-      beta_ptr[5] = _mm_srli_si128(beta128[5],2);
-      beta_ptr[6] = _mm_srli_si128(beta128[6],2);
-      beta_ptr[7] = _mm_srli_si128(beta128[7],2);
+      beta128 = (simde__m128i *)&beta[0];
+      beta_ptr[0] = simde_mm_srli_si128(beta128[0], 2);
+      beta_ptr[1] = simde_mm_srli_si128(beta128[1], 2);
+      beta_ptr[2] = simde_mm_srli_si128(beta128[2], 2);
+      beta_ptr[3] = simde_mm_srli_si128(beta128[3], 2);
+      beta_ptr[4] = simde_mm_srli_si128(beta128[4], 2);
+      beta_ptr[5] = simde_mm_srli_si128(beta128[5], 2);
+      beta_ptr[6] = simde_mm_srli_si128(beta128[6], 2);
+      beta_ptr[7] = simde_mm_srli_si128(beta128[7], 2);
 #elif defined(__arm__) || defined(__aarch64__)
       beta128 = (int16x8_t *)&beta[0];
       beta_ptr   = (int16x8_t *)&beta[frame_length<<3];
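
The srli_si128 block above seeds the rerun of the backward recursion: shifting a vector right by 2 bytes moves every 16-bit lane down by one (lane k takes lane k+1, lane 7 is zero-filled), so each column's final beta is initialized from the neighbouring column's initial beta of the previous pass; the alpha path uses the mirror-image slli_si128. The lane mechanics, as a sketch:

    #include <simde/x86/sse2.h>

    /* Shift the eight 16-bit lanes down by one position; 2 bytes is
     * exactly one int16 lane, and the vacated top lane becomes zero. */
    static inline simde__m128i shift_columns_down(simde__m128i prev_beta)
    {
      return simde_mm_srli_si128(prev_beta, 2);
    }
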
@@ -588,14 +588,14 @@ void compute_beta16(llr_t *alpha,llr_t *beta,llr_t *m_11,llr_t *m_10,unsigned sh
     }
 
 #if defined(__x86_64__) || defined(__i386__)
-    beta_ptr[0] = _mm_insert_epi16(beta_ptr[0],beta0_16,7);
-    beta_ptr[1] = _mm_insert_epi16(beta_ptr[1],beta1_16,7);
-    beta_ptr[2] = _mm_insert_epi16(beta_ptr[2],beta2_16,7);
-    beta_ptr[3] = _mm_insert_epi16(beta_ptr[3],beta3_16,7);
-    beta_ptr[4] = _mm_insert_epi16(beta_ptr[4],beta4_16,7);
-    beta_ptr[5] = _mm_insert_epi16(beta_ptr[5],beta5_16,7);
-    beta_ptr[6] = _mm_insert_epi16(beta_ptr[6],beta6_16,7);
-    beta_ptr[7] = _mm_insert_epi16(beta_ptr[7],beta7_16,7);
+    beta_ptr[0] = simde_mm_insert_epi16(beta_ptr[0], beta0_16, 7);
+    beta_ptr[1] = simde_mm_insert_epi16(beta_ptr[1], beta1_16, 7);
+    beta_ptr[2] = simde_mm_insert_epi16(beta_ptr[2], beta2_16, 7);
+    beta_ptr[3] = simde_mm_insert_epi16(beta_ptr[3], beta3_16, 7);
+    beta_ptr[4] = simde_mm_insert_epi16(beta_ptr[4], beta4_16, 7);
+    beta_ptr[5] = simde_mm_insert_epi16(beta_ptr[5], beta5_16, 7);
+    beta_ptr[6] = simde_mm_insert_epi16(beta_ptr[6], beta6_16, 7);
+    beta_ptr[7] = simde_mm_insert_epi16(beta_ptr[7], beta7_16, 7);
 #elif defined(__arm__) || defined(__aarch64__)
     beta_ptr[0] = vsetq_lane_s16(beta0_16,beta_ptr[0],7);
     beta_ptr[1] = vsetq_lane_s16(beta1_16,beta_ptr[1],7);
@@ -621,49 +621,49 @@ void compute_beta16(llr_t *alpha,llr_t *beta,llr_t *m_11,llr_t *m_10,unsigned sh
 
     for (k=(frame_length>>3)-1; k>=loopval; k--) {
 #if defined(__x86_64__) || defined(__i386__)
-      m11_128=((__m128i *)m_11)[k];
-      m10_128=((__m128i *)m_10)[k];
-      m_b0 = _mm_adds_epi16(beta_ptr[4],m11_128);  //m11
-      m_b1 = _mm_subs_epi16(beta_ptr[4],m11_128);  //m00
-      m_b2 = _mm_subs_epi16(beta_ptr[5],m10_128);  //m01
-      m_b3 = _mm_adds_epi16(beta_ptr[5],m10_128);  //m10
-      m_b4 = _mm_adds_epi16(beta_ptr[6],m10_128);  //m10
-      m_b5 = _mm_subs_epi16(beta_ptr[6],m10_128);  //m01
-      m_b6 = _mm_subs_epi16(beta_ptr[7],m11_128);  //m00
-      m_b7 = _mm_adds_epi16(beta_ptr[7],m11_128);  //m11
-      new0 = _mm_subs_epi16(beta_ptr[0],m11_128);  //m00
-      new1 = _mm_adds_epi16(beta_ptr[0],m11_128);  //m11
-      new2 = _mm_adds_epi16(beta_ptr[1],m10_128);  //m10
-      new3 = _mm_subs_epi16(beta_ptr[1],m10_128);  //m01
-      new4 = _mm_subs_epi16(beta_ptr[2],m10_128);  //m01
-      new5 = _mm_adds_epi16(beta_ptr[2],m10_128);  //m10
-      new6 = _mm_adds_epi16(beta_ptr[3],m11_128);  //m11
-      new7 = _mm_subs_epi16(beta_ptr[3],m11_128);  //m00
+      m11_128 = ((simde__m128i *)m_11)[k];
+      m10_128 = ((simde__m128i *)m_10)[k];
+      m_b0 = simde_mm_adds_epi16(beta_ptr[4], m11_128); // m11
+      m_b1 = simde_mm_subs_epi16(beta_ptr[4], m11_128); // m00
+      m_b2 = simde_mm_subs_epi16(beta_ptr[5], m10_128); // m01
+      m_b3 = simde_mm_adds_epi16(beta_ptr[5], m10_128); // m10
+      m_b4 = simde_mm_adds_epi16(beta_ptr[6], m10_128); // m10
+      m_b5 = simde_mm_subs_epi16(beta_ptr[6], m10_128); // m01
+      m_b6 = simde_mm_subs_epi16(beta_ptr[7], m11_128); // m00
+      m_b7 = simde_mm_adds_epi16(beta_ptr[7], m11_128); // m11
+      new0 = simde_mm_subs_epi16(beta_ptr[0], m11_128); // m00
+      new1 = simde_mm_adds_epi16(beta_ptr[0], m11_128); // m11
+      new2 = simde_mm_adds_epi16(beta_ptr[1], m10_128); // m10
+      new3 = simde_mm_subs_epi16(beta_ptr[1], m10_128); // m01
+      new4 = simde_mm_subs_epi16(beta_ptr[2], m10_128); // m01
+      new5 = simde_mm_adds_epi16(beta_ptr[2], m10_128); // m10
+      new6 = simde_mm_adds_epi16(beta_ptr[3], m11_128); // m11
+      new7 = simde_mm_subs_epi16(beta_ptr[3], m11_128); // m00
 
       beta_ptr-=8;
-      beta_ptr[0] = _mm_max_epi16(m_b0,new0);
-      beta_ptr[1] = _mm_max_epi16(m_b1,new1);
-      beta_ptr[2] = _mm_max_epi16(m_b2,new2);
-      beta_ptr[3] = _mm_max_epi16(m_b3,new3);
-      beta_ptr[4] = _mm_max_epi16(m_b4,new4);
-      beta_ptr[5] = _mm_max_epi16(m_b5,new5);
-      beta_ptr[6] = _mm_max_epi16(m_b6,new6);
-      beta_ptr[7] = _mm_max_epi16(m_b7,new7);
-      beta_max = _mm_max_epi16(beta_ptr[0],beta_ptr[1]);
-      beta_max = _mm_max_epi16(beta_max   ,beta_ptr[2]);
-      beta_max = _mm_max_epi16(beta_max   ,beta_ptr[3]);
-      beta_max = _mm_max_epi16(beta_max   ,beta_ptr[4]);
-      beta_max = _mm_max_epi16(beta_max   ,beta_ptr[5]);
-      beta_max = _mm_max_epi16(beta_max   ,beta_ptr[6]);
-      beta_max = _mm_max_epi16(beta_max   ,beta_ptr[7]);
-      beta_ptr[0] = _mm_subs_epi16(beta_ptr[0],beta_max);
-      beta_ptr[1] = _mm_subs_epi16(beta_ptr[1],beta_max);
-      beta_ptr[2] = _mm_subs_epi16(beta_ptr[2],beta_max);
-      beta_ptr[3] = _mm_subs_epi16(beta_ptr[3],beta_max);
-      beta_ptr[4] = _mm_subs_epi16(beta_ptr[4],beta_max);
-      beta_ptr[5] = _mm_subs_epi16(beta_ptr[5],beta_max);
-      beta_ptr[6] = _mm_subs_epi16(beta_ptr[6],beta_max);
-      beta_ptr[7] = _mm_subs_epi16(beta_ptr[7],beta_max);
+      beta_ptr[0] = simde_mm_max_epi16(m_b0, new0);
+      beta_ptr[1] = simde_mm_max_epi16(m_b1, new1);
+      beta_ptr[2] = simde_mm_max_epi16(m_b2, new2);
+      beta_ptr[3] = simde_mm_max_epi16(m_b3, new3);
+      beta_ptr[4] = simde_mm_max_epi16(m_b4, new4);
+      beta_ptr[5] = simde_mm_max_epi16(m_b5, new5);
+      beta_ptr[6] = simde_mm_max_epi16(m_b6, new6);
+      beta_ptr[7] = simde_mm_max_epi16(m_b7, new7);
+      beta_max = simde_mm_max_epi16(beta_ptr[0], beta_ptr[1]);
+      beta_max = simde_mm_max_epi16(beta_max, beta_ptr[2]);
+      beta_max = simde_mm_max_epi16(beta_max, beta_ptr[3]);
+      beta_max = simde_mm_max_epi16(beta_max, beta_ptr[4]);
+      beta_max = simde_mm_max_epi16(beta_max, beta_ptr[5]);
+      beta_max = simde_mm_max_epi16(beta_max, beta_ptr[6]);
+      beta_max = simde_mm_max_epi16(beta_max, beta_ptr[7]);
+      beta_ptr[0] = simde_mm_subs_epi16(beta_ptr[0], beta_max);
+      beta_ptr[1] = simde_mm_subs_epi16(beta_ptr[1], beta_max);
+      beta_ptr[2] = simde_mm_subs_epi16(beta_ptr[2], beta_max);
+      beta_ptr[3] = simde_mm_subs_epi16(beta_ptr[3], beta_max);
+      beta_ptr[4] = simde_mm_subs_epi16(beta_ptr[4], beta_max);
+      beta_ptr[5] = simde_mm_subs_epi16(beta_ptr[5], beta_max);
+      beta_ptr[6] = simde_mm_subs_epi16(beta_ptr[6], beta_max);
+      beta_ptr[7] = simde_mm_subs_epi16(beta_ptr[7], beta_max);
 #elif defined(__arm__) || defined(__aarch64__)
       m11_128=((int16x8_t *)m_11)[k];
       m10_128=((int16x8_t *)m_10)[k];
@@ -729,14 +729,14 @@ void compute_beta16(llr_t *alpha,llr_t *beta,llr_t *m_11,llr_t *m_10,unsigned sh
 
 void compute_ext16(llr_t *alpha,llr_t *beta,llr_t *m_11,llr_t *m_10,llr_t *ext, llr_t *systematic,unsigned short frame_length) {
 #if defined(__x86_64__) || defined(__i386__)
-  __m128i *alpha128=(__m128i *)alpha;
-  __m128i *beta128=(__m128i *)beta;
-  __m128i *m11_128,*m10_128,*ext_128;
-  __m128i *alpha_ptr,*beta_ptr;
-  __m128i m00_1,m00_2,m00_3,m00_4;
-  __m128i m01_1,m01_2,m01_3,m01_4;
-  __m128i m10_1,m10_2,m10_3,m10_4;
-  __m128i m11_1,m11_2,m11_3,m11_4;
+  simde__m128i *alpha128 = (simde__m128i *)alpha;
+  simde__m128i *beta128 = (simde__m128i *)beta;
+  simde__m128i *m11_128, *m10_128, *ext_128;
+  simde__m128i *alpha_ptr, *beta_ptr;
+  simde__m128i m00_1, m00_2, m00_3, m00_4;
+  simde__m128i m01_1, m01_2, m01_3, m01_4;
+  simde__m128i m10_1, m10_2, m10_3, m10_4;
+  simde__m128i m11_1, m11_2, m11_3, m11_4;
 #elif defined(__arm__) || defined(__aarch64__)
   int16x8_t *alpha128=(int16x8_t *)alpha;
   int16x8_t *beta128=(int16x8_t *)beta;
@@ -759,9 +759,9 @@ void compute_ext16(llr_t *alpha,llr_t *beta,llr_t *m_11,llr_t *m_10,llr_t *ext,
 
   for (k=0; k<(frame_length>>3); k++) {
 #if defined(__x86_64__) || defined(__i386__)
-    m11_128        = (__m128i *)&m_11[k<<3];
-    m10_128        = (__m128i *)&m_10[k<<3];
-    ext_128        = (__m128i *)&ext[k<<3];
+    m11_128 = (simde__m128i *)&m_11[k << 3];
+    m10_128 = (simde__m128i *)&m_10[k << 3];
+    ext_128 = (simde__m128i *)&ext[k << 3];
     /*
       fprintf(fdsse4,"EXT %03d\n",k);
       print_shorts("a0:",&alpha_ptr[0]);
@@ -782,39 +782,39 @@ void compute_ext16(llr_t *alpha,llr_t *beta,llr_t *m_11,llr_t *m_10,llr_t *ext,
       print_shorts("b7:",&beta_ptr[7]);
     */
 #if 1
-    m00_4 = _mm_adds_epi16(alpha_ptr[7],beta_ptr[3]); //ALPHA_BETA_4m00;
-    m11_4 = _mm_adds_epi16(alpha_ptr[7],beta_ptr[7]); //ALPHA_BETA_4m11;
-    m00_3 = _mm_adds_epi16(alpha_ptr[6],beta_ptr[7]); //ALPHA_BETA_3m00;
-    m11_3 = _mm_adds_epi16(alpha_ptr[6],beta_ptr[3]); //ALPHA_BETA_3m11;
-    m00_2 = _mm_adds_epi16(alpha_ptr[1],beta_ptr[4]); //ALPHA_BETA_2m00;
-    m11_2 = _mm_adds_epi16(alpha_ptr[1],beta_ptr[0]); //ALPHA_BETA_2m11;
-    m11_1 = _mm_adds_epi16(alpha_ptr[0],beta_ptr[4]); //ALPHA_BETA_1m11;
-    m00_1 = _mm_adds_epi16(alpha_ptr[0],beta_ptr[0]); //ALPHA_BETA_1m00;
-    m01_4 = _mm_adds_epi16(alpha_ptr[5],beta_ptr[6]); //ALPHA_BETA_4m01;
-    m10_4 = _mm_adds_epi16(alpha_ptr[5],beta_ptr[2]); //ALPHA_BETA_4m10;
-    m01_3 = _mm_adds_epi16(alpha_ptr[4],beta_ptr[2]); //ALPHA_BETA_3m01;
-    m10_3 = _mm_adds_epi16(alpha_ptr[4],beta_ptr[6]); //ALPHA_BETA_3m10;
-    m01_2 = _mm_adds_epi16(alpha_ptr[3],beta_ptr[1]); //ALPHA_BETA_2m01;
-    m10_2 = _mm_adds_epi16(alpha_ptr[3],beta_ptr[5]); //ALPHA_BETA_2m10;
-    m10_1 = _mm_adds_epi16(alpha_ptr[2],beta_ptr[1]); //ALPHA_BETA_1m10;
-    m01_1 = _mm_adds_epi16(alpha_ptr[2],beta_ptr[5]); //ALPHA_BETA_1m01;
+    m00_4 = simde_mm_adds_epi16(alpha_ptr[7], beta_ptr[3]); // ALPHA_BETA_4m00;
+    m11_4 = simde_mm_adds_epi16(alpha_ptr[7], beta_ptr[7]); // ALPHA_BETA_4m11;
+    m00_3 = simde_mm_adds_epi16(alpha_ptr[6], beta_ptr[7]); // ALPHA_BETA_3m00;
+    m11_3 = simde_mm_adds_epi16(alpha_ptr[6], beta_ptr[3]); // ALPHA_BETA_3m11;
+    m00_2 = simde_mm_adds_epi16(alpha_ptr[1], beta_ptr[4]); // ALPHA_BETA_2m00;
+    m11_2 = simde_mm_adds_epi16(alpha_ptr[1], beta_ptr[0]); // ALPHA_BETA_2m11;
+    m11_1 = simde_mm_adds_epi16(alpha_ptr[0], beta_ptr[4]); // ALPHA_BETA_1m11;
+    m00_1 = simde_mm_adds_epi16(alpha_ptr[0], beta_ptr[0]); // ALPHA_BETA_1m00;
+    m01_4 = simde_mm_adds_epi16(alpha_ptr[5], beta_ptr[6]); // ALPHA_BETA_4m01;
+    m10_4 = simde_mm_adds_epi16(alpha_ptr[5], beta_ptr[2]); // ALPHA_BETA_4m10;
+    m01_3 = simde_mm_adds_epi16(alpha_ptr[4], beta_ptr[2]); // ALPHA_BETA_3m01;
+    m10_3 = simde_mm_adds_epi16(alpha_ptr[4], beta_ptr[6]); // ALPHA_BETA_3m10;
+    m01_2 = simde_mm_adds_epi16(alpha_ptr[3], beta_ptr[1]); // ALPHA_BETA_2m01;
+    m10_2 = simde_mm_adds_epi16(alpha_ptr[3], beta_ptr[5]); // ALPHA_BETA_2m10;
+    m10_1 = simde_mm_adds_epi16(alpha_ptr[2], beta_ptr[1]); // ALPHA_BETA_1m10;
+    m01_1 = simde_mm_adds_epi16(alpha_ptr[2], beta_ptr[5]); // ALPHA_BETA_1m01;
 #else
-    m00_1 = _mm_adds_epi16(alpha_ptr[0],beta_ptr[0]); //ALPHA_BETA_1m00;
-    m10_1 = _mm_adds_epi16(alpha_ptr[2],beta_ptr[1]); //ALPHA_BETA_1m10;
-    m11_1 = _mm_adds_epi16(alpha_ptr[0],beta_ptr[4]); //ALPHA_BETA_1m11;
-    m01_1 = _mm_adds_epi16(alpha_ptr[2],beta_ptr[5]); //ALPHA_BETA_1m01;
-    m11_2 = _mm_adds_epi16(alpha_ptr[1],beta_ptr[0]); //ALPHA_BETA_2m11;
-    m01_2 = _mm_adds_epi16(alpha_ptr[3],beta_ptr[1]); //ALPHA_BETA_2m01;
-    m00_2 = _mm_adds_epi16(alpha_ptr[1],beta_ptr[4]); //ALPHA_BETA_2m00;
-    m10_2 = _mm_adds_epi16(alpha_ptr[3],beta_ptr[5]); //ALPHA_BETA_2m10;
-    m11_3 = _mm_adds_epi16(alpha_ptr[6],beta_ptr[3]); //ALPHA_BETA_3m11;
-    m01_3 = _mm_adds_epi16(alpha_ptr[4],beta_ptr[2]); //ALPHA_BETA_3m01;
-    m00_3 = _mm_adds_epi16(alpha_ptr[6],beta_ptr[7]); //ALPHA_BETA_3m00;
-    m10_3 = _mm_adds_epi16(alpha_ptr[4],beta_ptr[6]); //ALPHA_BETA_3m10;
-    m00_4 = _mm_adds_epi16(alpha_ptr[7],beta_ptr[3]); //ALPHA_BETA_4m00;
-    m10_4 = _mm_adds_epi16(alpha_ptr[5],beta_ptr[2]); //ALPHA_BETA_4m10;
-    m11_4 = _mm_adds_epi16(alpha_ptr[7],beta_ptr[7]); //ALPHA_BETA_4m11;
-    m01_4 = _mm_adds_epi16(alpha_ptr[5],beta_ptr[6]); //ALPHA_BETA_4m01;
+    m00_1 = simde_mm_adds_epi16(alpha_ptr[0], beta_ptr[0]); // ALPHA_BETA_1m00;
+    m10_1 = simde_mm_adds_epi16(alpha_ptr[2], beta_ptr[1]); // ALPHA_BETA_1m10;
+    m11_1 = simde_mm_adds_epi16(alpha_ptr[0], beta_ptr[4]); // ALPHA_BETA_1m11;
+    m01_1 = simde_mm_adds_epi16(alpha_ptr[2], beta_ptr[5]); // ALPHA_BETA_1m01;
+    m11_2 = simde_mm_adds_epi16(alpha_ptr[1], beta_ptr[0]); // ALPHA_BETA_2m11;
+    m01_2 = simde_mm_adds_epi16(alpha_ptr[3], beta_ptr[1]); // ALPHA_BETA_2m01;
+    m00_2 = simde_mm_adds_epi16(alpha_ptr[1], beta_ptr[4]); // ALPHA_BETA_2m00;
+    m10_2 = simde_mm_adds_epi16(alpha_ptr[3], beta_ptr[5]); // ALPHA_BETA_2m10;
+    m11_3 = simde_mm_adds_epi16(alpha_ptr[6], beta_ptr[3]); // ALPHA_BETA_3m11;
+    m01_3 = simde_mm_adds_epi16(alpha_ptr[4], beta_ptr[2]); // ALPHA_BETA_3m01;
+    m00_3 = simde_mm_adds_epi16(alpha_ptr[6], beta_ptr[7]); // ALPHA_BETA_3m00;
+    m10_3 = simde_mm_adds_epi16(alpha_ptr[4], beta_ptr[6]); // ALPHA_BETA_3m10;
+    m00_4 = simde_mm_adds_epi16(alpha_ptr[7], beta_ptr[3]); // ALPHA_BETA_4m00;
+    m10_4 = simde_mm_adds_epi16(alpha_ptr[5], beta_ptr[2]); // ALPHA_BETA_4m10;
+    m11_4 = simde_mm_adds_epi16(alpha_ptr[7], beta_ptr[7]); // ALPHA_BETA_4m11;
+    m01_4 = simde_mm_adds_epi16(alpha_ptr[5], beta_ptr[6]); // ALPHA_BETA_4m01;
 #endif
     /*
       print_shorts("m11_1:",&m11_1);
@@ -834,30 +834,30 @@ void compute_ext16(llr_t *alpha,llr_t *beta,llr_t *m_11,llr_t *m_10,llr_t *ext,
       print_shorts("m01_3:",&m01_3);
       print_shorts("m01_4:",&m01_4);
     */
-    m01_1 = _mm_max_epi16(m01_1,m01_2);
-    m01_1 = _mm_max_epi16(m01_1,m01_3);
-    m01_1 = _mm_max_epi16(m01_1,m01_4);
-    m00_1 = _mm_max_epi16(m00_1,m00_2);
-    m00_1 = _mm_max_epi16(m00_1,m00_3);
-    m00_1 = _mm_max_epi16(m00_1,m00_4);
-    m10_1 = _mm_max_epi16(m10_1,m10_2);
-    m10_1 = _mm_max_epi16(m10_1,m10_3);
-    m10_1 = _mm_max_epi16(m10_1,m10_4);
-    m11_1 = _mm_max_epi16(m11_1,m11_2);
-    m11_1 = _mm_max_epi16(m11_1,m11_3);
-    m11_1 = _mm_max_epi16(m11_1,m11_4);
+    m01_1 = simde_mm_max_epi16(m01_1, m01_2);
+    m01_1 = simde_mm_max_epi16(m01_1, m01_3);
+    m01_1 = simde_mm_max_epi16(m01_1, m01_4);
+    m00_1 = simde_mm_max_epi16(m00_1, m00_2);
+    m00_1 = simde_mm_max_epi16(m00_1, m00_3);
+    m00_1 = simde_mm_max_epi16(m00_1, m00_4);
+    m10_1 = simde_mm_max_epi16(m10_1, m10_2);
+    m10_1 = simde_mm_max_epi16(m10_1, m10_3);
+    m10_1 = simde_mm_max_epi16(m10_1, m10_4);
+    m11_1 = simde_mm_max_epi16(m11_1, m11_2);
+    m11_1 = simde_mm_max_epi16(m11_1, m11_3);
+    m11_1 = simde_mm_max_epi16(m11_1, m11_4);
     //      print_shorts("m11_1:",&m11_1);
-    m01_1 = _mm_subs_epi16(m01_1,*m10_128);
-    m00_1 = _mm_subs_epi16(m00_1,*m11_128);
-    m10_1 = _mm_adds_epi16(m10_1,*m10_128);
-    m11_1 = _mm_adds_epi16(m11_1,*m11_128);
+    m01_1 = simde_mm_subs_epi16(m01_1, *m10_128);
+    m00_1 = simde_mm_subs_epi16(m00_1, *m11_128);
+    m10_1 = simde_mm_adds_epi16(m10_1, *m10_128);
+    m11_1 = simde_mm_adds_epi16(m11_1, *m11_128);
     //      print_shorts("m10_1:",&m10_1);
     //      print_shorts("m11_1:",&m11_1);
-    m01_1 = _mm_max_epi16(m01_1,m00_1);
-    m10_1 = _mm_max_epi16(m10_1,m11_1);
+    m01_1 = simde_mm_max_epi16(m01_1, m00_1);
+    m10_1 = simde_mm_max_epi16(m10_1, m11_1);
     //      print_shorts("m01_1:",&m01_1);
     //      print_shorts("m10_1:",&m10_1);
-    *ext_128 = _mm_subs_epi16(m10_1,m01_1);
+    *ext_128 = simde_mm_subs_epi16(m10_1, m01_1);
 #ifdef DEBUG_LOGMAP
     fprintf(fdsse4,"ext %p\n",ext_128);
     print_shorts("ext:",(int16_t *)ext_128);
@@ -1006,9 +1006,9 @@ uint8_t phy_threegpplte_turbo_decoder16(int16_t *y,
   uint32_t crc, oldcrc, crc_len;
   uint8_t temp;
 #if defined(__x86_64__) || defined(__i386__)
-  __m128i *yp128;
-  __m128i tmp={0}, zeros=_mm_setzero_si128();
-  __m128i tmpe;
+  simde__m128i *yp128;
+  simde__m128i tmp = {0}, zeros = simde_mm_setzero_si128();
+  simde__m128i tmpe;
 #elif defined(__arm__) || defined(__aarch64__)
   int16x8_t *yp128;
   //  int16x8_t tmp128[(n+8)>>3];
@@ -1056,7 +1056,7 @@ uint8_t phy_threegpplte_turbo_decoder16(int16_t *y,
   }
 
 #if defined(__x86_64__) || defined(__i386__)
-  yp128 = (__m128i *)y;
+  yp128 = (simde__m128i *)y;
 #elif defined(__arm__) || defined(__aarch64__)
   yp128 = (int16x8_t *)y;
 #endif
@@ -1070,63 +1070,63 @@ uint8_t phy_threegpplte_turbo_decoder16(int16_t *y,
     pi2_p = &pi2tab16[iind][i];
     j=pi2_p[0];
 #if defined(__x86_64__) || defined(__i386__)
-    tmpe = _mm_load_si128(yp128);
+    tmpe = simde_mm_load_si128(yp128);
     //    fprintf(fdsse4,"yp128 %p\n",yp128);
     //    print_shorts("tmpe",(int16_t *)&tmpe);
-    s[j]   = _mm_extract_epi16(tmpe,0);
-    yp1[j] = _mm_extract_epi16(tmpe,1);
-    yp2[j] = _mm_extract_epi16(tmpe,2);
+    s[j] = simde_mm_extract_epi16(tmpe, 0);
+    yp1[j] = simde_mm_extract_epi16(tmpe, 1);
+    yp2[j] = simde_mm_extract_epi16(tmpe, 2);
 #ifdef DEBUG_LOGMAP
     fprintf(fdsse4,"init0: j %u, s[j] %d yp1[j] %d yp2[j] %d\n",j,s[j],yp1[j],yp2[j]);
 #endif
     j=pi2_p[1];
-    s[j]   = _mm_extract_epi16(tmpe,3);
-    yp1[j] = _mm_extract_epi16(tmpe,4);
-    yp2[j] = _mm_extract_epi16(tmpe,5);
+    s[j] = simde_mm_extract_epi16(tmpe, 3);
+    yp1[j] = simde_mm_extract_epi16(tmpe, 4);
+    yp2[j] = simde_mm_extract_epi16(tmpe, 5);
 #ifdef DEBUG_LOGMAP
     fprintf(fdsse4,"init1: j %u, s[j] %d yp1[j] %d yp2[j] %d\n",j,s[j],yp1[j],yp2[j]);
 #endif
     j=pi2_p[2];
-    s[j]   = _mm_extract_epi16(tmpe,6);
-    yp1[j] = _mm_extract_epi16(tmpe,7);
-    tmpe = _mm_load_si128(&yp128[1]);
-    yp2[j] = _mm_extract_epi16(tmpe,0);
+    s[j] = simde_mm_extract_epi16(tmpe, 6);
+    yp1[j] = simde_mm_extract_epi16(tmpe, 7);
+    tmpe = simde_mm_load_si128(&yp128[1]);
+    yp2[j] = simde_mm_extract_epi16(tmpe, 0);
 #ifdef DEBUG_LOGMAP
     fprintf(fdsse4,"init2: j %u, s[j] %d yp1[j] %d yp2[j] %d\n",j,s[j],yp1[j],yp2[j]);
 #endif
     j=pi2_p[3];
-    s[j]   = _mm_extract_epi16(tmpe,1);
-    yp1[j] = _mm_extract_epi16(tmpe,2);
-    yp2[j] = _mm_extract_epi16(tmpe,3);
+    s[j] = simde_mm_extract_epi16(tmpe, 1);
+    yp1[j] = simde_mm_extract_epi16(tmpe, 2);
+    yp2[j] = simde_mm_extract_epi16(tmpe, 3);
 #ifdef DEBUG_LOGMAP
     fprintf(fdsse4,"init3: j %u, s[j] %d yp1[j] %d yp2[j] %d\n",j,s[j],yp1[j],yp2[j]);
 #endif
     j=pi2_p[4];
-    s[j]   = _mm_extract_epi16(tmpe,4);
-    yp1[j] = _mm_extract_epi16(tmpe,5);
-    yp2[j] = _mm_extract_epi16(tmpe,6);
+    s[j] = simde_mm_extract_epi16(tmpe, 4);
+    yp1[j] = simde_mm_extract_epi16(tmpe, 5);
+    yp2[j] = simde_mm_extract_epi16(tmpe, 6);
 #ifdef DEBUG_LOGMAP
     fprintf(fdsse4,"init4: j %u, s[j] %d yp1[j] %d yp2[j] %d\n",j,s[j],yp1[j],yp2[j]);
 #endif
     j=pi2_p[5];
-    s[j]   = _mm_extract_epi16(tmpe,7);
-    tmpe = _mm_load_si128(&yp128[2]);
-    yp1[j] = _mm_extract_epi16(tmpe,0);
-    yp2[j] = _mm_extract_epi16(tmpe,1);
+    s[j] = simde_mm_extract_epi16(tmpe, 7);
+    tmpe = simde_mm_load_si128(&yp128[2]);
+    yp1[j] = simde_mm_extract_epi16(tmpe, 0);
+    yp2[j] = simde_mm_extract_epi16(tmpe, 1);
 #ifdef DEBUG_LOGMAP
     fprintf(fdsse4,"init5: j %u, s[j] %d yp1[j] %d yp2[j] %d\n",j,s[j],yp1[j],yp2[j]);
 #endif
     j=pi2_p[6];
-    s[j]   = _mm_extract_epi16(tmpe,2);
-    yp1[j] = _mm_extract_epi16(tmpe,3);
-    yp2[j] = _mm_extract_epi16(tmpe,4);
+    s[j] = simde_mm_extract_epi16(tmpe, 2);
+    yp1[j] = simde_mm_extract_epi16(tmpe, 3);
+    yp2[j] = simde_mm_extract_epi16(tmpe, 4);
 #ifdef DEBUG_LOGMAP
     fprintf(fdsse4,"init6: j %u, s[j] %d yp1[j] %d yp2[j] %d\n",j,s[j],yp1[j],yp2[j]);
 #endif
     j=pi2_p[7];
-    s[j]   = _mm_extract_epi16(tmpe,5);
-    yp1[j] = _mm_extract_epi16(tmpe,6);
-    yp2[j] = _mm_extract_epi16(tmpe,7);
+    s[j] = simde_mm_extract_epi16(tmpe, 5);
+    yp1[j] = simde_mm_extract_epi16(tmpe, 6);
+    yp2[j] = simde_mm_extract_epi16(tmpe, 7);
 #ifdef DEBUG_LOGMAP
     fprintf(fdsse4,"init7: j %u, s[j] %d yp1[j] %d yp2[j] %d\n",j,s[j],yp1[j],yp2[j]);
 #endif
@@ -1209,14 +1209,14 @@ uint8_t phy_threegpplte_turbo_decoder16(int16_t *y,
 
     for (i=0; i<(n>>3); i++) { // steady-state portion
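       // gather the interleaved extrinsic values through the permutation table,
       // one 16-bit lane at a time (there is no SIMD gather)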
 #if defined(__x86_64__) || defined(__i386__)
-      ((__m128i *)systematic2)[i]=_mm_insert_epi16(((__m128i *)systematic2)[i],ext[*pi4_p++],0);
-      ((__m128i *)systematic2)[i]=_mm_insert_epi16(((__m128i *)systematic2)[i],ext[*pi4_p++],1);
-      ((__m128i *)systematic2)[i]=_mm_insert_epi16(((__m128i *)systematic2)[i],ext[*pi4_p++],2);
-      ((__m128i *)systematic2)[i]=_mm_insert_epi16(((__m128i *)systematic2)[i],ext[*pi4_p++],3);
-      ((__m128i *)systematic2)[i]=_mm_insert_epi16(((__m128i *)systematic2)[i],ext[*pi4_p++],4);
-      ((__m128i *)systematic2)[i]=_mm_insert_epi16(((__m128i *)systematic2)[i],ext[*pi4_p++],5);
-      ((__m128i *)systematic2)[i]=_mm_insert_epi16(((__m128i *)systematic2)[i],ext[*pi4_p++],6);
-      ((__m128i *)systematic2)[i]=_mm_insert_epi16(((__m128i *)systematic2)[i],ext[*pi4_p++],7);
+      ((simde__m128i *)systematic2)[i] = simde_mm_insert_epi16(((simde__m128i *)systematic2)[i], ext[*pi4_p++], 0);
+      ((simde__m128i *)systematic2)[i] = simde_mm_insert_epi16(((simde__m128i *)systematic2)[i], ext[*pi4_p++], 1);
+      ((simde__m128i *)systematic2)[i] = simde_mm_insert_epi16(((simde__m128i *)systematic2)[i], ext[*pi4_p++], 2);
+      ((simde__m128i *)systematic2)[i] = simde_mm_insert_epi16(((simde__m128i *)systematic2)[i], ext[*pi4_p++], 3);
+      ((simde__m128i *)systematic2)[i] = simde_mm_insert_epi16(((simde__m128i *)systematic2)[i], ext[*pi4_p++], 4);
+      ((simde__m128i *)systematic2)[i] = simde_mm_insert_epi16(((simde__m128i *)systematic2)[i], ext[*pi4_p++], 5);
+      ((simde__m128i *)systematic2)[i] = simde_mm_insert_epi16(((simde__m128i *)systematic2)[i], ext[*pi4_p++], 6);
+      ((simde__m128i *)systematic2)[i] = simde_mm_insert_epi16(((simde__m128i *)systematic2)[i], ext[*pi4_p++], 7);
 #elif defined(__arm__) || defined(__aarch64__)
       ((int16x8_t *)systematic2)[i]=vsetq_lane_s16(ext[*pi4_p++],((int16x8_t *)systematic2)[i],0);
       ((int16x8_t *)systematic2)[i]=vsetq_lane_s16(ext[*pi4_p++],((int16x8_t *)systematic2)[i],1);
@@ -1228,7 +1228,7 @@ uint8_t phy_threegpplte_turbo_decoder16(int16_t *y,
       ((int16x8_t *)systematic2)[i]=vsetq_lane_s16(ext[*pi4_p++],((int16x8_t *)systematic2)[i],7);
 #endif
 #ifdef DEBUG_LOGMAP
-      print_shorts("syst2",(int16_t *)&((__m128i *)systematic2)[i]);
+      print_shorts("syst2", (int16_t *)&((simde__m128i *)systematic2)[i]);
 #endif
     }
 
@@ -1239,15 +1239,16 @@ uint8_t phy_threegpplte_turbo_decoder16(int16_t *y,
 
     for (i=0; i<(n>>3); i++) {
 #if defined(__x86_64__) || defined(__i386__)
-      tmp=_mm_insert_epi16(tmp,ext2[*pi5_p++],0);
-      tmp=_mm_insert_epi16(tmp,ext2[*pi5_p++],1);
-      tmp=_mm_insert_epi16(tmp,ext2[*pi5_p++],2);
-      tmp=_mm_insert_epi16(tmp,ext2[*pi5_p++],3);
-      tmp=_mm_insert_epi16(tmp,ext2[*pi5_p++],4);
-      tmp=_mm_insert_epi16(tmp,ext2[*pi5_p++],5);
-      tmp=_mm_insert_epi16(tmp,ext2[*pi5_p++],6);
-      tmp=_mm_insert_epi16(tmp,ext2[*pi5_p++],7);
-      ((__m128i *)systematic1)[i] = _mm_adds_epi16(_mm_subs_epi16(tmp,((__m128i *)ext)[i]),((__m128i *)systematic0)[i]);
+      tmp = simde_mm_insert_epi16(tmp, ext2[*pi5_p++], 0);
+      tmp = simde_mm_insert_epi16(tmp, ext2[*pi5_p++], 1);
+      tmp = simde_mm_insert_epi16(tmp, ext2[*pi5_p++], 2);
+      tmp = simde_mm_insert_epi16(tmp, ext2[*pi5_p++], 3);
+      tmp = simde_mm_insert_epi16(tmp, ext2[*pi5_p++], 4);
+      tmp = simde_mm_insert_epi16(tmp, ext2[*pi5_p++], 5);
+      tmp = simde_mm_insert_epi16(tmp, ext2[*pi5_p++], 6);
+      tmp = simde_mm_insert_epi16(tmp, ext2[*pi5_p++], 7);
+      ((simde__m128i *)systematic1)[i] =
+          simde_mm_adds_epi16(simde_mm_subs_epi16(tmp, ((simde__m128i *)ext)[i]), ((simde__m128i *)systematic0)[i]);
 #elif defined(__arm__) || defined(__aarch64__)
       tmp=vsetq_lane_s16(ext2[*pi5_p++],tmp,0);
       tmp=vsetq_lane_s16(ext2[*pi5_p++],tmp,1);
@@ -1260,7 +1261,7 @@ uint8_t phy_threegpplte_turbo_decoder16(int16_t *y,
       ((int16x8_t *)systematic1)[i] = vqaddq_s16(vqsubq_s16(tmp,((int16x8_t *)ext)[i]),((int16x8_t *)systematic0)[i]);
 #endif
 #ifdef DEBUG_LOGMAP
-      print_shorts("syst1",(int16_t *)&((__m128i *)systematic1)[i]);
+      print_shorts("syst1", (int16_t *)&((simde__m128i *)systematic1)[i]);
 #endif
     }
 
@@ -1270,19 +1271,19 @@ uint8_t phy_threegpplte_turbo_decoder16(int16_t *y,
 
       for (i=0; i<(n>>3); i++) {
 #if defined(__x86_64__) || defined(__i386__)
-        tmp=_mm_insert_epi16(tmp, ((llr_t *)ext2)[*pi6_p++],7);
-        tmp=_mm_insert_epi16(tmp, ((llr_t *)ext2)[*pi6_p++],6);
-        tmp=_mm_insert_epi16(tmp, ((llr_t *)ext2)[*pi6_p++],5);
-        tmp=_mm_insert_epi16(tmp, ((llr_t *)ext2)[*pi6_p++],4);
-        tmp=_mm_insert_epi16(tmp, ((llr_t *)ext2)[*pi6_p++],3);
-        tmp=_mm_insert_epi16(tmp, ((llr_t *)ext2)[*pi6_p++],2);
-        tmp=_mm_insert_epi16(tmp, ((llr_t *)ext2)[*pi6_p++],1);
-        tmp=_mm_insert_epi16(tmp, ((llr_t *)ext2)[*pi6_p++],0);
+        tmp = simde_mm_insert_epi16(tmp, ((llr_t *)ext2)[*pi6_p++], 7);
+        tmp = simde_mm_insert_epi16(tmp, ((llr_t *)ext2)[*pi6_p++], 6);
+        tmp = simde_mm_insert_epi16(tmp, ((llr_t *)ext2)[*pi6_p++], 5);
+        tmp = simde_mm_insert_epi16(tmp, ((llr_t *)ext2)[*pi6_p++], 4);
+        tmp = simde_mm_insert_epi16(tmp, ((llr_t *)ext2)[*pi6_p++], 3);
+        tmp = simde_mm_insert_epi16(tmp, ((llr_t *)ext2)[*pi6_p++], 2);
+        tmp = simde_mm_insert_epi16(tmp, ((llr_t *)ext2)[*pi6_p++], 1);
+        tmp = simde_mm_insert_epi16(tmp, ((llr_t *)ext2)[*pi6_p++], 0);
 #ifdef DEBUG_LOGMAP
         print_shorts("tmp",(int16_t *)&tmp);
 #endif
-        tmp=_mm_cmpgt_epi8(_mm_packs_epi16(tmp,zeros),zeros);
-        decoded_bytes[i]=(unsigned char)_mm_movemask_epi8(tmp);
+        tmp = simde_mm_cmpgt_epi8(simde_mm_packs_epi16(tmp, zeros), zeros);
+        decoded_bytes[i] = (unsigned char)simde_mm_movemask_epi8(tmp);
 #elif defined(__arm__) || defined(__aarch64__)
         tmp=vsetq_lane_s16(ext2[*pi6_p++],tmp,7);
         tmp=vsetq_lane_s16(ext2[*pi6_p++],tmp,6);
@@ -1299,7 +1300,7 @@ uint8_t phy_threegpplte_turbo_decoder16(int16_t *y,
         // Mask64 = 2^b0 + 2^b1 + 2^b2 + 2^b3 + 2^b4 + 2^b5 + 2^b6 + 2^b7
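         // (NEON has no movemask equivalent: assuming Powers holds the per-lane
         // powers of two, the and + pairwise widening adds rebuild the bit mask)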
         uint64x2_t Mask   = vpaddlq_u32(vpaddlq_u16(vandq_u16(vcgtq_s16(tmp,zeros), Powers)));
         uint64x1_t Mask64 = vget_high_u64(Mask)+vget_low_u64(Mask);
-        decoded_bytes[i] = (uint8_t)Mask64;
+        decoded_bytes[i] = *(uint8_t*)&Mask64;
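+        // uint64x1_t is a vector type, and a direct scalar cast is rejected by
+        // GCC/clang, so the low byte is read through a pointer reinterpretation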
 #endif
 #ifdef DEBUG_LOGMAP
         print_shorts("tmp",(int16_t *)&tmp);
@@ -1364,9 +1365,9 @@ uint8_t phy_threegpplte_turbo_decoder16(int16_t *y,
     if (iteration_cnt < max_iterations) {
       log_map16(systematic1,yparity1,m11,m10,alpha,beta,ext,n,0,F,offset8_flag,alpha_stats,beta_stats,gamma_stats,ext_stats);
 #if defined(__x86_64__) || defined(__i386__)
-      __m128i *ext_128=(__m128i *) ext;
-      __m128i *s1_128=(__m128i *) systematic1;
-      __m128i *s0_128=(__m128i *) systematic0;
+      simde__m128i *ext_128 = (simde__m128i *)ext;
+      simde__m128i *s1_128 = (simde__m128i *)systematic1;
+      simde__m128i *s0_128 = (simde__m128i *)systematic0;
 #elif defined(__arm__) || defined(__aarch64__)
       int16x8_t *ext_128=(int16x8_t *) ext;
       int16x8_t *s1_128=(int16x8_t *) systematic1;
@@ -1376,7 +1377,7 @@ uint8_t phy_threegpplte_turbo_decoder16(int16_t *y,
 
       for (i=0; i<myloop; i++) {
 #if defined(__x86_64__) || defined(__i386__)
-        *ext_128=_mm_adds_epi16(_mm_subs_epi16(*ext_128,*s1_128++),*s0_128++);
+        *ext_128 = simde_mm_adds_epi16(simde_mm_subs_epi16(*ext_128, *s1_128++), *s0_128++);
 #elif defined(__arm__) || defined(__aarch64__)
         *ext_128=vqaddq_s16(vqsubq_s16(*ext_128,*s1_128++),*s0_128++);
 #endif
@@ -1390,8 +1391,8 @@ uint8_t phy_threegpplte_turbo_decoder16(int16_t *y,
   fclose(fdsse4);
 #endif
 #if defined(__x86_64__) || defined(__i386__)
-  _mm_empty();
-  _m_empty();
+  simde_mm_empty();
+  simde_m_empty();
 #endif
   if (iteration_cnt > max_iterations)
     set_abort(ab, true);
diff --git a/openair1/PHY/CODING/3gpplte_turbo_decoder_sse_8bit.c b/openair1/PHY/CODING/3gpplte_turbo_decoder_sse_8bit.c
index 1c18f909a75f059a07a3942e4d5acb945d632563..1447dbc6cc78b4c9e2121811f536dd3bcc3e592c 100644
--- a/openair1/PHY/CODING/3gpplte_turbo_decoder_sse_8bit.c
+++ b/openair1/PHY/CODING/3gpplte_turbo_decoder_sse_8bit.c
@@ -56,26 +56,23 @@
 
 #include "common/ran_context.h"
 
-#define SHUFFLE16(a,b,c,d,e,f,g,h) _mm_set_epi8(h==-1?-1:h*2+1, \
-    h==-1?-1:h*2, \
-    g==-1?-1:g*2+1, \
-    g==-1?-1:g*2, \
-    f==-1?-1:f*2+1, \
-    f==-1?-1:f*2, \
-    e==-1?-1:e*2+1, \
-    e==-1?-1:e*2, \
-    d==-1?-1:d*2+1, \
-    d==-1?-1:d*2, \
-    c==-1?-1:c*2+1, \
-    c==-1?-1:c*2, \
-    b==-1?-1:b*2+1, \
-    b==-1?-1:b*2, \
-    a==-1?-1:a*2+1, \
-    a==-1?-1:a*2);
-
-
-
-
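+// SHUFFLE16 builds the control vector for simde_mm_shuffle_epi8 from eight
+// 16-bit lane indices: result lane 0 is taken from source lane a, ..., result
+// lane 7 from source lane h, and an index of -1 zeroes the lane (high bit set
+// in the control byte). E.g. SHUFFLE16(7,6,5,4,3,2,1,0) reverses the lanes.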
+#define SHUFFLE16(a, b, c, d, e, f, g, h)     \
+  simde_mm_set_epi8(h == -1 ? -1 : h * 2 + 1, \
+                    h == -1 ? -1 : h * 2,     \
+                    g == -1 ? -1 : g * 2 + 1, \
+                    g == -1 ? -1 : g * 2,     \
+                    f == -1 ? -1 : f * 2 + 1, \
+                    f == -1 ? -1 : f * 2,     \
+                    e == -1 ? -1 : e * 2 + 1, \
+                    e == -1 ? -1 : e * 2,     \
+                    d == -1 ? -1 : d * 2 + 1, \
+                    d == -1 ? -1 : d * 2,     \
+                    c == -1 ? -1 : c * 2 + 1, \
+                    c == -1 ? -1 : c * 2,     \
+                    b == -1 ? -1 : b * 2 + 1, \
+                    b == -1 ? -1 : b * 2,     \
+                    a == -1 ? -1 : a * 2 + 1, \
+                    a == -1 ? -1 : a * 2)
 
 //#define DEBUG_LOGMAP
 
@@ -149,10 +146,10 @@ void compute_gamma8(llr_t *m11,llr_t *m10,llr_t *systematic,channel_t *y_parity,
                     unsigned short frame_length,unsigned char term_flag) {
   int k,K1;
 #if defined(__x86_64__)||defined(__i386__)
-  __m128i *systematic128 = (__m128i *)systematic;
-  __m128i *y_parity128   = (__m128i *)y_parity;
-  __m128i *m10_128        = (__m128i *)m10;
-  __m128i *m11_128        = (__m128i *)m11;
+  simde__m128i *systematic128 = (simde__m128i *)systematic;
+  simde__m128i *y_parity128 = (simde__m128i *)y_parity;
+  simde__m128i *m10_128 = (simde__m128i *)m10;
+  simde__m128i *m11_128 = (simde__m128i *)m11;
 #elif defined(__arm__) || defined(__aarch64__)
   int8x16_t *systematic128  = (int8x16_t *)systematic;
   int8x16_t *y_parity128    = (int8x16_t *)y_parity;
@@ -163,20 +160,20 @@ void compute_gamma8(llr_t *m11,llr_t *m10,llr_t *systematic,channel_t *y_parity,
   printf("compute_gamma, %p,%p,%p,%p,framelength %d\n",m11,m10,systematic,y_parity,frame_length);
 #endif
 #if defined(__x86_64__) || defined(__i386__)
-  register __m128i sl,sh,ypl,yph; //K128=_mm_set1_epi8(-128);
+  register simde__m128i sl, sh, ypl, yph; // K128=simde_mm_set1_epi8(-128);
 #endif
   K1 = (frame_length>>4);
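   // gamma branch metrics: saturating add/sub of systematic and parity symbols,
   // halved with an arithmetic shift and packed back to 8 bits with saturation
   // (the ARM path uses the halving vhaddq_s8/vhsubq_s8 directly)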
 
   for (k=0; k<K1; k++) {
 #if defined(__x86_64__) || defined(__i386__)
-    sl  = _mm_cvtepi8_epi16(systematic128[k]);
-    sh  = _mm_cvtepi8_epi16(_mm_srli_si128(systematic128[k],8));
-    ypl = _mm_cvtepi8_epi16(y_parity128[k]);
-    yph = _mm_cvtepi8_epi16(_mm_srli_si128(y_parity128[k],8));
-    m11_128[k] = _mm_packs_epi16(_mm_srai_epi16(_mm_adds_epi16(sl,ypl),1),
-                                 _mm_srai_epi16(_mm_adds_epi16(sh,yph),1));
-    m10_128[k] = _mm_packs_epi16(_mm_srai_epi16(_mm_subs_epi16(sl,ypl),1),
-                                 _mm_srai_epi16(_mm_subs_epi16(sh,yph),1));
+    sl = simde_mm_cvtepi8_epi16(systematic128[k]);
+    sh = simde_mm_cvtepi8_epi16(simde_mm_srli_si128(systematic128[k], 8));
+    ypl = simde_mm_cvtepi8_epi16(y_parity128[k]);
+    yph = simde_mm_cvtepi8_epi16(simde_mm_srli_si128(y_parity128[k], 8));
+    m11_128[k] = simde_mm_packs_epi16(simde_mm_srai_epi16(simde_mm_adds_epi16(sl, ypl), 1),
+                                      simde_mm_srai_epi16(simde_mm_adds_epi16(sh, yph), 1));
+    m10_128[k] = simde_mm_packs_epi16(simde_mm_srai_epi16(simde_mm_subs_epi16(sl, ypl), 1),
+                                      simde_mm_srai_epi16(simde_mm_subs_epi16(sh, yph), 1));
 #elif defined(__arm__) || defined(__aarch64__)
     m11_128[k] = vhaddq_s8(systematic128[k],y_parity128[k]);
     m10_128[k] = vhsubq_s8(systematic128[k],y_parity128[k]);
@@ -185,14 +182,14 @@ void compute_gamma8(llr_t *m11,llr_t *m10,llr_t *systematic,channel_t *y_parity,
 
   // Termination
 #if defined(__x86_64__) || defined(__i386__)
-  sl  = _mm_cvtepi8_epi16(systematic128[k+term_flag]);
-  sh = _mm_cvtepi8_epi16(_mm_srli_si128(systematic128[k],8));
-  ypl = _mm_cvtepi8_epi16(y_parity128[k+term_flag]);
-  yph = _mm_cvtepi8_epi16(_mm_srli_si128(y_parity128[k],8));
-  m11_128[k] = _mm_packs_epi16(_mm_srai_epi16(_mm_adds_epi16(sl,ypl),1),
-                               _mm_srai_epi16(_mm_adds_epi16(sh,yph),1));
-  m10_128[k] = _mm_packs_epi16(_mm_srai_epi16(_mm_subs_epi16(sl,ypl),1),
-                               _mm_srai_epi16(_mm_subs_epi16(sh,yph),1));
+  sl = simde_mm_cvtepi8_epi16(systematic128[k + term_flag]);
+  sh = simde_mm_cvtepi8_epi16(simde_mm_srli_si128(systematic128[k], 8));
+  ypl = simde_mm_cvtepi8_epi16(y_parity128[k + term_flag]);
+  yph = simde_mm_cvtepi8_epi16(simde_mm_srli_si128(y_parity128[k], 8));
+  m11_128[k] = simde_mm_packs_epi16(simde_mm_srai_epi16(simde_mm_adds_epi16(sl, ypl), 1),
+                                    simde_mm_srai_epi16(simde_mm_adds_epi16(sh, yph), 1));
+  m10_128[k] = simde_mm_packs_epi16(simde_mm_srai_epi16(simde_mm_subs_epi16(sl, ypl), 1),
+                                    simde_mm_srai_epi16(simde_mm_subs_epi16(sh, yph), 1));
 #elif defined(__arm__) || defined(__aarch64__)
   m11_128[k] = vhaddq_s8(systematic128[k+term_flag],y_parity128[k]);
   m10_128[k] = vhsubq_s8(systematic128[k+term_flag],y_parity128[k]);
@@ -204,11 +201,11 @@ void compute_gamma8(llr_t *m11,llr_t *m10,llr_t *systematic,channel_t *y_parity,
 void compute_alpha8(llr_t *alpha,llr_t *beta,llr_t *m_11,llr_t *m_10,unsigned short frame_length,unsigned char F) {
   int k,loopval,rerun_flag;
 #if defined(__x86_64__) || defined(__i386__)
-  __m128i *alpha128=(__m128i *)alpha,*alpha_ptr;
-  __m128i *m11p,*m10p;
-  __m128i m_b0,m_b1,m_b2,m_b3,m_b4,m_b5,m_b6,m_b7;
-  __m128i new0,new1,new2,new3,new4,new5,new6,new7;
-  __m128i alpha_max;
+  simde__m128i *alpha128 = (simde__m128i *)alpha, *alpha_ptr;
+  simde__m128i *m11p, *m10p;
+  simde__m128i m_b0, m_b1, m_b2, m_b3, m_b4, m_b5, m_b6, m_b7;
+  simde__m128i new0, new1, new2, new3, new4, new5, new6, new7;
+  simde__m128i alpha_max;
 #elif defined(__arm__) || defined(__aarch64__)
   int8x16_t *alpha128=(int8x16_t *)alpha,*alpha_ptr;
   int8x16_t *m11p,*m10p;
@@ -219,77 +216,197 @@ void compute_alpha8(llr_t *alpha,llr_t *beta,llr_t *m_11,llr_t *m_10,unsigned sh
   // Set initial state: the first column is known;
   // the other columns are unknown, so all states are set to the same value
 #if defined(__x86_64__) || defined(__i386__)
-  alpha128[0] = _mm_set_epi8(-MAX8/2,-MAX8/2,-MAX8/2,-MAX8/2,-MAX8/2,-MAX8/2,-MAX8/2,-MAX8/2,-MAX8/2,-MAX8/2,-MAX8/2,-MAX8/2,-MAX8/2,-MAX8/2,-MAX8/2,0);
-  alpha128[1] = _mm_set_epi8(-MAX8/2,-MAX8/2,-MAX8/2,-MAX8/2,-MAX8/2,-MAX8/2,-MAX8/2,-MAX8/2,-MAX8/2,-MAX8/2,-MAX8/2,-MAX8/2,-MAX8/2,-MAX8/2,-MAX8/2,-MAX8/2);
-  alpha128[2] = _mm_set_epi8(-MAX8/2,-MAX8/2,-MAX8/2,-MAX8/2,-MAX8/2,-MAX8/2,-MAX8/2,-MAX8/2,-MAX8/2,-MAX8/2,-MAX8/2,-MAX8/2,-MAX8/2,-MAX8/2,-MAX8/2,-MAX8/2);
-  alpha128[3] = _mm_set_epi8(-MAX8/2,-MAX8/2,-MAX8/2,-MAX8/2,-MAX8/2,-MAX8/2,-MAX8/2,-MAX8/2,-MAX8/2,-MAX8/2,-MAX8/2,-MAX8/2,-MAX8/2,-MAX8/2,-MAX8/2,-MAX8/2);
-  alpha128[4] = _mm_set_epi8(-MAX8/2,-MAX8/2,-MAX8/2,-MAX8/2,-MAX8/2,-MAX8/2,-MAX8/2,-MAX8/2,-MAX8/2,-MAX8/2,-MAX8/2,-MAX8/2,-MAX8/2,-MAX8/2,-MAX8/2,-MAX8/2);
-  alpha128[5] = _mm_set_epi8(-MAX8/2,-MAX8/2,-MAX8/2,-MAX8/2,-MAX8/2,-MAX8/2,-MAX8/2,-MAX8/2,-MAX8/2,-MAX8/2,-MAX8/2,-MAX8/2,-MAX8/2,-MAX8/2,-MAX8/2,-MAX8/2);
-  alpha128[6] = _mm_set_epi8(-MAX8/2,-MAX8/2,-MAX8/2,-MAX8/2,-MAX8/2,-MAX8/2,-MAX8/2,-MAX8/2,-MAX8/2,-MAX8/2,-MAX8/2,-MAX8/2,-MAX8/2,-MAX8/2,-MAX8/2,-MAX8/2);
-  alpha128[7] = _mm_set_epi8(-MAX8/2,-MAX8/2,-MAX8/2,-MAX8/2,-MAX8/2,-MAX8/2,-MAX8/2,-MAX8/2,-MAX8/2,-MAX8/2,-MAX8/2,-MAX8/2,-MAX8/2,-MAX8/2,-MAX8/2,-MAX8/2);
+  alpha128[0] = simde_mm_set_epi8(-MAX8 / 2,
+                                  -MAX8 / 2,
+                                  -MAX8 / 2,
+                                  -MAX8 / 2,
+                                  -MAX8 / 2,
+                                  -MAX8 / 2,
+                                  -MAX8 / 2,
+                                  -MAX8 / 2,
+                                  -MAX8 / 2,
+                                  -MAX8 / 2,
+                                  -MAX8 / 2,
+                                  -MAX8 / 2,
+                                  -MAX8 / 2,
+                                  -MAX8 / 2,
+                                  -MAX8 / 2,
+                                  0);
+  alpha128[1] = simde_mm_set_epi8(-MAX8 / 2,
+                                  -MAX8 / 2,
+                                  -MAX8 / 2,
+                                  -MAX8 / 2,
+                                  -MAX8 / 2,
+                                  -MAX8 / 2,
+                                  -MAX8 / 2,
+                                  -MAX8 / 2,
+                                  -MAX8 / 2,
+                                  -MAX8 / 2,
+                                  -MAX8 / 2,
+                                  -MAX8 / 2,
+                                  -MAX8 / 2,
+                                  -MAX8 / 2,
+                                  -MAX8 / 2,
+                                  -MAX8 / 2);
+  alpha128[2] = simde_mm_set_epi8(-MAX8 / 2,
+                                  -MAX8 / 2,
+                                  -MAX8 / 2,
+                                  -MAX8 / 2,
+                                  -MAX8 / 2,
+                                  -MAX8 / 2,
+                                  -MAX8 / 2,
+                                  -MAX8 / 2,
+                                  -MAX8 / 2,
+                                  -MAX8 / 2,
+                                  -MAX8 / 2,
+                                  -MAX8 / 2,
+                                  -MAX8 / 2,
+                                  -MAX8 / 2,
+                                  -MAX8 / 2,
+                                  -MAX8 / 2);
+  alpha128[3] = simde_mm_set_epi8(-MAX8 / 2,
+                                  -MAX8 / 2,
+                                  -MAX8 / 2,
+                                  -MAX8 / 2,
+                                  -MAX8 / 2,
+                                  -MAX8 / 2,
+                                  -MAX8 / 2,
+                                  -MAX8 / 2,
+                                  -MAX8 / 2,
+                                  -MAX8 / 2,
+                                  -MAX8 / 2,
+                                  -MAX8 / 2,
+                                  -MAX8 / 2,
+                                  -MAX8 / 2,
+                                  -MAX8 / 2,
+                                  -MAX8 / 2);
+  alpha128[4] = simde_mm_set_epi8(-MAX8 / 2,
+                                  -MAX8 / 2,
+                                  -MAX8 / 2,
+                                  -MAX8 / 2,
+                                  -MAX8 / 2,
+                                  -MAX8 / 2,
+                                  -MAX8 / 2,
+                                  -MAX8 / 2,
+                                  -MAX8 / 2,
+                                  -MAX8 / 2,
+                                  -MAX8 / 2,
+                                  -MAX8 / 2,
+                                  -MAX8 / 2,
+                                  -MAX8 / 2,
+                                  -MAX8 / 2,
+                                  -MAX8 / 2);
+  alpha128[5] = simde_mm_set_epi8(-MAX8 / 2,
+                                  -MAX8 / 2,
+                                  -MAX8 / 2,
+                                  -MAX8 / 2,
+                                  -MAX8 / 2,
+                                  -MAX8 / 2,
+                                  -MAX8 / 2,
+                                  -MAX8 / 2,
+                                  -MAX8 / 2,
+                                  -MAX8 / 2,
+                                  -MAX8 / 2,
+                                  -MAX8 / 2,
+                                  -MAX8 / 2,
+                                  -MAX8 / 2,
+                                  -MAX8 / 2,
+                                  -MAX8 / 2);
+  alpha128[6] = simde_mm_set_epi8(-MAX8 / 2,
+                                  -MAX8 / 2,
+                                  -MAX8 / 2,
+                                  -MAX8 / 2,
+                                  -MAX8 / 2,
+                                  -MAX8 / 2,
+                                  -MAX8 / 2,
+                                  -MAX8 / 2,
+                                  -MAX8 / 2,
+                                  -MAX8 / 2,
+                                  -MAX8 / 2,
+                                  -MAX8 / 2,
+                                  -MAX8 / 2,
+                                  -MAX8 / 2,
+                                  -MAX8 / 2,
+                                  -MAX8 / 2);
+  alpha128[7] = simde_mm_set_epi8(-MAX8 / 2,
+                                  -MAX8 / 2,
+                                  -MAX8 / 2,
+                                  -MAX8 / 2,
+                                  -MAX8 / 2,
+                                  -MAX8 / 2,
+                                  -MAX8 / 2,
+                                  -MAX8 / 2,
+                                  -MAX8 / 2,
+                                  -MAX8 / 2,
+                                  -MAX8 / 2,
+                                  -MAX8 / 2,
+                                  -MAX8 / 2,
+                                  -MAX8 / 2,
+                                  -MAX8 / 2,
+                                  -MAX8 / 2);
 
   for (loopval=frame_length>>4, rerun_flag=0; rerun_flag<2; loopval=L, rerun_flag++) {
     alpha_ptr = &alpha128[0];
-    m11p = (__m128i *)m_11;
-    m10p = (__m128i *)m_10;
+    m11p = (simde__m128i *)m_11;
+    m10p = (simde__m128i *)m_10;
 
     for (k=0;  k<loopval;  k++) {
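       // forward recursion (max-log-MAP): every new state metric is the max of
       // its two candidate predecessors; the common maximum is subtracted below
       // so the 8-bit metrics do not saturate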
-      m_b0 = _mm_adds_epi8(alpha_ptr[1],*m11p);  // m11
-      m_b4 = _mm_subs_epi8(alpha_ptr[1],*m11p);  // m00=-m11
-      m_b1 = _mm_subs_epi8(alpha_ptr[3],*m10p);  // m01=-m10
-      m_b5 = _mm_adds_epi8(alpha_ptr[3],*m10p);  // m10
-      m_b2 = _mm_adds_epi8(alpha_ptr[5],*m10p);  // m10
-      m_b6 = _mm_subs_epi8(alpha_ptr[5],*m10p);  // m01=-m10
-      m_b3 = _mm_subs_epi8(alpha_ptr[7],*m11p);  // m00=-m11
-      m_b7 = _mm_adds_epi8(alpha_ptr[7],*m11p);  // m11
-      new0 = _mm_subs_epi8(alpha_ptr[0],*m11p);  // m00=-m11
-      new4 = _mm_adds_epi8(alpha_ptr[0],*m11p);  // m11
-      new1 = _mm_adds_epi8(alpha_ptr[2],*m10p);  // m10
-      new5 = _mm_subs_epi8(alpha_ptr[2],*m10p);  // m01=-m10
-      new2 = _mm_subs_epi8(alpha_ptr[4],*m10p);  // m01=-m10
-      new6 = _mm_adds_epi8(alpha_ptr[4],*m10p);  // m10
-      new3 = _mm_adds_epi8(alpha_ptr[6],*m11p);  // m11
-      new7 = _mm_subs_epi8(alpha_ptr[6],*m11p);  // m00=-m11
+      m_b0 = simde_mm_adds_epi8(alpha_ptr[1], *m11p); // m11
+      m_b4 = simde_mm_subs_epi8(alpha_ptr[1], *m11p); // m00=-m11
+      m_b1 = simde_mm_subs_epi8(alpha_ptr[3], *m10p); // m01=-m10
+      m_b5 = simde_mm_adds_epi8(alpha_ptr[3], *m10p); // m10
+      m_b2 = simde_mm_adds_epi8(alpha_ptr[5], *m10p); // m10
+      m_b6 = simde_mm_subs_epi8(alpha_ptr[5], *m10p); // m01=-m10
+      m_b3 = simde_mm_subs_epi8(alpha_ptr[7], *m11p); // m00=-m11
+      m_b7 = simde_mm_adds_epi8(alpha_ptr[7], *m11p); // m11
+      new0 = simde_mm_subs_epi8(alpha_ptr[0], *m11p); // m00=-m11
+      new4 = simde_mm_adds_epi8(alpha_ptr[0], *m11p); // m11
+      new1 = simde_mm_adds_epi8(alpha_ptr[2], *m10p); // m10
+      new5 = simde_mm_subs_epi8(alpha_ptr[2], *m10p); // m01=-m10
+      new2 = simde_mm_subs_epi8(alpha_ptr[4], *m10p); // m01=-m10
+      new6 = simde_mm_adds_epi8(alpha_ptr[4], *m10p); // m10
+      new3 = simde_mm_adds_epi8(alpha_ptr[6], *m11p); // m11
+      new7 = simde_mm_subs_epi8(alpha_ptr[6], *m11p); // m00=-m11
       alpha_ptr += 8;
       m11p++;
       m10p++;
-      alpha_ptr[0] = _mm_max_epi8(m_b0,new0);
-      alpha_ptr[1] = _mm_max_epi8(m_b1,new1);
-      alpha_ptr[2] = _mm_max_epi8(m_b2,new2);
-      alpha_ptr[3] = _mm_max_epi8(m_b3,new3);
-      alpha_ptr[4] = _mm_max_epi8(m_b4,new4);
-      alpha_ptr[5] = _mm_max_epi8(m_b5,new5);
-      alpha_ptr[6] = _mm_max_epi8(m_b6,new6);
-      alpha_ptr[7] = _mm_max_epi8(m_b7,new7);
+      alpha_ptr[0] = simde_mm_max_epi8(m_b0, new0);
+      alpha_ptr[1] = simde_mm_max_epi8(m_b1, new1);
+      alpha_ptr[2] = simde_mm_max_epi8(m_b2, new2);
+      alpha_ptr[3] = simde_mm_max_epi8(m_b3, new3);
+      alpha_ptr[4] = simde_mm_max_epi8(m_b4, new4);
+      alpha_ptr[5] = simde_mm_max_epi8(m_b5, new5);
+      alpha_ptr[6] = simde_mm_max_epi8(m_b6, new6);
+      alpha_ptr[7] = simde_mm_max_epi8(m_b7, new7);
       // compute and subtract maxima
-      alpha_max = _mm_max_epi8(alpha_ptr[0],alpha_ptr[1]);
-      alpha_max = _mm_max_epi8(alpha_max,alpha_ptr[2]);
-      alpha_max = _mm_max_epi8(alpha_max,alpha_ptr[3]);
-      alpha_max = _mm_max_epi8(alpha_max,alpha_ptr[4]);
-      alpha_max = _mm_max_epi8(alpha_max,alpha_ptr[5]);
-      alpha_max = _mm_max_epi8(alpha_max,alpha_ptr[6]);
-      alpha_max = _mm_max_epi8(alpha_max,alpha_ptr[7]);
-      alpha_ptr[0] = _mm_subs_epi8(alpha_ptr[0],alpha_max);
-      alpha_ptr[1] = _mm_subs_epi8(alpha_ptr[1],alpha_max);
-      alpha_ptr[2] = _mm_subs_epi8(alpha_ptr[2],alpha_max);
-      alpha_ptr[3] = _mm_subs_epi8(alpha_ptr[3],alpha_max);
-      alpha_ptr[4] = _mm_subs_epi8(alpha_ptr[4],alpha_max);
-      alpha_ptr[5] = _mm_subs_epi8(alpha_ptr[5],alpha_max);
-      alpha_ptr[6] = _mm_subs_epi8(alpha_ptr[6],alpha_max);
-      alpha_ptr[7] = _mm_subs_epi8(alpha_ptr[7],alpha_max);
+      alpha_max = simde_mm_max_epi8(alpha_ptr[0], alpha_ptr[1]);
+      alpha_max = simde_mm_max_epi8(alpha_max, alpha_ptr[2]);
+      alpha_max = simde_mm_max_epi8(alpha_max, alpha_ptr[3]);
+      alpha_max = simde_mm_max_epi8(alpha_max, alpha_ptr[4]);
+      alpha_max = simde_mm_max_epi8(alpha_max, alpha_ptr[5]);
+      alpha_max = simde_mm_max_epi8(alpha_max, alpha_ptr[6]);
+      alpha_max = simde_mm_max_epi8(alpha_max, alpha_ptr[7]);
+      alpha_ptr[0] = simde_mm_subs_epi8(alpha_ptr[0], alpha_max);
+      alpha_ptr[1] = simde_mm_subs_epi8(alpha_ptr[1], alpha_max);
+      alpha_ptr[2] = simde_mm_subs_epi8(alpha_ptr[2], alpha_max);
+      alpha_ptr[3] = simde_mm_subs_epi8(alpha_ptr[3], alpha_max);
+      alpha_ptr[4] = simde_mm_subs_epi8(alpha_ptr[4], alpha_max);
+      alpha_ptr[5] = simde_mm_subs_epi8(alpha_ptr[5], alpha_max);
+      alpha_ptr[6] = simde_mm_subs_epi8(alpha_ptr[6], alpha_max);
+      alpha_ptr[7] = simde_mm_subs_epi8(alpha_ptr[7], alpha_max);
     }
 
     // Set the initial state for the next iteration from the last state,
     // as a column's end states are the first states of the next column
     int K1= frame_length>>1;
-    alpha128[0] = _mm_slli_si128(alpha128[K1],1);
-    alpha128[1] = _mm_slli_si128(alpha128[1+K1],1);
-    alpha128[2] = _mm_slli_si128(alpha128[2+K1],1);
-    alpha128[3] = _mm_slli_si128(alpha128[3+K1],1);
-    alpha128[4] = _mm_slli_si128(alpha128[4+K1],1);
-    alpha128[5] = _mm_slli_si128(alpha128[5+K1],1);
-    alpha128[6] = _mm_slli_si128(alpha128[6+K1],1);
-    alpha128[7] = _mm_slli_si128(alpha128[7+K1],1);
+    alpha128[0] = simde_mm_slli_si128(alpha128[K1], 1);
+    alpha128[1] = simde_mm_slli_si128(alpha128[1 + K1], 1);
+    alpha128[2] = simde_mm_slli_si128(alpha128[2 + K1], 1);
+    alpha128[3] = simde_mm_slli_si128(alpha128[3 + K1], 1);
+    alpha128[4] = simde_mm_slli_si128(alpha128[4 + K1], 1);
+    alpha128[5] = simde_mm_slli_si128(alpha128[5 + K1], 1);
+    alpha128[6] = simde_mm_slli_si128(alpha128[6 + K1], 1);
+    alpha128[7] = simde_mm_slli_si128(alpha128[7 + K1], 1);
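+    // the one-byte shift zeroes lane 0 of each state vector; the stores below
+    // re-seed the unknown initial states with -MAX8/2, while alpha[0] (state 0)
+    // stays at 0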
     alpha[16] =  -MAX8/2;
     alpha[32] = -MAX8/2;
     alpha[48] = -MAX8/2;
@@ -396,11 +513,11 @@ void compute_alpha8(llr_t *alpha,llr_t *beta,llr_t *m_11,llr_t *m_10,unsigned sh
 void compute_beta8(llr_t *alpha,llr_t *beta,llr_t *m_11,llr_t *m_10,unsigned short frame_length,unsigned char F,int offset8_flag) {
   int k,rerun_flag, loopval;
 #if defined(__x86_64__) || defined(__i386__)
-  __m128i m11_128,m10_128;
-  __m128i m_b0,m_b1,m_b2,m_b3,m_b4,m_b5,m_b6,m_b7;
-  __m128i new0,new1,new2,new3,new4,new5,new6,new7;
-  __m128i *beta128,*alpha128,*beta_ptr;
-  __m128i beta_max;
+  simde__m128i m11_128, m10_128;
+  simde__m128i m_b0, m_b1, m_b2, m_b3, m_b4, m_b5, m_b6, m_b7;
+  simde__m128i new0, new1, new2, new3, new4, new5, new6, new7;
+  simde__m128i *beta128, *alpha128, *beta_ptr;
+  simde__m128i beta_max;
 #elif defined(__arm__) || defined(__aarch64__)
   int8x16_t m11_128,m10_128;
   int8x16_t m_b0,m_b1,m_b2,m_b3,m_b4,m_b5,m_b6,m_b7;
@@ -419,8 +536,8 @@ void compute_beta8(llr_t *alpha,llr_t *beta,llr_t *m_11,llr_t *m_10,unsigned sho
   // we are supposed to run compute_alpha just before compute_beta
   // so the initial states of the backward computation can be set from the last alpha states (forward computation)
 #if defined(__x86_64__) || defined(__i386__)
-  beta_ptr   = (__m128i *)&beta[frame_length<<3];
-  alpha128   = (__m128i *)&alpha[0];
+  beta_ptr = (simde__m128i *)&beta[frame_length << 3];
+  alpha128 = (simde__m128i *)&alpha[0];
 #elif defined(__arm__) || defined(__aarch64__)
   beta_ptr   = (int8x16_t *)&beta[frame_length<<3];
   alpha128   = (int8x16_t *)&alpha[0];
@@ -443,14 +560,14 @@ void compute_beta8(llr_t *alpha,llr_t *beta,llr_t *m_11,llr_t *m_10,unsigned sho
       // workaround: init with 0
       beta0 = beta1 = beta2 = beta3 = beta4 = beta5 = beta6 = beta7 = 0;
 #if defined(__x86_64__) || defined(__i386__)
-      beta_ptr[0] = _mm_insert_epi8(beta_ptr[0],beta0,15);
-      beta_ptr[1] = _mm_insert_epi8(beta_ptr[1],beta1,15);
-      beta_ptr[2] = _mm_insert_epi8(beta_ptr[2],beta2,15);
-      beta_ptr[3] = _mm_insert_epi8(beta_ptr[3],beta3,15);
-      beta_ptr[4] = _mm_insert_epi8(beta_ptr[4],beta4,15);
-      beta_ptr[5] = _mm_insert_epi8(beta_ptr[5],beta5,15);
-      beta_ptr[6] = _mm_insert_epi8(beta_ptr[6],beta6,15);
-      beta_ptr[7] = _mm_insert_epi8(beta_ptr[7],beta7,15);
+      beta_ptr[0] = simde_mm_insert_epi8(beta_ptr[0], beta0, 15);
+      beta_ptr[1] = simde_mm_insert_epi8(beta_ptr[1], beta1, 15);
+      beta_ptr[2] = simde_mm_insert_epi8(beta_ptr[2], beta2, 15);
+      beta_ptr[3] = simde_mm_insert_epi8(beta_ptr[3], beta3, 15);
+      beta_ptr[4] = simde_mm_insert_epi8(beta_ptr[4], beta4, 15);
+      beta_ptr[5] = simde_mm_insert_epi8(beta_ptr[5], beta5, 15);
+      beta_ptr[6] = simde_mm_insert_epi8(beta_ptr[6], beta6, 15);
+      beta_ptr[7] = simde_mm_insert_epi8(beta_ptr[7], beta7, 15);
 #elif defined(__arm__) || defined(__aarch64__)
       beta_ptr[0] = vsetq_lane_s8(beta0,beta_ptr[0],15);
       beta_ptr[1] = vsetq_lane_s8(beta1,beta_ptr[1],15);
@@ -464,7 +581,7 @@ void compute_beta8(llr_t *alpha,llr_t *beta,llr_t *m_11,llr_t *m_10,unsigned sho
     }
 
 #if defined(__x86_64__) || defined(__i386__)
-    beta_ptr = (__m128i *)&beta[frame_length<<3];
+    beta_ptr = (simde__m128i *)&beta[frame_length << 3];
 #elif defined(__arm__) || defined(__aarch64__)
     beta_ptr = (int8x16_t *)&beta[frame_length<<3];
 #endif
@@ -473,48 +590,48 @@ void compute_beta8(llr_t *alpha,llr_t *beta,llr_t *m_11,llr_t *m_10,unsigned sho
          k>=loopval;
          k--) {
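       // backward recursion over the same butterfly structure as compute_alpha8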
 #if defined(__x86_64__) || defined(__i386__)
-      m11_128=((__m128i *)m_11)[k];
-      m10_128=((__m128i *)m_10)[k];
-      m_b0 = _mm_adds_epi8(beta_ptr[4],m11_128);  //m11
-      m_b1 = _mm_subs_epi8(beta_ptr[4],m11_128);  //m00
-      m_b2 = _mm_subs_epi8(beta_ptr[5],m10_128);  //m01
-      m_b3 = _mm_adds_epi8(beta_ptr[5],m10_128);  //m10
-      m_b4 = _mm_adds_epi8(beta_ptr[6],m10_128);  //m10
-      m_b5 = _mm_subs_epi8(beta_ptr[6],m10_128);  //m01
-      m_b6 = _mm_subs_epi8(beta_ptr[7],m11_128);  //m00
-      m_b7 = _mm_adds_epi8(beta_ptr[7],m11_128);  //m11
-      new0 = _mm_subs_epi8(beta_ptr[0],m11_128);  //m00
-      new1 = _mm_adds_epi8(beta_ptr[0],m11_128);  //m11
-      new2 = _mm_adds_epi8(beta_ptr[1],m10_128);  //m10
-      new3 = _mm_subs_epi8(beta_ptr[1],m10_128);  //m01
-      new4 = _mm_subs_epi8(beta_ptr[2],m10_128);  //m01
-      new5 = _mm_adds_epi8(beta_ptr[2],m10_128);  //m10
-      new6 = _mm_adds_epi8(beta_ptr[3],m11_128);  //m11
-      new7 = _mm_subs_epi8(beta_ptr[3],m11_128);  //m00
+      m11_128 = ((simde__m128i *)m_11)[k];
+      m10_128 = ((simde__m128i *)m_10)[k];
+      m_b0 = simde_mm_adds_epi8(beta_ptr[4], m11_128); // m11
+      m_b1 = simde_mm_subs_epi8(beta_ptr[4], m11_128); // m00
+      m_b2 = simde_mm_subs_epi8(beta_ptr[5], m10_128); // m01
+      m_b3 = simde_mm_adds_epi8(beta_ptr[5], m10_128); // m10
+      m_b4 = simde_mm_adds_epi8(beta_ptr[6], m10_128); // m10
+      m_b5 = simde_mm_subs_epi8(beta_ptr[6], m10_128); // m01
+      m_b6 = simde_mm_subs_epi8(beta_ptr[7], m11_128); // m00
+      m_b7 = simde_mm_adds_epi8(beta_ptr[7], m11_128); // m11
+      new0 = simde_mm_subs_epi8(beta_ptr[0], m11_128); // m00
+      new1 = simde_mm_adds_epi8(beta_ptr[0], m11_128); // m11
+      new2 = simde_mm_adds_epi8(beta_ptr[1], m10_128); // m10
+      new3 = simde_mm_subs_epi8(beta_ptr[1], m10_128); // m01
+      new4 = simde_mm_subs_epi8(beta_ptr[2], m10_128); // m01
+      new5 = simde_mm_adds_epi8(beta_ptr[2], m10_128); // m10
+      new6 = simde_mm_adds_epi8(beta_ptr[3], m11_128); // m11
+      new7 = simde_mm_subs_epi8(beta_ptr[3], m11_128); // m00
       beta_ptr-=8;
-      beta_ptr[0] = _mm_max_epi8(m_b0,new0);
-      beta_ptr[1] = _mm_max_epi8(m_b1,new1);
-      beta_ptr[2] = _mm_max_epi8(m_b2,new2);
-      beta_ptr[3] = _mm_max_epi8(m_b3,new3);
-      beta_ptr[4] = _mm_max_epi8(m_b4,new4);
-      beta_ptr[5] = _mm_max_epi8(m_b5,new5);
-      beta_ptr[6] = _mm_max_epi8(m_b6,new6);
-      beta_ptr[7] = _mm_max_epi8(m_b7,new7);
-      beta_max = _mm_max_epi8(beta_ptr[0],beta_ptr[1]);
-      beta_max = _mm_max_epi8(beta_max   ,beta_ptr[2]);
-      beta_max = _mm_max_epi8(beta_max   ,beta_ptr[3]);
-      beta_max = _mm_max_epi8(beta_max   ,beta_ptr[4]);
-      beta_max = _mm_max_epi8(beta_max   ,beta_ptr[5]);
-      beta_max = _mm_max_epi8(beta_max   ,beta_ptr[6]);
-      beta_max = _mm_max_epi8(beta_max   ,beta_ptr[7]);
-      beta_ptr[0] = _mm_subs_epi8(beta_ptr[0],beta_max);
-      beta_ptr[1] = _mm_subs_epi8(beta_ptr[1],beta_max);
-      beta_ptr[2] = _mm_subs_epi8(beta_ptr[2],beta_max);
-      beta_ptr[3] = _mm_subs_epi8(beta_ptr[3],beta_max);
-      beta_ptr[4] = _mm_subs_epi8(beta_ptr[4],beta_max);
-      beta_ptr[5] = _mm_subs_epi8(beta_ptr[5],beta_max);
-      beta_ptr[6] = _mm_subs_epi8(beta_ptr[6],beta_max);
-      beta_ptr[7] = _mm_subs_epi8(beta_ptr[7],beta_max);
+      beta_ptr[0] = simde_mm_max_epi8(m_b0, new0);
+      beta_ptr[1] = simde_mm_max_epi8(m_b1, new1);
+      beta_ptr[2] = simde_mm_max_epi8(m_b2, new2);
+      beta_ptr[3] = simde_mm_max_epi8(m_b3, new3);
+      beta_ptr[4] = simde_mm_max_epi8(m_b4, new4);
+      beta_ptr[5] = simde_mm_max_epi8(m_b5, new5);
+      beta_ptr[6] = simde_mm_max_epi8(m_b6, new6);
+      beta_ptr[7] = simde_mm_max_epi8(m_b7, new7);
+      beta_max = simde_mm_max_epi8(beta_ptr[0], beta_ptr[1]);
+      beta_max = simde_mm_max_epi8(beta_max, beta_ptr[2]);
+      beta_max = simde_mm_max_epi8(beta_max, beta_ptr[3]);
+      beta_max = simde_mm_max_epi8(beta_max, beta_ptr[4]);
+      beta_max = simde_mm_max_epi8(beta_max, beta_ptr[5]);
+      beta_max = simde_mm_max_epi8(beta_max, beta_ptr[6]);
+      beta_max = simde_mm_max_epi8(beta_max, beta_ptr[7]);
+      beta_ptr[0] = simde_mm_subs_epi8(beta_ptr[0], beta_max);
+      beta_ptr[1] = simde_mm_subs_epi8(beta_ptr[1], beta_max);
+      beta_ptr[2] = simde_mm_subs_epi8(beta_ptr[2], beta_max);
+      beta_ptr[3] = simde_mm_subs_epi8(beta_ptr[3], beta_max);
+      beta_ptr[4] = simde_mm_subs_epi8(beta_ptr[4], beta_max);
+      beta_ptr[5] = simde_mm_subs_epi8(beta_ptr[5], beta_max);
+      beta_ptr[6] = simde_mm_subs_epi8(beta_ptr[6], beta_max);
+      beta_ptr[7] = simde_mm_subs_epi8(beta_ptr[7], beta_max);
 #elif defined(__arm__) || defined(__aarch64__)
       m11_128=((int8x16_t *)m_11)[k];
       m10_128=((int8x16_t *)m_10)[k];
@@ -565,16 +682,16 @@ void compute_beta8(llr_t *alpha,llr_t *beta,llr_t *m_11,llr_t *m_10,unsigned sho
     // as a column's last states are the first states of the next column.
     // The initial state of column 0 comes from the tail bits (to be computed)
 #if defined(__x86_64__) || defined(__i386__)
-    beta128 = (__m128i *)&beta[0];
-    beta_ptr   = (__m128i *)&beta[frame_length<<3];
-    beta_ptr[0] = _mm_srli_si128(beta128[0],1);
-    beta_ptr[1] = _mm_srli_si128(beta128[1],1);
-    beta_ptr[2] = _mm_srli_si128(beta128[2],1);
-    beta_ptr[3] = _mm_srli_si128(beta128[3],1);
-    beta_ptr[4] = _mm_srli_si128(beta128[4],1);
-    beta_ptr[5] = _mm_srli_si128(beta128[5],1);
-    beta_ptr[6] = _mm_srli_si128(beta128[6],1);
-    beta_ptr[7] = _mm_srli_si128(beta128[7],1);
+    beta128 = (simde__m128i *)&beta[0];
+    beta_ptr = (simde__m128i *)&beta[frame_length << 3];
+    beta_ptr[0] = simde_mm_srli_si128(beta128[0], 1);
+    beta_ptr[1] = simde_mm_srli_si128(beta128[1], 1);
+    beta_ptr[2] = simde_mm_srli_si128(beta128[2], 1);
+    beta_ptr[3] = simde_mm_srli_si128(beta128[3], 1);
+    beta_ptr[4] = simde_mm_srli_si128(beta128[4], 1);
+    beta_ptr[5] = simde_mm_srli_si128(beta128[5], 1);
+    beta_ptr[6] = simde_mm_srli_si128(beta128[6], 1);
+    beta_ptr[7] = simde_mm_srli_si128(beta128[7], 1);
 #elif defined(__arm__) || defined(__aarch64__)
     beta128 = (int8x16_t *)&beta[0];
     beta_ptr   = (int8x16_t *)&beta[frame_length<<3];
@@ -600,14 +717,14 @@ void compute_beta8(llr_t *alpha,llr_t *beta,llr_t *m_11,llr_t *m_10,unsigned sho
 
 void compute_ext8(llr_t *alpha,llr_t *beta,llr_t *m_11,llr_t *m_10,llr_t *ext, llr_t *systematic,unsigned short frame_length) {
 #if defined(__x86_64__) || defined(__i386__)
-  __m128i *alpha128=(__m128i *)alpha;
-  __m128i *beta128=(__m128i *)beta;
-  __m128i *m11_128,*m10_128,*ext_128;
-  __m128i *alpha_ptr,*beta_ptr;
-  __m128i m00_1,m00_2,m00_3,m00_4;
-  __m128i m01_1,m01_2,m01_3,m01_4;
-  __m128i m10_1,m10_2,m10_3,m10_4;
-  __m128i m11_1,m11_2,m11_3,m11_4;
+  simde__m128i *alpha128 = (simde__m128i *)alpha;
+  simde__m128i *beta128 = (simde__m128i *)beta;
+  simde__m128i *m11_128, *m10_128, *ext_128;
+  simde__m128i *alpha_ptr, *beta_ptr;
+  simde__m128i m00_1, m00_2, m00_3, m00_4;
+  simde__m128i m01_1, m01_2, m01_3, m01_4;
+  simde__m128i m10_1, m10_2, m10_3, m10_4;
+  simde__m128i m11_1, m11_2, m11_3, m11_4;
 #elif defined(__arm__) || defined(__aarch64__)
   int8x16_t *alpha128=(int8x16_t *)alpha;
   int8x16_t *beta128=(int8x16_t *)beta;
@@ -630,44 +747,44 @@ void compute_ext8(llr_t *alpha,llr_t *beta,llr_t *m_11,llr_t *m_10,llr_t *ext, l
 
   for (k=0; k<(frame_length>>4); k++) {
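     // extrinsic output (max-log-MAP): take the max of the four alpha+beta
     // candidates per transition type, fold in the branch metrics, and emit
     // max(m10,m11) - max(m01,m00)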
 #if defined(__x86_64__) || defined(__i386__)
-    m11_128        = (__m128i *)&m_11[k<<4];
-    m10_128        = (__m128i *)&m_10[k<<4];
-    ext_128        = (__m128i *)&ext[k<<4];
-    m00_4 = _mm_adds_epi8(alpha_ptr[7],beta_ptr[3]); //ALPHA_BETA_4m00;
-    m11_4 = _mm_adds_epi8(alpha_ptr[7],beta_ptr[7]); //ALPHA_BETA_4m11;
-    m00_3 = _mm_adds_epi8(alpha_ptr[6],beta_ptr[7]); //ALPHA_BETA_3m00;
-    m11_3 = _mm_adds_epi8(alpha_ptr[6],beta_ptr[3]); //ALPHA_BETA_3m11;
-    m00_2 = _mm_adds_epi8(alpha_ptr[1],beta_ptr[4]); //ALPHA_BETA_2m00;
-    m11_2 = _mm_adds_epi8(alpha_ptr[1],beta_ptr[0]); //ALPHA_BETA_2m11;
-    m11_1 = _mm_adds_epi8(alpha_ptr[0],beta_ptr[4]); //ALPHA_BETA_1m11;
-    m00_1 = _mm_adds_epi8(alpha_ptr[0],beta_ptr[0]); //ALPHA_BETA_1m00;
-    m01_4 = _mm_adds_epi8(alpha_ptr[5],beta_ptr[6]); //ALPHA_BETA_4m01;
-    m10_4 = _mm_adds_epi8(alpha_ptr[5],beta_ptr[2]); //ALPHA_BETA_4m10;
-    m01_3 = _mm_adds_epi8(alpha_ptr[4],beta_ptr[2]); //ALPHA_BETA_3m01;
-    m10_3 = _mm_adds_epi8(alpha_ptr[4],beta_ptr[6]); //ALPHA_BETA_3m10;
-    m01_2 = _mm_adds_epi8(alpha_ptr[3],beta_ptr[1]); //ALPHA_BETA_2m01;
-    m10_2 = _mm_adds_epi8(alpha_ptr[3],beta_ptr[5]); //ALPHA_BETA_2m10;
-    m10_1 = _mm_adds_epi8(alpha_ptr[2],beta_ptr[1]); //ALPHA_BETA_1m10;
-    m01_1 = _mm_adds_epi8(alpha_ptr[2],beta_ptr[5]); //ALPHA_BETA_1m01;
-    m01_1 = _mm_max_epi8(m01_1,m01_2);
-    m01_1 = _mm_max_epi8(m01_1,m01_3);
-    m01_1 = _mm_max_epi8(m01_1,m01_4);
-    m00_1 = _mm_max_epi8(m00_1,m00_2);
-    m00_1 = _mm_max_epi8(m00_1,m00_3);
-    m00_1 = _mm_max_epi8(m00_1,m00_4);
-    m10_1 = _mm_max_epi8(m10_1,m10_2);
-    m10_1 = _mm_max_epi8(m10_1,m10_3);
-    m10_1 = _mm_max_epi8(m10_1,m10_4);
-    m11_1 = _mm_max_epi8(m11_1,m11_2);
-    m11_1 = _mm_max_epi8(m11_1,m11_3);
-    m11_1 = _mm_max_epi8(m11_1,m11_4);
-    m01_1 = _mm_subs_epi8(m01_1,*m10_128);
-    m00_1 = _mm_subs_epi8(m00_1,*m11_128);
-    m10_1 = _mm_adds_epi8(m10_1,*m10_128);
-    m11_1 = _mm_adds_epi8(m11_1,*m11_128);
-    m01_1 = _mm_max_epi8(m01_1,m00_1);
-    m10_1 = _mm_max_epi8(m10_1,m11_1);
-    *ext_128 = _mm_subs_epi8(m10_1,m01_1);
+    m11_128 = (simde__m128i *)&m_11[k << 4];
+    m10_128 = (simde__m128i *)&m_10[k << 4];
+    ext_128 = (simde__m128i *)&ext[k << 4];
+    m00_4 = simde_mm_adds_epi8(alpha_ptr[7], beta_ptr[3]); // ALPHA_BETA_4m00;
+    m11_4 = simde_mm_adds_epi8(alpha_ptr[7], beta_ptr[7]); // ALPHA_BETA_4m11;
+    m00_3 = simde_mm_adds_epi8(alpha_ptr[6], beta_ptr[7]); // ALPHA_BETA_3m00;
+    m11_3 = simde_mm_adds_epi8(alpha_ptr[6], beta_ptr[3]); // ALPHA_BETA_3m11;
+    m00_2 = simde_mm_adds_epi8(alpha_ptr[1], beta_ptr[4]); // ALPHA_BETA_2m00;
+    m11_2 = simde_mm_adds_epi8(alpha_ptr[1], beta_ptr[0]); // ALPHA_BETA_2m11;
+    m11_1 = simde_mm_adds_epi8(alpha_ptr[0], beta_ptr[4]); // ALPHA_BETA_1m11;
+    m00_1 = simde_mm_adds_epi8(alpha_ptr[0], beta_ptr[0]); // ALPHA_BETA_1m00;
+    m01_4 = simde_mm_adds_epi8(alpha_ptr[5], beta_ptr[6]); // ALPHA_BETA_4m01;
+    m10_4 = simde_mm_adds_epi8(alpha_ptr[5], beta_ptr[2]); // ALPHA_BETA_4m10;
+    m01_3 = simde_mm_adds_epi8(alpha_ptr[4], beta_ptr[2]); // ALPHA_BETA_3m01;
+    m10_3 = simde_mm_adds_epi8(alpha_ptr[4], beta_ptr[6]); // ALPHA_BETA_3m10;
+    m01_2 = simde_mm_adds_epi8(alpha_ptr[3], beta_ptr[1]); // ALPHA_BETA_2m01;
+    m10_2 = simde_mm_adds_epi8(alpha_ptr[3], beta_ptr[5]); // ALPHA_BETA_2m10;
+    m10_1 = simde_mm_adds_epi8(alpha_ptr[2], beta_ptr[1]); // ALPHA_BETA_1m10;
+    m01_1 = simde_mm_adds_epi8(alpha_ptr[2], beta_ptr[5]); // ALPHA_BETA_1m01;
+    m01_1 = simde_mm_max_epi8(m01_1, m01_2);
+    m01_1 = simde_mm_max_epi8(m01_1, m01_3);
+    m01_1 = simde_mm_max_epi8(m01_1, m01_4);
+    m00_1 = simde_mm_max_epi8(m00_1, m00_2);
+    m00_1 = simde_mm_max_epi8(m00_1, m00_3);
+    m00_1 = simde_mm_max_epi8(m00_1, m00_4);
+    m10_1 = simde_mm_max_epi8(m10_1, m10_2);
+    m10_1 = simde_mm_max_epi8(m10_1, m10_3);
+    m10_1 = simde_mm_max_epi8(m10_1, m10_4);
+    m11_1 = simde_mm_max_epi8(m11_1, m11_2);
+    m11_1 = simde_mm_max_epi8(m11_1, m11_3);
+    m11_1 = simde_mm_max_epi8(m11_1, m11_4);
+    m01_1 = simde_mm_subs_epi8(m01_1, *m10_128);
+    m00_1 = simde_mm_subs_epi8(m00_1, *m11_128);
+    m10_1 = simde_mm_adds_epi8(m10_1, *m10_128);
+    m11_1 = simde_mm_adds_epi8(m11_1, *m11_128);
+    m01_1 = simde_mm_max_epi8(m01_1, m00_1);
+    m10_1 = simde_mm_max_epi8(m10_1, m11_1);
+    *ext_128 = simde_mm_subs_epi8(m10_1, m01_1);
     alpha_ptr+=8;
     beta_ptr+=8;
 #elif defined(__arm__) || defined(__aarch64__)
@@ -819,9 +936,9 @@ uint8_t phy_threegpplte_turbo_decoder8(int16_t *y,
   unsigned int crc, crc_len;
   uint8_t temp;
 #if defined(__x86_64__) || defined(__i386__)
-  __m128i *yp128;
-  __m128i tmp128[(n+8)>>3];
-  __m128i tmp={0}, zeros=_mm_setzero_si128();
+  simde__m128i *yp128;
+  simde__m128i tmp128[(n + 8) >> 3];
+  simde__m128i tmp = {0}, zeros = simde_mm_setzero_si128();
 #elif defined(__arm__) || defined(__aarch64__)
   int8x16_t *yp128;
   int8x16_t tmp128[(n+8)>>3];
@@ -873,35 +990,41 @@ uint8_t phy_threegpplte_turbo_decoder8(int16_t *y,
 
 #if defined(__x86_64__) || defined(__i386__)
   // note: this makes valgrind freak
-  __m128i avg=_mm_set1_epi32(0);
+  simde__m128i avg = simde_mm_set1_epi32(0);
 
   for (i=0; i<(3*(n>>4))+1; i++) {
-    __m128i tmp=_mm_abs_epi16(_mm_unpackhi_epi16(((__m128i *)y)[i],((__m128i *)y)[i]));
-    avg=_mm_add_epi32(_mm_cvtepi16_epi32(_mm_abs_epi16(((__m128i *)y)[i])),avg);
-    avg=_mm_add_epi32(_mm_cvtepi16_epi32(tmp),avg);
+    simde__m128i tmp = simde_mm_abs_epi16(simde_mm_unpackhi_epi16(((simde__m128i *)y)[i], ((simde__m128i *)y)[i]));
+    avg = simde_mm_add_epi32(simde_mm_cvtepi16_epi32(simde_mm_abs_epi16(((simde__m128i *)y)[i])), avg);
+    avg = simde_mm_add_epi32(simde_mm_cvtepi16_epi32(tmp), avg);
   }
 
-  int32_t round_avg=(_mm_extract_epi32(avg,0)+_mm_extract_epi32(avg,1)+_mm_extract_epi32(avg,2)+_mm_extract_epi32(avg,3))/(n*3);
+  int32_t round_avg = (simde_mm_extract_epi32(avg, 0) + simde_mm_extract_epi32(avg, 1) + simde_mm_extract_epi32(avg, 2)
+                       + simde_mm_extract_epi32(avg, 3))
+                      / (n * 3);
 
   //printf("avg input turbo: %d sum %d taille bloc %d\n",round_avg,round_sum,n);
 
   if (round_avg < 16 )
     for (i=0,j=0; i<(3*(n2>>4))+1; i++,j+=2)
-      ((__m128i *)y8)[i] = _mm_packs_epi16(((__m128i *)y)[j],((__m128i *)y)[j+1]);
+      ((simde__m128i *)y8)[i] = simde_mm_packs_epi16(((simde__m128i *)y)[j], ((simde__m128i *)y)[j + 1]);
   else if (round_avg < 32)
     for (i=0,j=0; i<(3*(n2>>4))+1; i++,j+=2)
-      ((__m128i *)y8)[i] = _mm_packs_epi16(_mm_srai_epi16(((__m128i *)y)[j],1),_mm_srai_epi16(((__m128i *)y)[j+1],1));
+      ((simde__m128i *)y8)[i] =
+          simde_mm_packs_epi16(simde_mm_srai_epi16(((simde__m128i *)y)[j], 1), simde_mm_srai_epi16(((simde__m128i *)y)[j + 1], 1));
   else if (round_avg < 64 )
     for (i=0,j=0; i<(3*(n2>>4))+1; i++,j+=2)
-      ((__m128i *)y8)[i] = _mm_packs_epi16(_mm_srai_epi16(((__m128i *)y)[j],2),_mm_srai_epi16(((__m128i *)y)[j+1],2));
+      ((simde__m128i *)y8)[i] =
+          simde_mm_packs_epi16(simde_mm_srai_epi16(((simde__m128i *)y)[j], 2), simde_mm_srai_epi16(((simde__m128i *)y)[j + 1], 2));
   else if (round_avg < 128)
     for (i=0,j=0; i<(3*(n2>>4))+1; i++,j+=2)
-      ((__m128i *)y8)[i] = _mm_packs_epi16(_mm_srai_epi16(((__m128i *)y)[j],3),_mm_srai_epi16(((__m128i *)y)[j+1],3));
+      ((simde__m128i *)y8)[i] =
+          simde_mm_packs_epi16(simde_mm_srai_epi16(((simde__m128i *)y)[j], 3), simde_mm_srai_epi16(((simde__m128i *)y)[j + 1], 3));
   else
     for (i=0,j=0; i<(3*(n2>>4))+1; i++,j+=2)
-      ((__m128i *)y8)[i] = _mm_packs_epi16(_mm_srai_epi16(((__m128i *)y)[j],3),_mm_srai_epi16(((__m128i *)y)[j+1],4));
+      ((simde__m128i *)y8)[i] =
+          simde_mm_packs_epi16(simde_mm_srai_epi16(((simde__m128i *)y)[j], 3), simde_mm_srai_epi16(((simde__m128i *)y)[j + 1], 4));
 
-  yp128 = (__m128i *)y8;
+  yp128 = (simde__m128i *)y8;
 #elif defined(__arm__) || defined(__aarch64__)
   int32x4_t avg=vdupq_n_s32(0);
 
@@ -1005,22 +1128,22 @@ uint8_t phy_threegpplte_turbo_decoder8(int16_t *y,
 
     for (i=0; i<(n2>>4); i++) { // steady-state portion
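       // same lane-by-lane gather as in the 16-bit decoder: sixteen scalar
       // inserts walk the permutation table to fill one 128-bit vector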
 #if defined(__x86_64__) || defined(__i386__)
-      tmp=_mm_insert_epi8(tmp,((llr_t *)ext)[*pi4_p++],0);
-      tmp=_mm_insert_epi8(tmp,((llr_t *)ext)[*pi4_p++],1);
-      tmp=_mm_insert_epi8(tmp,((llr_t *)ext)[*pi4_p++],2);
-      tmp=_mm_insert_epi8(tmp,((llr_t *)ext)[*pi4_p++],3);
-      tmp=_mm_insert_epi8(tmp,((llr_t *)ext)[*pi4_p++],4);
-      tmp=_mm_insert_epi8(tmp,((llr_t *)ext)[*pi4_p++],5);
-      tmp=_mm_insert_epi8(tmp,((llr_t *)ext)[*pi4_p++],6);
-      tmp=_mm_insert_epi8(tmp,((llr_t *)ext)[*pi4_p++],7);
-      tmp=_mm_insert_epi8(tmp,((llr_t *)ext)[*pi4_p++],8);
-      tmp=_mm_insert_epi8(tmp,((llr_t *)ext)[*pi4_p++],9);
-      tmp=_mm_insert_epi8(tmp,((llr_t *)ext)[*pi4_p++],10);
-      tmp=_mm_insert_epi8(tmp,((llr_t *)ext)[*pi4_p++],11);
-      tmp=_mm_insert_epi8(tmp,((llr_t *)ext)[*pi4_p++],12);
-      tmp=_mm_insert_epi8(tmp,((llr_t *)ext)[*pi4_p++],13);
-      tmp=_mm_insert_epi8(tmp,((llr_t *)ext)[*pi4_p++],14);
-      ((__m128i *)systematic2)[i]=_mm_insert_epi8(tmp,((llr_t *)ext)[*pi4_p++],15);
+      tmp = simde_mm_insert_epi8(tmp, ((llr_t *)ext)[*pi4_p++], 0);
+      tmp = simde_mm_insert_epi8(tmp, ((llr_t *)ext)[*pi4_p++], 1);
+      tmp = simde_mm_insert_epi8(tmp, ((llr_t *)ext)[*pi4_p++], 2);
+      tmp = simde_mm_insert_epi8(tmp, ((llr_t *)ext)[*pi4_p++], 3);
+      tmp = simde_mm_insert_epi8(tmp, ((llr_t *)ext)[*pi4_p++], 4);
+      tmp = simde_mm_insert_epi8(tmp, ((llr_t *)ext)[*pi4_p++], 5);
+      tmp = simde_mm_insert_epi8(tmp, ((llr_t *)ext)[*pi4_p++], 6);
+      tmp = simde_mm_insert_epi8(tmp, ((llr_t *)ext)[*pi4_p++], 7);
+      tmp = simde_mm_insert_epi8(tmp, ((llr_t *)ext)[*pi4_p++], 8);
+      tmp = simde_mm_insert_epi8(tmp, ((llr_t *)ext)[*pi4_p++], 9);
+      tmp = simde_mm_insert_epi8(tmp, ((llr_t *)ext)[*pi4_p++], 10);
+      tmp = simde_mm_insert_epi8(tmp, ((llr_t *)ext)[*pi4_p++], 11);
+      tmp = simde_mm_insert_epi8(tmp, ((llr_t *)ext)[*pi4_p++], 12);
+      tmp = simde_mm_insert_epi8(tmp, ((llr_t *)ext)[*pi4_p++], 13);
+      tmp = simde_mm_insert_epi8(tmp, ((llr_t *)ext)[*pi4_p++], 14);
+      ((simde__m128i *)systematic2)[i] = simde_mm_insert_epi8(tmp, ((llr_t *)ext)[*pi4_p++], 15);
 #elif defined(__arm__) || defined(__aarch64__)
       tmp=vsetq_lane_s8(((llr_t *)ext)[*pi4_p++],tmp,0);
       tmp=vsetq_lane_s8(((llr_t *)ext)[*pi4_p++],tmp,1);
@@ -1051,24 +1174,25 @@ uint8_t phy_threegpplte_turbo_decoder8(int16_t *y,
     if ((n2&0x7f) == 0) {  // n2 is a multiple of 128 bits
       for (i=0; i<(n2>>4); i++) {
 #if defined(__x86_64__) || defined(__i386__)
-        tmp=_mm_insert_epi8(tmp,ext2[*pi5_p++],0);
-        tmp=_mm_insert_epi8(tmp,ext2[*pi5_p++],1);
-        tmp=_mm_insert_epi8(tmp,ext2[*pi5_p++],2);
-        tmp=_mm_insert_epi8(tmp,ext2[*pi5_p++],3);
-        tmp=_mm_insert_epi8(tmp,ext2[*pi5_p++],4);
-        tmp=_mm_insert_epi8(tmp,ext2[*pi5_p++],5);
-        tmp=_mm_insert_epi8(tmp,ext2[*pi5_p++],6);
-        tmp=_mm_insert_epi8(tmp,ext2[*pi5_p++],7);
-        tmp=_mm_insert_epi8(tmp,ext2[*pi5_p++],8);
-        tmp=_mm_insert_epi8(tmp,ext2[*pi5_p++],9);
-        tmp=_mm_insert_epi8(tmp,ext2[*pi5_p++],10);
-        tmp=_mm_insert_epi8(tmp,ext2[*pi5_p++],11);
-        tmp=_mm_insert_epi8(tmp,ext2[*pi5_p++],12);
-        tmp=_mm_insert_epi8(tmp,ext2[*pi5_p++],13);
-        tmp=_mm_insert_epi8(tmp,ext2[*pi5_p++],14);
-        tmp=_mm_insert_epi8(tmp,ext2[*pi5_p++],15);
-        decoded_bytes_interl[i]=(uint16_t) _mm_movemask_epi8(_mm_cmpgt_epi8(tmp,zeros));
-        ((__m128i *)systematic1)[i] = _mm_adds_epi8(_mm_subs_epi8(tmp,((__m128i *)ext)[i]),((__m128i *)systematic0)[i]);
+        tmp = simde_mm_insert_epi8(tmp, ext2[*pi5_p++], 0);
+        tmp = simde_mm_insert_epi8(tmp, ext2[*pi5_p++], 1);
+        tmp = simde_mm_insert_epi8(tmp, ext2[*pi5_p++], 2);
+        tmp = simde_mm_insert_epi8(tmp, ext2[*pi5_p++], 3);
+        tmp = simde_mm_insert_epi8(tmp, ext2[*pi5_p++], 4);
+        tmp = simde_mm_insert_epi8(tmp, ext2[*pi5_p++], 5);
+        tmp = simde_mm_insert_epi8(tmp, ext2[*pi5_p++], 6);
+        tmp = simde_mm_insert_epi8(tmp, ext2[*pi5_p++], 7);
+        tmp = simde_mm_insert_epi8(tmp, ext2[*pi5_p++], 8);
+        tmp = simde_mm_insert_epi8(tmp, ext2[*pi5_p++], 9);
+        tmp = simde_mm_insert_epi8(tmp, ext2[*pi5_p++], 10);
+        tmp = simde_mm_insert_epi8(tmp, ext2[*pi5_p++], 11);
+        tmp = simde_mm_insert_epi8(tmp, ext2[*pi5_p++], 12);
+        tmp = simde_mm_insert_epi8(tmp, ext2[*pi5_p++], 13);
+        tmp = simde_mm_insert_epi8(tmp, ext2[*pi5_p++], 14);
+        tmp = simde_mm_insert_epi8(tmp, ext2[*pi5_p++], 15);
+        decoded_bytes_interl[i] = (uint16_t)simde_mm_movemask_epi8(simde_mm_cmpgt_epi8(tmp, zeros));
+        ((simde__m128i *)systematic1)[i] =
+            simde_mm_adds_epi8(simde_mm_subs_epi8(tmp, ((simde__m128i *)ext)[i]), ((simde__m128i *)systematic0)[i]);
 #elif defined(__arm__) || defined(__aarch64__)
         tmp=vsetq_lane_s8(ext2[*pi5_p++],tmp,0);
         tmp=vsetq_lane_s8(ext2[*pi5_p++],tmp,1);
@@ -1095,24 +1219,25 @@ uint8_t phy_threegpplte_turbo_decoder8(int16_t *y,
     } else {
       for (i=0; i<(n2>>4); i++) {
 #if defined(__x86_64__) || defined(__i386__)
-        tmp=_mm_insert_epi8(tmp,ext2[*pi5_p++],0);
-        tmp=_mm_insert_epi8(tmp,ext2[*pi5_p++],1);
-        tmp=_mm_insert_epi8(tmp,ext2[*pi5_p++],2);
-        tmp=_mm_insert_epi8(tmp,ext2[*pi5_p++],3);
-        tmp=_mm_insert_epi8(tmp,ext2[*pi5_p++],4);
-        tmp=_mm_insert_epi8(tmp,ext2[*pi5_p++],5);
-        tmp=_mm_insert_epi8(tmp,ext2[*pi5_p++],6);
-        tmp=_mm_insert_epi8(tmp,ext2[*pi5_p++],7);
-        tmp=_mm_insert_epi8(tmp,ext2[*pi5_p++],8);
-        tmp=_mm_insert_epi8(tmp,ext2[*pi5_p++],9);
-        tmp=_mm_insert_epi8(tmp,ext2[*pi5_p++],10);
-        tmp=_mm_insert_epi8(tmp,ext2[*pi5_p++],11);
-        tmp=_mm_insert_epi8(tmp,ext2[*pi5_p++],12);
-        tmp=_mm_insert_epi8(tmp,ext2[*pi5_p++],13);
-        tmp=_mm_insert_epi8(tmp,ext2[*pi5_p++],14);
-        tmp=_mm_insert_epi8(tmp,ext2[*pi5_p++],15);
-        tmp128[i] = _mm_adds_epi8(((__m128i *)ext2)[i],((__m128i *)systematic2)[i]);
-        ((__m128i *)systematic1)[i] = _mm_adds_epi8(_mm_subs_epi8(tmp,((__m128i *)ext)[i]),((__m128i *)systematic0)[i]);
+        tmp = simde_mm_insert_epi8(tmp, ext2[*pi5_p++], 0);
+        tmp = simde_mm_insert_epi8(tmp, ext2[*pi5_p++], 1);
+        tmp = simde_mm_insert_epi8(tmp, ext2[*pi5_p++], 2);
+        tmp = simde_mm_insert_epi8(tmp, ext2[*pi5_p++], 3);
+        tmp = simde_mm_insert_epi8(tmp, ext2[*pi5_p++], 4);
+        tmp = simde_mm_insert_epi8(tmp, ext2[*pi5_p++], 5);
+        tmp = simde_mm_insert_epi8(tmp, ext2[*pi5_p++], 6);
+        tmp = simde_mm_insert_epi8(tmp, ext2[*pi5_p++], 7);
+        tmp = simde_mm_insert_epi8(tmp, ext2[*pi5_p++], 8);
+        tmp = simde_mm_insert_epi8(tmp, ext2[*pi5_p++], 9);
+        tmp = simde_mm_insert_epi8(tmp, ext2[*pi5_p++], 10);
+        tmp = simde_mm_insert_epi8(tmp, ext2[*pi5_p++], 11);
+        tmp = simde_mm_insert_epi8(tmp, ext2[*pi5_p++], 12);
+        tmp = simde_mm_insert_epi8(tmp, ext2[*pi5_p++], 13);
+        tmp = simde_mm_insert_epi8(tmp, ext2[*pi5_p++], 14);
+        tmp = simde_mm_insert_epi8(tmp, ext2[*pi5_p++], 15);
+        tmp128[i] = simde_mm_adds_epi8(((simde__m128i *)ext2)[i], ((simde__m128i *)systematic2)[i]);
+        ((simde__m128i *)systematic1)[i] =
+            simde_mm_adds_epi8(simde_mm_subs_epi8(tmp, ((simde__m128i *)ext)[i]), ((simde__m128i *)systematic0)[i]);
 #elif defined(__arm__) || defined(__aarch64__)
         tmp=vsetq_lane_s8(ext2[*pi5_p++],tmp,0);
         tmp=vsetq_lane_s8(ext2[*pi5_p++],tmp,1);
@@ -1144,27 +1269,27 @@ uint8_t phy_threegpplte_turbo_decoder8(int16_t *y,
         // re-order the decoded bits in the regular order
         // as it is presently ordered as 16 sequential columns
 #if defined(__x86_64__) || defined(__i386__)
-        __m128i *dbytes=(__m128i *)decoded_bytes_interl;
-        __m128i shuffle=SHUFFLE16(7,6,5,4,3,2,1,0);
-        __m128i mask  __attribute__((aligned(16)));
+        simde__m128i *dbytes = (simde__m128i *)decoded_bytes_interl;
+        simde__m128i shuffle = SHUFFLE16(7, 6, 5, 4, 3, 2, 1, 0);
+        simde__m128i mask __attribute__((aligned(16)));
         int n_128=n2>>7;
 
         for (i=0; i<n_128; i++) {
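+          // Extract bit j of each shuffled 16-bit word: every (i,j) pass emits
+          // one byte at block offset n_128*j, undoing the 16-column interleave.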
-          mask=_mm_set1_epi16(1);
-          __m128i tmp __attribute__((aligned(16)));
-          tmp=_mm_shuffle_epi8(dbytes[i],shuffle);
-          __m128i tmp2 __attribute__((aligned(16))) ;
-          tmp2=_mm_and_si128(tmp,mask);
-          tmp2=_mm_cmpeq_epi16(tmp2,mask);
+          mask = simde_mm_set1_epi16(1);
+          simde__m128i tmp __attribute__((aligned(16)));
+          tmp = simde_mm_shuffle_epi8(dbytes[i], shuffle);
+          simde__m128i tmp2 __attribute__((aligned(16)));
+          tmp2 = simde_mm_and_si128(tmp, mask);
+          tmp2 = simde_mm_cmpeq_epi16(tmp2, mask);
           //    printf("decoded_bytes %p\n",decoded_bytes);
-          decoded_bytes[n_128*0+i]=(uint8_t) _mm_movemask_epi8(_mm_packs_epi16(tmp2,zeros));
+          decoded_bytes[n_128 * 0 + i] = (uint8_t)simde_mm_movemask_epi8(simde_mm_packs_epi16(tmp2, zeros));
           int j;
 
           for (j=1; j<16; j++) {
-            mask=_mm_slli_epi16(mask,1);
-            tmp2=_mm_and_si128(tmp,mask);
-            tmp2=_mm_cmpeq_epi16(tmp2,mask);
-            decoded_bytes[n_128*j +i]=(uint8_t) _mm_movemask_epi8(_mm_packs_epi16(tmp2,zeros));
+            mask = simde_mm_slli_epi16(mask, 1);
+            tmp2 = simde_mm_and_si128(tmp, mask);
+            tmp2 = simde_mm_cmpeq_epi16(tmp2, mask);
+            decoded_bytes[n_128 * j + i] = (uint8_t)simde_mm_movemask_epi8(simde_mm_packs_epi16(tmp2, zeros));
           }
         }
 
@@ -1192,24 +1317,24 @@ uint8_t phy_threegpplte_turbo_decoder8(int16_t *y,
 
         for (i=0; i<(n2>>4); i++) {
 #if defined(__x86_64__) || defined(__i386__)
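+          // Gather 16 LLRs through the pi6 de-interleaver (lanes filled 7..0,
+          // then 15..8), compare with zero and movemask the 16 decided bits.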
-          tmp=_mm_insert_epi8(tmp, ((llr_t *)tmp128)[*pi6_p++],7);
-          tmp=_mm_insert_epi8(tmp, ((llr_t *)tmp128)[*pi6_p++],6);
-          tmp=_mm_insert_epi8(tmp, ((llr_t *)tmp128)[*pi6_p++],5);
-          tmp=_mm_insert_epi8(tmp, ((llr_t *)tmp128)[*pi6_p++],4);
-          tmp=_mm_insert_epi8(tmp, ((llr_t *)tmp128)[*pi6_p++],3);
-          tmp=_mm_insert_epi8(tmp, ((llr_t *)tmp128)[*pi6_p++],2);
-          tmp=_mm_insert_epi8(tmp, ((llr_t *)tmp128)[*pi6_p++],1);
-          tmp=_mm_insert_epi8(tmp, ((llr_t *)tmp128)[*pi6_p++],0);
-          tmp=_mm_insert_epi8(tmp, ((llr_t *)tmp128)[*pi6_p++],15);
-          tmp=_mm_insert_epi8(tmp, ((llr_t *)tmp128)[*pi6_p++],14);
-          tmp=_mm_insert_epi8(tmp, ((llr_t *)tmp128)[*pi6_p++],13);
-          tmp=_mm_insert_epi8(tmp, ((llr_t *)tmp128)[*pi6_p++],12);
-          tmp=_mm_insert_epi8(tmp, ((llr_t *)tmp128)[*pi6_p++],11);
-          tmp=_mm_insert_epi8(tmp, ((llr_t *)tmp128)[*pi6_p++],10);
-          tmp=_mm_insert_epi8(tmp, ((llr_t *)tmp128)[*pi6_p++],9);
-          tmp=_mm_insert_epi8(tmp, ((llr_t *)tmp128)[*pi6_p++],8);
-          tmp=_mm_cmpgt_epi8(tmp,zeros);
-          ((uint16_t *)decoded_bytes)[i]=(uint16_t)_mm_movemask_epi8(tmp);
+          tmp = simde_mm_insert_epi8(tmp, ((llr_t *)tmp128)[*pi6_p++], 7);
+          tmp = simde_mm_insert_epi8(tmp, ((llr_t *)tmp128)[*pi6_p++], 6);
+          tmp = simde_mm_insert_epi8(tmp, ((llr_t *)tmp128)[*pi6_p++], 5);
+          tmp = simde_mm_insert_epi8(tmp, ((llr_t *)tmp128)[*pi6_p++], 4);
+          tmp = simde_mm_insert_epi8(tmp, ((llr_t *)tmp128)[*pi6_p++], 3);
+          tmp = simde_mm_insert_epi8(tmp, ((llr_t *)tmp128)[*pi6_p++], 2);
+          tmp = simde_mm_insert_epi8(tmp, ((llr_t *)tmp128)[*pi6_p++], 1);
+          tmp = simde_mm_insert_epi8(tmp, ((llr_t *)tmp128)[*pi6_p++], 0);
+          tmp = simde_mm_insert_epi8(tmp, ((llr_t *)tmp128)[*pi6_p++], 15);
+          tmp = simde_mm_insert_epi8(tmp, ((llr_t *)tmp128)[*pi6_p++], 14);
+          tmp = simde_mm_insert_epi8(tmp, ((llr_t *)tmp128)[*pi6_p++], 13);
+          tmp = simde_mm_insert_epi8(tmp, ((llr_t *)tmp128)[*pi6_p++], 12);
+          tmp = simde_mm_insert_epi8(tmp, ((llr_t *)tmp128)[*pi6_p++], 11);
+          tmp = simde_mm_insert_epi8(tmp, ((llr_t *)tmp128)[*pi6_p++], 10);
+          tmp = simde_mm_insert_epi8(tmp, ((llr_t *)tmp128)[*pi6_p++], 9);
+          tmp = simde_mm_insert_epi8(tmp, ((llr_t *)tmp128)[*pi6_p++], 8);
+          tmp = simde_mm_cmpgt_epi8(tmp, zeros);
+          ((uint16_t *)decoded_bytes)[i] = (uint16_t)simde_mm_movemask_epi8(tmp);
 #elif defined(__arm__) || defined(__aarch64__)
           tmp=vsetq_lane_s8(((llr_t *)tmp128)[*pi6_p++],tmp,7);
           tmp=vsetq_lane_s8(((llr_t *)tmp128)[*pi6_p++],tmp,6);
@@ -1286,9 +1411,9 @@ uint8_t phy_threegpplte_turbo_decoder8(int16_t *y,
     if (iteration_cnt < max_iterations) {
       log_map8(systematic1,yparity1,m11,m10,alpha,beta,ext,n2,0,F,offset8_flag,alpha_stats,beta_stats,gamma_stats,ext_stats);
 #if defined(__x86_64__) || defined(__i386__)
-      __m128i *ext_128=(__m128i *) ext;
-      __m128i *s1_128=(__m128i *) systematic1;
-      __m128i *s0_128=(__m128i *) systematic0;
+      simde__m128i *ext_128 = (simde__m128i *)ext;
+      simde__m128i *s1_128 = (simde__m128i *)systematic1;
+      simde__m128i *s0_128 = (simde__m128i *)systematic0;
 #elif defined(__arm__) || defined(__aarch64__)
       int8x16_t *ext_128=(int8x16_t *) ext;
       int8x16_t *s1_128=(int8x16_t *) systematic1;
@@ -1298,7 +1423,7 @@ uint8_t phy_threegpplte_turbo_decoder8(int16_t *y,
 
       for (i=0; i<myloop; i++) {
 #if defined(__x86_64__) || defined(__i386__)
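+        // Extrinsic update for the next iteration: ext += s0 - s1 (saturating).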
-        *ext_128=_mm_adds_epi8(_mm_subs_epi8(*ext_128,*s1_128++),*s0_128++);
+        *ext_128 = simde_mm_adds_epi8(simde_mm_subs_epi8(*ext_128, *s1_128++), *s0_128++);
 #elif defined(__arm__) || defined(__aarch64__)
         *ext_128=vqaddq_s8(vqsubq_s8(*ext_128,*s1_128++),*s0_128++);
 #endif
diff --git a/openair1/PHY/CODING/TESTBENCH/coding_unitary_defs.h b/openair1/PHY/CODING/TESTBENCH/coding_unitary_defs.h
index 23d02ef1976eda0df9b140ee39071387bb988877..644751e80e7d2d36eeed3b58e6baf1063350b12f 100644
--- a/openair1/PHY/CODING/TESTBENCH/coding_unitary_defs.h
+++ b/openair1/PHY/CODING/TESTBENCH/coding_unitary_defs.h
@@ -34,9 +34,9 @@ void exit_function(const char* file, const char* function, const int line, const
   exit(-1);
 }
 
-signed char quantize(double D, double x, unsigned char B) {
+int8_t quantize(double D, double x, uint8_t B) {
   double qxd;
-  short maxlev;
+  int16_t maxlev;
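+  // Quantize x with step D, clamping to the signed B-bit range
+  // [-2^(B-1), 2^(B-1)-1] (e.g. [-128, 127] for B = 8).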
   qxd = floor(x / D);
   maxlev = 1 << (B - 1); //(char)(pow(2,B-1));
 
@@ -45,7 +45,7 @@ signed char quantize(double D, double x, unsigned char B) {
   else if (qxd >= maxlev)
     qxd = maxlev - 1;
 
-  return ((char) qxd);
+  return ((int8_t) qxd);
 }
 
 
diff --git a/openair1/PHY/CODING/TESTBENCH/ldpctest.c b/openair1/PHY/CODING/TESTBENCH/ldpctest.c
index f4064fe844f34523b6e4dba3604535737a97c5ab..73e50c97a8b8912be54f305ad1ea641181529c94 100644
--- a/openair1/PHY/CODING/TESTBENCH/ldpctest.c
+++ b/openair1/PHY/CODING/TESTBENCH/ldpctest.c
@@ -23,6 +23,7 @@
 #include <math.h>
 #include <stdio.h>
 #include <string.h>
+#include <stdint.h>
 #include "assertions.h"
 #include "SIMULATION/TOOLS/sim.h"
 #include "common/utils/load_module_shlib.h"
@@ -43,7 +44,7 @@
 #define NR_LDPC_ENABLE_PARITY_CHECK
 
 // 4-bit quantizer
-char quantize4bit(double D,double x)
+int8_t quantize4bit(double D,double x)
 {
   double qxd;
   qxd = floor(x/D);
@@ -54,13 +55,13 @@ char quantize4bit(double D,double x)
   else if (qxd > 7)
     qxd = 7;
 
-  return((char)qxd);
+  return((int8_t)qxd);
 }
 
-char quantize8bit(double D,double x)
+int8_t quantize8bit(double D,double x)
 {
   double qxd;
-  //char maxlev;
+  //int8_t maxlev;
   qxd = floor(x/D);
 
   //maxlev = 1<<(B-1);
@@ -72,7 +73,7 @@ char quantize8bit(double D,double x)
   else if (qxd >= 128)
     qxd = 127;
 
-  return((char)qxd);
+  return((int8_t)qxd);
 }
 
 typedef struct {
@@ -92,7 +93,7 @@ int test_ldpc(short max_iterations,
               int nom_rate,
               int denom_rate,
               double SNR,
-              unsigned char qbits,
+              uint8_t qbits,
               short block_length,
               unsigned int ntrials,
               int n_segments,
@@ -117,15 +118,15 @@ int test_ldpc(short max_iterations,
   sigma = 1.0/sqrt(2*SNR);
   opp_enabled=1;
   //short test_input[block_length];
-  unsigned char *test_input[MAX_NUM_NR_DLSCH_SEGMENTS_PER_LAYER*NR_MAX_NB_LAYERS]={NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL};;
+  uint8_t *test_input[MAX_NUM_NR_DLSCH_SEGMENTS_PER_LAYER*NR_MAX_NB_LAYERS]={NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL};
   //short *c; //padded codeword
-  unsigned char estimated_output[MAX_NUM_DLSCH_SEGMENTS][block_length];
+  uint8_t estimated_output[MAX_NUM_DLSCH_SEGMENTS][block_length];
   memset(estimated_output, 0, sizeof(estimated_output));
-  unsigned char *channel_input[MAX_NUM_DLSCH_SEGMENTS];
-  unsigned char *channel_input_optim[MAX_NUM_DLSCH_SEGMENTS];
+  uint8_t *channel_input[MAX_NUM_DLSCH_SEGMENTS];
+  uint8_t *channel_input_optim[MAX_NUM_DLSCH_SEGMENTS];
   //double channel_output[68 * 384];
   double modulated_input[MAX_NUM_DLSCH_SEGMENTS][68 * 384] = { 0 };
-  char channel_output_fixed[MAX_NUM_DLSCH_SEGMENTS][68  * 384] = { 0 };
+  int8_t channel_output_fixed[MAX_NUM_DLSCH_SEGMENTS][68  * 384] = { 0 };
   short BG=0,nrows=0;//,ncols;
   int no_punctured_columns,removed_bit;
   int i1,Zc,Kb=0;
@@ -149,12 +150,12 @@ int test_ldpc(short max_iterations,
 
   // generate input block
   for(int j=0;j<MAX_NUM_DLSCH_SEGMENTS;j++) {
-    test_input[j]=(unsigned char *)malloc16(sizeof(unsigned char) * block_length/8);
-    memset(test_input[j], 0, sizeof(unsigned char) * block_length / 8);
-    channel_input[j] = (unsigned char *)malloc16(sizeof(unsigned char) * 68*384);
-    memset(channel_input[j], 0, sizeof(unsigned char) * 68 * 384);
-    channel_input_optim[j] = (unsigned char *)malloc16(sizeof(unsigned char) * 68*384);
-    memset(channel_input_optim[j], 0, sizeof(unsigned char) * 68 * 384);
+    test_input[j]=(uint8_t *)malloc16(sizeof(uint8_t) * block_length/8);
+    memset(test_input[j], 0, sizeof(uint8_t) * block_length / 8);
+    channel_input[j] = (uint8_t *)malloc16(sizeof(uint8_t) * 68*384);
+    memset(channel_input[j], 0, sizeof(uint8_t) * 68 * 384);
+    channel_input_optim[j] = (uint8_t *)malloc16(sizeof(uint8_t) * 68*384);
+    memset(channel_input_optim[j], 0, sizeof(uint8_t) * 68 * 384);
   }
 
   reset_meas(&time);
@@ -179,7 +180,7 @@ int test_ldpc(short max_iterations,
 
   for (int j=0;j<MAX_NUM_DLSCH_SEGMENTS;j++) {
     for (int i=0; i<block_length/8; i++) {
-      test_input[j][i]=(unsigned char) rand();
+      test_input[j][i]=(uint8_t) rand();
       //test_input[j][i]=j%256;
       //test_input[j][i]=252;
     }
@@ -265,7 +266,7 @@ int test_ldpc(short max_iterations,
   removed_bit=(nrows-no_punctured_columns-2) * Zc+block_length-(int)(block_length/((float)nom_rate/(float)denom_rate));
   encoder_implemparams_t impp=INIT0_LDPCIMPLEMPARAMS;
 
-  impp.gen_code=1;
+  impp.gen_code = 2;
   if (ntrials==0)
     encoder_orig(test_input,channel_input, Zc, BG, block_length, BG, &impp);
   impp.gen_code=0;
@@ -327,21 +328,21 @@ int test_ldpc(short max_iterations,
             modulated_input[j][i]=-1.0;///sqrt(2);
 
           ///channel_output[i] = modulated_input[i] + gaussdouble(0.0,1.0) * 1/sqrt(2*SNR);
-          //channel_output_fixed[i] = (char) ((channel_output[i]*128)<0?(channel_output[i]*128-0.5):(channel_output[i]*128+0.5)); //fixed point 9-7
+          //channel_output_fixed[i] = (int8_t) ((channel_output[i]*128)<0?(channel_output[i]*128-0.5):(channel_output[i]*128+0.5)); //fixed point 9-7
           //printf("llr[%d]=%d\n",i,channel_output_fixed[i]);
 
-          //channel_output_fixed[i] = (char)quantize(sigma/4.0,(2.0*modulated_input[i]) - 1.0 + sigma*gaussdouble(0.0,1.0),qbits);
-          channel_output_fixed[j][i] = (char)quantize(sigma/4.0/4.0,modulated_input[j][i] + sigma*gaussdouble(0.0,1.0),qbits);
-          //channel_output_fixed[i] = (char)quantize8bit(sigma/4.0,(2.0*modulated_input[i]) - 1.0 + sigma*gaussdouble(0.0,1.0));
+          //channel_output_fixed[i] = (int8_t)quantize(sigma/4.0,(2.0*modulated_input[i]) - 1.0 + sigma*gaussdouble(0.0,1.0),qbits);
+          channel_output_fixed[j][i] = (int8_t)quantize(sigma/4.0/4.0,modulated_input[j][i] + sigma*gaussdouble(0.0,1.0),qbits);
+          //channel_output_fixed[i] = (int8_t)quantize8bit(sigma/4.0,(2.0*modulated_input[i]) - 1.0 + sigma*gaussdouble(0.0,1.0));
           //printf("llr[%d]=%d\n",i,channel_output_fixed[i]);
           //printf("channel_output_fixed[%d]: %d\n",i,channel_output_fixed[i]);
 
 
           //Uncoded BER
-          unsigned char channel_output_uncoded = channel_output_fixed[j][i]<0 ? 1 /* QPSK demod */ : 0;
+          uint8_t channel_output_uncoded = channel_output_fixed[j][i]<0 ? 1 /* QPSK demod */ : 0;
 
           if (channel_output_uncoded != channel_input_optim[j][i-2*Zc])
-      *errors_bit_uncoded = (*errors_bit_uncoded) + 1;
+            *errors_bit_uncoded = (*errors_bit_uncoded) + 1;
 
         }
      
@@ -373,8 +374,8 @@ int test_ldpc(short max_iterations,
         }
         for (int i=0; i<block_length; i++)
         {
-          unsigned char estoutputbit = (estimated_output[j][i/8]&(1<<(i&7)))>>(i&7);
-          unsigned char inputbit = (test_input[j][i/8]&(1<<(i&7)))>>(i&7); // Further correct for multiple segments
+          uint8_t estoutputbit = (estimated_output[j][i/8]&(1<<(i&7)))>>(i&7);
+          uint8_t inputbit = (test_input[j][i/8]&(1<<(i&7)))>>(i&7); // Further correct for multiple segments
           if (estoutputbit != inputbit)
             *errors_bit = (*errors_bit) + 1;
         }
@@ -445,7 +446,7 @@ int main(int argc, char *argv[])
   int nom_rate=1;
   int denom_rate=3;
   double SNR0=-2.0,SNR,SNR_lin;
-  unsigned char qbits=8;
+  uint8_t qbits=8;
   unsigned int decoded_errors[10000]; // initiate the size of matrix equivalent to size of SNR
   int c,i=0, i1 = 0;
 
diff --git a/openair1/PHY/CODING/TESTBENCH/ltetest.c b/openair1/PHY/CODING/TESTBENCH/ltetest.c
index 56053f7784d93d9aef7a00bee5feaf748321c9a2..45abec169700ad094a490caeaebd3c8a332d633d 100644
--- a/openair1/PHY/CODING/TESTBENCH/ltetest.c
+++ b/openair1/PHY/CODING/TESTBENCH/ltetest.c
@@ -116,7 +116,7 @@ void lte_param_init(unsigned char N_tx, unsigned char N_rx,unsigned char transmi
 }
 
 /*
-void print_shorts(char *s,__m128i *x) {
+void print_shorts(char *s,simde__m128i *x) {
 
   short *tempb = (short *)x;
 
diff --git a/openair1/PHY/CODING/crc.h b/openair1/PHY/CODING/crc.h
index bca966d62bd1eb55db679b4a43f0bf64c6f07f34..b65eacf0bb0a9497f894155d4f725fdefa72122c 100644
--- a/openair1/PHY/CODING/crc.h
+++ b/openair1/PHY/CODING/crc.h
@@ -39,9 +39,6 @@
 
 #ifndef __CRC_H__
 #define __CRC_H__
-
-#include <x86intrin.h>
-
 #include "crcext.h"
 #include "types.h"
 #include "PHY/sse_intrin.h"
@@ -305,14 +302,20 @@ uint32_t crc32_calc_slice4(const uint8_t *data,
  * @return New 16 byte folded data
  */
 __forceinline
-__m128i crc32_folding_round(const __m128i data_block,
-                            const __m128i k1_k2,
-                            const __m128i fold)
+simde__m128i crc32_folding_round(const simde__m128i data_block,
+                                 const simde__m128i k1_k2,
+                                 const simde__m128i fold)
 {
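+        /* One folding round: carry-less multiply the two 64-bit halves of
+           \a fold by the k1/k2 constants (the imm8 values select the halves)
+           and XOR the products with the incoming data block. */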
-        __m128i tmp = _mm_clmulepi64_si128(fold, k1_k2, 0x11);
+#ifdef __x86_64__
+        simde__m128i tmp = _mm_clmulepi64_si128(fold, k1_k2, 0x11);
+
+        return simde_mm_xor_si128(_mm_clmulepi64_si128(fold, k1_k2, 0x00),
+                                  simde_mm_xor_si128(data_block, tmp));
+#else
+        simde__m128i tmp = simde_mm_clmulepi64_si128(fold, k1_k2, 0x11);
 
-        return _mm_xor_si128(_mm_clmulepi64_si128(fold, k1_k2, 0x00),
-                             _mm_xor_si128(data_block, tmp));
+        return simde_mm_xor_si128(simde_mm_clmulepi64_si128(fold, k1_k2, 0x00),
+                             simde_mm_xor_si128(data_block, tmp));
+#endif
 }
 
 /**
@@ -324,17 +327,23 @@ __m128i crc32_folding_round(const __m128i data_block,
  * @return data reduced to 64 bits
  */
 __forceinline
-__m128i crc32_reduce_128_to_64(__m128i data128, const __m128i k3_q)
+simde__m128i crc32_reduce_128_to_64(simde__m128i data128, const simde__m128i k3_q)
 {
-        __m128i tmp;
+        simde__m128i tmp;
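+        /* Two k3 multiply/XOR rounds fold the upper half down; the final
+           slli/srli pair clears the top 8 bytes, leaving 64 bits of data. */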
 
-        tmp = _mm_xor_si128(_mm_clmulepi64_si128(data128, k3_q, 0x01 /* k3 */),
+#ifdef __x86_64__
+        tmp = simde_mm_xor_si128(_mm_clmulepi64_si128(data128, k3_q, 0x01 /* k3 */),
                             data128);
 
-        data128 = _mm_xor_si128(_mm_clmulepi64_si128(tmp, k3_q, 0x01 /* k3 */),
+        data128 = simde_mm_xor_si128(_mm_clmulepi64_si128(tmp, k3_q, 0x01 /* k3 */),
                                 data128);
+#else
+        tmp = simde_mm_xor_si128(simde_mm_clmulepi64_si128(data128, k3_q, 0x01 /* k3 */), data128);
+
+        data128 = simde_mm_xor_si128(simde_mm_clmulepi64_si128(tmp, k3_q, 0x01 /* k3 */), data128);
 
-        return _mm_srli_si128(_mm_slli_si128(data128, 8), 8);
+#endif
+        return simde_mm_srli_si128(simde_mm_slli_si128(data128, 8), 8);
 }
 
 /**
@@ -348,15 +357,22 @@ __m128i crc32_reduce_128_to_64(__m128i data128, const __m128i k3_q)
  */
 __forceinline
 uint32_t
-crc32_reduce_64_to_32(__m128i fold, const __m128i k3_q, const __m128i p_res)
+crc32_reduce_64_to_32(simde__m128i fold, const simde__m128i k3_q, const simde__m128i p_res)
 {
-        __m128i temp;
-
-        temp = _mm_clmulepi64_si128(_mm_srli_si128(fold, 4),
+        simde__m128i temp;
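+        /* Barrett-style reduction: a carry-less multiply by Q estimates the
+           quotient, a multiply by P removes it; the CRC lands in dword 0. */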
+#ifdef __x86_64__
+        temp = _mm_clmulepi64_si128(simde_mm_srli_si128(fold, 4),
                                     k3_q, 0x10 /* Q */);
-        temp = _mm_srli_si128(_mm_xor_si128(temp, fold), 4);
+        temp = simde_mm_srli_si128(simde_mm_xor_si128(temp, fold), 4);
         temp = _mm_clmulepi64_si128(temp, p_res, 0 /* P */);
-        return _mm_extract_epi32(_mm_xor_si128(temp, fold), 0);
+
+#else
+        temp = simde_mm_clmulepi64_si128(simde_mm_srli_si128(fold, 4),
+                                    k3_q, 0x10 /* Q */);
+        temp = simde_mm_srli_si128(simde_mm_xor_si128(temp, fold), 4);
+        temp = simde_mm_clmulepi64_si128(temp, p_res, 0 /* P */);
+#endif
+        return simde_mm_extract_epi32(simde_mm_xor_si128(temp, fold), 0);
 }
 
 /**
@@ -379,7 +395,7 @@ crc32_calc_pclmulqdq(const uint8_t *data,
                      uint32_t data_len, uint32_t crc,
                      const struct crc_pclmulqdq_ctx *params)
 {
-        __m128i temp, fold, k, swap;
+        simde__m128i temp, fold, k, swap;
         uint32_t n;
 
         if (unlikely(data == NULL || data_len == 0 || params == NULL))
@@ -405,7 +421,7 @@ crc32_calc_pclmulqdq(const uint8_t *data,
          * Load first 16 data bytes in \a fold and
          * set \a swap BE<->LE 16 byte conversion variable
          */
-        fold = _mm_loadu_si128((__m128i *)data);
+        fold = simde_mm_loadu_si128((simde__m128i *)data);
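+        /* \a swap holds byte indices 15..0, so shuffling with it reverses
+           all 16 bytes, i.e. the BE<->LE conversion described above. */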
         swap = crc_xmm_be_le_swap128;
 
         /**
@@ -420,20 +436,20 @@ crc32_calc_pclmulqdq(const uint8_t *data,
                  * - adjust data block
                  * - 4 least significant bytes need to be zero
                  */
-                fold = _mm_shuffle_epi8(fold, swap);
-                fold = _mm_slli_si128(xmm_shift_right(fold, 20 - data_len), 4);
+                fold = simde_mm_shuffle_epi8(fold, swap);
+                fold = simde_mm_slli_si128(xmm_shift_right(fold, 20 - data_len), 4);
 
                 /**
                  * Apply CRC init value
                  */
-                temp = _mm_insert_epi32(_mm_setzero_si128(), bswap4(crc), 0);
+                temp = simde_mm_insert_epi32(simde_mm_setzero_si128(), bswap4(crc), 0);
                 temp = xmm_shift_left(temp, data_len - 4);
-                fold = _mm_xor_si128(fold, temp);
+                fold = simde_mm_xor_si128(fold, temp);
         } else {
                 /**
                  * There are 2x16 data blocks or more
                  */
-                __m128i next_data;
+                simde__m128i next_data;
 
                 /**
                  * n = number of bytes required to align \a data_len
@@ -445,10 +461,10 @@ crc32_calc_pclmulqdq(const uint8_t *data,
                  * Apply CRC initial value and
                  * get \a fold to BE format
                  */
-                fold = _mm_xor_si128(fold,
-                                     _mm_insert_epi32(_mm_setzero_si128(),
+                fold = simde_mm_xor_si128(fold,
+                                     simde_mm_insert_epi32(simde_mm_setzero_si128(),
                                                       crc, 0));
-                fold = _mm_shuffle_epi8(fold, swap);
+                fold = simde_mm_shuffle_epi8(fold, swap);
 
                 /**
                  * Load next 16 bytes of data and
@@ -456,9 +472,9 @@ crc32_calc_pclmulqdq(const uint8_t *data,
                  *
                  * CONCAT(fold,next_data) >> (n*8)
                  */
-                next_data = _mm_loadu_si128((__m128i *)&data[16]);
-                next_data = _mm_shuffle_epi8(next_data, swap);
-                next_data = _mm_or_si128(xmm_shift_right(next_data, n),
+                next_data = simde_mm_loadu_si128((simde__m128i *)&data[16]);
+                next_data = simde_mm_shuffle_epi8(next_data, swap);
+                next_data = simde_mm_or_si128(xmm_shift_right(next_data, n),
                                          xmm_shift_left(fold, 16 - n));
                 fold = xmm_shift_right(fold, n);
 
@@ -467,12 +483,12 @@ crc32_calc_pclmulqdq(const uint8_t *data,
                          * In such unlikely case clear 4 least significant bytes
                          */
                         next_data =
-                                _mm_slli_si128(_mm_srli_si128(next_data, 4), 4);
+                                simde_mm_slli_si128(simde_mm_srli_si128(next_data, 4), 4);
 
                 /**
                  * Do the initial folding round on 2 first 16 byte chunks
                  */
-                k = _mm_load_si128((__m128i *)(&params->k1));
+                k = simde_mm_load_si128((simde__m128i *)(&params->k1));
                 fold = crc32_folding_round(next_data, k, fold);
 
                 if (likely(data_len > 32)) {
@@ -480,7 +496,7 @@ crc32_calc_pclmulqdq(const uint8_t *data,
                          * \a data_block needs to be at least 48 bytes long
                          * in order to get here
                          */
-                        __m128i new_data;
+                        simde__m128i new_data;
 
                         /**
                          * Main folding loop
@@ -493,8 +509,8 @@ crc32_calc_pclmulqdq(const uint8_t *data,
                          * - the last 16 bytes is processed separately
                          */
                         for (n = 16 + 16 - n; n < (data_len - 16); n += 16) {
-                                new_data = _mm_loadu_si128((__m128i *)&data[n]);
-                                new_data = _mm_shuffle_epi8(new_data, swap);
+                                new_data = simde_mm_loadu_si128((simde__m128i *)&data[n]);
+                                new_data = simde_mm_shuffle_epi8(new_data, swap);
                                 fold = crc32_folding_round(new_data, k, fold);
                         }
 
@@ -504,9 +520,9 @@ crc32_calc_pclmulqdq(const uint8_t *data,
                          * Read from offset -4 is to avoid one
                          * shift right operation.
                          */
-                        new_data = _mm_loadu_si128((__m128i *)&data[n - 4]);
-                        new_data = _mm_shuffle_epi8(new_data, swap);
-                        new_data = _mm_slli_si128(new_data, 4);
+                        new_data = simde_mm_loadu_si128((simde__m128i *)&data[n - 4]);
+                        new_data = simde_mm_shuffle_epi8(new_data, swap);
+                        new_data = simde_mm_slli_si128(new_data, 4);
                         fold = crc32_folding_round(new_data, k, fold);
                 } /* if (data_len > 32) */
         }
@@ -520,14 +536,14 @@ crc32_calc_pclmulqdq(const uint8_t *data,
         /**
          * REDUCTION 128 -> 64
          */
-        k = _mm_load_si128((__m128i *)(&params->k3));
+        k = simde_mm_load_si128((simde__m128i *)(&params->k3));
         fold = crc32_reduce_128_to_64(fold, k);
 
         /**
          * REDUCTION 64 -> 32
          */
         n = crc32_reduce_64_to_32(fold, k,
-                                  _mm_load_si128((__m128i *)(&params->p)));
+                                  simde_mm_load_si128((simde__m128i *)(&params->p)));
 
 #ifdef __KERNEL__
         /**
diff --git a/openair1/PHY/CODING/crc_byte.c b/openair1/PHY/CODING/crc_byte.c
index 8248688b77e0f165c5d8435c7d6743dff0beeb11..dd5712519a831e51f3869f7322bf573827152963 100644
--- a/openair1/PHY/CODING/crc_byte.c
+++ b/openair1/PHY/CODING/crc_byte.c
@@ -30,17 +30,15 @@
    Modified in June, 2001, to include  the length non multiple of 8
 */
 
-#ifndef __SSE4_1__
+#if !defined(__SSE4_1__) && !defined(__aarch64__)
 #define USE_INTEL_CRC 0
 #else
-#define USE_INTEL_CRC __SSE4_1__
+#define USE_INTEL_CRC 1
+#include "crc.h"
 #endif
 
 #include "coding_defs.h"
 #include "assertions.h"
-#if USE_INTEL_CRC
-#include "crc.h"
-#endif
 /*ref 36-212 v8.6.0 , pp 8-9 */
 /* the highest degree is set by default */
 
@@ -103,14 +101,14 @@ static uint32_t crc6Table[256];
 
 #if USE_INTEL_CRC
 static const struct crc_pclmulqdq_ctx lte_crc24a_pclmulqdq __attribute__((aligned(16))) = {
-    0x64e4d700, /**< k1 */
-    0x2c8c9d00, /**< k2 */
-    0xd9fe8c00, /**< k3 */
-    0xf845fe24, /**< q */
-    0x864cfb00, /**< p */
-    0ULL /**< res */
+        0x64e4d700,     /**< k1 */
+        0x2c8c9d00,     /**< k2 */
+        0xd9fe8c00,     /**< k3 */
+        0xf845fe24,     /**< q */
+        0x864cfb00,     /**< p */
+        0ULL            /**< res */
 };
-__m128i crc_xmm_be_le_swap128;
+simde__m128i crc_xmm_be_le_swap128;
 
 const uint8_t crc_xmm_shift_tab[48]
     __attribute__((aligned(16))) = {0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
@@ -133,9 +131,9 @@ void crcTableInit (void)
     crc8Table[c] = crcbit(&c, 1, poly8) >> 24;
     crc6Table[c] = crcbit(&c, 1, poly6) >> 24;
   } while (++c);
-#if USE_INTEL_CRC
-    crc_xmm_be_le_swap128 = _mm_setr_epi32(0x0c0d0e0f, 0x08090a0b,
-					   0x04050607, 0x00010203);
+#if defined(__SSE4_1__) || defined(__aarch64__)
+    crc_xmm_be_le_swap128 = simde_mm_setr_epi32(0x0c0d0e0f, 0x08090a0b,
+                                                0x04050607, 0x00010203);
 
 #endif
 }
@@ -164,7 +162,7 @@ uint32_t crc24a(unsigned char* inptr, int bitlen)
     crc = (crc << resbit) ^ crc24aTable[((*inptr) >> (8 - resbit)) ^ (crc >> (32 - resbit))];
   return crc;
   }
-  #if USE_INTEL_CRC
+  #if defined(__SSE4_1__) || defined(__aarch64__)
   else {
   return crc32_calc_pclmulqdq(inptr, octetlen, 0,
                               &lte_crc24a_pclmulqdq);
diff --git a/openair1/PHY/CODING/crcext.h b/openair1/PHY/CODING/crcext.h
index f441f25e50965deaedb846f47db32ed809395a0f..3e8b11f799a2d1e55ee70fb20a020aeeb09c17d6 100644
--- a/openair1/PHY/CODING/crcext.h
+++ b/openair1/PHY/CODING/crcext.h
@@ -33,7 +33,7 @@
 #ifndef __CRCEXT_H__
 #define __CRCEXT_H__
 
-#include <x86intrin.h>
+#include "PHY/sse_intrin.h"
 #include "types.h"
 /**
  * Flag indicating availability of PCLMULQDQ instruction
@@ -45,7 +45,7 @@ extern int pclmulqdq_available;
  * Flag indicating availability of PCLMULQDQ instruction
  * Only valid after running CRCInit() function.
  */
-extern __m128i crc_xmm_be_le_swap128;
+extern simde__m128i crc_xmm_be_le_swap128;
 extern const uint8_t crc_xmm_shift_tab[48];
 
 /**
@@ -57,11 +57,11 @@ extern const uint8_t crc_xmm_shift_tab[48];
  * @return \a reg >> (\a num * 8)
  */
 __forceinline
-__m128i xmm_shift_right(__m128i reg, const unsigned int num)
+simde__m128i xmm_shift_right(simde__m128i reg, const unsigned int num)
 {
-        const __m128i *p = (const __m128i *)(crc_xmm_shift_tab + 16 + num);
+        const simde__m128i *p = (const simde__m128i *)(crc_xmm_shift_tab + 16 + num);
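+        // The 0xff entries of crc_xmm_shift_tab zero the shifted-out lanes in
+        // the shuffle below, giving a byte-granular right shift of \a reg.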
 
-        return _mm_shuffle_epi8(reg, _mm_loadu_si128(p));
+        return simde_mm_shuffle_epi8(reg, simde_mm_loadu_si128(p));
 }
 
 /**
@@ -73,11 +73,11 @@ __m128i xmm_shift_right(__m128i reg, const unsigned int num)
  * @return \a reg << (\a num * 8)
  */
 __forceinline
-__m128i xmm_shift_left(__m128i reg, const unsigned int num)
+simde__m128i xmm_shift_left(simde__m128i reg, const unsigned int num)
 {
-        const __m128i *p = (const __m128i *)(crc_xmm_shift_tab + 16 - num);
+        const simde__m128i *p = (const simde__m128i *)(crc_xmm_shift_tab + 16 - num);
 
-        return _mm_shuffle_epi8(reg, _mm_loadu_si128(p));
+        return simde_mm_shuffle_epi8(reg, simde_mm_loadu_si128(p));
 }
 
 /**
diff --git a/openair1/PHY/CODING/nrLDPC_decoder/nrLDPC_bnProc.h b/openair1/PHY/CODING/nrLDPC_decoder/nrLDPC_bnProc.h
index 659144bce2fed2be309e1e43696a6963985647f4..f31e2f46b94244da8670191ed2557db51878ca26 100644
--- a/openair1/PHY/CODING/nrLDPC_decoder/nrLDPC_bnProc.h
+++ b/openair1/PHY/CODING/nrLDPC_decoder/nrLDPC_bnProc.h
@@ -43,1433 +43,91 @@ static inline void nrLDPC_bnProcPc(t_nrLDPC_lut* p_lut, int8_t* bnProcBuf, int8_
     const uint32_t* lut_startAddrBnGroups = p_lut->startAddrBnGroups;
     const uint16_t* lut_startAddrBnGroupsLlr = p_lut->startAddrBnGroupsLlr;
 
-    __m128i* p_bnProcBuf;
-    __m256i* p_bnProcBufRes;
-    __m128i* p_llrProcBuf;
-    __m256i* p_llrProcBuf256;
-    __m256i* p_llrRes;
-
-    // Number of BNs in Groups
-    uint32_t M;
-    //uint32_t M32rem;
-    uint32_t i,j;
-    uint32_t k;
-    // Offset to each bit within a group in terms of 32 Byte
-    uint32_t cnOffsetInGroup;
-    uint8_t idxBnGroup = 0;
-
-    __m256i ymm0, ymm1, ymmRes0, ymmRes1;
-
-    // =====================================================================
-    // Process group with 1 CN
-
-    // There is always a BN group with 1 CN
-    // Number of groups of 32 BNs for parallel processing
-    M = (lut_numBnInBnGroups[0]*Z + 31)>>5;
-
-    p_bnProcBuf     = (__m128i*) &bnProcBuf    [lut_startAddrBnGroups   [idxBnGroup]];
-    p_bnProcBufRes  = (__m256i*) &bnProcBufRes [lut_startAddrBnGroups   [idxBnGroup]];
-    p_llrProcBuf    = (__m128i*) &llrProcBuf   [lut_startAddrBnGroupsLlr[idxBnGroup]];
-    p_llrProcBuf256 = (__m256i*) &llrProcBuf   [lut_startAddrBnGroupsLlr[idxBnGroup]];
-    p_llrRes        = (__m256i*) &llrRes       [lut_startAddrBnGroupsLlr[idxBnGroup]];
-
-    // Loop over BNs
-    for (i=0,j=0; i<M; i++,j+=2)
-    {
-        // Store results in bnProcBufRes of first CN for further processing for next iteration
-        // In case parity check fails
-        p_bnProcBufRes[i] = p_llrProcBuf256[i];
-
-        // First 16 LLRs of first CN
-        ymm0 = simde_mm256_cvtepi8_epi16(p_bnProcBuf [j]);
-        ymm1 = simde_mm256_cvtepi8_epi16(p_llrProcBuf[j]);
-
-        ymmRes0 = simde_mm256_adds_epi16(ymm0, ymm1);
-
-        // Second 16 LLRs of first CN
-        ymm0 = simde_mm256_cvtepi8_epi16(p_bnProcBuf [j+1]);
-        ymm1 = simde_mm256_cvtepi8_epi16(p_llrProcBuf[j+1]);
-
-        ymmRes1 = simde_mm256_adds_epi16(ymm0, ymm1);
-
-        // Pack results back to epi8
-        ymm0 = simde_mm256_packs_epi16(ymmRes0, ymmRes1);
-        // ymm0     = [ymmRes1[255:128] ymmRes0[255:128] ymmRes1[127:0] ymmRes0[127:0]]
-        // p_llrRes = [ymmRes1[255:128] ymmRes1[127:0] ymmRes0[255:128] ymmRes0[127:0]]
-        *p_llrRes = simde_mm256_permute4x64_epi64(ymm0, 0xD8);
-
-        // Next result
-        p_llrRes++;
-    }
-
-    // =====================================================================
-    // Process group with 2 CNs
-
-    if (lut_numBnInBnGroups[1] > 0)
-    {
-        // If elements in group move to next address
-        idxBnGroup++;
-
-        // Number of groups of 32 BNs for parallel processing
-        M = (lut_numBnInBnGroups[1]*Z + 31)>>5;
-
-        // Set the offset to each CN within a group in terms of 16 Byte
-        cnOffsetInGroup = (lut_numBnInBnGroups[1]*NR_LDPC_ZMAX)>>4;
-
-        // Set pointers to start of group 2
-        p_bnProcBuf  = (__m128i*) &bnProcBuf  [lut_startAddrBnGroups   [idxBnGroup]];
-        p_llrProcBuf = (__m128i*) &llrProcBuf [lut_startAddrBnGroupsLlr[idxBnGroup]];
-        p_llrRes     = (__m256i*) &llrRes     [lut_startAddrBnGroupsLlr[idxBnGroup]];
-
-        // Loop over BNs
-        for (i=0,j=0; i<M; i++,j+=2)
-        {
-            // First 16 LLRs of first CN
-            ymmRes0 = simde_mm256_cvtepi8_epi16(p_bnProcBuf[j]);
-            ymmRes1 = simde_mm256_cvtepi8_epi16(p_bnProcBuf[j+1]);
-
-            // Loop over CNs
-            for (k=1; k<2; k++)
-            {
-                ymm0 = simde_mm256_cvtepi8_epi16(p_bnProcBuf[k*cnOffsetInGroup + j]);
-                ymmRes0 = simde_mm256_adds_epi16(ymmRes0, ymm0);
-
-                ymm1 = simde_mm256_cvtepi8_epi16(p_bnProcBuf[k*cnOffsetInGroup + j+1]);
-                ymmRes1 = simde_mm256_adds_epi16(ymmRes1, ymm1);
-            }
-
-            // Add LLR from receiver input
-            ymm0    = simde_mm256_cvtepi8_epi16(p_llrProcBuf[j]);
-            ymmRes0 = simde_mm256_adds_epi16(ymmRes0, ymm0);
-
-            ymm1    = simde_mm256_cvtepi8_epi16(p_llrProcBuf[j+1]);
-            ymmRes1 = simde_mm256_adds_epi16(ymmRes1, ymm1);
-
-            // Pack results back to epi8
-            ymm0 = simde_mm256_packs_epi16(ymmRes0, ymmRes1);
-            // ymm0     = [ymmRes1[255:128] ymmRes0[255:128] ymmRes1[127:0] ymmRes0[127:0]]
-            // p_llrRes = [ymmRes1[255:128] ymmRes1[127:0] ymmRes0[255:128] ymmRes0[127:0]]
-            *p_llrRes = simde_mm256_permute4x64_epi64(ymm0, 0xD8);
-
-            // Next result
-            p_llrRes++;
-        }
-    }
-
-    // =====================================================================
-    // Process group with 3 CNs
-
-    if (lut_numBnInBnGroups[2] > 0)
-    {
-        // If elements in group move to next address
-        idxBnGroup++;
-
-        // Number of groups of 32 BNs for parallel processing
-        M = (lut_numBnInBnGroups[2]*Z + 31)>>5;
-
-        // Set the offset to each CN within a group in terms of 16 Byte
-        cnOffsetInGroup = (lut_numBnInBnGroups[2]*NR_LDPC_ZMAX)>>4;
-
-        // Set pointers to start of group 3
-        p_bnProcBuf  = (__m128i*) &bnProcBuf  [lut_startAddrBnGroups   [idxBnGroup]];
-        p_llrProcBuf = (__m128i*) &llrProcBuf [lut_startAddrBnGroupsLlr[idxBnGroup]];
-        p_llrRes     = (__m256i*) &llrRes     [lut_startAddrBnGroupsLlr[idxBnGroup]];
-
-        // Loop over BNs
-        for (i=0,j=0; i<M; i++,j+=2)
-        {
-            // First 16 LLRs of first CN
-            ymmRes0 = simde_mm256_cvtepi8_epi16(p_bnProcBuf[j]);
-            ymmRes1 = simde_mm256_cvtepi8_epi16(p_bnProcBuf[j+1]);
-
-            // Loop over CNs
-            for (k=1; k<3; k++)
-            {
-                ymm0 = simde_mm256_cvtepi8_epi16(p_bnProcBuf[k*cnOffsetInGroup + j]);
-                ymmRes0 = simde_mm256_adds_epi16(ymmRes0, ymm0);
-
-                ymm1 = simde_mm256_cvtepi8_epi16(p_bnProcBuf[k*cnOffsetInGroup + j+1]);
-                ymmRes1 = simde_mm256_adds_epi16(ymmRes1, ymm1);
-            }
-
-            // Add LLR from receiver input
-            ymm0    = simde_mm256_cvtepi8_epi16(p_llrProcBuf[j]);
-            ymmRes0 = simde_mm256_adds_epi16(ymmRes0, ymm0);
-
-            ymm1    = simde_mm256_cvtepi8_epi16(p_llrProcBuf[j+1]);
-            ymmRes1 = simde_mm256_adds_epi16(ymmRes1, ymm1);
-
-            // Pack results back to epi8
-            ymm0 = simde_mm256_packs_epi16(ymmRes0, ymmRes1);
-            // ymm0     = [ymmRes1[255:128] ymmRes0[255:128] ymmRes1[127:0] ymmRes0[127:0]]
-            // p_llrRes = [ymmRes1[255:128] ymmRes1[127:0] ymmRes0[255:128] ymmRes0[127:0]]
-            *p_llrRes = simde_mm256_permute4x64_epi64(ymm0, 0xD8);
-
-            // Next result
-            p_llrRes++;
-        }
-    }
-
-    // =====================================================================
-    // Process group with 4 CNs
-
-    if (lut_numBnInBnGroups[3] > 0)
-    {
-        // If elements in group move to next address
-        idxBnGroup++;
-
-        // Number of groups of 32 BNs for parallel processing
-        M = (lut_numBnInBnGroups[3]*Z + 31)>>5;
-
-        // Set the offset to each CN within a group in terms of 16 Byte
-        cnOffsetInGroup = (lut_numBnInBnGroups[3]*NR_LDPC_ZMAX)>>4;
-
-        // Set pointers to start of group 4
-        p_bnProcBuf  = (__m128i*) &bnProcBuf  [lut_startAddrBnGroups   [idxBnGroup]];
-        p_llrProcBuf = (__m128i*) &llrProcBuf [lut_startAddrBnGroupsLlr[idxBnGroup]];
-        p_llrRes     = (__m256i*) &llrRes     [lut_startAddrBnGroupsLlr[idxBnGroup]];
-
-        // Loop over BNs
-        for (i=0,j=0; i<M; i++,j+=2)
-        {
-            // First 16 LLRs of first CN
-            ymmRes0 = simde_mm256_cvtepi8_epi16(p_bnProcBuf[j]);
-            ymmRes1 = simde_mm256_cvtepi8_epi16(p_bnProcBuf[j+1]);
-
-            // Loop over CNs
-            for (k=1; k<4; k++)
-            {
-                ymm0 = simde_mm256_cvtepi8_epi16(p_bnProcBuf[k*cnOffsetInGroup + j]);
-                ymmRes0 = simde_mm256_adds_epi16(ymmRes0, ymm0);
-
-                ymm1 = simde_mm256_cvtepi8_epi16(p_bnProcBuf[k*cnOffsetInGroup + j+1]);
-                ymmRes1 = simde_mm256_adds_epi16(ymmRes1, ymm1);
-            }
-
-            // Add LLR from receiver input
-            ymm0    = simde_mm256_cvtepi8_epi16(p_llrProcBuf[j]);
-            ymmRes0 = simde_mm256_adds_epi16(ymmRes0, ymm0);
-
-            ymm1    = simde_mm256_cvtepi8_epi16(p_llrProcBuf[j+1]);
-            ymmRes1 = simde_mm256_adds_epi16(ymmRes1, ymm1);
-
-            // Pack results back to epi8
-            ymm0 = simde_mm256_packs_epi16(ymmRes0, ymmRes1);
-            // ymm0     = [ymmRes1[255:128] ymmRes0[255:128] ymmRes1[127:0] ymmRes0[127:0]]
-            // p_llrRes = [ymmRes1[255:128] ymmRes1[127:0] ymmRes0[255:128] ymmRes0[127:0]]
-            *p_llrRes = simde_mm256_permute4x64_epi64(ymm0, 0xD8);
-
-            // Next result
-            p_llrRes++;
-        }
-    }
-
-    // =====================================================================
-    // Process group with 5 CNs
-
-    if (lut_numBnInBnGroups[4] > 0)
-    {
-        // If elements in group move to next address
-        idxBnGroup++;
-
-        // Number of groups of 32 BNs for parallel processing
-        M = (lut_numBnInBnGroups[4]*Z + 31)>>5;
-
-        // Set the offset to each CN within a group in terms of 16 Byte
-        cnOffsetInGroup = (lut_numBnInBnGroups[4]*NR_LDPC_ZMAX)>>4;
-
-        // Set pointers to start of group 5
-        p_bnProcBuf  = (__m128i*) &bnProcBuf  [lut_startAddrBnGroups   [idxBnGroup]];
-        p_llrProcBuf = (__m128i*) &llrProcBuf [lut_startAddrBnGroupsLlr[idxBnGroup]];
-        p_llrRes     = (__m256i*) &llrRes     [lut_startAddrBnGroupsLlr[idxBnGroup]];
-
-        // Loop over BNs
-        for (i=0,j=0; i<M; i++,j+=2)
-        {
-            // First 16 LLRs of first CN
-            ymmRes0 = simde_mm256_cvtepi8_epi16(p_bnProcBuf[j]);
-            ymmRes1 = simde_mm256_cvtepi8_epi16(p_bnProcBuf[j+1]);
-
-            // Loop over CNs
-            for (k=1; k<5; k++)
-            {
-                ymm0 = simde_mm256_cvtepi8_epi16(p_bnProcBuf[k*cnOffsetInGroup + j]);
-                ymmRes0 = simde_mm256_adds_epi16(ymmRes0, ymm0);
-
-                ymm1 = simde_mm256_cvtepi8_epi16(p_bnProcBuf[k*cnOffsetInGroup + j+1]);
-                ymmRes1 = simde_mm256_adds_epi16(ymmRes1, ymm1);
-            }
-
-            // Add LLR from receiver input
-            ymm0    = simde_mm256_cvtepi8_epi16(p_llrProcBuf[j]);
-            ymmRes0 = simde_mm256_adds_epi16(ymmRes0, ymm0);
-
-            ymm1    = simde_mm256_cvtepi8_epi16(p_llrProcBuf[j+1]);
-            ymmRes1 = simde_mm256_adds_epi16(ymmRes1, ymm1);
-
-            // Pack results back to epi8
-            ymm0 = simde_mm256_packs_epi16(ymmRes0, ymmRes1);
-            // ymm0     = [ymmRes1[255:128] ymmRes0[255:128] ymmRes1[127:0] ymmRes0[127:0]]
-            // p_llrRes = [ymmRes1[255:128] ymmRes1[127:0] ymmRes0[255:128] ymmRes0[127:0]]
-            *p_llrRes = simde_mm256_permute4x64_epi64(ymm0, 0xD8);
-
-            // Next result
-            p_llrRes++;
-        }
-    }
-
-    // =====================================================================
-    // Process group with 6 CNs
-
-    if (lut_numBnInBnGroups[5] > 0)
-    {
-        // If elements in group move to next address
-        idxBnGroup++;
-
-        // Number of groups of 32 BNs for parallel processing
-        M = (lut_numBnInBnGroups[5]*Z + 31)>>5;
-
-        // Set the offset to each CN within a group in terms of 16 Byte
-        cnOffsetInGroup = (lut_numBnInBnGroups[5]*NR_LDPC_ZMAX)>>4;
-
-        // Set pointers to start of group 6
-        p_bnProcBuf  = (__m128i*) &bnProcBuf  [lut_startAddrBnGroups   [idxBnGroup]];
-        p_llrProcBuf = (__m128i*) &llrProcBuf [lut_startAddrBnGroupsLlr[idxBnGroup]];
-        p_llrRes     = (__m256i*) &llrRes     [lut_startAddrBnGroupsLlr[idxBnGroup]];
-
-        // Loop over BNs
-        for (i=0,j=0; i<M; i++,j+=2)
-        {
-            // First 16 LLRs of first CN
-            ymmRes0 = simde_mm256_cvtepi8_epi16(p_bnProcBuf[j]);
-            ymmRes1 = simde_mm256_cvtepi8_epi16(p_bnProcBuf[j+1]);
-
-            // Loop over CNs
-            for (k=1; k<6; k++)
-            {
-                ymm0 = simde_mm256_cvtepi8_epi16(p_bnProcBuf[k*cnOffsetInGroup + j]);
-                ymmRes0 = simde_mm256_adds_epi16(ymmRes0, ymm0);
-
-                ymm1 = simde_mm256_cvtepi8_epi16(p_bnProcBuf[k*cnOffsetInGroup + j+1]);
-                ymmRes1 = simde_mm256_adds_epi16(ymmRes1, ymm1);
-            }
-
-            // Add LLR from receiver input
-            ymm0    = simde_mm256_cvtepi8_epi16(p_llrProcBuf[j]);
-            ymmRes0 = simde_mm256_adds_epi16(ymmRes0, ymm0);
-
-            ymm1    = simde_mm256_cvtepi8_epi16(p_llrProcBuf[j+1]);
-            ymmRes1 = simde_mm256_adds_epi16(ymmRes1, ymm1);
-
-            // Pack results back to epi8
-            ymm0 = simde_mm256_packs_epi16(ymmRes0, ymmRes1);
-            // ymm0     = [ymmRes1[255:128] ymmRes0[255:128] ymmRes1[127:0] ymmRes0[127:0]]
-            // p_llrRes = [ymmRes1[255:128] ymmRes1[127:0] ymmRes0[255:128] ymmRes0[127:0]]
-            *p_llrRes = simde_mm256_permute4x64_epi64(ymm0, 0xD8);
-
-            // Next result
-            p_llrRes++;
-        }
-    }
-
-    // =====================================================================
-    // Process group with 7 CNs
-
-    if (lut_numBnInBnGroups[6] > 0)
-    {
-        // If elements in group move to next address
-        idxBnGroup++;
-
-        // Number of groups of 32 BNs for parallel processing
-        M = (lut_numBnInBnGroups[6]*Z + 31)>>5;
-
-        // Set the offset to each CN within a group in terms of 16 Byte
-        cnOffsetInGroup = (lut_numBnInBnGroups[6]*NR_LDPC_ZMAX)>>4;
-
-        // Set pointers to start of group 7
-        p_bnProcBuf  = (__m128i*) &bnProcBuf  [lut_startAddrBnGroups   [idxBnGroup]];
-        p_llrProcBuf = (__m128i*) &llrProcBuf [lut_startAddrBnGroupsLlr[idxBnGroup]];
-        p_llrRes     = (__m256i*) &llrRes     [lut_startAddrBnGroupsLlr[idxBnGroup]];
-
-        // Loop over BNs
-        for (i=0,j=0; i<M; i++,j+=2)
-        {
-            // First 16 LLRs of first CN
-            ymmRes0 = simde_mm256_cvtepi8_epi16(p_bnProcBuf[j]);
-            ymmRes1 = simde_mm256_cvtepi8_epi16(p_bnProcBuf[j+1]);
-
-            // Loop over CNs
-            for (k=1; k<7; k++)
-            {
-                ymm0 = simde_mm256_cvtepi8_epi16(p_bnProcBuf[k*cnOffsetInGroup + j]);
-                ymmRes0 = simde_mm256_adds_epi16(ymmRes0, ymm0);
-
-                ymm1 = simde_mm256_cvtepi8_epi16(p_bnProcBuf[k*cnOffsetInGroup + j+1]);
-                ymmRes1 = simde_mm256_adds_epi16(ymmRes1, ymm1);
-            }
-
-            // Add LLR from receiver input
-            ymm0    = simde_mm256_cvtepi8_epi16(p_llrProcBuf[j]);
-            ymmRes0 = simde_mm256_adds_epi16(ymmRes0, ymm0);
-
-            ymm1    = simde_mm256_cvtepi8_epi16(p_llrProcBuf[j+1]);
-            ymmRes1 = simde_mm256_adds_epi16(ymmRes1, ymm1);
-
-            // Pack results back to epi8
-            ymm0 = simde_mm256_packs_epi16(ymmRes0, ymmRes1);
-            // ymm0     = [ymmRes1[255:128] ymmRes0[255:128] ymmRes1[127:0] ymmRes0[127:0]]
-            // p_llrRes = [ymmRes1[255:128] ymmRes1[127:0] ymmRes0[255:128] ymmRes0[127:0]]
-            *p_llrRes = simde_mm256_permute4x64_epi64(ymm0, 0xD8);
-
-            // Next result
-            p_llrRes++;
-        }
-    }
-
-    // =====================================================================
-    // Process group with 8 CNs
-
-    if (lut_numBnInBnGroups[7] > 0)
-    {
-        // If elements in group move to next address
-        idxBnGroup++;
-
-        // Number of groups of 32 BNs for parallel processing
-        M = (lut_numBnInBnGroups[7]*Z + 31)>>5;
-
-        // Set the offset to each CN within a group in terms of 16 Byte
-        cnOffsetInGroup = (lut_numBnInBnGroups[7]*NR_LDPC_ZMAX)>>4;
-
-        // Set pointers to start of group 8
-        p_bnProcBuf  = (__m128i*) &bnProcBuf  [lut_startAddrBnGroups   [idxBnGroup]];
-        p_llrProcBuf = (__m128i*) &llrProcBuf [lut_startAddrBnGroupsLlr[idxBnGroup]];
-        p_llrRes     = (__m256i*) &llrRes     [lut_startAddrBnGroupsLlr[idxBnGroup]];
-
-        // Loop over BNs
-        for (i=0,j=0; i<M; i++,j+=2)
-        {
-            // First 16 LLRs of first CN
-            ymmRes0 = simde_mm256_cvtepi8_epi16(p_bnProcBuf[j]);
-            ymmRes1 = simde_mm256_cvtepi8_epi16(p_bnProcBuf[j+1]);
-
-            // Loop over CNs
-            for (k=1; k<8; k++)
-            {
-                ymm0 = simde_mm256_cvtepi8_epi16(p_bnProcBuf[k*cnOffsetInGroup + j]);
-                ymmRes0 = simde_mm256_adds_epi16(ymmRes0, ymm0);
-
-                ymm1 = simde_mm256_cvtepi8_epi16(p_bnProcBuf[k*cnOffsetInGroup + j+1]);
-                ymmRes1 = simde_mm256_adds_epi16(ymmRes1, ymm1);
-            }
-
-            // Add LLR from receiver input
-            ymm0    = simde_mm256_cvtepi8_epi16(p_llrProcBuf[j]);
-            ymmRes0 = simde_mm256_adds_epi16(ymmRes0, ymm0);
-
-            ymm1    = simde_mm256_cvtepi8_epi16(p_llrProcBuf[j+1]);
-            ymmRes1 = simde_mm256_adds_epi16(ymmRes1, ymm1);
-
-            // Pack results back to epi8
-            ymm0 = simde_mm256_packs_epi16(ymmRes0, ymmRes1);
-            // ymm0     = [ymmRes1[255:128] ymmRes0[255:128] ymmRes1[127:0] ymmRes0[127:0]]
-            // p_llrRes = [ymmRes1[255:128] ymmRes1[127:0] ymmRes0[255:128] ymmRes0[127:0]]
-            *p_llrRes = simde_mm256_permute4x64_epi64(ymm0, 0xD8);
-
-            // Next result
-            p_llrRes++;
-        }
-    }
-
-    // =====================================================================
-    // Process group with 9 CNs
-
-    if (lut_numBnInBnGroups[8] > 0)
-    {
-        // If elements in group move to next address
-        idxBnGroup++;
-
-        // Number of groups of 32 BNs for parallel processing
-        M = (lut_numBnInBnGroups[8]*Z + 31)>>5;
-
-        // Set the offset to each CN within a group in terms of 16 Byte
-        cnOffsetInGroup = (lut_numBnInBnGroups[8]*NR_LDPC_ZMAX)>>4;
-
-        // Set pointers to start of group 9
-        p_bnProcBuf  = (__m128i*) &bnProcBuf  [lut_startAddrBnGroups   [idxBnGroup]];
-        p_llrProcBuf = (__m128i*) &llrProcBuf [lut_startAddrBnGroupsLlr[idxBnGroup]];
-        p_llrRes     = (__m256i*) &llrRes     [lut_startAddrBnGroupsLlr[idxBnGroup]];
-
-        // Loop over BNs
-        for (i=0,j=0; i<M; i++,j+=2)
-        {
-            // First 16 LLRs of first CN
-            ymmRes0 = simde_mm256_cvtepi8_epi16(p_bnProcBuf[j]);
-            ymmRes1 = simde_mm256_cvtepi8_epi16(p_bnProcBuf[j+1]);
-
-            // Loop over CNs
-            for (k=1; k<9; k++)
-            {
-                ymm0 = simde_mm256_cvtepi8_epi16(p_bnProcBuf[k*cnOffsetInGroup + j]);
-                ymmRes0 = simde_mm256_adds_epi16(ymmRes0, ymm0);
-
-                ymm1 = simde_mm256_cvtepi8_epi16(p_bnProcBuf[k*cnOffsetInGroup + j+1]);
-                ymmRes1 = simde_mm256_adds_epi16(ymmRes1, ymm1);
-            }
-
-            // Add LLR from receiver input
-            ymm0    = simde_mm256_cvtepi8_epi16(p_llrProcBuf[j]);
-            ymmRes0 = simde_mm256_adds_epi16(ymmRes0, ymm0);
-
-            ymm1    = simde_mm256_cvtepi8_epi16(p_llrProcBuf[j+1]);
-            ymmRes1 = simde_mm256_adds_epi16(ymmRes1, ymm1);
-
-            // Pack results back to epi8
-            ymm0 = simde_mm256_packs_epi16(ymmRes0, ymmRes1);
-            // ymm0     = [ymmRes1[255:128] ymmRes0[255:128] ymmRes1[127:0] ymmRes0[127:0]]
-            // p_llrRes = [ymmRes1[255:128] ymmRes1[127:0] ymmRes0[255:128] ymmRes0[127:0]]
-            *p_llrRes = simde_mm256_permute4x64_epi64(ymm0, 0xD8);
-
-            // Next result
-            p_llrRes++;
-        }
-    }
-
-    // =====================================================================
-    // Process group with 10 CNs
-
-    if (lut_numBnInBnGroups[9] > 0)
-    {
-        // If elements in group move to next address
-        idxBnGroup++;
-
-        // Number of groups of 32 BNs for parallel processing
-        M = (lut_numBnInBnGroups[9]*Z + 31)>>5;
-
-        // Set the offset to each CN within a group in terms of 16 Byte
-        cnOffsetInGroup = (lut_numBnInBnGroups[9]*NR_LDPC_ZMAX)>>4;
-
-        // Set pointers to start of group 10
-        p_bnProcBuf  = (__m128i*) &bnProcBuf  [lut_startAddrBnGroups   [idxBnGroup]];
-        p_llrProcBuf = (__m128i*) &llrProcBuf [lut_startAddrBnGroupsLlr[idxBnGroup]];
-        p_llrRes     = (__m256i*) &llrRes     [lut_startAddrBnGroupsLlr[idxBnGroup]];
-
-        // Loop over BNs
-        for (i=0,j=0; i<M; i++,j+=2)
-        {
-            // First 16 LLRs of first CN
-            ymmRes0 = simde_mm256_cvtepi8_epi16(p_bnProcBuf[j]);
-            ymmRes1 = simde_mm256_cvtepi8_epi16(p_bnProcBuf[j+1]);
-
-            // Loop over CNs
-            for (k=1; k<10; k++)
-            {
-                ymm0 = simde_mm256_cvtepi8_epi16(p_bnProcBuf[k*cnOffsetInGroup + j]);
-                ymmRes0 = simde_mm256_adds_epi16(ymmRes0, ymm0);
-
-                ymm1 = simde_mm256_cvtepi8_epi16(p_bnProcBuf[k*cnOffsetInGroup + j+1]);
-                ymmRes1 = simde_mm256_adds_epi16(ymmRes1, ymm1);
-            }
-
-            // Add LLR from receiver input
-            ymm0    = simde_mm256_cvtepi8_epi16(p_llrProcBuf[j]);
-            ymmRes0 = simde_mm256_adds_epi16(ymmRes0, ymm0);
-
-            ymm1    = simde_mm256_cvtepi8_epi16(p_llrProcBuf[j+1]);
-            ymmRes1 = simde_mm256_adds_epi16(ymmRes1, ymm1);
-
-            // Pack results back to epi8
-            ymm0 = simde_mm256_packs_epi16(ymmRes0, ymmRes1);
-            // ymm0     = [ymmRes1[255:128] ymmRes0[255:128] ymmRes1[127:0] ymmRes0[127:0]]
-            // p_llrRes = [ymmRes1[255:128] ymmRes1[127:0] ymmRes0[255:128] ymmRes0[127:0]]
-            *p_llrRes = simde_mm256_permute4x64_epi64(ymm0, 0xD8);
-
-            // Next result
-            p_llrRes++;
-        }
-    }
-
-    // =====================================================================
-    // Process group with 11 CNs
-
-    if (lut_numBnInBnGroups[10] > 0)
-    {
-        // If elements in group move to next address
-        idxBnGroup++;
-
-        // Number of groups of 32 BNs for parallel processing
-        M = (lut_numBnInBnGroups[10]*Z + 31)>>5;
-
-        // Set the offset to each CN within a group in terms of 16 Byte
-        cnOffsetInGroup = (lut_numBnInBnGroups[10]*NR_LDPC_ZMAX)>>4;
-
-        // Set pointers to start of group 11
-        p_bnProcBuf  = (__m128i*) &bnProcBuf  [lut_startAddrBnGroups   [idxBnGroup]];
-        p_llrProcBuf = (__m128i*) &llrProcBuf [lut_startAddrBnGroupsLlr[idxBnGroup]];
-        p_llrRes     = (__m256i*) &llrRes     [lut_startAddrBnGroupsLlr[idxBnGroup]];
-
-        // Loop over BNs
-        for (i=0,j=0; i<M; i++,j+=2)
-        {
-            // First 16 LLRs of first CN
-            ymmRes0 = simde_mm256_cvtepi8_epi16(p_bnProcBuf[j]);
-            ymmRes1 = simde_mm256_cvtepi8_epi16(p_bnProcBuf[j+1]);
-
-            // Loop over CNs
-            for (k=1; k<11; k++)
-            {
-                ymm0 = simde_mm256_cvtepi8_epi16(p_bnProcBuf[k*cnOffsetInGroup + j]);
-                ymmRes0 = simde_mm256_adds_epi16(ymmRes0, ymm0);
-
-                ymm1 = simde_mm256_cvtepi8_epi16(p_bnProcBuf[k*cnOffsetInGroup + j+1]);
-                ymmRes1 = simde_mm256_adds_epi16(ymmRes1, ymm1);
-            }
-
-            // Add LLR from receiver input
-            ymm0    = simde_mm256_cvtepi8_epi16(p_llrProcBuf[j]);
-            ymmRes0 = simde_mm256_adds_epi16(ymmRes0, ymm0);
-
-            ymm1    = simde_mm256_cvtepi8_epi16(p_llrProcBuf[j+1]);
-            ymmRes1 = simde_mm256_adds_epi16(ymmRes1, ymm1);
-
-            // Pack results back to epi8
-            ymm0 = simde_mm256_packs_epi16(ymmRes0, ymmRes1);
-            // ymm0     = [ymmRes1[255:128] ymmRes0[255:128] ymmRes1[127:0] ymmRes0[127:0]]
-            // p_llrRes = [ymmRes1[255:128] ymmRes1[127:0] ymmRes0[255:128] ymmRes0[127:0]]
-            *p_llrRes = simde_mm256_permute4x64_epi64(ymm0, 0xD8);
-
-            // Next result
-            p_llrRes++;
-        }
-    }
-
-    // =====================================================================
-    // Process group with 12 CNs
-
-    if (lut_numBnInBnGroups[11] > 0)
-    {
-        // If elements in group move to next address
-        idxBnGroup++;
-
-        // Number of groups of 32 BNs for parallel processing
-        M = (lut_numBnInBnGroups[11]*Z + 31)>>5;
-
-        // Set the offset to each CN within a group in terms of 16 Byte
-        cnOffsetInGroup = (lut_numBnInBnGroups[11]*NR_LDPC_ZMAX)>>4;
-
-        // Set pointers to start of group 12
-        p_bnProcBuf  = (__m128i*) &bnProcBuf  [lut_startAddrBnGroups   [idxBnGroup]];
-        p_llrProcBuf = (__m128i*) &llrProcBuf [lut_startAddrBnGroupsLlr[idxBnGroup]];
-        p_llrRes     = (__m256i*) &llrRes     [lut_startAddrBnGroupsLlr[idxBnGroup]];
-
-        // Loop over BNs
-        for (i=0,j=0; i<M; i++,j+=2)
-        {
-            // First 16 LLRs of first CN
-            ymmRes0 = simde_mm256_cvtepi8_epi16(p_bnProcBuf[j]);
-            ymmRes1 = simde_mm256_cvtepi8_epi16(p_bnProcBuf[j+1]);
-
-            // Loop over CNs
-            for (k=1; k<12; k++)
-            {
-                ymm0 = simde_mm256_cvtepi8_epi16(p_bnProcBuf[k*cnOffsetInGroup + j]);
-                ymmRes0 = simde_mm256_adds_epi16(ymmRes0, ymm0);
-
-                ymm1 = simde_mm256_cvtepi8_epi16(p_bnProcBuf[k*cnOffsetInGroup + j+1]);
-                ymmRes1 = simde_mm256_adds_epi16(ymmRes1, ymm1);
-            }
-
-            // Add LLR from receiver input
-            ymm0    = simde_mm256_cvtepi8_epi16(p_llrProcBuf[j]);
-            ymmRes0 = simde_mm256_adds_epi16(ymmRes0, ymm0);
-
-            ymm1    = simde_mm256_cvtepi8_epi16(p_llrProcBuf[j+1]);
-            ymmRes1 = simde_mm256_adds_epi16(ymmRes1, ymm1);
-
-            // Pack results back to epi8
-            ymm0 = simde_mm256_packs_epi16(ymmRes0, ymmRes1);
-            // ymm0     = [ymmRes1[255:128] ymmRes0[255:128] ymmRes1[127:0] ymmRes0[127:0]]
-            // p_llrRes = [ymmRes1[255:128] ymmRes1[127:0] ymmRes0[255:128] ymmRes0[127:0]]
-            *p_llrRes = simde_mm256_permute4x64_epi64(ymm0, 0xD8);
-
-            // Next result
-            p_llrRes++;
-        }
-    }
-
-    // =====================================================================
-    // Process group with 13 CNs
-
-    if (lut_numBnInBnGroups[12] > 0)
-    {
-        // If elements in group move to next address
-        idxBnGroup++;
-
-        // Number of groups of 32 BNs for parallel processing
-        M = (lut_numBnInBnGroups[12]*Z + 31)>>5;
-
-        // Set the offset to each CN within a group in terms of 16 Byte
-        cnOffsetInGroup = (lut_numBnInBnGroups[12]*NR_LDPC_ZMAX)>>4;
-
-        // Set pointers to start of group 13
-        p_bnProcBuf  = (__m128i*) &bnProcBuf  [lut_startAddrBnGroups   [idxBnGroup]];
-        p_llrProcBuf = (__m128i*) &llrProcBuf [lut_startAddrBnGroupsLlr[idxBnGroup]];
-        p_llrRes     = (__m256i*) &llrRes     [lut_startAddrBnGroupsLlr[idxBnGroup]];
-
-        // Loop over BNs
-        for (i=0,j=0; i<M; i++,j+=2)
-        {
-            // First 16 LLRs of first CN
-            ymmRes0 = simde_mm256_cvtepi8_epi16(p_bnProcBuf[j]);
-            ymmRes1 = simde_mm256_cvtepi8_epi16(p_bnProcBuf[j+1]);
-
-            // Loop over CNs
-            for (k=1; k<13; k++)
-            {
-                ymm0 = simde_mm256_cvtepi8_epi16(p_bnProcBuf[k*cnOffsetInGroup + j]);
-                ymmRes0 = simde_mm256_adds_epi16(ymmRes0, ymm0);
-
-                ymm1 = simde_mm256_cvtepi8_epi16(p_bnProcBuf[k*cnOffsetInGroup + j+1]);
-                ymmRes1 = simde_mm256_adds_epi16(ymmRes1, ymm1);
-            }
-
-            // Add LLR from receiver input
-            ymm0    = simde_mm256_cvtepi8_epi16(p_llrProcBuf[j]);
-            ymmRes0 = simde_mm256_adds_epi16(ymmRes0, ymm0);
-
-            ymm1    = simde_mm256_cvtepi8_epi16(p_llrProcBuf[j+1]);
-            ymmRes1 = simde_mm256_adds_epi16(ymmRes1, ymm1);
-
-            // Pack results back to epi8
-            ymm0 = simde_mm256_packs_epi16(ymmRes0, ymmRes1);
-            // ymm0     = [ymmRes1[255:128] ymmRes0[255:128] ymmRes1[127:0] ymmRes0[127:0]]
-            // p_llrRes = [ymmRes1[255:128] ymmRes1[127:0] ymmRes0[255:128] ymmRes0[127:0]]
-            *p_llrRes = simde_mm256_permute4x64_epi64(ymm0, 0xD8);
-
-            // Next result
-            p_llrRes++;
-        }
-    }
-
-    // =====================================================================
-    // Process group with 14 CNs
-
-    if (lut_numBnInBnGroups[13] > 0)
-    {
-        // If elements in group move to next address
-        idxBnGroup++;
-
-        // Number of groups of 32 BNs for parallel processing
-        M = (lut_numBnInBnGroups[13]*Z + 31)>>5;
-
-        // Set the offset to each CN within a group in terms of 16 Byte
-        cnOffsetInGroup = (lut_numBnInBnGroups[13]*NR_LDPC_ZMAX)>>4;
-
-        // Set pointers to start of group 14
-        p_bnProcBuf  = (__m128i*) &bnProcBuf  [lut_startAddrBnGroups   [idxBnGroup]];
-        p_llrProcBuf = (__m128i*) &llrProcBuf [lut_startAddrBnGroupsLlr[idxBnGroup]];
-        p_llrRes     = (__m256i*) &llrRes     [lut_startAddrBnGroupsLlr[idxBnGroup]];
-
-        // Loop over BNs
-        for (i=0,j=0; i<M; i++,j+=2)
-        {
-            // First 16 LLRs of first CN
-            ymmRes0 = simde_mm256_cvtepi8_epi16(p_bnProcBuf[j]);
-            ymmRes1 = simde_mm256_cvtepi8_epi16(p_bnProcBuf[j+1]);
-
-            // Loop over CNs
-            for (k=1; k<14; k++)
-            {
-                ymm0 = simde_mm256_cvtepi8_epi16(p_bnProcBuf[k*cnOffsetInGroup + j]);
-                ymmRes0 = simde_mm256_adds_epi16(ymmRes0, ymm0);
-
-                ymm1 = simde_mm256_cvtepi8_epi16(p_bnProcBuf[k*cnOffsetInGroup + j+1]);
-                ymmRes1 = simde_mm256_adds_epi16(ymmRes1, ymm1);
-            }
-
-            // Add LLR from receiver input
-            ymm0    = simde_mm256_cvtepi8_epi16(p_llrProcBuf[j]);
-            ymmRes0 = simde_mm256_adds_epi16(ymmRes0, ymm0);
-
-            ymm1    = simde_mm256_cvtepi8_epi16(p_llrProcBuf[j+1]);
-            ymmRes1 = simde_mm256_adds_epi16(ymmRes1, ymm1);
-
-            // Pack results back to epi8
-            ymm0 = simde_mm256_packs_epi16(ymmRes0, ymmRes1);
-            // ymm0     = [ymmRes1[255:128] ymmRes0[255:128] ymmRes1[127:0] ymmRes0[127:0]]
-            // p_llrRes = [ymmRes1[255:128] ymmRes1[127:0] ymmRes0[255:128] ymmRes0[127:0]]
-            *p_llrRes = simde_mm256_permute4x64_epi64(ymm0, 0xD8);
-
-            // Next result
-            p_llrRes++;
-        }
-    }
-
-    // =====================================================================
-    // Process group with 15 CNs
-
-    if (lut_numBnInBnGroups[14] > 0)
-    {
-        // If elements in group move to next address
-        idxBnGroup++;
-
-        // Number of groups of 32 BNs for parallel processing
-        M = (lut_numBnInBnGroups[14]*Z + 31)>>5;
-
-        // Set the offset to each CN within a group in terms of 16 Byte
-        cnOffsetInGroup = (lut_numBnInBnGroups[14]*NR_LDPC_ZMAX)>>4;
-
-        // Set pointers to start of group 15
-        p_bnProcBuf  = (__m128i*) &bnProcBuf  [lut_startAddrBnGroups   [idxBnGroup]];
-        p_llrProcBuf = (__m128i*) &llrProcBuf [lut_startAddrBnGroupsLlr[idxBnGroup]];
-        p_llrRes     = (__m256i*) &llrRes     [lut_startAddrBnGroupsLlr[idxBnGroup]];
-
-        // Loop over BNs
-        for (i=0,j=0; i<M; i++,j+=2)
-        {
-            // First 16 LLRs of first CN
-            ymmRes0 = simde_mm256_cvtepi8_epi16(p_bnProcBuf[j]);
-            ymmRes1 = simde_mm256_cvtepi8_epi16(p_bnProcBuf[j+1]);
-
-            // Loop over CNs
-            for (k=1; k<15; k++)
-            {
-                ymm0 = simde_mm256_cvtepi8_epi16(p_bnProcBuf[k*cnOffsetInGroup + j]);
-                ymmRes0 = simde_mm256_adds_epi16(ymmRes0, ymm0);
-
-                ymm1 = simde_mm256_cvtepi8_epi16(p_bnProcBuf[k*cnOffsetInGroup + j+1]);
-                ymmRes1 = simde_mm256_adds_epi16(ymmRes1, ymm1);
-            }
-
-            // Add LLR from receiver input
-            ymm0    = simde_mm256_cvtepi8_epi16(p_llrProcBuf[j]);
-            ymmRes0 = simde_mm256_adds_epi16(ymmRes0, ymm0);
-
-            ymm1    = simde_mm256_cvtepi8_epi16(p_llrProcBuf[j+1]);
-            ymmRes1 = simde_mm256_adds_epi16(ymmRes1, ymm1);
-
-            // Pack results back to epi8
-            ymm0 = simde_mm256_packs_epi16(ymmRes0, ymmRes1);
-            // ymm0     = [ymmRes1[255:128] ymmRes0[255:128] ymmRes1[127:0] ymmRes0[127:0]]
-            // p_llrRes = [ymmRes1[255:128] ymmRes1[127:0] ymmRes0[255:128] ymmRes0[127:0]]
-            *p_llrRes = simde_mm256_permute4x64_epi64(ymm0, 0xD8);
-
-            // Next result
-            p_llrRes++;
-        }
-    }
-
-    // =====================================================================
-    // Process group with 16 CNs
-
-    if (lut_numBnInBnGroups[15] > 0)
-    {
-        // If elements in group move to next address
-        idxBnGroup++;
-
-        // Number of groups of 32 BNs for parallel processing
-        M = (lut_numBnInBnGroups[15]*Z + 31)>>5;
-
-        // Set the offset to each CN within a group in terms of 16 Byte
-        cnOffsetInGroup = (lut_numBnInBnGroups[15]*NR_LDPC_ZMAX)>>4;
-
-        // Set pointers to start of group 16
-        p_bnProcBuf  = (__m128i*) &bnProcBuf  [lut_startAddrBnGroups   [idxBnGroup]];
-        p_llrProcBuf = (__m128i*) &llrProcBuf [lut_startAddrBnGroupsLlr[idxBnGroup]];
-        p_llrRes     = (__m256i*) &llrRes     [lut_startAddrBnGroupsLlr[idxBnGroup]];
-
-        // Loop over BNs
-        for (i=0,j=0; i<M; i++,j+=2)
-        {
-            // First 16 LLRs of first CN
-            ymmRes0 = simde_mm256_cvtepi8_epi16(p_bnProcBuf[j]);
-            ymmRes1 = simde_mm256_cvtepi8_epi16(p_bnProcBuf[j+1]);
-
-            // Loop over CNs
-            for (k=1; k<16; k++)
-            {
-                ymm0 = simde_mm256_cvtepi8_epi16(p_bnProcBuf[k*cnOffsetInGroup + j]);
-                ymmRes0 = simde_mm256_adds_epi16(ymmRes0, ymm0);
-
-                ymm1 = simde_mm256_cvtepi8_epi16(p_bnProcBuf[k*cnOffsetInGroup + j+1]);
-                ymmRes1 = simde_mm256_adds_epi16(ymmRes1, ymm1);
-            }
-
-            // Add LLR from receiver input
-            ymm0    = simde_mm256_cvtepi8_epi16(p_llrProcBuf[j]);
-            ymmRes0 = simde_mm256_adds_epi16(ymmRes0, ymm0);
-
-            ymm1    = simde_mm256_cvtepi8_epi16(p_llrProcBuf[j+1]);
-            ymmRes1 = simde_mm256_adds_epi16(ymmRes1, ymm1);
-
-            // Pack results back to epi8
-            ymm0 = simde_mm256_packs_epi16(ymmRes0, ymmRes1);
-            // ymm0     = [ymmRes1[255:128] ymmRes0[255:128] ymmRes1[127:0] ymmRes0[127:0]]
-            // p_llrRes = [ymmRes1[255:128] ymmRes1[127:0] ymmRes0[255:128] ymmRes0[127:0]]
-            *p_llrRes = simde_mm256_permute4x64_epi64(ymm0, 0xD8);
-
-            // Next result
-            p_llrRes++;
-        }
-    }
-
-    // =====================================================================
-    // Process group with 17 CNs
-
-    if (lut_numBnInBnGroups[16] > 0)
-    {
-        // If elements in group move to next address
-        idxBnGroup++;
-
-        // Number of groups of 32 BNs for parallel processing
-        M = (lut_numBnInBnGroups[16]*Z + 31)>>5;
-
-        // Set the offset to each CN within a group in terms of 16 Byte
-        cnOffsetInGroup = (lut_numBnInBnGroups[16]*NR_LDPC_ZMAX)>>4;
-
-        // Set pointers to start of group 16
-        p_bnProcBuf  = (__m128i*) &bnProcBuf  [lut_startAddrBnGroups   [idxBnGroup]];
-        p_llrProcBuf = (__m128i*) &llrProcBuf [lut_startAddrBnGroupsLlr[idxBnGroup]];
-        p_llrRes     = (__m256i*) &llrRes     [lut_startAddrBnGroupsLlr[idxBnGroup]];
-
-        // Loop over BNs
-        for (i=0,j=0; i<M; i++,j+=2)
-        {
-            // First 16 LLRs of first CN
-            ymmRes0 = simde_mm256_cvtepi8_epi16(p_bnProcBuf[j]);
-            ymmRes1 = simde_mm256_cvtepi8_epi16(p_bnProcBuf[j+1]);
-
-            // Loop over CNs
-            for (k=1; k<17; k++)
-            {
-                ymm0 = simde_mm256_cvtepi8_epi16(p_bnProcBuf[k*cnOffsetInGroup + j]);
-                ymmRes0 = simde_mm256_adds_epi16(ymmRes0, ymm0);
-
-                ymm1 = simde_mm256_cvtepi8_epi16(p_bnProcBuf[k*cnOffsetInGroup + j+1]);
-                ymmRes1 = simde_mm256_adds_epi16(ymmRes1, ymm1);
-            }
-
-            // Add LLR from receiver input
-            ymm0    = simde_mm256_cvtepi8_epi16(p_llrProcBuf[j]);
-            ymmRes0 = simde_mm256_adds_epi16(ymmRes0, ymm0);
-
-            ymm1    = simde_mm256_cvtepi8_epi16(p_llrProcBuf[j+1]);
-            ymmRes1 = simde_mm256_adds_epi16(ymmRes1, ymm1);
-
-            // Pack results back to epi8
-            ymm0 = simde_mm256_packs_epi16(ymmRes0, ymmRes1);
-            // ymm0     = [ymmRes1[255:128] ymmRes0[255:128] ymmRes1[127:0] ymmRes0[127:0]]
-            // p_llrRes = [ymmRes1[255:128] ymmRes1[127:0] ymmRes0[255:128] ymmRes0[127:0]]
-            *p_llrRes = simde_mm256_permute4x64_epi64(ymm0, 0xD8);
-
-            // Next result
-            p_llrRes++;
-        }
-    }
-
-    // =====================================================================
-    // Process group with 18 CNs
-
-    if (lut_numBnInBnGroups[17] > 0)
-    {
-        // If elements in group move to next address
-        idxBnGroup++;
-
-        // Number of groups of 32 BNs for parallel processing
-        M = (lut_numBnInBnGroups[17]*Z + 31)>>5;
-
-        // Set the offset to each CN within a group in terms of 16 Byte
-        cnOffsetInGroup = (lut_numBnInBnGroups[17]*NR_LDPC_ZMAX)>>4;
-
-        // Set pointers to start of group 18
-        p_bnProcBuf  = (__m128i*) &bnProcBuf  [lut_startAddrBnGroups   [idxBnGroup]];
-        p_llrProcBuf = (__m128i*) &llrProcBuf [lut_startAddrBnGroupsLlr[idxBnGroup]];
-        p_llrRes     = (__m256i*) &llrRes     [lut_startAddrBnGroupsLlr[idxBnGroup]];
-
-        // Loop over BNs
-        for (i=0,j=0; i<M; i++,j+=2)
-        {
-            // First 16 LLRs of first CN
-            ymmRes0 = simde_mm256_cvtepi8_epi16(p_bnProcBuf[j]);
-            ymmRes1 = simde_mm256_cvtepi8_epi16(p_bnProcBuf[j+1]);
-
-            // Loop over CNs
-            for (k=1; k<18; k++)
-            {
-                ymm0 = simde_mm256_cvtepi8_epi16(p_bnProcBuf[k*cnOffsetInGroup + j]);
-                ymmRes0 = simde_mm256_adds_epi16(ymmRes0, ymm0);
-
-                ymm1 = simde_mm256_cvtepi8_epi16(p_bnProcBuf[k*cnOffsetInGroup + j+1]);
-                ymmRes1 = simde_mm256_adds_epi16(ymmRes1, ymm1);
-            }
-
-            // Add LLR from receiver input
-            ymm0    = simde_mm256_cvtepi8_epi16(p_llrProcBuf[j]);
-            ymmRes0 = simde_mm256_adds_epi16(ymmRes0, ymm0);
-
-            ymm1    = simde_mm256_cvtepi8_epi16(p_llrProcBuf[j+1]);
-            ymmRes1 = simde_mm256_adds_epi16(ymmRes1, ymm1);
-
-            // Pack results back to epi8
-            ymm0 = simde_mm256_packs_epi16(ymmRes0, ymmRes1);
-            // ymm0     = [ymmRes1[255:128] ymmRes0[255:128] ymmRes1[127:0] ymmRes0[127:0]]
-            // p_llrRes = [ymmRes1[255:128] ymmRes1[127:0] ymmRes0[255:128] ymmRes0[127:0]]
-            *p_llrRes = simde_mm256_permute4x64_epi64(ymm0, 0xD8);
-
-            // Next result
-            p_llrRes++;
-        }
-    }
-
-    // =====================================================================
-    // Process group with 19 CNs
-
-    if (lut_numBnInBnGroups[18] > 0)
-    {
-        // If elements in group move to next address
-        idxBnGroup++;
-
-        // Number of groups of 32 BNs for parallel processing
-        M = (lut_numBnInBnGroups[18]*Z + 31)>>5;
-
-        // Set the offset to each CN within a group in terms of 16 Byte
-        cnOffsetInGroup = (lut_numBnInBnGroups[18]*NR_LDPC_ZMAX)>>4;
-
-        // Set pointers to start of group 19
-        p_bnProcBuf  = (__m128i*) &bnProcBuf  [lut_startAddrBnGroups   [idxBnGroup]];
-        p_llrProcBuf = (__m128i*) &llrProcBuf [lut_startAddrBnGroupsLlr[idxBnGroup]];
-        p_llrRes     = (__m256i*) &llrRes     [lut_startAddrBnGroupsLlr[idxBnGroup]];
-
-        // Loop over BNs
-        for (i=0,j=0; i<M; i++,j+=2)
-        {
-            // First 16 LLRs of first CN
-            ymmRes0 = simde_mm256_cvtepi8_epi16(p_bnProcBuf[j]);
-            ymmRes1 = simde_mm256_cvtepi8_epi16(p_bnProcBuf[j+1]);
-
-            // Loop over CNs
-            for (k=1; k<19; k++)
-            {
-                ymm0 = simde_mm256_cvtepi8_epi16(p_bnProcBuf[k*cnOffsetInGroup + j]);
-                ymmRes0 = simde_mm256_adds_epi16(ymmRes0, ymm0);
-
-                ymm1 = simde_mm256_cvtepi8_epi16(p_bnProcBuf[k*cnOffsetInGroup + j+1]);
-                ymmRes1 = simde_mm256_adds_epi16(ymmRes1, ymm1);
-            }
-
-            // Add LLR from receiver input
-            ymm0    = simde_mm256_cvtepi8_epi16(p_llrProcBuf[j]);
-            ymmRes0 = simde_mm256_adds_epi16(ymmRes0, ymm0);
-
-            ymm1    = simde_mm256_cvtepi8_epi16(p_llrProcBuf[j+1]);
-            ymmRes1 = simde_mm256_adds_epi16(ymmRes1, ymm1);
-
-            // Pack results back to epi8
-            ymm0 = simde_mm256_packs_epi16(ymmRes0, ymmRes1);
-            // ymm0     = [ymmRes1[255:128] ymmRes0[255:128] ymmRes1[127:0] ymmRes0[127:0]]
-            // p_llrRes = [ymmRes1[255:128] ymmRes1[127:0] ymmRes0[255:128] ymmRes0[127:0]]
-            *p_llrRes = simde_mm256_permute4x64_epi64(ymm0, 0xD8);
-
-            // Next result
-            p_llrRes++;
-        }
-    }
-
-    // =====================================================================
-    // Process group with 20 CNs
-
-    if (lut_numBnInBnGroups[19] > 0)
-    {
-        // If elements in group move to next address
-        idxBnGroup++;
-
-        // Number of groups of 32 BNs for parallel processing
-        M = (lut_numBnInBnGroups[19]*Z + 31)>>5;
-
-        // Set the offset to each CN within a group in terms of 16 Byte
-        cnOffsetInGroup = (lut_numBnInBnGroups[19]*NR_LDPC_ZMAX)>>4;
-
-        // Set pointers to start of group 20
-        p_bnProcBuf  = (__m128i*) &bnProcBuf  [lut_startAddrBnGroups   [idxBnGroup]];
-        p_llrProcBuf = (__m128i*) &llrProcBuf [lut_startAddrBnGroupsLlr[idxBnGroup]];
-        p_llrRes     = (__m256i*) &llrRes     [lut_startAddrBnGroupsLlr[idxBnGroup]];
-
-        // Loop over BNs
-        for (i=0,j=0; i<M; i++,j+=2)
-        {
-            // First 16 LLRs of first CN
-            ymmRes0 = simde_mm256_cvtepi8_epi16(p_bnProcBuf[j]);
-            ymmRes1 = simde_mm256_cvtepi8_epi16(p_bnProcBuf[j+1]);
-
-            // Loop over CNs
-            for (k=1; k<20; k++)
-            {
-                ymm0 = simde_mm256_cvtepi8_epi16(p_bnProcBuf[k*cnOffsetInGroup + j]);
-                ymmRes0 = simde_mm256_adds_epi16(ymmRes0, ymm0);
-
-                ymm1 = simde_mm256_cvtepi8_epi16(p_bnProcBuf[k*cnOffsetInGroup + j+1]);
-                ymmRes1 = simde_mm256_adds_epi16(ymmRes1, ymm1);
-            }
-
-            // Add LLR from receiver input
-            ymm0    = simde_mm256_cvtepi8_epi16(p_llrProcBuf[j]);
-            ymmRes0 = simde_mm256_adds_epi16(ymmRes0, ymm0);
-
-            ymm1    = simde_mm256_cvtepi8_epi16(p_llrProcBuf[j+1]);
-            ymmRes1 = simde_mm256_adds_epi16(ymmRes1, ymm1);
-
-            // Pack results back to epi8
-            ymm0 = simde_mm256_packs_epi16(ymmRes0, ymmRes1);
-            // ymm0     = [ymmRes1[255:128] ymmRes0[255:128] ymmRes1[127:0] ymmRes0[127:0]]
-            // p_llrRes = [ymmRes1[255:128] ymmRes1[127:0] ymmRes0[255:128] ymmRes0[127:0]]
-            *p_llrRes = simde_mm256_permute4x64_epi64(ymm0, 0xD8);
-
-            // Next result
-            p_llrRes++;
-        }
-    }
-
-    // =====================================================================
-    // Process group with 21 CNs
-
-    if (lut_numBnInBnGroups[20] > 0)
-    {
-        // If elements in group move to next address
-        idxBnGroup++;
-
-        // Number of groups of 32 BNs for parallel processing
-        M = (lut_numBnInBnGroups[20]*Z + 31)>>5;
-
-        // Set the offset to each CN within a group in terms of 16 Byte
-        cnOffsetInGroup = (lut_numBnInBnGroups[20]*NR_LDPC_ZMAX)>>4;
-
-        // Set pointers to start of group 21
-        p_bnProcBuf  = (__m128i*) &bnProcBuf  [lut_startAddrBnGroups   [idxBnGroup]];
-        p_llrProcBuf = (__m128i*) &llrProcBuf [lut_startAddrBnGroupsLlr[idxBnGroup]];
-        p_llrRes     = (__m256i*) &llrRes     [lut_startAddrBnGroupsLlr[idxBnGroup]];
-
-        // Loop over BNs
-        for (i=0,j=0; i<M; i++,j+=2)
-        {
-            // First 16 LLRs of first CN
-            ymmRes0 = simde_mm256_cvtepi8_epi16(p_bnProcBuf[j]);
-            ymmRes1 = simde_mm256_cvtepi8_epi16(p_bnProcBuf[j+1]);
-
-            // Loop over CNs
-            for (k=1; k<21; k++)
-            {
-                ymm0 = simde_mm256_cvtepi8_epi16(p_bnProcBuf[k*cnOffsetInGroup + j]);
-                ymmRes0 = simde_mm256_adds_epi16(ymmRes0, ymm0);
-
-                ymm1 = simde_mm256_cvtepi8_epi16(p_bnProcBuf[k*cnOffsetInGroup + j+1]);
-                ymmRes1 = simde_mm256_adds_epi16(ymmRes1, ymm1);
-            }
-
-            // Add LLR from receiver input
-            ymm0    = simde_mm256_cvtepi8_epi16(p_llrProcBuf[j]);
-            ymmRes0 = simde_mm256_adds_epi16(ymmRes0, ymm0);
-
-            ymm1    = simde_mm256_cvtepi8_epi16(p_llrProcBuf[j+1]);
-            ymmRes1 = simde_mm256_adds_epi16(ymmRes1, ymm1);
-
-            // Pack results back to epi8
-            ymm0 = simde_mm256_packs_epi16(ymmRes0, ymmRes1);
-            // ymm0     = [ymmRes1[255:128] ymmRes0[255:128] ymmRes1[127:0] ymmRes0[127:0]]
-            // p_llrRes = [ymmRes1[255:128] ymmRes1[127:0] ymmRes0[255:128] ymmRes0[127:0]]
-            *p_llrRes = simde_mm256_permute4x64_epi64(ymm0, 0xD8);
-
-            // Next result
-            p_llrRes++;
-        }
-    }
-
-    // =====================================================================
-    // Process group with 22 CNs
-
-    if (lut_numBnInBnGroups[21] > 0)
-    {
-        // If elements in group move to next address
-        idxBnGroup++;
-
-        // Number of groups of 32 BNs for parallel processing
-        M = (lut_numBnInBnGroups[21]*Z + 31)>>5;
-
-        // Set the offset to each CN within a group in terms of 16 Byte
-        cnOffsetInGroup = (lut_numBnInBnGroups[21]*NR_LDPC_ZMAX)>>4;
-
-        // Set pointers to start of group 22
-        p_bnProcBuf  = (__m128i*) &bnProcBuf  [lut_startAddrBnGroups   [idxBnGroup]];
-        p_llrProcBuf = (__m128i*) &llrProcBuf [lut_startAddrBnGroupsLlr[idxBnGroup]];
-        p_llrRes     = (__m256i*) &llrRes     [lut_startAddrBnGroupsLlr[idxBnGroup]];
-
-        // Loop over BNs
-        for (i=0,j=0; i<M; i++,j+=2)
-        {
-            // First 16 LLRs of first CN
-            ymmRes0 = simde_mm256_cvtepi8_epi16(p_bnProcBuf[j]);
-            ymmRes1 = simde_mm256_cvtepi8_epi16(p_bnProcBuf[j+1]);
-
-            // Loop over CNs
-            for (k=1; k<22; k++)
-            {
-                ymm0 = simde_mm256_cvtepi8_epi16(p_bnProcBuf[k*cnOffsetInGroup + j]);
-                ymmRes0 = simde_mm256_adds_epi16(ymmRes0, ymm0);
-
-                ymm1 = simde_mm256_cvtepi8_epi16(p_bnProcBuf[k*cnOffsetInGroup + j+1]);
-                ymmRes1 = simde_mm256_adds_epi16(ymmRes1, ymm1);
-            }
-
-            // Add LLR from receiver input
-            ymm0    = simde_mm256_cvtepi8_epi16(p_llrProcBuf[j]);
-            ymmRes0 = simde_mm256_adds_epi16(ymmRes0, ymm0);
-
-            ymm1    = simde_mm256_cvtepi8_epi16(p_llrProcBuf[j+1]);
-            ymmRes1 = simde_mm256_adds_epi16(ymmRes1, ymm1);
-
-            // Pack results back to epi8
-            ymm0 = simde_mm256_packs_epi16(ymmRes0, ymmRes1);
-            // ymm0     = [ymmRes1[255:128] ymmRes0[255:128] ymmRes1[127:0] ymmRes0[127:0]]
-            // p_llrRes = [ymmRes1[255:128] ymmRes1[127:0] ymmRes0[255:128] ymmRes0[127:0]]
-            *p_llrRes = simde_mm256_permute4x64_epi64(ymm0, 0xD8);
-
-            // Next result
-            p_llrRes++;
-        }
-    }
-
-    // =====================================================================
-    // Process group with 23 CNs
-
-    if (lut_numBnInBnGroups[22] > 0)
-    {
-        // If elements in group move to next address
-        idxBnGroup++;
-
-        // Number of groups of 32 BNs for parallel processing
-        M = (lut_numBnInBnGroups[22]*Z + 31)>>5;
-
-        // Set the offset to each CN within a group in terms of 16 Byte
-        cnOffsetInGroup = (lut_numBnInBnGroups[22]*NR_LDPC_ZMAX)>>4;
-
-        // Set pointers to start of group 23
-        p_bnProcBuf  = (__m128i*) &bnProcBuf  [lut_startAddrBnGroups   [idxBnGroup]];
-        p_llrProcBuf = (__m128i*) &llrProcBuf [lut_startAddrBnGroupsLlr[idxBnGroup]];
-        p_llrRes     = (__m256i*) &llrRes     [lut_startAddrBnGroupsLlr[idxBnGroup]];
-
-        // Loop over BNs
-        for (i=0,j=0; i<M; i++,j+=2)
-        {
-            // First 16 LLRs of first CN
-            ymmRes0 = simde_mm256_cvtepi8_epi16(p_bnProcBuf[j]);
-            ymmRes1 = simde_mm256_cvtepi8_epi16(p_bnProcBuf[j+1]);
-
-            // Loop over CNs
-            for (k=1; k<23; k++)
-            {
-                ymm0 = simde_mm256_cvtepi8_epi16(p_bnProcBuf[k*cnOffsetInGroup + j]);
-                ymmRes0 = simde_mm256_adds_epi16(ymmRes0, ymm0);
-
-                ymm1 = simde_mm256_cvtepi8_epi16(p_bnProcBuf[k*cnOffsetInGroup + j+1]);
-                ymmRes1 = simde_mm256_adds_epi16(ymmRes1, ymm1);
-            }
-
-            // Add LLR from receiver input
-            ymm0    = simde_mm256_cvtepi8_epi16(p_llrProcBuf[j]);
-            ymmRes0 = simde_mm256_adds_epi16(ymmRes0, ymm0);
-
-            ymm1    = simde_mm256_cvtepi8_epi16(p_llrProcBuf[j+1]);
-            ymmRes1 = simde_mm256_adds_epi16(ymmRes1, ymm1);
-
-            // Pack results back to epi8
-            ymm0 = simde_mm256_packs_epi16(ymmRes0, ymmRes1);
-            // ymm0     = [ymmRes1[255:128] ymmRes0[255:128] ymmRes1[127:0] ymmRes0[127:0]]
-            // p_llrRes = [ymmRes1[255:128] ymmRes1[127:0] ymmRes0[255:128] ymmRes0[127:0]]
-            *p_llrRes = simde_mm256_permute4x64_epi64(ymm0, 0xD8);
-
-            // Next result
-            p_llrRes++;
-        }
-    }
-
-    // =====================================================================
-    // Process group with 24 CNs
-
-    if (lut_numBnInBnGroups[23] > 0)
-    {
-        // If elements in group move to next address
-        idxBnGroup++;
-
-        // Number of groups of 32 BNs for parallel processing
-        M = (lut_numBnInBnGroups[23]*Z + 31)>>5;
-
-        // Set the offset to each CN within a group in terms of 16 Byte
-        cnOffsetInGroup = (lut_numBnInBnGroups[23]*NR_LDPC_ZMAX)>>4;
-
-        // Set pointers to start of group 24
-        p_bnProcBuf  = (__m128i*) &bnProcBuf  [lut_startAddrBnGroups   [idxBnGroup]];
-        p_llrProcBuf = (__m128i*) &llrProcBuf [lut_startAddrBnGroupsLlr[idxBnGroup]];
-        p_llrRes     = (__m256i*) &llrRes     [lut_startAddrBnGroupsLlr[idxBnGroup]];
-
-        // Loop over BNs
-        for (i=0,j=0; i<M; i++,j+=2)
-        {
-            // First 16 LLRs of first CN
-            ymmRes0 = simde_mm256_cvtepi8_epi16(p_bnProcBuf[j]);
-            ymmRes1 = simde_mm256_cvtepi8_epi16(p_bnProcBuf[j+1]);
-
-            // Loop over CNs
-            for (k=1; k<24; k++)
-            {
-                ymm0 = simde_mm256_cvtepi8_epi16(p_bnProcBuf[k*cnOffsetInGroup + j]);
-                ymmRes0 = simde_mm256_adds_epi16(ymmRes0, ymm0);
-
-                ymm1 = simde_mm256_cvtepi8_epi16(p_bnProcBuf[k*cnOffsetInGroup + j+1]);
-                ymmRes1 = simde_mm256_adds_epi16(ymmRes1, ymm1);
-            }
-
-            // Add LLR from receiver input
-            ymm0    = simde_mm256_cvtepi8_epi16(p_llrProcBuf[j]);
-            ymmRes0 = simde_mm256_adds_epi16(ymmRes0, ymm0);
-
-            ymm1    = simde_mm256_cvtepi8_epi16(p_llrProcBuf[j+1]);
-            ymmRes1 = simde_mm256_adds_epi16(ymmRes1, ymm1);
-
-            // Pack results back to epi8
-            ymm0 = simde_mm256_packs_epi16(ymmRes0, ymmRes1);
-            // ymm0     = [ymmRes1[255:128] ymmRes0[255:128] ymmRes1[127:0] ymmRes0[127:0]]
-            // p_llrRes = [ymmRes1[255:128] ymmRes1[127:0] ymmRes0[255:128] ymmRes0[127:0]]
-            *p_llrRes = simde_mm256_permute4x64_epi64(ymm0, 0xD8);
-
-            // Next result
-            p_llrRes++;
-        }
-    }
+#if 1 // always take the 256-bit SIMDE path (was: #ifdef __AVX2__)
+    simde__m128i* p_bnProcBuf;
+    simde__m256i* p_bnProcBufRes;
+    simde__m128i* p_llrProcBuf;
+    simde__m256i* p_llrProcBuf256;
+    simde__m256i* p_llrRes;
+#else
+    simde__m128i* p_bnProcBuf;
+    simde__m128i* p_bnProcBufRes;
+    simde__m128i* p_llrProcBuf;
+    simde__m128i* p_llrRes;
+#endif
+    // Number of vector chunks of BNs to process in the current group
+    uint32_t M;
+    uint32_t i;
+    uint32_t k;
+    // Offset to each CN within a group, in units of 16 bytes
+    uint32_t cnOffsetInGroup;
+    uint8_t idxBnGroup = 0;
+#if 1 // 256-bit SIMDE implementation (was: #ifdef __AVX2__)
+    uint32_t j;
+    simde__m256i ymm0, ymm1, ymmRes0, ymmRes1;
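+    // simde__m256i and simde_mm256_* compile to native AVX2 on x86 and are emulated (e.g. via NEON) on other targets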
 
     // =====================================================================
-    // Process group with 25 CNs
-
-    if (lut_numBnInBnGroups[24] > 0)
-    {
-        // If elements in group move to next address
-        idxBnGroup++;
-
-        // Number of groups of 32 BNs for parallel processing
-        M = (lut_numBnInBnGroups[24]*Z + 31)>>5;
-
-        // Set the offset to each CN within a group in terms of 16 Byte
-        cnOffsetInGroup = (lut_numBnInBnGroups[24]*NR_LDPC_ZMAX)>>4;
-
-        // Set pointers to start of group 25
-        p_bnProcBuf  = (__m128i*) &bnProcBuf  [lut_startAddrBnGroups   [idxBnGroup]];
-        p_llrProcBuf = (__m128i*) &llrProcBuf [lut_startAddrBnGroupsLlr[idxBnGroup]];
-        p_llrRes     = (__m256i*) &llrRes     [lut_startAddrBnGroupsLlr[idxBnGroup]];
-
-        // Loop over BNs
-        for (i=0,j=0; i<M; i++,j+=2)
-        {
-            // First 16 LLRs of first CN
-            ymmRes0 = simde_mm256_cvtepi8_epi16(p_bnProcBuf[j]);
-            ymmRes1 = simde_mm256_cvtepi8_epi16(p_bnProcBuf[j+1]);
-
-            // Loop over CNs
-            for (k=1; k<25; k++)
-            {
-                ymm0 = simde_mm256_cvtepi8_epi16(p_bnProcBuf[k*cnOffsetInGroup + j]);
-                ymmRes0 = simde_mm256_adds_epi16(ymmRes0, ymm0);
-
-                ymm1 = simde_mm256_cvtepi8_epi16(p_bnProcBuf[k*cnOffsetInGroup + j+1]);
-                ymmRes1 = simde_mm256_adds_epi16(ymmRes1, ymm1);
-            }
-
-            // Add LLR from receiver input
-            ymm0    = simde_mm256_cvtepi8_epi16(p_llrProcBuf[j]);
-            ymmRes0 = simde_mm256_adds_epi16(ymmRes0, ymm0);
-
-            ymm1    = simde_mm256_cvtepi8_epi16(p_llrProcBuf[j+1]);
-            ymmRes1 = simde_mm256_adds_epi16(ymmRes1, ymm1);
-
-            // Pack results back to epi8
-            ymm0 = simde_mm256_packs_epi16(ymmRes0, ymmRes1);
-            // ymm0     = [ymmRes1[255:128] ymmRes0[255:128] ymmRes1[127:0] ymmRes0[127:0]]
-            // p_llrRes = [ymmRes1[255:128] ymmRes1[127:0] ymmRes0[255:128] ymmRes0[127:0]]
-            *p_llrRes = simde_mm256_permute4x64_epi64(ymm0, 0xD8);
+    // Process group with 1 CN
 
-            // Next result
-            p_llrRes++;
-        }
-    }
+    // There is always a BN group with 1 CN
+    // Number of groups of 32 BNs for parallel processing
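+    // ((x + 31) >> 5 computes ceil(x / 32); each 256-bit vector holds 32 int8 LLRs)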
+    M = (lut_numBnInBnGroups[0]*Z + 31)>>5;
 
-    // =====================================================================
-    // Process group with 26 CNs
+    p_bnProcBuf     = (simde__m128i*) &bnProcBuf    [lut_startAddrBnGroups   [idxBnGroup]];
+    p_bnProcBufRes  = (simde__m256i*) &bnProcBufRes [lut_startAddrBnGroups   [idxBnGroup]];
+    p_llrProcBuf    = (simde__m128i*) &llrProcBuf   [lut_startAddrBnGroupsLlr[idxBnGroup]];
+    p_llrProcBuf256 = (simde__m256i*) &llrProcBuf   [lut_startAddrBnGroupsLlr[idxBnGroup]];
+    p_llrRes        = (simde__m256i*) &llrRes       [lut_startAddrBnGroupsLlr[idxBnGroup]];
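+    // llrProcBuf is viewed both as 128-bit lanes (for the widening loads) and as 256-bit (for the copy below)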
 
-    if (lut_numBnInBnGroups[25] > 0)
+    // Loop over BNs
+    for (i=0,j=0; i<M; i++,j+=2)
     {
-        // If elements in group move to next address
-        idxBnGroup++;
-
-        // Number of groups of 32 BNs for parallel processing
-        M = (lut_numBnInBnGroups[25]*Z + 31)>>5;
-
-        // Set the offset to each CN within a group in terms of 16 Byte
-        cnOffsetInGroup = (lut_numBnInBnGroups[25]*NR_LDPC_ZMAX)>>4;
-
-        // Set pointers to start of group 26
-        p_bnProcBuf  = (__m128i*) &bnProcBuf  [lut_startAddrBnGroups   [idxBnGroup]];
-        p_llrProcBuf = (__m128i*) &llrProcBuf [lut_startAddrBnGroupsLlr[idxBnGroup]];
-        p_llrRes     = (__m256i*) &llrRes     [lut_startAddrBnGroupsLlr[idxBnGroup]];
-
-        // Loop over BNs
-        for (i=0,j=0; i<M; i++,j+=2)
-        {
-            // First 16 LLRs of first CN
-            ymmRes0 = simde_mm256_cvtepi8_epi16(p_bnProcBuf[j]);
-            ymmRes1 = simde_mm256_cvtepi8_epi16(p_bnProcBuf[j+1]);
+        // Store the result of the single CN in bnProcBufRes so it is available
+        // for the next iteration in case the parity check fails
+        p_bnProcBufRes[i] = p_llrProcBuf256[i];
 
-            // Loop over CNs
-            for (k=1; k<26; k++)
-            {
-                ymm0 = simde_mm256_cvtepi8_epi16(p_bnProcBuf[k*cnOffsetInGroup + j]);
-                ymmRes0 = simde_mm256_adds_epi16(ymmRes0, ymm0);
+        // First 16 LLRs of first CN
+        ymm0 = simde_mm256_cvtepi8_epi16(p_bnProcBuf [j]);
+        ymm1 = simde_mm256_cvtepi8_epi16(p_llrProcBuf[j]);
 
-                ymm1 = simde_mm256_cvtepi8_epi16(p_bnProcBuf[k*cnOffsetInGroup + j+1]);
-                ymmRes1 = simde_mm256_adds_epi16(ymmRes1, ymm1);
-            }
+        ymmRes0 = simde_mm256_adds_epi16(ymm0, ymm1);
 
-            // Add LLR from receiver input
-            ymm0    = simde_mm256_cvtepi8_epi16(p_llrProcBuf[j]);
-            ymmRes0 = simde_mm256_adds_epi16(ymmRes0, ymm0);
+        // Second 16 LLRs of first CN
+        ymm0 = simde_mm256_cvtepi8_epi16(p_bnProcBuf [j+1]);
+        ymm1 = simde_mm256_cvtepi8_epi16(p_llrProcBuf[j+1]);
 
-            ymm1    = simde_mm256_cvtepi8_epi16(p_llrProcBuf[j+1]);
-            ymmRes1 = simde_mm256_adds_epi16(ymmRes1, ymm1);
+        ymmRes1 = simde_mm256_adds_epi16(ymm0, ymm1);
 
-            // Pack results back to epi8
-            ymm0 = simde_mm256_packs_epi16(ymmRes0, ymmRes1);
-            // ymm0     = [ymmRes1[255:128] ymmRes0[255:128] ymmRes1[127:0] ymmRes0[127:0]]
-            // p_llrRes = [ymmRes1[255:128] ymmRes1[127:0] ymmRes0[255:128] ymmRes0[127:0]]
-            *p_llrRes = simde_mm256_permute4x64_epi64(ymm0, 0xD8);
+        // Pack results back to epi8
+        ymm0 = simde_mm256_packs_epi16(ymmRes0, ymmRes1);
+        // ymm0     = [ymmRes1[255:128] ymmRes0[255:128] ymmRes1[127:0] ymmRes0[127:0]]
+        // p_llrRes = [ymmRes1[255:128] ymmRes1[127:0] ymmRes0[255:128] ymmRes0[127:0]]
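+        // (0xD8 = 0b11011000 reorders the 64-bit lanes to 0,2,1,3, undoing the per-lane interleave of packs_epi16)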
+        *p_llrRes = simde_mm256_permute4x64_epi64(ymm0, 0xD8);
 
-            // Next result
-            p_llrRes++;
-        }
+        // Next result
+        p_llrRes++;
     }
 
+    for (uint32_t cnidx=1; cnidx<NR_LDPC_NUM_BN_GROUPS_BG1_R13; cnidx++) {
     // =====================================================================
-    // Process group with 27 CNs
+    // Process the remaining BN groups; group cnidx holds BNs connected to (cnidx+1) CNs
 
-    if (lut_numBnInBnGroups[26] > 0)
-    {
+      if (lut_numBnInBnGroups[cnidx] > 0)
+      {
         // If elements in group move to next address
         idxBnGroup++;
 
         // Number of groups of 32 BNs for parallel processing
-        M = (lut_numBnInBnGroups[26]*Z + 31)>>5;
+        M = (lut_numBnInBnGroups[cnidx]*Z + 31)>>5;
 
         // Set the offset to each CN within a group in terms of 16 Byte
-        cnOffsetInGroup = (lut_numBnInBnGroups[26]*NR_LDPC_ZMAX)>>4;
+        cnOffsetInGroup = (lut_numBnInBnGroups[cnidx]*NR_LDPC_ZMAX)>>4;
 
-        // Set pointers to start of group 27
-        p_bnProcBuf  = (__m128i*) &bnProcBuf  [lut_startAddrBnGroups   [idxBnGroup]];
-        p_llrProcBuf = (__m128i*) &llrProcBuf [lut_startAddrBnGroupsLlr[idxBnGroup]];
-        p_llrRes     = (__m256i*) &llrRes     [lut_startAddrBnGroupsLlr[idxBnGroup]];
+        // Set pointers to start of the current group
+        p_bnProcBuf  = (simde__m128i*) &bnProcBuf  [lut_startAddrBnGroups   [idxBnGroup]];
+        p_llrProcBuf = (simde__m128i*) &llrProcBuf [lut_startAddrBnGroupsLlr[idxBnGroup]];
+        p_llrRes     = (simde__m256i*) &llrRes     [lut_startAddrBnGroupsLlr[idxBnGroup]];
 
         // Loop over BNs
         for (i=0,j=0; i<M; i++,j+=2)
@@ -1479,7 +137,7 @@ static inline void nrLDPC_bnProcPc(t_nrLDPC_lut* p_lut, int8_t* bnProcBuf, int8_
             ymmRes1 = simde_mm256_cvtepi8_epi16(p_bnProcBuf[j+1]);
 
             // Loop over CNs
-            for (k=1; k<27; k++)
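+            // the first CN was loaded above; accumulate the remaining cnidx of the (cnidx+1) CN messages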
+            for (k=1; k<=cnidx; k++)
             {
                 ymm0 = simde_mm256_cvtepi8_epi16(p_bnProcBuf[k*cnOffsetInGroup + j]);
                 ymmRes0 = simde_mm256_adds_epi16(ymmRes0, ymm0);
@@ -1504,170 +162,104 @@ static inline void nrLDPC_bnProcPc(t_nrLDPC_lut* p_lut, int8_t* bnProcBuf, int8_
             // Next result
             p_llrRes++;
         }
+      }
     }
+#else
 
-    // =====================================================================
-    // Process group with 28 CNs
-
-    if (lut_numBnInBnGroups[27] > 0)
-    {
-        // If elements in group move to next address
-        idxBnGroup++;
-
-        // Number of groups of 32 BNs for parallel processing
-        M = (lut_numBnInBnGroups[27]*Z + 31)>>5;
-
-        // Set the offset to each CN within a group in terms of 16 Byte
-        cnOffsetInGroup = (lut_numBnInBnGroups[27]*NR_LDPC_ZMAX)>>4;
-
-        // Set pointers to start of group 28
-        p_bnProcBuf  = (__m128i*) &bnProcBuf  [lut_startAddrBnGroups   [idxBnGroup]];
-        p_llrProcBuf = (__m128i*) &llrProcBuf [lut_startAddrBnGroupsLlr[idxBnGroup]];
-        p_llrRes     = (__m256i*) &llrRes     [lut_startAddrBnGroupsLlr[idxBnGroup]];
-
-        // Loop over BNs
-        for (i=0,j=0; i<M; i++,j+=2)
-        {
-            // First 16 LLRs of first CN
-            ymmRes0 = simde_mm256_cvtepi8_epi16(p_bnProcBuf[j]);
-            ymmRes1 = simde_mm256_cvtepi8_epi16(p_bnProcBuf[j+1]);
-
-            // Loop over CNs
-            for (k=1; k<28; k++)
-            {
-                ymm0 = simde_mm256_cvtepi8_epi16(p_bnProcBuf[k*cnOffsetInGroup + j]);
-                ymmRes0 = simde_mm256_adds_epi16(ymmRes0, ymm0);
-
-                ymm1 = simde_mm256_cvtepi8_epi16(p_bnProcBuf[k*cnOffsetInGroup + j+1]);
-                ymmRes1 = simde_mm256_adds_epi16(ymmRes1, ymm1);
-            }
-
-            // Add LLR from receiver input
-            ymm0    = simde_mm256_cvtepi8_epi16(p_llrProcBuf[j]);
-            ymmRes0 = simde_mm256_adds_epi16(ymmRes0, ymm0);
-
-            ymm1    = simde_mm256_cvtepi8_epi16(p_llrProcBuf[j+1]);
-            ymmRes1 = simde_mm256_adds_epi16(ymmRes1, ymm1);
-
-            // Pack results back to epi8
-            ymm0 = simde_mm256_packs_epi16(ymmRes0, ymmRes1);
-            // ymm0     = [ymmRes1[255:128] ymmRes0[255:128] ymmRes1[127:0] ymmRes0[127:0]]
-            // p_llrRes = [ymmRes1[255:128] ymmRes1[127:0] ymmRes0[255:128] ymmRes0[127:0]]
-            *p_llrRes = simde_mm256_permute4x64_epi64(ymm0, 0xD8);
-
-            // Next result
-            p_llrRes++;
-        }
-    }
+    simde__m128i ymm0, ymm1, ymmRes0, ymmRes1;
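+    // (variable names kept from the 256-bit path; these are 128-bit vectors here)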
 
     // =====================================================================
-    // Process group with 29 CNs
-
-    if (lut_numBnInBnGroups[28] > 0)
-    {
-        // If elements in group move to next address
-        idxBnGroup++;
-
-        // Number of groups of 32 BNs for parallel processing
-        M = (lut_numBnInBnGroups[28]*Z + 31)>>5;
+    // Process group with 1 CN
 
-        // Set the offset to each CN within a group in terms of 16 Byte
-        cnOffsetInGroup = (lut_numBnInBnGroups[28]*NR_LDPC_ZMAX)>>4;
+    // There is always a BN group with 1 CN
+    // Number of groups of 16 BNs for parallel processing
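+    // ((x + 15) >> 4 computes ceil(x / 16); each 128-bit vector holds 16 int8 LLRs)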
+    M = (lut_numBnInBnGroups[0]*Z + 15)>>4;
 
-        // Set pointers to start of group 29
-        p_bnProcBuf  = (__m128i*) &bnProcBuf  [lut_startAddrBnGroups   [idxBnGroup]];
-        p_llrProcBuf = (__m128i*) &llrProcBuf [lut_startAddrBnGroupsLlr[idxBnGroup]];
-        p_llrRes     = (__m256i*) &llrRes     [lut_startAddrBnGroupsLlr[idxBnGroup]];
+    p_bnProcBuf     = (simde__m128i*) &bnProcBuf    [lut_startAddrBnGroups   [idxBnGroup]];
+    p_bnProcBufRes  = (simde__m128i*) &bnProcBufRes [lut_startAddrBnGroups   [idxBnGroup]];
+    p_llrProcBuf    = (simde__m128i*) &llrProcBuf   [lut_startAddrBnGroupsLlr[idxBnGroup]];
+    p_llrRes        = (simde__m128i*) &llrRes       [lut_startAddrBnGroupsLlr[idxBnGroup]];
 
-        // Loop over BNs
-        for (i=0,j=0; i<M; i++,j+=2)
-        {
-            // First 16 LLRs of first CN
-            ymmRes0 = simde_mm256_cvtepi8_epi16(p_bnProcBuf[j]);
-            ymmRes1 = simde_mm256_cvtepi8_epi16(p_bnProcBuf[j+1]);
+    // Loop over BNs
+    for (i=0; i<M; i++)
+    {
+        // Store the result of the single CN in bnProcBufRes so it is available
+        // for the next iteration in case the parity check fails
+        p_bnProcBufRes[i] = p_llrProcBuf[i];
 
-            // Loop over CNs
-            for (k=1; k<29; k++)
-            {
-                ymm0 = simde_mm256_cvtepi8_epi16(p_bnProcBuf[k*cnOffsetInGroup + j]);
-                ymmRes0 = simde_mm256_adds_epi16(ymmRes0, ymm0);
+        // First 8 LLRs of first CN
+        ymm0 = simde_mm_cvtepi8_epi16(p_bnProcBuf [i]);
+        ymm1 = simde_mm_cvtepi8_epi16(p_llrProcBuf[i]);
 
-                ymm1 = simde_mm256_cvtepi8_epi16(p_bnProcBuf[k*cnOffsetInGroup + j+1]);
-                ymmRes1 = simde_mm256_adds_epi16(ymmRes1, ymm1);
-            }
+        ymmRes0 = simde_mm_adds_epi16(ymm0, ymm1);
 
-            // Add LLR from receiver input
-            ymm0    = simde_mm256_cvtepi8_epi16(p_llrProcBuf[j]);
-            ymmRes0 = simde_mm256_adds_epi16(ymmRes0, ymm0);
+        // Second 8 LLRs of first CN
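+        // (srli_si128 shifts right by 8 bytes so the upper 8 LLRs land in the low half for widening)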
+        ymm0 = simde_mm_cvtepi8_epi16(simde_mm_srli_si128(p_bnProcBuf [i],8));
+        ymm1 = simde_mm_cvtepi8_epi16(simde_mm_srli_si128(p_llrProcBuf[i],8));
 
-            ymm1    = simde_mm256_cvtepi8_epi16(p_llrProcBuf[j+1]);
-            ymmRes1 = simde_mm256_adds_epi16(ymmRes1, ymm1);
+        ymmRes1 = simde_mm_adds_epi16(ymm0, ymm1);
 
-            // Pack results back to epi8
-            ymm0 = simde_mm256_packs_epi16(ymmRes0, ymmRes1);
-            // ymm0     = [ymmRes1[255:128] ymmRes0[255:128] ymmRes1[127:0] ymmRes0[127:0]]
-            // p_llrRes = [ymmRes1[255:128] ymmRes1[127:0] ymmRes0[255:128] ymmRes0[127:0]]
-            *p_llrRes = simde_mm256_permute4x64_epi64(ymm0, 0xD8);
+        // Pack results back to epi8
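+        // (no lane permute needed here: 128-bit packs stores the two halves contiguously)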
+        *p_llrRes = simde_mm_packs_epi16(ymmRes0, ymmRes1);
 
-            // Next result
-            p_llrRes++;
-        }
+        // Next result
+        p_llrRes++;
     }
 
+    for (uint32_t cnidx=1; cnidx<NR_LDPC_NUM_BN_GROUPS_BG1_R13; cnidx++) {
     // =====================================================================
-    // Process group with 30 CNs
+    // Process the remaining BN groups; group cnidx holds BNs connected to (cnidx+1) CNs
 
-    if (lut_numBnInBnGroups[29] > 0)
-    {
+      if (lut_numBnInBnGroups[cnidx] > 0)
+      {
         // If elements in group move to next address
         idxBnGroup++;
 
         // Number of groups of 32 BNs for parallel processing
-        M = (lut_numBnInBnGroups[29]*Z + 31)>>5;
+        M = (lut_numBnInBnGroups[cnidx]*Z + 15)>>4;
 
         // Set the offset to each CN within a group in terms of 16 Byte
-        cnOffsetInGroup = (lut_numBnInBnGroups[29]*NR_LDPC_ZMAX)>>4;
+        cnOffsetInGroup = (lut_numBnInBnGroups[cnidx]*NR_LDPC_ZMAX)>>4;
 
-        // Set pointers to start of group 30
-        p_bnProcBuf  = (__m128i*) &bnProcBuf  [lut_startAddrBnGroups   [idxBnGroup]];
-        p_llrProcBuf = (__m128i*) &llrProcBuf [lut_startAddrBnGroupsLlr[idxBnGroup]];
-        p_llrRes     = (__m256i*) &llrRes     [lut_startAddrBnGroupsLlr[idxBnGroup]];
+        // Set pointers to start of the current group
+        p_bnProcBuf  = (simde__m128i*) &bnProcBuf  [lut_startAddrBnGroups   [idxBnGroup]];
+        p_llrProcBuf = (simde__m128i*) &llrProcBuf [lut_startAddrBnGroupsLlr[idxBnGroup]];
+        p_llrRes     = (simde__m128i*) &llrRes     [lut_startAddrBnGroupsLlr[idxBnGroup]];
 
         // Loop over BNs
-        for (i=0,j=0; i<M; i++,j+=2)
+        for (i=0; i<M; i++)
         {
             // First 16 LLRs of first CN
-            ymmRes0 = simde_mm256_cvtepi8_epi16(p_bnProcBuf[j]);
-            ymmRes1 = simde_mm256_cvtepi8_epi16(p_bnProcBuf[j+1]);
+            ymmRes0 = simde_mm_cvtepi8_epi16(p_bnProcBuf[i]);
+            ymmRes1 = simde_mm_cvtepi8_epi16(simde_mm_srli_si128(p_bnProcBuf[i],8));
 
             // Loop over CNs
-            for (k=1; k<30; k++)
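+            // the first CN was loaded above; accumulate the remaining cnidx of the (cnidx+1) CN messages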
+            for (k=1; k<=cnidx; k++)
             {
-                ymm0 = simde_mm256_cvtepi8_epi16(p_bnProcBuf[k*cnOffsetInGroup + j]);
-                ymmRes0 = simde_mm256_adds_epi16(ymmRes0, ymm0);
+                ymm0 = simde_mm_cvtepi8_epi16(p_bnProcBuf[k*cnOffsetInGroup + i]);
+                ymmRes0 = simde_mm_adds_epi16(ymmRes0, ymm0);
 
-                ymm1 = simde_mm256_cvtepi8_epi16(p_bnProcBuf[k*cnOffsetInGroup + j+1]);
-                ymmRes1 = simde_mm256_adds_epi16(ymmRes1, ymm1);
+                ymm1 = simde_mm_cvtepi8_epi16(simde_mm_srli_si128(p_bnProcBuf[k*cnOffsetInGroup + i],8));
+                ymmRes1 = simde_mm_adds_epi16(ymmRes1, ymm1);
             }
 
             // Add LLR from receiver input
-            ymm0    = simde_mm256_cvtepi8_epi16(p_llrProcBuf[j]);
-            ymmRes0 = simde_mm256_adds_epi16(ymmRes0, ymm0);
+            ymm0    = simde_mm_cvtepi8_epi16(p_llrProcBuf[i]);
+            ymmRes0 = simde_mm_adds_epi16(ymmRes0, ymm0);
 
-            ymm1    = simde_mm256_cvtepi8_epi16(p_llrProcBuf[j+1]);
-            ymmRes1 = simde_mm256_adds_epi16(ymmRes1, ymm1);
+            ymm1    = simde_mm_cvtepi8_epi16(simde_mm_srli_si128(p_llrProcBuf[i],8));
+            ymmRes1 = simde_mm_adds_epi16(ymmRes1, ymm1);
 
             // Pack results back to epi8
-            ymm0 = simde_mm256_packs_epi16(ymmRes0, ymmRes1);
-            // ymm0     = [ymmRes1[255:128] ymmRes0[255:128] ymmRes1[127:0] ymmRes0[127:0]]
-            // p_llrRes = [ymmRes1[255:128] ymmRes1[127:0] ymmRes0[255:128] ymmRes0[127:0]]
-            *p_llrRes = simde_mm256_permute4x64_epi64(ymm0, 0xD8);
+            *p_llrRes = simde_mm_packs_epi16(ymmRes0, ymmRes1);
 
             // Next result
             p_llrRes++;
         }
+      }
     }
 
+#endif
 }
 
 /**
@@ -1685,10 +277,10 @@ static inline void nrLDPC_bnProc(t_nrLDPC_lut* p_lut, int8_t* bnProcBuf, int8_t*
     const uint32_t* lut_startAddrBnGroups = p_lut->startAddrBnGroups;
     const uint16_t* lut_startAddrBnGroupsLlr = p_lut->startAddrBnGroupsLlr;
 
-    __m256i* p_bnProcBuf;
-    __m256i* p_bnProcBufRes;
-    __m256i* p_llrRes;
-    __m256i* p_res;
+    simde__m256i* p_bnProcBuf;
+    simde__m256i* p_bnProcBufRes;
+    simde__m256i* p_llrRes;
+    simde__m256i* p_res;
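+    // plain __m256i is replaced by simde__m256i throughout so this code also builds on non-x86 targets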
 
     // Number of BNs in Groups
     uint32_t M;
@@ -1718,14 +310,14 @@ static inline void nrLDPC_bnProc(t_nrLDPC_lut* p_lut, int8_t* bnProcBuf, int8_t*
         cnOffsetInGroup = (lut_numBnInBnGroups[1]*NR_LDPC_ZMAX)>>5;
 
         // Set pointers to start of group 2
-        p_bnProcBuf    = (__m256i*) &bnProcBuf   [lut_startAddrBnGroups[idxBnGroup]];
-        p_bnProcBufRes = (__m256i*) &bnProcBufRes[lut_startAddrBnGroups[idxBnGroup]];
+        p_bnProcBuf    = (simde__m256i*) &bnProcBuf   [lut_startAddrBnGroups[idxBnGroup]];
+        p_bnProcBufRes = (simde__m256i*) &bnProcBufRes[lut_startAddrBnGroups[idxBnGroup]];
 
         // Loop over CNs
         for (k=0; k<2; k++)
         {
             p_res = &p_bnProcBufRes[k*cnOffsetInGroup];
-            p_llrRes = (__m256i*) &llrRes[lut_startAddrBnGroupsLlr[idxBnGroup]];
+            p_llrRes = (simde__m256i*) &llrRes[lut_startAddrBnGroupsLlr[idxBnGroup]];
 
             // Loop over BNs
             for (i=0; i<M; i++)
@@ -1753,14 +345,14 @@ static inline void nrLDPC_bnProc(t_nrLDPC_lut* p_lut, int8_t* bnProcBuf, int8_t*
         cnOffsetInGroup = (lut_numBnInBnGroups[2]*NR_LDPC_ZMAX)>>5;
 
         // Set pointers to start of group 3
-        p_bnProcBuf    = (__m256i*) &bnProcBuf   [lut_startAddrBnGroups[idxBnGroup]];
-        p_bnProcBufRes = (__m256i*) &bnProcBufRes[lut_startAddrBnGroups[idxBnGroup]];
+        p_bnProcBuf    = (simde__m256i*) &bnProcBuf   [lut_startAddrBnGroups[idxBnGroup]];
+        p_bnProcBufRes = (simde__m256i*) &bnProcBufRes[lut_startAddrBnGroups[idxBnGroup]];
 
         // Loop over CNs
         for (k=0; k<3; k++)
         {
             p_res = &p_bnProcBufRes[k*cnOffsetInGroup];
-            p_llrRes = (__m256i*) &llrRes[lut_startAddrBnGroupsLlr[idxBnGroup]];
+            p_llrRes = (simde__m256i*) &llrRes[lut_startAddrBnGroupsLlr[idxBnGroup]];
 
             // Loop over BNs
             for (i=0; i<M; i++)
@@ -1788,14 +380,14 @@ static inline void nrLDPC_bnProc(t_nrLDPC_lut* p_lut, int8_t* bnProcBuf, int8_t*
         cnOffsetInGroup = (lut_numBnInBnGroups[3]*NR_LDPC_ZMAX)>>5;
 
         // Set pointers to start of group 4
-        p_bnProcBuf    = (__m256i*) &bnProcBuf   [lut_startAddrBnGroups[idxBnGroup]];
-        p_bnProcBufRes = (__m256i*) &bnProcBufRes[lut_startAddrBnGroups[idxBnGroup]];
+        p_bnProcBuf    = (simde__m256i*) &bnProcBuf   [lut_startAddrBnGroups[idxBnGroup]];
+        p_bnProcBufRes = (simde__m256i*) &bnProcBufRes[lut_startAddrBnGroups[idxBnGroup]];
 
         // Loop over CNs
         for (k=0; k<4; k++)
         {
             p_res = &p_bnProcBufRes[k*cnOffsetInGroup];
-            p_llrRes = (__m256i*) &llrRes[lut_startAddrBnGroupsLlr[idxBnGroup]];
+            p_llrRes = (simde__m256i*) &llrRes[lut_startAddrBnGroupsLlr[idxBnGroup]];
 
             // Loop over BNs
             for (i=0; i<M; i++)
@@ -1823,14 +415,14 @@ static inline void nrLDPC_bnProc(t_nrLDPC_lut* p_lut, int8_t* bnProcBuf, int8_t*
         cnOffsetInGroup = (lut_numBnInBnGroups[4]*NR_LDPC_ZMAX)>>5;
 
         // Set pointers to start of group 5
-        p_bnProcBuf    = (__m256i*) &bnProcBuf   [lut_startAddrBnGroups[idxBnGroup]];
-        p_bnProcBufRes = (__m256i*) &bnProcBufRes[lut_startAddrBnGroups[idxBnGroup]];
+        p_bnProcBuf    = (simde__m256i*) &bnProcBuf   [lut_startAddrBnGroups[idxBnGroup]];
+        p_bnProcBufRes = (simde__m256i*) &bnProcBufRes[lut_startAddrBnGroups[idxBnGroup]];
 
         // Loop over CNs
         for (k=0; k<5; k++)
         {
             p_res = &p_bnProcBufRes[k*cnOffsetInGroup];
-            p_llrRes = (__m256i*) &llrRes [lut_startAddrBnGroupsLlr[idxBnGroup]];
+            p_llrRes = (simde__m256i*) &llrRes [lut_startAddrBnGroupsLlr[idxBnGroup]];
 
             // Loop over BNs
             for (i=0; i<M; i++)
@@ -1858,14 +450,14 @@ static inline void nrLDPC_bnProc(t_nrLDPC_lut* p_lut, int8_t* bnProcBuf, int8_t*
         cnOffsetInGroup = (lut_numBnInBnGroups[5]*NR_LDPC_ZMAX)>>5;
 
         // Set pointers to start of group 6
-        p_bnProcBuf    = (__m256i*) &bnProcBuf   [lut_startAddrBnGroups[idxBnGroup]];
-        p_bnProcBufRes = (__m256i*) &bnProcBufRes[lut_startAddrBnGroups[idxBnGroup]];
+        p_bnProcBuf    = (simde__m256i*) &bnProcBuf   [lut_startAddrBnGroups[idxBnGroup]];
+        p_bnProcBufRes = (simde__m256i*) &bnProcBufRes[lut_startAddrBnGroups[idxBnGroup]];
 
         // Loop over CNs
         for (k=0; k<6; k++)
         {
             p_res = &p_bnProcBufRes[k*cnOffsetInGroup];
-            p_llrRes = (__m256i*) &llrRes[lut_startAddrBnGroupsLlr[idxBnGroup]];
+            p_llrRes = (simde__m256i*) &llrRes[lut_startAddrBnGroupsLlr[idxBnGroup]];
 
             // Loop over BNs
             for (i=0; i<M; i++)
@@ -1893,14 +485,14 @@ static inline void nrLDPC_bnProc(t_nrLDPC_lut* p_lut, int8_t* bnProcBuf, int8_t*
         cnOffsetInGroup = (lut_numBnInBnGroups[6]*NR_LDPC_ZMAX)>>5;
 
         // Set pointers to start of group 7
-        p_bnProcBuf    = (__m256i*) &bnProcBuf   [lut_startAddrBnGroups[idxBnGroup]];
-        p_bnProcBufRes = (__m256i*) &bnProcBufRes[lut_startAddrBnGroups[idxBnGroup]];
+        p_bnProcBuf    = (simde__m256i*) &bnProcBuf   [lut_startAddrBnGroups[idxBnGroup]];
+        p_bnProcBufRes = (simde__m256i*) &bnProcBufRes[lut_startAddrBnGroups[idxBnGroup]];
 
         // Loop over CNs
         for (k=0; k<7; k++)
         {
             p_res = &p_bnProcBufRes[k*cnOffsetInGroup];
-            p_llrRes = (__m256i*) &llrRes [lut_startAddrBnGroupsLlr[idxBnGroup]];
+            p_llrRes = (simde__m256i*) &llrRes [lut_startAddrBnGroupsLlr[idxBnGroup]];
 
             // Loop over BNs
             for (i=0; i<M; i++)
@@ -1928,14 +520,14 @@ static inline void nrLDPC_bnProc(t_nrLDPC_lut* p_lut, int8_t* bnProcBuf, int8_t*
         cnOffsetInGroup = (lut_numBnInBnGroups[7]*NR_LDPC_ZMAX)>>5;
 
         // Set pointers to start of group 8
-        p_bnProcBuf    = (__m256i*) &bnProcBuf   [lut_startAddrBnGroups[idxBnGroup]];
-        p_bnProcBufRes = (__m256i*) &bnProcBufRes[lut_startAddrBnGroups[idxBnGroup]];
+        p_bnProcBuf    = (simde__m256i*) &bnProcBuf   [lut_startAddrBnGroups[idxBnGroup]];
+        p_bnProcBufRes = (simde__m256i*) &bnProcBufRes[lut_startAddrBnGroups[idxBnGroup]];
 
         // Loop over CNs
         for (k=0; k<8; k++)
         {
             p_res = &p_bnProcBufRes[k*cnOffsetInGroup];
-            p_llrRes = (__m256i*) &llrRes [lut_startAddrBnGroupsLlr[idxBnGroup]];
+            p_llrRes = (simde__m256i*) &llrRes [lut_startAddrBnGroupsLlr[idxBnGroup]];
 
             // Loop over BNs
             for (i=0; i<M; i++)
@@ -1963,14 +555,14 @@ static inline void nrLDPC_bnProc(t_nrLDPC_lut* p_lut, int8_t* bnProcBuf, int8_t*
         cnOffsetInGroup = (lut_numBnInBnGroups[8]*NR_LDPC_ZMAX)>>5;
 
         // Set pointers to start of group 9
-        p_bnProcBuf    = (__m256i*) &bnProcBuf   [lut_startAddrBnGroups[idxBnGroup]];
-        p_bnProcBufRes = (__m256i*) &bnProcBufRes[lut_startAddrBnGroups[idxBnGroup]];
+        p_bnProcBuf    = (simde__m256i*) &bnProcBuf   [lut_startAddrBnGroups[idxBnGroup]];
+        p_bnProcBufRes = (simde__m256i*) &bnProcBufRes[lut_startAddrBnGroups[idxBnGroup]];
 
         // Loop over CNs
         for (k=0; k<9; k++)
         {
             p_res = &p_bnProcBufRes[k*cnOffsetInGroup];
-            p_llrRes = (__m256i*) &llrRes [lut_startAddrBnGroupsLlr[idxBnGroup]];
+            p_llrRes = (simde__m256i*) &llrRes [lut_startAddrBnGroupsLlr[idxBnGroup]];
 
             // Loop over BNs
             for (i=0; i<M; i++)
@@ -1998,14 +590,14 @@ static inline void nrLDPC_bnProc(t_nrLDPC_lut* p_lut, int8_t* bnProcBuf, int8_t*
         cnOffsetInGroup = (lut_numBnInBnGroups[9]*NR_LDPC_ZMAX)>>5;
 
         // Set pointers to start of group 10
-        p_bnProcBuf    = (__m256i*) &bnProcBuf   [lut_startAddrBnGroups[idxBnGroup]];
-        p_bnProcBufRes = (__m256i*) &bnProcBufRes[lut_startAddrBnGroups[idxBnGroup]];
+        p_bnProcBuf    = (simde__m256i*) &bnProcBuf   [lut_startAddrBnGroups[idxBnGroup]];
+        p_bnProcBufRes = (simde__m256i*) &bnProcBufRes[lut_startAddrBnGroups[idxBnGroup]];
 
         // Loop over CNs
         for (k=0; k<10; k++)
         {
             p_res = &p_bnProcBufRes[k*cnOffsetInGroup];
-            p_llrRes = (__m256i*) &llrRes [lut_startAddrBnGroupsLlr[idxBnGroup]];
+            p_llrRes = (simde__m256i*) &llrRes [lut_startAddrBnGroupsLlr[idxBnGroup]];
 
             // Loop over BNs
             for (i=0; i<M; i++)
@@ -2033,14 +625,14 @@ static inline void nrLDPC_bnProc(t_nrLDPC_lut* p_lut, int8_t* bnProcBuf, int8_t*
         cnOffsetInGroup = (lut_numBnInBnGroups[10]*NR_LDPC_ZMAX)>>5;
 
         // Set pointers to start of group 10
-        p_bnProcBuf    = (__m256i*) &bnProcBuf   [lut_startAddrBnGroups[idxBnGroup]];
-        p_bnProcBufRes = (__m256i*) &bnProcBufRes[lut_startAddrBnGroups[idxBnGroup]];
+        p_bnProcBuf    = (simde__m256i*) &bnProcBuf   [lut_startAddrBnGroups[idxBnGroup]];
+        p_bnProcBufRes = (simde__m256i*) &bnProcBufRes[lut_startAddrBnGroups[idxBnGroup]];
 
         // Loop over CNs
         for (k=0; k<11; k++)
         {
             p_res = &p_bnProcBufRes[k*cnOffsetInGroup];
-            p_llrRes = (__m256i*) &llrRes [lut_startAddrBnGroupsLlr[idxBnGroup]];
+            p_llrRes = (simde__m256i*) &llrRes [lut_startAddrBnGroupsLlr[idxBnGroup]];
 
             // Loop over BNs
             for (i=0; i<M; i++)
@@ -2068,14 +660,14 @@ static inline void nrLDPC_bnProc(t_nrLDPC_lut* p_lut, int8_t* bnProcBuf, int8_t*
         cnOffsetInGroup = (lut_numBnInBnGroups[11]*NR_LDPC_ZMAX)>>5;
 
         // Set pointers to start of group 12
-        p_bnProcBuf    = (__m256i*) &bnProcBuf   [lut_startAddrBnGroups[idxBnGroup]];
-        p_bnProcBufRes = (__m256i*) &bnProcBufRes[lut_startAddrBnGroups[idxBnGroup]];
+        p_bnProcBuf    = (simde__m256i*) &bnProcBuf   [lut_startAddrBnGroups[idxBnGroup]];
+        p_bnProcBufRes = (simde__m256i*) &bnProcBufRes[lut_startAddrBnGroups[idxBnGroup]];
 
         // Loop over CNs
         for (k=0; k<12; k++)
         {
             p_res = &p_bnProcBufRes[k*cnOffsetInGroup];
-            p_llrRes = (__m256i*) &llrRes [lut_startAddrBnGroupsLlr[idxBnGroup]];
+            p_llrRes = (simde__m256i*) &llrRes [lut_startAddrBnGroupsLlr[idxBnGroup]];
 
         // Loop over BNs
             for (i=0; i<M; i++)
@@ -2103,14 +695,14 @@ static inline void nrLDPC_bnProc(t_nrLDPC_lut* p_lut, int8_t* bnProcBuf, int8_t*
         cnOffsetInGroup = (lut_numBnInBnGroups[12]*NR_LDPC_ZMAX)>>5;
 
         // Set pointers to start of group 13
-        p_bnProcBuf    = (__m256i*) &bnProcBuf   [lut_startAddrBnGroups[idxBnGroup]];
-        p_bnProcBufRes = (__m256i*) &bnProcBufRes[lut_startAddrBnGroups[idxBnGroup]];
+        p_bnProcBuf    = (simde__m256i*) &bnProcBuf   [lut_startAddrBnGroups[idxBnGroup]];
+        p_bnProcBufRes = (simde__m256i*) &bnProcBufRes[lut_startAddrBnGroups[idxBnGroup]];
 
         // Loop over CNs
         for (k=0; k<13; k++)
         {
             p_res = &p_bnProcBufRes[k*cnOffsetInGroup];
-            p_llrRes = (__m256i*) &llrRes [lut_startAddrBnGroupsLlr[idxBnGroup]];
+            p_llrRes = (simde__m256i*) &llrRes [lut_startAddrBnGroupsLlr[idxBnGroup]];
 
             // Loop over BNs
             for (i=0; i<M; i++)
@@ -2138,14 +730,14 @@ static inline void nrLDPC_bnProc(t_nrLDPC_lut* p_lut, int8_t* bnProcBuf, int8_t*
         cnOffsetInGroup = (lut_numBnInBnGroups[13]*NR_LDPC_ZMAX)>>5;
 
         // Set pointers to start of group 14
-        p_bnProcBuf    = (__m256i*) &bnProcBuf   [lut_startAddrBnGroups[idxBnGroup]];
-        p_bnProcBufRes = (__m256i*) &bnProcBufRes[lut_startAddrBnGroups[idxBnGroup]];
+        p_bnProcBuf    = (simde__m256i*) &bnProcBuf   [lut_startAddrBnGroups[idxBnGroup]];
+        p_bnProcBufRes = (simde__m256i*) &bnProcBufRes[lut_startAddrBnGroups[idxBnGroup]];
 
         // Loop over CNs
         for (k=0; k<14; k++)
         {
             p_res = &p_bnProcBufRes[k*cnOffsetInGroup];
-            p_llrRes = (__m256i*) &llrRes [lut_startAddrBnGroupsLlr[idxBnGroup]];
+            p_llrRes = (simde__m256i*) &llrRes [lut_startAddrBnGroupsLlr[idxBnGroup]];
 
             // Loop over BNs
             for (i=0; i<M; i++)
@@ -2173,14 +765,14 @@ static inline void nrLDPC_bnProc(t_nrLDPC_lut* p_lut, int8_t* bnProcBuf, int8_t*
         cnOffsetInGroup = (lut_numBnInBnGroups[14]*NR_LDPC_ZMAX)>>5;
 
         // Set pointers to start of group 15
-        p_bnProcBuf    = (__m256i*) &bnProcBuf   [lut_startAddrBnGroups[idxBnGroup]];
-        p_bnProcBufRes = (__m256i*) &bnProcBufRes[lut_startAddrBnGroups[idxBnGroup]];
+        p_bnProcBuf    = (simde__m256i*) &bnProcBuf   [lut_startAddrBnGroups[idxBnGroup]];
+        p_bnProcBufRes = (simde__m256i*) &bnProcBufRes[lut_startAddrBnGroups[idxBnGroup]];
 
         // Loop over CNs
         for (k=0; k<15; k++)
         {
             p_res = &p_bnProcBufRes[k*cnOffsetInGroup];
-            p_llrRes = (__m256i*) &llrRes [lut_startAddrBnGroupsLlr[idxBnGroup]];
+            p_llrRes = (simde__m256i*) &llrRes [lut_startAddrBnGroupsLlr[idxBnGroup]];
 
             // Loop over BNs
             for (i=0; i<M; i++)
@@ -2208,14 +800,14 @@ static inline void nrLDPC_bnProc(t_nrLDPC_lut* p_lut, int8_t* bnProcBuf, int8_t*
         cnOffsetInGroup = (lut_numBnInBnGroups[15]*NR_LDPC_ZMAX)>>5;
 
         // Set pointers to start of group 16
-        p_bnProcBuf    = (__m256i*) &bnProcBuf   [lut_startAddrBnGroups[idxBnGroup]];
-        p_bnProcBufRes = (__m256i*) &bnProcBufRes[lut_startAddrBnGroups[idxBnGroup]];
+        p_bnProcBuf    = (simde__m256i*) &bnProcBuf   [lut_startAddrBnGroups[idxBnGroup]];
+        p_bnProcBufRes = (simde__m256i*) &bnProcBufRes[lut_startAddrBnGroups[idxBnGroup]];
 
         // Loop over CNs
         for (k=0; k<16; k++)
         {
             p_res = &p_bnProcBufRes[k*cnOffsetInGroup];
-            p_llrRes = (__m256i*) &llrRes [lut_startAddrBnGroupsLlr[idxBnGroup]];
+            p_llrRes = (simde__m256i*) &llrRes [lut_startAddrBnGroupsLlr[idxBnGroup]];
 
             // Loop over BNs
             for (i=0; i<M; i++)
@@ -2243,14 +835,14 @@ static inline void nrLDPC_bnProc(t_nrLDPC_lut* p_lut, int8_t* bnProcBuf, int8_t*
         cnOffsetInGroup = (lut_numBnInBnGroups[16]*NR_LDPC_ZMAX)>>5;
 
         // Set pointers to start of group 17
-        p_bnProcBuf    = (__m256i*) &bnProcBuf   [lut_startAddrBnGroups[idxBnGroup]];
-        p_bnProcBufRes = (__m256i*) &bnProcBufRes[lut_startAddrBnGroups[idxBnGroup]];
+        p_bnProcBuf    = (simde__m256i*) &bnProcBuf   [lut_startAddrBnGroups[idxBnGroup]];
+        p_bnProcBufRes = (simde__m256i*) &bnProcBufRes[lut_startAddrBnGroups[idxBnGroup]];
 
         // Loop over CNs
         for (k=0; k<17; k++)
         {
             p_res = &p_bnProcBufRes[k*cnOffsetInGroup];
-            p_llrRes = (__m256i*) &llrRes [lut_startAddrBnGroupsLlr[idxBnGroup]];
+            p_llrRes = (simde__m256i*) &llrRes [lut_startAddrBnGroupsLlr[idxBnGroup]];
 
             // Loop over BNs
             for (i=0; i<M; i++)
@@ -2278,14 +870,14 @@ static inline void nrLDPC_bnProc(t_nrLDPC_lut* p_lut, int8_t* bnProcBuf, int8_t*
         cnOffsetInGroup = (lut_numBnInBnGroups[17]*NR_LDPC_ZMAX)>>5;
 
         // Set pointers to start of group 18
-        p_bnProcBuf    = (__m256i*) &bnProcBuf   [lut_startAddrBnGroups[idxBnGroup]];
-        p_bnProcBufRes = (__m256i*) &bnProcBufRes[lut_startAddrBnGroups[idxBnGroup]];
+        p_bnProcBuf    = (simde__m256i*) &bnProcBuf   [lut_startAddrBnGroups[idxBnGroup]];
+        p_bnProcBufRes = (simde__m256i*) &bnProcBufRes[lut_startAddrBnGroups[idxBnGroup]];
 
         // Loop over CNs
         for (k=0; k<18; k++)
         {
             p_res = &p_bnProcBufRes[k*cnOffsetInGroup];
-            p_llrRes = (__m256i*) &llrRes [lut_startAddrBnGroupsLlr[idxBnGroup]];
+            p_llrRes = (simde__m256i*) &llrRes [lut_startAddrBnGroupsLlr[idxBnGroup]];
 
             // Loop over BNs
             for (i=0; i<M; i++)
@@ -2313,14 +905,14 @@ static inline void nrLDPC_bnProc(t_nrLDPC_lut* p_lut, int8_t* bnProcBuf, int8_t*
         cnOffsetInGroup = (lut_numBnInBnGroups[18]*NR_LDPC_ZMAX)>>5;
 
         // Set pointers to start of group 19
-        p_bnProcBuf    = (__m256i*) &bnProcBuf   [lut_startAddrBnGroups[idxBnGroup]];
-        p_bnProcBufRes = (__m256i*) &bnProcBufRes[lut_startAddrBnGroups[idxBnGroup]];
+        p_bnProcBuf    = (simde__m256i*) &bnProcBuf   [lut_startAddrBnGroups[idxBnGroup]];
+        p_bnProcBufRes = (simde__m256i*) &bnProcBufRes[lut_startAddrBnGroups[idxBnGroup]];
 
         // Loop over CNs
         for (k=0; k<19; k++)
         {
             p_res = &p_bnProcBufRes[k*cnOffsetInGroup];
-            p_llrRes = (__m256i*) &llrRes [lut_startAddrBnGroupsLlr[idxBnGroup]];
+            p_llrRes = (simde__m256i*) &llrRes [lut_startAddrBnGroupsLlr[idxBnGroup]];
 
             // Loop over BNs
             for (i=0; i<M; i++)
@@ -2348,14 +940,14 @@ static inline void nrLDPC_bnProc(t_nrLDPC_lut* p_lut, int8_t* bnProcBuf, int8_t*
         cnOffsetInGroup = (lut_numBnInBnGroups[19]*NR_LDPC_ZMAX)>>5;
 
         // Set pointers to start of group 20
-        p_bnProcBuf    = (__m256i*) &bnProcBuf   [lut_startAddrBnGroups[idxBnGroup]];
-        p_bnProcBufRes = (__m256i*) &bnProcBufRes[lut_startAddrBnGroups[idxBnGroup]];
+        p_bnProcBuf    = (simde__m256i*) &bnProcBuf   [lut_startAddrBnGroups[idxBnGroup]];
+        p_bnProcBufRes = (simde__m256i*) &bnProcBufRes[lut_startAddrBnGroups[idxBnGroup]];
 
         // Loop over CNs
         for (k=0; k<20; k++)
         {
             p_res = &p_bnProcBufRes[k*cnOffsetInGroup];
-            p_llrRes = (__m256i*) &llrRes [lut_startAddrBnGroupsLlr[idxBnGroup]];
+            p_llrRes = (simde__m256i*) &llrRes [lut_startAddrBnGroupsLlr[idxBnGroup]];
 
             // Loop over BNs
             for (i=0; i<M; i++)
@@ -2383,14 +975,14 @@ static inline void nrLDPC_bnProc(t_nrLDPC_lut* p_lut, int8_t* bnProcBuf, int8_t*
         cnOffsetInGroup = (lut_numBnInBnGroups[20]*NR_LDPC_ZMAX)>>5;
 
         // Set pointers to start of group 21
-        p_bnProcBuf    = (__m256i*) &bnProcBuf   [lut_startAddrBnGroups[idxBnGroup]];
-        p_bnProcBufRes = (__m256i*) &bnProcBufRes[lut_startAddrBnGroups[idxBnGroup]];
+        p_bnProcBuf    = (simde__m256i*) &bnProcBuf   [lut_startAddrBnGroups[idxBnGroup]];
+        p_bnProcBufRes = (simde__m256i*) &bnProcBufRes[lut_startAddrBnGroups[idxBnGroup]];
 
         // Loop over CNs
         for (k=0; k<21; k++)
         {
             p_res = &p_bnProcBufRes[k*cnOffsetInGroup];
-            p_llrRes = (__m256i*) &llrRes [lut_startAddrBnGroupsLlr[idxBnGroup]];
+            p_llrRes = (simde__m256i*) &llrRes [lut_startAddrBnGroupsLlr[idxBnGroup]];
 
             // Loop over BNs
             for (i=0; i<M; i++)
@@ -2418,14 +1010,14 @@ static inline void nrLDPC_bnProc(t_nrLDPC_lut* p_lut, int8_t* bnProcBuf, int8_t*
         cnOffsetInGroup = (lut_numBnInBnGroups[21]*NR_LDPC_ZMAX)>>5;
 
         // Set pointers to start of group 22
-        p_bnProcBuf    = (__m256i*) &bnProcBuf   [lut_startAddrBnGroups[idxBnGroup]];
-        p_bnProcBufRes = (__m256i*) &bnProcBufRes[lut_startAddrBnGroups[idxBnGroup]];
+        p_bnProcBuf    = (simde__m256i*) &bnProcBuf   [lut_startAddrBnGroups[idxBnGroup]];
+        p_bnProcBufRes = (simde__m256i*) &bnProcBufRes[lut_startAddrBnGroups[idxBnGroup]];
 
         // Loop over CNs
         for (k=0; k<22; k++)
         {
             p_res = &p_bnProcBufRes[k*cnOffsetInGroup];
-            p_llrRes = (__m256i*) &llrRes [lut_startAddrBnGroupsLlr[idxBnGroup]];
+            p_llrRes = (simde__m256i*) &llrRes [lut_startAddrBnGroupsLlr[idxBnGroup]];
 
             // Loop over BNs
             for (i=0; i<M; i++)
@@ -2453,14 +1045,14 @@ static inline void nrLDPC_bnProc(t_nrLDPC_lut* p_lut, int8_t* bnProcBuf, int8_t*
         cnOffsetInGroup = (lut_numBnInBnGroups[22]*NR_LDPC_ZMAX)>>5;
 
         // Set pointers to start of group 23
-        p_bnProcBuf    = (__m256i*) &bnProcBuf   [lut_startAddrBnGroups[idxBnGroup]];
-        p_bnProcBufRes = (__m256i*) &bnProcBufRes[lut_startAddrBnGroups[idxBnGroup]];
+        p_bnProcBuf    = (simde__m256i*) &bnProcBuf   [lut_startAddrBnGroups[idxBnGroup]];
+        p_bnProcBufRes = (simde__m256i*) &bnProcBufRes[lut_startAddrBnGroups[idxBnGroup]];
 
         // Loop over CNs
         for (k=0; k<23; k++)
         {
             p_res = &p_bnProcBufRes[k*cnOffsetInGroup];
-            p_llrRes = (__m256i*) &llrRes [lut_startAddrBnGroupsLlr[idxBnGroup]];
+            p_llrRes = (simde__m256i*) &llrRes [lut_startAddrBnGroupsLlr[idxBnGroup]];
 
             // Loop over BNs
             for (i=0; i<M; i++)
@@ -2488,14 +1080,14 @@ static inline void nrLDPC_bnProc(t_nrLDPC_lut* p_lut, int8_t* bnProcBuf, int8_t*
         cnOffsetInGroup = (lut_numBnInBnGroups[23]*NR_LDPC_ZMAX)>>5;
 
         // Set pointers to start of group 24
-        p_bnProcBuf    = (__m256i*) &bnProcBuf   [lut_startAddrBnGroups[idxBnGroup]];
-        p_bnProcBufRes = (__m256i*) &bnProcBufRes[lut_startAddrBnGroups[idxBnGroup]];
+        p_bnProcBuf    = (simde__m256i*) &bnProcBuf   [lut_startAddrBnGroups[idxBnGroup]];
+        p_bnProcBufRes = (simde__m256i*) &bnProcBufRes[lut_startAddrBnGroups[idxBnGroup]];
 
         // Loop over CNs
         for (k=0; k<24; k++)
         {
             p_res = &p_bnProcBufRes[k*cnOffsetInGroup];
-            p_llrRes = (__m256i*) &llrRes [lut_startAddrBnGroupsLlr[idxBnGroup]];
+            p_llrRes = (simde__m256i*) &llrRes [lut_startAddrBnGroupsLlr[idxBnGroup]];
 
             // Loop over BNs
             for (i=0; i<M; i++)
@@ -2523,14 +1115,14 @@ static inline void nrLDPC_bnProc(t_nrLDPC_lut* p_lut, int8_t* bnProcBuf, int8_t*
         cnOffsetInGroup = (lut_numBnInBnGroups[24]*NR_LDPC_ZMAX)>>5;
 
         // Set pointers to start of group 25
-        p_bnProcBuf    = (__m256i*) &bnProcBuf   [lut_startAddrBnGroups[idxBnGroup]];
-        p_bnProcBufRes = (__m256i*) &bnProcBufRes[lut_startAddrBnGroups[idxBnGroup]];
+        p_bnProcBuf    = (simde__m256i*) &bnProcBuf   [lut_startAddrBnGroups[idxBnGroup]];
+        p_bnProcBufRes = (simde__m256i*) &bnProcBufRes[lut_startAddrBnGroups[idxBnGroup]];
 
         // Loop over CNs
         for (k=0; k<25; k++)
         {
             p_res = &p_bnProcBufRes[k*cnOffsetInGroup];
-            p_llrRes = (__m256i*) &llrRes [lut_startAddrBnGroupsLlr[idxBnGroup]];
+            p_llrRes = (simde__m256i*) &llrRes [lut_startAddrBnGroupsLlr[idxBnGroup]];
 
             // Loop over BNs
             for (i=0; i<M; i++)
@@ -2558,14 +1150,14 @@ static inline void nrLDPC_bnProc(t_nrLDPC_lut* p_lut, int8_t* bnProcBuf, int8_t*
         cnOffsetInGroup = (lut_numBnInBnGroups[25]*NR_LDPC_ZMAX)>>5;
 
         // Set pointers to start of group 26
-        p_bnProcBuf    = (__m256i*) &bnProcBuf   [lut_startAddrBnGroups[idxBnGroup]];
-        p_bnProcBufRes = (__m256i*) &bnProcBufRes[lut_startAddrBnGroups[idxBnGroup]];
+        p_bnProcBuf    = (simde__m256i*) &bnProcBuf   [lut_startAddrBnGroups[idxBnGroup]];
+        p_bnProcBufRes = (simde__m256i*) &bnProcBufRes[lut_startAddrBnGroups[idxBnGroup]];
 
         // Loop over CNs
         for (k=0; k<26; k++)
         {
             p_res = &p_bnProcBufRes[k*cnOffsetInGroup];
-            p_llrRes = (__m256i*) &llrRes [lut_startAddrBnGroupsLlr[idxBnGroup]];
+            p_llrRes = (simde__m256i*) &llrRes [lut_startAddrBnGroupsLlr[idxBnGroup]];
 
             // Loop over BNs
             for (i=0; i<M; i++)
@@ -2593,14 +1185,14 @@ static inline void nrLDPC_bnProc(t_nrLDPC_lut* p_lut, int8_t* bnProcBuf, int8_t*
         cnOffsetInGroup = (lut_numBnInBnGroups[26]*NR_LDPC_ZMAX)>>5;
 
         // Set pointers to start of group 27
-        p_bnProcBuf    = (__m256i*) &bnProcBuf   [lut_startAddrBnGroups[idxBnGroup]];
-        p_bnProcBufRes = (__m256i*) &bnProcBufRes[lut_startAddrBnGroups[idxBnGroup]];
+        p_bnProcBuf    = (simde__m256i*) &bnProcBuf   [lut_startAddrBnGroups[idxBnGroup]];
+        p_bnProcBufRes = (simde__m256i*) &bnProcBufRes[lut_startAddrBnGroups[idxBnGroup]];
 
         // Loop over CNs
         for (k=0; k<27; k++)
         {
             p_res = &p_bnProcBufRes[k*cnOffsetInGroup];
-            p_llrRes = (__m256i*) &llrRes [lut_startAddrBnGroupsLlr[idxBnGroup]];
+            p_llrRes = (simde__m256i*) &llrRes [lut_startAddrBnGroupsLlr[idxBnGroup]];
 
             // Loop over BNs
             for (i=0; i<M; i++)
@@ -2628,14 +1220,14 @@ static inline void nrLDPC_bnProc(t_nrLDPC_lut* p_lut, int8_t* bnProcBuf, int8_t*
         cnOffsetInGroup = (lut_numBnInBnGroups[27]*NR_LDPC_ZMAX)>>5;
 
         // Set pointers to start of group 28
-        p_bnProcBuf    = (__m256i*) &bnProcBuf   [lut_startAddrBnGroups[idxBnGroup]];
-        p_bnProcBufRes = (__m256i*) &bnProcBufRes[lut_startAddrBnGroups[idxBnGroup]];
+        p_bnProcBuf    = (simde__m256i*) &bnProcBuf   [lut_startAddrBnGroups[idxBnGroup]];
+        p_bnProcBufRes = (simde__m256i*) &bnProcBufRes[lut_startAddrBnGroups[idxBnGroup]];
 
         // Loop over CNs
         for (k=0; k<28; k++)
         {
             p_res = &p_bnProcBufRes[k*cnOffsetInGroup];
-            p_llrRes = (__m256i*) &llrRes [lut_startAddrBnGroupsLlr[idxBnGroup]];
+            p_llrRes = (simde__m256i*) &llrRes [lut_startAddrBnGroupsLlr[idxBnGroup]];
 
             // Loop over BNs
             for (i=0; i<M; i++)
@@ -2663,14 +1255,14 @@ static inline void nrLDPC_bnProc(t_nrLDPC_lut* p_lut, int8_t* bnProcBuf, int8_t*
         cnOffsetInGroup = (lut_numBnInBnGroups[28]*NR_LDPC_ZMAX)>>5;
 
         // Set pointers to start of group 29
-        p_bnProcBuf    = (__m256i*) &bnProcBuf   [lut_startAddrBnGroups[idxBnGroup]];
-        p_bnProcBufRes = (__m256i*) &bnProcBufRes[lut_startAddrBnGroups[idxBnGroup]];
+        p_bnProcBuf    = (simde__m256i*) &bnProcBuf   [lut_startAddrBnGroups[idxBnGroup]];
+        p_bnProcBufRes = (simde__m256i*) &bnProcBufRes[lut_startAddrBnGroups[idxBnGroup]];
 
         // Loop over CNs
         for (k=0; k<29; k++)
         {
             p_res = &p_bnProcBufRes[k*cnOffsetInGroup];
-            p_llrRes = (__m256i*) &llrRes [lut_startAddrBnGroupsLlr[idxBnGroup]];
+            p_llrRes = (simde__m256i*) &llrRes [lut_startAddrBnGroupsLlr[idxBnGroup]];
 
             // Loop over BNs
             for (i=0; i<M; i++)
@@ -2698,14 +1290,14 @@ static inline void nrLDPC_bnProc(t_nrLDPC_lut* p_lut, int8_t* bnProcBuf, int8_t*
         cnOffsetInGroup = (lut_numBnInBnGroups[29]*NR_LDPC_ZMAX)>>5;
 
         // Set pointers to start of group 30
-        p_bnProcBuf    = (__m256i*) &bnProcBuf   [lut_startAddrBnGroups[idxBnGroup]];
-        p_bnProcBufRes = (__m256i*) &bnProcBufRes[lut_startAddrBnGroups[idxBnGroup]];
+        p_bnProcBuf    = (simde__m256i*) &bnProcBuf   [lut_startAddrBnGroups[idxBnGroup]];
+        p_bnProcBufRes = (simde__m256i*) &bnProcBufRes[lut_startAddrBnGroups[idxBnGroup]];
 
         // Loop over CNs
         for (k=0; k<30; k++)
         {
             p_res = &p_bnProcBufRes[k*cnOffsetInGroup];
-            p_llrRes = (__m256i*) &llrRes [lut_startAddrBnGroupsLlr[idxBnGroup]];
+            p_llrRes = (simde__m256i*) &llrRes [lut_startAddrBnGroupsLlr[idxBnGroup]];
 
             // Loop over BNs
             for (i=0; i<M; i++)
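
The hunks above repeat one pattern per BN group, with only the group size changing. Below is a minimal sketch of that pattern, assuming the usual min-sum extrinsic update (aggregated LLR minus the incoming CN message, with saturating arithmetic); the function and parameter names are illustrative, not from the source.

```c
#include <stdint.h>
#include <simde/x86/avx2.h>

/* Sketch only: per-group BN update as migrated by the hunks above. */
static void bn_group_update_sketch(int8_t* bnProcBuf, int8_t* bnProcBufRes,
                                   int8_t* llrRes, uint32_t startAddr,
                                   uint32_t startAddrLlr, uint32_t numCn,
                                   uint32_t cnOffsetInGroup, uint32_t M)
{
  simde__m256i* p_bnProcBuf    = (simde__m256i*)&bnProcBuf[startAddr];
  simde__m256i* p_bnProcBufRes = (simde__m256i*)&bnProcBufRes[startAddr];

  for (uint32_t k = 0; k < numCn; k++) { // loop over CNs in the group
    simde__m256i* p_res    = &p_bnProcBufRes[k * cnOffsetInGroup];
    simde__m256i* p_llrRes = (simde__m256i*)&llrRes[startAddrLlr];
    for (uint32_t i = 0; i < M; i++)     // 32 int8 LLRs per vector
      p_res[i] = simde_mm256_subs_epi8(p_llrRes[i], p_bnProcBuf[k * cnOffsetInGroup + i]);
  }
}
```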
@@ -2728,13 +1320,16 @@ static inline void nrLDPC_bnProc(t_nrLDPC_lut* p_lut, int8_t* bnProcBuf, int8_t*
 */
 static inline void nrLDPC_llr2bit(int8_t* out, int8_t* llrOut, uint16_t numLLR)
 {
-  __m256i* p_llrOut = (__m256i*)llrOut;
-  __m256i* p_out = (__m256i*)out;
-  const int M = numLLR >> 5;
-  const int Mr = numLLR & 31;
+  simde__m256i* p_llrOut = (simde__m256i*)llrOut;
+  simde__m256i* p_out = (simde__m256i*)out;
+  int8_t* p_llrOut8;
+  int8_t* p_out8;
+  uint32_t i;
+  uint32_t M = numLLR >> 5;
+  uint32_t Mr = numLLR & 31;
 
-  const __m256i* p_zeros = (__m256i*)zeros256_epi8;
-  const __m256i* p_ones = (__m256i*)ones256_epi8;
+  const simde__m256i* p_zeros = (simde__m256i*)zeros256_epi8;
+  const simde__m256i* p_ones = (simde__m256i*)ones256_epi8;
 
   for (int i = 0; i < M; i++) {
     *p_out++ = simde_mm256_and_si256(*p_ones, simde_mm256_cmpgt_epi8(*p_zeros, *p_llrOut));
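
The line above is the whole hard decision: simde_mm256_cmpgt_epi8 yields 0xFF wherever 0 > LLR, and the AND with the ones constant turns that into bit value 1. A self-contained demo of the same operation, assuming zeros256_epi8/ones256_epi8 are the all-0x00 and all-0x01 byte constants:

```c
#include <stdint.h>
#include <simde/x86/avx2.h>

/* Map 32 LLRs to 32 hard bits: bit = 1 iff llr < 0. */
static void llr2bit_demo(const int8_t llr[32], int8_t bit[32])
{
  const simde__m256i zeros = simde_mm256_setzero_si256();
  const simde__m256i ones  = simde_mm256_set1_epi8(1);
  simde__m256i v = simde_mm256_loadu_si256((const simde__m256i*)llr);
  /* cmpgt gives 0xFF where 0 > llr; AND with 0x01 yields bit value 1 */
  simde__m256i b = simde_mm256_and_si256(ones, simde_mm256_cmpgt_epi8(zeros, v));
  simde_mm256_storeu_si256((simde__m256i*)bit, b);
}
```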
@@ -2764,7 +1359,7 @@ static inline void nrLDPC_llr2bitPacked(int8_t* out, int8_t* llrOut, uint16_t nu
     const uint8_t constShuffle_256_epi8[32] __attribute__ ((aligned(32))) = {7,6,5,4,3,2,1,0,15,14,13,12,11,10,9,8,7,6,5,4,3,2,1,0,15,14,13,12,11,10,9,8};
-    const __m256i* p_shuffle = (__m256i*)constShuffle_256_epi8;
+    const simde__m256i* p_shuffle = (simde__m256i*)constShuffle_256_epi8;
 
-    __m256i*  p_llrOut = (__m256i*)  llrOut;
+    simde__m256i*  p_llrOut = (simde__m256i*)  llrOut;
     uint32_t* p_bits   = (uint32_t*) out;
     const uint32_t M = numLLR >> 5;
     const uint32_t Mr = numLLR & 31;
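
For the packed variant, the shuffle constant above reverses each 8-byte lane so that the sign bits collected by simde_mm256_movemask_epi8 land in the intended order. A hedged sketch of that packing step (the exact bit ordering is an assumption here):

```c
#include <stdint.h>
#include <simde/x86/avx2.h>

/* Pack the sign bits of 32 LLRs into one 32-bit word. */
static uint32_t pack32_sketch(const int8_t llr[32])
{
  static const uint8_t shuf[32] __attribute__((aligned(32))) =
      {7, 6, 5, 4, 3, 2, 1, 0, 15, 14, 13, 12, 11, 10, 9, 8,
       7, 6, 5, 4, 3, 2, 1, 0, 15, 14, 13, 12, 11, 10, 9, 8};
  simde__m256i v = simde_mm256_loadu_si256((const simde__m256i*)llr);
  v = simde_mm256_shuffle_epi8(v, simde_mm256_load_si256((const simde__m256i*)shuf));
  return (uint32_t)simde_mm256_movemask_epi8(v); /* one bit per negative LLR */
}
```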
diff --git a/openair1/PHY/CODING/nrLDPC_decoder/nrLDPC_cnProc.h b/openair1/PHY/CODING/nrLDPC_decoder/nrLDPC_cnProc.h
index 984cb5e11dfd43968df24f6afa5980f243352ddd..714ca2bd341c603d5d1bdd32fa619f06b20f90d1 100644
--- a/openair1/PHY/CODING/nrLDPC_decoder/nrLDPC_cnProc.h
+++ b/openair1/PHY/CODING/nrLDPC_decoder/nrLDPC_cnProc.h
@@ -49,8 +49,8 @@ static inline void nrLDPC_cnProc_BG2(t_nrLDPC_lut* p_lut, int8_t* cnProcBuf, int
     const uint8_t*  lut_numCnInCnGroups   = p_lut->numCnInCnGroups;
     const uint32_t* lut_startAddrCnGroups = p_lut->startAddrCnGroups;
 
-    __m256i* p_cnProcBuf;
-    __m256i* p_cnProcBufRes;
+    simde__m256i* p_cnProcBuf;
+    simde__m256i* p_cnProcBufRes;
 
     // Number of CNs in Groups
     uint32_t M;
@@ -60,11 +60,11 @@ static inline void nrLDPC_cnProc_BG2(t_nrLDPC_lut* p_lut, int8_t* cnProcBuf, int
     // Offset to each bit within a group in terms of 32 Byte
     uint32_t bitOffsetInGroup;
 
-    __m256i ymm0, min, sgn;
-    __m256i* p_cnProcBufResBit;
+    simde__m256i ymm0, min, sgn;
+    simde__m256i* p_cnProcBufResBit;
 
-    const __m256i* p_ones   = (__m256i*) ones256_epi8;
-    const __m256i* p_maxLLR = (__m256i*) maxLLR256_epi8;
+    const simde__m256i* p_ones   = (simde__m256i*) ones256_epi8;
+    const simde__m256i* p_maxLLR = (simde__m256i*) maxLLR256_epi8;
 
     // LUT with offsets for bits that need to be processed
     // 1. bit proc requires LLRs of 2. and 3. bit, 2.bits of 1. and 3. etc.
@@ -83,8 +83,8 @@ static inline void nrLDPC_cnProc_BG2(t_nrLDPC_lut* p_lut, int8_t* cnProcBuf, int
         bitOffsetInGroup = (lut_numCnInCnGroups_BG2_R15[0]*NR_LDPC_ZMAX)>>5;
 
         // Set pointers to start of group 3
-        p_cnProcBuf    = (__m256i*) &cnProcBuf   [lut_startAddrCnGroups[0]];
-        p_cnProcBufRes = (__m256i*) &cnProcBufRes[lut_startAddrCnGroups[0]];
+        p_cnProcBuf    = (simde__m256i*) &cnProcBuf   [lut_startAddrCnGroups[0]];
+        p_cnProcBufRes = (simde__m256i*) &cnProcBufRes[lut_startAddrCnGroups[0]];
 
         // Loop over every BN
         for (j=0; j<3; j++)
@@ -92,8 +92,8 @@ static inline void nrLDPC_cnProc_BG2(t_nrLDPC_lut* p_lut, int8_t* cnProcBuf, int
             // Set of results pointer to correct BN address
             p_cnProcBufResBit = p_cnProcBufRes + (j*bitOffsetInGroup);
 
-            __m256i *pj0 = &p_cnProcBuf[lut_idxCnProcG3[j][0]];
-            __m256i *pj1 = &p_cnProcBuf[lut_idxCnProcG3[j][1]];
+            simde__m256i *pj0 = &p_cnProcBuf[lut_idxCnProcG3[j][0]];
+            simde__m256i *pj1 = &p_cnProcBuf[lut_idxCnProcG3[j][1]];
 
             // Loop over CNs
             for (i=0; i<M; i++)
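
The kernel in this hunk is min-sum check-node processing: the outgoing message for a bit takes the XOR of the other bits' signs and the minimum of their absolute values, clamped to 127 so the unrepresentable magnitude 128 (which is -128 in epi8) never appears. A scalar reference for the two-input case (group of 3 BNs), purely for illustration:

```c
#include <stdint.h>
#include <stdlib.h>

/* Outgoing message for one bit of a degree-3 CN, given the other two LLRs. */
static int8_t cn_minsum_ref(int8_t other0, int8_t other1)
{
  int mag = abs(other0) < abs(other1) ? abs(other0) : abs(other1);
  if (mag > 127)
    mag = 127; /* clamp: 128 in epi8 would wrap to -128 */
  int sign = ((other0 < 0) ^ (other1 < 0)) ? -1 : 1;
  return (int8_t)(sign * mag);
}
```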
@@ -134,8 +134,8 @@ static inline void nrLDPC_cnProc_BG2(t_nrLDPC_lut* p_lut, int8_t* cnProcBuf, int
         bitOffsetInGroup = (lut_numCnInCnGroups_BG2_R15[1]*NR_LDPC_ZMAX)>>5;
 
         // Set pointers to start of group 4
-        p_cnProcBuf    = (__m256i*) &cnProcBuf   [lut_startAddrCnGroups[1]];
-        p_cnProcBufRes = (__m256i*) &cnProcBufRes[lut_startAddrCnGroups[1]];
+        p_cnProcBuf    = (simde__m256i*) &cnProcBuf   [lut_startAddrCnGroups[1]];
+        p_cnProcBufRes = (simde__m256i*) &cnProcBufRes[lut_startAddrCnGroups[1]];
 
         // Loop over every BN
         for (j=0; j<4; j++)
@@ -183,8 +183,8 @@ static inline void nrLDPC_cnProc_BG2(t_nrLDPC_lut* p_lut, int8_t* cnProcBuf, int
         bitOffsetInGroup = (lut_numCnInCnGroups_BG2_R15[2]*NR_LDPC_ZMAX)>>5;
 
         // Set pointers to start of group 5
-        p_cnProcBuf    = (__m256i*) &cnProcBuf   [lut_startAddrCnGroups[2]];
-        p_cnProcBufRes = (__m256i*) &cnProcBufRes[lut_startAddrCnGroups[2]];
+        p_cnProcBuf    = (simde__m256i*) &cnProcBuf   [lut_startAddrCnGroups[2]];
+        p_cnProcBufRes = (simde__m256i*) &cnProcBufRes[lut_startAddrCnGroups[2]];
 
         // Loop over every BN
         for (j=0; j<5; j++)
@@ -233,8 +233,8 @@ static inline void nrLDPC_cnProc_BG2(t_nrLDPC_lut* p_lut, int8_t* cnProcBuf, int
         bitOffsetInGroup = (lut_numCnInCnGroups_BG2_R15[3]*NR_LDPC_ZMAX)>>5;
 
         // Set pointers to start of group 6
-        p_cnProcBuf    = (__m256i*) &cnProcBuf   [lut_startAddrCnGroups[3]];
-        p_cnProcBufRes = (__m256i*) &cnProcBufRes[lut_startAddrCnGroups[3]];
+        p_cnProcBuf    = (simde__m256i*) &cnProcBuf   [lut_startAddrCnGroups[3]];
+        p_cnProcBufRes = (simde__m256i*) &cnProcBufRes[lut_startAddrCnGroups[3]];
 
         // Loop over every BN
         for (j=0; j<6; j++)
@@ -284,8 +284,8 @@ static inline void nrLDPC_cnProc_BG2(t_nrLDPC_lut* p_lut, int8_t* cnProcBuf, int
         bitOffsetInGroup = (lut_numCnInCnGroups_BG2_R15[4]*NR_LDPC_ZMAX)>>5;
 
         // Set pointers to start of group 8
-        p_cnProcBuf    = (__m256i*) &cnProcBuf   [lut_startAddrCnGroups[4]];
-        p_cnProcBufRes = (__m256i*) &cnProcBufRes[lut_startAddrCnGroups[4]];
+        p_cnProcBuf    = (simde__m256i*) &cnProcBuf   [lut_startAddrCnGroups[4]];
+        p_cnProcBufRes = (simde__m256i*) &cnProcBufRes[lut_startAddrCnGroups[4]];
 
         // Loop over every BN
         for (j=0; j<8; j++)
@@ -336,8 +336,8 @@ static inline void nrLDPC_cnProc_BG2(t_nrLDPC_lut* p_lut, int8_t* cnProcBuf, int
         bitOffsetInGroup = (lut_numCnInCnGroups_BG2_R15[5]*NR_LDPC_ZMAX)>>5;
 
         // Set pointers to start of group 10
-        p_cnProcBuf    = (__m256i*) &cnProcBuf   [lut_startAddrCnGroups[5]];
-        p_cnProcBufRes = (__m256i*) &cnProcBufRes[lut_startAddrCnGroups[5]];
+        p_cnProcBuf    = (simde__m256i*) &cnProcBuf   [lut_startAddrCnGroups[5]];
+        p_cnProcBufRes = (simde__m256i*) &cnProcBufRes[lut_startAddrCnGroups[5]];
 
         // Loop over every BN
         for (j=0; j<10; j++)
@@ -390,8 +390,8 @@ static inline void nrLDPC_cnProc_BG1(t_nrLDPC_lut* p_lut, int8_t* cnProcBuf, int
     const uint8_t*  lut_numCnInCnGroups   = p_lut->numCnInCnGroups;
     const uint32_t* lut_startAddrCnGroups = p_lut->startAddrCnGroups;
 
-    __m256i* p_cnProcBuf;
-    __m256i* p_cnProcBufRes;
+    simde__m256i* p_cnProcBuf;
+    simde__m256i* p_cnProcBufRes;
 
     // Number of CNs in Groups
     uint32_t M;
@@ -401,11 +401,11 @@ static inline void nrLDPC_cnProc_BG1(t_nrLDPC_lut* p_lut, int8_t* cnProcBuf, int
     // Offset to each bit within a group in terms of 32 Byte
     uint32_t bitOffsetInGroup;
 
-    __m256i ymm0, min, sgn;
-    __m256i* p_cnProcBufResBit;
+    simde__m256i ymm0, min, sgn;
+    simde__m256i* p_cnProcBufResBit;
 
-    const __m256i* p_ones   = (__m256i*) ones256_epi8;
-    const __m256i* p_maxLLR = (__m256i*) maxLLR256_epi8;
+    const simde__m256i* p_ones   = (simde__m256i*) ones256_epi8;
+    const simde__m256i* p_maxLLR = (simde__m256i*) maxLLR256_epi8;
 
     // LUT with offsets for bits that need to be processed
     // 1. bit proc requires LLRs of 2. and 3. bit, 2.bits of 1. and 3. etc.
@@ -425,8 +425,8 @@ static inline void nrLDPC_cnProc_BG1(t_nrLDPC_lut* p_lut, int8_t* cnProcBuf, int
         bitOffsetInGroup = (lut_numCnInCnGroups_BG1_R13[0]*NR_LDPC_ZMAX)>>5;
 
         // Set pointers to start of group 3
-        p_cnProcBuf    = (__m256i*) &cnProcBuf   [lut_startAddrCnGroups[0]];
-        p_cnProcBufRes = (__m256i*) &cnProcBufRes[lut_startAddrCnGroups[0]];
+        p_cnProcBuf    = (simde__m256i*) &cnProcBuf   [lut_startAddrCnGroups[0]];
+        p_cnProcBufRes = (simde__m256i*) &cnProcBufRes[lut_startAddrCnGroups[0]];
 
         // Loop over every BN
         for (j=0; j<3; j++)
@@ -472,8 +472,8 @@ static inline void nrLDPC_cnProc_BG1(t_nrLDPC_lut* p_lut, int8_t* cnProcBuf, int
         bitOffsetInGroup = (lut_numCnInCnGroups_BG1_R13[1]*NR_LDPC_ZMAX)>>5;
 
         // Set pointers to start of group 4
-        p_cnProcBuf    = (__m256i*) &cnProcBuf   [lut_startAddrCnGroups[1]];
-        p_cnProcBufRes = (__m256i*) &cnProcBufRes[lut_startAddrCnGroups[1]];
+        p_cnProcBuf    = (simde__m256i*) &cnProcBuf   [lut_startAddrCnGroups[1]];
+        p_cnProcBufRes = (simde__m256i*) &cnProcBufRes[lut_startAddrCnGroups[1]];
 
         // Loop over every BN
         for (j=0; j<4; j++)
@@ -522,8 +522,8 @@ static inline void nrLDPC_cnProc_BG1(t_nrLDPC_lut* p_lut, int8_t* cnProcBuf, int
         bitOffsetInGroup = (lut_numCnInCnGroups_BG1_R13[2]*NR_LDPC_ZMAX)>>5;
 
         // Set pointers to start of group 5
-        p_cnProcBuf    = (__m256i*) &cnProcBuf   [lut_startAddrCnGroups[2]];
-        p_cnProcBufRes = (__m256i*) &cnProcBufRes[lut_startAddrCnGroups[2]];
+        p_cnProcBuf    = (simde__m256i*) &cnProcBuf   [lut_startAddrCnGroups[2]];
+        p_cnProcBufRes = (simde__m256i*) &cnProcBufRes[lut_startAddrCnGroups[2]];
 
         // Loop over every BN
         for (j=0; j<5; j++)
@@ -573,8 +573,8 @@ static inline void nrLDPC_cnProc_BG1(t_nrLDPC_lut* p_lut, int8_t* cnProcBuf, int
         bitOffsetInGroup = (lut_numCnInCnGroups_BG1_R13[3]*NR_LDPC_ZMAX)>>5;
 
         // Set pointers to start of group 6
-        p_cnProcBuf    = (__m256i*) &cnProcBuf   [lut_startAddrCnGroups[3]];
-        p_cnProcBufRes = (__m256i*) &cnProcBufRes[lut_startAddrCnGroups[3]];
+        p_cnProcBuf    = (simde__m256i*) &cnProcBuf   [lut_startAddrCnGroups[3]];
+        p_cnProcBufRes = (simde__m256i*) &cnProcBufRes[lut_startAddrCnGroups[3]];
 
         // Loop over every BN
         for (j=0; j<6; j++)
@@ -625,8 +625,8 @@ static inline void nrLDPC_cnProc_BG1(t_nrLDPC_lut* p_lut, int8_t* cnProcBuf, int
         bitOffsetInGroup = (lut_numCnInCnGroups_BG1_R13[4]*NR_LDPC_ZMAX)>>5;
 
         // Set pointers to start of group 7
-        p_cnProcBuf    = (__m256i*) &cnProcBuf   [lut_startAddrCnGroups[4]];
-        p_cnProcBufRes = (__m256i*) &cnProcBufRes[lut_startAddrCnGroups[4]];
+        p_cnProcBuf    = (simde__m256i*) &cnProcBuf   [lut_startAddrCnGroups[4]];
+        p_cnProcBufRes = (simde__m256i*) &cnProcBufRes[lut_startAddrCnGroups[4]];
 
         // Loop over every BN
         for (j=0; j<7; j++)
@@ -677,8 +677,8 @@ static inline void nrLDPC_cnProc_BG1(t_nrLDPC_lut* p_lut, int8_t* cnProcBuf, int
         bitOffsetInGroup = (lut_numCnInCnGroups_BG1_R13[5]*NR_LDPC_ZMAX)>>5;
 
         // Set pointers to start of group 8
-        p_cnProcBuf    = (__m256i*) &cnProcBuf   [lut_startAddrCnGroups[5]];
-        p_cnProcBufRes = (__m256i*) &cnProcBufRes[lut_startAddrCnGroups[5]];
+        p_cnProcBuf    = (simde__m256i*) &cnProcBuf   [lut_startAddrCnGroups[5]];
+        p_cnProcBufRes = (simde__m256i*) &cnProcBufRes[lut_startAddrCnGroups[5]];
 
         // Loop over every BN
         for (j=0; j<8; j++)
@@ -730,8 +730,8 @@ static inline void nrLDPC_cnProc_BG1(t_nrLDPC_lut* p_lut, int8_t* cnProcBuf, int
         bitOffsetInGroup = (lut_numCnInCnGroups_BG1_R13[6]*NR_LDPC_ZMAX)>>5;
 
         // Set pointers to start of group 9
-        p_cnProcBuf    = (__m256i*) &cnProcBuf   [lut_startAddrCnGroups[6]];
-        p_cnProcBufRes = (__m256i*) &cnProcBufRes[lut_startAddrCnGroups[6]];
+        p_cnProcBuf    = (simde__m256i*) &cnProcBuf   [lut_startAddrCnGroups[6]];
+        p_cnProcBufRes = (simde__m256i*) &cnProcBufRes[lut_startAddrCnGroups[6]];
 
         // Loop over every BN
         for (j=0; j<9; j++)
@@ -783,8 +783,8 @@ static inline void nrLDPC_cnProc_BG1(t_nrLDPC_lut* p_lut, int8_t* cnProcBuf, int
         bitOffsetInGroup = (lut_numCnInCnGroups_BG1_R13[7]*NR_LDPC_ZMAX)>>5;
 
         // Set pointers to start of group 10
-        p_cnProcBuf    = (__m256i*) &cnProcBuf   [lut_startAddrCnGroups[7]];
-        p_cnProcBufRes = (__m256i*) &cnProcBufRes[lut_startAddrCnGroups[7]];
+        p_cnProcBuf    = (simde__m256i*) &cnProcBuf   [lut_startAddrCnGroups[7]];
+        p_cnProcBufRes = (simde__m256i*) &cnProcBufRes[lut_startAddrCnGroups[7]];
 
         // Loop over every BN
         for (j=0; j<10; j++)
@@ -841,8 +841,8 @@ static inline void nrLDPC_cnProc_BG1(t_nrLDPC_lut* p_lut, int8_t* cnProcBuf, int
         bitOffsetInGroup = (lut_numCnInCnGroups_BG1_R13[8]*NR_LDPC_ZMAX)>>5;
 
         // Set pointers to start of group 19
-        p_cnProcBuf    = (__m256i*) &cnProcBuf   [lut_startAddrCnGroups[8]];
-        p_cnProcBufRes = (__m256i*) &cnProcBufRes[lut_startAddrCnGroups[8]];
+        p_cnProcBuf    = (simde__m256i*) &cnProcBuf   [lut_startAddrCnGroups[8]];
+        p_cnProcBufRes = (simde__m256i*) &cnProcBufRes[lut_startAddrCnGroups[8]];
 
         // Loop over every BN
         for (j=0; j<19; j++)
@@ -889,8 +889,8 @@ static inline uint32_t nrLDPC_cnProcPc_BG1(t_nrLDPC_lut* p_lut, int8_t* cnProcBu
     const uint8_t*  lut_numCnInCnGroups   = p_lut->numCnInCnGroups;
     const uint32_t* lut_startAddrCnGroups = p_lut->startAddrCnGroups;
 
-    __m256i* p_cnProcBuf;
-    __m256i* p_cnProcBufRes;
+    simde__m256i* p_cnProcBuf;
+    simde__m256i* p_cnProcBufRes;
 
     // Number of CNs in Groups
     uint32_t M;
@@ -901,7 +901,7 @@ static inline uint32_t nrLDPC_cnProcPc_BG1(t_nrLDPC_lut* p_lut, int8_t* cnProcBu
     uint32_t Mrem;
     uint32_t M32;
 
-    __m256i ymm0, ymm1;
+    simde__m256i ymm0, ymm1;
 
     // =====================================================================
     // Process group with 3 BNs
@@ -920,8 +920,8 @@ static inline uint32_t nrLDPC_cnProcPc_BG1(t_nrLDPC_lut* p_lut, int8_t* cnProcBu
         M32 = (M + 31)>>5;
 
         // Set pointers to start of group 3
-        p_cnProcBuf    = (__m256i*) &cnProcBuf   [lut_startAddrCnGroups[0]];
-        p_cnProcBufRes = (__m256i*) &cnProcBufRes[lut_startAddrCnGroups[0]];
+        p_cnProcBuf    = (simde__m256i*) &cnProcBuf   [lut_startAddrCnGroups[0]];
+        p_cnProcBufRes = (simde__m256i*) &cnProcBufRes[lut_startAddrCnGroups[0]];
 
         // Loop over CNs
         for (i=0; i<(M32-1); i++)
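
(M + 31) >> 5 above is ceiling division by 32, i.e. the number of 256-bit vectors needed to cover M bytes; the loops then process M32 - 1 full vectors and handle the final, possibly partial, vector separately. A small check of the idiom:

```c
#include <assert.h>
#include <stdint.h>

int main(void)
{
  for (uint32_t M = 1; M <= 256; M++) {
    uint32_t M32 = (M + 31) >> 5; /* ceil(M / 32) */
    assert(M32 == (M + 31) / 32);
    assert(32 * M32 >= M && 32 * (M32 - 1) < M);
  }
  return 0;
}
```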
@@ -988,8 +988,8 @@ static inline uint32_t nrLDPC_cnProcPc_BG1(t_nrLDPC_lut* p_lut, int8_t* cnProcBu
         M32 = (M + 31)>>5;
 
         // Set pointers to start of group 4
-        p_cnProcBuf    = (__m256i*) &cnProcBuf   [lut_startAddrCnGroups[1]];
-        p_cnProcBufRes = (__m256i*) &cnProcBufRes[lut_startAddrCnGroups[1]];
+        p_cnProcBuf    = (simde__m256i*) &cnProcBuf   [lut_startAddrCnGroups[1]];
+        p_cnProcBufRes = (simde__m256i*) &cnProcBufRes[lut_startAddrCnGroups[1]];
 
         // Loop over CNs
         for (i=0; i<(M32-1); i++)
@@ -1056,8 +1056,8 @@ static inline uint32_t nrLDPC_cnProcPc_BG1(t_nrLDPC_lut* p_lut, int8_t* cnProcBu
         M32 = (M + 31)>>5;
 
         // Set pointers to start of group 5
-        p_cnProcBuf    = (__m256i*) &cnProcBuf   [lut_startAddrCnGroups[2]];
-        p_cnProcBufRes = (__m256i*) &cnProcBufRes[lut_startAddrCnGroups[2]];
+        p_cnProcBuf    = (simde__m256i*) &cnProcBuf   [lut_startAddrCnGroups[2]];
+        p_cnProcBufRes = (simde__m256i*) &cnProcBufRes[lut_startAddrCnGroups[2]];
 
         // Loop over CNs
         for (i=0; i<(M32-1); i++)
@@ -1125,8 +1125,8 @@ static inline uint32_t nrLDPC_cnProcPc_BG1(t_nrLDPC_lut* p_lut, int8_t* cnProcBu
         M32 = (M + 31)>>5;
 
         // Set pointers to start of group 6
-        p_cnProcBuf    = (__m256i*) &cnProcBuf   [lut_startAddrCnGroups[3]];
-        p_cnProcBufRes = (__m256i*) &cnProcBufRes[lut_startAddrCnGroups[3]];
+        p_cnProcBuf    = (simde__m256i*) &cnProcBuf   [lut_startAddrCnGroups[3]];
+        p_cnProcBufRes = (simde__m256i*) &cnProcBufRes[lut_startAddrCnGroups[3]];
 
         // Loop over CNs
         for (i=0; i<(M32-1); i++)
@@ -1193,8 +1193,8 @@ static inline uint32_t nrLDPC_cnProcPc_BG1(t_nrLDPC_lut* p_lut, int8_t* cnProcBu
         M32 = (M + 31)>>5;
 
         // Set pointers to start of group 7
-        p_cnProcBuf    = (__m256i*) &cnProcBuf   [lut_startAddrCnGroups[4]];
-        p_cnProcBufRes = (__m256i*) &cnProcBufRes[lut_startAddrCnGroups[4]];
+        p_cnProcBuf    = (simde__m256i*) &cnProcBuf   [lut_startAddrCnGroups[4]];
+        p_cnProcBufRes = (simde__m256i*) &cnProcBufRes[lut_startAddrCnGroups[4]];
 
         // Loop over CNs
         for (i=0; i<(M32-1); i++)
@@ -1261,8 +1261,8 @@ static inline uint32_t nrLDPC_cnProcPc_BG1(t_nrLDPC_lut* p_lut, int8_t* cnProcBu
         M32 = (M + 31)>>5;
 
         // Set pointers to start of group 8
-        p_cnProcBuf    = (__m256i*) &cnProcBuf   [lut_startAddrCnGroups[5]];
-        p_cnProcBufRes = (__m256i*) &cnProcBufRes[lut_startAddrCnGroups[5]];
+        p_cnProcBuf    = (simde__m256i*) &cnProcBuf   [lut_startAddrCnGroups[5]];
+        p_cnProcBufRes = (simde__m256i*) &cnProcBufRes[lut_startAddrCnGroups[5]];
 
         // Loop over CNs
         for (i=0; i<(M32-1); i++)
@@ -1329,8 +1329,8 @@ static inline uint32_t nrLDPC_cnProcPc_BG1(t_nrLDPC_lut* p_lut, int8_t* cnProcBu
         M32 = (M + 31)>>5;
 
         // Set pointers to start of group 9
-        p_cnProcBuf    = (__m256i*) &cnProcBuf   [lut_startAddrCnGroups[6]];
-        p_cnProcBufRes = (__m256i*) &cnProcBufRes[lut_startAddrCnGroups[6]];
+        p_cnProcBuf    = (simde__m256i*) &cnProcBuf   [lut_startAddrCnGroups[6]];
+        p_cnProcBufRes = (simde__m256i*) &cnProcBufRes[lut_startAddrCnGroups[6]];
 
         // Loop over CNs
         for (i=0; i<(M32-1); i++)
@@ -1397,8 +1397,8 @@ static inline uint32_t nrLDPC_cnProcPc_BG1(t_nrLDPC_lut* p_lut, int8_t* cnProcBu
         M32 = (M + 31)>>5;
 
         // Set pointers to start of group 10
-        p_cnProcBuf    = (__m256i*) &cnProcBuf   [lut_startAddrCnGroups[7]];
-        p_cnProcBufRes = (__m256i*) &cnProcBufRes[lut_startAddrCnGroups[7]];
+        p_cnProcBuf    = (simde__m256i*) &cnProcBuf   [lut_startAddrCnGroups[7]];
+        p_cnProcBufRes = (simde__m256i*) &cnProcBufRes[lut_startAddrCnGroups[7]];
 
         // Loop over CNs
         for (i=0; i<(M32-1); i++)
@@ -1465,8 +1465,8 @@ static inline uint32_t nrLDPC_cnProcPc_BG1(t_nrLDPC_lut* p_lut, int8_t* cnProcBu
         M32 = (M + 31)>>5;
 
         // Set pointers to start of group 19
-        p_cnProcBuf    = (__m256i*) &cnProcBuf   [lut_startAddrCnGroups[8]];
-        p_cnProcBufRes = (__m256i*) &cnProcBufRes[lut_startAddrCnGroups[8]];
+        p_cnProcBuf    = (simde__m256i*) &cnProcBuf   [lut_startAddrCnGroups[8]];
+        p_cnProcBufRes = (simde__m256i*) &cnProcBufRes[lut_startAddrCnGroups[8]];
 
         // Loop over CNs
         for (i=0; i<(M32-1); i++)
@@ -1530,8 +1530,8 @@ static inline uint32_t nrLDPC_cnProcPc_BG2(t_nrLDPC_lut* p_lut, int8_t* cnProcBu
     const uint8_t*  lut_numCnInCnGroups   = p_lut->numCnInCnGroups;
     const uint32_t* lut_startAddrCnGroups = p_lut->startAddrCnGroups;
 
-    __m256i* p_cnProcBuf;
-    __m256i* p_cnProcBufRes;
+    simde__m256i* p_cnProcBuf;
+    simde__m256i* p_cnProcBufRes;
 
     // Number of CNs in Groups
     uint32_t M;
@@ -1542,7 +1542,7 @@ static inline uint32_t nrLDPC_cnProcPc_BG2(t_nrLDPC_lut* p_lut, int8_t* cnProcBu
     uint32_t Mrem;
     uint32_t M32;
 
-    __m256i ymm0, ymm1;
+    simde__m256i ymm0, ymm1;
 
     // =====================================================================
     // Process group with 3 BNs
@@ -1561,8 +1561,8 @@ static inline uint32_t nrLDPC_cnProcPc_BG2(t_nrLDPC_lut* p_lut, int8_t* cnProcBu
         M32 = (M + 31)>>5;
 
         // Set pointers to start of group 3
-        p_cnProcBuf    = (__m256i*) &cnProcBuf   [lut_startAddrCnGroups[0]];
-        p_cnProcBufRes = (__m256i*) &cnProcBufRes[lut_startAddrCnGroups[0]];
+        p_cnProcBuf    = (simde__m256i*) &cnProcBuf   [lut_startAddrCnGroups[0]];
+        p_cnProcBufRes = (simde__m256i*) &cnProcBufRes[lut_startAddrCnGroups[0]];
 
         // Loop over CNs
         for (i=0; i<(M32-1); i++)
@@ -1629,8 +1629,8 @@ static inline uint32_t nrLDPC_cnProcPc_BG2(t_nrLDPC_lut* p_lut, int8_t* cnProcBu
         M32 = (M + 31)>>5;
 
         // Set pointers to start of group 4
-        p_cnProcBuf    = (__m256i*) &cnProcBuf   [lut_startAddrCnGroups[1]];
-        p_cnProcBufRes = (__m256i*) &cnProcBufRes[lut_startAddrCnGroups[1]];
+        p_cnProcBuf    = (simde__m256i*) &cnProcBuf   [lut_startAddrCnGroups[1]];
+        p_cnProcBufRes = (simde__m256i*) &cnProcBufRes[lut_startAddrCnGroups[1]];
 
         // Loop over CNs
         for (i=0; i<(M32-1); i++)
@@ -1697,8 +1697,8 @@ static inline uint32_t nrLDPC_cnProcPc_BG2(t_nrLDPC_lut* p_lut, int8_t* cnProcBu
         M32 = (M + 31)>>5;
 
         // Set pointers to start of group 5
-        p_cnProcBuf    = (__m256i*) &cnProcBuf   [lut_startAddrCnGroups[2]];
-        p_cnProcBufRes = (__m256i*) &cnProcBufRes[lut_startAddrCnGroups[2]];
+        p_cnProcBuf    = (simde__m256i*) &cnProcBuf   [lut_startAddrCnGroups[2]];
+        p_cnProcBufRes = (simde__m256i*) &cnProcBufRes[lut_startAddrCnGroups[2]];
 
         // Loop over CNs
         for (i=0; i<(M32-1); i++)
@@ -1765,8 +1765,8 @@ static inline uint32_t nrLDPC_cnProcPc_BG2(t_nrLDPC_lut* p_lut, int8_t* cnProcBu
         M32 = (M + 31)>>5;
 
         // Set pointers to start of group 6
-        p_cnProcBuf    = (__m256i*) &cnProcBuf   [lut_startAddrCnGroups[3]];
-        p_cnProcBufRes = (__m256i*) &cnProcBufRes[lut_startAddrCnGroups[3]];
+        p_cnProcBuf    = (simde__m256i*) &cnProcBuf   [lut_startAddrCnGroups[3]];
+        p_cnProcBufRes = (simde__m256i*) &cnProcBufRes[lut_startAddrCnGroups[3]];
 
         // Loop over CNs
         for (i=0; i<(M32-1); i++)
@@ -1833,8 +1833,8 @@ static inline uint32_t nrLDPC_cnProcPc_BG2(t_nrLDPC_lut* p_lut, int8_t* cnProcBu
         M32 = (M + 31)>>5;
 
         // Set pointers to start of group 8
-        p_cnProcBuf    = (__m256i*) &cnProcBuf   [lut_startAddrCnGroups[4]];
-        p_cnProcBufRes = (__m256i*) &cnProcBufRes[lut_startAddrCnGroups[4]];
+        p_cnProcBuf    = (simde__m256i*) &cnProcBuf   [lut_startAddrCnGroups[4]];
+        p_cnProcBufRes = (simde__m256i*) &cnProcBufRes[lut_startAddrCnGroups[4]];
 
         // Loop over CNs
         for (i=0; i<(M32-1); i++)
@@ -1901,8 +1901,8 @@ static inline uint32_t nrLDPC_cnProcPc_BG2(t_nrLDPC_lut* p_lut, int8_t* cnProcBu
         M32 = (M + 31)>>5;
 
         // Set pointers to start of group 10
-        p_cnProcBuf    = (__m256i*) &cnProcBuf   [lut_startAddrCnGroups[5]];
-        p_cnProcBufRes = (__m256i*) &cnProcBufRes[lut_startAddrCnGroups[5]];
+        p_cnProcBuf    = (simde__m256i*) &cnProcBuf   [lut_startAddrCnGroups[5]];
+        p_cnProcBufRes = (simde__m256i*) &cnProcBufRes[lut_startAddrCnGroups[5]];
 
         // Loop over CNs
         for (i=0; i<(M32-1); i++)
diff --git a/openair1/PHY/CODING/nrLDPC_decoder/nrLDPC_cnProc_avx512.h b/openair1/PHY/CODING/nrLDPC_decoder/nrLDPC_cnProc_avx512.h
index 044f1048e71fc7df3a36f3771465c232f4183d62..a69d14e79f1c6cd5b6ba4cb3cd01e2d98d419780 100644
--- a/openair1/PHY/CODING/nrLDPC_decoder/nrLDPC_cnProc_avx512.h
+++ b/openair1/PHY/CODING/nrLDPC_decoder/nrLDPC_cnProc_avx512.h
@@ -32,14 +32,14 @@
 #ifndef __NR_LDPC_CNPROC__H__
 #define __NR_LDPC_CNPROC__H__
 
-#define conditional_negate(a,b,z) _mm512_mask_sub_epi8(a,_mm512_movepi8_mask(b),z,a)
+#define conditional_negate(a, b, z) simde_mm512_mask_sub_epi8(a, simde_mm512_movepi8_mask(b), z, a)
 static inline void nrLDPC_cnProc_BG2_AVX512(t_nrLDPC_lut* p_lut, int8_t* cnProcBuf, int8_t* cnProcBufRes, uint16_t Z)
 {
     const uint8_t*  lut_numCnInCnGroups   = p_lut->numCnInCnGroups;
     const uint32_t* lut_startAddrCnGroups = p_lut->startAddrCnGroups;
 
-    __m512i* p_cnProcBuf;
-    __m512i* p_cnProcBufRes;
+    simde__m512i* p_cnProcBuf;
+    simde__m512i* p_cnProcBufRes;
 
     // Number of CNs in Groups
     uint32_t M;
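
The conditional_negate macro above packs the per-byte sign bits of b into a 64-lane mask and, where the mask is set, replaces a with 0 - a. It stands in for _mm512_sign_epi8, which AVX-512BW does not provide, except that it does not zero lanes where b is zero. A minimal demo under those assumptions:

```c
#include <simde/x86/avx512.h>

/* Negate the bytes of a wherever the corresponding byte of b is negative. */
static simde__m512i cond_negate_demo(simde__m512i a, simde__m512i b)
{
  simde__m512i z = simde_mm512_setzero_si512();
  /* mask bit i = sign bit of byte i of b; masked lanes get z - a, others a */
  return simde_mm512_mask_sub_epi8(a, simde_mm512_movepi8_mask(b), z, a);
}
```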
@@ -49,13 +49,13 @@ static inline void nrLDPC_cnProc_BG2_AVX512(t_nrLDPC_lut* p_lut, int8_t* cnProcB
     // Offset to each bit within a group in terms of 32 Byte
     uint32_t bitOffsetInGroup;
 
-    __m512i zmm0, min, sgn, zeros;
-    zeros  = _mm512_setzero_si512();
-//     maxLLR = _mm512_set1_epi8((char)127);
-    __m512i* p_cnProcBufResBit;
+    simde__m512i zmm0, min, sgn, zeros;
+    zeros = simde_mm512_setzero_si512();
+    //     maxLLR = simde_mm512_set1_epi8((char)127);
+    simde__m512i* p_cnProcBufResBit;
 
-    const __m512i* p_ones   = (__m512i*) ones512_epi8;
-    const __m512i* p_maxLLR = (__m512i*) maxLLR512_epi8;
+    const simde__m512i* p_ones = (simde__m512i*)ones512_epi8;
+    const simde__m512i* p_maxLLR = (simde__m512i*)maxLLR512_epi8;
 
     // LUT with offsets for bits that need to be processed
     // 1. bit proc requires LLRs of 2. and 3. bit, 2.bits of 1. and 3. etc.
@@ -74,8 +74,8 @@ static inline void nrLDPC_cnProc_BG2_AVX512(t_nrLDPC_lut* p_lut, int8_t* cnProcB
         bitOffsetInGroup = (lut_numCnInCnGroups_BG2_R15[0]*NR_LDPC_ZMAX)>>6;
 
         // Set pointers to start of group 3
-        p_cnProcBuf    = (__m512i*) &cnProcBuf   [lut_startAddrCnGroups[0]];
-        p_cnProcBufRes = (__m512i*) &cnProcBufRes[lut_startAddrCnGroups[0]];
+        p_cnProcBuf = (simde__m512i*)&cnProcBuf[lut_startAddrCnGroups[0]];
+        p_cnProcBufRes = (simde__m512i*)&cnProcBufRes[lut_startAddrCnGroups[0]];
 
         // Loop over every BN
         for (j=0; j<3; j++)
@@ -83,8 +83,8 @@ static inline void nrLDPC_cnProc_BG2_AVX512(t_nrLDPC_lut* p_lut, int8_t* cnProcB
             // Set of results pointer to correct BN address
             p_cnProcBufResBit = p_cnProcBufRes + (j*bitOffsetInGroup);
 
-            __m512i *pj0 = &p_cnProcBuf[(lut_idxCnProcG3[j][0]/2)];
-            __m512i *pj1 = &p_cnProcBuf[(lut_idxCnProcG3[j][1]/2)];
+            simde__m512i* pj0 = &p_cnProcBuf[(lut_idxCnProcG3[j][0] / 2)];
+            simde__m512i* pj1 = &p_cnProcBuf[(lut_idxCnProcG3[j][1] / 2)];
 
             // Loop over CNs
             for (i=0; i<M; i++)
@@ -92,20 +92,20 @@ static inline void nrLDPC_cnProc_BG2_AVX512(t_nrLDPC_lut* p_lut, int8_t* cnProcB
                 // Abs and sign of 32 CNs (first BN)
               //                zmm0 = p_cnProcBuf[lut_idxCnProcG3[j][0] + i];
                 zmm0 = pj0[i];
-                sgn  = _mm512_xor_si512(*p_ones, zmm0);
-                min  = _mm512_abs_epi8(zmm0);
+                sgn = simde_mm512_xor_si512(*p_ones, zmm0);
+                min = simde_mm512_abs_epi8(zmm0);
 
                 // 32 CNs of second BN
                 //  zmm0 = p_cnProcBuf[(lut_idxCnProcG3[j][1]/2) + i];
                 zmm0 = pj1[i];
-                min  = _mm512_min_epu8(min, _mm512_abs_epi8(zmm0));
-                sgn  = _mm512_xor_si512(sgn, zmm0);
+                min = simde_mm512_min_epu8(min, simde_mm512_abs_epi8(zmm0));
+                sgn = simde_mm512_xor_si512(sgn, zmm0);
 
                 // Store result
-                min = _mm512_min_epu8(min, *p_maxLLR); // 128 in epi8 is -127
+                min = simde_mm512_min_epu8(min, *p_maxLLR); // 128 in epi8 is -127
                 *p_cnProcBufResBit = conditional_negate(min, sgn,zeros);
                 p_cnProcBufResBit++;
-                //p_cnProcBufResBit[i]=_mm512_sign_epi8(min, sgn);
+                // p_cnProcBufResBit[i] = simde_mm512_sign_epi8(min, sgn);
             }
         }
     }
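
The /2 on the LUT indices in this file converts units: lut_idxCnProcG3 and friends store offsets in 32-byte (256-bit) vectors, while the AVX-512 path walks the same buffer in 64-byte (512-bit) vectors. A one-line helper making the conversion explicit (the name is illustrative):

```c
#include <stdint.h>

/* Offsets stored in 32-byte (256-bit) units, consumed in 64-byte units. */
static inline uint32_t idx256_to_idx512(uint32_t idx256)
{
  return idx256 / 2; /* same byte offset: idx256 * 32 == (idx256 / 2) * 64 */
}
```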
@@ -125,8 +125,8 @@ static inline void nrLDPC_cnProc_BG2_AVX512(t_nrLDPC_lut* p_lut, int8_t* cnProcB
         bitOffsetInGroup = (lut_numCnInCnGroups_BG2_R15[1]*NR_LDPC_ZMAX)>>6;
 
         // Set pointers to start of group 4
-        p_cnProcBuf    = (__m512i*) &cnProcBuf   [lut_startAddrCnGroups[1]];
-        p_cnProcBufRes = (__m512i*) &cnProcBufRes[lut_startAddrCnGroups[1]];
+        p_cnProcBuf = (simde__m512i*)&cnProcBuf[lut_startAddrCnGroups[1]];
+        p_cnProcBufRes = (simde__m512i*)&cnProcBufRes[lut_startAddrCnGroups[1]];
 
         // Loop over every BN
         for (j=0; j<4; j++)
@@ -139,19 +139,19 @@ static inline void nrLDPC_cnProc_BG2_AVX512(t_nrLDPC_lut* p_lut, int8_t* cnProcB
             {
                 // Abs and sign of 32 CNs (first BN)
                 zmm0 = p_cnProcBuf[(lut_idxCnProcG4[j][0]/2) + i];
-                sgn  = _mm512_xor_si512(*p_ones, zmm0);
-                min  = _mm512_abs_epi8(zmm0);
+                sgn = simde_mm512_xor_si512(*p_ones, zmm0);
+                min = simde_mm512_abs_epi8(zmm0);
 
                 // Loop over BNs
                 for (k=1; k<3; k++)
                 {
                     zmm0 = p_cnProcBuf[(lut_idxCnProcG4[j][k]/2) + i];
-                    min  = _mm512_min_epu8(min, _mm512_abs_epi8(zmm0));
-                   sgn  = _mm512_xor_si512(sgn, zmm0);
+                    min = simde_mm512_min_epu8(min, simde_mm512_abs_epi8(zmm0));
+                    sgn = simde_mm512_xor_si512(sgn, zmm0);
                 }
 
                 // Store result
-                min = _mm512_min_epu8(min, *p_maxLLR); // 128 in epi8 is -127
+                min = simde_mm512_min_epu8(min, *p_maxLLR); // 128 in epi8 is -127
                 *p_cnProcBufResBit = conditional_negate(min, sgn,zeros);
                 p_cnProcBufResBit++;
             }
@@ -174,8 +174,8 @@ static inline void nrLDPC_cnProc_BG2_AVX512(t_nrLDPC_lut* p_lut, int8_t* cnProcB
         bitOffsetInGroup = (lut_numCnInCnGroups_BG2_R15[2]*NR_LDPC_ZMAX)>>6;
 
         // Set pointers to start of group 5
-        p_cnProcBuf    = (__m512i*) &cnProcBuf   [lut_startAddrCnGroups[2]];
-        p_cnProcBufRes = (__m512i*) &cnProcBufRes[lut_startAddrCnGroups[2]];
+        p_cnProcBuf = (simde__m512i*)&cnProcBuf[lut_startAddrCnGroups[2]];
+        p_cnProcBufRes = (simde__m512i*)&cnProcBufRes[lut_startAddrCnGroups[2]];
 
         // Loop over every BN
         for (j=0; j<5; j++)
@@ -188,19 +188,19 @@ static inline void nrLDPC_cnProc_BG2_AVX512(t_nrLDPC_lut* p_lut, int8_t* cnProcB
             {
                 // Abs and sign of 32 CNs (first BN)
                 zmm0 = p_cnProcBuf[(lut_idxCnProcG5[j][0]/2) + i];
-                sgn  = _mm512_xor_si512(*p_ones, zmm0);
-                min  = _mm512_abs_epi8(zmm0);
+                sgn = simde_mm512_xor_si512(*p_ones, zmm0);
+                min = simde_mm512_abs_epi8(zmm0);
 
                 // Loop over BNs
                 for (k=1; k<4; k++)
                 {
                     zmm0 = p_cnProcBuf[(lut_idxCnProcG5[j][k]/2) + i];
-                    min  = _mm512_min_epu8(min, _mm512_abs_epi8(zmm0));
-                   sgn  = _mm512_xor_si512(sgn, zmm0);
+                    min = simde_mm512_min_epu8(min, simde_mm512_abs_epi8(zmm0));
+                    sgn = simde_mm512_xor_si512(sgn, zmm0);
                 }
 
                 // Store result
-                min = _mm512_min_epu8(min, *p_maxLLR); // 128 in epi8 is -127
+                min = simde_mm512_min_epu8(min, *p_maxLLR); // 128 in epi8 is -127
                 *p_cnProcBufResBit = conditional_negate(min, sgn,zeros);
                 p_cnProcBufResBit++;
             }
@@ -224,8 +224,8 @@ static inline void nrLDPC_cnProc_BG2_AVX512(t_nrLDPC_lut* p_lut, int8_t* cnProcB
         bitOffsetInGroup = (lut_numCnInCnGroups_BG2_R15[3]*NR_LDPC_ZMAX)>>6;
 
         // Set pointers to start of group 6
-        p_cnProcBuf    = (__m512i*) &cnProcBuf   [lut_startAddrCnGroups[3]];
-        p_cnProcBufRes = (__m512i*) &cnProcBufRes[lut_startAddrCnGroups[3]];
+        p_cnProcBuf = (simde__m512i*)&cnProcBuf[lut_startAddrCnGroups[3]];
+        p_cnProcBufRes = (simde__m512i*)&cnProcBufRes[lut_startAddrCnGroups[3]];
 
         // Loop over every BN
         for (j=0; j<6; j++)
@@ -238,19 +238,19 @@ static inline void nrLDPC_cnProc_BG2_AVX512(t_nrLDPC_lut* p_lut, int8_t* cnProcB
             {
                 // Abs and sign of 32 CNs (first BN)
                 zmm0 = p_cnProcBuf[(lut_idxCnProcG6[j][0]/2) + i];
-                sgn  = _mm512_xor_si512(*p_ones, zmm0);
-                min  = _mm512_abs_epi8(zmm0);
+                sgn = simde_mm512_xor_si512(*p_ones, zmm0);
+                min = simde_mm512_abs_epi8(zmm0);
 
                 // Loop over BNs
                 for (k=1; k<5; k++)
                 {
                     zmm0 = p_cnProcBuf[(lut_idxCnProcG6[j][k]/2) + i];
-                    min  = _mm512_min_epu8(min, _mm512_abs_epi8(zmm0));
-                   sgn  = _mm512_xor_si512(sgn, zmm0);
+                    min = simde_mm512_min_epu8(min, simde_mm512_abs_epi8(zmm0));
+                    sgn = simde_mm512_xor_si512(sgn, zmm0);
                 }
 
                 // Store result
-                min = _mm512_min_epu8(min, *p_maxLLR); // 128 in epi8 is -127
+                min = simde_mm512_min_epu8(min, *p_maxLLR); // 128 in epi8 is -127
                 *p_cnProcBufResBit = conditional_negate(min, sgn,zeros);
                 p_cnProcBufResBit++;
             }
@@ -275,8 +275,8 @@ static inline void nrLDPC_cnProc_BG2_AVX512(t_nrLDPC_lut* p_lut, int8_t* cnProcB
         bitOffsetInGroup = (lut_numCnInCnGroups_BG2_R15[4]*NR_LDPC_ZMAX)>>6;
 
         // Set pointers to start of group 8
-        p_cnProcBuf    = (__m512i*) &cnProcBuf   [lut_startAddrCnGroups[4]];
-        p_cnProcBufRes = (__m512i*) &cnProcBufRes[lut_startAddrCnGroups[4]];
+        p_cnProcBuf = (simde__m512i*)&cnProcBuf[lut_startAddrCnGroups[4]];
+        p_cnProcBufRes = (simde__m512i*)&cnProcBufRes[lut_startAddrCnGroups[4]];
 
         // Loop over every BN
         for (j=0; j<8; j++)
@@ -289,19 +289,19 @@ static inline void nrLDPC_cnProc_BG2_AVX512(t_nrLDPC_lut* p_lut, int8_t* cnProcB
             {
                 // Abs and sign of 32 CNs (first BN)
                 zmm0 = p_cnProcBuf[(lut_idxCnProcG8[j][0]/2) + i];
-                sgn  = _mm512_xor_si512(*p_ones, zmm0);
-                min  = _mm512_abs_epi8(zmm0);
+                sgn = simde_mm512_xor_si512(*p_ones, zmm0);
+                min = simde_mm512_abs_epi8(zmm0);
 
                 // Loop over BNs
                 for (k=1; k<7; k++)
                 {
                     zmm0 = p_cnProcBuf[(lut_idxCnProcG8[j][k]/2) + i];
-                    min  = _mm512_min_epu8(min, _mm512_abs_epi8(zmm0));
-                   sgn  = _mm512_xor_si512(sgn, zmm0);
+                    min = simde_mm512_min_epu8(min, simde_mm512_abs_epi8(zmm0));
+                    sgn = simde_mm512_xor_si512(sgn, zmm0);
                 }
 
                 // Store result
-                min = _mm512_min_epu8(min, *p_maxLLR); // 128 in epi8 is -127
+                min = simde_mm512_min_epu8(min, *p_maxLLR); // 128 in epi8 is -127
                 *p_cnProcBufResBit = conditional_negate(min, sgn,zeros);
                 p_cnProcBufResBit++;
             }
@@ -327,8 +327,8 @@ static inline void nrLDPC_cnProc_BG2_AVX512(t_nrLDPC_lut* p_lut, int8_t* cnProcB
         bitOffsetInGroup = (lut_numCnInCnGroups_BG2_R15[5]*NR_LDPC_ZMAX)>>6;
 
         // Set pointers to start of group 10
-        p_cnProcBuf    = (__m512i*) &cnProcBuf   [lut_startAddrCnGroups[5]];
-        p_cnProcBufRes = (__m512i*) &cnProcBufRes[lut_startAddrCnGroups[5]];
+        p_cnProcBuf = (simde__m512i*)&cnProcBuf[lut_startAddrCnGroups[5]];
+        p_cnProcBufRes = (simde__m512i*)&cnProcBufRes[lut_startAddrCnGroups[5]];
 
         // Loop over every BN
         for (j=0; j<10; j++)
@@ -341,19 +341,19 @@ static inline void nrLDPC_cnProc_BG2_AVX512(t_nrLDPC_lut* p_lut, int8_t* cnProcB
             {
                 // Abs and sign of 32 CNs (first BN)
                 zmm0 = p_cnProcBuf[(lut_idxCnProcG10[j][0]/2) + i];
-                sgn  = _mm512_xor_si512(*p_ones, zmm0);
-                min  = _mm512_abs_epi8(zmm0);
+                sgn = simde_mm512_xor_si512(*p_ones, zmm0);
+                min = simde_mm512_abs_epi8(zmm0);
 
                 // Loop over BNs
                 for (k=1; k<9; k++)
                 {
                     zmm0 = p_cnProcBuf[(lut_idxCnProcG10[j][k]/2) + i];
-                    min  = _mm512_min_epu8(min, _mm512_abs_epi8(zmm0));
-                   sgn  = _mm512_xor_si512(sgn, zmm0);
+                    min = simde_mm512_min_epu8(min, simde_mm512_abs_epi8(zmm0));
+                    sgn = simde_mm512_xor_si512(sgn, zmm0);
                 }
 
                 // Store result
-                min = _mm512_min_epu8(min, *p_maxLLR); // 128 in epi8 is -127
+                min = simde_mm512_min_epu8(min, *p_maxLLR); // 128 in epi8 is -127
                 *p_cnProcBufResBit = conditional_negate(min, sgn,zeros);
                 p_cnProcBufResBit++;
             }
@@ -370,8 +370,8 @@ static inline void nrLDPC_cnProc_BG1_AVX512(t_nrLDPC_lut* p_lut, t_nrLDPC_procBu
     int8_t* cnProcBuf    = p_procBuf->cnProcBuf;
     int8_t* cnProcBufRes = p_procBuf->cnProcBufRes;
 
-    __m512i* p_cnProcBuf;
-    __m512i* p_cnProcBufRes;
+    simde__m512i* p_cnProcBuf;
+    simde__m512i* p_cnProcBufRes;
 
     // Number of CNs in Groups
     uint32_t M;
@@ -381,18 +381,14 @@ static inline void nrLDPC_cnProc_BG1_AVX512(t_nrLDPC_lut* p_lut, t_nrLDPC_procBu
     // Offset to each bit within a group in terms of 32 Byte
     uint32_t bitOffsetInGroup;
 
-    __m512i zmm0, min, sgn, zeros;
-
-     zeros  = _mm512_setzero_si512();
-    // maxLLR = _mm512_set1_epi8((char)127);
-    __m512i* p_cnProcBufResBit;
-
-
-    const __m512i* p_ones   = (__m512i*) ones512_epi8;
-    const __m512i* p_maxLLR = (__m512i*) maxLLR512_epi8;
-
+    simde__m512i zmm0, min, sgn, zeros;
 
+    zeros = simde_mm512_setzero_si512();
+    // maxLLR = simde_mm512_set1_epi8((char)127);
+    simde__m512i* p_cnProcBufResBit;
 
+    const simde__m512i* p_ones = (simde__m512i*)ones512_epi8;
+    const simde__m512i* p_maxLLR = (simde__m512i*)maxLLR512_epi8;
 
     // LUT with offsets for bits that need to be processed
     // 1. bit proc requires LLRs of 2. and 3. bit, 2.bits of 1. and 3. etc.
@@ -412,8 +408,8 @@ static inline void nrLDPC_cnProc_BG1_AVX512(t_nrLDPC_lut* p_lut, t_nrLDPC_procBu
         bitOffsetInGroup = (lut_numCnInCnGroups_BG1_R13[0]*NR_LDPC_ZMAX)>>6;
 
         // Set pointers to start of group 3
-        p_cnProcBuf    = (__m512i*) &cnProcBuf   [lut_startAddrCnGroups[0]];
-        p_cnProcBufRes = (__m512i*) &cnProcBufRes[lut_startAddrCnGroups[0]];
+        p_cnProcBuf = (simde__m512i*)&cnProcBuf[lut_startAddrCnGroups[0]];
+        p_cnProcBufRes = (simde__m512i*)&cnProcBufRes[lut_startAddrCnGroups[0]];
 
         // Loop over every BN
         for (j=0; j<3; j++)
@@ -426,17 +422,16 @@ static inline void nrLDPC_cnProc_BG1_AVX512(t_nrLDPC_lut* p_lut, t_nrLDPC_procBu
             {
                 // Abs and sign of 32 CNs (first BN)
                 zmm0 = p_cnProcBuf[(lut_idxCnProcG3[j][0]/2) + i];
-                sgn  = _mm512_xor_si512(*p_ones, zmm0);
-                min  = _mm512_abs_epi8(zmm0);
+                sgn = simde_mm512_xor_si512(*p_ones, zmm0);
+                min = simde_mm512_abs_epi8(zmm0);
 
                 // 32 CNs of second BN
                 zmm0 = p_cnProcBuf[(lut_idxCnProcG3[j][1]/2) + i];
-                min  = _mm512_min_epu8(min, _mm512_abs_epi8(zmm0));
-               sgn  = _mm512_xor_si512(sgn, zmm0);
-
+                min = simde_mm512_min_epu8(min, simde_mm512_abs_epi8(zmm0));
+                sgn = simde_mm512_xor_si512(sgn, zmm0);
 
                 // Store result
-                min = _mm512_min_epu8(min, *p_maxLLR); // 128 in epi8 is -127
+                min = simde_mm512_min_epu8(min, *p_maxLLR); // 128 in epi8 is -127
                 *p_cnProcBufResBit = conditional_negate(min, sgn,zeros);
                 p_cnProcBufResBit++;
             }
@@ -459,8 +454,8 @@ static inline void nrLDPC_cnProc_BG1_AVX512(t_nrLDPC_lut* p_lut, t_nrLDPC_procBu
         bitOffsetInGroup = (lut_numCnInCnGroups_BG1_R13[1]*NR_LDPC_ZMAX)>>6;
 
         // Set pointers to start of group 4
-        p_cnProcBuf    = (__m512i*) &cnProcBuf   [lut_startAddrCnGroups[1]];
-        p_cnProcBufRes = (__m512i*) &cnProcBufRes[lut_startAddrCnGroups[1]];
+        p_cnProcBuf = (simde__m512i*)&cnProcBuf[lut_startAddrCnGroups[1]];
+        p_cnProcBufRes = (simde__m512i*)&cnProcBufRes[lut_startAddrCnGroups[1]];
 
         // Loop over every BN
         for (j=0; j<4; j++)
@@ -473,19 +468,19 @@ static inline void nrLDPC_cnProc_BG1_AVX512(t_nrLDPC_lut* p_lut, t_nrLDPC_procBu
             {
                 // Abs and sign of 32 CNs (first BN)
                 zmm0 = p_cnProcBuf[(lut_idxCnProcG4[j][0]/2) + i];
-                sgn  = _mm512_xor_si512(*p_ones, zmm0);
-                min  = _mm512_abs_epi8(zmm0);
+                sgn = simde_mm512_xor_si512(*p_ones, zmm0);
+                min = simde_mm512_abs_epi8(zmm0);
 
                 // Loop over BNs
                 for (k=1; k<3; k++)
                 {
                     zmm0 = p_cnProcBuf[(lut_idxCnProcG4[j][k]/2) + i];
-                    min  = _mm512_min_epu8(min, _mm512_abs_epi8(zmm0));
-                   sgn  = _mm512_xor_si512(sgn, zmm0);
+                    min = simde_mm512_min_epu8(min, simde_mm512_abs_epi8(zmm0));
+                    sgn = simde_mm512_xor_si512(sgn, zmm0);
                 }
 
                 // Store result
-                min = _mm512_min_epu8(min, *p_maxLLR); // 128 in epi8 is -127
+                min = simde_mm512_min_epu8(min, *p_maxLLR); // 128 in epi8 is -127
                 *p_cnProcBufResBit = conditional_negate(min, sgn,zeros);
                 p_cnProcBufResBit++;
             }
@@ -509,8 +504,8 @@ static inline void nrLDPC_cnProc_BG1_AVX512(t_nrLDPC_lut* p_lut, t_nrLDPC_procBu
         bitOffsetInGroup = (lut_numCnInCnGroups_BG1_R13[2]*NR_LDPC_ZMAX)>>6;
 
         // Set pointers to start of group 5
-        p_cnProcBuf    = (__m512i*) &cnProcBuf   [lut_startAddrCnGroups[2]];
-        p_cnProcBufRes = (__m512i*) &cnProcBufRes[lut_startAddrCnGroups[2]];
+        p_cnProcBuf = (simde__m512i*)&cnProcBuf[lut_startAddrCnGroups[2]];
+        p_cnProcBufRes = (simde__m512i*)&cnProcBufRes[lut_startAddrCnGroups[2]];
 
         // Loop over every BN
         for (j=0; j<5; j++)
@@ -523,19 +518,19 @@ static inline void nrLDPC_cnProc_BG1_AVX512(t_nrLDPC_lut* p_lut, t_nrLDPC_procBu
             {
                 // Abs and sign of 32 CNs (first BN)
                 zmm0 = p_cnProcBuf[(lut_idxCnProcG5[j][0]/2) + i];
-                sgn  = _mm512_xor_si512(*p_ones, zmm0);
-                min  = _mm512_abs_epi8(zmm0);
+                sgn = simde_mm512_xor_si512(*p_ones, zmm0);
+                min = simde_mm512_abs_epi8(zmm0);
 
                 // Loop over BNs
                 for (k=1; k<4; k++)
                 {
                     zmm0 = p_cnProcBuf[(lut_idxCnProcG5[j][k]/2) + i];
-                    min  = _mm512_min_epu8(min, _mm512_abs_epi8(zmm0));
-                   sgn  = _mm512_xor_si512(sgn, zmm0);
+                    min = simde_mm512_min_epu8(min, simde_mm512_abs_epi8(zmm0));
+                    sgn = simde_mm512_xor_si512(sgn, zmm0);
                 }
 
                 // Store result
-                min = _mm512_min_epu8(min, *p_maxLLR); // 128 in epi8 is -127
+                min = simde_mm512_min_epu8(min, *p_maxLLR); // 128 in epi8 is -127
                 *p_cnProcBufResBit = conditional_negate(min, sgn,zeros);
                 p_cnProcBufResBit++;
             }
@@ -560,8 +555,8 @@ static inline void nrLDPC_cnProc_BG1_AVX512(t_nrLDPC_lut* p_lut, t_nrLDPC_procBu
         bitOffsetInGroup = (lut_numCnInCnGroups_BG1_R13[3]*NR_LDPC_ZMAX)>>6;
 
         // Set pointers to start of group 6
-        p_cnProcBuf    = (__m512i*) &cnProcBuf   [lut_startAddrCnGroups[3]];
-        p_cnProcBufRes = (__m512i*) &cnProcBufRes[lut_startAddrCnGroups[3]];
+        p_cnProcBuf = (simde__m512i*)&cnProcBuf[lut_startAddrCnGroups[3]];
+        p_cnProcBufRes = (simde__m512i*)&cnProcBufRes[lut_startAddrCnGroups[3]];
 
         // Loop over every BN
         for (j=0; j<6; j++)
@@ -574,19 +569,19 @@ static inline void nrLDPC_cnProc_BG1_AVX512(t_nrLDPC_lut* p_lut, t_nrLDPC_procBu
             {
                 // Abs and sign of 32 CNs (first BN)
                 zmm0 = p_cnProcBuf[(lut_idxCnProcG6[j][0]/2) + i];
-                sgn  = _mm512_xor_si512(*p_ones, zmm0);
-                min  = _mm512_abs_epi8(zmm0);
+                sgn = simde_mm512_xor_si512(*p_ones, zmm0);
+                min = simde_mm512_abs_epi8(zmm0);
 
                 // Loop over BNs
                 for (k=1; k<5; k++)
                 {
                     zmm0 = p_cnProcBuf[(lut_idxCnProcG6[j][k]/2) + i];
-                    min  = _mm512_min_epu8(min, _mm512_abs_epi8(zmm0));
-                   sgn  = _mm512_xor_si512(sgn, zmm0);
+                    min = simde_mm512_min_epu8(min, simde_mm512_abs_epi8(zmm0));
+                    sgn = simde_mm512_xor_si512(sgn, zmm0);
                 }
 
                 // Store result
-                min = _mm512_min_epu8(min, *p_maxLLR); // 128 in epi8 is -127
+                min = simde_mm512_min_epu8(min, *p_maxLLR); // 128 in epi8 is -127
                 *p_cnProcBufResBit = conditional_negate(min, sgn,zeros);
                 p_cnProcBufResBit++;
             }
@@ -612,8 +607,8 @@ static inline void nrLDPC_cnProc_BG1_AVX512(t_nrLDPC_lut* p_lut, t_nrLDPC_procBu
         bitOffsetInGroup = (lut_numCnInCnGroups_BG1_R13[4]*NR_LDPC_ZMAX)>>6;
 
         // Set pointers to start of group 7
-        p_cnProcBuf    = (__m512i*) &cnProcBuf   [lut_startAddrCnGroups[4]];
-        p_cnProcBufRes = (__m512i*) &cnProcBufRes[lut_startAddrCnGroups[4]];
+        p_cnProcBuf = (simde__m512i*)&cnProcBuf[lut_startAddrCnGroups[4]];
+        p_cnProcBufRes = (simde__m512i*)&cnProcBufRes[lut_startAddrCnGroups[4]];
 
         // Loop over every BN
         for (j=0; j<7; j++)
@@ -626,19 +621,19 @@ static inline void nrLDPC_cnProc_BG1_AVX512(t_nrLDPC_lut* p_lut, t_nrLDPC_procBu
             {
                 // Abs and sign of 32 CNs (first BN)
                 zmm0 = p_cnProcBuf[(lut_idxCnProcG7[j][0]/2) + i];
-                sgn  = _mm512_xor_si512(*p_ones, zmm0);
-                min  = _mm512_abs_epi8(zmm0);
+                sgn = simde_mm512_xor_si512(*p_ones, zmm0);
+                min = simde_mm512_abs_epi8(zmm0);
 
                 // Loop over BNs
                 for (k=1; k<6; k++)
                 {
                     zmm0 = p_cnProcBuf[(lut_idxCnProcG7[j][k]/2) + i];
-                    min  = _mm512_min_epu8(min, _mm512_abs_epi8(zmm0));
-                   sgn  = _mm512_xor_si512(sgn, zmm0);
+                    min = simde_mm512_min_epu8(min, simde_mm512_abs_epi8(zmm0));
+                    sgn = simde_mm512_xor_si512(sgn, zmm0);
                 }
 
                 // Store result
-                min = _mm512_min_epu8(min, *p_maxLLR); // 128 in epi8 is -127
+                min = simde_mm512_min_epu8(min, *p_maxLLR); // 128 in epi8 is -127
                 *p_cnProcBufResBit = conditional_negate(min, sgn,zeros);
                 p_cnProcBufResBit++;
             }
@@ -664,8 +659,8 @@ static inline void nrLDPC_cnProc_BG1_AVX512(t_nrLDPC_lut* p_lut, t_nrLDPC_procBu
         bitOffsetInGroup = (lut_numCnInCnGroups_BG1_R13[5]*NR_LDPC_ZMAX)>>6;
 
         // Set pointers to start of group 8
-        p_cnProcBuf    = (__m512i*) &cnProcBuf   [lut_startAddrCnGroups[5]];
-        p_cnProcBufRes = (__m512i*) &cnProcBufRes[lut_startAddrCnGroups[5]];
+        p_cnProcBuf = (simde__m512i*)&cnProcBuf[lut_startAddrCnGroups[5]];
+        p_cnProcBufRes = (simde__m512i*)&cnProcBufRes[lut_startAddrCnGroups[5]];
 
         // Loop over every BN
         for (j=0; j<8; j++)
@@ -678,19 +673,19 @@ static inline void nrLDPC_cnProc_BG1_AVX512(t_nrLDPC_lut* p_lut, t_nrLDPC_procBu
             {
                 // Abs and sign of 32 CNs (first BN)
                 zmm0 = p_cnProcBuf[(lut_idxCnProcG8[j][0]/2) + i];
-                sgn  = _mm512_xor_si512(*p_ones, zmm0);
-                min  = _mm512_abs_epi8(zmm0);
+                sgn = simde_mm512_xor_si512(*p_ones, zmm0);
+                min = simde_mm512_abs_epi8(zmm0);
 
                 // Loop over BNs
                 for (k=1; k<7; k++)
                 {
                     zmm0 = p_cnProcBuf[(lut_idxCnProcG8[j][k]/2) + i];
-                    min  = _mm512_min_epu8(min, _mm512_abs_epi8(zmm0));
-                   sgn  = _mm512_xor_si512(sgn, zmm0);
+                    min = simde_mm512_min_epu8(min, simde_mm512_abs_epi8(zmm0));
+                    sgn = simde_mm512_xor_si512(sgn, zmm0);
                 }
 
                 // Store result
-                min = _mm512_min_epu8(min, *p_maxLLR); // 128 in epi8 is -127
+                min = simde_mm512_min_epu8(min, *p_maxLLR); // 128 in epi8 is -127
                 *p_cnProcBufResBit = conditional_negate(min, sgn,zeros);
                 p_cnProcBufResBit++;
             }
@@ -717,8 +712,8 @@ static inline void nrLDPC_cnProc_BG1_AVX512(t_nrLDPC_lut* p_lut, t_nrLDPC_procBu
         bitOffsetInGroup = (lut_numCnInCnGroups_BG1_R13[6]*NR_LDPC_ZMAX)>>6;
 
         // Set pointers to start of group 9
-        p_cnProcBuf    = (__m512i*) &cnProcBuf   [lut_startAddrCnGroups[6]];
-        p_cnProcBufRes = (__m512i*) &cnProcBufRes[lut_startAddrCnGroups[6]];
+        p_cnProcBuf = (simde__m512i*)&cnProcBuf[lut_startAddrCnGroups[6]];
+        p_cnProcBufRes = (simde__m512i*)&cnProcBufRes[lut_startAddrCnGroups[6]];
 
         // Loop over every BN
         for (j=0; j<9; j++)
@@ -731,19 +726,19 @@ static inline void nrLDPC_cnProc_BG1_AVX512(t_nrLDPC_lut* p_lut, t_nrLDPC_procBu
             {
                 // Abs and sign of 32 CNs (first BN)
                 zmm0 = p_cnProcBuf[(lut_idxCnProcG9[j][0]/2) + i];
-                sgn  = _mm512_xor_si512(*p_ones, zmm0);
-                min  = _mm512_abs_epi8(zmm0);
+                sgn = simde_mm512_xor_si512(*p_ones, zmm0);
+                min = simde_mm512_abs_epi8(zmm0);
 
                 // Loop over BNs
                 for (k=1; k<8; k++)
                 {
                     zmm0 = p_cnProcBuf[(lut_idxCnProcG9[j][k]/2) + i];
-                    min  = _mm512_min_epu8(min, _mm512_abs_epi8(zmm0));
-                   sgn  = _mm512_xor_si512(sgn, zmm0);
+                    min = simde_mm512_min_epu8(min, simde_mm512_abs_epi8(zmm0));
+                    sgn = simde_mm512_xor_si512(sgn, zmm0);
                 }
 
                 // Store result
-                min = _mm512_min_epu8(min, *p_maxLLR); // 128 in epi8 is -127
+                min = simde_mm512_min_epu8(min, *p_maxLLR); // 128 in epi8 is -127
                 *p_cnProcBufResBit = conditional_negate(min, sgn,zeros);
                 p_cnProcBufResBit++;
             }
@@ -770,8 +765,8 @@ static inline void nrLDPC_cnProc_BG1_AVX512(t_nrLDPC_lut* p_lut, t_nrLDPC_procBu
         bitOffsetInGroup = (lut_numCnInCnGroups_BG1_R13[7]*NR_LDPC_ZMAX)>>6;
 
         // Set pointers to start of group 10
-        p_cnProcBuf    = (__m512i*) &cnProcBuf   [lut_startAddrCnGroups[7]];
-        p_cnProcBufRes = (__m512i*) &cnProcBufRes[lut_startAddrCnGroups[7]];
+        p_cnProcBuf = (simde__m512i*)&cnProcBuf[lut_startAddrCnGroups[7]];
+        p_cnProcBufRes = (simde__m512i*)&cnProcBufRes[lut_startAddrCnGroups[7]];
 
         // Loop over every BN
         for (j=0; j<10; j++)
@@ -784,19 +779,19 @@ static inline void nrLDPC_cnProc_BG1_AVX512(t_nrLDPC_lut* p_lut, t_nrLDPC_procBu
             {
                 // Abs and sign of 32 CNs (first BN)
                 zmm0 = p_cnProcBuf[(lut_idxCnProcG10[j][0]/2) + i];
-                sgn  = _mm512_xor_si512(*p_ones, zmm0);
-                min  = _mm512_abs_epi8(zmm0);
+                sgn = simde_mm512_xor_si512(*p_ones, zmm0);
+                min = simde_mm512_abs_epi8(zmm0);
 
                 // Loop over BNs
                 for (k=1; k<9; k++)
                 {
                     zmm0 = p_cnProcBuf[(lut_idxCnProcG10[j][k]/2) + i];
-                    min  = _mm512_min_epu8(min, _mm512_abs_epi8(zmm0));
-                   sgn  = _mm512_xor_si512(sgn, zmm0);
+                    min = simde_mm512_min_epu8(min, simde_mm512_abs_epi8(zmm0));
+                    sgn = simde_mm512_xor_si512(sgn, zmm0);
                 }
 
                 // Store result
-                min = _mm512_min_epu8(min, *p_maxLLR); // 128 in epi8 is -127
+                min = simde_mm512_min_epu8(min, *p_maxLLR); // 128 in epi8 is -127
                 *p_cnProcBufResBit = conditional_negate(min, sgn,zeros);
                 p_cnProcBufResBit++;
             }
@@ -828,8 +823,8 @@ static inline void nrLDPC_cnProc_BG1_AVX512(t_nrLDPC_lut* p_lut, t_nrLDPC_procBu
         bitOffsetInGroup = (lut_numCnInCnGroups_BG1_R13[8]*NR_LDPC_ZMAX)>>6;
 
         // Set pointers to start of group 19
-        p_cnProcBuf    = (__m512i*) &cnProcBuf   [lut_startAddrCnGroups[8]];
-        p_cnProcBufRes = (__m512i*) &cnProcBufRes[lut_startAddrCnGroups[8]];
+        p_cnProcBuf = (simde__m512i*)&cnProcBuf[lut_startAddrCnGroups[8]];
+        p_cnProcBufRes = (simde__m512i*)&cnProcBufRes[lut_startAddrCnGroups[8]];
 
         // Loop over every BN
         for (j=0; j<19; j++)
@@ -842,19 +837,19 @@ static inline void nrLDPC_cnProc_BG1_AVX512(t_nrLDPC_lut* p_lut, t_nrLDPC_procBu
             {
                 // Abs and sign of 32 CNs (first BN)
                 zmm0 = p_cnProcBuf[(lut_idxCnProcG19[j][0]/2) + i];
-                sgn  = _mm512_xor_si512(*p_ones, zmm0);
-                min  = _mm512_abs_epi8(zmm0);
+                sgn = simde_mm512_xor_si512(*p_ones, zmm0);
+                min = simde_mm512_abs_epi8(zmm0);
 
                 // Loop over BNs
                 for (k=1; k<18; k++)
                 {
                     zmm0 = p_cnProcBuf[(lut_idxCnProcG19[j][k]/2) + i];
-                    min  = _mm512_min_epu8(min, _mm512_abs_epi8(zmm0));
-                   sgn  = _mm512_xor_si512(sgn, zmm0);
+                    min = simde_mm512_min_epu8(min, simde_mm512_abs_epi8(zmm0));
+                    sgn = simde_mm512_xor_si512(sgn, zmm0);
                 }
 
                 // Store result
-                min = _mm512_min_epu8(min, *p_maxLLR); // 128 in epi8 is -127
+                min = simde_mm512_min_epu8(min, *p_maxLLR); // 128 in epi8 is -127
                 *p_cnProcBufResBit = conditional_negate(min, sgn,zeros);
                 p_cnProcBufResBit++;
             }
diff --git a/openair1/PHY/CODING/nrLDPC_decoder/nrLDPC_decoder.c b/openair1/PHY/CODING/nrLDPC_decoder/nrLDPC_decoder.c
index ced3b5831ee4258d408eb2a01b2fd7fb9665c2b8..5f56879eba58fe43a47dd40f5d2984dc842ba007 100644
--- a/openair1/PHY/CODING/nrLDPC_decoder/nrLDPC_decoder.c
+++ b/openair1/PHY/CODING/nrLDPC_decoder/nrLDPC_decoder.c
@@ -53,7 +53,7 @@
 #include "cnProc_avx512/nrLDPC_cnProc_BG2_R13_AVX512.h"
 #include "cnProc_avx512/nrLDPC_cnProc_BG2_R23_AVX512.h"
 
-#else
+#elif defined(__AVX2__)
 
 /*----------------------------------------------------------------------
 |                  cn Processing files -->AVX2
@@ -68,6 +68,16 @@
 #include "cnProc/nrLDPC_cnProc_BG2_R13_AVX2.h"
 #include "cnProc/nrLDPC_cnProc_BG2_R23_AVX2.h"
 
+#else
+
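+// Fall back to 128-bit SIMD kernels (SSE/NEON via SIMDE) when neither AVX512 nor AVX2 is available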
+//BG1------------------------------------------------------------------
+#include "cnProc128/nrLDPC_cnProc_BG1_R13_128.h"
+#include "cnProc128/nrLDPC_cnProc_BG1_R23_128.h"
+#include "cnProc128/nrLDPC_cnProc_BG1_R89_128.h"
+//BG2 --------------------------------------------------------------------
+#include "cnProc128/nrLDPC_cnProc_BG2_R15_128.h"
+#include "cnProc128/nrLDPC_cnProc_BG2_R13_128.h"
+#include "cnProc128/nrLDPC_cnProc_BG2_R23_128.h"
 #endif
 
 /*----------------------------------------------------------------------
@@ -75,6 +85,7 @@
 /----------------------------------------------------------------------*/
 
 //bnProcPc-------------------------------------------------------------
+#ifdef __AVX2__
 //BG1------------------------------------------------------------------
 #include "bnProcPc/nrLDPC_bnProcPc_BG1_R13_AVX2.h"
 #include "bnProcPc/nrLDPC_bnProcPc_BG1_R23_AVX2.h"
@@ -83,6 +94,14 @@
 #include "bnProcPc/nrLDPC_bnProcPc_BG2_R15_AVX2.h"
 #include "bnProcPc/nrLDPC_bnProcPc_BG2_R13_AVX2.h"
 #include "bnProcPc/nrLDPC_bnProcPc_BG2_R23_AVX2.h"
+#else
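+// 128-bit fallback versions of the bnProcPc kernels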
+#include "bnProcPc128/nrLDPC_bnProcPc_BG1_R13_128.h"
+#include "bnProcPc128/nrLDPC_bnProcPc_BG1_R23_128.h"
+#include "bnProcPc128/nrLDPC_bnProcPc_BG1_R89_128.h"
+#include "bnProcPc128/nrLDPC_bnProcPc_BG2_R15_128.h"
+#include "bnProcPc128/nrLDPC_bnProcPc_BG2_R13_128.h"
+#include "bnProcPc128/nrLDPC_bnProcPc_BG2_R23_128.h"
+#endif
 
 //bnProc----------------------------------------------------------------
 
@@ -96,7 +115,7 @@
 #include "bnProc_avx512/nrLDPC_bnProc_BG2_R13_AVX512.h"
 #include "bnProc_avx512/nrLDPC_bnProc_BG2_R23_AVX512.h"
 
-#else
+#elif defined(__AVX2__)
 #include "bnProc/nrLDPC_bnProc_BG1_R13_AVX2.h"
 #include "bnProc/nrLDPC_bnProc_BG1_R23_AVX2.h"
 #include "bnProc/nrLDPC_bnProc_BG1_R89_AVX2.h"
@@ -104,7 +123,14 @@
 #include "bnProc/nrLDPC_bnProc_BG2_R15_AVX2.h"
 #include "bnProc/nrLDPC_bnProc_BG2_R13_AVX2.h"
 #include "bnProc/nrLDPC_bnProc_BG2_R23_AVX2.h"
-
+#else
+#include "bnProc128/nrLDPC_bnProc_BG1_R13_128.h"
+#include "bnProc128/nrLDPC_bnProc_BG1_R23_128.h"
+#include "bnProc128/nrLDPC_bnProc_BG1_R89_128.h"
+//BG2 --------------------------------------------------------------------
+#include "bnProc128/nrLDPC_bnProc_BG2_R15_128.h"
+#include "bnProc128/nrLDPC_bnProc_BG2_R13_128.h"
+#include "bnProc128/nrLDPC_bnProc_BG2_R23_128.h"
 #endif
 
 //#define NR_LDPC_ENABLE_PARITY_CHECK
@@ -216,8 +242,10 @@ static inline uint32_t nrLDPC_decoder_core(int8_t* p_llr,
             {
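+                // Select the widest kernel available at compile time: AVX512, then AVX2, then 128-bit SIMD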
                 #if defined(__AVX512BW__)
                 nrLDPC_cnProc_BG1_R13_AVX512(cnProcBuf, cnProcBufRes, Z);
-                #else
+                #elif defined(__AVX2__)
                 nrLDPC_cnProc_BG1_R13_AVX2(cnProcBuf, cnProcBufRes, Z);
+                #else
+                nrLDPC_cnProc_BG1_R13_128(cnProcBuf, cnProcBufRes, Z);
                 #endif
                 break;
             }
@@ -226,8 +254,10 @@ static inline uint32_t nrLDPC_decoder_core(int8_t* p_llr,
             {
                 #if defined(__AVX512BW__)
                 nrLDPC_cnProc_BG1_R23_AVX512(cnProcBuf,cnProcBufRes, Z);
-                #else
+                #elif defined(__AVX2__)
                 nrLDPC_cnProc_BG1_R23_AVX2(cnProcBuf, cnProcBufRes, Z);
+                #else
+                nrLDPC_cnProc_BG1_R23_128(cnProcBuf, cnProcBufRes, Z);
                 #endif
                 break;
             }
@@ -236,8 +266,10 @@ static inline uint32_t nrLDPC_decoder_core(int8_t* p_llr,
             {
                 #if defined(__AVX512BW__)
                  nrLDPC_cnProc_BG1_R89_AVX512(cnProcBuf, cnProcBufRes, Z);
-                #else
+                #elif defined(__AVX2__)
                 nrLDPC_cnProc_BG1_R89_AVX2(cnProcBuf, cnProcBufRes, Z);
+                #else
+                nrLDPC_cnProc_BG1_R89_128(cnProcBuf, cnProcBufRes, Z);
                 #endif
                 break;
             }
@@ -253,26 +285,32 @@ static inline uint32_t nrLDPC_decoder_core(int8_t* p_llr,
             {
                 #if defined(__AVX512BW__)
                 nrLDPC_cnProc_BG2_R15_AVX512(cnProcBuf, cnProcBufRes, Z);
-                #else
+                #elif defined(__AVX2__)
                 nrLDPC_cnProc_BG2_R15_AVX2(cnProcBuf, cnProcBufRes, Z);
+                #else
+                nrLDPC_cnProc_BG2_R15_128(cnProcBuf, cnProcBufRes, Z);
                 #endif
                 break;
             }
             case 13:
             {
                 #if defined(__AVX512BW__)
-                 nrLDPC_cnProc_BG2_R13_AVX512(cnProcBuf, cnProcBufRes, Z);
-                #else
+                nrLDPC_cnProc_BG2_R13_AVX512(cnProcBuf, cnProcBufRes, Z);
+                #elif defined(__AVX2__)
                 nrLDPC_cnProc_BG2_R13_AVX2(cnProcBuf, cnProcBufRes, Z);
+                #else
+                nrLDPC_cnProc_BG2_R13_128(cnProcBuf, cnProcBufRes, Z);
                 #endif
                 break;
             }
             case 23:
             {
                 #if defined(__AVX512BW__)
-                 nrLDPC_cnProc_BG2_R23_AVX512(cnProcBuf, cnProcBufRes, Z);
-                #else
+                nrLDPC_cnProc_BG2_R23_AVX512(cnProcBuf, cnProcBufRes, Z);
+                #elif defined(__AVX2__)
                 nrLDPC_cnProc_BG2_R23_AVX2(cnProcBuf, cnProcBufRes, Z);
+                #else
+                nrLDPC_cnProc_BG2_R23_128(cnProcBuf, cnProcBufRes, Z);
                 #endif
                 break;
             }
@@ -309,17 +347,29 @@ static inline uint32_t nrLDPC_decoder_core(int8_t* p_llr,
         switch (R) {
             case 13:
             {
+#ifdef __AVX2__
                 nrLDPC_bnProcPc_BG1_R13_AVX2(bnProcBuf,bnProcBufRes,llrRes, llrProcBuf, Z);
-                break;
+#else
+                nrLDPC_bnProcPc_BG1_R13_128(bnProcBuf,bnProcBufRes,llrRes, llrProcBuf, Z);
+#endif
+                break;
             }
             case 23:
             {
+#ifdef __AVX2__
                 nrLDPC_bnProcPc_BG1_R23_AVX2(bnProcBuf,bnProcBufRes, llrRes, llrProcBuf, Z);
+#else
+                nrLDPC_bnProcPc_BG1_R23_128(bnProcBuf,bnProcBufRes, llrRes, llrProcBuf, Z);
+#endif
                 break;
             }
             case 89:
             {
+#ifdef __AVX2__
                 nrLDPC_bnProcPc_BG1_R89_AVX2(bnProcBuf,bnProcBufRes, llrRes, llrProcBuf, Z);
+#else
+                nrLDPC_bnProcPc_BG1_R89_128(bnProcBuf,bnProcBufRes, llrRes, llrProcBuf, Z);
+#endif
                 break;
             }
         }
@@ -327,18 +377,30 @@ static inline uint32_t nrLDPC_decoder_core(int8_t* p_llr,
         switch (R) {
             case 15:
             {
+#ifdef __AVX2__
                 nrLDPC_bnProcPc_BG2_R15_AVX2(bnProcBuf,bnProcBufRes, llrRes, llrProcBuf, Z);
+#else
+                nrLDPC_bnProcPc_BG2_R15_128(bnProcBuf,bnProcBufRes, llrRes, llrProcBuf, Z);
+#endif
                 break;
             }
             case 13:
             {
+#ifdef __AVX2__
                 nrLDPC_bnProcPc_BG2_R13_AVX2(bnProcBuf,bnProcBufRes,llrRes,llrProcBuf, Z);
+#else
+                nrLDPC_bnProcPc_BG2_R13_128(bnProcBuf,bnProcBufRes,llrRes,llrProcBuf, Z);
+#endif
                 break;
             }
 
             case 23:
             {
+#ifdef __AVX2__
                 nrLDPC_bnProcPc_BG2_R23_AVX2(bnProcBuf,bnProcBufRes,llrRes, llrProcBuf, Z);
+#else
+                nrLDPC_bnProcPc_BG2_R23_128(bnProcBuf,bnProcBufRes,llrRes, llrProcBuf, Z);
+#endif
                 break;
             }
         }
@@ -363,8 +425,10 @@ static inline uint32_t nrLDPC_decoder_core(int8_t* p_llr,
             {
                 #if defined(__AVX512BW__)
                 nrLDPC_bnProc_BG1_R13_AVX512(bnProcBuf, bnProcBufRes,llrRes, Z);
-                #else
+                #elif defined (__AVX2__)
                 nrLDPC_bnProc_BG1_R13_AVX2(bnProcBuf, bnProcBufRes,llrRes, Z);
+                #else
+                nrLDPC_bnProc_BG1_R13_128(bnProcBuf, bnProcBufRes,llrRes, Z);
                 #endif
                 break;
             }
@@ -372,8 +436,10 @@ static inline uint32_t nrLDPC_decoder_core(int8_t* p_llr,
             {
                 #if defined(__AVX512BW__)
                 nrLDPC_bnProc_BG1_R23_AVX512(bnProcBuf, bnProcBufRes,llrRes, Z);
+                #elif defined(__AVX2__)
+                nrLDPC_bnProc_BG1_R23_AVX2(bnProcBuf, bnProcBufRes,llrRes, Z);
                 #else
-                nrLDPC_bnProc_BG1_R23_AVX2(bnProcBuf, bnProcBufRes,llrRes, Z);
+                nrLDPC_bnProc_BG1_R23_128(bnProcBuf, bnProcBufRes,llrRes, Z);
                 #endif
                 break;
             }
@@ -381,8 +447,10 @@ static inline uint32_t nrLDPC_decoder_core(int8_t* p_llr,
             {
                 #if defined(__AVX512BW__)
                 nrLDPC_bnProc_BG1_R89_AVX512(bnProcBuf, bnProcBufRes,llrRes, Z);
-                #else
+                #elif defined(__AVX2__)
                 nrLDPC_bnProc_BG1_R89_AVX2(bnProcBuf, bnProcBufRes,llrRes, Z);
+                #else
+                nrLDPC_bnProc_BG1_R89_128(bnProcBuf, bnProcBufRes,llrRes, Z);
                 #endif
                 break;
             }
@@ -397,8 +465,10 @@ static inline uint32_t nrLDPC_decoder_core(int8_t* p_llr,
             {
                 #if defined(__AVX512BW__)
                 nrLDPC_bnProc_BG2_R15_AVX512(bnProcBuf, bnProcBufRes,llrRes, Z);
-                #else
+                #elif defined(__AVX2__)
                 nrLDPC_bnProc_BG2_R15_AVX2(bnProcBuf, bnProcBufRes,llrRes, Z);
+                #else
+                nrLDPC_bnProc_BG2_R15_128(bnProcBuf, bnProcBufRes,llrRes, Z);
                 #endif
                 break;
             }
@@ -406,8 +476,10 @@ static inline uint32_t nrLDPC_decoder_core(int8_t* p_llr,
             {
                 #if defined(__AVX512BW__)
                 nrLDPC_bnProc_BG2_R13_AVX512(bnProcBuf, bnProcBufRes,llrRes, Z);
-                #else
+                #elif defined(__AVX2__)
                 nrLDPC_bnProc_BG2_R13_AVX2(bnProcBuf, bnProcBufRes,llrRes, Z);
+                #else
+                nrLDPC_bnProc_BG2_R13_128(bnProcBuf, bnProcBufRes,llrRes, Z);
                 #endif
                 break;
             }
@@ -416,8 +488,10 @@ static inline uint32_t nrLDPC_decoder_core(int8_t* p_llr,
             {
                 #if defined(__AVX512BW__)
                 nrLDPC_bnProc_BG2_R23_AVX512(bnProcBuf, bnProcBufRes,llrRes, Z);
-                #else
+                #elif defined(__AVX2__)
                 nrLDPC_bnProc_BG2_R23_AVX2(bnProcBuf, bnProcBufRes,llrRes, Z);
+                #else
+                nrLDPC_bnProc_BG2_R23_128(bnProcBuf, bnProcBufRes,llrRes, Z);
                 #endif
                 break;
             }
@@ -475,26 +549,32 @@ static inline uint32_t nrLDPC_decoder_core(int8_t* p_llr,
             {
                 #if defined(__AVX512BW__)
                 nrLDPC_cnProc_BG1_R13_AVX512(cnProcBuf, cnProcBufRes, Z);
-                #else
+                #elif defined(__AVX2__)
                 nrLDPC_cnProc_BG1_R13_AVX2(cnProcBuf, cnProcBufRes, Z);
+                #else
+                nrLDPC_cnProc_BG1_R13_128(cnProcBuf, cnProcBufRes, Z);
                 #endif
                 break;
             }
             case 23:
             {
                 #if defined(__AVX512BW__)
-                 nrLDPC_cnProc_BG1_R23_AVX512(cnProcBuf, cnProcBufRes, Z);
-                #else
+                nrLDPC_cnProc_BG1_R23_AVX512(cnProcBuf, cnProcBufRes, Z);
+                #elif defined(__AVX2__)
                 nrLDPC_cnProc_BG1_R23_AVX2(cnProcBuf, cnProcBufRes, Z);
+                #else
+                nrLDPC_cnProc_BG1_R23_128(cnProcBuf, cnProcBufRes, Z);
                 #endif
                 break;
             }
             case 89:
             {
                 #if defined(__AVX512BW__)
-                 nrLDPC_cnProc_BG1_R89_AVX512(cnProcBuf, cnProcBufRes, Z);
-                #else
+                nrLDPC_cnProc_BG1_R89_AVX512(cnProcBuf, cnProcBufRes, Z);
+                #elif defined(__AVX2__)
                 nrLDPC_cnProc_BG1_R89_AVX2(cnProcBuf, cnProcBufRes, Z);
+                #else
+                nrLDPC_cnProc_BG1_R89_128(cnProcBuf, cnProcBufRes, Z);
                 #endif
                 break;
             }
@@ -509,26 +589,32 @@ static inline uint32_t nrLDPC_decoder_core(int8_t* p_llr,
             {
                 #if defined(__AVX512BW__)
                 nrLDPC_cnProc_BG2_R15_AVX512(cnProcBuf,cnProcBufRes, Z);
-                #else
+                #elif defined(__AVX2__)
                 nrLDPC_cnProc_BG2_R15_AVX2(cnProcBuf, cnProcBufRes, Z);
+                #else
+                nrLDPC_cnProc_BG2_R15_128(cnProcBuf, cnProcBufRes, Z);
                 #endif
                 break;
             }
             case 13:
             {
                 #if defined(__AVX512BW__)
-                 nrLDPC_cnProc_BG2_R13_AVX512(cnProcBuf, cnProcBufRes, Z);
-                #else
+                nrLDPC_cnProc_BG2_R13_AVX512(cnProcBuf, cnProcBufRes, Z);
+                #elif defined(__AVX2__)
                 nrLDPC_cnProc_BG2_R13_AVX2(cnProcBuf, cnProcBufRes, Z);
+                #else
+                nrLDPC_cnProc_BG2_R13_128(cnProcBuf, cnProcBufRes, Z);
                 #endif
                 break;
             } 
             case 23:
             {
                 #if defined(__AVX512BW__)
-                 nrLDPC_cnProc_BG2_R23_AVX512(cnProcBuf, cnProcBufRes, Z);
-                #else
+                nrLDPC_cnProc_BG2_R23_AVX512(cnProcBuf, cnProcBufRes, Z);
+                #elif defined(__AVX2__)
                 nrLDPC_cnProc_BG2_R23_AVX2(cnProcBuf, cnProcBufRes, Z);
+                #else
+                nrLDPC_cnProc_BG2_R23_128(cnProcBuf, cnProcBufRes, Z);
                 #endif
                 break;
             }
@@ -567,17 +653,29 @@ static inline uint32_t nrLDPC_decoder_core(int8_t* p_llr,
           switch (R) {
             case 13:
             {
+#ifdef __AVX2__
                 nrLDPC_bnProcPc_BG1_R13_AVX2(bnProcBuf,bnProcBufRes,llrRes, llrProcBuf, Z);
+#else
+                nrLDPC_bnProcPc_BG1_R13_128(bnProcBuf,bnProcBufRes,llrRes, llrProcBuf, Z);
+#endif
                 break;
             }
             case 23:
             {
+#ifdef __AVX2__
                 nrLDPC_bnProcPc_BG1_R23_AVX2(bnProcBuf,bnProcBufRes, llrRes, llrProcBuf, Z);
+#else
+                nrLDPC_bnProcPc_BG1_R23_128(bnProcBuf,bnProcBufRes, llrRes, llrProcBuf, Z);
+#endif
                 break;
             }
             case 89:
             {
+#ifdef __AVX2__
                 nrLDPC_bnProcPc_BG1_R89_AVX2(bnProcBuf,bnProcBufRes, llrRes, llrProcBuf, Z);
+#else
+                nrLDPC_bnProcPc_BG1_R89_128(bnProcBuf,bnProcBufRes, llrRes, llrProcBuf, Z);
+#endif
                 break;
             }
           }
@@ -586,17 +684,29 @@ static inline uint32_t nrLDPC_decoder_core(int8_t* p_llr,
           {
             case 15:
             {
+#ifdef __AVX2__
                 nrLDPC_bnProcPc_BG2_R15_AVX2(bnProcBuf,bnProcBufRes,llrRes, llrProcBuf, Z);
+#else
+                nrLDPC_bnProcPc_BG2_R15_128(bnProcBuf,bnProcBufRes,llrRes, llrProcBuf, Z);
+#endif
                 break;
             }
             case 13:
             {
+#ifdef __AVX2__
                 nrLDPC_bnProcPc_BG2_R13_AVX2(bnProcBuf,bnProcBufRes,llrRes, llrProcBuf, Z);
+#else
+                nrLDPC_bnProcPc_BG2_R13_128(bnProcBuf,bnProcBufRes,llrRes, llrProcBuf, Z);
+#endif
                 break;
             }
             case 23:
             {
+#ifdef __AVX2__
                 nrLDPC_bnProcPc_BG2_R23_AVX2(bnProcBuf,bnProcBufRes,llrRes, llrProcBuf, Z);
+#else
+                nrLDPC_bnProcPc_BG2_R23_128(bnProcBuf,bnProcBufRes,llrRes, llrProcBuf, Z);
+#endif
                 break;
             }
           }
@@ -618,8 +728,10 @@ static inline uint32_t nrLDPC_decoder_core(int8_t* p_llr,
             {
                 #if defined(__AVX512BW__)
                 nrLDPC_bnProc_BG1_R13_AVX512(bnProcBuf, bnProcBufRes,llrRes, Z);
-                #else
+                #elif defined(__AVX2__)
                 nrLDPC_bnProc_BG1_R13_AVX2(bnProcBuf, bnProcBufRes,llrRes, Z);
+                #else
+                nrLDPC_bnProc_BG1_R13_128(bnProcBuf, bnProcBufRes,llrRes, Z);
                 #endif
                 break;
             }
@@ -627,8 +739,10 @@ static inline uint32_t nrLDPC_decoder_core(int8_t* p_llr,
             {
                 #if defined(__AVX512BW__)
                 nrLDPC_bnProc_BG1_R23_AVX512(bnProcBuf, bnProcBufRes,llrRes, Z);
-                #else
+                #elif defined(__AVX2__)
                 nrLDPC_bnProc_BG1_R23_AVX2(bnProcBuf,bnProcBufRes,llrRes, Z);
+                #else
+                nrLDPC_bnProc_BG1_R23_128(bnProcBuf,bnProcBufRes,llrRes, Z);
                 #endif
                 break;
             }
@@ -636,8 +750,10 @@ static inline uint32_t nrLDPC_decoder_core(int8_t* p_llr,
             {
                 #if defined(__AVX512BW__)
                 nrLDPC_bnProc_BG1_R89_AVX512(bnProcBuf, bnProcBufRes,llrRes, Z);
-                #else
+                #elif defined(__AVX2__)
                 nrLDPC_bnProc_BG1_R89_AVX2(bnProcBuf, bnProcBufRes,llrRes, Z);
+                #else
+                nrLDPC_bnProc_BG1_R89_128(bnProcBuf, bnProcBufRes,llrRes, Z);
                 #endif
                 break;
             }
@@ -649,8 +765,10 @@ static inline uint32_t nrLDPC_decoder_core(int8_t* p_llr,
             {
                 #if defined(__AVX512BW__)
                 nrLDPC_bnProc_BG2_R15_AVX512(bnProcBuf, bnProcBufRes,llrRes, Z);
-                #else
+                #elif defined(__AVX2__)
                 nrLDPC_bnProc_BG2_R15_AVX2(bnProcBuf, bnProcBufRes,llrRes, Z);
+                #else
+                nrLDPC_bnProc_BG2_R15_128(bnProcBuf, bnProcBufRes,llrRes, Z);
                 #endif
                 break;
             }
@@ -658,8 +776,10 @@ static inline uint32_t nrLDPC_decoder_core(int8_t* p_llr,
             {
                 #if defined(__AVX512BW__)
                 nrLDPC_bnProc_BG2_R13_AVX512(bnProcBuf, bnProcBufRes,llrRes, Z);
-                #else
+                #elif defined(__AVX2__)
                 nrLDPC_bnProc_BG2_R13_AVX2(bnProcBuf, bnProcBufRes,llrRes, Z);
+                #else
+                nrLDPC_bnProc_BG2_R13_128(bnProcBuf, bnProcBufRes,llrRes, Z);
                 #endif
                 break;
             }
@@ -667,8 +787,10 @@ static inline uint32_t nrLDPC_decoder_core(int8_t* p_llr,
             {
                 #if defined(__AVX512BW__)
                 nrLDPC_bnProc_BG2_R23_AVX512(bnProcBuf, bnProcBufRes,llrRes, Z);
-                #else
+                #elif defined(__AVX2__)
                 nrLDPC_bnProc_BG2_R23_AVX2(bnProcBuf, bnProcBufRes,llrRes, Z);
+                #else
+                nrLDPC_bnProc_BG2_R23_128(bnProcBuf, bnProcBufRes,llrRes, Z);
                 #endif
                 break;
             }
diff --git a/openair1/PHY/CODING/nrLDPC_decoder/nrLDPC_tools/CMakeLists.txt b/openair1/PHY/CODING/nrLDPC_decoder/nrLDPC_tools/CMakeLists.txt
index 669ab17aa7d8ffe1491fd19ebd5ccd157ed15a36..b84b259152aa4db504186b714562237623eb01de 100644
--- a/openair1/PHY/CODING/nrLDPC_decoder/nrLDPC_tools/CMakeLists.txt
+++ b/openair1/PHY/CODING/nrLDPC_decoder/nrLDPC_tools/CMakeLists.txt
@@ -7,13 +7,17 @@ add_subdirectory(${CMAKE_CURRENT_LIST_DIR}/generator_cnProc_avx512 ldpc/generato
 add_custom_target(ldpc_generators)
 add_dependencies(ldpc_generators
                  bnProc_gen_avx2
+                 bnProc_gen_128
                  bnProc_gen_avx512
                  cnProc_gen_avx2
+                 cnProc_gen_128
                  cnProc_gen_avx512)
 
 add_library(ldpc_gen_HEADERS INTERFACE)
 target_link_libraries(ldpc_gen_HEADERS INTERFACE
                       bnProc_gen_avx2_HEADERS
+                      bnProc_gen_128_HEADERS
                       bnProc_gen_avx512_HEADERS
                       cnProc_gen_avx2_HEADERS
+                      cnProc_gen_128_HEADERS
                       cnProc_gen_avx512_HEADERS)
diff --git a/openair1/PHY/CODING/nrLDPC_decoder/nrLDPC_tools/generator_bnProc/CMakeLists.txt b/openair1/PHY/CODING/nrLDPC_decoder/nrLDPC_tools/generator_bnProc/CMakeLists.txt
index baf2f19d97c85ded840674fbbed32e9a40a56ae9..96ad39f79c9190ee84265c44ffa12d0d774b1433 100644
--- a/openair1/PHY/CODING/nrLDPC_decoder/nrLDPC_tools/generator_bnProc/CMakeLists.txt
+++ b/openair1/PHY/CODING/nrLDPC_decoder/nrLDPC_tools/generator_bnProc/CMakeLists.txt
@@ -4,8 +4,16 @@ add_executable(bnProc_gen_avx2
                bnProcPc_gen_BG1_avx2.c
                bnProcPc_gen_BG2_avx2.c
                main.c)
-target_compile_options(bnProc_gen_avx2 PRIVATE -W -Wall -mavx2)
+add_executable(bnProc_gen_128
+               bnProc_gen_BG1_128.c
+               bnProc_gen_BG2_128.c
+               bnProcPc_gen_BG1_128.c
+               bnProcPc_gen_BG2_128.c
+               main128.c)
+
+target_compile_options(bnProc_gen_avx2 PRIVATE -W -Wall)
 
+target_compile_options(bnProc_gen_128 PRIVATE -W -Wall)
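+# The generators run on the build host and only emit C source text, so they
+# need no SIMD compile flags themselves.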
 #set(bnProc_headers
 #    bnProc/nrLDPC_bnProc_BG1_R13_AVX2.h
 #    bnProc/nrLDPC_bnProc_BG1_R23_AVX2.h
@@ -30,7 +38,18 @@ add_custom_command(TARGET bnProc_gen_avx2 POST_BUILD
   DEPENDS bnProc_gen_avx2
   COMMENT "Generating LDPC bnProc header files for AVX2"
 )
+add_custom_command(TARGET bnProc_gen_128 POST_BUILD
+  #OUTPUT ${bnProc_headers} ${bnProcPc_headers}
+  COMMAND ${CMAKE_COMMAND} -E make_directory bnProc128
+  COMMAND ${CMAKE_COMMAND} -E make_directory bnProcPc128
+  COMMAND bnProc_gen_128 .
+  DEPENDS bnProc_gen_128
+  COMMENT "Generating LDPC bnProc header files for 128-bit SIMD"
+)
 
 add_library(bnProc_gen_avx2_HEADERS INTERFACE)
 target_include_directories(bnProc_gen_avx2_HEADERS INTERFACE ${CMAKE_CURRENT_BINARY_DIR})
 add_dependencies(bnProc_gen_avx2_HEADERS bnProc_gen_avx2)
+add_library(bnProc_gen_128_HEADERS INTERFACE)
+target_include_directories(bnProc_gen_128_HEADERS INTERFACE ${CMAKE_CURRENT_BINARY_DIR})
+add_dependencies(bnProc_gen_128_HEADERS bnProc_gen_128)
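+# Consumers pick up the generated bnProc128/ and bnProcPc128/ headers through this interface library.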
diff --git a/openair1/PHY/CODING/nrLDPC_decoder/nrLDPC_tools/generator_bnProc/bnProcPc_gen_BG1_128.c b/openair1/PHY/CODING/nrLDPC_decoder/nrLDPC_tools/generator_bnProc/bnProcPc_gen_BG1_128.c
new file mode 100644
index 0000000000000000000000000000000000000000..5dc665a8b9ab3f4b499602cd46ccb22b0a55dc53
--- /dev/null
+++ b/openair1/PHY/CODING/nrLDPC_decoder/nrLDPC_tools/generator_bnProc/bnProcPc_gen_BG1_128.c
@@ -0,0 +1,163 @@
+/*
+ * Licensed to the OpenAirInterface (OAI) Software Alliance under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The OpenAirInterface Software Alliance licenses this file to You under
+ * the OAI Public License, Version 1.1  (the "License"); you may not use this file
+ * except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.openairinterface.org/?page_id=698
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *-------------------------------------------------------------------------------
+ * For more information about the OpenAirInterface (OAI) Software Alliance:
+ *      contact@openairinterface.org
+ */
+
+#include <stdio.h>
+#include <stdlib.h>
+#include <stdint.h>
+#include "../../nrLDPCdecoder_defs.h"
+#include "../../nrLDPC_types.h"
+
+
+void nrLDPC_bnProcPc_BG1_generator_128(const char *dir, int R)
+{
+  const char *ratestr[3]={"13","23","89"};
+
+  if (R < 0 || R > 2) { printf("Illegal R %d\n", R); abort(); }
+
+  char fname[FILENAME_MAX+1];
+  snprintf(fname, sizeof(fname), "%s/bnProcPc128/nrLDPC_bnProcPc_BG1_R%s_128.h", dir, ratestr[R]);
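+  // The bnProcPc128/ output directory is created beforehand by the CMake custom command.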
+  FILE *fd=fopen(fname,"w");
+  if (fd == NULL) {
+    printf("Cannot create file %s\n", fname);
+    abort();
+  }
+
+  fprintf(fd,"#include <stdint.h>\n");
+  fprintf(fd,"#include \"PHY/sse_intrin.h\"\n");
+
+  fprintf(fd,"static inline void nrLDPC_bnProcPc_BG1_R%s_128(int8_t* bnProcBuf,int8_t* bnProcBufRes,int8_t* llrRes ,  int8_t* llrProcBuf, uint16_t Z ) {\n",ratestr[R]);
+    const uint8_t*  lut_numBnInBnGroups;
+    const uint32_t* lut_startAddrBnGroups;
+    const uint16_t* lut_startAddrBnGroupsLlr;
+    if (R==0) {
+      lut_numBnInBnGroups = lut_numBnInBnGroups_BG1_R13;
+      lut_startAddrBnGroups = lut_startAddrBnGroups_BG1_R13;
+      lut_startAddrBnGroupsLlr = lut_startAddrBnGroupsLlr_BG1_R13;
+    }
+    else if (R==1) {
+      lut_numBnInBnGroups = lut_numBnInBnGroups_BG1_R23;
+      lut_startAddrBnGroups = lut_startAddrBnGroups_BG1_R23;
+      lut_startAddrBnGroupsLlr = lut_startAddrBnGroupsLlr_BG1_R23;
+    }
+    else if (R==2) {
+      lut_numBnInBnGroups = lut_numBnInBnGroups_BG1_R89;
+      lut_startAddrBnGroups = lut_startAddrBnGroups_BG1_R89;
+      lut_startAddrBnGroupsLlr = lut_startAddrBnGroupsLlr_BG1_R89;
+    }
+    else { printf("aborting, illegal R %d\n",R); fclose(fd); abort(); }
+    // Loop counter over CNs within a group
+    uint32_t k;
+    // Offset to each CN within a group, in units of 16 bytes (one 128-bit vector)
+    uint32_t cnOffsetInGroup;
+    uint8_t idxBnGroup = 0;
+
+    fprintf(fd,"  // Process group with 1 CN\n");
+    fprintf(fd,"        uint32_t M = (%d*Z + 15)>>4;\n",lut_numBnInBnGroups[0]);
+
+    fprintf(fd,"        simde__m128i* p_bnProcBuf    = (simde__m128i*) &bnProcBuf    [%d];\n",lut_startAddrBnGroups   [idxBnGroup]);
+    fprintf(fd,"        simde__m128i* p_bnProcBufRes = (simde__m128i*) &bnProcBufRes [%d];\n",lut_startAddrBnGroups   [idxBnGroup]);
+    fprintf(fd,"        simde__m128i* p_llrProcBuf   = (simde__m128i*) &llrProcBuf   [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
+    fprintf(fd,"        simde__m128i* p_llrRes       = (simde__m128i*) &llrRes       [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
+    fprintf(fd,"        simde__m128i ymm0, ymm1, ymmRes0, ymmRes1;\n");
+
+
+    fprintf(fd,"        for (int i=0;i<M;i++) {\n");
+    fprintf(fd,"          p_bnProcBufRes[i] = p_llrProcBuf[i];\n");
+    fprintf(fd,"          ymm0 = simde_mm_cvtepi8_epi16(p_bnProcBuf [i]);\n");
+    fprintf(fd,"          ymm1 = simde_mm_cvtepi8_epi16(p_llrProcBuf[i]);\n");
+    fprintf(fd,"          ymmRes0 = simde_mm_adds_epi16(ymm0, ymm1);\n");
+    fprintf(fd,"          ymm0 = simde_mm_cvtepi8_epi16(simde_mm_srli_si128(p_bnProcBuf [i],8));\n");
+    fprintf(fd,"          ymm1 = simde_mm_cvtepi8_epi16(simde_mm_srli_si128(p_llrProcBuf[i],8));\n");
+    fprintf(fd,"          ymmRes1 = simde_mm_adds_epi16(ymm0, ymm1);\n");
+    fprintf(fd,"          *p_llrRes = simde_mm_packs_epi16(ymmRes0, ymmRes1);\n");
+    fprintf(fd,"          p_llrRes++;\n");
+    fprintf(fd,"        }\n");
+
+    // Process the remaining groups: the group at index n holds BNs connected to n+1 CNs
+    for (uint32_t cnidx = 1; cnidx < 30; cnidx++) {
+
+       if (lut_numBnInBnGroups[cnidx] > 0)
+       {
+        // Group is non-empty: advance to the next group address
+        idxBnGroup++;
+
+        fprintf(fd,"  M = (%d*Z + 15)>>4;\n",lut_numBnInBnGroups[cnidx]);
+
+        // Set the offset to each CN within a group in terms of 16 Byte
+        cnOffsetInGroup = (lut_numBnInBnGroups[cnidx]*NR_LDPC_ZMAX)>>4;
+
+        // Set pointers to the start of this group
+        fprintf(fd,"  p_bnProcBuf     = (simde__m128i*) &bnProcBuf    [%u];\n",lut_startAddrBnGroups[idxBnGroup]);
+        fprintf(fd,"  p_llrProcBuf    = (simde__m128i*) &llrProcBuf   [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd,"  p_llrRes        = (simde__m128i*) &llrRes       [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
+
+        // Loop over BNs
+        fprintf(fd,"        for (int i=0;i<M;i++) {\n");
+            // First 16 LLRs of first CN
+        fprintf(fd,"        ymmRes0 = simde_mm_cvtepi8_epi16(p_bnProcBuf [i]);\n");
+        fprintf(fd,"        ymmRes1 = simde_mm_cvtepi8_epi16(simde_mm_srli_si128(p_bnProcBuf [i],8));\n");
+
+            // Loop over CNs
+        for (k=1; k<=cnidx; k++)
+        {
+           fprintf(fd,"        ymm0 = simde_mm_cvtepi8_epi16(p_bnProcBuf[%d + i]);\n", k*cnOffsetInGroup);
+           fprintf(fd,"        ymmRes0 = simde_mm_adds_epi16(ymmRes0, ymm0);\n");
+
+           fprintf(fd,"        ymm1 = simde_mm_cvtepi8_epi16(simde_mm_srli_si128(p_bnProcBuf[%d + i],8));\n", k*cnOffsetInGroup);
+
+           fprintf(fd, "       ymmRes1 = simde_mm_adds_epi16(ymmRes1, ymm1); \n");
+        }
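+        // This k loop is unrolled at generation time: the emitted header
+        // contains one widen/add sequence per additional CN in the group.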
+
+            // Add LLR from receiver input
+        fprintf(fd,"        ymm0    = simde_mm_cvtepi8_epi16(p_llrProcBuf[i]);\n");
+        fprintf(fd,"        ymmRes0 = simde_mm_adds_epi16(ymmRes0, ymm0);\n");
+
+        fprintf(fd,"        ymm1    = simde_mm_cvtepi8_epi16(simde_mm_srli_si128(p_llrProcBuf[i],8));\n");
+        fprintf(fd,"        ymmRes1 = simde_mm_adds_epi16(ymmRes1, ymm1);\n");
+
+            // Pack results back to epi8
+        fprintf(fd,"        *p_llrRes = simde_mm_packs_epi16(ymmRes0, ymmRes1);\n");
+        fprintf(fd,"        p_llrRes++;\n");
+
+        fprintf(fd,"   }\n");
+       }
+
+    }
+
+    fprintf(fd,"}\n");
+    fclose(fd);
+} // end of nrLDPC_bnProcPc_BG1_generator_128
diff --git a/openair1/PHY/CODING/nrLDPC_decoder/nrLDPC_tools/generator_bnProc/bnProcPc_gen_BG1_avx2.c b/openair1/PHY/CODING/nrLDPC_decoder/nrLDPC_tools/generator_bnProc/bnProcPc_gen_BG1_avx2.c
index fd24c4236e5325efab68dfb53ea59b6bdb3bbbc3..39f45745a1dcca1354b8117420ad402e26c1ca2d 100644
--- a/openair1/PHY/CODING/nrLDPC_decoder/nrLDPC_tools/generator_bnProc/bnProcPc_gen_BG1_avx2.c
+++ b/openair1/PHY/CODING/nrLDPC_decoder/nrLDPC_tools/generator_bnProc/bnProcPc_gen_BG1_avx2.c
@@ -70,7 +70,7 @@ void nrLDPC_bnProcPc_BG1_generator_AVX2(const char *dir, int R)
       lut_startAddrBnGroups = lut_startAddrBnGroups_BG1_R89;
       lut_startAddrBnGroupsLlr = lut_startAddrBnGroupsLlr_BG1_R89;
     }
-  else { printf("aborting, illegal R %d\n",R); fclose(fd);abort();}
+    else { printf("aborting, illegal R %d\n",R); fclose(fd);abort();}
         // Number of BNs in Groups
 //    uint32_t M;
     //uint32_t M32rem;
@@ -80,66 +80,15 @@ void nrLDPC_bnProcPc_BG1_generator_AVX2(const char *dir, int R)
     uint32_t cnOffsetInGroup;
     uint8_t idxBnGroup = 0;
 
-    fprintf(fd,"   __m256i ymm0, ymm1, ymmRes0, ymmRes1;  \n");
 
 
-    fprintf(fd,"        __m128i* p_bnProcBuf; \n");
-    fprintf(fd,"        __m128i* p_llrProcBuf;\n");
-    fprintf(fd,"        __m256i* p_llrRes; \n");
-  //  fprintf(fd,"        __m256i* p_bnProcBufRes; \n");
-//    fprintf(fd,"        __m256i* p_llrProcBuf256; \n");
+    fprintf(fd,"        simde__m256i* p_bnProcBuf; \n");
+    fprintf(fd,"        simde__m256i* p_llrProcBuf;\n");
+    fprintf(fd, "        simde__m256i* p_llrRes; \n");
+    //  fprintf(fd,"        simde__m256i* p_bnProcBufRes; \n");
+    //    fprintf(fd,"        simde__m256i* p_llrProcBuf256; \n");
     fprintf(fd,"         uint32_t M ;\n");
 
-
-    fprintf(fd,  "// Process group with 1 CNs \n");
-
-/*
- // Process group with 1 CNs
-
-   // if (lut_numBnInBnGroups[0] > 0)
-   // {
-        // If elements in group move to next address
-       // idxBnGroup++;
-
-        // Number of groups of 32 BNs for parallel processing
-        fprintf(fd," M = (%d*Z + 31)>>5;\n",lut_numBnInBnGroups[0] );
-
-        // Set the offset to each CN within a group in terms of 16 Byte
-       // cnOffsetInGroup = (lut_numBnInBnGroups[0]*NR_LDPC_ZMAX)>>4;
-
-        // Set pointers to start of group 2
-        fprintf(fd,"    p_bnProcBuf     = (__m128i*) &bnProcBuf    [%d];\n",lut_startAddrBnGroups[idxBnGroup]);
-       // fprintf(fd,"    p_bnProcBufRes     = (__m256i*) &bnProcBufRes    [%d];\n",lut_startAddrBnGroups[idxBnGroup]);
-        fprintf(fd,"    p_llrProcBuf    = (__m128i*) &llrProcBuf   [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
-       //  fprintf(fd,"   p_llrProcBuf256   = (__m256i*) &llrProcBuf   [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
-        fprintf(fd,"    p_llrRes        = (__m256i*) &llrRes       [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
-
-        // Loop over BNs
-        fprintf(fd,"            for (int i=0,j=0;i<M;i++,j+=2) {\n");
-         
-         fprintf(fd,"           p_bnProcBufRes[i] = p_llrProcBuf256[i];\n");
-       
-            // First 16 LLRs of first CN
-        fprintf(fd,"            ymm0 = simde_mm256_cvtepi8_epi16(p_bnProcBuf [j]);\n");
-        fprintf(fd,"            ymm1 = simde_mm256_cvtepi8_epi16(p_llrProcBuf[j]);\n");
-        fprintf(fd,"           ymmRes0 = simde_mm256_adds_epi16(ymm0, ymm1);\n");
-
-        
-            // Second 16 LLRs of first CN
-        fprintf(fd,"            ymm0    = simde_mm256_cvtepi8_epi16(p_bnProcBuf[j + 1 ]);\n");
-        fprintf(fd,"            ymm1    = simde_mm256_cvtepi8_epi16(p_llrProcBuf[j + 1 ]);\n");
-        fprintf(fd,"            ymmRes1 = simde_mm256_adds_epi16(ymm0, ymm1);\n");
-
-            // Pack results back to epi8
-        fprintf(fd,"            ymm0 = simde_mm256_packs_epi16(ymmRes0, ymmRes1);\n");
-            // ymm0     = [ymmRes1[255:128] ymmRes0[255:128] ymmRes1[127:0] ymmRes0[127:0]]
-            // p_llrRes = [ymmRes1[255:128] ymmRes1[127:0] ymmRes0[255:128] ymmRes0[127:0]]
-        fprintf(fd,"            p_llrRes[i] = simde_mm256_permute4x64_epi64(ymm0, 0xD8);\n");
-
-
-        fprintf(fd,"}\n");
-    //}
-*/  
     // =====================================================================
     // Process group with 2 CNs
 
@@ -160,39 +109,25 @@ fprintf(fd,  "// Process group with 2 CNs \n");
         cnOffsetInGroup = (lut_numBnInBnGroups[1]*NR_LDPC_ZMAX)>>4;
 
         // Set pointers to start of group 2
-        fprintf(fd,"    p_bnProcBuf     = (__m128i*) &bnProcBuf    [%u];\n",lut_startAddrBnGroups[idxBnGroup]);
-        fprintf(fd,"    p_llrProcBuf    = (__m128i*) &llrProcBuf   [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
-        fprintf(fd,"    p_llrRes        = (__m256i*) &llrRes       [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd,"    p_bnProcBuf     = (simde__m256i*) &bnProcBuf    [%u];\n",lut_startAddrBnGroups[idxBnGroup]);
+        fprintf(fd,"    p_llrProcBuf    = (simde__m256i*) &llrProcBuf   [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd,"    p_llrRes        = (simde__m256i*) &llrRes       [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
 
         // Loop over BNs
-        fprintf(fd,"            for (int i=0,j=0;i<M;i++,j+=2) {\n");
+        fprintf(fd,"            for (int i=0;i<M;i++) {\n");
             // First 16 LLRs of first CN
-        fprintf(fd,"            ymmRes0 = simde_mm256_cvtepi8_epi16(p_bnProcBuf [j]);\n");
-        fprintf(fd,"            ymmRes1 = simde_mm256_cvtepi8_epi16(p_bnProcBuf[j + 1]);\n");
+        fprintf(fd,"            p_llrRes[i] = p_bnProcBuf [i];\n");
 
             // Loop over CNs
         for (k=1; k<2; k++)
         {
-        fprintf(fd,"            ymm0 = simde_mm256_cvtepi8_epi16(p_bnProcBuf[%u + j]);\n", k*cnOffsetInGroup);
-        fprintf(fd,"            ymmRes0 = simde_mm256_adds_epi16(ymmRes0, ymm0);\n");
-
-        fprintf(fd,"            ymm1 = simde_mm256_cvtepi8_epi16(p_bnProcBuf[%u + j +1]);\n", k*cnOffsetInGroup);
+        fprintf(fd,"            p_llrRes[i] = simde_mm256_adds_epi8(p_llrRes[i], p_bnProcBuf[%u + i]);\n",k*cnOffsetInGroup);
 
-        fprintf(fd, "           ymmRes1 = simde_mm256_adds_epi16(ymmRes1, ymm1); \n");
         }
 
             // Add LLR from receiver input
-        fprintf(fd,"            ymm0    = simde_mm256_cvtepi8_epi16(p_llrProcBuf[j]);\n");
-        fprintf(fd,"            ymmRes0 = simde_mm256_adds_epi16(ymmRes0, ymm0);\n");
+        fprintf(fd,"            p_llrRes[i] = simde_mm256_adds_epi8(p_llrRes[i], p_llrProcBuf[i]);\n");
 
-        fprintf(fd,"            ymm1    = simde_mm256_cvtepi8_epi16(p_llrProcBuf[j +1 ]);\n");
-        fprintf(fd,"            ymmRes1 = simde_mm256_adds_epi16(ymmRes1, ymm1);\n");
-
-            // Pack results back to epi8
-        fprintf(fd,"            ymm0 = simde_mm256_packs_epi16(ymmRes0, ymmRes1);\n");
-            // ymm0     = [ymmRes1[255:128] ymmRes0[255:128] ymmRes1[127:0] ymmRes0[127:0]]
-            // p_llrRes = [ymmRes1[255:128] ymmRes1[127:0] ymmRes0[255:128] ymmRes0[127:0]]
-        fprintf(fd,"            p_llrRes[i] = simde_mm256_permute4x64_epi64(ymm0, 0xD8);\n");
 
 
         fprintf(fd,"}\n");
@@ -218,39 +153,27 @@ fprintf(fd,  "// Process group with 3 CNs \n");
         cnOffsetInGroup = (lut_numBnInBnGroups[2]*NR_LDPC_ZMAX)>>4;
 
         // Set pointers to start of group 2
-        fprintf(fd,"    p_bnProcBuf     = (__m128i*) &bnProcBuf    [%u];\n",lut_startAddrBnGroups[idxBnGroup]);
-        fprintf(fd,"    p_llrProcBuf    = (__m128i*) &llrProcBuf   [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
-        fprintf(fd,"    p_llrRes        = (__m256i*) &llrRes       [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd,"    p_bnProcBuf     = (simde__m256i*) &bnProcBuf    [%u];\n",lut_startAddrBnGroups[idxBnGroup]);
+        fprintf(fd,"    p_llrProcBuf    = (simde__m256i*) &llrProcBuf   [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd,"    p_llrRes        = (simde__m256i*) &llrRes       [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
         // Loop over BNs
-        fprintf(fd,"        for (int i=0,j=0;i<M;i++,j+=2) {\n");
+        fprintf(fd,"            for (int i=0;i<M;i++) {\n");
             // First 16 LLRs of first CN
-        fprintf(fd,"        ymmRes0 = simde_mm256_cvtepi8_epi16(p_bnProcBuf [j]);\n");
-        fprintf(fd,"        ymmRes1 = simde_mm256_cvtepi8_epi16(p_bnProcBuf [j +1]);\n");
+        fprintf(fd,"            p_llrRes[i] = p_bnProcBuf [i];\n");
 
             // Loop over CNs
         for (k=1; k<3; k++)
         {
-        fprintf(fd,"        ymm0 = simde_mm256_cvtepi8_epi16(p_bnProcBuf[%u + j]);\n", k*cnOffsetInGroup);
-        fprintf(fd,"        ymmRes0 = simde_mm256_adds_epi16(ymmRes0, ymm0);\n");
-
-        fprintf(fd,"        ymm1 = simde_mm256_cvtepi8_epi16(p_bnProcBuf[%u + j +1]);\n", k*cnOffsetInGroup);
-
-        fprintf(fd,"        ymmRes1 = simde_mm256_adds_epi16(ymmRes1, ymm1); \n");
-            }
+        fprintf(fd,"            p_llrRes[i] = simde_mm256_adds_epi8(p_llrRes[i], p_bnProcBuf[%u + i]);\n",k*cnOffsetInGroup);
+        }
 
             // Add LLR from receiver input
-        fprintf(fd,"        ymm0    = simde_mm256_cvtepi8_epi16(p_llrProcBuf[j]);\n");
-        fprintf(fd,"        ymmRes0 = simde_mm256_adds_epi16(ymmRes0, ymm0);\n");
+        fprintf(fd,"            p_llrRes[i] = simde_mm256_adds_epi8(p_llrRes[i], p_llrProcBuf[i]);\n");
 
-        fprintf(fd,"        ymm1    = simde_mm256_cvtepi8_epi16(p_llrProcBuf[j +1 ]);\n");
-        fprintf(fd,"        ymmRes1 = simde_mm256_adds_epi16(ymmRes1, ymm1);\n");
 
-            // Pack results back to epi8
-        fprintf(fd,"        ymm0 = simde_mm256_packs_epi16(ymmRes0, ymmRes1);\n");
-            // ymm0     = [ymmRes1[255:128] ymmRes0[255:128] ymmRes1[127:0] ymmRes0[127:0]]
-        fprintf(fd,"            p_llrRes[i] = simde_mm256_permute4x64_epi64(ymm0, 0xD8);\n");
 
         fprintf(fd,"}\n");
+
     }
 
 
@@ -274,50 +197,35 @@ fprintf(fd,  "// Process group with 4 CNs \n");
         cnOffsetInGroup = (lut_numBnInBnGroups[3]*NR_LDPC_ZMAX)>>4;
 
         // Set pointers to start of group 2
-        fprintf(fd,"    p_bnProcBuf     = (__m128i*) &bnProcBuf    [%u];\n",lut_startAddrBnGroups[idxBnGroup]);
-        fprintf(fd,"    p_llrProcBuf    = (__m128i*) &llrProcBuf   [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
-        fprintf(fd,"    p_llrRes        = (__m256i*) &llrRes       [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
-
+        fprintf(fd,"    p_bnProcBuf     = (simde__m256i*) &bnProcBuf    [%u];\n",lut_startAddrBnGroups[idxBnGroup]);
+        fprintf(fd,"    p_llrProcBuf    = (simde__m256i*) &llrProcBuf   [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd,"    p_llrRes        = (simde__m256i*) &llrRes       [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
         // Loop over BNs
-        fprintf(fd,"        for (int i=0,j=0;i<M;i++,j+=2) {\n");
+        fprintf(fd,"            for (int i=0;i<M;i++) {\n");
             // First 16 LLRs of first CN
-        fprintf(fd,"        ymmRes0 = simde_mm256_cvtepi8_epi16(p_bnProcBuf [j]);\n");
-        fprintf(fd,"        ymmRes1 = simde_mm256_cvtepi8_epi16(p_bnProcBuf [j +1]);\n");
+        fprintf(fd,"            p_llrRes[i] = p_bnProcBuf [i];\n");
 
             // Loop over CNs
         for (k=1; k<4; k++)
         {
-        fprintf(fd,"        ymm0 = simde_mm256_cvtepi8_epi16(p_bnProcBuf[%u + j]);\n", k*cnOffsetInGroup);
-        fprintf(fd,"        ymmRes0 = simde_mm256_adds_epi16(ymmRes0, ymm0);\n");
-
-        fprintf(fd,"        ymm1 = simde_mm256_cvtepi8_epi16(p_bnProcBuf[%u + j +1]);\n", k*cnOffsetInGroup);
-
-        fprintf(fd, "       ymmRes1 = simde_mm256_adds_epi16(ymmRes1, ymm1); \n");
+        fprintf(fd,"            p_llrRes[i] = simde_mm256_adds_epi8(p_llrRes[i], p_bnProcBuf[%u + i]);\n",k*cnOffsetInGroup);
         }
 
             // Add LLR from receiver input
-        fprintf(fd,"        ymm0    = simde_mm256_cvtepi8_epi16(p_llrProcBuf[j]);\n");
-        fprintf(fd,"        ymmRes0 = simde_mm256_adds_epi16(ymmRes0, ymm0);\n");
+        fprintf(fd,"            p_llrRes[i] = simde_mm256_adds_epi8(p_llrRes[i], p_llrProcBuf[i]);\n");
 
-        fprintf(fd,"        ymm1    = simde_mm256_cvtepi8_epi16(p_llrProcBuf[j +1 ]);\n");
-        fprintf(fd,"        ymmRes1 = simde_mm256_adds_epi16(ymmRes1, ymm1);\n");
 
-            // Pack results back to epi8
-        fprintf(fd,"        ymm0 = simde_mm256_packs_epi16(ymmRes0, ymmRes1);\n");
-            // ymm0     = [ymmRes1[255:128] ymmRes0[255:128] ymmRes1[127:0] ymmRes0[127:0]]
-            // p_llrRes = [ymmRes1[255:128] ymmRes1[127:0] ymmRes0[255:128] ymmRes0[127:0]]
-        fprintf(fd,"            p_llrRes[i] = simde_mm256_permute4x64_epi64(ymm0, 0xD8);\n");
 
         fprintf(fd,"}\n");
     }
 
 
-   // =====================================================================
+    // =====================================================================
     // Process group with 5 CNs
 
-fprintf(fd,  "// Process group with 5 CNs \n");
+    fprintf(fd,  "// Process group with 5 CNs \n");
 
- // Process group with 5 CNs
+    // Process group with 5 CNs
 
     if (lut_numBnInBnGroups[4] > 0)
     {
@@ -331,38 +239,24 @@ fprintf(fd,  "// Process group with 5 CNs \n");
         cnOffsetInGroup = (lut_numBnInBnGroups[4]*NR_LDPC_ZMAX)>>4;
 
         // Set pointers to start of group with 5 CNs
-        fprintf(fd,"    p_bnProcBuf     = (__m128i*) &bnProcBuf    [%u];\n",lut_startAddrBnGroups[idxBnGroup]);
-        fprintf(fd,"    p_llrProcBuf    = (__m128i*) &llrProcBuf   [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
-        fprintf(fd,"    p_llrRes        = (__m256i*) &llrRes       [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd,"    p_bnProcBuf     = (simde__m256i*) &bnProcBuf    [%u];\n",lut_startAddrBnGroups[idxBnGroup]);
+        fprintf(fd,"    p_llrProcBuf    = (simde__m256i*) &llrProcBuf   [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd,"    p_llrRes        = (simde__m256i*) &llrRes       [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
         // Loop over BNs
-        fprintf(fd,"        for (int i=0,j=0;i<M;i++,j+=2) {\n");
+        fprintf(fd,"            for (int i=0;i<M;i++) {\n");
             // First 32 LLRs of first CN
-        fprintf(fd,"        ymmRes0 = simde_mm256_cvtepi8_epi16(p_bnProcBuf [j]);\n");
-        fprintf(fd,"        ymmRes1 = simde_mm256_cvtepi8_epi16(p_bnProcBuf [j +1]);\n");
+        fprintf(fd,"            p_llrRes[i] = p_bnProcBuf[i];\n");
 
             // Loop over CNs
         for (k=1; k<5; k++)
         {
-        fprintf(fd,"        ymm0 = simde_mm256_cvtepi8_epi16(p_bnProcBuf[%u + j]);\n", k*cnOffsetInGroup);
-        fprintf(fd,"        ymmRes0 = simde_mm256_adds_epi16(ymmRes0, ymm0);\n");
-
-        fprintf(fd,"        ymm1 = simde_mm256_cvtepi8_epi16(p_bnProcBuf[%u + j +1]);\n", k*cnOffsetInGroup);
-
-        fprintf(fd, "       ymmRes1 = simde_mm256_adds_epi16(ymmRes1, ymm1); \n");
+        fprintf(fd,"            p_llrRes[i] = simde_mm256_adds_epi8(p_llrRes[i], p_bnProcBuf[%u + i]);\n",k*cnOffsetInGroup);
         }
 
             // Add LLR from receiver input
-        fprintf(fd,"        ymm0    = simde_mm256_cvtepi8_epi16(p_llrProcBuf[j]);\n");
-        fprintf(fd,"        ymmRes0 = simde_mm256_adds_epi16(ymmRes0, ymm0);\n");
+        fprintf(fd,"            p_llrRes[i] = simde_mm256_adds_epi8(p_llrRes[i], p_llrProcBuf[i]);\n");
 
-        fprintf(fd,"        ymm1    = simde_mm256_cvtepi8_epi16(p_llrProcBuf[j +1 ]);\n");
-        fprintf(fd,"        ymmRes1 = simde_mm256_adds_epi16(ymmRes1, ymm1);\n");
 
-            // Pack results back to epi8
-        fprintf(fd,"        ymm0 = simde_mm256_packs_epi16(ymmRes0, ymmRes1);\n");
-            // ymm0     = [ymmRes1[255:128] ymmRes0[255:128] ymmRes1[127:0] ymmRes0[127:0]]
-            // p_llrRes = [ymmRes1[255:128] ymmRes1[127:0] ymmRes0[255:128] ymmRes0[127:0]]
-        fprintf(fd,"            p_llrRes[i] = simde_mm256_permute4x64_epi64(ymm0, 0xD8);\n");
 
         fprintf(fd,"}\n");
     }
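
Note that the new arithmetic is not bit-identical to what it replaces: the deleted path widened to int16, accumulated with adds_epi16 and saturated once while packing back, whereas adds_epi8 saturates on every step. The difference only shows when an intermediate sum leaves the int8 range; a minimal self-contained comparison against SIMDe's portable AVX2 API (the same API the generated code targets):

    #include <stdint.h>
    #include <stdio.h>
    #include <simde/x86/avx2.h>

    /* Old path: widen int8 -> int16, accumulate, saturate once when packing back. */
    static int8_t sum3_epi16(int8_t x, int8_t y, int8_t z)
    {
        simde__m256i acc = simde_mm256_cvtepi8_epi16(simde_mm_set1_epi8(x));
        acc = simde_mm256_adds_epi16(acc, simde_mm256_cvtepi8_epi16(simde_mm_set1_epi8(y)));
        acc = simde_mm256_adds_epi16(acc, simde_mm256_cvtepi8_epi16(simde_mm_set1_epi8(z)));
        simde__m256i r = simde_mm256_permute4x64_epi64(simde_mm256_packs_epi16(acc, acc), 0xD8);
        int8_t out[32];
        simde_mm256_storeu_si256((simde__m256i *)out, r);
        return out[0];
    }

    /* New path: saturate at int8 on every add, as the rewritten generator emits. */
    static int8_t sum3_epi8(int8_t x, int8_t y, int8_t z)
    {
        simde__m256i acc = simde_mm256_set1_epi8(x);
        acc = simde_mm256_adds_epi8(acc, simde_mm256_set1_epi8(y));
        acc = simde_mm256_adds_epi8(acc, simde_mm256_set1_epi8(z));
        int8_t out[32];
        simde_mm256_storeu_si256((simde__m256i *)out, acc);
        return out[0];
    }

    int main(void)
    {
        /* 100 + 60 - 60: the 16-bit path keeps 100; the 8-bit path clips to 127 first and returns 67 */
        printf("epi16 path: %d, epi8 path: %d\n", sum3_epi16(100, 60, -60), sum3_epi8(100, 60, -60));
        return 0;
    }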
@@ -388,38 +282,24 @@ fprintf(fd,  "// Process group with 6 CNs \n");
         cnOffsetInGroup = (lut_numBnInBnGroups[5]*NR_LDPC_ZMAX)>>4;
 
         // Set pointers to start of group with 6 CNs
-        fprintf(fd,"    p_bnProcBuf     = (__m128i*) &bnProcBuf    [%u];\n",lut_startAddrBnGroups[idxBnGroup]);
-        fprintf(fd,"    p_llrProcBuf    = (__m128i*) &llrProcBuf   [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
-        fprintf(fd,"    p_llrRes        = (__m256i*) &llrRes       [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd,"    p_bnProcBuf     = (simde__m256i*) &bnProcBuf    [%u];\n",lut_startAddrBnGroups[idxBnGroup]);
+        fprintf(fd,"    p_llrProcBuf    = (simde__m256i*) &llrProcBuf   [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd,"    p_llrRes        = (simde__m256i*) &llrRes       [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
         // Loop over BNs
-        fprintf(fd,"        for (int i=0,j=0;i<M;i++,j+=2) {\n");
+        fprintf(fd,"            for (int i=0;i<M;i++) {\n");
             // First 32 LLRs of first CN
-        fprintf(fd,"        ymmRes0 = simde_mm256_cvtepi8_epi16(p_bnProcBuf [j]);\n");
-        fprintf(fd,"        ymmRes1 = simde_mm256_cvtepi8_epi16(p_bnProcBuf [j +1]);\n");
+        fprintf(fd,"            p_llrRes[i] = p_bnProcBuf[i];\n");
 
             // Loop over CNs
         for (k=1; k<6; k++)
         {
-        fprintf(fd,"        ymm0 = simde_mm256_cvtepi8_epi16(p_bnProcBuf[%u + j]);\n", k*cnOffsetInGroup);
-        fprintf(fd,"        ymmRes0 = simde_mm256_adds_epi16(ymmRes0, ymm0);\n");
-
-        fprintf(fd,"        ymm1 = simde_mm256_cvtepi8_epi16(p_bnProcBuf[%u + j +1]);\n", k*cnOffsetInGroup);
-
-        fprintf(fd, "       ymmRes1 = simde_mm256_adds_epi16(ymmRes1, ymm1); \n");
+        fprintf(fd,"            p_llrRes[i] = simde_mm256_adds_epi8(p_llrRes[i], p_bnProcBuf[%u + i]);\n",k*cnOffsetInGroup);
         }
 
             // Add LLR from receiver input
-        fprintf(fd,"        ymm0    = simde_mm256_cvtepi8_epi16(p_llrProcBuf[j]);\n");
-        fprintf(fd,"        ymmRes0 = simde_mm256_adds_epi16(ymmRes0, ymm0);\n");
+        fprintf(fd,"            p_llrRes[i] = simde_mm256_adds_epi8(p_llrRes[i], p_llrProcBuf[i]);\n");
 
-        fprintf(fd,"        ymm1    = simde_mm256_cvtepi8_epi16(p_llrProcBuf[j +1 ]);\n");
-        fprintf(fd,"        ymmRes1 = simde_mm256_adds_epi16(ymmRes1, ymm1);\n");
 
-            // Pack results back to epi8
-        fprintf(fd,"        ymm0 = simde_mm256_packs_epi16(ymmRes0, ymmRes1);\n");
-            // ymm0     = [ymmRes1[255:128] ymmRes0[255:128] ymmRes1[127:0] ymmRes0[127:0]]
-            // p_llrRes = [ymmRes1[255:128] ymmRes1[127:0] ymmRes0[255:128] ymmRes0[127:0]]
-        fprintf(fd,"            p_llrRes[i] = simde_mm256_permute4x64_epi64(ymm0, 0xD8);\n");
 
         fprintf(fd,"}\n");
     }
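
The loop-header change is a pure re-indexing: the old loop consumed two simde__m128i words per output (j = 2*i) because simde_mm256_cvtepi8_epi16 doubles the lane width, while the new loop consumes one simde__m256i word per output. A compile-time check of that size assumption:

    #include <simde/x86/avx2.h>

    /* Two 128-bit loads per iteration (old j += 2) cover the same 32 bytes as one 256-bit load (new i). */
    _Static_assert(2 * sizeof(simde__m128i) == sizeof(simde__m256i),
                   "j+=2 over simde__m128i matches i over simde__m256i");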
@@ -444,44 +324,25 @@ fprintf(fd,  "// Process group with 7 CNs \n");
         cnOffsetInGroup = (lut_numBnInBnGroups[6]*NR_LDPC_ZMAX)>>4;
 
         // Set pointers to start of group with 7 CNs
-        fprintf(fd,"    p_bnProcBuf     = (__m128i*) &bnProcBuf    [%u];\n",lut_startAddrBnGroups[idxBnGroup]);
-        fprintf(fd,"    p_llrProcBuf    = (__m128i*) &llrProcBuf   [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
-        fprintf(fd,"    p_llrRes        = (__m256i*) &llrRes       [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd,"    p_bnProcBuf     = (simde__m256i*) &bnProcBuf    [%u];\n",lut_startAddrBnGroups[idxBnGroup]);
+        fprintf(fd,"    p_llrProcBuf    = (simde__m256i*) &llrProcBuf   [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd,"    p_llrRes        = (simde__m256i*) &llrRes       [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
         // Loop over BNs
-        fprintf(fd,"        for (int i=0,j=0;i<M;i++,j+=2) {\n");
+        fprintf(fd,"            for (int i=0;i<M;i++) {\n");
             // First 32 LLRs of first CN
-        fprintf(fd,"        ymmRes0 = simde_mm256_cvtepi8_epi16(p_bnProcBuf [j]);\n");
-        fprintf(fd,"        ymmRes1 = simde_mm256_cvtepi8_epi16(p_bnProcBuf [j +1]);\n");
+        fprintf(fd,"            p_llrRes[i] = p_bnProcBuf[i];\n");
 
             // Loop over CNs
         for (k=1; k<7; k++)
         {
-        fprintf(fd,"        ymm0 = simde_mm256_cvtepi8_epi16(p_bnProcBuf[%u + j]);\n", k*cnOffsetInGroup);
-        fprintf(fd,"        ymmRes0 = simde_mm256_adds_epi16(ymmRes0, ymm0);\n");
-
-        fprintf(fd,"        ymm1 = simde_mm256_cvtepi8_epi16(p_bnProcBuf[%u + j +1]);\n", k*cnOffsetInGroup);
-
-        fprintf(fd, "       ymmRes1 = simde_mm256_adds_epi16(ymmRes1, ymm1); \n");
+        fprintf(fd,"            p_llrRes[i] = simde_mm256_adds_epi8(p_llrRes[i], p_bnProcBuf[%u + i]);\n",k*cnOffsetInGroup);
         }
 
             // Add LLR from receiver input
-        fprintf(fd,"        ymm0    = simde_mm256_cvtepi8_epi16(p_llrProcBuf[j]);\n");
-        fprintf(fd,"        ymmRes0 = simde_mm256_adds_epi16(ymmRes0, ymm0);\n");
-
-        fprintf(fd,"        ymm1    = simde_mm256_cvtepi8_epi16(p_llrProcBuf[j +1 ]);\n");
-        fprintf(fd,"        ymmRes1 = simde_mm256_adds_epi16(ymmRes1, ymm1);\n");
-
-            // Pack results back to epi8
-        fprintf(fd,"        ymm0 = simde_mm256_packs_epi16(ymmRes0, ymmRes1);\n");
-            // ymm0     = [ymmRes1[255:128] ymmRes0[255:128] ymmRes1[127:0] ymmRes0[127:0]]
-            // p_llrRes = [ymmRes1[255:128] ymmRes1[127:0] ymmRes0[255:128] ymmRes0[127:0]]
-        //fprintf(fd,"         (__m256i*) &llrRes[%d + i]    = simde_mm256_permute4x64_epi64(ymm0, 0xD8);\n",lut_startAddrBnGroupsLlr[idxBnGroup]>>5 );
-        fprintf(fd,"            p_llrRes[i] = simde_mm256_permute4x64_epi64(ymm0, 0xD8);\n");
+        fprintf(fd,"            p_llrRes[i] = simde_mm256_adds_epi8(p_llrRes[i], p_llrProcBuf[i]);\n");
 
         fprintf(fd,"}\n");
-    }
-
-
+    }
    // =====================================================================
     // Process group with 8 CNs
 
@@ -501,42 +362,27 @@ fprintf(fd,  "// Process group with 8 CNs \n");
         cnOffsetInGroup = (lut_numBnInBnGroups[7]*NR_LDPC_ZMAX)>>4;
 
         // Set pointers to start of group with 8 CNs
-        fprintf(fd,"    p_bnProcBuf     = (__m128i*) &bnProcBuf    [%u];\n",lut_startAddrBnGroups[idxBnGroup]);
-        fprintf(fd,"    p_llrProcBuf    = (__m128i*) &llrProcBuf   [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
-        fprintf(fd,"    p_llrRes        = (__m256i*) &llrRes       [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd,"    p_bnProcBuf     = (simde__m256i*) &bnProcBuf    [%u];\n",lut_startAddrBnGroups[idxBnGroup]);
+        fprintf(fd,"    p_llrProcBuf    = (simde__m256i*) &llrProcBuf   [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd,"    p_llrRes        = (simde__m256i*) &llrRes       [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
         // Loop over BNs
-        fprintf(fd,"        for (int i=0,j=0;i<M;i++,j+=2) {\n");
+        fprintf(fd,"            for (int i=0;i<M;i++) {\n");
             // First 32 LLRs of first CN
-        fprintf(fd,"        ymmRes0 = simde_mm256_cvtepi8_epi16(p_bnProcBuf [j]);\n");
-        fprintf(fd,"        ymmRes1 = simde_mm256_cvtepi8_epi16(p_bnProcBuf [j +1]);\n");
+        fprintf(fd,"            p_llrRes[i] = p_bnProcBuf[i];\n");
 
             // Loop over CNs
         for (k=1; k<8; k++)
         {
-        fprintf(fd,"        ymm0 = simde_mm256_cvtepi8_epi16(p_bnProcBuf[%u + j]);\n", k*cnOffsetInGroup);
-        fprintf(fd,"        ymmRes0 = simde_mm256_adds_epi16(ymmRes0, ymm0);\n");
-
-        fprintf(fd,"        ymm1 = simde_mm256_cvtepi8_epi16(p_bnProcBuf[%u + j +1]);\n", k*cnOffsetInGroup);
-
-        fprintf(fd, "       ymmRes1 = simde_mm256_adds_epi16(ymmRes1, ymm1); \n");
+        fprintf(fd,"            p_llrRes[i] = simde_mm256_adds_epi8(p_llrRes[i], p_bnProcBuf[%u + i]);\n",k*cnOffsetInGroup);
         }
 
             // Add LLR from receiver input
-        fprintf(fd,"        ymm0    = simde_mm256_cvtepi8_epi16(p_llrProcBuf[j]);\n");
-        fprintf(fd,"        ymmRes0 = simde_mm256_adds_epi16(ymmRes0, ymm0);\n");
+        fprintf(fd,"            p_llrRes[i] = simde_mm256_adds_epi8(p_llrRes[i], p_llrProcBuf[i]);\n");
 
-        fprintf(fd,"        ymm1    = simde_mm256_cvtepi8_epi16(p_llrProcBuf[j +1 ]);\n");
-        fprintf(fd,"        ymmRes1 = simde_mm256_adds_epi16(ymmRes1, ymm1);\n");
 
-            // Pack results back to epi8
-        fprintf(fd,"        ymm0 = simde_mm256_packs_epi16(ymmRes0, ymmRes1);\n");
-            // ymm0     = [ymmRes1[255:128] ymmRes0[255:128] ymmRes1[127:0] ymmRes0[127:0]]
-            // p_llrRes = [ymmRes1[255:128] ymmRes1[127:0] ymmRes0[255:128] ymmRes0[127:0]]
-        //fprintf(fd,"         (__m256i*) &llrRes[%d + i]    = simde_mm256_permute4x64_epi64(ymm0, 0xD8);\n",lut_startAddrBnGroupsLlr[idxBnGroup]>>5 );
-
-        fprintf(fd,"            p_llrRes[i] = simde_mm256_permute4x64_epi64(ymm0, 0xD8);\n");
 
         fprintf(fd,"}\n");
+
     }
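
When checking the generated kernels, a scalar model of the per-byte arithmetic is useful. A minimal sketch, assuming bn points at this lane's LLR byte for the group's first CN and cnStrideBytes is the byte distance between consecutive CN blocks (both names are hypothetical, not from this patch):

    #include <stdint.h>

    /* Scalar equivalent of one simde_mm256_adds_epi8 lane: clamp the sum to [-128, 127]. */
    static inline int8_t adds_epi8_ref(int8_t a, int8_t b)
    {
        int s = (int)a + (int)b;
        if (s > INT8_MAX) s = INT8_MAX;
        if (s < INT8_MIN) s = INT8_MIN;
        return (int8_t)s;
    }

    /* One output byte for a group with nCN check nodes, mirroring the generated loop body. */
    static int8_t bn_group_ref(const int8_t *bn, int nCN, long cnStrideBytes, int8_t llrIn)
    {
        int8_t acc = bn[0];                                  /* first CN */
        for (int k = 1; k < nCN; k++)
            acc = adds_epi8_ref(acc, bn[k * cnStrideBytes]); /* remaining CNs */
        return adds_epi8_ref(acc, llrIn);                    /* channel LLR */
    }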
 
    // =====================================================================
@@ -558,41 +404,25 @@ fprintf(fd,  "// Process group with 9 CNs \n");
         cnOffsetInGroup = (lut_numBnInBnGroups[8]*NR_LDPC_ZMAX)>>4;
 
         // Set pointers to start of group with 9 CNs
-        fprintf(fd,"    p_bnProcBuf     = (__m128i*) &bnProcBuf    [%u];\n",lut_startAddrBnGroups[idxBnGroup]);
-        fprintf(fd,"    p_llrProcBuf    = (__m128i*) &llrProcBuf   [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
-        fprintf(fd,"    p_llrRes        = (__m256i*) &llrRes       [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd,"    p_bnProcBuf     = (simde__m256i*) &bnProcBuf    [%u];\n",lut_startAddrBnGroups[idxBnGroup]);
+        fprintf(fd,"    p_llrProcBuf    = (simde__m256i*) &llrProcBuf   [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd,"    p_llrRes        = (simde__m256i*) &llrRes       [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
         // Loop over BNs
-        fprintf(fd,"        for (int i=0,j=0;i<M;i++,j+=2) {\n");
+        fprintf(fd,"            for (int i=0;i<M;i++) {\n");
             // First 32 LLRs of first CN
-        fprintf(fd,"        ymmRes0 = simde_mm256_cvtepi8_epi16(p_bnProcBuf [j]);\n");
-        fprintf(fd,"        ymmRes1 = simde_mm256_cvtepi8_epi16(p_bnProcBuf [j +1]);\n");
+        fprintf(fd,"            p_llrRes[i] = p_bnProcBuf[i];\n");
 
             // Loop over CNs
         for (k=1; k<9; k++)
         {
-        fprintf(fd,"        ymm0 = simde_mm256_cvtepi8_epi16(p_bnProcBuf[%u + j]);\n", k*cnOffsetInGroup);
-        fprintf(fd,"        ymmRes0 = simde_mm256_adds_epi16(ymmRes0, ymm0);\n");
-
-        fprintf(fd,"        ymm1 = simde_mm256_cvtepi8_epi16(p_bnProcBuf[%u + j +1]);\n", k*cnOffsetInGroup);
-
-        fprintf(fd, "       ymmRes1 = simde_mm256_adds_epi16(ymmRes1, ymm1); \n");
+        fprintf(fd,"            p_llrRes[i] = simde_mm256_adds_epi8(p_llrRes[i], p_bnProcBuf[%u + i]);\n",k*cnOffsetInGroup);
         }
 
             // Add LLR from receiver input
-        fprintf(fd,"        ymm0    = simde_mm256_cvtepi8_epi16(p_llrProcBuf[j]);\n");
-        fprintf(fd,"        ymmRes0 = simde_mm256_adds_epi16(ymmRes0, ymm0);\n");
-
-        fprintf(fd,"        ymm1    = simde_mm256_cvtepi8_epi16(p_llrProcBuf[j +1 ]);\n");
-        fprintf(fd,"        ymmRes1 = simde_mm256_adds_epi16(ymmRes1, ymm1);\n");
-
-            // Pack results back to epi8
-        fprintf(fd,"        ymm0 = simde_mm256_packs_epi16(ymmRes0, ymmRes1);\n");
-            // ymm0     = [ymmRes1[255:128] ymmRes0[255:128] ymmRes1[127:0] ymmRes0[127:0]]
-            // p_llrRes = [ymmRes1[255:128] ymmRes1[127:0] ymmRes0[255:128] ymmRes0[127:0]]
-        //fprintf(fd,"         (__m256i*) &llrRes[%d + i]    = simde_mm256_permute4x64_epi64(ymm0, 0xD8);\n",lut_startAddrBnGroupsLlr[idxBnGroup]>>5 );
-        fprintf(fd,"            p_llrRes[i] = simde_mm256_permute4x64_epi64(ymm0, 0xD8);\n");
+        fprintf(fd,"            p_llrRes[i] = simde_mm256_adds_epi8(p_llrRes[i], p_llrProcBuf[i]);\n");
 
         fprintf(fd,"}\n");
+
     }
 
 
@@ -615,40 +445,27 @@ fprintf(fd,  "// Process group with 10 CNs \n");
         cnOffsetInGroup = (lut_numBnInBnGroups[9]*NR_LDPC_ZMAX)>>4;
 
         // Set pointers to start of group with 10 CNs
-        fprintf(fd,"    p_bnProcBuf     = (__m128i*) &bnProcBuf    [%u];\n",lut_startAddrBnGroups[idxBnGroup]);
-        fprintf(fd,"    p_llrProcBuf    = (__m128i*) &llrProcBuf   [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
-        fprintf(fd,"    p_llrRes        = (__m256i*) &llrRes       [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd,"    p_bnProcBuf     = (simde__m256i*) &bnProcBuf    [%u];\n",lut_startAddrBnGroups[idxBnGroup]);
+        fprintf(fd,"    p_llrProcBuf    = (simde__m256i*) &llrProcBuf   [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd,"    p_llrRes        = (simde__m256i*) &llrRes       [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
         // Loop over BNs
-        fprintf(fd,"        for (int i=0,j=0;i<M;i++,j+=2) {\n");
+        fprintf(fd,"            for (int i=0;i<M;i++) {\n");
             // First 32 LLRs of first CN
-        fprintf(fd,"        ymmRes0 = simde_mm256_cvtepi8_epi16(p_bnProcBuf [j]);\n");
-        fprintf(fd,"        ymmRes1 = simde_mm256_cvtepi8_epi16(p_bnProcBuf [j +1]);\n");
+        fprintf(fd,"            p_llrRes[i] = p_bnProcBuf[i];\n");
 
             // Loop over CNs
         for (k=1; k<10; k++)
         {
-        fprintf(fd,"        ymm0 = simde_mm256_cvtepi8_epi16(p_bnProcBuf[%u + j]);\n", k*cnOffsetInGroup);
-        fprintf(fd,"        ymmRes0 = simde_mm256_adds_epi16(ymmRes0, ymm0);\n");
-
-        fprintf(fd,"        ymm1 = simde_mm256_cvtepi8_epi16(p_bnProcBuf[%u + j +1]);\n", k*cnOffsetInGroup);
-
-        fprintf(fd, "       ymmRes1 = simde_mm256_adds_epi16(ymmRes1, ymm1); \n");
+        fprintf(fd,"            p_llrRes[i] = simde_mm256_adds_epi8(p_llrRes[i], p_bnProcBuf[%u + i]);\n",k*cnOffsetInGroup);
         }
 
             // Add LLR from receiver input
-        fprintf(fd,"        ymm0    = simde_mm256_cvtepi8_epi16(p_llrProcBuf[j]);\n");
-        fprintf(fd,"        ymmRes0 = simde_mm256_adds_epi16(ymmRes0, ymm0);\n");
+        fprintf(fd,"            p_llrRes[i] = simde_mm256_adds_epi8(p_llrRes[i], p_llrProcBuf[i]);\n");
 
-        fprintf(fd,"        ymm1    = simde_mm256_cvtepi8_epi16(p_llrProcBuf[j +1 ]);\n");
-        fprintf(fd,"        ymmRes1 = simde_mm256_adds_epi16(ymmRes1, ymm1);\n");
 
-            // Pack results back to epi8
-        fprintf(fd,"        ymm0 = simde_mm256_packs_epi16(ymmRes0, ymmRes1);\n");
-            // ymm0     = [ymmRes1[255:128] ymmRes0[255:128] ymmRes1[127:0] ymmRes0[127:0]]
-            // p_llrRes = [ymmRes1[255:128] ymmRes1[127:0] ymmRes0[255:128] ymmRes0[127:0]]
-        fprintf(fd,"            p_llrRes[i] = simde_mm256_permute4x64_epi64(ymm0, 0xD8);\n");
 
         fprintf(fd,"}\n");
+
     }
 
 
@@ -673,38 +490,24 @@ fprintf(fd,  "// Process group with 11 CNs \n");
         cnOffsetInGroup = (lut_numBnInBnGroups[10]*NR_LDPC_ZMAX)>>4;
 
         // Set pointers to start of group with 11 CNs
-        fprintf(fd,"    p_bnProcBuf     = (__m128i*) &bnProcBuf    [%u];\n",lut_startAddrBnGroups[idxBnGroup]);
-        fprintf(fd,"    p_llrProcBuf    = (__m128i*) &llrProcBuf   [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
-        fprintf(fd,"    p_llrRes        = (__m256i*) &llrRes       [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd,"    p_bnProcBuf     = (simde__m256i*) &bnProcBuf    [%u];\n",lut_startAddrBnGroups[idxBnGroup]);
+        fprintf(fd,"    p_llrProcBuf    = (simde__m256i*) &llrProcBuf   [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd,"    p_llrRes        = (simde__m256i*) &llrRes       [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
         // Loop over BNs
-        fprintf(fd,"            for (int i=0,j=0;i<M;i++,j+=2) {\n");
+        fprintf(fd,"            for (int i=0;i<M;i++) {\n");
             // First 32 LLRs of first CN
-        fprintf(fd,"            ymmRes0 = simde_mm256_cvtepi8_epi16(p_bnProcBuf [j]);\n");
-        fprintf(fd,"            ymmRes1 = simde_mm256_cvtepi8_epi16(p_bnProcBuf [j +1]);\n");
+        fprintf(fd,"            p_llrRes[i] = p_bnProcBuf[i];\n");
 
             // Loop over CNs
         for (k=1; k<11; k++)
         {
-        fprintf(fd,"            ymm0 = simde_mm256_cvtepi8_epi16(p_bnProcBuf[%u + j]);\n", k*cnOffsetInGroup);
-        fprintf(fd,"            ymmRes0 = simde_mm256_adds_epi16(ymmRes0, ymm0);\n");
-
-        fprintf(fd,"            ymm1 = simde_mm256_cvtepi8_epi16(p_bnProcBuf[%u + j +1]);\n", k*cnOffsetInGroup);
-
-        fprintf(fd, "           ymmRes1 = simde_mm256_adds_epi16(ymmRes1, ymm1); \n");
+        fprintf(fd,"            p_llrRes[i] = simde_mm256_adds_epi8(p_llrRes[i], p_bnProcBuf[%u + i]);\n",k*cnOffsetInGroup);
         }
 
             // Add LLR from receiver input
-        fprintf(fd,"            ymm0    = simde_mm256_cvtepi8_epi16(p_llrProcBuf[j]);\n");
-        fprintf(fd,"            ymmRes0 = simde_mm256_adds_epi16(ymmRes0, ymm0);\n");
+        fprintf(fd,"            p_llrRes[i] = simde_mm256_adds_epi8(p_llrRes[i], p_llrProcBuf[i]);\n");
 
-        fprintf(fd,"            ymm1    = simde_mm256_cvtepi8_epi16(p_llrProcBuf[j +1 ]);\n");
-        fprintf(fd,"            ymmRes1 = simde_mm256_adds_epi16(ymmRes1, ymm1);\n");
 
-            // Pack results back to epi8
-        fprintf(fd,"            ymm0 = simde_mm256_packs_epi16(ymmRes0, ymmRes1);\n");
-            // ymm0     = [ymmRes1[255:128] ymmRes0[255:128] ymmRes1[127:0] ymmRes0[127:0]]
-            // p_llrRes = [ymmRes1[255:128] ymmRes1[127:0] ymmRes0[255:128] ymmRes0[127:0]]
-        fprintf(fd,"            p_llrRes[i] = simde_mm256_permute4x64_epi64(ymm0, 0xD8);\n");
 
         fprintf(fd,"}\n");
     }
@@ -714,7 +517,7 @@ fprintf(fd,  "// Process group with 11 CNs \n");
 
 fprintf(fd,  "// Process group with 12 CNs \n");
 
- // Process group with 2 CNs
+ // Process group with 12 CNs
 
     if (lut_numBnInBnGroups[11] > 0)
     {
@@ -728,38 +531,24 @@ fprintf(fd,  "// Process group with 12 CNs \n");
         cnOffsetInGroup = (lut_numBnInBnGroups[11]*NR_LDPC_ZMAX)>>4;
 
         // Set pointers to start of group with 12 CNs
-        fprintf(fd,"    p_bnProcBuf     = (__m128i*) &bnProcBuf    [%u];\n",lut_startAddrBnGroups[idxBnGroup]);
-        fprintf(fd,"    p_llrProcBuf    = (__m128i*) &llrProcBuf   [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
-        fprintf(fd,"    p_llrRes        = (__m256i*) &llrRes       [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd,"    p_bnProcBuf     = (simde__m256i*) &bnProcBuf    [%u];\n",lut_startAddrBnGroups[idxBnGroup]);
+        fprintf(fd,"    p_llrProcBuf    = (simde__m256i*) &llrProcBuf   [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd,"    p_llrRes        = (simde__m256i*) &llrRes       [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
         // Loop over BNs
-        fprintf(fd,"            for (int i=0,j=0;i<M;i++,j+=2) {\n");
+        fprintf(fd,"            for (int i=0;i<M;i++) {\n");
             // First 32 LLRs of first CN
-        fprintf(fd,"            ymmRes0 = simde_mm256_cvtepi8_epi16(p_bnProcBuf [j]);\n");
-        fprintf(fd,"            ymmRes1 = simde_mm256_cvtepi8_epi16(p_bnProcBuf [j +1]);\n");
+        fprintf(fd,"            p_llrRes[i] = p_bnProcBuf[i];\n");
 
             // Loop over CNs
         for (k=1; k<12; k++)
         {
-        fprintf(fd,"            ymm0 = simde_mm256_cvtepi8_epi16(p_bnProcBuf[%u + j]);\n", k*cnOffsetInGroup);
-        fprintf(fd,"            ymmRes0 = simde_mm256_adds_epi16(ymmRes0, ymm0);\n");
-
-        fprintf(fd,"            ymm1 = simde_mm256_cvtepi8_epi16(p_bnProcBuf[%u + j +1]);\n", k*cnOffsetInGroup);
-
-        fprintf(fd, "           ymmRes1 = simde_mm256_adds_epi16(ymmRes1, ymm1); \n");
+        fprintf(fd,"            p_llrRes[i] = simde_mm256_adds_epi8(p_llrRes[i], p_bnProcBuf[%u + i]);\n",k*cnOffsetInGroup);
         }
 
             // Add LLR from receiver input
-        fprintf(fd,"            ymm0    = simde_mm256_cvtepi8_epi16(p_llrProcBuf[j]);\n");
-        fprintf(fd,"            ymmRes0 = simde_mm256_adds_epi16(ymmRes0, ymm0);\n");
+        fprintf(fd,"            p_llrRes[i] = simde_mm256_adds_epi8(p_llrRes[i], p_llrProcBuf[i]);\n");
 
-        fprintf(fd,"            ymm1    = simde_mm256_cvtepi8_epi16(p_llrProcBuf[j +1 ]);\n");
-        fprintf(fd,"            ymmRes1 = simde_mm256_adds_epi16(ymmRes1, ymm1);\n");
 
-            // Pack results back to epi8
-        fprintf(fd,"            ymm0 = simde_mm256_packs_epi16(ymmRes0, ymmRes1);\n");
-            // ymm0     = [ymmRes1[255:128] ymmRes0[255:128] ymmRes1[127:0] ymmRes0[127:0]]
-            // p_llrRes = [ymmRes1[255:128] ymmRes1[127:0] ymmRes0[255:128] ymmRes0[127:0]]
-        fprintf(fd,"            p_llrRes[i] = simde_mm256_permute4x64_epi64(ymm0, 0xD8);\n");
 
         fprintf(fd,"}\n");
     }
@@ -770,7 +559,7 @@ fprintf(fd,  "// Process group with 12 CNs \n");
 
 fprintf(fd,  "// Process group with 13 CNs \n");
 
- // Process group with 3 CNs
+ // Process group with 13 CNs
 
     if (lut_numBnInBnGroups[12] > 0)
     {
@@ -784,38 +573,24 @@ fprintf(fd,  "// Process group with 13 CNs \n");
         cnOffsetInGroup = (lut_numBnInBnGroups[12]*NR_LDPC_ZMAX)>>4;
 
         // Set pointers to start of group with 13 CNs
-        fprintf(fd,"    p_bnProcBuf     = (__m128i*) &bnProcBuf    [%u];\n",lut_startAddrBnGroups[idxBnGroup]);
-        fprintf(fd,"    p_llrProcBuf    = (__m128i*) &llrProcBuf   [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
-        fprintf(fd,"    p_llrRes        = (__m256i*) &llrRes       [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd,"    p_bnProcBuf     = (simde__m256i*) &bnProcBuf    [%u];\n",lut_startAddrBnGroups[idxBnGroup]);
+        fprintf(fd,"    p_llrProcBuf    = (simde__m256i*) &llrProcBuf   [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd,"    p_llrRes        = (simde__m256i*) &llrRes       [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
         // Loop over BNs
-        fprintf(fd,"        for (int i=0,j=0;i<M;i++,j+=2) {\n");
+        fprintf(fd,"            for (int i=0;i<M;i++) {\n");
             // First 32 LLRs of first CN
-        fprintf(fd,"        ymmRes0 = simde_mm256_cvtepi8_epi16(p_bnProcBuf [j]);\n");
-        fprintf(fd,"        ymmRes1 = simde_mm256_cvtepi8_epi16(p_bnProcBuf [j +1]);\n");
+        fprintf(fd,"            p_llrRes[i] = p_bnProcBuf[i];\n");
 
             // Loop over CNs
         for (k=1; k<13; k++)
         {
-        fprintf(fd,"        ymm0 = simde_mm256_cvtepi8_epi16(p_bnProcBuf[%u + j]);\n", k*cnOffsetInGroup);
-        fprintf(fd,"        ymmRes0 = simde_mm256_adds_epi16(ymmRes0, ymm0);\n");
-
-        fprintf(fd,"        ymm1 = simde_mm256_cvtepi8_epi16(p_bnProcBuf[%u + j +1]);\n", k*cnOffsetInGroup);
-
-        fprintf(fd,"        ymmRes1 = simde_mm256_adds_epi16(ymmRes1, ymm1); \n");
-            }
+        fprintf(fd,"            p_llrRes[i] = simde_mm256_adds_epi8(p_llrRes[i], p_bnProcBuf[%u + i]);\n",k*cnOffsetInGroup);
+        }
 
             // Add LLR from receiver input
-        fprintf(fd,"        ymm0    = simde_mm256_cvtepi8_epi16(p_llrProcBuf[j]);\n");
-        fprintf(fd,"        ymmRes0 = simde_mm256_adds_epi16(ymmRes0, ymm0);\n");
+        fprintf(fd,"            p_llrRes[i] = simde_mm256_adds_epi8(p_llrRes[i], p_llrProcBuf[i]);\n");
 
-        fprintf(fd,"        ymm1    = simde_mm256_cvtepi8_epi16(p_llrProcBuf[j +1 ]);\n");
-        fprintf(fd,"        ymmRes1 = simde_mm256_adds_epi16(ymmRes1, ymm1);\n");
 
-            // Pack results back to epi8
-        fprintf(fd,"        ymm0 = simde_mm256_packs_epi16(ymmRes0, ymmRes1);\n");
-            // ymm0     = [ymmRes1[255:128] ymmRes0[255:128] ymmRes1[127:0] ymmRes0[127:0]]
-            // p_llrRes = [ymmRes1[255:128] ymmRes1[127:0] ymmRes0[255:128] ymmRes0[127:0]]
-        fprintf(fd,"            p_llrRes[i] = simde_mm256_permute4x64_epi64(ymm0, 0xD8);\n");
 
         fprintf(fd,"}\n");
     }
@@ -823,7 +598,7 @@ fprintf(fd,  "// Process group with 13 CNs \n");
 
 
     // =====================================================================
-    // Process group with 4 CNs
+    // Process group with 14 CNs
 
 fprintf(fd,  "// Process group with 14 CNs \n");
 
@@ -841,38 +616,24 @@ fprintf(fd,  "// Process group with 14 CNs \n");
         cnOffsetInGroup = (lut_numBnInBnGroups[13]*NR_LDPC_ZMAX)>>4;
 
         // Set pointers to start of group with 14 CNs
-        fprintf(fd,"    p_bnProcBuf     = (__m128i*) &bnProcBuf    [%u];\n",lut_startAddrBnGroups[idxBnGroup]);
-        fprintf(fd,"    p_llrProcBuf    = (__m128i*) &llrProcBuf   [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
-        fprintf(fd,"    p_llrRes        = (__m256i*) &llrRes       [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd,"    p_bnProcBuf     = (simde__m256i*) &bnProcBuf    [%u];\n",lut_startAddrBnGroups[idxBnGroup]);
+        fprintf(fd,"    p_llrProcBuf    = (simde__m256i*) &llrProcBuf   [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd,"    p_llrRes        = (simde__m256i*) &llrRes       [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
         // Loop over BNs
-        fprintf(fd,"        for (int i=0,j=0;i<M;i++,j+=2) {\n");
+        fprintf(fd,"            for (int i=0;i<M;i++) {\n");
             // First 32 LLRs of first CN
-        fprintf(fd,"        ymmRes0 = simde_mm256_cvtepi8_epi16(p_bnProcBuf [j]);\n");
-        fprintf(fd,"        ymmRes1 = simde_mm256_cvtepi8_epi16(p_bnProcBuf [j +1]);\n");
+        fprintf(fd,"            p_llrRes[i] = p_bnProcBuf[i];\n");
 
             // Loop over CNs
         for (k=1; k<14; k++)
         {
-        fprintf(fd,"        ymm0 = simde_mm256_cvtepi8_epi16(p_bnProcBuf[%u + j]);\n", k*cnOffsetInGroup);
-        fprintf(fd,"        ymmRes0 = simde_mm256_adds_epi16(ymmRes0, ymm0);\n");
-
-        fprintf(fd,"        ymm1 = simde_mm256_cvtepi8_epi16(p_bnProcBuf[%u + j +1]);\n", k*cnOffsetInGroup);
-
-        fprintf(fd, "       ymmRes1 = simde_mm256_adds_epi16(ymmRes1, ymm1); \n");
+        fprintf(fd,"            p_llrRes[i] = simde_mm256_adds_epi8(p_llrRes[i], p_bnProcBuf[%u + i]);\n",k*cnOffsetInGroup);
         }
 
             // Add LLR from receiver input
-        fprintf(fd,"        ymm0    = simde_mm256_cvtepi8_epi16(p_llrProcBuf[j]);\n");
-        fprintf(fd,"        ymmRes0 = simde_mm256_adds_epi16(ymmRes0, ymm0);\n");
+        fprintf(fd,"            p_llrRes[i] = simde_mm256_adds_epi8(p_llrRes[i], p_llrProcBuf[i]);\n");
 
-        fprintf(fd,"        ymm1    = simde_mm256_cvtepi8_epi16(p_llrProcBuf[j +1 ]);\n");
-        fprintf(fd,"        ymmRes1 = simde_mm256_adds_epi16(ymmRes1, ymm1);\n");
 
-            // Pack results back to epi8
-        fprintf(fd,"        ymm0 = simde_mm256_packs_epi16(ymmRes0, ymmRes1);\n");
-            // ymm0     = [ymmRes1[255:128] ymmRes0[255:128] ymmRes1[127:0] ymmRes0[127:0]]
-            // p_llrRes = [ymmRes1[255:128] ymmRes1[127:0] ymmRes0[255:128] ymmRes0[127:0]]
-        fprintf(fd,"            p_llrRes[i] = simde_mm256_permute4x64_epi64(ymm0, 0xD8);\n");
 
         fprintf(fd,"}\n");
     }
@@ -897,38 +658,24 @@ fprintf(fd,  "// Process group with 15 CNs \n");
         cnOffsetInGroup = (lut_numBnInBnGroups[14]*NR_LDPC_ZMAX)>>4;
 
         // Set pointers to start of group with 15 CNs
-        fprintf(fd,"    p_bnProcBuf     = (__m128i*) &bnProcBuf    [%u];\n",lut_startAddrBnGroups[idxBnGroup]);
-        fprintf(fd,"    p_llrProcBuf    = (__m128i*) &llrProcBuf   [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
-        fprintf(fd,"    p_llrRes        = (__m256i*) &llrRes       [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd,"    p_bnProcBuf     = (simde__m256i*) &bnProcBuf    [%u];\n",lut_startAddrBnGroups[idxBnGroup]);
+        fprintf(fd,"    p_llrProcBuf    = (simde__m256i*) &llrProcBuf   [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd,"    p_llrRes        = (simde__m256i*) &llrRes       [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
         // Loop over BNs
-        fprintf(fd,"        for (int i=0,j=0;i<M;i++,j+=2) {\n");
+        fprintf(fd,"            for (int i=0;i<M;i++) {\n");
             // First 32 LLRs of first CN
-        fprintf(fd,"        ymmRes0 = simde_mm256_cvtepi8_epi16(p_bnProcBuf [j]);\n");
-        fprintf(fd,"        ymmRes1 = simde_mm256_cvtepi8_epi16(p_bnProcBuf [j +1]);\n");
+        fprintf(fd,"            p_llrRes[i] = p_bnProcBuf[i];\n");
 
             // Loop over CNs
         for (k=1; k<15; k++)
         {
-        fprintf(fd,"        ymm0 = simde_mm256_cvtepi8_epi16(p_bnProcBuf[%u + j]);\n", k*cnOffsetInGroup);
-        fprintf(fd,"        ymmRes0 = simde_mm256_adds_epi16(ymmRes0, ymm0);\n");
-
-        fprintf(fd,"        ymm1 = simde_mm256_cvtepi8_epi16(p_bnProcBuf[%u + j +1]);\n", k*cnOffsetInGroup);
-
-        fprintf(fd, "       ymmRes1 = simde_mm256_adds_epi16(ymmRes1, ymm1); \n");
+        fprintf(fd,"            p_llrRes[i] = simde_mm256_adds_epi8(p_llrRes[i], p_bnProcBuf[%u + i]);\n",k*cnOffsetInGroup);
         }
 
             // Add LLR from receiver input
-        fprintf(fd,"        ymm0    = simde_mm256_cvtepi8_epi16(p_llrProcBuf[j]);\n");
-        fprintf(fd,"        ymmRes0 = simde_mm256_adds_epi16(ymmRes0, ymm0);\n");
+        fprintf(fd,"            p_llrRes[i] = simde_mm256_adds_epi8(p_llrRes[i], p_llrProcBuf[i]);\n");
 
-        fprintf(fd,"        ymm1    = simde_mm256_cvtepi8_epi16(p_llrProcBuf[j +1 ]);\n");
-        fprintf(fd,"        ymmRes1 = simde_mm256_adds_epi16(ymmRes1, ymm1);\n");
 
-            // Pack results back to epi8
-        fprintf(fd,"        ymm0 = simde_mm256_packs_epi16(ymmRes0, ymmRes1);\n");
-            // ymm0     = [ymmRes1[255:128] ymmRes0[255:128] ymmRes1[127:0] ymmRes0[127:0]]
-            // p_llrRes = [ymmRes1[255:128] ymmRes1[127:0] ymmRes0[255:128] ymmRes0[127:0]]
-         fprintf(fd,"            p_llrRes[i] = simde_mm256_permute4x64_epi64(ymm0, 0xD8);\n");
 
         fprintf(fd,"}\n");
     }
@@ -954,38 +701,24 @@ fprintf(fd,  "// Process group with 16 CNs \n");
         cnOffsetInGroup = (lut_numBnInBnGroups[15]*NR_LDPC_ZMAX)>>4;
 
         // Set pointers to start of group with 16 CNs
-        fprintf(fd,"    p_bnProcBuf     = (__m128i*) &bnProcBuf    [%u];\n",lut_startAddrBnGroups[idxBnGroup]);
-        fprintf(fd,"    p_llrProcBuf    = (__m128i*) &llrProcBuf   [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
-        fprintf(fd,"    p_llrRes        = (__m256i*) &llrRes       [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd,"    p_bnProcBuf     = (simde__m256i*) &bnProcBuf    [%u];\n",lut_startAddrBnGroups[idxBnGroup]);
+        fprintf(fd,"    p_llrProcBuf    = (simde__m256i*) &llrProcBuf   [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd,"    p_llrRes        = (simde__m256i*) &llrRes       [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
         // Loop over BNs
-        fprintf(fd,"        for (int i=0,j=0;i<M;i++,j+=2) {\n");
+        fprintf(fd,"            for (int i=0;i<M;i++) {\n");
             // First 32 LLRs of first CN
-        fprintf(fd,"        ymmRes0 = simde_mm256_cvtepi8_epi16(p_bnProcBuf [j]);\n");
-        fprintf(fd,"        ymmRes1 = simde_mm256_cvtepi8_epi16(p_bnProcBuf [j +1]);\n");
+        fprintf(fd,"            p_llrRes[i] = p_bnProcBuf[i];\n");
 
             // Loop over CNs
         for (k=1; k<16; k++)
         {
-        fprintf(fd,"        ymm0 = simde_mm256_cvtepi8_epi16(p_bnProcBuf[%u + j]);\n", k*cnOffsetInGroup);
-        fprintf(fd,"        ymmRes0 = simde_mm256_adds_epi16(ymmRes0, ymm0);\n");
-
-        fprintf(fd,"        ymm1 = simde_mm256_cvtepi8_epi16(p_bnProcBuf[%u + j +1]);\n", k*cnOffsetInGroup);
-
-        fprintf(fd, "       ymmRes1 = simde_mm256_adds_epi16(ymmRes1, ymm1); \n");
+        fprintf(fd,"            p_llrRes[i] = simde_mm256_adds_epi8(p_llrRes[i], p_bnProcBuf[%u + i]);\n",k*cnOffsetInGroup);
         }
 
             // Add LLR from receiver input
-        fprintf(fd,"        ymm0    = simde_mm256_cvtepi8_epi16(p_llrProcBuf[j]);\n");
-        fprintf(fd,"        ymmRes0 = simde_mm256_adds_epi16(ymmRes0, ymm0);\n");
+        fprintf(fd,"            p_llrRes[i] = simde_mm256_adds_epi8(p_llrRes[i], p_llrProcBuf[i]);\n");
 
-        fprintf(fd,"        ymm1    = simde_mm256_cvtepi8_epi16(p_llrProcBuf[j +1 ]);\n");
-        fprintf(fd,"        ymmRes1 = simde_mm256_adds_epi16(ymmRes1, ymm1);\n");
 
-            // Pack results back to epi8
-        fprintf(fd,"        ymm0 = simde_mm256_packs_epi16(ymmRes0, ymmRes1);\n");
-            // ymm0     = [ymmRes1[255:128] ymmRes0[255:128] ymmRes1[127:0] ymmRes0[127:0]]
-            // p_llrRes = [ymmRes1[255:128] ymmRes1[127:0] ymmRes0[255:128] ymmRes0[127:0]]
-        fprintf(fd,"            p_llrRes[i] = simde_mm256_permute4x64_epi64(ymm0, 0xD8);\n");
 
         fprintf(fd,"}\n");
     }
@@ -1010,38 +743,24 @@ fprintf(fd,  "// Process group with 17 CNs \n");
         cnOffsetInGroup = (lut_numBnInBnGroups[16]*NR_LDPC_ZMAX)>>4;
 
         // Set pointers to start of group with 17 CNs
-        fprintf(fd,"    p_bnProcBuf     = (__m128i*) &bnProcBuf    [%u];\n",lut_startAddrBnGroups[idxBnGroup]);
-        fprintf(fd,"    p_llrProcBuf    = (__m128i*) &llrProcBuf   [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
-        fprintf(fd,"    p_llrRes        = (__m256i*) &llrRes       [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd,"    p_bnProcBuf     = (simde__m256i*) &bnProcBuf    [%u];\n",lut_startAddrBnGroups[idxBnGroup]);
+        fprintf(fd,"    p_llrProcBuf    = (simde__m256i*) &llrProcBuf   [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd,"    p_llrRes        = (simde__m256i*) &llrRes       [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
         // Loop over BNs
-        fprintf(fd,"        for (int i=0,j=0;i<M;i++,j+=2) {\n");
+        fprintf(fd,"            for (int i=0;i<M;i++) {\n");
             // First 32 LLRs of first CN
-        fprintf(fd,"        ymmRes0 = simde_mm256_cvtepi8_epi16(p_bnProcBuf [j]);\n");
-        fprintf(fd,"        ymmRes1 = simde_mm256_cvtepi8_epi16(p_bnProcBuf [j +1]);\n");
+        fprintf(fd,"            p_llrRes[i] = p_bnProcBuf[i];\n");
 
             // Loop over CNs
         for (k=1; k<17; k++)
         {
-        fprintf(fd,"        ymm0 = simde_mm256_cvtepi8_epi16(p_bnProcBuf[%u + j]);\n", k*cnOffsetInGroup);
-        fprintf(fd,"        ymmRes0 = simde_mm256_adds_epi16(ymmRes0, ymm0);\n");
-
-        fprintf(fd,"        ymm1 = simde_mm256_cvtepi8_epi16(p_bnProcBuf[%u + j +1]);\n", k*cnOffsetInGroup);
-
-        fprintf(fd, "       ymmRes1 = simde_mm256_adds_epi16(ymmRes1, ymm1); \n");
+        fprintf(fd,"            p_llrRes[i] = simde_mm256_adds_epi8(p_llrRes[i], p_bnProcBuf[%u + i]);\n",k*cnOffsetInGroup);
         }
 
             // Add LLR from receiver input
-        fprintf(fd,"        ymm0    = simde_mm256_cvtepi8_epi16(p_llrProcBuf[j]);\n");
-        fprintf(fd,"        ymmRes0 = simde_mm256_adds_epi16(ymmRes0, ymm0);\n");
+        fprintf(fd,"            p_llrRes[i] = simde_mm256_adds_epi8(p_llrRes[i], p_llrProcBuf[i]);\n");
 
-        fprintf(fd,"        ymm1    = simde_mm256_cvtepi8_epi16(p_llrProcBuf[j +1 ]);\n");
-        fprintf(fd,"        ymmRes1 = simde_mm256_adds_epi16(ymmRes1, ymm1);\n");
 
-            // Pack results back to epi8
-        fprintf(fd,"        ymm0 = simde_mm256_packs_epi16(ymmRes0, ymmRes1);\n");
-            // ymm0     = [ymmRes1[255:128] ymmRes0[255:128] ymmRes1[127:0] ymmRes0[127:0]]
-            // p_llrRes = [ymmRes1[255:128] ymmRes1[127:0] ymmRes0[255:128] ymmRes0[127:0]]
-        fprintf(fd,"            p_llrRes[i] = simde_mm256_permute4x64_epi64(ymm0, 0xD8);\n");
 
         fprintf(fd,"}\n");
     }
@@ -1066,44 +785,29 @@ fprintf(fd,  "// Process group with 18 CNs \n");
         cnOffsetInGroup = (lut_numBnInBnGroups[17]*NR_LDPC_ZMAX)>>4;
 
         // Set pointers to start of group with 18 CNs
-        fprintf(fd,"    p_bnProcBuf     = (__m128i*) &bnProcBuf    [%u];\n",lut_startAddrBnGroups[idxBnGroup]);
-        fprintf(fd,"    p_llrProcBuf    = (__m128i*) &llrProcBuf   [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
-        fprintf(fd,"    p_llrRes        = (__m256i*) &llrRes       [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
-        // Loop over BNs
-        fprintf(fd,"        for (int i=0,j=0;i<M;i++,j+=2) {\n");
+        fprintf(fd,"    p_bnProcBuf     = (simde__m256i*) &bnProcBuf    [%u];\n",lut_startAddrBnGroups[idxBnGroup]);
+        fprintf(fd,"    p_llrProcBuf    = (simde__m256i*) &llrProcBuf   [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd,"    p_llrRes        = (simde__m256i*) &llrRes       [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd,"            for (int i=0;i<M;i++) {\n");
             // First 16 LLRs of first CN
-        fprintf(fd,"        ymmRes0 = simde_mm256_cvtepi8_epi16(p_bnProcBuf [j]);\n");
-        fprintf(fd,"        ymmRes1 = simde_mm256_cvtepi8_epi16(p_bnProcBuf [j +1]);\n");
+        fprintf(fd,"            p_llrRes[i] = p_bnProcBuf [i];\n");
 
             // Loop over CNs
         for (k=1; k<18; k++)
         {
-        fprintf(fd,"        ymm0 = simde_mm256_cvtepi8_epi16(p_bnProcBuf[%u + j]);\n", k*cnOffsetInGroup);
-        fprintf(fd,"        ymmRes0 = simde_mm256_adds_epi16(ymmRes0, ymm0);\n");
-
-        fprintf(fd,"        ymm1 = simde_mm256_cvtepi8_epi16(p_bnProcBuf[%u + j +1]);\n", k*cnOffsetInGroup);
-
-        fprintf(fd, "       ymmRes1 = simde_mm256_adds_epi16(ymmRes1, ymm1); \n");
+        fprintf(fd,"            p_llrRes[i] = simde_mm256_adds_epi8(p_llrRes[i], p_bnProcBuf[%u + i]);\n",k*cnOffsetInGroup);
         }
 
             // Add LLR from receiver input
-        fprintf(fd,"        ymm0    = simde_mm256_cvtepi8_epi16(p_llrProcBuf[j]);\n");
-        fprintf(fd,"        ymmRes0 = simde_mm256_adds_epi16(ymmRes0, ymm0);\n");
+        fprintf(fd,"            p_llrRes[i] = simde_mm256_adds_epi8(p_llrRes[i], p_llrProcBuf[i]);\n");
 
-        fprintf(fd,"        ymm1    = simde_mm256_cvtepi8_epi16(p_llrProcBuf[j +1 ]);\n");
-        fprintf(fd,"        ymmRes1 = simde_mm256_adds_epi16(ymmRes1, ymm1);\n");
 
-            // Pack results back to epi8
-        fprintf(fd,"        ymm0 = simde_mm256_packs_epi16(ymmRes0, ymmRes1);\n");
-            // ymm0     = [ymmRes1[255:128] ymmRes0[255:128] ymmRes1[127:0] ymmRes0[127:0]]
-            // p_llrRes = [ymmRes1[255:128] ymmRes1[127:0] ymmRes0[255:128] ymmRes0[127:0]]
-        fprintf(fd,"            p_llrRes[i] = simde_mm256_permute4x64_epi64(ymm0, 0xD8);\n");
 
         fprintf(fd,"}\n");
     }
 
    // =====================================================================
-    // Process group with 9 CNs
+    // Process group with 19 CNs
 
 fprintf(fd,  "// Process group with 19 CNs \n");
 
@@ -1121,38 +825,24 @@ fprintf(fd,  "// Process group with 19 CNs \n");
         cnOffsetInGroup = (lut_numBnInBnGroups[18]*NR_LDPC_ZMAX)>>4;
 
         // Set pointers to start of group with 19 CNs
-        fprintf(fd,"    p_bnProcBuf     = (__m128i*) &bnProcBuf    [%u];\n",lut_startAddrBnGroups[idxBnGroup]);
-        fprintf(fd,"    p_llrProcBuf    = (__m128i*) &llrProcBuf   [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
-        fprintf(fd,"    p_llrRes        = (__m256i*) &llrRes       [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
-        // Loop over BNs
-        fprintf(fd,"        for (int i=0,j=0;i<M;i++,j+=2) {\n");
+        fprintf(fd,"    p_bnProcBuf     = (simde__m256i*) &bnProcBuf    [%u];\n",lut_startAddrBnGroups[idxBnGroup]);
+        fprintf(fd,"    p_llrProcBuf    = (simde__m256i*) &llrProcBuf   [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd,"    p_llrRes        = (simde__m256i*) &llrRes       [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
             // First 32 LLRs of first CN
-        fprintf(fd,"        ymmRes0 = simde_mm256_cvtepi8_epi16(p_bnProcBuf [j]);\n");
-        fprintf(fd,"        ymmRes1 = simde_mm256_cvtepi8_epi16(p_bnProcBuf [j +1]);\n");
+        // Loop over BNs
+        fprintf(fd,"            for (int i=0;i<M;i++) {\n");
+        fprintf(fd,"            p_llrRes[i] = p_bnProcBuf[i];\n");
 
             // Loop over CNs
         for (k=1; k<19; k++)
         {
-        fprintf(fd,"        ymm0 = simde_mm256_cvtepi8_epi16(p_bnProcBuf[%u + j]);\n", k*cnOffsetInGroup);
-        fprintf(fd,"        ymmRes0 = simde_mm256_adds_epi16(ymmRes0, ymm0);\n");
-
-        fprintf(fd,"        ymm1 = simde_mm256_cvtepi8_epi16(p_bnProcBuf[%u + j +1]);\n", k*cnOffsetInGroup);
-
-        fprintf(fd, "       ymmRes1 = simde_mm256_adds_epi16(ymmRes1, ymm1); \n");
+        fprintf(fd,"            p_llrRes[i] = simde_mm256_adds_epi8(p_llrRes[i], p_bnProcBuf[%u + i]);\n",k*cnOffsetInGroup);
         }
 
             // Add LLR from receiver input
-        fprintf(fd,"        ymm0    = simde_mm256_cvtepi8_epi16(p_llrProcBuf[j]);\n");
-        fprintf(fd,"        ymmRes0 = simde_mm256_adds_epi16(ymmRes0, ymm0);\n");
+        fprintf(fd,"            p_llrRes[i] = simde_mm256_adds_epi8(p_llrRes[i], p_llrProcBuf[i]);\n");
 
-        fprintf(fd,"        ymm1    = simde_mm256_cvtepi8_epi16(p_llrProcBuf[j +1 ]);\n");
-        fprintf(fd,"        ymmRes1 = simde_mm256_adds_epi16(ymmRes1, ymm1);\n");
 
-            // Pack results back to epi8
-        fprintf(fd,"        ymm0 = simde_mm256_packs_epi16(ymmRes0, ymmRes1);\n");
-            // ymm0     = [ymmRes1[255:128] ymmRes0[255:128] ymmRes1[127:0] ymmRes0[127:0]]
-            // p_llrRes = [ymmRes1[255:128] ymmRes1[127:0] ymmRes0[255:128] ymmRes0[127:0]]
-        fprintf(fd,"            p_llrRes[i] = simde_mm256_permute4x64_epi64(ymm0, 0xD8);\n");
 
         fprintf(fd,"}\n");
     }
@@ -1177,38 +867,24 @@ fprintf(fd,  "// Process group with 20 CNs \n");
         cnOffsetInGroup = (lut_numBnInBnGroups[19]*NR_LDPC_ZMAX)>>4;
 
         // Set pointers to start of group with 20 CNs
-        fprintf(fd,"    p_bnProcBuf     = (__m128i*) &bnProcBuf    [%u];\n",lut_startAddrBnGroups[idxBnGroup]);
-        fprintf(fd,"    p_llrProcBuf    = (__m128i*) &llrProcBuf   [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
-        fprintf(fd,"    p_llrRes        = (__m256i*) &llrRes       [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
-        // Loop over BNs
-        fprintf(fd,"        for (int i=0,j=0;i<M;i++,j+=2) {\n");
+        fprintf(fd,"    p_bnProcBuf     = (simde__m256i*) &bnProcBuf    [%u];\n",lut_startAddrBnGroups[idxBnGroup]);
+        fprintf(fd,"    p_llrProcBuf    = (simde__m256i*) &llrProcBuf   [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd,"    p_llrRes        = (simde__m256i*) &llrRes       [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
             // First 32 LLRs of first CN
-        fprintf(fd,"        ymmRes0 = simde_mm256_cvtepi8_epi16(p_bnProcBuf [j]);\n");
-        fprintf(fd,"        ymmRes1 = simde_mm256_cvtepi8_epi16(p_bnProcBuf [j +1]);\n");
+        // Loop over BNs
+        fprintf(fd,"            for (int i=0;i<M;i++) {\n");
+        fprintf(fd,"            p_llrRes[i] = p_bnProcBuf[i];\n");
 
             // Loop over CNs
         for (k=1; k<20; k++)
         {
-        fprintf(fd,"        ymm0 = simde_mm256_cvtepi8_epi16(p_bnProcBuf[%u + j]);\n", k*cnOffsetInGroup);
-        fprintf(fd,"        ymmRes0 = simde_mm256_adds_epi16(ymmRes0, ymm0);\n");
-
-        fprintf(fd,"        ymm1 = simde_mm256_cvtepi8_epi16(p_bnProcBuf[%u + j +1]);\n", k*cnOffsetInGroup);
-
-        fprintf(fd, "       ymmRes1 = simde_mm256_adds_epi16(ymmRes1, ymm1); \n");
+        fprintf(fd,"            p_llrRes[i] = simde_mm256_adds_epi8(p_llrRes[i], p_bnProcBuf[%u + i]);\n",k*cnOffsetInGroup);
         }
 
             // Add LLR from receiver input
-        fprintf(fd,"        ymm0    = simde_mm256_cvtepi8_epi16(p_llrProcBuf[j]);\n");
-        fprintf(fd,"        ymmRes0 = simde_mm256_adds_epi16(ymmRes0, ymm0);\n");
+        fprintf(fd,"            p_llrRes[i] = simde_mm256_adds_epi8(p_llrRes[i], p_llrProcBuf[i]);\n");
 
-        fprintf(fd,"        ymm1    = simde_mm256_cvtepi8_epi16(p_llrProcBuf[j +1 ]);\n");
-        fprintf(fd,"        ymmRes1 = simde_mm256_adds_epi16(ymmRes1, ymm1);\n");
 
-            // Pack results back to epi8
-        fprintf(fd,"        ymm0 = simde_mm256_packs_epi16(ymmRes0, ymmRes1);\n");
-            // ymm0     = [ymmRes1[255:128] ymmRes0[255:128] ymmRes1[127:0] ymmRes0[127:0]]
-            // p_llrRes = [ymmRes1[255:128] ymmRes1[127:0] ymmRes0[255:128] ymmRes0[127:0]]
-        fprintf(fd,"            p_llrRes[i] = simde_mm256_permute4x64_epi64(ymm0, 0xD8);\n");
 
         fprintf(fd,"}\n");
     }
@@ -1237,38 +913,24 @@ fprintf(fd,  "// Process group with 21 CNs \n");
         cnOffsetInGroup = (lut_numBnInBnGroups[20]*NR_LDPC_ZMAX)>>4;
 
         // Set pointers to start of group with 21 CNs
-        fprintf(fd,"    p_bnProcBuf     = (__m128i*) &bnProcBuf    [%u];\n",lut_startAddrBnGroups[idxBnGroup]);
-        fprintf(fd,"    p_llrProcBuf    = (__m128i*) &llrProcBuf   [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
-        fprintf(fd,"    p_llrRes        = (__m256i*) &llrRes       [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
-        // Loop over BNs
-        fprintf(fd,"            for (int i=0,j=0;i<M;i++,j+=2) {\n");
+        fprintf(fd,"    p_bnProcBuf     = (simde__m256i*) &bnProcBuf    [%u];\n",lut_startAddrBnGroups[idxBnGroup]);
+        fprintf(fd,"    p_llrProcBuf    = (simde__m256i*) &llrProcBuf   [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd,"    p_llrRes        = (simde__m256i*) &llrRes       [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
             // First 32 LLRs of first CN
-        fprintf(fd,"            ymmRes0 = simde_mm256_cvtepi8_epi16(p_bnProcBuf [j]);\n");
-        fprintf(fd,"            ymmRes1 = simde_mm256_cvtepi8_epi16(p_bnProcBuf [j +1]);\n");
+        // Loop over BNs
+        fprintf(fd,"            for (int i=0;i<M;i++) {\n");
+        fprintf(fd,"            p_llrRes[i] = p_bnProcBuf[i];\n");
 
             // Loop over CNs
         for (k=1; k<21; k++)
         {
-        fprintf(fd,"            ymm0 = simde_mm256_cvtepi8_epi16(p_bnProcBuf[%u + j]);\n", k*cnOffsetInGroup);
-        fprintf(fd,"            ymmRes0 = simde_mm256_adds_epi16(ymmRes0, ymm0);\n");
-
-        fprintf(fd,"            ymm1 = simde_mm256_cvtepi8_epi16(p_bnProcBuf[%u + j +1]);\n", k*cnOffsetInGroup);
-
-        fprintf(fd, "           ymmRes1 = simde_mm256_adds_epi16(ymmRes1, ymm1); \n");
+        fprintf(fd,"            p_llrRes[i] = simde_mm256_adds_epi8(p_llrRes[i], p_bnProcBuf[%u + i]);\n",k*cnOffsetInGroup);
         }
 
             // Add LLR from receiver input
-        fprintf(fd,"            ymm0    = simde_mm256_cvtepi8_epi16(p_llrProcBuf[j]);\n");
-        fprintf(fd,"            ymmRes0 = simde_mm256_adds_epi16(ymmRes0, ymm0);\n");
+        fprintf(fd,"            p_llrRes[i] = simde_mm256_adds_epi8(p_llrRes[i], p_llrProcBuf[i]);\n");
 
-        fprintf(fd,"            ymm1    = simde_mm256_cvtepi8_epi16(p_llrProcBuf[j +1 ]);\n");
-        fprintf(fd,"            ymmRes1 = simde_mm256_adds_epi16(ymmRes1, ymm1);\n");
 
-            // Pack results back to epi8
-        fprintf(fd,"            ymm0 = simde_mm256_packs_epi16(ymmRes0, ymmRes1);\n");
-            // ymm0     = [ymmRes1[255:128] ymmRes0[255:128] ymmRes1[127:0] ymmRes0[127:0]]
-            // p_llrRes = [ymmRes1[255:128] ymmRes1[127:0] ymmRes0[255:128] ymmRes0[127:0]]
-        fprintf(fd,"            p_llrRes[i] = simde_mm256_permute4x64_epi64(ymm0, 0xD8);\n");
 
         fprintf(fd,"}\n");
     }
@@ -1292,44 +954,30 @@ fprintf(fd,  "// Process group with 22 CNs \n");
         cnOffsetInGroup = (lut_numBnInBnGroups[21]*NR_LDPC_ZMAX)>>4;
 
         // Set pointers to start of group with 22 CNs
-        fprintf(fd,"    p_bnProcBuf     = (__m128i*) &bnProcBuf    [%u];\n",lut_startAddrBnGroups[idxBnGroup]);
-        fprintf(fd,"    p_llrProcBuf    = (__m128i*) &llrProcBuf   [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
-        fprintf(fd,"    p_llrRes        = (__m256i*) &llrRes       [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
-        // Loop over BNs
-        fprintf(fd,"            for (int i=0,j=0;i<M;i++,j+=2) {\n");
+        fprintf(fd,"    p_bnProcBuf     = (simde__m256i*) &bnProcBuf    [%u];\n",lut_startAddrBnGroups[idxBnGroup]);
+        fprintf(fd,"    p_llrProcBuf    = (simde__m256i*) &llrProcBuf   [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd,"    p_llrRes        = (simde__m256i*) &llrRes       [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
             // First 32 LLRs of first CN
-        fprintf(fd,"            ymmRes0 = simde_mm256_cvtepi8_epi16(p_bnProcBuf [j]);\n");
-        fprintf(fd,"            ymmRes1 = simde_mm256_cvtepi8_epi16(p_bnProcBuf [j +1]);\n");
+        // Loop over BNs
+        fprintf(fd,"            for (int i=0;i<M;i++) {\n");
+        fprintf(fd,"            p_llrRes[i] = p_bnProcBuf[i];\n");
 
             // Loop over CNs
         for (k=1; k<22; k++)
         {
-        fprintf(fd,"            ymm0 = simde_mm256_cvtepi8_epi16(p_bnProcBuf[%u + j]);\n", k*cnOffsetInGroup);
-        fprintf(fd,"            ymmRes0 = simde_mm256_adds_epi16(ymmRes0, ymm0);\n");
-
-        fprintf(fd,"            ymm1 = simde_mm256_cvtepi8_epi16(p_bnProcBuf[%u + j +1]);\n", k*cnOffsetInGroup);
-
-        fprintf(fd, "           ymmRes1 = simde_mm256_adds_epi16(ymmRes1, ymm1); \n");
+        fprintf(fd,"            p_llrRes[i] = simde_mm256_adds_epi8(p_llrRes[i], p_bnProcBuf[%u + i]);\n",k*cnOffsetInGroup);
         }
 
             // Add LLR from receiver input
-        fprintf(fd,"            ymm0    = simde_mm256_cvtepi8_epi16(p_llrProcBuf[j]);\n");
-        fprintf(fd,"            ymmRes0 = simde_mm256_adds_epi16(ymmRes0, ymm0);\n");
+        fprintf(fd,"            p_llrRes[i] = simde_mm256_adds_epi8(p_llrRes[i], p_llrProcBuf[i]);\n");
 
-        fprintf(fd,"            ymm1    = simde_mm256_cvtepi8_epi16(p_llrProcBuf[j +1 ]);\n");
-        fprintf(fd,"            ymmRes1 = simde_mm256_adds_epi16(ymmRes1, ymm1);\n");
 
-            // Pack results back to epi8
-        fprintf(fd,"            ymm0 = simde_mm256_packs_epi16(ymmRes0, ymmRes1);\n");
-            // ymm0     = [ymmRes1[255:128] ymmRes0[255:128] ymmRes1[127:0] ymmRes0[127:0]]
-            // p_llrRes = [ymmRes1[255:128] ymmRes1[127:0] ymmRes0[255:128] ymmRes0[127:0]]
-         fprintf(fd,"            p_llrRes[i] = simde_mm256_permute4x64_epi64(ymm0, 0xD8);\n");
 
         fprintf(fd,"}\n");
     }
 
     // =====================================================================
-    // Process group with 13 CNs
+    // Process group with 23 CNs
 
 
 fprintf(fd,  "// Process group with <23 CNs \n");
@@ -1348,38 +996,24 @@ fprintf(fd,  "// Process group with <23 CNs \n");
         cnOffsetInGroup = (lut_numBnInBnGroups[22]*NR_LDPC_ZMAX)>>4;
 
         // Set pointers to start of group with 23 CNs
-        fprintf(fd,"    p_bnProcBuf     = (__m128i*) &bnProcBuf    [%u];\n",lut_startAddrBnGroups[idxBnGroup]);
-        fprintf(fd,"    p_llrProcBuf    = (__m128i*) &llrProcBuf   [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
-        fprintf(fd,"    p_llrRes        = (__m256i*) &llrRes       [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
-        // Loop over BNs
-        fprintf(fd,"        for (int i=0,j=0;i<M;i++,j+=2) {\n");
+        fprintf(fd,"    p_bnProcBuf     = (simde__m256i*) &bnProcBuf    [%u];\n",lut_startAddrBnGroups[idxBnGroup]);
+        fprintf(fd,"    p_llrProcBuf    = (simde__m256i*) &llrProcBuf   [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd,"    p_llrRes        = (simde__m256i*) &llrRes       [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
             // First 32 LLRs of first CN
-        fprintf(fd,"        ymmRes0 = simde_mm256_cvtepi8_epi16(p_bnProcBuf [j]);\n");
-        fprintf(fd,"        ymmRes1 = simde_mm256_cvtepi8_epi16(p_bnProcBuf [j +1]);\n");
+        // Loop over BNs
+        fprintf(fd,"            for (int i=0;i<M;i++) {\n");
+        fprintf(fd,"            p_llrRes[i] = p_bnProcBuf[i];\n");
 
             // Loop over CNs
         for (k=1; k<23; k++)
         {
-        fprintf(fd,"        ymm0 = simde_mm256_cvtepi8_epi16(p_bnProcBuf[%u + j]);\n", k*cnOffsetInGroup);
-        fprintf(fd,"        ymmRes0 = simde_mm256_adds_epi16(ymmRes0, ymm0);\n");
-
-        fprintf(fd,"        ymm1 = simde_mm256_cvtepi8_epi16(p_bnProcBuf[%u + j +1]);\n", k*cnOffsetInGroup);
-
-        fprintf(fd,"        ymmRes1 = simde_mm256_adds_epi16(ymmRes1, ymm1); \n");
+        fprintf(fd,"            p_llrRes[i] = simde_mm256_adds_epi8(p_llrRes[i], p_bnProcBuf[%u + i]);\n",k*cnOffsetInGroup);
         }
 
             // Add LLR from receiver input
-        fprintf(fd,"        ymm0    = simde_mm256_cvtepi8_epi16(p_llrProcBuf[j]);\n");
-        fprintf(fd,"        ymmRes0 = simde_mm256_adds_epi16(ymmRes0, ymm0);\n");
+        fprintf(fd,"            p_llrRes[i] = simde_mm256_adds_epi8(p_llrRes[i], p_llrProcBuf[i]);\n");
 
-        fprintf(fd,"        ymm1    = simde_mm256_cvtepi8_epi16(p_llrProcBuf[j +1 ]);\n");
-        fprintf(fd,"        ymmRes1 = simde_mm256_adds_epi16(ymmRes1, ymm1);\n");
 
-            // Pack results back to epi8
-        fprintf(fd,"        ymm0 = simde_mm256_packs_epi16(ymmRes0, ymmRes1);\n");
-            // ymm0     = [ymmRes1[255:128] ymmRes0[255:128] ymmRes1[127:0] ymmRes0[127:0]]
-            // p_llrRes = [ymmRes1[255:128] ymmRes1[127:0] ymmRes0[255:128] ymmRes0[127:0]]
-        fprintf(fd,"            p_llrRes[i] = simde_mm256_permute4x64_epi64(ymm0, 0xD8);\n");
 
         fprintf(fd,"}\n");
     }
@@ -1387,11 +1021,11 @@ fprintf(fd,  "// Process group with <23 CNs \n");
 
 
     // =====================================================================
-    // Process group with 4 CNs
+    // Process group with 24 CNs
 
 fprintf(fd,  "// Process group with 24 CNs \n");
 
- // Process group with 4 CNs
+ // Process group with 24 CNs
 
     if (lut_numBnInBnGroups[23] > 0)
     {
@@ -1405,38 +1039,24 @@ fprintf(fd,  "// Process group with 24 CNs \n");
         cnOffsetInGroup = (lut_numBnInBnGroups[23]*NR_LDPC_ZMAX)>>4;
 
         // Set pointers to start of group 2
-        fprintf(fd,"    p_bnProcBuf     = (__m128i*) &bnProcBuf    [%u];\n",lut_startAddrBnGroups[idxBnGroup]);
-        fprintf(fd,"    p_llrProcBuf    = (__m128i*) &llrProcBuf   [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
-        fprintf(fd,"    p_llrRes        = (__m256i*) &llrRes       [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
-        // Loop over BNs
-        fprintf(fd,"        for (int i=0,j=0;i<M;i++,j+=2) {\n");
+        fprintf(fd,"    p_bnProcBuf     = (simde__m256i*) &bnProcBuf    [%u];\n",lut_startAddrBnGroups[idxBnGroup]);
+        fprintf(fd,"    p_llrProcBuf    = (simde__m256i*) &llrProcBuf   [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd,"    p_llrRes        = (simde__m256i*) &llrRes       [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
             // First 16 LLRs of first CN
-        fprintf(fd,"        ymmRes0 = simde_mm256_cvtepi8_epi16(p_bnProcBuf [j]);\n");
-        fprintf(fd,"        ymmRes1 = simde_mm256_cvtepi8_epi16(p_bnProcBuf [j +1]);\n");
+        // Loop over BNs
+        fprintf(fd,"            for (int i=0;i<M;i++) {\n");
+        fprintf(fd,"            p_llrRes[i] = p_bnProcBuf [i];\n");
 
             // Loop over CNs
         for (k=1; k<24; k++)
         {
-        fprintf(fd,"        ymm0 = simde_mm256_cvtepi8_epi16(p_bnProcBuf[%u + j]);\n", k*cnOffsetInGroup);
-        fprintf(fd,"        ymmRes0 = simde_mm256_adds_epi16(ymmRes0, ymm0);\n");
-
-        fprintf(fd,"        ymm1 = simde_mm256_cvtepi8_epi16(p_bnProcBuf[%u + j +1]);\n", k*cnOffsetInGroup);
-
-        fprintf(fd, "       ymmRes1 = simde_mm256_adds_epi16(ymmRes1, ymm1); \n");
+        fprintf(fd,"            p_llrRes[i] = simde_mm256_adds_epi8(p_llrRes[i], p_bnProcBuf[%u + i]);\n",k*cnOffsetInGroup);
         }
 
             // Add LLR from receiver input
-        fprintf(fd,"        ymm0    = simde_mm256_cvtepi8_epi16(p_llrProcBuf[j]);\n");
-        fprintf(fd,"        ymmRes0 = simde_mm256_adds_epi16(ymmRes0, ymm0);\n");
+        fprintf(fd,"            p_llrRes[i] = simde_mm256_adds_epi8(p_llrRes[i], p_llrProcBuf[i]);\n");
 
-        fprintf(fd,"        ymm1    = simde_mm256_cvtepi8_epi16(p_llrProcBuf[j +1 ]);\n");
-        fprintf(fd,"        ymmRes1 = simde_mm256_adds_epi16(ymmRes1, ymm1);\n");
 
-            // Pack results back to epi8
-        fprintf(fd,"        ymm0 = simde_mm256_packs_epi16(ymmRes0, ymmRes1);\n");
-            // ymm0     = [ymmRes1[255:128] ymmRes0[255:128] ymmRes1[127:0] ymmRes0[127:0]]
-            // p_llrRes = [ymmRes1[255:128] ymmRes1[127:0] ymmRes0[255:128] ymmRes0[127:0]]
-        fprintf(fd,"            p_llrRes[i] = simde_mm256_permute4x64_epi64(ymm0, 0xD8);\n");
 
         fprintf(fd,"}\n");
     }
@@ -1461,38 +1081,24 @@ fprintf(fd,  "// Process group with 25 CNs \n");
         cnOffsetInGroup = (lut_numBnInBnGroups[24]*NR_LDPC_ZMAX)>>4;
 
         // Set pointers to start of group 2
-        fprintf(fd,"    p_bnProcBuf     = (__m128i*) &bnProcBuf    [%u];\n",lut_startAddrBnGroups[idxBnGroup]);
-        fprintf(fd,"    p_llrProcBuf    = (__m128i*) &llrProcBuf   [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
-        fprintf(fd,"    p_llrRes        = (__m256i*) &llrRes       [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
-        // Loop over BNs
-        fprintf(fd,"        for (int i=0,j=0;i<M;i++,j+=2) {\n");
+        fprintf(fd,"    p_bnProcBuf     = (simde__m256i*) &bnProcBuf    [%u];\n",lut_startAddrBnGroups[idxBnGroup]);
+        fprintf(fd,"    p_llrProcBuf    = (simde__m256i*) &llrProcBuf   [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd,"    p_llrRes        = (simde__m256i*) &llrRes       [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
             // First 16 LLRs of first CN
-        fprintf(fd,"        ymmRes0 = simde_mm256_cvtepi8_epi16(p_bnProcBuf [j]);\n");
-        fprintf(fd,"        ymmRes1 = simde_mm256_cvtepi8_epi16(p_bnProcBuf [j +1]);\n");
+        // Loop over BNs
+        fprintf(fd,"            for (int i=0;i<M;i++) {\n");
+        fprintf(fd,"            p_llrRes[i] = p_bnProcBuf [i];\n");
 
             // Loop over CNs
         for (k=1; k<25; k++)
         {
-        fprintf(fd,"        ymm0 = simde_mm256_cvtepi8_epi16(p_bnProcBuf[%u + j]);\n", k*cnOffsetInGroup);
-        fprintf(fd,"        ymmRes0 = simde_mm256_adds_epi16(ymmRes0, ymm0);\n");
-
-        fprintf(fd,"        ymm1 = simde_mm256_cvtepi8_epi16(p_bnProcBuf[%u + j +1]);\n", k*cnOffsetInGroup);
-
-        fprintf(fd, "       ymmRes1 = simde_mm256_adds_epi16(ymmRes1, ymm1); \n");
+        fprintf(fd,"            p_llrRes[i] = simde_mm256_adds_epi8(p_llrRes[i], p_bnProcBuf[%u + i]);\n",k*cnOffsetInGroup);
         }
 
             // Add LLR from receiver input
-        fprintf(fd,"        ymm0    = simde_mm256_cvtepi8_epi16(p_llrProcBuf[j]);\n");
-        fprintf(fd,"        ymmRes0 = simde_mm256_adds_epi16(ymmRes0, ymm0);\n");
+        fprintf(fd,"            p_llrRes[i] = simde_mm256_adds_epi8(p_llrRes[i], p_llrProcBuf[i]);\n");
 
-        fprintf(fd,"        ymm1    = simde_mm256_cvtepi8_epi16(p_llrProcBuf[j +1 ]);\n");
-        fprintf(fd,"        ymmRes1 = simde_mm256_adds_epi16(ymmRes1, ymm1);\n");
 
-            // Pack results back to epi8
-        fprintf(fd,"        ymm0 = simde_mm256_packs_epi16(ymmRes0, ymmRes1);\n");
-            // ymm0     = [ymmRes1[255:128] ymmRes0[255:128] ymmRes1[127:0] ymmRes0[127:0]]
-            // p_llrRes = [ymmRes1[255:128] ymmRes1[127:0] ymmRes0[255:128] ymmRes0[127:0]]
-        fprintf(fd,"            p_llrRes[i] = simde_mm256_permute4x64_epi64(ymm0, 0xD8);\n");
 
         fprintf(fd,"}\n");
     }
@@ -1518,49 +1124,35 @@ fprintf(fd,  "// Process group with 26 CNs \n");
         cnOffsetInGroup = (lut_numBnInBnGroups[25]*NR_LDPC_ZMAX)>>4;
 
         // Set pointers to start of group 2
-        fprintf(fd,"    p_bnProcBuf     = (__m128i*) &bnProcBuf    [%u];\n",lut_startAddrBnGroups[idxBnGroup]);
-        fprintf(fd,"    p_llrProcBuf    = (__m128i*) &llrProcBuf   [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
-        fprintf(fd,"    p_llrRes        = (__m256i*) &llrRes       [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
-        // Loop over BNs
-        fprintf(fd,"        for (int i=0,j=0;i<M;i++,j+=2) {\n");
+        fprintf(fd,"    p_bnProcBuf     = (simde__m256i*) &bnProcBuf    [%u];\n",lut_startAddrBnGroups[idxBnGroup]);
+        fprintf(fd,"    p_llrProcBuf    = (simde__m256i*) &llrProcBuf   [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd,"    p_llrRes        = (simde__m256i*) &llrRes       [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
             // First 16 LLRs of first CN
-        fprintf(fd,"        ymmRes0 = simde_mm256_cvtepi8_epi16(p_bnProcBuf [j]);\n");
-        fprintf(fd,"        ymmRes1 = simde_mm256_cvtepi8_epi16(p_bnProcBuf [j +1]);\n");
+        // Loop over BNs
+        fprintf(fd,"            for (int i=0;i<M;i++) {\n");
+        fprintf(fd,"            p_llrRes[i] = p_bnProcBuf [i];\n");
 
             // Loop over CNs
         for (k=1; k<26; k++)
         {
-        fprintf(fd,"        ymm0 = simde_mm256_cvtepi8_epi16(p_bnProcBuf[%u + j]);\n", k*cnOffsetInGroup);
-        fprintf(fd,"        ymmRes0 = simde_mm256_adds_epi16(ymmRes0, ymm0);\n");
-
-        fprintf(fd,"        ymm1 = simde_mm256_cvtepi8_epi16(p_bnProcBuf[%u + j +1]);\n", k*cnOffsetInGroup);
-
-        fprintf(fd, "       ymmRes1 = simde_mm256_adds_epi16(ymmRes1, ymm1); \n");
+        fprintf(fd,"            p_llrRes[i] = simde_mm256_adds_epi8(p_llrRes[i], p_bnProcBuf[%u + i]);\n",k*cnOffsetInGroup);
         }
 
             // Add LLR from receiver input
-        fprintf(fd,"        ymm0    = simde_mm256_cvtepi8_epi16(p_llrProcBuf[j]);\n");
-        fprintf(fd,"        ymmRes0 = simde_mm256_adds_epi16(ymmRes0, ymm0);\n");
+        fprintf(fd,"            p_llrRes[i] = simde_mm256_adds_epi8(p_llrRes[i], p_llrProcBuf[i]);\n");
 
-        fprintf(fd,"        ymm1    = simde_mm256_cvtepi8_epi16(p_llrProcBuf[j +1 ]);\n");
-        fprintf(fd,"        ymmRes1 = simde_mm256_adds_epi16(ymmRes1, ymm1);\n");
 
-            // Pack results back to epi8
-        fprintf(fd,"        ymm0 = simde_mm256_packs_epi16(ymmRes0, ymmRes1);\n");
-            // ymm0     = [ymmRes1[255:128] ymmRes0[255:128] ymmRes1[127:0] ymmRes0[127:0]]
-            // p_llrRes = [ymmRes1[255:128] ymmRes1[127:0] ymmRes0[255:128] ymmRes0[127:0]]
-        fprintf(fd,"            p_llrRes[i] = simde_mm256_permute4x64_epi64(ymm0, 0xD8);\n");
 
         fprintf(fd,"}\n");
     }
 
 
    // =====================================================================
-    // Process group with 17 CNs
+    // Process group with 27 CNs
 
 fprintf(fd,  "// Process group with 27 CNs \n");
 
- // Process group with 17 CNs
+ // Process group with 27 CNs
 
     if (lut_numBnInBnGroups[26] > 0)
     {
@@ -1574,49 +1166,35 @@ fprintf(fd,  "// Process group with 27 CNs \n");
         cnOffsetInGroup = (lut_numBnInBnGroups[26]*NR_LDPC_ZMAX)>>4;
 
         // Set pointers to start of group 2
-        fprintf(fd,"    p_bnProcBuf     = (__m128i*) &bnProcBuf    [%u];\n",lut_startAddrBnGroups[idxBnGroup]);
-        fprintf(fd,"    p_llrProcBuf    = (__m128i*) &llrProcBuf   [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
-        fprintf(fd,"    p_llrRes        = (__m256i*) &llrRes       [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
-        // Loop over BNs
-        fprintf(fd,"        for (int i=0,j=0;i<M;i++,j+=2) {\n");
+        fprintf(fd,"    p_bnProcBuf     = (simde__m256i*) &bnProcBuf    [%u];\n",lut_startAddrBnGroups[idxBnGroup]);
+        fprintf(fd,"    p_llrProcBuf    = (simde__m256i*) &llrProcBuf   [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd,"    p_llrRes        = (simde__m256i*) &llrRes       [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
             // First 16 LLRs of first CN
-        fprintf(fd,"        ymmRes0 = simde_mm256_cvtepi8_epi16(p_bnProcBuf [j]);\n");
-        fprintf(fd,"        ymmRes1 = simde_mm256_cvtepi8_epi16(p_bnProcBuf [j +1]);\n");
+        // Loop over BNs
+        fprintf(fd,"            for (int i=0;i<M;i++) {\n");
+        fprintf(fd,"            p_llrRes[i] = p_bnProcBuf [i];\n");
 
             // Loop over CNs
         for (k=1; k<27; k++)
         {
-        fprintf(fd,"        ymm0 = simde_mm256_cvtepi8_epi16(p_bnProcBuf[%u + j]);\n", k*cnOffsetInGroup);
-        fprintf(fd,"        ymmRes0 = simde_mm256_adds_epi16(ymmRes0, ymm0);\n");
-
-        fprintf(fd,"        ymm1 = simde_mm256_cvtepi8_epi16(p_bnProcBuf[%u + j +1]);\n", k*cnOffsetInGroup);
-
-        fprintf(fd, "       ymmRes1 = simde_mm256_adds_epi16(ymmRes1, ymm1); \n");
+        fprintf(fd,"            p_llrRes[i] = simde_mm256_adds_epi8(p_llrRes[i], p_bnProcBuf[%u + i]);\n",k*cnOffsetInGroup);
         }
 
             // Add LLR from receiver input
-        fprintf(fd,"        ymm0    = simde_mm256_cvtepi8_epi16(p_llrProcBuf[j]);\n");
-        fprintf(fd,"        ymmRes0 = simde_mm256_adds_epi16(ymmRes0, ymm0);\n");
+        fprintf(fd,"            p_llrRes[i] = simde_mm256_adds_epi8(p_llrRes[i], p_llrProcBuf[i]);\n");
 
-        fprintf(fd,"        ymm1    = simde_mm256_cvtepi8_epi16(p_llrProcBuf[j +1 ]);\n");
-        fprintf(fd,"        ymmRes1 = simde_mm256_adds_epi16(ymmRes1, ymm1);\n");
 
-            // Pack results back to epi8
-        fprintf(fd,"        ymm0 = simde_mm256_packs_epi16(ymmRes0, ymmRes1);\n");
-            // ymm0     = [ymmRes1[255:128] ymmRes0[255:128] ymmRes1[127:0] ymmRes0[127:0]]
-            // p_llrRes = [ymmRes1[255:128] ymmRes1[127:0] ymmRes0[255:128] ymmRes0[127:0]]
-        fprintf(fd,"            p_llrRes[i] = simde_mm256_permute4x64_epi64(ymm0, 0xD8);\n");
 
         fprintf(fd,"}\n");
     }
 
 
    // =====================================================================
-    // Process group with 18 CNs
+    // Process group with 28 CNs
 
 fprintf(fd,  "// Process group with 28 CNs \n");
 
- // Process group with 8 CNs
+ // Process group with 28 CNs
 
     if (lut_numBnInBnGroups[27] > 0)
     {
@@ -1630,48 +1208,34 @@ fprintf(fd,  "// Process group with 28 CNs \n");
         cnOffsetInGroup = (lut_numBnInBnGroups[27]*NR_LDPC_ZMAX)>>4;
 
         // Set pointers to start of group 2
-        fprintf(fd,"    p_bnProcBuf     = (__m128i*) &bnProcBuf    [%u];\n",lut_startAddrBnGroups[idxBnGroup]);
-        fprintf(fd,"    p_llrProcBuf    = (__m128i*) &llrProcBuf   [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
-        fprintf(fd,"    p_llrRes        = (__m256i*) &llrRes       [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
-        // Loop over BNs
-        fprintf(fd,"        for (int i=0,j=0;i<M;i++,j+=2) {\n");
+        fprintf(fd,"    p_bnProcBuf     = (simde__m256i*) &bnProcBuf    [%u];\n",lut_startAddrBnGroups[idxBnGroup]);
+        fprintf(fd,"    p_llrProcBuf    = (simde__m256i*) &llrProcBuf   [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd,"    p_llrRes        = (simde__m256i*) &llrRes       [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
             // First 16 LLRs of first CN
-        fprintf(fd,"        ymmRes0 = simde_mm256_cvtepi8_epi16(p_bnProcBuf [j]);\n");
-        fprintf(fd,"        ymmRes1 = simde_mm256_cvtepi8_epi16(p_bnProcBuf [j +1]);\n");
+        // Loop over BNs
+        fprintf(fd,"            for (int i=0;i<M;i++) {\n");
+        fprintf(fd,"            p_llrRes[i] = p_bnProcBuf [i];\n");
 
             // Loop over CNs
         for (k=1; k<28; k++)
         {
-        fprintf(fd,"        ymm0 = simde_mm256_cvtepi8_epi16(p_bnProcBuf[%u + j]);\n", k*cnOffsetInGroup);
-        fprintf(fd,"        ymmRes0 = simde_mm256_adds_epi16(ymmRes0, ymm0);\n");
-
-        fprintf(fd,"        ymm1 = simde_mm256_cvtepi8_epi16(p_bnProcBuf[%u + j +1]);\n", k*cnOffsetInGroup);
-
-        fprintf(fd, "       ymmRes1 = simde_mm256_adds_epi16(ymmRes1, ymm1); \n");
+        fprintf(fd,"            p_llrRes[i] = simde_mm256_adds_epi8(p_llrRes[i], p_bnProcBuf[%u + i]);\n",k*cnOffsetInGroup);
         }
 
             // Add LLR from receiver input
-        fprintf(fd,"        ymm0    = simde_mm256_cvtepi8_epi16(p_llrProcBuf[j]);\n");
-        fprintf(fd,"        ymmRes0 = simde_mm256_adds_epi16(ymmRes0, ymm0);\n");
+        fprintf(fd,"            p_llrRes[i] = simde_mm256_adds_epi8(p_llrRes[i], p_llrProcBuf[i]);\n");
 
-        fprintf(fd,"        ymm1    = simde_mm256_cvtepi8_epi16(p_llrProcBuf[j +1 ]);\n");
-        fprintf(fd,"        ymmRes1 = simde_mm256_adds_epi16(ymmRes1, ymm1);\n");
 
-            // Pack results back to epi8
-        fprintf(fd,"        ymm0 = simde_mm256_packs_epi16(ymmRes0, ymmRes1);\n");
-            // ymm0     = [ymmRes1[255:128] ymmRes0[255:128] ymmRes1[127:0] ymmRes0[127:0]]
-            // p_llrRes = [ymmRes1[255:128] ymmRes1[127:0] ymmRes0[255:128] ymmRes0[127:0]]
-        fprintf(fd,"            p_llrRes[i] = simde_mm256_permute4x64_epi64(ymm0, 0xD8);\n");
 
         fprintf(fd,"}\n");
     }
 
    // =====================================================================
-    // Process group with 9 CNs
+    // Process group with 29 CNs
 
 fprintf(fd,  "// Process group with 29 CNs \n");
 
- // Process group with 9 CNs
+ // Process group with 29 CNs
 
     if (lut_numBnInBnGroups[28] > 0)
     {
@@ -1685,49 +1249,35 @@ fprintf(fd,  "// Process group with 29 CNs \n");
         cnOffsetInGroup = (lut_numBnInBnGroups[28]*NR_LDPC_ZMAX)>>4;
 
         // Set pointers to start of group 2
-        fprintf(fd,"    p_bnProcBuf     = (__m128i*) &bnProcBuf    [%u];\n",lut_startAddrBnGroups[idxBnGroup]);
-        fprintf(fd,"    p_llrProcBuf    = (__m128i*) &llrProcBuf   [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
-        fprintf(fd,"    p_llrRes        = (__m256i*) &llrRes       [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
-        // Loop over BNs
-        fprintf(fd,"        for (int i=0,j=0;i<M;i++,j+=2) {\n");
+        fprintf(fd,"    p_bnProcBuf     = (simde__m256i*) &bnProcBuf    [%u];\n",lut_startAddrBnGroups[idxBnGroup]);
+        fprintf(fd,"    p_llrProcBuf    = (simde__m256i*) &llrProcBuf   [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd,"    p_llrRes        = (simde__m256i*) &llrRes       [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
             // First 16 LLRs of first CN
-        fprintf(fd,"        ymmRes0 = simde_mm256_cvtepi8_epi16(p_bnProcBuf [j]);\n");
-        fprintf(fd,"        ymmRes1 = simde_mm256_cvtepi8_epi16(p_bnProcBuf [j +1]);\n");
+        // Loop over BNs
+        fprintf(fd,"            for (int i=0;i<M;i++) {\n");
+        fprintf(fd,"            p_llrRes[i] = p_bnProcBuf [i];\n");
 
             // Loop over CNs
         for (k=1; k<29; k++)
         {
-        fprintf(fd,"        ymm0 = simde_mm256_cvtepi8_epi16(p_bnProcBuf[%u + j]);\n", k*cnOffsetInGroup);
-        fprintf(fd,"        ymmRes0 = simde_mm256_adds_epi16(ymmRes0, ymm0);\n");
-
-        fprintf(fd,"        ymm1 = simde_mm256_cvtepi8_epi16(p_bnProcBuf[%u + j +1]);\n", k*cnOffsetInGroup);
-
-        fprintf(fd, "       ymmRes1 = simde_mm256_adds_epi16(ymmRes1, ymm1); \n");
+        fprintf(fd,"            p_llrRes[i] = simde_mm256_adds_epi8(p_llrRes[i], p_bnProcBuf[%u + i]);\n",k*cnOffsetInGroup);
         }
 
             // Add LLR from receiver input
-        fprintf(fd,"        ymm0    = simde_mm256_cvtepi8_epi16(p_llrProcBuf[j]);\n");
-        fprintf(fd,"        ymmRes0 = simde_mm256_adds_epi16(ymmRes0, ymm0);\n");
+        fprintf(fd,"            p_llrRes[i] = simde_mm256_adds_epi8(p_llrRes[i], p_llrProcBuf[i]);\n");
 
-        fprintf(fd,"        ymm1    = simde_mm256_cvtepi8_epi16(p_llrProcBuf[j +1 ]);\n");
-        fprintf(fd,"        ymmRes1 = simde_mm256_adds_epi16(ymmRes1, ymm1);\n");
 
-            // Pack results back to epi8
-        fprintf(fd,"        ymm0 = simde_mm256_packs_epi16(ymmRes0, ymmRes1);\n");
-            // ymm0     = [ymmRes1[255:128] ymmRes0[255:128] ymmRes1[127:0] ymmRes0[127:0]]
-            // p_llrRes = [ymmRes1[255:128] ymmRes1[127:0] ymmRes0[255:128] ymmRes0[127:0]]
-        fprintf(fd,"            p_llrRes[i] = simde_mm256_permute4x64_epi64(ymm0, 0xD8);\n");
 
         fprintf(fd,"}\n");
     }
 
 
    // =====================================================================
-    // Process group with 20 CNs
+    // Process group with 30 CNs
 
 fprintf(fd,  "// Process group with 30 CNs \n");
 
- // Process group with 20 CNs
+ // Process group with 30 CNs
 
     if (lut_numBnInBnGroups[29] > 0)
     {
@@ -1741,44 +1291,30 @@ fprintf(fd,  "// Process group with 30 CNs \n");
         cnOffsetInGroup = (lut_numBnInBnGroups[29]*NR_LDPC_ZMAX)>>4;
 
         // Set pointers to start of group 2
-        fprintf(fd,"    p_bnProcBuf     = (__m128i*) &bnProcBuf    [%u];\n",lut_startAddrBnGroups[idxBnGroup]);
-        fprintf(fd,"    p_llrProcBuf    = (__m128i*) &llrProcBuf   [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
-        fprintf(fd,"    p_llrRes        = (__m256i*) &llrRes       [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
-        // Loop over BNs
-        fprintf(fd,"        for (int i=0,j=0;i<M;i++,j+=2) {\n");
+        fprintf(fd,"    p_bnProcBuf     = (simde__m256i*) &bnProcBuf    [%u];\n",lut_startAddrBnGroups[idxBnGroup]);
+        fprintf(fd,"    p_llrProcBuf    = (simde__m256i*) &llrProcBuf   [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd,"    p_llrRes        = (simde__m256i*) &llrRes       [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
             // First 16 LLRs of first CN
-        fprintf(fd,"        ymmRes0 = simde_mm256_cvtepi8_epi16(p_bnProcBuf [j]);\n");
-        fprintf(fd,"        ymmRes1 = simde_mm256_cvtepi8_epi16(p_bnProcBuf [j +1]);\n");
+        // Loop over BNs
+        fprintf(fd,"            for (int i=0;i<M;i++) {\n");
+        fprintf(fd,"            p_llrRes[i] = p_bnProcBuf [i];\n");
 
             // Loop over CNs
         for (k=1; k<30; k++)
         {
-        fprintf(fd,"        ymm0 = simde_mm256_cvtepi8_epi16(p_bnProcBuf[%u + j]);\n", k*cnOffsetInGroup);
-        fprintf(fd,"        ymmRes0 = simde_mm256_adds_epi16(ymmRes0, ymm0);\n");
-
-        fprintf(fd,"        ymm1 = simde_mm256_cvtepi8_epi16(p_bnProcBuf[%u + j +1]);\n", k*cnOffsetInGroup);
-
-        fprintf(fd, "       ymmRes1 = simde_mm256_adds_epi16(ymmRes1, ymm1); \n");
+        fprintf(fd,"            p_llrRes[i] = simde_mm256_adds_epi8(p_llrRes[i], p_bnProcBuf[%u + i]);\n",k*cnOffsetInGroup);
         }
 
             // Add LLR from receiver input
-        fprintf(fd,"        ymm0    = simde_mm256_cvtepi8_epi16(p_llrProcBuf[j]);\n");
-        fprintf(fd,"        ymmRes0 = simde_mm256_adds_epi16(ymmRes0, ymm0);\n");
+        fprintf(fd,"            p_llrRes[i] = simde_mm256_adds_epi8(p_llrRes[i], p_llrProcBuf[i]);\n");
 
-        fprintf(fd,"        ymm1    = simde_mm256_cvtepi8_epi16(p_llrProcBuf[j +1 ]);\n");
-        fprintf(fd,"        ymmRes1 = simde_mm256_adds_epi16(ymmRes1, ymm1);\n");
 
-            // Pack results back to epi8
-        fprintf(fd,"        ymm0 = simde_mm256_packs_epi16(ymmRes0, ymmRes1);\n");
-            // ymm0     = [ymmRes1[255:128] ymmRes0[255:128] ymmRes1[127:0] ymmRes0[127:0]]
-            // p_llrRes = [ymmRes1[255:128] ymmRes1[127:0] ymmRes0[255:128] ymmRes0[127:0]]
-        fprintf(fd,"            p_llrRes[i] = simde_mm256_permute4x64_epi64(ymm0, 0xD8);\n");
 
         fprintf(fd,"}\n");
     }
 
     fprintf(fd,"}\n");
-  fclose(fd);
+    fclose(fd);
 }//end of the function  nrLDPC_bnProcPc_BG1
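
Note on the BG1 hunks above: the rewritten generator accumulates the int8 LLRs directly with simde_mm256_adds_epi8 instead of widening each 128-bit half to int16 (simde_mm256_cvtepi8_epi16), accumulating with simde_mm256_adds_epi16, and packing/permuting back (simde_mm256_packs_epi16 + simde_mm256_permute4x64_epi64). This substantially reduces the per-vector instruction count and removes the lane fix-up, but it changes the saturation semantics: the old path effectively saturated once at the final pack (a few dozen int8 values cannot overflow int16), while adds_epi8 saturates after every addition. A minimal scalar sketch of the numerical difference, in plain C with illustrative values only (not a claim about actual decoder inputs):

    #include <stdio.h>
    #include <stdint.h>

    /* Saturate an int sum to the int8 range, like the SIMD adds/packs ops. */
    static int8_t sat8(int v) { return v > 127 ? 127 : (v < -128 ? -128 : (int8_t)v); }

    int main(void) {
      int8_t llr[3] = {100, 100, -100};
      /* New path: per-step int8 saturation, as simde_mm256_adds_epi8 does. */
      int8_t acc8 = llr[0];
      for (int k = 1; k < 3; k++) acc8 = sat8(acc8 + llr[k]);
      /* Old path: exact int16 accumulation, one saturation at the final pack. */
      int16_t acc16 = llr[0];
      for (int k = 1; k < 3; k++) acc16 = (int16_t)(acc16 + llr[k]);
      printf("adds_epi8 chain: %d, epi16 accumulate + pack: %d\n", acc8, sat8(acc16));
      return 0; /* prints 27 vs 100 */
    }
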
 
 
diff --git a/openair1/PHY/CODING/nrLDPC_decoder/nrLDPC_tools/generator_bnProc/bnProcPc_gen_BG2_128.c b/openair1/PHY/CODING/nrLDPC_decoder/nrLDPC_tools/generator_bnProc/bnProcPc_gen_BG2_128.c
new file mode 100644
index 0000000000000000000000000000000000000000..6332b9d1ba2c3be5da9c7fb280715df096ae5b32
--- /dev/null
+++ b/openair1/PHY/CODING/nrLDPC_decoder/nrLDPC_tools/generator_bnProc/bnProcPc_gen_BG2_128.c
@@ -0,0 +1,153 @@
+/*
+ * Licensed to the OpenAirInterface (OAI) Software Alliance under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The OpenAirInterface Software Alliance licenses this file to You under
+ * the OAI Public License, Version 1.1  (the "License"); you may not use this file
+ * except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.openairinterface.org/?page_id=698
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *-------------------------------------------------------------------------------
+ * For more information about the OpenAirInterface (OAI) Software Alliance:
+ *      contact@openairinterface.org
+ */
+
+#include <stdio.h>
+#include <stdint.h>
+#include "PHY/sse_intrin.h"
+#include "../../nrLDPCdecoder_defs.h"
+#include "../../nrLDPC_types.h"
+
+
+void nrLDPC_bnProcPc_BG2_generator_128(const char *dir, int R)
+{
+  const char *ratestr[3]={"15","13","23"};
+
+  if (R<0 || R>2) {printf("Illegal R %d\n",R); abort();}
+
+
+ // system("mkdir -p ../ldpc_gen_files");
+
+  char fname[FILENAME_MAX+1];
+  snprintf(fname, sizeof(fname), "%s/bnProcPc128/nrLDPC_bnProcPc_BG2_R%s_128.h", dir, ratestr[R]);
+  FILE *fd=fopen(fname,"w");
+  if (fd == NULL) {
+    printf("Cannot create file %s\n", fname);
+    abort();
+  }
+
+  fprintf(fd,"#include <stdint.h>\n");
+  fprintf(fd,"#include \"PHY/sse_intrin.h\"\n");
+
+  fprintf(fd,"static inline void nrLDPC_bnProcPc_BG2_R%s_128(int8_t* bnProcBuf,int8_t* bnProcBufRes,int8_t* llrRes ,  int8_t* llrProcBuf, uint16_t Z  ) {\n",ratestr[R]);
+    const uint8_t*  lut_numBnInBnGroups;
+    const uint32_t* lut_startAddrBnGroups;
+    const uint16_t* lut_startAddrBnGroupsLlr;
+    if (R==0) {
+      lut_numBnInBnGroups      = lut_numBnInBnGroups_BG2_R15;
+      lut_startAddrBnGroups    = lut_startAddrBnGroups_BG2_R15;
+      lut_startAddrBnGroupsLlr = lut_startAddrBnGroupsLlr_BG2_R15;
+    } else if (R==1) {
+      lut_numBnInBnGroups      = lut_numBnInBnGroups_BG2_R13;
+      lut_startAddrBnGroups    = lut_startAddrBnGroups_BG2_R13;
+      lut_startAddrBnGroupsLlr = lut_startAddrBnGroupsLlr_BG2_R13;
+    } else if (R==2) {
+      lut_numBnInBnGroups      = lut_numBnInBnGroups_BG2_R23;
+      lut_startAddrBnGroups    = lut_startAddrBnGroups_BG2_R23;
+      lut_startAddrBnGroupsLlr = lut_startAddrBnGroupsLlr_BG2_R23;
+    } else {
+      printf("aborting, illegal R %d\n", R);
+      fclose(fd);
+      abort();
+    }
+
+    // Loop counter over the CNs within a group
+    uint32_t k;
+    // Offset from one CN to the next within a group, in units of 16 bytes
+    uint32_t cnOffsetInGroup;
+    uint8_t idxBnGroup = 0;
+
+    fprintf(fd,"  // Process group with 1 CN\n");
+    fprintf(fd,"        uint32_t M = (%d*Z + 15)>>4;\n",lut_numBnInBnGroups[0]);
+
+    fprintf(fd,"        simde__m128i* p_bnProcBuf    = (simde__m128i*) &bnProcBuf    [%d];\n",lut_startAddrBnGroups   [idxBnGroup]);
+    fprintf(fd,"        simde__m128i* p_bnProcBufRes = (simde__m128i*) &bnProcBufRes [%d];\n",lut_startAddrBnGroups   [idxBnGroup]);
+    fprintf(fd,"        simde__m128i* p_llrProcBuf   = (simde__m128i*) &llrProcBuf   [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
+    fprintf(fd,"        simde__m128i* p_llrRes       = (simde__m128i*) &llrRes       [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
+    fprintf(fd,"        simde__m128i ymm0, ymm1, ymmRes0, ymmRes1;\n");
+
+
+    fprintf(fd,"        for (int i=0;i<M;i++) {\n");
+    fprintf(fd,"          p_bnProcBufRes[i] = p_llrProcBuf[i];\n");
+    fprintf(fd,"          ymm0 = simde_mm_cvtepi8_epi16(p_bnProcBuf [i]);\n");
+    fprintf(fd,"          ymm1 = simde_mm_cvtepi8_epi16(p_llrProcBuf[i]);\n");
+    fprintf(fd,"          ymmRes0 = simde_mm_adds_epi16(ymm0, ymm1);\n");
+    fprintf(fd,"          ymm0 = simde_mm_cvtepi8_epi16(simde_mm_srli_si128(p_bnProcBuf [i],8));\n");
+    fprintf(fd,"          ymm1 = simde_mm_cvtepi8_epi16(simde_mm_srli_si128(p_llrProcBuf[i],8));\n");
+    fprintf(fd,"          ymmRes1 = simde_mm_adds_epi16(ymm0, ymm1);\n");
+    fprintf(fd,"          *p_llrRes = simde_mm_packs_epi16(ymmRes0, ymmRes1);\n");
+    fprintf(fd,"          p_llrRes++;\n");
+    fprintf(fd,"        }\n");
+
+    for (uint32_t cnidx=1;cnidx<30;cnidx++) {
+    // Process groups with 2 to 30 CNs: group cnidx holds the BNs connected to (cnidx+1) CNs
+
+       if (lut_numBnInBnGroups[cnidx] > 0)
+       {
+        // If elements in group move to next address
+        idxBnGroup++;
+
+        fprintf(fd,"  M = (%d*Z + 15)>>4;\n",lut_numBnInBnGroups[cnidx]);
+
+        // Set the offset to each CN within a group in terms of 16 Byte
+        cnOffsetInGroup = (lut_numBnInBnGroups[cnidx]*NR_LDPC_ZMAX)>>4;
+
+        // Set pointers to the start of the current group
+        fprintf(fd,"  p_bnProcBuf     = (simde__m128i*) &bnProcBuf    [%d];\n",lut_startAddrBnGroups[idxBnGroup]);
+        fprintf(fd,"  p_llrProcBuf    = (simde__m128i*) &llrProcBuf   [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd,"  p_llrRes        = (simde__m128i*) &llrRes       [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
+
+        // Loop over BNs
+        fprintf(fd,"        for (int i=0;i<M;i++) {\n");
+            // First 16 LLRs of first CN
+        fprintf(fd,"        ymmRes0 = simde_mm_cvtepi8_epi16(p_bnProcBuf [i]);\n");
+        fprintf(fd,"        ymmRes1 = simde_mm_cvtepi8_epi16(simde_mm_srli_si128(p_bnProcBuf [i],8));\n");
+
+            // Loop over CNs
+        for (k=1; k<=cnidx; k++)
+        {
+           fprintf(fd,"        ymm0 = simde_mm_cvtepi8_epi16(p_bnProcBuf[%d + i]);\n", k*cnOffsetInGroup);
+           fprintf(fd,"        ymmRes0 = simde_mm_adds_epi16(ymmRes0, ymm0);\n");
+
+           fprintf(fd,"        ymm1 = simde_mm_cvtepi8_epi16(simde_mm_srli_si128(p_bnProcBuf[%d + i],8));\n", k*cnOffsetInGroup);
+
+           fprintf(fd, "       ymmRes1 = simde_mm_adds_epi16(ymmRes1, ymm1); \n");
+        }
+
+            // Add LLR from receiver input
+        fprintf(fd,"        ymm0    = simde_mm_cvtepi8_epi16(p_llrProcBuf[i]);\n");
+        fprintf(fd,"        ymmRes0 = simde_mm_adds_epi16(ymmRes0, ymm0);\n");
+
+        fprintf(fd,"        ymm1    = simde_mm_cvtepi8_epi16(simde_mm_srli_si128(p_llrProcBuf[i],8));\n");
+        fprintf(fd,"        ymmRes1 = simde_mm_adds_epi16(ymmRes1, ymm1);\n");
+
+            // Pack results back to epi8
+        fprintf(fd,"        *p_llrRes = simde_mm_packs_epi16(ymmRes0, ymmRes1);\n");
+        fprintf(fd,"        p_llrRes++;\n");
+
+        fprintf(fd,"   }\n");
+       }
+
+    }
+
+
+    fprintf(fd,"}\n");
+    fclose(fd);
+} // end of nrLDPC_bnProcPc_BG2_generator_128
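
For reference, the group-with-1-CN loop emitted by nrLDPC_bnProcPc_BG2_generator_128 expands to the following (reconstructed from the fprintf strings above, whitespace tidied; M, Z and the buffer pointers are declared by the generated header itself):

    for (int i = 0; i < M; i++) {
      p_bnProcBufRes[i] = p_llrProcBuf[i];
      /* widen the low 8 int8 LLRs of each 128-bit vector to int16 */
      ymm0 = simde_mm_cvtepi8_epi16(p_bnProcBuf[i]);
      ymm1 = simde_mm_cvtepi8_epi16(p_llrProcBuf[i]);
      ymmRes0 = simde_mm_adds_epi16(ymm0, ymm1);
      /* shift the high 8 bytes down, then widen and add those as well */
      ymm0 = simde_mm_cvtepi8_epi16(simde_mm_srli_si128(p_bnProcBuf[i], 8));
      ymm1 = simde_mm_cvtepi8_epi16(simde_mm_srli_si128(p_llrProcBuf[i], 8));
      ymmRes1 = simde_mm_adds_epi16(ymm0, ymm1);
      /* saturating pack restores the original byte order: low half, high half */
      *p_llrRes = simde_mm_packs_epi16(ymmRes0, ymmRes1);
      p_llrRes++;
    }

Unlike the 256-bit variant, simde_mm_packs_epi16(lo, hi) already places the packed halves back in their original order, so no permute fix-up is needed in this 128-bit path.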
diff --git a/openair1/PHY/CODING/nrLDPC_decoder/nrLDPC_tools/generator_bnProc/bnProcPc_gen_BG2_avx2.c b/openair1/PHY/CODING/nrLDPC_decoder/nrLDPC_tools/generator_bnProc/bnProcPc_gen_BG2_avx2.c
index a7b724494443668f12bc458ee10b999d76011598..484c56710b3db5f1cf5dc7172a6411ced39e117a 100644
--- a/openair1/PHY/CODING/nrLDPC_decoder/nrLDPC_tools/generator_bnProc/bnProcPc_gen_BG2_avx2.c
+++ b/openair1/PHY/CODING/nrLDPC_decoder/nrLDPC_tools/generator_bnProc/bnProcPc_gen_BG2_avx2.c
@@ -80,14 +80,14 @@ void nrLDPC_bnProcPc_BG2_generator_AVX2(const char *dir, int R)
     uint32_t cnOffsetInGroup;
     uint8_t idxBnGroup = 0;
 
-    fprintf(fd,"   __m256i ymm0, ymm1, ymmRes0, ymmRes1;  \n");
+    fprintf(fd,"  simde__m256i ymm0, ymm1, ymmRes0, ymmRes1;  \n");
 
 
-    fprintf(fd,"        __m128i* p_bnProcBuf; \n");
-    fprintf(fd,"        __m128i* p_llrProcBuf;\n");
-    fprintf(fd,"        __m256i* p_llrRes; \n");
-   // fprintf(fd,"        __m256i* p_bnProcBufRes; \n");
-   // fprintf(fd,"        __m256i* p_llrProcBuf256; \n");
+    fprintf(fd,"        simde__m128i* p_bnProcBuf; \n");
+    fprintf(fd,"        simde__m128i* p_llrProcBuf;\n");
+    fprintf(fd,"        simde__m256i* p_llrRes; \n");
+   // fprintf(fd,"        simde__m256i* p_bnProcBufRes; \n");
+   // fprintf(fd,"        simde__m256i* p_llrProcBuf256; \n");
     fprintf(fd,"         uint32_t M ;\n");
 
 
@@ -108,11 +108,11 @@ void nrLDPC_bnProcPc_BG2_generator_AVX2(const char *dir, int R)
        // cnOffsetInGroup = (lut_numBnInBnGroups[0]*NR_LDPC_ZMAX)>>4;
 
         // Set pointers to start of group 2
-        fprintf(fd,"    p_bnProcBuf     = (__m128i*) &bnProcBuf    [%d];\n",lut_startAddrBnGroups[idxBnGroup]);
-        fprintf(fd,"    p_bnProcBufRes     = (__m256i*) &bnProcBufRes    [%d];\n",lut_startAddrBnGroups[idxBnGroup]);
-        fprintf(fd,"    p_llrProcBuf    = (__m128i*) &llrProcBuf   [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
-         fprintf(fd,"   p_llrProcBuf256   = (__m256i*) &llrProcBuf   [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
-        fprintf(fd,"    p_llrRes        = (__m256i*) &llrRes       [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd,"    p_bnProcBuf     = (simde__m128i*) &bnProcBuf    [%d];\n",lut_startAddrBnGroups[idxBnGroup]);
+        fprintf(fd,"    p_bnProcBufRes     = (simde__m256i*) &bnProcBufRes    [%d];\n",lut_startAddrBnGroups[idxBnGroup]);
+        fprintf(fd,"    p_llrProcBuf    = (simde__m128i*) &llrProcBuf   [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
+         fprintf(fd,"   p_llrProcBuf256   = (simde__m256i*) &llrProcBuf   [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd,"    p_llrRes        = (simde__m256i*) &llrRes       [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
 
         // Loop over BNs
         fprintf(fd,"            for (int i=0,j=0;i<M;i++,j+=2) {\n");
@@ -159,9 +159,9 @@ fprintf(fd,  "// Process group with 2 CNs \n");
         cnOffsetInGroup = (lut_numBnInBnGroups[1]*NR_LDPC_ZMAX)>>4;
 
         // Set pointers to start of group 2
-        fprintf(fd,"    p_bnProcBuf     = (__m128i*) &bnProcBuf    [%u];\n",lut_startAddrBnGroups[idxBnGroup]);
-        fprintf(fd,"    p_llrProcBuf    = (__m128i*) &llrProcBuf   [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
-        fprintf(fd,"    p_llrRes        = (__m256i*) &llrRes       [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd,"    p_bnProcBuf     = (simde__m128i*) &bnProcBuf    [%u];\n",lut_startAddrBnGroups[idxBnGroup]);
+        fprintf(fd,"    p_llrProcBuf    = (simde__m128i*) &llrProcBuf   [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd,"    p_llrRes        = (simde__m256i*) &llrRes       [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
 
         // Loop over BNs
         fprintf(fd,"            for (int i=0,j=0;i<M;i++,j+=2) {\n");
@@ -217,9 +217,9 @@ fprintf(fd,  "// Process group with 3 CNs \n");
         cnOffsetInGroup = (lut_numBnInBnGroups[2]*NR_LDPC_ZMAX)>>4;
 
         // Set pointers to start of group 2
-        fprintf(fd,"    p_bnProcBuf     = (__m128i*) &bnProcBuf    [%u];\n",lut_startAddrBnGroups[idxBnGroup]);
-        fprintf(fd,"    p_llrProcBuf    = (__m128i*) &llrProcBuf   [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
-        fprintf(fd,"    p_llrRes        = (__m256i*) &llrRes       [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd,"    p_bnProcBuf     = (simde__m128i*) &bnProcBuf    [%u];\n",lut_startAddrBnGroups[idxBnGroup]);
+        fprintf(fd,"    p_llrProcBuf    = (simde__m128i*) &llrProcBuf   [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd,"    p_llrRes        = (simde__m256i*) &llrRes       [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
         // Loop over BNs
         fprintf(fd,"        for (int i=0,j=0;i<M;i++,j+=2) {\n");
             // First 16 LLRs of first CN
@@ -273,9 +273,9 @@ fprintf(fd,  "// Process group with 4 CNs \n");
         cnOffsetInGroup = (lut_numBnInBnGroups[3]*NR_LDPC_ZMAX)>>4;
 
         // Set pointers to start of group 2
-        fprintf(fd,"    p_bnProcBuf     = (__m128i*) &bnProcBuf    [%u];\n",lut_startAddrBnGroups[idxBnGroup]);
-        fprintf(fd,"    p_llrProcBuf    = (__m128i*) &llrProcBuf   [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
-        fprintf(fd,"    p_llrRes        = (__m256i*) &llrRes       [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd,"    p_bnProcBuf     = (simde__m128i*) &bnProcBuf    [%u];\n",lut_startAddrBnGroups[idxBnGroup]);
+        fprintf(fd,"    p_llrProcBuf    = (simde__m128i*) &llrProcBuf   [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd,"    p_llrRes        = (simde__m256i*) &llrRes       [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
 
         // Loop over BNs
         fprintf(fd,"        for (int i=0,j=0;i<M;i++,j+=2) {\n");
@@ -330,9 +330,9 @@ fprintf(fd,  "// Process group with 5 CNs \n");
         cnOffsetInGroup = (lut_numBnInBnGroups[4]*NR_LDPC_ZMAX)>>4;
 
         // Set pointers to start of group 2
-        fprintf(fd,"    p_bnProcBuf     = (__m128i*) &bnProcBuf    [%u];\n",lut_startAddrBnGroups[idxBnGroup]);
-        fprintf(fd,"    p_llrProcBuf    = (__m128i*) &llrProcBuf   [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
-        fprintf(fd,"    p_llrRes        = (__m256i*) &llrRes       [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd,"    p_bnProcBuf     = (simde__m128i*) &bnProcBuf    [%u];\n",lut_startAddrBnGroups[idxBnGroup]);
+        fprintf(fd,"    p_llrProcBuf    = (simde__m128i*) &llrProcBuf   [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd,"    p_llrRes        = (simde__m256i*) &llrRes       [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
         // Loop over BNs
         fprintf(fd,"        for (int i=0,j=0;i<M;i++,j+=2) {\n");
             // First 16 LLRs of first CN
@@ -387,9 +387,9 @@ fprintf(fd,  "// Process group with 6 CNs \n");
         cnOffsetInGroup = (lut_numBnInBnGroups[5]*NR_LDPC_ZMAX)>>4;
 
         // Set pointers to start of group 2
-        fprintf(fd,"    p_bnProcBuf     = (__m128i*) &bnProcBuf    [%u];\n",lut_startAddrBnGroups[idxBnGroup]);
-        fprintf(fd,"    p_llrProcBuf    = (__m128i*) &llrProcBuf   [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
-        fprintf(fd,"    p_llrRes        = (__m256i*) &llrRes       [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd,"    p_bnProcBuf     = (simde__m128i*) &bnProcBuf    [%u];\n",lut_startAddrBnGroups[idxBnGroup]);
+        fprintf(fd,"    p_llrProcBuf    = (simde__m128i*) &llrProcBuf   [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd,"    p_llrRes        = (simde__m256i*) &llrRes       [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
         // Loop over BNs
         fprintf(fd,"        for (int i=0,j=0;i<M;i++,j+=2) {\n");
             // First 16 LLRs of first CN
@@ -443,9 +443,9 @@ fprintf(fd,  "// Process group with 7 CNs \n");
         cnOffsetInGroup = (lut_numBnInBnGroups[6]*NR_LDPC_ZMAX)>>4;
 
         // Set pointers to start of group 2
-        fprintf(fd,"    p_bnProcBuf     = (__m128i*) &bnProcBuf    [%u];\n",lut_startAddrBnGroups[idxBnGroup]);
-        fprintf(fd,"    p_llrProcBuf    = (__m128i*) &llrProcBuf   [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
-        fprintf(fd,"    p_llrRes        = (__m256i*) &llrRes       [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd,"    p_bnProcBuf     = (simde__m128i*) &bnProcBuf    [%u];\n",lut_startAddrBnGroups[idxBnGroup]);
+        fprintf(fd,"    p_llrProcBuf    = (simde__m128i*) &llrProcBuf   [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd,"    p_llrRes        = (simde__m256i*) &llrRes       [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
         // Loop over BNs
         fprintf(fd,"        for (int i=0,j=0;i<M;i++,j+=2) {\n");
             // First 16 LLRs of first CN
@@ -474,7 +474,7 @@ fprintf(fd,  "// Process group with 7 CNs \n");
         fprintf(fd,"        ymm0 = simde_mm256_packs_epi16(ymmRes0, ymmRes1);\n");
             // ymm0     = [ymmRes1[255:128] ymmRes0[255:128] ymmRes1[127:0] ymmRes0[127:0]]
             // p_llrRes = [ymmRes1[255:128] ymmRes1[127:0] ymmRes0[255:128] ymmRes0[127:0]]
-        //fprintf(fd,"         (__m256i*) &llrRes[%d + i]    = simde_mm256_permute4x64_epi64(ymm0, 0xD8);\n",lut_startAddrBnGroupsLlr[idxBnGroup]>>5 );
+        //fprintf(fd,"         (simde__m256i*) &llrRes[%d + i]    = simde_mm256_permute4x64_epi64(ymm0, 0xD8);\n",lut_startAddrBnGroupsLlr[idxBnGroup]>>5 );
         fprintf(fd,"            p_llrRes[i] = simde_mm256_permute4x64_epi64(ymm0, 0xD8);\n");
 
         fprintf(fd,"}\n");
@@ -500,9 +500,9 @@ fprintf(fd,  "// Process group with 8 CNs \n");
         cnOffsetInGroup = (lut_numBnInBnGroups[7]*NR_LDPC_ZMAX)>>4;
 
         // Set pointers to start of group 2
-        fprintf(fd,"    p_bnProcBuf     = (__m128i*) &bnProcBuf    [%u];\n",lut_startAddrBnGroups[idxBnGroup]);
-        fprintf(fd,"    p_llrProcBuf    = (__m128i*) &llrProcBuf   [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
-        fprintf(fd,"    p_llrRes        = (__m256i*) &llrRes       [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd,"    p_bnProcBuf     = (simde__m128i*) &bnProcBuf    [%u];\n",lut_startAddrBnGroups[idxBnGroup]);
+        fprintf(fd,"    p_llrProcBuf    = (simde__m128i*) &llrProcBuf   [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd,"    p_llrRes        = (simde__m256i*) &llrRes       [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
         // Loop over BNs
         fprintf(fd,"        for (int i=0,j=0;i<M;i++,j+=2) {\n");
             // First 16 LLRs of first CN
@@ -531,7 +531,7 @@ fprintf(fd,  "// Process group with 8 CNs \n");
         fprintf(fd,"        ymm0 = simde_mm256_packs_epi16(ymmRes0, ymmRes1);\n");
             // ymm0     = [ymmRes1[255:128] ymmRes0[255:128] ymmRes1[127:0] ymmRes0[127:0]]
             // p_llrRes = [ymmRes1[255:128] ymmRes1[127:0] ymmRes0[255:128] ymmRes0[127:0]]
-        //fprintf(fd,"         (__m256i*) &llrRes[%d + i]    = simde_mm256_permute4x64_epi64(ymm0, 0xD8);\n",lut_startAddrBnGroupsLlr[idxBnGroup]>>5 );
+        //fprintf(fd,"         (simde__m256i*) &llrRes[%d + i]    = simde_mm256_permute4x64_epi64(ymm0, 0xD8);\n",lut_startAddrBnGroupsLlr[idxBnGroup]>>5 );
 
         fprintf(fd,"            p_llrRes[i] = simde_mm256_permute4x64_epi64(ymm0, 0xD8);\n");
 
@@ -557,9 +557,9 @@ fprintf(fd,  "// Process group with 9 CNs \n");
         cnOffsetInGroup = (lut_numBnInBnGroups[8]*NR_LDPC_ZMAX)>>4;
 
         // Set pointers to start of group 2
-        fprintf(fd,"    p_bnProcBuf     = (__m128i*) &bnProcBuf    [%u];\n",lut_startAddrBnGroups[idxBnGroup]);
-        fprintf(fd,"    p_llrProcBuf    = (__m128i*) &llrProcBuf   [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
-        fprintf(fd,"    p_llrRes        = (__m256i*) &llrRes       [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd,"    p_bnProcBuf     = (simde__m128i*) &bnProcBuf    [%u];\n",lut_startAddrBnGroups[idxBnGroup]);
+        fprintf(fd,"    p_llrProcBuf    = (simde__m128i*) &llrProcBuf   [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd,"    p_llrRes        = (simde__m256i*) &llrRes       [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
         // Loop over BNs
         fprintf(fd,"        for (int i=0,j=0;i<M;i++,j+=2) {\n");
             // First 16 LLRs of first CN
@@ -588,7 +588,7 @@ fprintf(fd,  "// Process group with 9 CNs \n");
         fprintf(fd,"        ymm0 = simde_mm256_packs_epi16(ymmRes0, ymmRes1);\n");
             // ymm0     = [ymmRes1[255:128] ymmRes0[255:128] ymmRes1[127:0] ymmRes0[127:0]]
             // p_llrRes = [ymmRes1[255:128] ymmRes1[127:0] ymmRes0[255:128] ymmRes0[127:0]]
-        //fprintf(fd,"         (__m256i*) &llrRes[%d + i]    = simde_mm256_permute4x64_epi64(ymm0, 0xD8);\n",lut_startAddrBnGroupsLlr[idxBnGroup]>>5 );
+        //fprintf(fd,"         (simde__m256i*) &llrRes[%d + i]    = simde_mm256_permute4x64_epi64(ymm0, 0xD8);\n",lut_startAddrBnGroupsLlr[idxBnGroup]>>5 );
         fprintf(fd,"            p_llrRes[i] = simde_mm256_permute4x64_epi64(ymm0, 0xD8);\n");
 
         fprintf(fd,"}\n");
@@ -614,9 +614,9 @@ fprintf(fd,  "// Process group with 10 CNs \n");
         cnOffsetInGroup = (lut_numBnInBnGroups[9]*NR_LDPC_ZMAX)>>4;
 
         // Set pointers to start of group 2
-        fprintf(fd,"    p_bnProcBuf     = (__m128i*) &bnProcBuf    [%u];\n",lut_startAddrBnGroups[idxBnGroup]);
-        fprintf(fd,"    p_llrProcBuf    = (__m128i*) &llrProcBuf   [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
-        fprintf(fd,"    p_llrRes        = (__m256i*) &llrRes       [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd,"    p_bnProcBuf     = (simde__m128i*) &bnProcBuf    [%u];\n",lut_startAddrBnGroups[idxBnGroup]);
+        fprintf(fd,"    p_llrProcBuf    = (simde__m128i*) &llrProcBuf   [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd,"    p_llrRes        = (simde__m256i*) &llrRes       [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
         // Loop over BNs
         fprintf(fd,"        for (int i=0,j=0;i<M;i++,j+=2) {\n");
             // First 16 LLRs of first CN
@@ -672,9 +672,9 @@ fprintf(fd,  "// Process group with 11 CNs \n");
         cnOffsetInGroup = (lut_numBnInBnGroups[10]*NR_LDPC_ZMAX)>>4;
 
         // Set pointers to start of group 2
-        fprintf(fd,"    p_bnProcBuf     = (__m128i*) &bnProcBuf    [%u];\n",lut_startAddrBnGroups[idxBnGroup]);
-        fprintf(fd,"    p_llrProcBuf    = (__m128i*) &llrProcBuf   [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
-        fprintf(fd,"    p_llrRes        = (__m256i*) &llrRes       [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd,"    p_bnProcBuf     = (simde__m128i*) &bnProcBuf    [%u];\n",lut_startAddrBnGroups[idxBnGroup]);
+        fprintf(fd,"    p_llrProcBuf    = (simde__m128i*) &llrProcBuf   [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd,"    p_llrRes        = (simde__m256i*) &llrRes       [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
         // Loop over BNs
         fprintf(fd,"            for (int i=0,j=0;i<M;i++,j+=2) {\n");
             // First 16 LLRs of first CN
@@ -727,9 +727,9 @@ fprintf(fd,  "// Process group with 12 CNs \n");
         cnOffsetInGroup = (lut_numBnInBnGroups[11]*NR_LDPC_ZMAX)>>4;
 
         // Set pointers to start of group 2
-        fprintf(fd,"    p_bnProcBuf     = (__m128i*) &bnProcBuf    [%u];\n",lut_startAddrBnGroups[idxBnGroup]);
-        fprintf(fd,"    p_llrProcBuf    = (__m128i*) &llrProcBuf   [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
-        fprintf(fd,"    p_llrRes        = (__m256i*) &llrRes       [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd,"    p_bnProcBuf     = (simde__m128i*) &bnProcBuf    [%u];\n",lut_startAddrBnGroups[idxBnGroup]);
+        fprintf(fd,"    p_llrProcBuf    = (simde__m128i*) &llrProcBuf   [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd,"    p_llrRes        = (simde__m256i*) &llrRes       [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
         // Loop over BNs
         fprintf(fd,"            for (int i=0,j=0;i<M;i++,j+=2) {\n");
             // First 16 LLRs of first CN
@@ -783,9 +783,9 @@ fprintf(fd,  "// Process group with 13 CNs \n");
         cnOffsetInGroup = (lut_numBnInBnGroups[12]*NR_LDPC_ZMAX)>>4;
 
         // Set pointers to start of group 2
-        fprintf(fd,"    p_bnProcBuf     = (__m128i*) &bnProcBuf    [%u];\n",lut_startAddrBnGroups[idxBnGroup]);
-        fprintf(fd,"    p_llrProcBuf    = (__m128i*) &llrProcBuf   [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
-        fprintf(fd,"    p_llrRes        = (__m256i*) &llrRes       [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd,"    p_bnProcBuf     = (simde__m128i*) &bnProcBuf    [%u];\n",lut_startAddrBnGroups[idxBnGroup]);
+        fprintf(fd,"    p_llrProcBuf    = (simde__m128i*) &llrProcBuf   [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd,"    p_llrRes        = (simde__m256i*) &llrRes       [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
         // Loop over BNs
         fprintf(fd,"        for (int i=0,j=0;i<M;i++,j+=2) {\n");
             // First 16 LLRs of first CN
@@ -840,9 +840,9 @@ fprintf(fd,  "// Process group with 14 CNs \n");
         cnOffsetInGroup = (lut_numBnInBnGroups[13]*NR_LDPC_ZMAX)>>4;
 
         // Set pointers to start of group 2
-        fprintf(fd,"    p_bnProcBuf     = (__m128i*) &bnProcBuf    [%u];\n",lut_startAddrBnGroups[idxBnGroup]);
-        fprintf(fd,"    p_llrProcBuf    = (__m128i*) &llrProcBuf   [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
-        fprintf(fd,"    p_llrRes        = (__m256i*) &llrRes       [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd,"    p_bnProcBuf     = (simde__m128i*) &bnProcBuf    [%u];\n",lut_startAddrBnGroups[idxBnGroup]);
+        fprintf(fd,"    p_llrProcBuf    = (simde__m128i*) &llrProcBuf   [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd,"    p_llrRes        = (simde__m256i*) &llrRes       [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
         // Loop over BNs
         fprintf(fd,"        for (int i=0,j=0;i<M;i++,j+=2) {\n");
             // First 16 LLRs of first CN
@@ -896,9 +896,9 @@ fprintf(fd,  "// Process group with 15 CNs \n");
         cnOffsetInGroup = (lut_numBnInBnGroups[14]*NR_LDPC_ZMAX)>>4;
 
         // Set pointers to start of group 2
-        fprintf(fd,"    p_bnProcBuf     = (__m128i*) &bnProcBuf    [%u];\n",lut_startAddrBnGroups[idxBnGroup]);
-        fprintf(fd,"    p_llrProcBuf    = (__m128i*) &llrProcBuf   [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
-        fprintf(fd,"    p_llrRes        = (__m256i*) &llrRes       [%u];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd,"    p_bnProcBuf     = (simde__m128i*) &bnProcBuf    [%u];\n",lut_startAddrBnGroups[idxBnGroup]);
+        fprintf(fd,"    p_llrProcBuf    = (simde__m128i*) &llrProcBuf   [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd,"    p_llrRes        = (simde__m256i*) &llrRes       [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
         // Loop over BNs
         fprintf(fd,"        for (int i=0,j=0;i<M;i++,j+=2) {\n");
             // First 16 LLRs of first CN
@@ -953,9 +953,9 @@ fprintf(fd,  "// Process group with 16 CNs \n");
         cnOffsetInGroup = (lut_numBnInBnGroups[15]*NR_LDPC_ZMAX)>>4;
 
         // Set pointers to start of group 2
-        fprintf(fd,"    p_bnProcBuf     = (__m128i*) &bnProcBuf    [%u];\n",lut_startAddrBnGroups[idxBnGroup]);
-        fprintf(fd,"    p_llrProcBuf    = (__m128i*) &llrProcBuf   [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
-        fprintf(fd,"    p_llrRes        = (__m256i*) &llrRes       [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd,"    p_bnProcBuf     = (simde__m128i*) &bnProcBuf    [%u];\n",lut_startAddrBnGroups[idxBnGroup]);
+        fprintf(fd,"    p_llrProcBuf    = (simde__m128i*) &llrProcBuf   [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd,"    p_llrRes        = (simde__m256i*) &llrRes       [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
         // Loop over BNs
         fprintf(fd,"        for (int i=0,j=0;i<M;i++,j+=2) {\n");
             // First 16 LLRs of first CN
@@ -1009,9 +1009,9 @@ fprintf(fd,  "// Process group with 17 CNs \n");
         cnOffsetInGroup = (lut_numBnInBnGroups[16]*NR_LDPC_ZMAX)>>4;
 
         // Set pointers to start of group 2
-        fprintf(fd,"    p_bnProcBuf     = (__m128i*) &bnProcBuf    [%u];\n",lut_startAddrBnGroups[idxBnGroup]);
-        fprintf(fd,"    p_llrProcBuf    = (__m128i*) &llrProcBuf   [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
-        fprintf(fd,"    p_llrRes        = (__m256i*) &llrRes       [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd,"    p_bnProcBuf     = (simde__m128i*) &bnProcBuf    [%u];\n",lut_startAddrBnGroups[idxBnGroup]);
+        fprintf(fd,"    p_llrProcBuf    = (simde__m128i*) &llrProcBuf   [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd,"    p_llrRes        = (simde__m256i*) &llrRes       [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
         // Loop over BNs
         fprintf(fd,"        for (int i=0,j=0;i<M;i++,j+=2) {\n");
             // First 16 LLRs of first CN
@@ -1065,9 +1065,9 @@ fprintf(fd,  "// Process group with 18 CNs \n");
         cnOffsetInGroup = (lut_numBnInBnGroups[17]*NR_LDPC_ZMAX)>>4;
 
         // Set pointers to start of group 2
-        fprintf(fd,"    p_bnProcBuf     = (__m128i*) &bnProcBuf    [%u];\n",lut_startAddrBnGroups[idxBnGroup]);
-        fprintf(fd,"    p_llrProcBuf    = (__m128i*) &llrProcBuf   [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
-        fprintf(fd,"    p_llrRes        = (__m256i*) &llrRes       [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd,"    p_bnProcBuf     = (simde__m128i*) &bnProcBuf    [%u];\n",lut_startAddrBnGroups[idxBnGroup]);
+        fprintf(fd,"    p_llrProcBuf    = (simde__m128i*) &llrProcBuf   [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd,"    p_llrRes        = (simde__m256i*) &llrRes       [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
         // Loop over BNs
         fprintf(fd,"        for (int i=0,j=0;i<M;i++,j+=2) {\n");
             // First 16 LLRs of first CN
@@ -1120,9 +1120,9 @@ fprintf(fd,  "// Process group with 19 CNs \n");
         cnOffsetInGroup = (lut_numBnInBnGroups[18]*NR_LDPC_ZMAX)>>4;
 
         // Set pointers to start of group 2
-        fprintf(fd,"    p_bnProcBuf     = (__m128i*) &bnProcBuf    [%u];\n",lut_startAddrBnGroups[idxBnGroup]);
-        fprintf(fd,"    p_llrProcBuf    = (__m128i*) &llrProcBuf   [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
-        fprintf(fd,"    p_llrRes        = (__m256i*) &llrRes       [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd,"    p_bnProcBuf     = (simde__m128i*) &bnProcBuf    [%u];\n",lut_startAddrBnGroups[idxBnGroup]);
+        fprintf(fd,"    p_llrProcBuf    = (simde__m128i*) &llrProcBuf   [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd,"    p_llrRes        = (simde__m256i*) &llrRes       [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
         // Loop over BNs
         fprintf(fd,"        for (int i=0,j=0;i<M;i++,j+=2) {\n");
             // First 16 LLRs of first CN
@@ -1176,9 +1176,9 @@ fprintf(fd,  "// Process group with 20 CNs \n");
         cnOffsetInGroup = (lut_numBnInBnGroups[19]*NR_LDPC_ZMAX)>>4;
 
         // Set pointers to start of group 2
-        fprintf(fd,"    p_bnProcBuf     = (__m128i*) &bnProcBuf    [%u];\n",lut_startAddrBnGroups[idxBnGroup]);
-        fprintf(fd,"    p_llrProcBuf    = (__m128i*) &llrProcBuf   [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
-        fprintf(fd,"    p_llrRes        = (__m256i*) &llrRes       [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd,"    p_bnProcBuf     = (simde__m128i*) &bnProcBuf    [%u];\n",lut_startAddrBnGroups[idxBnGroup]);
+        fprintf(fd,"    p_llrProcBuf    = (simde__m128i*) &llrProcBuf   [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd,"    p_llrRes        = (simde__m256i*) &llrRes       [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
         // Loop over BNs
         fprintf(fd,"        for (int i=0,j=0;i<M;i++,j+=2) {\n");
             // First 16 LLRs of first CN
@@ -1236,9 +1236,9 @@ fprintf(fd,  "// Process group with 21 CNs \n");
         cnOffsetInGroup = (lut_numBnInBnGroups[20]*NR_LDPC_ZMAX)>>4;
 
         // Set pointers to start of group 2
-        fprintf(fd,"    p_bnProcBuf     = (__m128i*) &bnProcBuf    [%u];\n",lut_startAddrBnGroups[idxBnGroup]);
-        fprintf(fd,"    p_llrProcBuf    = (__m128i*) &llrProcBuf   [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
-        fprintf(fd,"    p_llrRes        = (__m256i*) &llrRes       [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd,"    p_bnProcBuf     = (simde__m128i*) &bnProcBuf    [%u];\n",lut_startAddrBnGroups[idxBnGroup]);
+        fprintf(fd,"    p_llrProcBuf    = (simde__m128i*) &llrProcBuf   [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd,"    p_llrRes        = (simde__m256i*) &llrRes       [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
         // Loop over BNs
         fprintf(fd,"            for (int i=0,j=0;i<M;i++,j+=2) {\n");
             // First 16 LLRs of first CN
@@ -1291,9 +1291,9 @@ fprintf(fd,  "// Process group with 22 CNs \n");
         cnOffsetInGroup = (lut_numBnInBnGroups[21]*NR_LDPC_ZMAX)>>4;
 
         // Set pointers to start of group 2
-        fprintf(fd,"    p_bnProcBuf     = (__m128i*) &bnProcBuf    [%u];\n",lut_startAddrBnGroups[idxBnGroup]);
-        fprintf(fd,"    p_llrProcBuf    = (__m128i*) &llrProcBuf   [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
-        fprintf(fd,"    p_llrRes        = (__m256i*) &llrRes       [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd,"    p_bnProcBuf     = (simde__m128i*) &bnProcBuf    [%u];\n",lut_startAddrBnGroups[idxBnGroup]);
+        fprintf(fd,"    p_llrProcBuf    = (simde__m128i*) &llrProcBuf   [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd,"    p_llrRes        = (simde__m256i*) &llrRes       [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
         // Loop over BNs
         fprintf(fd,"            for (int i=0,j=0;i<M;i++,j+=2) {\n");
             // First 16 LLRs of first CN
@@ -1347,9 +1347,9 @@ fprintf(fd,  "// Process group with <23 CNs \n");
         cnOffsetInGroup = (lut_numBnInBnGroups[22]*NR_LDPC_ZMAX)>>4;
 
         // Set pointers to start of group 2
-        fprintf(fd,"    p_bnProcBuf     = (__m128i*) &bnProcBuf    [%u];\n",lut_startAddrBnGroups[idxBnGroup]);
-        fprintf(fd,"    p_llrProcBuf    = (__m128i*) &llrProcBuf   [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
-        fprintf(fd,"    p_llrRes        = (__m256i*) &llrRes       [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd,"    p_bnProcBuf     = (simde__m128i*) &bnProcBuf    [%u];\n",lut_startAddrBnGroups[idxBnGroup]);
+        fprintf(fd,"    p_llrProcBuf    = (simde__m128i*) &llrProcBuf   [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd,"    p_llrRes        = (simde__m256i*) &llrRes       [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
         // Loop over BNs
         fprintf(fd,"        for (int i=0,j=0;i<M;i++,j+=2) {\n");
             // First 16 LLRs of first CN
@@ -1404,9 +1404,9 @@ fprintf(fd,  "// Process group with 24 CNs \n");
         cnOffsetInGroup = (lut_numBnInBnGroups[23]*NR_LDPC_ZMAX)>>4;
 
         // Set pointers to start of group 2
-        fprintf(fd,"    p_bnProcBuf     = (__m128i*) &bnProcBuf    [%u];\n",lut_startAddrBnGroups[idxBnGroup]);
-        fprintf(fd,"    p_llrProcBuf    = (__m128i*) &llrProcBuf   [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
-        fprintf(fd,"    p_llrRes        = (__m256i*) &llrRes       [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd,"    p_bnProcBuf     = (simde__m128i*) &bnProcBuf    [%u];\n",lut_startAddrBnGroups[idxBnGroup]);
+        fprintf(fd,"    p_llrProcBuf    = (simde__m128i*) &llrProcBuf   [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd,"    p_llrRes        = (simde__m256i*) &llrRes       [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
         // Loop over BNs
         fprintf(fd,"        for (int i=0,j=0;i<M;i++,j+=2) {\n");
             // First 16 LLRs of first CN
@@ -1460,9 +1460,9 @@ fprintf(fd,  "// Process group with 25 CNs \n");
         cnOffsetInGroup = (lut_numBnInBnGroups[24]*NR_LDPC_ZMAX)>>4;
 
         // Set pointers to start of group 2
-        fprintf(fd,"    p_bnProcBuf     = (__m128i*) &bnProcBuf    [%u];\n",lut_startAddrBnGroups[idxBnGroup]);
-        fprintf(fd,"    p_llrProcBuf    = (__m128i*) &llrProcBuf   [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
-        fprintf(fd,"    p_llrRes        = (__m256i*) &llrRes       [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd,"    p_bnProcBuf     = (simde__m128i*) &bnProcBuf    [%u];\n",lut_startAddrBnGroups[idxBnGroup]);
+        fprintf(fd,"    p_llrProcBuf    = (simde__m128i*) &llrProcBuf   [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd,"    p_llrRes        = (simde__m256i*) &llrRes       [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
         // Loop over BNs
         fprintf(fd,"        for (int i=0,j=0;i<M;i++,j+=2) {\n");
             // First 16 LLRs of first CN
@@ -1517,9 +1517,9 @@ fprintf(fd,  "// Process group with 26 CNs \n");
         cnOffsetInGroup = (lut_numBnInBnGroups[25]*NR_LDPC_ZMAX)>>4;
 
         // Set pointers to start of group 2
-        fprintf(fd,"    p_bnProcBuf     = (__m128i*) &bnProcBuf    [%u];\n",lut_startAddrBnGroups[idxBnGroup]);
-        fprintf(fd,"    p_llrProcBuf    = (__m128i*) &llrProcBuf   [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
-        fprintf(fd,"    p_llrRes        = (__m256i*) &llrRes       [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd,"    p_bnProcBuf     = (simde__m128i*) &bnProcBuf    [%u];\n",lut_startAddrBnGroups[idxBnGroup]);
+        fprintf(fd,"    p_llrProcBuf    = (simde__m128i*) &llrProcBuf   [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd,"    p_llrRes        = (simde__m256i*) &llrRes       [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
         // Loop over BNs
         fprintf(fd,"        for (int i=0,j=0;i<M;i++,j+=2) {\n");
             // First 16 LLRs of first CN
@@ -1573,9 +1573,9 @@ fprintf(fd,  "// Process group with 27 CNs \n");
         cnOffsetInGroup = (lut_numBnInBnGroups[26]*NR_LDPC_ZMAX)>>4;
 
         // Set pointers to start of group 2
-        fprintf(fd,"    p_bnProcBuf     = (__m128i*) &bnProcBuf    [%u];\n",lut_startAddrBnGroups[idxBnGroup]);
-        fprintf(fd,"    p_llrProcBuf    = (__m128i*) &llrProcBuf   [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
-        fprintf(fd,"    p_llrRes        = (__m256i*) &llrRes       [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd,"    p_bnProcBuf     = (simde__m128i*) &bnProcBuf    [%u];\n",lut_startAddrBnGroups[idxBnGroup]);
+        fprintf(fd,"    p_llrProcBuf    = (simde__m128i*) &llrProcBuf   [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd,"    p_llrRes        = (simde__m256i*) &llrRes       [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
         // Loop over BNs
         fprintf(fd,"        for (int i=0,j=0;i<M;i++,j+=2) {\n");
             // First 16 LLRs of first CN
@@ -1629,9 +1629,9 @@ fprintf(fd,  "// Process group with 28 CNs \n");
         cnOffsetInGroup = (lut_numBnInBnGroups[27]*NR_LDPC_ZMAX)>>4;
 
         // Set pointers to start of group 2
-        fprintf(fd,"    p_bnProcBuf     = (__m128i*) &bnProcBuf    [%u];\n",lut_startAddrBnGroups[idxBnGroup]);
-        fprintf(fd,"    p_llrProcBuf    = (__m128i*) &llrProcBuf   [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
-        fprintf(fd,"    p_llrRes        = (__m256i*) &llrRes       [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd,"    p_bnProcBuf     = (simde__m128i*) &bnProcBuf    [%u];\n",lut_startAddrBnGroups[idxBnGroup]);
+        fprintf(fd,"    p_llrProcBuf    = (simde__m128i*) &llrProcBuf   [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd,"    p_llrRes        = (simde__m256i*) &llrRes       [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
         // Loop over BNs
         fprintf(fd,"        for (int i=0,j=0;i<M;i++,j+=2) {\n");
             // First 16 LLRs of first CN
@@ -1684,9 +1684,9 @@ fprintf(fd,  "// Process group with 29 CNs \n");
         cnOffsetInGroup = (lut_numBnInBnGroups[28]*NR_LDPC_ZMAX)>>4;
 
         // Set pointers to start of group 2
-        fprintf(fd,"    p_bnProcBuf     = (__m128i*) &bnProcBuf    [%u];\n",lut_startAddrBnGroups[idxBnGroup]);
-        fprintf(fd,"    p_llrProcBuf    = (__m128i*) &llrProcBuf   [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
-        fprintf(fd,"    p_llrRes        = (__m256i*) &llrRes       [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd,"    p_bnProcBuf     = (simde__m128i*) &bnProcBuf    [%u];\n",lut_startAddrBnGroups[idxBnGroup]);
+        fprintf(fd,"    p_llrProcBuf    = (simde__m128i*) &llrProcBuf   [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd,"    p_llrRes        = (simde__m256i*) &llrRes       [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
         // Loop over BNs
         fprintf(fd,"        for (int i=0,j=0;i<M;i++,j+=2) {\n");
             // First 16 LLRs of first CN
@@ -1740,9 +1740,9 @@ fprintf(fd,  "// Process group with 30 CNs \n");
         cnOffsetInGroup = (lut_numBnInBnGroups[29]*NR_LDPC_ZMAX)>>4;
 
         // Set pointers to start of group 2
-        fprintf(fd,"    p_bnProcBuf     = (__m128i*) &bnProcBuf    [%u];\n",lut_startAddrBnGroups[idxBnGroup]);
-        fprintf(fd,"    p_llrProcBuf    = (__m128i*) &llrProcBuf   [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
-        fprintf(fd,"    p_llrRes        = (__m256i*) &llrRes       [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd,"    p_bnProcBuf     = (simde__m128i*) &bnProcBuf    [%u];\n",lut_startAddrBnGroups[idxBnGroup]);
+        fprintf(fd,"    p_llrProcBuf    = (simde__m128i*) &llrProcBuf   [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd,"    p_llrRes        = (simde__m256i*) &llrRes       [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
         // Loop over BNs
         fprintf(fd,"        for (int i=0,j=0;i<M;i++,j+=2) {\n");
             // First 16 LLRs of first CN
diff --git a/openair1/PHY/CODING/nrLDPC_decoder/nrLDPC_tools/generator_bnProc/bnProc_gen_BG1_128.c b/openair1/PHY/CODING/nrLDPC_decoder/nrLDPC_tools/generator_bnProc/bnProc_gen_BG1_128.c
new file mode 100644
index 0000000000000000000000000000000000000000..b61a5e373b7c6adcd3e7849152d52e2a07c1a224
--- /dev/null
+++ b/openair1/PHY/CODING/nrLDPC_decoder/nrLDPC_tools/generator_bnProc/bnProc_gen_BG1_128.c
@@ -0,0 +1,1002 @@
+/*
+ * Licensed to the OpenAirInterface (OAI) Software Alliance under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The OpenAirInterface Software Alliance licenses this file to You under
+ * the OAI Public License, Version 1.1  (the "License"); you may not use this file
+ * except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.openairinterface.org/?page_id=698
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *-------------------------------------------------------------------------------
+ * For more information about the OpenAirInterface (OAI) Software Alliance:
+ *      contact@openairinterface.org
+ */
+
+#include <stdio.h>
+#include <stdlib.h>
+#include <stdint.h>
+#include "PHY/sse_intrin.h"
+#include "../../nrLDPCdecoder_defs.h"
+#include "../../nrLDPC_types.h"
+
+void nrLDPC_bnProc_BG1_generator_128(const char* dir, int R)
+{
+  const char* ratestr[3] = {"13", "23", "89"};
+
+  if (R < 0 || R > 2) {
+    printf("Illegal R %d\n", R);
+    abort();
+  }
+
+  // system("mkdir -p ../ldpc_gen_files");
+
+  char fname[FILENAME_MAX + 1];
+  snprintf(fname, sizeof(fname), "%s/bnProc128/nrLDPC_bnProc_BG1_R%s_128.h", dir, ratestr[R]);
+  FILE* fd = fopen(fname, "w");
+  if (fd == NULL) {
+    printf("Cannot create file %s\n", fname);
+    abort();
+  }
+
+  // fprintf(fd,"#include <stdint.h>\n");
+  // fprintf(fd,"#include \"PHY/sse_intrin.h\"\n");
+
+  fprintf(fd, "static inline void nrLDPC_bnProc_BG1_R%s_128(int8_t* bnProcBuf,int8_t* bnProcBufRes,  int8_t* llrRes, uint16_t Z ) {\n", ratestr[R]);
+
+  const uint8_t* lut_numBnInBnGroups;
+  const uint32_t* lut_startAddrBnGroups;
+  const uint16_t* lut_startAddrBnGroupsLlr;
+  if (R == 0) {
+    lut_numBnInBnGroups = lut_numBnInBnGroups_BG1_R13;
+    lut_startAddrBnGroups = lut_startAddrBnGroups_BG1_R13;
+    lut_startAddrBnGroupsLlr = lut_startAddrBnGroupsLlr_BG1_R13;
+
+  } else if (R == 1) {
+    lut_numBnInBnGroups = lut_numBnInBnGroups_BG1_R23;
+    lut_startAddrBnGroups = lut_startAddrBnGroups_BG1_R23;
+    lut_startAddrBnGroupsLlr = lut_startAddrBnGroupsLlr_BG1_R23;
+  } else if (R == 2) {
+    lut_numBnInBnGroups = lut_numBnInBnGroups_BG1_R89;
+    lut_startAddrBnGroups = lut_startAddrBnGroups_BG1_R89;
+    lut_startAddrBnGroupsLlr = lut_startAddrBnGroupsLlr_BG1_R89;
+  } else {
+    printf("aborting, illegal R %d\n", R);
+    fclose(fd);
+    abort();
+  }
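+
+  // Per the ratestr table above, R selects the BG1 rate-specific LUT set:
+  // R=0 -> R13 (rate 1/3), R=1 -> R23 (rate 2/3), R=2 -> R89 (rate 8/9).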
+
+  // uint32_t M;
+  // uint32_t M32rem;
+  // uint32_t i;
+  uint32_t k;
+  // Offset to each CN within a group in terms of 16 Byte (one simde__m128i)
+  uint32_t cnOffsetInGroup;
+  uint8_t idxBnGroup = 0;
+  fprintf(fd, "        uint32_t M, i; \n");
+
+  // =====================================================================
+  // Process group with 1 CN
+  // Already done in bnProcBufPc
+
+  // =====================================================================
+
+  fprintf(fd, "// Process group with 2 CNs \n");
+
+  if (lut_numBnInBnGroups[1] > 0) {
+    // If elements in group move to next address
+    idxBnGroup++;
+
+    // Number of groups of 16 BNs for parallel processing
+    fprintf(fd, "       M = (%d*Z + 15)>>4;\n", lut_numBnInBnGroups[1]);
+
+    // Set the offset to each CN within a group in terms of 16 Byte
+    cnOffsetInGroup = (lut_numBnInBnGroups[1] * NR_LDPC_ZMAX) >> 4;
+
+    // Set pointers to start of group 2
+
+    // Loop over CNs
+    for (k = 0; k < 2; k++) {
+      // Loop over BNs
+      fprintf(fd, "            for (i=0;i<M;i++) {\n");
+      fprintf(fd,
+              "            ((simde__m128i*)bnProcBufRes)[%d + i ] = simde_mm_subs_epi8(((simde__m128i*)llrRes)[%d + i ], ((simde__m128i*) bnProcBuf)[%d + i]);\n",
+              (lut_startAddrBnGroups[idxBnGroup] >> 4) + k * cnOffsetInGroup,
+              (lut_startAddrBnGroupsLlr[idxBnGroup] >> 4),
+              (lut_startAddrBnGroups[idxBnGroup] >> 4) + k * cnOffsetInGroup);
+
+      fprintf(fd, "}\n");
+    }
+  }
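+
+  /* Addressing sketch for all groups below: lut_startAddrBnGroups[] holds
+   * byte offsets, so ">> 4" turns them into simde__m128i (16-byte) indices;
+   * the k-th CN's messages for a group then start k*cnOffsetInGroup vectors
+   * further on (e.g. a byte offset of 256 becomes vector index 16).
+   */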
+
+  // =====================================================================
+
+  fprintf(fd, "// Process group with 3 CNs \n");
+
+  if (lut_numBnInBnGroups[2] > 0) {
+    // If elements in group move to next address
+    idxBnGroup++;
+
+    // Number of groups of 16 BNs for parallel processing
+    fprintf(fd, "       M = (%d*Z + 15)>>4;\n", lut_numBnInBnGroups[2]);
+
+    // Set the offset to each CN within a group in terms of 16 Byte
+    cnOffsetInGroup = (lut_numBnInBnGroups[2] * NR_LDPC_ZMAX) >> 4;
+
+    // Set pointers to start of group 3
+    // fprintf(fd,"    ((simde__m128i*) bnProcBuf)     = ((simde__m128i*) &bnProcBuf)    [%d];\n",lut_startAddrBnGroups[idxBnGroup]);
+
+    for (k = 0; k < 3; k++) {
+      // Loop over BNs
+      fprintf(fd, "            for (i=0;i<M;i++) {\n");
+      fprintf(fd,
+              "            ((simde__m128i*)bnProcBufRes)[%d + i ] = simde_mm_subs_epi8(((simde__m128i*)llrRes)[%d + i ], ((simde__m128i*) bnProcBuf)[%d + i]);\n",
+              (lut_startAddrBnGroups[idxBnGroup] >> 4) + k * cnOffsetInGroup,
+              (lut_startAddrBnGroupsLlr[idxBnGroup] >> 4),
+              (lut_startAddrBnGroups[idxBnGroup] >> 4) + k * cnOffsetInGroup);
+
+      fprintf(fd, "}\n");
+    }
+  }
+
+  // =====================================================================
+
+  fprintf(fd, "// Process group with 4 CNs \n");
+
+  if (lut_numBnInBnGroups[3] > 0) {
+    // If elements in group move to next address
+    idxBnGroup++;
+
+    // Number of groups of 16 BNs for parallel processing
+    fprintf(fd, "       M = (%d*Z + 15)>>4;\n", lut_numBnInBnGroups[3]);
+
+    // Set the offset to each CN within a group in terms of 16 Byte
+    cnOffsetInGroup = (lut_numBnInBnGroups[3] * NR_LDPC_ZMAX) >> 4;
+
+    // Set pointers to start of group 4
+
+    for (k = 0; k < 4; k++) {
+      // Loop over BNs
+      fprintf(fd, "            for (i=0;i<M;i++) {\n");
+      fprintf(fd,
+              "            ((simde__m128i*)bnProcBufRes)[%d + i ] = simde_mm_subs_epi8(((simde__m128i*)llrRes)[%d + i ], ((simde__m128i*) bnProcBuf)[%d + i]);\n",
+              ((lut_startAddrBnGroups[idxBnGroup] >> 4) + k * cnOffsetInGroup),
+              (lut_startAddrBnGroupsLlr[idxBnGroup] >> 4),
+              ((lut_startAddrBnGroups[idxBnGroup] >> 4) + k * cnOffsetInGroup));
+
+      fprintf(fd, "}\n");
+    }
+  }
+
+  // =====================================================================
+
+  fprintf(fd, "// Process group with 5 CNs \n");
+
+  if (lut_numBnInBnGroups[4] > 0) {
+    // If elements in group move to next address
+    idxBnGroup++;
+
+    // Number of groups of 16 BNs for parallel processing
+    fprintf(fd, "       M = (%d*Z + 15)>>4;\n", lut_numBnInBnGroups[4]);
+
+    // Set the offset to each CN within a group in terms of 16 Byte
+    cnOffsetInGroup = (lut_numBnInBnGroups[4] * NR_LDPC_ZMAX) >> 4;
+
+    // Set pointers to start of group 5
+
+    // Loop over CNs
+    for (k = 0; k < 5; k++) {
+      // Loop over BNs
+      fprintf(fd, "            for (i=0;i<M;i++) {\n");
+      fprintf(fd,
+              "            ((simde__m128i*)bnProcBufRes)[%d + i ] = simde_mm_subs_epi8(((simde__m128i*)llrRes)[%d + i ], ((simde__m128i*) bnProcBuf)[%d + i]);\n",
+              (lut_startAddrBnGroups[idxBnGroup] >> 4) + k * cnOffsetInGroup,
+              (lut_startAddrBnGroupsLlr[idxBnGroup] >> 4),
+              (lut_startAddrBnGroups[idxBnGroup] >> 4) + k * cnOffsetInGroup);
+
+      fprintf(fd, "}\n");
+    }
+  }
+
+  // =====================================================================
+
+  fprintf(fd, "// Process group with 6 CNs \n");
+
+  if (lut_numBnInBnGroups[5] > 0) {
+    // If elements in group move to next address
+    idxBnGroup++;
+
+    // Number of groups of 16 BNs for parallel processing
+    fprintf(fd, "       M = (%d*Z + 15)>>4;\n", lut_numBnInBnGroups[5]);
+
+    // Set the offset to each CN within a group in terms of 16 Byte
+    cnOffsetInGroup = (lut_numBnInBnGroups[5] * NR_LDPC_ZMAX) >> 4;
+
+    // Set pointers to start of group 6
+
+    // Loop over CNs
+    for (k = 0; k < 6; k++) {
+      // Loop over BNs
+      fprintf(fd, "            for (i=0;i<M;i++) {\n");
+      fprintf(fd,
+              "            ((simde__m128i*)bnProcBufRes)[%d + i ] = simde_mm_subs_epi8(((simde__m128i*)llrRes)[%d + i ], ((simde__m128i*) bnProcBuf)[%d + i]);\n",
+              (lut_startAddrBnGroups[idxBnGroup] >> 4) + k * cnOffsetInGroup,
+              (lut_startAddrBnGroupsLlr[idxBnGroup] >> 4),
+              (lut_startAddrBnGroups[idxBnGroup] >> 4) + k * cnOffsetInGroup);
+
+      fprintf(fd, "}\n");
+    }
+  }
+
+  // =====================================================================
+
+  fprintf(fd, "// Process group with 7 CNs \n");
+
+  if (lut_numBnInBnGroups[6] > 0) {
+    // If elements in group move to next address
+    idxBnGroup++;
+
+    // Number of groups of 16 BNs for parallel processing
+    fprintf(fd, "       M = (%d*Z + 15)>>4;\n", lut_numBnInBnGroups[6]);
+
+    // Set the offset to each CN within a group in terms of 16 Byte
+    cnOffsetInGroup = (lut_numBnInBnGroups[6] * NR_LDPC_ZMAX) >> 4;
+
+    // Set pointers to start of group 7
+
+    // Loop over CNs
+    for (k = 0; k < 7; k++) {
+      // Loop over BNs
+      fprintf(fd, "            for (i=0;i<M;i++) {\n");
+      fprintf(fd,
+              "            ((simde__m128i*)bnProcBufRes)[%d + i ] = simde_mm_subs_epi8(((simde__m128i*)llrRes)[%d + i ], ((simde__m128i*) bnProcBuf)[%d + i]);\n",
+              (lut_startAddrBnGroups[idxBnGroup] >> 4) + k * cnOffsetInGroup,
+              (lut_startAddrBnGroupsLlr[idxBnGroup] >> 4),
+              (lut_startAddrBnGroups[idxBnGroup] >> 4) + k * cnOffsetInGroup);
+
+      fprintf(fd, "}\n");
+    }
+  }
+
+  // =====================================================================
+
+  fprintf(fd, "// Process group with 8 CNs \n");
+
+  if (lut_numBnInBnGroups[7] > 0) {
+    // If elements in group move to next address
+    idxBnGroup++;
+
+    // Number of groups of 16 BNs for parallel processing
+    fprintf(fd, "       M = (%d*Z + 15)>>4;\n", lut_numBnInBnGroups[7]);
+
+    // Set the offset to each CN within a group in terms of 16 Byte
+    cnOffsetInGroup = (lut_numBnInBnGroups[7] * NR_LDPC_ZMAX) >> 4;
+
+    // Set pointers to start of group 8
+
+    // Loop over CNs
+    for (k = 0; k < 8; k++) {
+      // Loop over BNs
+      fprintf(fd, "            for (i=0;i<M;i++) {\n");
+      fprintf(fd,
+              "            ((simde__m128i*)bnProcBufRes)[%d + i ] = simde_mm_subs_epi8(((simde__m128i*)llrRes)[%d + i ], ((simde__m128i*) bnProcBuf)[%d + i]);\n",
+              (lut_startAddrBnGroups[idxBnGroup] >> 4) + k * cnOffsetInGroup,
+              (lut_startAddrBnGroupsLlr[idxBnGroup] >> 4),
+              (lut_startAddrBnGroups[idxBnGroup] >> 4) + k * cnOffsetInGroup);
+
+      fprintf(fd, "}\n");
+    }
+  }
+
+  // =====================================================================
+
+  fprintf(fd, "// Process group with 9 CNs \n");
+
+  if (lut_numBnInBnGroups[8] > 0) {
+    // If elements in group move to next address
+    idxBnGroup++;
+
+    // Number of groups of 16 BNs for parallel processing
+    fprintf(fd, "       M = (%d*Z + 15)>>4;\n", lut_numBnInBnGroups[8]);
+
+    // Set the offset to each CN within a group in terms of 16 Byte
+    cnOffsetInGroup = (lut_numBnInBnGroups[8] * NR_LDPC_ZMAX) >> 4;
+
+    // Set pointers to start of group 9
+
+    // Loop over CNs
+    for (k = 0; k < 9; k++) {
+      // Loop over BNs
+      fprintf(fd, "            for (i=0;i<M;i++) {\n");
+      fprintf(fd,
+              "            ((simde__m128i*)bnProcBufRes)[%d + i ] = simde_mm_subs_epi8(((simde__m128i*)llrRes)[%d + i ], ((simde__m128i*) bnProcBuf)[%d + i]);\n",
+              (lut_startAddrBnGroups[idxBnGroup] >> 4) + k * cnOffsetInGroup,
+              (lut_startAddrBnGroupsLlr[idxBnGroup] >> 4),
+              (lut_startAddrBnGroups[idxBnGroup] >> 4) + k * cnOffsetInGroup);
+
+      fprintf(fd, "}\n");
+    }
+  }
+
+  // =====================================================================
+
+  fprintf(fd, "// Process group with 10 CNs \n");
+
+  if (lut_numBnInBnGroups[9] > 0) {
+    // If elements in group move to next address
+    idxBnGroup++;
+
+    // Number of groups of 16 BNs for parallel processing
+    fprintf(fd, "       M = (%d*Z + 15)>>4;\n", lut_numBnInBnGroups[9]);
+
+    // Set the offset to each CN within a group in terms of 16 Byte
+    cnOffsetInGroup = (lut_numBnInBnGroups[9] * NR_LDPC_ZMAX) >> 4;
+
+    // Set pointers to start of group 10
+
+    // Loop over CNs
+    for (k = 0; k < 10; k++) {
+      // Loop over BNs
+      fprintf(fd, "            for (i=0;i<M;i++) {\n");
+      fprintf(fd,
+              "            ((simde__m128i*)bnProcBufRes)[%d + i ] = simde_mm_subs_epi8(((simde__m128i*)llrRes)[%d + i ], ((simde__m128i*) bnProcBuf)[%d + i]);\n",
+              (lut_startAddrBnGroups[idxBnGroup] >> 4) + k * cnOffsetInGroup,
+              (lut_startAddrBnGroupsLlr[idxBnGroup] >> 4),
+              (lut_startAddrBnGroups[idxBnGroup] >> 4) + k * cnOffsetInGroup);
+
+      fprintf(fd, "}\n");
+    }
+  }
+
+  // =====================================================================
+
+  fprintf(fd, "// Process group with 11 CNs \n");
+
+  if (lut_numBnInBnGroups[10] > 0) {
+    // If elements in group move to next address
+    idxBnGroup++;
+
+    // Number of groups of 16 BNs for parallel processing
+    fprintf(fd, "       M = (%d*Z + 15)>>4;\n", lut_numBnInBnGroups[10]);
+
+    // Set the offset to each CN within a group in terms of 16 Byte
+    cnOffsetInGroup = (lut_numBnInBnGroups[10] * NR_LDPC_ZMAX) >> 4;
+
+    // Set pointers to start of group 11
+
+    // Loop over CNs
+    for (k = 0; k < 11; k++) {
+      // Loop over BNs
+      fprintf(fd, "            for (i=0;i<M;i++) {\n");
+      fprintf(fd,
+              "            ((simde__m128i*)bnProcBufRes)[%d + i ] = simde_mm_subs_epi8(((simde__m128i*)llrRes)[%d + i ], ((simde__m128i*) bnProcBuf)[%d + i]);\n",
+              (lut_startAddrBnGroups[idxBnGroup] >> 4) + k * cnOffsetInGroup,
+              (lut_startAddrBnGroupsLlr[idxBnGroup] >> 4),
+              (lut_startAddrBnGroups[idxBnGroup] >> 4) + k * cnOffsetInGroup);
+
+      fprintf(fd, "}\n");
+    }
+  }
+  // =====================================================================
+
+  fprintf(fd, "// Process group with 12 CNs \n");
+
+  if (lut_numBnInBnGroups[11] > 0) {
+    // If elements in group move to next address
+    idxBnGroup++;
+
+    // Number of groups of 16 BNs for parallel processing
+    fprintf(fd, "       M = (%d*Z + 15)>>4;\n", lut_numBnInBnGroups[11]);
+
+    // Set the offset to each CN within a group in terms of 16 Byte
+    cnOffsetInGroup = (lut_numBnInBnGroups[11] * NR_LDPC_ZMAX) >> 4;
+
+    // Set pointers to start of group 12
+
+    // Loop over CNs
+    for (k = 0; k < 12; k++) {
+      // Loop over BNs
+      fprintf(fd, "            for (i=0;i<M;i++) {\n");
+      fprintf(fd,
+              "            ((simde__m128i*)bnProcBufRes)[%d + i ] = simde_mm_subs_epi8(((simde__m128i*)llrRes)[%d + i ], ((simde__m128i*) bnProcBuf)[%d + i]);\n",
+              (lut_startAddrBnGroups[idxBnGroup] >> 4) + k * cnOffsetInGroup,
+              (lut_startAddrBnGroupsLlr[idxBnGroup] >> 4),
+              (lut_startAddrBnGroups[idxBnGroup] >> 4) + k * cnOffsetInGroup);
+
+      fprintf(fd, "}\n");
+    }
+  }
+
+  // =====================================================================
+
+  fprintf(fd, "// Process group with 13 CNs \n");
+
+  if (lut_numBnInBnGroups[12] > 0) {
+    // If elements in group move to next address
+    idxBnGroup++;
+
+    // Number of groups of 16 BNs for parallel processing
+    fprintf(fd, "       M = (%d*Z + 15)>>4;\n", lut_numBnInBnGroups[12]);
+
+    // Set the offset to each CN within a group in terms of 16 Byte
+    cnOffsetInGroup = (lut_numBnInBnGroups[12] * NR_LDPC_ZMAX) >> 4;
+
+    // Set pointers to start of group 13
+
+    // Loop over CNs
+    for (k = 0; k < 13; k++) {
+      // Loop over BNs
+      fprintf(fd, "            for (i=0;i<M;i++) {\n");
+      fprintf(fd,
+              "            ((simde__m128i*)bnProcBufRes)[%d + i ] = simde_mm_subs_epi8(((simde__m128i*)llrRes)[%d + i ], ((simde__m128i*) bnProcBuf)[%d + i]);\n",
+              (lut_startAddrBnGroups[idxBnGroup] >> 4) + k * cnOffsetInGroup,
+              (lut_startAddrBnGroupsLlr[idxBnGroup] >> 4),
+              (lut_startAddrBnGroups[idxBnGroup] >> 4) + k * cnOffsetInGroup);
+
+      fprintf(fd, "}\n");
+    }
+  }
+
+  // =====================================================================
+
+  fprintf(fd, "// Process group with 14 CNs \n");
+
+  if (lut_numBnInBnGroups[13] > 0) {
+    // If elements in group move to next address
+    idxBnGroup++;
+
+    // Number of groups of 16 BNs for parallel processing
+    fprintf(fd, "       M = (%d*Z + 15)>>4;\n", lut_numBnInBnGroups[13]);
+
+    // Set the offset to each CN within a group in terms of 16 Byte
+    cnOffsetInGroup = (lut_numBnInBnGroups[13] * NR_LDPC_ZMAX) >> 4;
+
+    // Set pointers to start of group 14
+
+    // Loop over CNs
+    for (k = 0; k < 14; k++) {
+      // Loop over BNs
+      fprintf(fd, "            for (i=0;i<M;i++) {\n");
+      fprintf(fd,
+              "            ((simde__m128i*)bnProcBufRes)[%d + i ] = simde_mm_subs_epi8(((simde__m128i*)llrRes)[%d + i ], ((simde__m128i*) bnProcBuf)[%d + i]);\n",
+              (lut_startAddrBnGroups[idxBnGroup] >> 4) + k * cnOffsetInGroup,
+              (lut_startAddrBnGroupsLlr[idxBnGroup] >> 4),
+              (lut_startAddrBnGroups[idxBnGroup] >> 4) + k * cnOffsetInGroup);
+
+      fprintf(fd, "}\n");
+    }
+  }
+
+  // =====================================================================
+
+  fprintf(fd, "// Process group with 15 CNs \n");
+
+  if (lut_numBnInBnGroups[14] > 0) {
+    // If elements in group move to next address
+    idxBnGroup++;
+
+    // Number of groups of 16 BNs for parallel processing
+    fprintf(fd, "       M = (%d*Z + 15)>>4;\n", lut_numBnInBnGroups[14]);
+
+    // Set the offset to each CN within a group in terms of 16 Byte
+    cnOffsetInGroup = (lut_numBnInBnGroups[14] * NR_LDPC_ZMAX) >> 4;
+
+    // Set pointers to start of group 15
+
+    // Loop over CNs
+    for (k = 0; k < 15; k++) {
+      // Loop over BNs
+      fprintf(fd, "            for (i=0;i<M;i++) {\n");
+      fprintf(fd,
+              "            ((simde__m128i*)bnProcBufRes)[%d + i ] = simde_mm_subs_epi8(((simde__m128i*)llrRes)[%d + i ], ((simde__m128i*) bnProcBuf)[%d + i]);\n",
+              (lut_startAddrBnGroups[idxBnGroup] >> 4) + k * cnOffsetInGroup,
+              (lut_startAddrBnGroupsLlr[idxBnGroup] >> 4),
+              (lut_startAddrBnGroups[idxBnGroup] >> 4) + k * cnOffsetInGroup);
+
+      fprintf(fd, "}\n");
+    }
+  }
+
+  // =====================================================================
+
+  fprintf(fd, "// Process group with 16 CNs \n");
+
+  if (lut_numBnInBnGroups[15] > 0) {
+    // If elements in group move to next address
+    idxBnGroup++;
+
+    // Number of groups of 16 BNs for parallel processing
+    fprintf(fd, "       M = (%d*Z + 15)>>4;\n", lut_numBnInBnGroups[15]);
+
+    // Set the offset to each CN within a group in terms of 16 Byte
+    cnOffsetInGroup = (lut_numBnInBnGroups[15] * NR_LDPC_ZMAX) >> 4;
+
+    // Set pointers to start of group 16
+
+    // Loop over CNs
+    for (k = 0; k < 16; k++) {
+      // Loop over BNs
+      fprintf(fd, "            for (i=0;i<M;i++) {\n");
+      fprintf(fd,
+              "            ((simde__m128i*)bnProcBufRes)[%d + i ] = simde_mm_subs_epi8(((simde__m128i*)llrRes)[%d + i ], ((simde__m128i*) bnProcBuf)[%d + i]);\n",
+              (lut_startAddrBnGroups[idxBnGroup] >> 4) + k * cnOffsetInGroup,
+              (lut_startAddrBnGroupsLlr[idxBnGroup] >> 4),
+              (lut_startAddrBnGroups[idxBnGroup] >> 4) + k * cnOffsetInGroup);
+
+      fprintf(fd, "}\n");
+    }
+  }
+
+  // =====================================================================
+
+  fprintf(fd, "// Process group with 17 CNs \n");
+
+  if (lut_numBnInBnGroups[16] > 0) {
+    // If elements in group move to next address
+    idxBnGroup++;
+
+    // Number of groups of 16 BNs for parallel processing
+    fprintf(fd, "       M = (%d*Z + 15)>>4;\n", lut_numBnInBnGroups[16]);
+
+    // Set the offset to each CN within a group in terms of 16 Byte
+    cnOffsetInGroup = (lut_numBnInBnGroups[16] * NR_LDPC_ZMAX) >> 4;
+
+    // Set pointers to start of group 17
+
+    // Loop over CNs
+    for (k = 0; k < 17; k++) {
+      // Loop over BNs
+      fprintf(fd, "            for (i=0;i<M;i++) {\n");
+      fprintf(fd,
+              "            ((simde__m128i*)bnProcBufRes)[%d + i ] = simde_mm_subs_epi8(((simde__m128i*)llrRes)[%d + i ], ((simde__m128i*) bnProcBuf)[%d + i]);\n",
+              (lut_startAddrBnGroups[idxBnGroup] >> 4) + k * cnOffsetInGroup,
+              (lut_startAddrBnGroupsLlr[idxBnGroup] >> 4),
+              (lut_startAddrBnGroups[idxBnGroup] >> 4) + k * cnOffsetInGroup);
+
+      fprintf(fd, "}\n");
+    }
+  }
+
+  // =====================================================================
+
+  fprintf(fd, "// Process group with 18 CNs \n");
+
+  if (lut_numBnInBnGroups[17] > 0) {
+    // If elements in group move to next address
+    idxBnGroup++;
+
+    // Number of groups of 16 BNs for parallel processing
+    fprintf(fd, "       M = (%d*Z + 15)>>4;\n", lut_numBnInBnGroups[17]);
+
+    // Set the offset to each CN within a group in terms of 16 Byte
+    cnOffsetInGroup = (lut_numBnInBnGroups[17] * NR_LDPC_ZMAX) >> 4;
+
+    // Set pointers to start of group 18
+
+    // Loop over CNs
+    for (k = 0; k < 18; k++) {
+      // Loop over BNs
+      fprintf(fd, "            for (i=0;i<M;i++) {\n");
+      fprintf(fd,
+              "            ((simde__m128i*)bnProcBufRes)[%d + i ] = simde_mm_subs_epi8(((simde__m128i*)llrRes)[%d + i ], ((simde__m128i*) bnProcBuf)[%d + i]);\n",
+              (lut_startAddrBnGroups[idxBnGroup] >> 4) + k * cnOffsetInGroup,
+              (lut_startAddrBnGroupsLlr[idxBnGroup] >> 4),
+              (lut_startAddrBnGroups[idxBnGroup] >> 4) + k * cnOffsetInGroup);
+
+      fprintf(fd, "}\n");
+    }
+  }
+
+  // =====================================================================
+
+  fprintf(fd, "// Process group with 19 CNs \n");
+
+  if (lut_numBnInBnGroups[18] > 0) {
+    // If elements in group move to next address
+    idxBnGroup++;
+
+    // Number of groups of 16 BNs for parallel processing
+    fprintf(fd, "       M = (%d*Z + 15)>>4;\n", lut_numBnInBnGroups[18]);
+
+    // Set the offset to each CN within a group in terms of 16 Byte
+    cnOffsetInGroup = (lut_numBnInBnGroups[18] * NR_LDPC_ZMAX) >> 4;
+
+    // Set pointers to start of group 19
+
+    // Loop over CNs
+    for (k = 0; k < 19; k++) {
+      // Loop over BNs
+      fprintf(fd, "            for (i=0;i<M;i++) {\n");
+      fprintf(fd,
+              "            ((simde__m128i*)bnProcBufRes)[%d + i ] = simde_mm_subs_epi8(((simde__m128i*)llrRes)[%d + i ], ((simde__m128i*) bnProcBuf)[%d + i]);\n",
+              (lut_startAddrBnGroups[idxBnGroup] >> 4) + k * cnOffsetInGroup,
+              (lut_startAddrBnGroupsLlr[idxBnGroup] >> 4),
+              (lut_startAddrBnGroups[idxBnGroup] >> 4) + k * cnOffsetInGroup);
+
+      fprintf(fd, "}\n");
+    }
+  }
+
+  // =====================================================================
+
+  fprintf(fd, "// Process group with 20 CNs \n");
+
+  if (lut_numBnInBnGroups[19] > 0) {
+    // If elements in group move to next address
+    idxBnGroup++;
+
+    // Number of groups of 16 BNs for parallel processing
+    fprintf(fd, "       M = (%d*Z + 15)>>4;\n", lut_numBnInBnGroups[19]);
+
+    // Set the offset to each CN within a group in terms of 16 Byte
+    cnOffsetInGroup = (lut_numBnInBnGroups[19] * NR_LDPC_ZMAX) >> 4;
+
+    // Set pointers to start of group 20
+
+    // Loop over CNs
+    for (k = 0; k < 20; k++) {
+      // Loop over BNs
+      fprintf(fd, "            for (i=0;i<M;i++) {\n");
+      fprintf(fd,
+              "            ((simde__m128i*)bnProcBufRes)[%d + i ] = simde_mm_subs_epi8(((simde__m128i*)llrRes)[%d + i ], ((simde__m128i*) bnProcBuf)[%d + i]);\n",
+              (lut_startAddrBnGroups[idxBnGroup] >> 4) + k * cnOffsetInGroup,
+              (lut_startAddrBnGroupsLlr[idxBnGroup] >> 4),
+              (lut_startAddrBnGroups[idxBnGroup] >> 4) + k * cnOffsetInGroup);
+
+      fprintf(fd, "}\n");
+    }
+  }
+
+  // =====================================================================
+
+  fprintf(fd, "// Process group with 21 CNs \n");
+
+  if (lut_numBnInBnGroups[20] > 0) {
+    // If elements in group move to next address
+    idxBnGroup++;
+
+    // Number of groups of 16 BNs for parallel processing
+    fprintf(fd, "       M = (%d*Z + 15)>>4;\n", lut_numBnInBnGroups[20]);
+
+    // Set the offset to each CN within a group in terms of 16 Byte
+    cnOffsetInGroup = (lut_numBnInBnGroups[20] * NR_LDPC_ZMAX) >> 4;
+
+    // Set pointers to start of group 21
+
+    // Loop over CNs
+    for (k = 0; k < 21; k++) {
+      // Loop over BNs
+      fprintf(fd, "            for (i=0;i<M;i++) {\n");
+      fprintf(fd,
+              "            ((simde__m128i*)bnProcBufRes)[%d + i ] = simde_mm_subs_epi8(((simde__m128i*)llrRes)[%d + i ], ((simde__m128i*) bnProcBuf)[%d + i]);\n",
+              (lut_startAddrBnGroups[idxBnGroup] >> 4) + k * cnOffsetInGroup,
+              (lut_startAddrBnGroupsLlr[idxBnGroup] >> 4),
+              (lut_startAddrBnGroups[idxBnGroup] >> 4) + k * cnOffsetInGroup);
+
+      fprintf(fd, "}\n");
+    }
+  }
+  // =====================================================================
+
+  fprintf(fd, "// Process group with 22 CNs \n");
+
+  if (lut_numBnInBnGroups[21] > 0) {
+    // If elements in group move to next address
+    idxBnGroup++;
+
+    // Number of groups of 16 BNs for parallel processing
+    fprintf(fd, "       M = (%d*Z + 15)>>4;\n", lut_numBnInBnGroups[21]);
+
+    // Set the offset to each CN within a group in terms of 16 Byte
+    cnOffsetInGroup = (lut_numBnInBnGroups[21] * NR_LDPC_ZMAX) >> 4;
+
+    // Set pointers to start of group 22
+
+    // Loop over CNs
+    for (k = 0; k < 22; k++) {
+      // Loop over BNs
+      fprintf(fd, "            for (i=0;i<M;i++) {\n");
+      fprintf(fd,
+              "            ((simde__m128i*)bnProcBufRes)[%d + i ] = simde_mm_subs_epi8(((simde__m128i*)llrRes)[%d + i ], ((simde__m128i*) bnProcBuf)[%d + i]);\n",
+              (lut_startAddrBnGroups[idxBnGroup] >> 4) + k * cnOffsetInGroup,
+              (lut_startAddrBnGroupsLlr[idxBnGroup] >> 4),
+              (lut_startAddrBnGroups[idxBnGroup] >> 4) + k * cnOffsetInGroup);
+
+      fprintf(fd, "}\n");
+    }
+  }
+
+  // =====================================================================
+
+  fprintf(fd, "// Process group with <23 CNs \n");
+
+  if (lut_numBnInBnGroups[22] > 0) {
+    // If elements in group move to next address
+    idxBnGroup++;
+
+    // Number of groups of 16 BNs for parallel processing
+    fprintf(fd, "       M = (%d*Z + 15)>>4;\n", lut_numBnInBnGroups[22]);
+
+    // Set the offset to each CN within a group in terms of 16 Byte
+    cnOffsetInGroup = (lut_numBnInBnGroups[22] * NR_LDPC_ZMAX) >> 4;
+
+    // Set pointers to start of group 23
+
+    // Loop over CNs
+    for (k = 0; k < 23; k++) {
+      // Loop over BNs
+      fprintf(fd, "            for (i=0;i<M;i++) {\n");
+      fprintf(fd,
+              "            ((simde__m128i*)bnProcBufRes)[%d + i ] = simde_mm_subs_epi8(((simde__m128i*)llrRes)[%d + i ], ((simde__m128i*) bnProcBuf)[%d + i]);\n",
+              (lut_startAddrBnGroups[idxBnGroup] >> 4) + k * cnOffsetInGroup,
+              (lut_startAddrBnGroupsLlr[idxBnGroup] >> 4),
+              (lut_startAddrBnGroups[idxBnGroup] >> 4) + k * cnOffsetInGroup);
+
+      fprintf(fd, "}\n");
+    }
+  }
+
+  // =====================================================================
+
+  fprintf(fd, "// Process group with 24 CNs \n");
+
+  if (lut_numBnInBnGroups[23] > 0) {
+    // If elements in group move to next address
+    idxBnGroup++;
+
+    // Number of groups of 16 BNs for parallel processing
+    fprintf(fd, "       M = (%d*Z + 15)>>4;\n", lut_numBnInBnGroups[23]);
+
+    // Set the offset to each CN within a group in terms of 16 Byte
+    cnOffsetInGroup = (lut_numBnInBnGroups[23] * NR_LDPC_ZMAX) >> 4;
+
+    // Set pointers to start of group 24
+
+    // Loop over CNs
+    for (k = 0; k < 24; k++) {
+      // Loop over BNs
+      fprintf(fd, "            for (i=0;i<M;i++) {\n");
+      fprintf(fd,
+              "            ((simde__m128i*)bnProcBufRes)[%d + i ] = simde_mm_subs_epi8(((simde__m128i*)llrRes)[%d + i ], ((simde__m128i*) bnProcBuf)[%d + i]);\n",
+              (lut_startAddrBnGroups[idxBnGroup] >> 4) + k * cnOffsetInGroup,
+              (lut_startAddrBnGroupsLlr[idxBnGroup] >> 4),
+              (lut_startAddrBnGroups[idxBnGroup] >> 4) + k * cnOffsetInGroup);
+
+      fprintf(fd, "}\n");
+    }
+  }
+
+  // =====================================================================
+
+  fprintf(fd, "// Process group with 25 CNs \n");
+
+  if (lut_numBnInBnGroups[24] > 0) {
+    // If elements in group move to next address
+    idxBnGroup++;
+
+    // Number of groups of 16 BNs for parallel processing
+    fprintf(fd, "       M = (%d*Z + 15)>>4;\n", lut_numBnInBnGroups[24]);
+
+    // Set the offset to each CN within a group in terms of 16 Byte
+    cnOffsetInGroup = (lut_numBnInBnGroups[24] * NR_LDPC_ZMAX) >> 4;
+
+    // Set pointers to start of group 25
+
+    // Loop over CNs
+    for (k = 0; k < 25; k++) {
+      // Loop over BNs
+      fprintf(fd, "            for (i=0;i<M;i++) {\n");
+      fprintf(fd,
+              "            ((simde__m128i*)bnProcBufRes)[%d + i ] = simde_mm_subs_epi8(((simde__m128i*)llrRes)[%d + i ], ((simde__m128i*) bnProcBuf)[%d + i]);\n",
+              (lut_startAddrBnGroups[idxBnGroup] >> 4) + k * cnOffsetInGroup,
+              (lut_startAddrBnGroupsLlr[idxBnGroup] >> 4),
+              (lut_startAddrBnGroups[idxBnGroup] >> 4) + k * cnOffsetInGroup);
+
+      fprintf(fd, "}\n");
+    }
+  }
+
+  // =====================================================================
+
+  fprintf(fd, "// Process group with 26 CNs \n");
+
+  if (lut_numBnInBnGroups[25] > 0) {
+    // If elements in group move to next address
+    idxBnGroup++;
+
+    // Number of groups of 16 BNs for parallel processing
+    fprintf(fd, "       M = (%d*Z + 15)>>4;\n", lut_numBnInBnGroups[25]);
+
+    // Set the offset to each CN within a group in terms of 16 Byte
+    cnOffsetInGroup = (lut_numBnInBnGroups[25] * NR_LDPC_ZMAX) >> 4;
+
+    // Set pointers to start of group 26
+
+    // Loop over CNs
+    for (k = 0; k < 26; k++) {
+      // Loop over BNs
+      fprintf(fd, "            for (i=0;i<M;i++) {\n");
+      fprintf(fd,
+              "            ((simde__m128i*)bnProcBufRes)[%d + i ] = simde_mm_subs_epi8(((simde__m128i*)llrRes)[%d + i ], ((simde__m128i*) bnProcBuf)[%d + i]);\n",
+              (lut_startAddrBnGroups[idxBnGroup] >> 4) + k * cnOffsetInGroup,
+              (lut_startAddrBnGroupsLlr[idxBnGroup] >> 4),
+              (lut_startAddrBnGroups[idxBnGroup] >> 4) + k * cnOffsetInGroup);
+
+      fprintf(fd, "}\n");
+    }
+  }
+
+  // =====================================================================
+
+  fprintf(fd, "// Process group with 27 CNs \n");
+
+  if (lut_numBnInBnGroups[26] > 0) {
+    // If elements in group move to next address
+    idxBnGroup++;
+
+    // Number of groups of 16 BNs for parallel processing
+    fprintf(fd, "       M = (%d*Z + 15)>>4;\n", lut_numBnInBnGroups[26]);
+
+    // Set the offset to each CN within a group in terms of 16 Byte
+    cnOffsetInGroup = (lut_numBnInBnGroups[26] * NR_LDPC_ZMAX) >> 4;
+
+    // Set pointers to start of group 27
+
+    // Loop over CNs
+    for (k = 0; k < 27; k++) {
+      // Loop over BNs
+      fprintf(fd, "            for (i=0;i<M;i++) {\n");
+      fprintf(fd,
+              "            ((simde__m128i*)bnProcBufRes)[%d + i ] = simde_mm_subs_epi8(((simde__m128i*)llrRes)[%d + i ], ((simde__m128i*) bnProcBuf)[%d + i]);\n",
+              (lut_startAddrBnGroups[idxBnGroup] >> 4) + k * cnOffsetInGroup,
+              (lut_startAddrBnGroupsLlr[idxBnGroup] >> 4),
+              (lut_startAddrBnGroups[idxBnGroup] >> 4) + k * cnOffsetInGroup);
+
+      fprintf(fd, "}\n");
+    }
+  }
+
+  // =====================================================================
+
+  fprintf(fd, "// Process group with 28 CNs \n");
+
+  if (lut_numBnInBnGroups[27] > 0) {
+    // If elements in group move to next address
+    idxBnGroup++;
+
+    // Number of groups of 16 BNs for parallel processing
+    fprintf(fd, "       M = (%d*Z + 15)>>4;\n", lut_numBnInBnGroups[27]);
+
+    // Set the offset to each CN within a group in terms of 16 Byte
+    cnOffsetInGroup = (lut_numBnInBnGroups[27] * NR_LDPC_ZMAX) >> 4;
+
+    // Set pointers to start of group 28
+
+    // Loop over CNs
+    for (k = 0; k < 28; k++) {
+      // Loop over BNs
+      fprintf(fd, "            for (i=0;i<M;i++) {\n");
+      fprintf(fd,
+              "            ((simde__m128i*)bnProcBufRes)[%d + i ] = simde_mm_subs_epi8(((simde__m128i*)llrRes)[%d + i ], ((simde__m128i*) bnProcBuf)[%d + i]);\n",
+              (lut_startAddrBnGroups[idxBnGroup] >> 4) + k * cnOffsetInGroup,
+              (lut_startAddrBnGroupsLlr[idxBnGroup] >> 4),
+              (lut_startAddrBnGroups[idxBnGroup] >> 4) + k * cnOffsetInGroup);
+
+      fprintf(fd, "}\n");
+    }
+  }
+
+  // =====================================================================
+
+  fprintf(fd, "// Process group with 29 CNs \n");
+
+  if (lut_numBnInBnGroups[28] > 0) {
+    // If elements in group move to next address
+    idxBnGroup++;
+
+    // Number of groups of 16 BNs for parallel processing
+    fprintf(fd, "       M = (%d*Z + 15)>>4;\n", lut_numBnInBnGroups[28]);
+
+    // Set the offset to each CN within a group in terms of 16 Byte
+    cnOffsetInGroup = (lut_numBnInBnGroups[28] * NR_LDPC_ZMAX) >> 4;
+
+    // Set pointers to start of group 29
+
+    // Loop over CNs
+    for (k = 0; k < 29; k++) {
+      // Loop over BNs
+      fprintf(fd, "            for (i=0;i<M;i++) {\n");
+      fprintf(fd,
+              "            ((simde__m128i*)bnProcBufRes)[%d + i ] = simde_mm_subs_epi8(((simde__m128i*)llrRes)[%d + i ], ((simde__m128i*) bnProcBuf)[%d + i]);\n",
+              (lut_startAddrBnGroups[idxBnGroup] >> 4) + k * cnOffsetInGroup,
+              (lut_startAddrBnGroupsLlr[idxBnGroup] >> 4),
+              (lut_startAddrBnGroups[idxBnGroup] >> 4) + k * cnOffsetInGroup);
+
+      fprintf(fd, "}\n");
+    }
+  }
+
+  // =====================================================================
+
+  fprintf(fd, "// Process group with 30 CNs \n");
+
+  if (lut_numBnInBnGroups[29] > 0) {
+    // If elements in group move to next address
+    idxBnGroup++;
+
+    // Number of groups of 16 BNs for parallel processing
+    fprintf(fd, "       M = (%d*Z + 15)>>4;\n", lut_numBnInBnGroups[29]);
+
+    // Set the offset to each CN within a group in terms of 16 Byte
+    cnOffsetInGroup = (lut_numBnInBnGroups[29] * NR_LDPC_ZMAX) >> 4;
+
+    // Set pointers to start of group 30
+
+    // Loop over CNs
+    for (k = 0; k < 30; k++) {
+      // Loop over BNs
+      fprintf(fd, "            for (i=0;i<M;i++) {\n");
+      fprintf(fd,
+              "            ((simde__m128i*)bnProcBufRes)[%d + i ] = simde_mm_subs_epi8(((simde__m128i*)llrRes)[%d + i ], ((simde__m128i*) bnProcBuf)[%d + i]);\n",
+              (lut_startAddrBnGroups[idxBnGroup] >> 4) + k * cnOffsetInGroup,
+              (lut_startAddrBnGroupsLlr[idxBnGroup] >> 4),
+              (lut_startAddrBnGroups[idxBnGroup] >> 4) + k * cnOffsetInGroup);
+
+      fprintf(fd, "}\n");
+    }
+  }
+
+  fprintf(fd, "}\n");
+  fclose(fd);
+} // end of nrLDPC_bnProc_BG1_generator_128
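+
+/* Usage sketch (hypothetical driver, not part of this file's callers):
+ *   for (int r = 0; r < 3; r++)
+ *     nrLDPC_bnProc_BG1_generator_128(dir, r);
+ * dir must already contain a bnProc128/ subdirectory, per the snprintf above.
+ */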
diff --git a/openair1/PHY/CODING/nrLDPC_decoder/nrLDPC_tools/generator_bnProc/bnProc_gen_BG1_avx2.c b/openair1/PHY/CODING/nrLDPC_decoder/nrLDPC_tools/generator_bnProc/bnProc_gen_BG1_avx2.c
index bfc22c0524dd76c67462eba76dc0d389ab50cf54..0c48812974b5b2e74cbe8dc8afbd342909fbbf4f 100644
--- a/openair1/PHY/CODING/nrLDPC_decoder/nrLDPC_tools/generator_bnProc/bnProc_gen_BG1_avx2.c
+++ b/openair1/PHY/CODING/nrLDPC_decoder/nrLDPC_tools/generator_bnProc/bnProc_gen_BG1_avx2.c
@@ -106,7 +106,7 @@ void nrLDPC_bnProc_BG1_generator_AVX2(const char* dir, int R)
       // Loop over BNs
       fprintf(fd, "            for (i=0;i<M;i++) {\n");
       fprintf(fd,
-              "            ((__m256i*)bnProcBufRes)[%d + i ] = simde_mm256_subs_epi8(((__m256i*)llrRes)[%d + i ], ((__m256i*) bnProcBuf)[%d + i]);\n",
+              "            ((simde__m256i*)bnProcBufRes)[%d + i ] = simde_mm256_subs_epi8(((simde__m256i*)llrRes)[%d + i ], ((simde__m256i*) bnProcBuf)[%d + i]);\n",
               (lut_startAddrBnGroups[idxBnGroup] >> 5) + k * cnOffsetInGroup,
               (lut_startAddrBnGroupsLlr[idxBnGroup] >> 5),
               (lut_startAddrBnGroups[idxBnGroup] >> 5) + k * cnOffsetInGroup);
@@ -130,13 +130,13 @@ void nrLDPC_bnProc_BG1_generator_AVX2(const char* dir, int R)
     cnOffsetInGroup = (lut_numBnInBnGroups[2] * NR_LDPC_ZMAX) >> 5;
 
     // Set pointers to start of group 2
-    // fprintf(fd,"    ((__m256i*) bnProcBuf)     = ((__m256i*) &bnProcBuf)    [%d];\n",lut_startAddrBnGroups[idxBnGroup]);
+    // fprintf(fd,"    ((simde__m256i*) bnProcBuf)     = ((simde__m256i*) &bnProcBuf)    [%d];\n",lut_startAddrBnGroups[idxBnGroup]);
 
     for (k = 0; k < 3; k++) {
       // Loop over BNs
       fprintf(fd, "            for (i=0;i<M;i++) {\n");
       fprintf(fd,
-              "            ((__m256i*)bnProcBufRes)[%d + i ] = simde_mm256_subs_epi8(((__m256i*)llrRes)[%d + i ], ((__m256i*) bnProcBuf)[%d + i]);\n",
+              "            ((simde__m256i*)bnProcBufRes)[%d + i ] = simde_mm256_subs_epi8(((simde__m256i*)llrRes)[%d + i ], ((simde__m256i*) bnProcBuf)[%d + i]);\n",
               (lut_startAddrBnGroups[idxBnGroup] >> 5) + k * cnOffsetInGroup,
               (lut_startAddrBnGroupsLlr[idxBnGroup] >> 5),
               (lut_startAddrBnGroups[idxBnGroup] >> 5) + k * cnOffsetInGroup);
@@ -165,7 +165,7 @@ void nrLDPC_bnProc_BG1_generator_AVX2(const char* dir, int R)
       // Loop over BNs
       fprintf(fd, "            for (i=0;i<M;i++) {\n");
       fprintf(fd,
-              "            ((__m256i*)bnProcBufRes)[%d + i ] = simde_mm256_subs_epi8(((__m256i*)llrRes)[%d + i ], ((__m256i*) bnProcBuf)[%d + i]);\n",
+              "            ((simde__m256i*)bnProcBufRes)[%d + i ] = simde_mm256_subs_epi8(((simde__m256i*)llrRes)[%d + i ], ((simde__m256i*) bnProcBuf)[%d + i]);\n",
               ((lut_startAddrBnGroups[idxBnGroup] >> 5) + k * cnOffsetInGroup),
               (lut_startAddrBnGroupsLlr[idxBnGroup] >> 5),
               ((lut_startAddrBnGroups[idxBnGroup] >> 5) + k * cnOffsetInGroup));
@@ -195,7 +195,7 @@ void nrLDPC_bnProc_BG1_generator_AVX2(const char* dir, int R)
       // Loop over BNs
       fprintf(fd, "            for (i=0;i<M;i++) {\n");
       fprintf(fd,
-              "            ((__m256i*)bnProcBufRes)[%d + i ] = simde_mm256_subs_epi8(((__m256i*)llrRes)[%d + i ], ((__m256i*) bnProcBuf)[%d + i]);\n",
+              "            ((simde__m256i*)bnProcBufRes)[%d + i ] = simde_mm256_subs_epi8(((simde__m256i*)llrRes)[%d + i ], ((simde__m256i*) bnProcBuf)[%d + i]);\n",
               (lut_startAddrBnGroups[idxBnGroup] >> 5) + k * cnOffsetInGroup,
               (lut_startAddrBnGroupsLlr[idxBnGroup] >> 5),
               (lut_startAddrBnGroups[idxBnGroup] >> 5) + k * cnOffsetInGroup);
@@ -227,7 +227,7 @@ void nrLDPC_bnProc_BG1_generator_AVX2(const char* dir, int R)
       // Loop over BNs
       fprintf(fd, "            for (i=0;i<M;i++) {\n");
       fprintf(fd,
-              "            ((__m256i*)bnProcBufRes)[%d + i ] = simde_mm256_subs_epi8(((__m256i*)llrRes)[%d + i ], ((__m256i*) bnProcBuf)[%d + i]);\n",
+              "            ((simde__m256i*)bnProcBufRes)[%d + i ] = simde_mm256_subs_epi8(((simde__m256i*)llrRes)[%d + i ], ((simde__m256i*) bnProcBuf)[%d + i]);\n",
               (lut_startAddrBnGroups[idxBnGroup] >> 5) + k * cnOffsetInGroup,
               (lut_startAddrBnGroupsLlr[idxBnGroup] >> 5),
               (lut_startAddrBnGroups[idxBnGroup] >> 5) + k * cnOffsetInGroup);
@@ -259,7 +259,7 @@ void nrLDPC_bnProc_BG1_generator_AVX2(const char* dir, int R)
       // Loop over BNs
       fprintf(fd, "            for (i=0;i<M;i++) {\n");
       fprintf(fd,
-              "            ((__m256i*)bnProcBufRes)[%d + i ] = simde_mm256_subs_epi8(((__m256i*)llrRes)[%d + i ], ((__m256i*) bnProcBuf)[%d + i]);\n",
+              "            ((simde__m256i*)bnProcBufRes)[%d + i ] = simde_mm256_subs_epi8(((simde__m256i*)llrRes)[%d + i ], ((simde__m256i*) bnProcBuf)[%d + i]);\n",
               (lut_startAddrBnGroups[idxBnGroup] >> 5) + k * cnOffsetInGroup,
               (lut_startAddrBnGroupsLlr[idxBnGroup] >> 5),
               (lut_startAddrBnGroups[idxBnGroup] >> 5) + k * cnOffsetInGroup);
@@ -291,7 +291,7 @@ void nrLDPC_bnProc_BG1_generator_AVX2(const char* dir, int R)
       // Loop over BNs
       fprintf(fd, "            for (i=0;i<M;i++) {\n");
       fprintf(fd,
-              "            ((__m256i*)bnProcBufRes)[%d + i ] = simde_mm256_subs_epi8(((__m256i*)llrRes)[%d + i ], ((__m256i*) bnProcBuf)[%d + i]);\n",
+              "            ((simde__m256i*)bnProcBufRes)[%d + i ] = simde_mm256_subs_epi8(((simde__m256i*)llrRes)[%d + i ], ((simde__m256i*) bnProcBuf)[%d + i]);\n",
               (lut_startAddrBnGroups[idxBnGroup] >> 5) + k * cnOffsetInGroup,
               (lut_startAddrBnGroupsLlr[idxBnGroup] >> 5),
               (lut_startAddrBnGroups[idxBnGroup] >> 5) + k * cnOffsetInGroup);
@@ -323,7 +323,7 @@ void nrLDPC_bnProc_BG1_generator_AVX2(const char* dir, int R)
       // Loop over BNs
       fprintf(fd, "            for (i=0;i<M;i++) {\n");
       fprintf(fd,
-              "            ((__m256i*)bnProcBufRes)[%d + i ] = simde_mm256_subs_epi8(((__m256i*)llrRes)[%d + i ], ((__m256i*) bnProcBuf)[%d + i]);\n",
+              "            ((simde__m256i*)bnProcBufRes)[%d + i ] = simde_mm256_subs_epi8(((simde__m256i*)llrRes)[%d + i ], ((simde__m256i*) bnProcBuf)[%d + i]);\n",
               (lut_startAddrBnGroups[idxBnGroup] >> 5) + k * cnOffsetInGroup,
               (lut_startAddrBnGroupsLlr[idxBnGroup] >> 5),
               (lut_startAddrBnGroups[idxBnGroup] >> 5) + k * cnOffsetInGroup);
@@ -355,7 +355,7 @@ void nrLDPC_bnProc_BG1_generator_AVX2(const char* dir, int R)
       // Loop over BNs
       fprintf(fd, "            for (i=0;i<M;i++) {\n");
       fprintf(fd,
-              "            ((__m256i*)bnProcBufRes)[%d + i ] = simde_mm256_subs_epi8(((__m256i*)llrRes)[%d + i ], ((__m256i*) bnProcBuf)[%d + i]);\n",
+              "            ((simde__m256i*)bnProcBufRes)[%d + i ] = simde_mm256_subs_epi8(((simde__m256i*)llrRes)[%d + i ], ((simde__m256i*) bnProcBuf)[%d + i]);\n",
               (lut_startAddrBnGroups[idxBnGroup] >> 5) + k * cnOffsetInGroup,
               (lut_startAddrBnGroupsLlr[idxBnGroup] >> 5),
               (lut_startAddrBnGroups[idxBnGroup] >> 5) + k * cnOffsetInGroup);
@@ -386,7 +386,7 @@ void nrLDPC_bnProc_BG1_generator_AVX2(const char* dir, int R)
       // Loop over BNs
       fprintf(fd, "            for (i=0;i<M;i++) {\n");
       fprintf(fd,
-              "            ((__m256i*)bnProcBufRes)[%d + i ] = simde_mm256_subs_epi8(((__m256i*)llrRes)[%d + i ], ((__m256i*) bnProcBuf)[%d + i]);\n",
+              "            ((simde__m256i*)bnProcBufRes)[%d + i ] = simde_mm256_subs_epi8(((simde__m256i*)llrRes)[%d + i ], ((simde__m256i*) bnProcBuf)[%d + i]);\n",
               (lut_startAddrBnGroups[idxBnGroup] >> 5) + k * cnOffsetInGroup,
               (lut_startAddrBnGroupsLlr[idxBnGroup] >> 5),
               (lut_startAddrBnGroups[idxBnGroup] >> 5) + k * cnOffsetInGroup);
@@ -416,7 +416,7 @@ void nrLDPC_bnProc_BG1_generator_AVX2(const char* dir, int R)
       // Loop over BNs
       fprintf(fd, "            for (i=0;i<M;i++) {\n");
       fprintf(fd,
-              "            ((__m256i*)bnProcBufRes)[%d + i ] = simde_mm256_subs_epi8(((__m256i*)llrRes)[%d + i ], ((__m256i*) bnProcBuf)[%d + i]);\n",
+              "            ((simde__m256i*)bnProcBufRes)[%d + i ] = simde_mm256_subs_epi8(((simde__m256i*)llrRes)[%d + i ], ((simde__m256i*) bnProcBuf)[%d + i]);\n",
               (lut_startAddrBnGroups[idxBnGroup] >> 5) + k * cnOffsetInGroup,
               (lut_startAddrBnGroupsLlr[idxBnGroup] >> 5),
               (lut_startAddrBnGroups[idxBnGroup] >> 5) + k * cnOffsetInGroup);
@@ -447,7 +447,7 @@ void nrLDPC_bnProc_BG1_generator_AVX2(const char* dir, int R)
       // Loop over BNs
       fprintf(fd, "            for (i=0;i<M;i++) {\n");
       fprintf(fd,
-              "            ((__m256i*)bnProcBufRes)[%d + i ] = simde_mm256_subs_epi8(((__m256i*)llrRes)[%d + i ], ((__m256i*) bnProcBuf)[%d + i]);\n",
+              "            ((simde__m256i*)bnProcBufRes)[%d + i ] = simde_mm256_subs_epi8(((simde__m256i*)llrRes)[%d + i ], ((simde__m256i*) bnProcBuf)[%d + i]);\n",
               (lut_startAddrBnGroups[idxBnGroup] >> 5) + k * cnOffsetInGroup,
               (lut_startAddrBnGroupsLlr[idxBnGroup] >> 5),
               (lut_startAddrBnGroups[idxBnGroup] >> 5) + k * cnOffsetInGroup);
@@ -478,7 +478,7 @@ void nrLDPC_bnProc_BG1_generator_AVX2(const char* dir, int R)
       // Loop over BNs
       fprintf(fd, "            for (i=0;i<M;i++) {\n");
       fprintf(fd,
-              "            ((__m256i*)bnProcBufRes)[%d + i ] = simde_mm256_subs_epi8(((__m256i*)llrRes)[%d + i ], ((__m256i*) bnProcBuf)[%d + i]);\n",
+              "            ((simde__m256i*)bnProcBufRes)[%d + i ] = simde_mm256_subs_epi8(((simde__m256i*)llrRes)[%d + i ], ((simde__m256i*) bnProcBuf)[%d + i]);\n",
               (lut_startAddrBnGroups[idxBnGroup] >> 5) + k * cnOffsetInGroup,
               (lut_startAddrBnGroupsLlr[idxBnGroup] >> 5),
               (lut_startAddrBnGroups[idxBnGroup] >> 5) + k * cnOffsetInGroup);
@@ -509,7 +509,7 @@ void nrLDPC_bnProc_BG1_generator_AVX2(const char* dir, int R)
       // Loop over BNs
       fprintf(fd, "            for (i=0;i<M;i++) {\n");
       fprintf(fd,
-              "            ((__m256i*)bnProcBufRes)[%d + i ] = simde_mm256_subs_epi8(((__m256i*)llrRes)[%d + i ], ((__m256i*) bnProcBuf)[%d + i]);\n",
+              "            ((simde__m256i*)bnProcBufRes)[%d + i ] = simde_mm256_subs_epi8(((simde__m256i*)llrRes)[%d + i ], ((simde__m256i*) bnProcBuf)[%d + i]);\n",
               (lut_startAddrBnGroups[idxBnGroup] >> 5) + k * cnOffsetInGroup,
               (lut_startAddrBnGroupsLlr[idxBnGroup] >> 5),
               (lut_startAddrBnGroups[idxBnGroup] >> 5) + k * cnOffsetInGroup);
@@ -540,7 +540,7 @@ void nrLDPC_bnProc_BG1_generator_AVX2(const char* dir, int R)
       // Loop over BNs
       fprintf(fd, "            for (i=0;i<M;i++) {\n");
       fprintf(fd,
-              "            ((__m256i*)bnProcBufRes)[%d + i ] = simde_mm256_subs_epi8(((__m256i*)llrRes)[%d + i ], ((__m256i*) bnProcBuf)[%d + i]);\n",
+              "            ((simde__m256i*)bnProcBufRes)[%d + i ] = simde_mm256_subs_epi8(((simde__m256i*)llrRes)[%d + i ], ((simde__m256i*) bnProcBuf)[%d + i]);\n",
               (lut_startAddrBnGroups[idxBnGroup] >> 5) + k * cnOffsetInGroup,
               (lut_startAddrBnGroupsLlr[idxBnGroup] >> 5),
               (lut_startAddrBnGroups[idxBnGroup] >> 5) + k * cnOffsetInGroup);
@@ -574,7 +574,7 @@ void nrLDPC_bnProc_BG1_generator_AVX2(const char* dir, int R)
       // Loop over BNs
       fprintf(fd, "            for (i=0;i<M;i++) {\n");
       fprintf(fd,
-              "            ((__m256i*)bnProcBufRes)[%d + i ] = simde_mm256_subs_epi8(((__m256i*)llrRes)[%d + i ], ((__m256i*) bnProcBuf)[%d + i]);\n",
+              "            ((simde__m256i*)bnProcBufRes)[%d + i ] = simde_mm256_subs_epi8(((simde__m256i*)llrRes)[%d + i ], ((simde__m256i*) bnProcBuf)[%d + i]);\n",
               (lut_startAddrBnGroups[idxBnGroup] >> 5) + k * cnOffsetInGroup,
               (lut_startAddrBnGroupsLlr[idxBnGroup] >> 5),
               (lut_startAddrBnGroups[idxBnGroup] >> 5) + k * cnOffsetInGroup);
@@ -607,7 +607,7 @@ void nrLDPC_bnProc_BG1_generator_AVX2(const char* dir, int R)
       // Loop over BNs
       fprintf(fd, "            for (i=0;i<M;i++) {\n");
       fprintf(fd,
-              "            ((__m256i*)bnProcBufRes)[%d + i ] = simde_mm256_subs_epi8(((__m256i*)llrRes)[%d + i ], ((__m256i*) bnProcBuf)[%d + i]);\n",
+              "            ((simde__m256i*)bnProcBufRes)[%d + i ] = simde_mm256_subs_epi8(((simde__m256i*)llrRes)[%d + i ], ((simde__m256i*) bnProcBuf)[%d + i]);\n",
               (lut_startAddrBnGroups[idxBnGroup] >> 5) + k * cnOffsetInGroup,
               (lut_startAddrBnGroupsLlr[idxBnGroup] >> 5),
               (lut_startAddrBnGroups[idxBnGroup] >> 5) + k * cnOffsetInGroup);
@@ -638,7 +638,7 @@ void nrLDPC_bnProc_BG1_generator_AVX2(const char* dir, int R)
       // Loop over BNs
       fprintf(fd, "            for (i=0;i<M;i++) {\n");
       fprintf(fd,
-              "            ((__m256i*)bnProcBufRes)[%d + i ] = simde_mm256_subs_epi8(((__m256i*)llrRes)[%d + i ], ((__m256i*) bnProcBuf)[%d + i]);\n",
+              "            ((simde__m256i*)bnProcBufRes)[%d + i ] = simde_mm256_subs_epi8(((simde__m256i*)llrRes)[%d + i ], ((simde__m256i*) bnProcBuf)[%d + i]);\n",
               (lut_startAddrBnGroups[idxBnGroup] >> 5) + k * cnOffsetInGroup,
               (lut_startAddrBnGroupsLlr[idxBnGroup] >> 5),
               (lut_startAddrBnGroups[idxBnGroup] >> 5) + k * cnOffsetInGroup);
@@ -669,7 +669,7 @@ void nrLDPC_bnProc_BG1_generator_AVX2(const char* dir, int R)
       // Loop over BNs
       fprintf(fd, "            for (i=0;i<M;i++) {\n");
       fprintf(fd,
-              "            ((__m256i*)bnProcBufRes)[%d + i ] = simde_mm256_subs_epi8(((__m256i*)llrRes)[%d + i ], ((__m256i*) bnProcBuf)[%d + i]);\n",
+              "            ((simde__m256i*)bnProcBufRes)[%d + i ] = simde_mm256_subs_epi8(((simde__m256i*)llrRes)[%d + i ], ((simde__m256i*) bnProcBuf)[%d + i]);\n",
               (lut_startAddrBnGroups[idxBnGroup] >> 5) + k * cnOffsetInGroup,
               (lut_startAddrBnGroupsLlr[idxBnGroup] >> 5),
               (lut_startAddrBnGroups[idxBnGroup] >> 5) + k * cnOffsetInGroup);
@@ -700,7 +700,7 @@ void nrLDPC_bnProc_BG1_generator_AVX2(const char* dir, int R)
       // Loop over BNs
       fprintf(fd, "            for (i=0;i<M;i++) {\n");
       fprintf(fd,
-              "            ((__m256i*)bnProcBufRes)[%d + i ] = simde_mm256_subs_epi8(((__m256i*)llrRes)[%d + i ], ((__m256i*) bnProcBuf)[%d + i]);\n",
+              "            ((simde__m256i*)bnProcBufRes)[%d + i ] = simde_mm256_subs_epi8(((simde__m256i*)llrRes)[%d + i ], ((simde__m256i*) bnProcBuf)[%d + i]);\n",
               (lut_startAddrBnGroups[idxBnGroup] >> 5) + k * cnOffsetInGroup,
               (lut_startAddrBnGroupsLlr[idxBnGroup] >> 5),
               (lut_startAddrBnGroups[idxBnGroup] >> 5) + k * cnOffsetInGroup);
@@ -730,7 +730,7 @@ void nrLDPC_bnProc_BG1_generator_AVX2(const char* dir, int R)
       // Loop over BNs
       fprintf(fd, "            for (i=0;i<M;i++) {\n");
       fprintf(fd,
-              "            ((__m256i*)bnProcBufRes)[%d + i ] = simde_mm256_subs_epi8(((__m256i*)llrRes)[%d + i ], ((__m256i*) bnProcBuf)[%d + i]);\n",
+              "            ((simde__m256i*)bnProcBufRes)[%d + i ] = simde_mm256_subs_epi8(((simde__m256i*)llrRes)[%d + i ], ((simde__m256i*) bnProcBuf)[%d + i]);\n",
               (lut_startAddrBnGroups[idxBnGroup] >> 5) + k * cnOffsetInGroup,
               (lut_startAddrBnGroupsLlr[idxBnGroup] >> 5),
               (lut_startAddrBnGroups[idxBnGroup] >> 5) + k * cnOffsetInGroup);
@@ -761,7 +761,7 @@ void nrLDPC_bnProc_BG1_generator_AVX2(const char* dir, int R)
       // Loop over BNs
       fprintf(fd, "            for (i=0;i<M;i++) {\n");
       fprintf(fd,
-              "            ((__m256i*)bnProcBufRes)[%d + i ] = simde_mm256_subs_epi8(((__m256i*)llrRes)[%d + i ], ((__m256i*) bnProcBuf)[%d + i]);\n",
+              "            ((simde__m256i*)bnProcBufRes)[%d + i ] = simde_mm256_subs_epi8(((simde__m256i*)llrRes)[%d + i ], ((simde__m256i*) bnProcBuf)[%d + i]);\n",
               (lut_startAddrBnGroups[idxBnGroup] >> 5) + k * cnOffsetInGroup,
               (lut_startAddrBnGroupsLlr[idxBnGroup] >> 5),
               (lut_startAddrBnGroups[idxBnGroup] >> 5) + k * cnOffsetInGroup);
@@ -794,7 +794,7 @@ void nrLDPC_bnProc_BG1_generator_AVX2(const char* dir, int R)
       // Loop over BNs
       fprintf(fd, "            for (i=0;i<M;i++) {\n");
       fprintf(fd,
-              "            ((__m256i*)bnProcBufRes)[%d + i ] = simde_mm256_subs_epi8(((__m256i*)llrRes)[%d + i ], ((__m256i*) bnProcBuf)[%d + i]);\n",
+              "            ((simde__m256i*)bnProcBufRes)[%d + i ] = simde_mm256_subs_epi8(((simde__m256i*)llrRes)[%d + i ], ((simde__m256i*) bnProcBuf)[%d + i]);\n",
               (lut_startAddrBnGroups[idxBnGroup] >> 5) + k * cnOffsetInGroup,
               (lut_startAddrBnGroupsLlr[idxBnGroup] >> 5),
               (lut_startAddrBnGroups[idxBnGroup] >> 5) + k * cnOffsetInGroup);
@@ -825,7 +825,7 @@ void nrLDPC_bnProc_BG1_generator_AVX2(const char* dir, int R)
       // Loop over BNs
       fprintf(fd, "            for (i=0;i<M;i++) {\n");
       fprintf(fd,
-              "            ((__m256i*)bnProcBufRes)[%d + i ] = simde_mm256_subs_epi8(((__m256i*)llrRes)[%d + i ], ((__m256i*) bnProcBuf)[%d + i]);\n",
+              "            ((simde__m256i*)bnProcBufRes)[%d + i ] = simde_mm256_subs_epi8(((simde__m256i*)llrRes)[%d + i ], ((simde__m256i*) bnProcBuf)[%d + i]);\n",
               (lut_startAddrBnGroups[idxBnGroup] >> 5) + k * cnOffsetInGroup,
               (lut_startAddrBnGroupsLlr[idxBnGroup] >> 5),
               (lut_startAddrBnGroups[idxBnGroup] >> 5) + k * cnOffsetInGroup);
@@ -856,7 +856,7 @@ void nrLDPC_bnProc_BG1_generator_AVX2(const char* dir, int R)
       // Loop over BNs
       fprintf(fd, "            for (i=0;i<M;i++) {\n");
       fprintf(fd,
-              "            ((__m256i*)bnProcBufRes)[%d + i ] = simde_mm256_subs_epi8(((__m256i*)llrRes)[%d + i ], ((__m256i*) bnProcBuf)[%d + i]);\n",
+              "            ((simde__m256i*)bnProcBufRes)[%d + i ] = simde_mm256_subs_epi8(((simde__m256i*)llrRes)[%d + i ], ((simde__m256i*) bnProcBuf)[%d + i]);\n",
               (lut_startAddrBnGroups[idxBnGroup] >> 5) + k * cnOffsetInGroup,
               (lut_startAddrBnGroupsLlr[idxBnGroup] >> 5),
               (lut_startAddrBnGroups[idxBnGroup] >> 5) + k * cnOffsetInGroup);
@@ -889,7 +889,7 @@ void nrLDPC_bnProc_BG1_generator_AVX2(const char* dir, int R)
       // Loop over BNs
       fprintf(fd, "            for (i=0;i<M;i++) {\n");
       fprintf(fd,
-              "            ((__m256i*)bnProcBufRes)[%d + i ] = simde_mm256_subs_epi8(((__m256i*)llrRes)[%d + i ], ((__m256i*) bnProcBuf)[%d + i]);\n",
+              "            ((simde__m256i*)bnProcBufRes)[%d + i ] = simde_mm256_subs_epi8(((simde__m256i*)llrRes)[%d + i ], ((simde__m256i*) bnProcBuf)[%d + i]);\n",
               (lut_startAddrBnGroups[idxBnGroup] >> 5) + k * cnOffsetInGroup,
               (lut_startAddrBnGroupsLlr[idxBnGroup] >> 5),
               (lut_startAddrBnGroups[idxBnGroup] >> 5) + k * cnOffsetInGroup);
@@ -922,7 +922,7 @@ void nrLDPC_bnProc_BG1_generator_AVX2(const char* dir, int R)
       // Loop over BNs
       fprintf(fd, "            for (i=0;i<M;i++) {\n");
       fprintf(fd,
-              "            ((__m256i*)bnProcBufRes)[%d + i ] = simde_mm256_subs_epi8(((__m256i*)llrRes)[%d + i ], ((__m256i*) bnProcBuf)[%d + i]);\n",
+              "            ((simde__m256i*)bnProcBufRes)[%d + i ] = simde_mm256_subs_epi8(((simde__m256i*)llrRes)[%d + i ], ((simde__m256i*) bnProcBuf)[%d + i]);\n",
               (lut_startAddrBnGroups[idxBnGroup] >> 5) + k * cnOffsetInGroup,
               (lut_startAddrBnGroupsLlr[idxBnGroup] >> 5),
               (lut_startAddrBnGroups[idxBnGroup] >> 5) + k * cnOffsetInGroup);
@@ -955,7 +955,7 @@ void nrLDPC_bnProc_BG1_generator_AVX2(const char* dir, int R)
       // Loop over BNs
       fprintf(fd, "            for (i=0;i<M;i++) {\n");
       fprintf(fd,
-              "            ((__m256i*)bnProcBufRes)[%d + i ] = simde_mm256_subs_epi8(((__m256i*)llrRes)[%d + i ], ((__m256i*) bnProcBuf)[%d + i]);\n",
+              "            ((simde__m256i*)bnProcBufRes)[%d + i ] = simde_mm256_subs_epi8(((simde__m256i*)llrRes)[%d + i ], ((simde__m256i*) bnProcBuf)[%d + i]);\n",
               (lut_startAddrBnGroups[idxBnGroup] >> 5) + k * cnOffsetInGroup,
               (lut_startAddrBnGroupsLlr[idxBnGroup] >> 5),
               (lut_startAddrBnGroups[idxBnGroup] >> 5) + k * cnOffsetInGroup);
@@ -988,7 +988,7 @@ void nrLDPC_bnProc_BG1_generator_AVX2(const char* dir, int R)
       // Loop over BNs
       fprintf(fd, "            for (i=0;i<M;i++) {\n");
       fprintf(fd,
-              "            ((__m256i*)bnProcBufRes)[%d + i ] = simde_mm256_subs_epi8(((__m256i*)llrRes)[%d + i ], ((__m256i*) bnProcBuf)[%d + i]);\n",
+              "            ((simde__m256i*)bnProcBufRes)[%d + i ] = simde_mm256_subs_epi8(((simde__m256i*)llrRes)[%d + i ], ((simde__m256i*) bnProcBuf)[%d + i]);\n",
               (lut_startAddrBnGroups[idxBnGroup] >> 5) + k * cnOffsetInGroup,
               (lut_startAddrBnGroupsLlr[idxBnGroup] >> 5),
               (lut_startAddrBnGroups[idxBnGroup] >> 5) + k * cnOffsetInGroup);
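The change repeated throughout the hunks above only touches the format strings that get fprintf()ed into the generated headers: the simde_mm256_subs_epi8 call was already portable, but the raw __m256i cast type exists only on x86, so headers generated with it could not compile on aarch64 hosts. A minimal sketch of the fixed pattern, assuming SIMDE's standard header layout; the offsets 3 and 7 are placeholders for the LUT-derived constants the generator bakes in:

    #include <stdint.h>
    #include <simde/x86/avx2.h> /* assumption: SIMDE headers on the include path */

    /* Illustrative only: the shape of one generated subtraction line. With
     * simde__m256i the cast also resolves through SIMDE on non-x86 hosts. */
    static inline void bn_sub_sketch(int8_t *bnProcBufRes, int8_t *llrRes,
                                     int8_t *bnProcBuf, uint32_t M)
    {
      for (uint32_t i = 0; i < M; i++)
        ((simde__m256i *)bnProcBufRes)[3 + i] =
            simde_mm256_subs_epi8(((simde__m256i *)llrRes)[7 + i],
                                  ((simde__m256i *)bnProcBuf)[3 + i]);
    }
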
diff --git a/openair1/PHY/CODING/nrLDPC_decoder/nrLDPC_tools/generator_bnProc/bnProc_gen_BG2_128.c b/openair1/PHY/CODING/nrLDPC_decoder/nrLDPC_tools/generator_bnProc/bnProc_gen_BG2_128.c
new file mode 100644
index 0000000000000000000000000000000000000000..c6dede94f655a56113f1b43ebb681860a46177ba
--- /dev/null
+++ b/openair1/PHY/CODING/nrLDPC_decoder/nrLDPC_tools/generator_bnProc/bnProc_gen_BG2_128.c
@@ -0,0 +1,984 @@
+/*
+ * Licensed to the OpenAirInterface (OAI) Software Alliance under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The OpenAirInterface Software Alliance licenses this file to You under
+ * the OAI Public License, Version 1.1  (the "License"); you may not use this file
+ * except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.openairinterface.org/?page_id=698
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *-------------------------------------------------------------------------------
+ * For more information about the OpenAirInterface (OAI) Software Alliance:
+ *      contact@openairinterface.org
+ */
+
+#include <stdio.h>
+#include <stdlib.h>
+#include <stdint.h>
+#include "../../nrLDPCdecoder_defs.h"
+#include "../../nrLDPC_types.h"
+
+void nrLDPC_bnProc_BG2_generator_128(const char* dir, int R)
+{
+  const char* ratestr[3] = {"15", "13", "23"};
+
+  if (R < 0 || R > 2) {
+    printf("Illegal R %d\n", R);
+    abort();
+  }
+
+  // system("mkdir -p ../ldpc_gen_files");
+
+  char fname[FILENAME_MAX + 1];
+  snprintf(fname, sizeof(fname), "%s/bnProc128/nrLDPC_bnProc_BG2_R%s_128.h", dir, ratestr[R]);
+  FILE* fd = fopen(fname, "w");
+  if (fd == NULL) {
+    printf("Cannot create file %s\n", fname);
+    abort();
+  }
+
+  fprintf(fd, "static inline void nrLDPC_bnProc_BG2_R%s_128(int8_t* bnProcBuf,int8_t* bnProcBufRes,  int8_t* llrRes, uint16_t Z  ) {\n", ratestr[R]);
+  const uint8_t* lut_numBnInBnGroups;
+  const uint32_t* lut_startAddrBnGroups;
+  const uint16_t* lut_startAddrBnGroupsLlr;
+  if (R == 0) {
+    lut_numBnInBnGroups = lut_numBnInBnGroups_BG2_R15;
+    lut_startAddrBnGroups = lut_startAddrBnGroups_BG2_R15;
+    lut_startAddrBnGroupsLlr = lut_startAddrBnGroupsLlr_BG2_R15;
+
+  } else if (R == 1) {
+    lut_numBnInBnGroups = lut_numBnInBnGroups_BG2_R13;
+    lut_startAddrBnGroups = lut_startAddrBnGroups_BG2_R13;
+    lut_startAddrBnGroupsLlr = lut_startAddrBnGroupsLlr_BG2_R13;
+  } else if (R == 2) {
+    lut_numBnInBnGroups = lut_numBnInBnGroups_BG2_R23;
+    lut_startAddrBnGroups = lut_startAddrBnGroups_BG2_R23;
+    lut_startAddrBnGroupsLlr = lut_startAddrBnGroupsLlr_BG2_R23;
+  } else {
+    printf("aborting, illegal R %d\n", R);
+    fclose(fd);
+    abort();
+  }
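+  // Note: index g of lut_numBnInBnGroups holds the number of BNs connected
+  // to g+1 CNs; the two start-address LUTs hold the byte offsets of each
+  // group within bnProcBuf/bnProcBufRes and llrRes (shifted right by 4
+  // below to index 16-byte simde__m128i words).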
+
+  // uint32_t M;
+  // uint32_t M32rem;
+  // uint32_t i;
+  uint32_t k;
+  // Offset to each CN within a group in terms of 16 bytes
+  uint32_t cnOffsetInGroup;
+  uint8_t idxBnGroup = 0;
+
+  fprintf(fd, "        uint32_t M, i; \n");
+
+  // =====================================================================
+  // Process group with 1 CN
+  // Already done in bnProcBufPc
+
+  // =====================================================================
+
+  fprintf(fd, "// Process group with 2 CNs \n");
+
+  if (lut_numBnInBnGroups[1] > 0) {
+    // If elements in group move to next address
+    idxBnGroup++;
+
+    // Number of groups of 16 BNs for parallel processing
+    fprintf(fd, "       M = (%d*Z + 15)>>4;\n", lut_numBnInBnGroups[1]);
+
+    // Set the offset to each CN within a group in terms of 16 Byte
+    cnOffsetInGroup = (lut_numBnInBnGroups[1] * NR_LDPC_ZMAX) >> 4;
+
+    // Set pointers to start of group 2
+
+    // Loop over CNs
+    for (k = 0; k < 2; k++) {
+      // Loop over BNs
+      fprintf(fd, "            for (i=0;i<M;i++) {\n");
+      fprintf(fd,
+              "            ((simde__m128i*)bnProcBufRes)[%d + i ] = simde_mm_subs_epi8(((simde__m128i*)llrRes)[%d + i ], ((simde__m128i*) bnProcBuf)[%d + i]);\n",
+              (lut_startAddrBnGroups[idxBnGroup] >> 4) + k * cnOffsetInGroup,
+              (lut_startAddrBnGroupsLlr[idxBnGroup] >> 4),
+              (lut_startAddrBnGroups[idxBnGroup] >> 4) + k * cnOffsetInGroup);
+
+      fprintf(fd, "}\n");
+    }
+  }
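+  // The same pattern repeats for each BN-degree group below: one generated
+  // loop per CN edge k, with the group's buffer offsets baked in as
+  // integer constants rather than computed at run time.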
+
+  // =====================================================================
+
+  fprintf(fd, "// Process group with 3 CNs \n");
+
+  if (lut_numBnInBnGroups[2] > 0) {
+    // If elements in group move to next address
+    idxBnGroup++;
+
+    // Number of groups of 16 BNs for parallel processing
+    fprintf(fd, "       M = (%d*Z + 15)>>4;\n", lut_numBnInBnGroups[2]);
+
+    // Set the offset to each CN within a group in terms of 16 Byte
+    cnOffsetInGroup = (lut_numBnInBnGroups[2] * NR_LDPC_ZMAX) >> 4;
+
+    // Set pointers to start of group 2
+    // fprintf(fd,"    ((simde__m128i*) bnProcBuf)     = ((simde__m128i*) &bnProcBuf)    [%d];\n",lut_startAddrBnGroups[idxBnGroup]);
+
+    for (k = 0; k < 3; k++) {
+      // Loop over BNs
+      fprintf(fd, "            for (i=0;i<M;i++) {\n");
+      fprintf(fd,
+              "            ((simde__m128i*)bnProcBufRes)[%d + i ] = simde_mm_subs_epi8(((simde__m128i*)llrRes)[%d + i ], ((simde__m128i*) bnProcBuf)[%d + i]);\n",
+              (lut_startAddrBnGroups[idxBnGroup] >> 4) + k * cnOffsetInGroup,
+              (lut_startAddrBnGroupsLlr[idxBnGroup] >> 4),
+              (lut_startAddrBnGroups[idxBnGroup] >> 4) + k * cnOffsetInGroup);
+
+      fprintf(fd, "}\n");
+    }
+  }
+
+  // =====================================================================
+
+  fprintf(fd, "// Process group with 4 CNs \n");
+
+  if (lut_numBnInBnGroups[3] > 0) {
+    // If elements in group move to next address
+    idxBnGroup++;
+
+    // Number of groups of 16 BNs for parallel processing
+    fprintf(fd, "       M = (%d*Z + 15)>>4;\n", lut_numBnInBnGroups[3]);
+
+    // Set the offset to each CN within a group in terms of 16 Byte
+    cnOffsetInGroup = (lut_numBnInBnGroups[3] * NR_LDPC_ZMAX) >> 4;
+
+    // Set pointers to start of group 2
+
+    for (k = 0; k < 4; k++) {
+      // Loop over BNs
+      fprintf(fd, "            for (i=0;i<M;i++) {\n");
+      fprintf(fd,
+              "            ((simde__m128i*)bnProcBufRes)[%d + i ] = simde_mm_subs_epi8(((simde__m128i*)llrRes)[%d + i ], ((simde__m128i*) bnProcBuf)[%d + i]);\n",
+              ((lut_startAddrBnGroups[idxBnGroup] >> 4) + k * cnOffsetInGroup),
+              (lut_startAddrBnGroupsLlr[idxBnGroup] >> 4),
+              ((lut_startAddrBnGroups[idxBnGroup] >> 4) + k * cnOffsetInGroup));
+
+      fprintf(fd, "}\n");
+    }
+  }
+
+  // =====================================================================
+
+  fprintf(fd, "// Process group with 5 CNs \n");
+
+  if (lut_numBnInBnGroups[4] > 0) {
+    // If elements in group move to next address
+    idxBnGroup++;
+
+    // Number of groups of 16 BNs for parallel processing
+    fprintf(fd, "       M = (%d*Z + 15)>>4;\n", lut_numBnInBnGroups[4]);
+
+    // Set the offset to each CN within a group in terms of 16 Byte
+    cnOffsetInGroup = (lut_numBnInBnGroups[4] * NR_LDPC_ZMAX) >> 4;
+
+    // Set pointers to start of group 2
+
+    // Loop over CNs
+    for (k = 0; k < 5; k++) {
+      // Loop over BNs
+      fprintf(fd, "            for (i=0;i<M;i++) {\n");
+      fprintf(fd,
+              "            ((simde__m128i*)bnProcBufRes)[%d + i ] = simde_mm_subs_epi8(((simde__m128i*)llrRes)[%d + i ], ((simde__m128i*) bnProcBuf)[%d + i]);\n",
+              (lut_startAddrBnGroups[idxBnGroup] >> 4) + k * cnOffsetInGroup,
+              (lut_startAddrBnGroupsLlr[idxBnGroup] >> 4),
+              (lut_startAddrBnGroups[idxBnGroup] >> 4) + k * cnOffsetInGroup);
+
+      fprintf(fd, "}\n");
+    }
+  }
+
+  // =====================================================================
+
+  fprintf(fd, "// Process group with 6 CNs \n");
+
+  // Process group with 6 CNs
+
+  if (lut_numBnInBnGroups[5] > 0) {
+    // If elements in group move to next address
+    idxBnGroup++;
+
+    // Number of groups of 16 BNs for parallel processing
+    fprintf(fd, "       M = (%d*Z + 15)>>4;\n", lut_numBnInBnGroups[5]);
+
+    // Set the offset to each CN within a group in terms of 16 Byte
+    cnOffsetInGroup = (lut_numBnInBnGroups[5] * NR_LDPC_ZMAX) >> 4;
+
+    // Set pointers to start of group 2
+
+    // Loop over CNs
+    for (k = 0; k < 6; k++) {
+      // Loop over BNs
+      fprintf(fd, "            for (i=0;i<M;i++) {\n");
+      fprintf(fd,
+              "            ((simde__m128i*)bnProcBufRes)[%d + i ] = simde_mm_subs_epi8(((simde__m128i*)llrRes)[%d + i ], ((simde__m128i*) bnProcBuf)[%d + i]);\n",
+              (lut_startAddrBnGroups[idxBnGroup] >> 4) + k * cnOffsetInGroup,
+              (lut_startAddrBnGroupsLlr[idxBnGroup] >> 4),
+              (lut_startAddrBnGroups[idxBnGroup] >> 4) + k * cnOffsetInGroup);
+
+      fprintf(fd, "}\n");
+    }
+  }
+
+  // =====================================================================
+
+  fprintf(fd, "// Process group with 7 CNs \n");
+
+  // Process group with 7 CNs
+
+  if (lut_numBnInBnGroups[6] > 0) {
+    // If elements in group move to next address
+    idxBnGroup++;
+
+    // Number of groups of 16 BNs for parallel processing
+    fprintf(fd, "       M = (%d*Z + 15)>>4;\n", lut_numBnInBnGroups[6]);
+
+    // Set the offset to each CN within a group in terms of 16 Byte
+    cnOffsetInGroup = (lut_numBnInBnGroups[6] * NR_LDPC_ZMAX) >> 4;
+
+    // Set pointers to start of group 2
+
+    // Loop over CNs
+    for (k = 0; k < 7; k++) {
+      // Loop over BNs
+      fprintf(fd, "            for (i=0;i<M;i++) {\n");
+      fprintf(fd,
+              "            ((simde__m128i*)bnProcBufRes)[%d + i ] = simde_mm_subs_epi8(((simde__m128i*)llrRes)[%d + i ], ((simde__m128i*) bnProcBuf)[%d + i]);\n",
+              (lut_startAddrBnGroups[idxBnGroup] >> 4) + k * cnOffsetInGroup,
+              (lut_startAddrBnGroupsLlr[idxBnGroup] >> 4),
+              (lut_startAddrBnGroups[idxBnGroup] >> 4) + k * cnOffsetInGroup);
+
+      fprintf(fd, "}\n");
+    }
+  }
+
+  // =====================================================================
+
+  fprintf(fd, "// Process group with 8 CNs \n");
+
+  // Process group with 8 CNs
+
+  if (lut_numBnInBnGroups[7] > 0) {
+    // If elements in group move to next address
+    idxBnGroup++;
+
+    // Number of groups of 16 BNs for parallel processing
+    fprintf(fd, "       M = (%d*Z + 15)>>4;\n", lut_numBnInBnGroups[7]);
+
+    // Set the offset to each CN within a group in terms of 16 Byte
+    cnOffsetInGroup = (lut_numBnInBnGroups[7] * NR_LDPC_ZMAX) >> 4;
+
+    // Set pointers to start of group 2
+
+    // Loop over CNs
+    for (k = 0; k < 8; k++) {
+      // Loop over BNs
+      fprintf(fd, "            for (i=0;i<M;i++) {\n");
+      fprintf(fd,
+              "            ((simde__m128i*)bnProcBufRes)[%d + i ] = simde_mm_subs_epi8(((simde__m128i*)llrRes)[%d + i ], ((simde__m128i*) bnProcBuf)[%d + i]);\n",
+              (lut_startAddrBnGroups[idxBnGroup] >> 4) + k * cnOffsetInGroup,
+              (lut_startAddrBnGroupsLlr[idxBnGroup] >> 4),
+              (lut_startAddrBnGroups[idxBnGroup] >> 4) + k * cnOffsetInGroup);
+
+      fprintf(fd, "}\n");
+    }
+  }
+
+  // =====================================================================
+
+  fprintf(fd, "// Process group with 9 CNs \n");
+
+  // Process group with 9 CNs
+
+  if (lut_numBnInBnGroups[8] > 0) {
+    // If elements in group move to next address
+    idxBnGroup++;
+
+    // Number of groups of 16 BNs for parallel processing
+    fprintf(fd, "       M = (%d*Z + 15)>>4;\n", lut_numBnInBnGroups[8]);
+
+    // Set the offset to each CN within a group in terms of 16 Byte
+    cnOffsetInGroup = (lut_numBnInBnGroups[8] * NR_LDPC_ZMAX) >> 4;
+
+    // Set pointers to start of group 2
+
+    // Loop over CNs
+    for (k = 0; k < 9; k++) {
+      // Loop over BNs
+      fprintf(fd, "            for (i=0;i<M;i++) {\n");
+      fprintf(fd,
+              "            ((simde__m128i*)bnProcBufRes)[%d + i ] = simde_mm_subs_epi8(((simde__m128i*)llrRes)[%d + i ], ((simde__m128i*) bnProcBuf)[%d + i]);\n",
+              (lut_startAddrBnGroups[idxBnGroup] >> 4) + k * cnOffsetInGroup,
+              (lut_startAddrBnGroupsLlr[idxBnGroup] >> 4),
+              (lut_startAddrBnGroups[idxBnGroup] >> 4) + k * cnOffsetInGroup);
+
+      fprintf(fd, "}\n");
+    }
+  }
+
+  // =====================================================================
+
+  fprintf(fd, "// Process group with 10 CNs \n");
+
+  // Process group with 10 CNs
+
+  if (lut_numBnInBnGroups[9] > 0) {
+    // If elements in group move to next address
+    idxBnGroup++;
+
+    // Number of groups of 16 BNs for parallel processing
+    fprintf(fd, "       M = (%d*Z + 15)>>4;\n", lut_numBnInBnGroups[9]);
+
+    // Set the offset to each CN within a group in terms of 16 Byte
+    cnOffsetInGroup = (lut_numBnInBnGroups[9] * NR_LDPC_ZMAX) >> 4;
+
+    // Set pointers to start of group 2
+
+    // Loop over CNs
+    for (k = 0; k < 10; k++) {
+      // Loop over BNs
+      fprintf(fd, "            for (i=0;i<M;i++) {\n");
+      fprintf(fd,
+              "            ((simde__m128i*)bnProcBufRes)[%d + i ] = simde_mm_subs_epi8(((simde__m128i*)llrRes)[%d + i ], ((simde__m128i*) bnProcBuf)[%d + i]);\n",
+              (lut_startAddrBnGroups[idxBnGroup] >> 4) + k * cnOffsetInGroup,
+              (lut_startAddrBnGroupsLlr[idxBnGroup] >> 4),
+              (lut_startAddrBnGroups[idxBnGroup] >> 4) + k * cnOffsetInGroup);
+
+      fprintf(fd, "}\n");
+    }
+  }
+
+  // =====================================================================
+
+  fprintf(fd, "// Process group with 11 CNs \n");
+
+  if (lut_numBnInBnGroups[10] > 0) {
+    // If elements in group move to next address
+    idxBnGroup++;
+
+    // Number of groups of 16 BNs for parallel processing
+    fprintf(fd, "       M = (%d*Z + 15)>>4;\n", lut_numBnInBnGroups[10]);
+
+    // Set the offset to each CN within a group in terms of 16 Byte
+    cnOffsetInGroup = (lut_numBnInBnGroups[10] * NR_LDPC_ZMAX) >> 4;
+
+    // Set pointers to start of group 2
+
+    // Loop over CNs
+    for (k = 0; k < 11; k++) {
+      // Loop over BNs
+      fprintf(fd, "            for (i=0;i<M;i++) {\n");
+      fprintf(fd,
+              "            ((simde__m128i*)bnProcBufRes)[%d + i ] = simde_mm_subs_epi8(((simde__m128i*)llrRes)[%d + i ], ((simde__m128i*) bnProcBuf)[%d + i]);\n",
+              (lut_startAddrBnGroups[idxBnGroup] >> 4) + k * cnOffsetInGroup,
+              (lut_startAddrBnGroupsLlr[idxBnGroup] >> 4),
+              (lut_startAddrBnGroups[idxBnGroup] >> 4) + k * cnOffsetInGroup);
+
+      fprintf(fd, "}\n");
+    }
+  }
+  // =====================================================================
+
+  fprintf(fd, "// Process group with 12 CNs \n");
+
+  if (lut_numBnInBnGroups[11] > 0) {
+    // If elements in group move to next address
+    idxBnGroup++;
+
+    // Number of groups of 16 BNs for parallel processing
+    fprintf(fd, "       M = (%d*Z + 15)>>4;\n", lut_numBnInBnGroups[11]);
+
+    // Set the offset to each CN within a group in terms of 16 Byte
+    cnOffsetInGroup = (lut_numBnInBnGroups[11] * NR_LDPC_ZMAX) >> 4;
+
+    // Set pointers to start of group 2
+
+    // Loop over CNs
+    for (k = 0; k < 12; k++) {
+      // Loop over BNs
+      fprintf(fd, "            for (i=0;i<M;i++) {\n");
+      fprintf(fd,
+              "            ((simde__m128i*)bnProcBufRes)[%d + i ] = simde_mm_subs_epi8(((simde__m128i*)llrRes)[%d + i ], ((simde__m128i*) bnProcBuf)[%d + i]);\n",
+              (lut_startAddrBnGroups[idxBnGroup] >> 4) + k * cnOffsetInGroup,
+              (lut_startAddrBnGroupsLlr[idxBnGroup] >> 4),
+              (lut_startAddrBnGroups[idxBnGroup] >> 4) + k * cnOffsetInGroup);
+
+      fprintf(fd, "}\n");
+    }
+  }
+
+  // =====================================================================
+
+  fprintf(fd, "// Process group with 13 CNs \n");
+
+  if (lut_numBnInBnGroups[12] > 0) {
+    // If elements in group move to next address
+    idxBnGroup++;
+
+    // Number of groups of 16 BNs for parallel processing
+    fprintf(fd, "       M = (%d*Z + 15)>>4;\n", lut_numBnInBnGroups[12]);
+
+    // Set the offset to each CN within a group in terms of 16 Byte
+    cnOffsetInGroup = (lut_numBnInBnGroups[12] * NR_LDPC_ZMAX) >> 4;
+
+    // Set pointers to start of group 2
+
+    // Loop over CNs
+    for (k = 0; k < 13; k++) {
+      // Loop over BNs
+      fprintf(fd, "            for (i=0;i<M;i++) {\n");
+      fprintf(fd,
+              "            ((simde__m128i*)bnProcBufRes)[%d + i ] = simde_mm_subs_epi8(((simde__m128i*)llrRes)[%d + i ], ((simde__m128i*) bnProcBuf)[%d + i]);\n",
+              (lut_startAddrBnGroups[idxBnGroup] >> 4) + k * cnOffsetInGroup,
+              (lut_startAddrBnGroupsLlr[idxBnGroup] >> 4),
+              (lut_startAddrBnGroups[idxBnGroup] >> 4) + k * cnOffsetInGroup);
+
+      fprintf(fd, "}\n");
+    }
+  }
+
+  // =====================================================================
+
+  fprintf(fd, "// Process group with 14 CNs \n");
+
+  if (lut_numBnInBnGroups[13] > 0) {
+    // If elements in group move to next address
+    idxBnGroup++;
+
+    // Number of groups of 16 BNs for parallel processing
+    fprintf(fd, "       M = (%d*Z + 15)>>4;\n", lut_numBnInBnGroups[13]);
+
+    // Set the offset to each CN within a group in terms of 16 Byte
+    cnOffsetInGroup = (lut_numBnInBnGroups[13] * NR_LDPC_ZMAX) >> 4;
+
+    // Set pointers to start of group 2
+
+    // Loop over CNs
+    for (k = 0; k < 14; k++) {
+      // Loop over BNs
+      fprintf(fd, "            for (i=0;i<M;i++) {\n");
+      fprintf(fd,
+              "            ((simde__m128i*)bnProcBufRes)[%d + i ] = simde_mm_subs_epi8(((simde__m128i*)llrRes)[%d + i ], ((simde__m128i*) bnProcBuf)[%d + i]);\n",
+              (lut_startAddrBnGroups[idxBnGroup] >> 4) + k * cnOffsetInGroup,
+              (lut_startAddrBnGroupsLlr[idxBnGroup] >> 4),
+              (lut_startAddrBnGroups[idxBnGroup] >> 4) + k * cnOffsetInGroup);
+
+      fprintf(fd, "}\n");
+    }
+  }
+
+  // =====================================================================
+
+  fprintf(fd, "// Process group with 15 CNs \n");
+
+  if (lut_numBnInBnGroups[14] > 0) {
+    // If elements in group move to next address
+    idxBnGroup++;
+
+    // Number of groups of 16 BNs for parallel processing
+    fprintf(fd, "       M = (%d*Z + 15)>>4;\n", lut_numBnInBnGroups[14]);
+
+    // Set the offset to each CN within a group in terms of 16 Byte
+    cnOffsetInGroup = (lut_numBnInBnGroups[14] * NR_LDPC_ZMAX) >> 4;
+
+    // Set pointers to start of group 2
+
+    // Loop over CNs
+    for (k = 0; k < 15; k++) {
+      // Loop over BNs
+      fprintf(fd, "            for (i=0;i<M;i++) {\n");
+      fprintf(fd,
+              "            ((simde__m128i*)bnProcBufRes)[%d + i ] = simde_mm_subs_epi8(((simde__m128i*)llrRes)[%d + i ], ((simde__m128i*) bnProcBuf)[%d + i]);\n",
+              (lut_startAddrBnGroups[idxBnGroup] >> 4) + k * cnOffsetInGroup,
+              (lut_startAddrBnGroupsLlr[idxBnGroup] >> 4),
+              (lut_startAddrBnGroups[idxBnGroup] >> 4) + k * cnOffsetInGroup);
+
+      fprintf(fd, "}\n");
+    }
+  }
+
+  // =====================================================================
+
+  fprintf(fd, "// Process group with 16 CNs \n");
+
+  if (lut_numBnInBnGroups[15] > 0) {
+    // If elements in group move to next address
+    idxBnGroup++;
+
+    // Number of groups of 16 BNs for parallel processing
+    fprintf(fd, "       M = (%d*Z + 15)>>4;\n", lut_numBnInBnGroups[15]);
+
+    // Set the offset to each CN within a group in terms of 16 Byte
+    cnOffsetInGroup = (lut_numBnInBnGroups[15] * NR_LDPC_ZMAX) >> 4;
+
+    // Set pointers to start of group 2
+
+    // Loop over CNs
+    for (k = 0; k < 16; k++) {
+      // Loop over BNs
+      fprintf(fd, "            for (i=0;i<M;i++) {\n");
+      fprintf(fd,
+              "            ((simde__m128i*)bnProcBufRes)[%d + i ] = simde_mm_subs_epi8(((simde__m128i*)llrRes)[%d + i ], ((simde__m128i*) bnProcBuf)[%d + i]);\n",
+              (lut_startAddrBnGroups[idxBnGroup] >> 4) + k * cnOffsetInGroup,
+              (lut_startAddrBnGroupsLlr[idxBnGroup] >> 4),
+              (lut_startAddrBnGroups[idxBnGroup] >> 4) + k * cnOffsetInGroup);
+
+      fprintf(fd, "}\n");
+    }
+  }
+
+  // =====================================================================
+
+  fprintf(fd, "// Process group with 17 CNs \n");
+
+  // Process group with 17 CNs
+
+  if (lut_numBnInBnGroups[16] > 0) {
+    // If elements in group move to next address
+    idxBnGroup++;
+
+    // Number of groups of 16 BNs for parallel processing
+    fprintf(fd, "       M = (%d*Z + 15)>>4;\n", lut_numBnInBnGroups[16]);
+
+    // Set the offset to each CN within a group in terms of 16 Byte
+    cnOffsetInGroup = (lut_numBnInBnGroups[16] * NR_LDPC_ZMAX) >> 4;
+
+    // Set pointers to start of group 2
+
+    // Loop over CNs
+    for (k = 0; k < 17; k++) {
+      // Loop over BNs
+      fprintf(fd, "            for (i=0;i<M;i++) {\n");
+      fprintf(fd,
+              "            ((simde__m128i*)bnProcBufRes)[%d + i ] = simde_mm_subs_epi8(((simde__m128i*)llrRes)[%d + i ], ((simde__m128i*) bnProcBuf)[%d + i]);\n",
+              (lut_startAddrBnGroups[idxBnGroup] >> 4) + k * cnOffsetInGroup,
+              (lut_startAddrBnGroupsLlr[idxBnGroup] >> 4),
+              (lut_startAddrBnGroups[idxBnGroup] >> 4) + k * cnOffsetInGroup);
+
+      fprintf(fd, "}\n");
+    }
+  }
+
+  // =====================================================================
+
+  fprintf(fd, "// Process group with 18 CNs \n");
+
+  // Process group with 18 CNs
+
+  if (lut_numBnInBnGroups[17] > 0) {
+    // If elements in group move to next address
+    idxBnGroup++;
+
+    // Number of groups of 16 BNs for parallel processing
+    fprintf(fd, "       M = (%d*Z + 15)>>4;\n", lut_numBnInBnGroups[17]);
+
+    // Set the offset to each CN within a group in terms of 16 Byte
+    cnOffsetInGroup = (lut_numBnInBnGroups[17] * NR_LDPC_ZMAX) >> 4;
+
+    // Set pointers to start of group 2
+
+    // Loop over CNs
+    for (k = 0; k < 18; k++) {
+      // Loop over BNs
+      fprintf(fd, "            for (i=0;i<M;i++) {\n");
+      fprintf(fd,
+              "            ((simde__m128i*)bnProcBufRes)[%d + i ] = simde_mm_subs_epi8(((simde__m128i*)llrRes)[%d + i ], ((simde__m128i*) bnProcBuf)[%d + i]);\n",
+              (lut_startAddrBnGroups[idxBnGroup] >> 4) + k * cnOffsetInGroup,
+              (lut_startAddrBnGroupsLlr[idxBnGroup] >> 4),
+              (lut_startAddrBnGroups[idxBnGroup] >> 4) + k * cnOffsetInGroup);
+
+      fprintf(fd, "}\n");
+    }
+  }
+
+  // =====================================================================
+
+  fprintf(fd, "// Process group with 19 CNs \n");
+
+  if (lut_numBnInBnGroups[18] > 0) {
+    // If elements in group move to next address
+    idxBnGroup++;
+
+    // Number of groups of 16 BNs for parallel processing
+    fprintf(fd, "       M = (%d*Z + 15)>>4;\n", lut_numBnInBnGroups[18]);
+
+    // Set the offset to each CN within a group in terms of 16 Byte
+    cnOffsetInGroup = (lut_numBnInBnGroups[18] * NR_LDPC_ZMAX) >> 4;
+
+    // Set pointers to start of group 2
+
+    // Loop over CNs
+    for (k = 0; k < 19; k++) {
+      // Loop over BNs
+      fprintf(fd, "            for (i=0;i<M;i++) {\n");
+      fprintf(fd,
+              "            ((simde__m128i*)bnProcBufRes)[%d + i ] = simde_mm_subs_epi8(((simde__m128i*)llrRes)[%d + i ], ((simde__m128i*) bnProcBuf)[%d + i]);\n",
+              (lut_startAddrBnGroups[idxBnGroup] >> 4) + k * cnOffsetInGroup,
+              (lut_startAddrBnGroupsLlr[idxBnGroup] >> 4),
+              (lut_startAddrBnGroups[idxBnGroup] >> 4) + k * cnOffsetInGroup);
+
+      fprintf(fd, "}\n");
+    }
+  }
+
+  // =====================================================================
+
+  fprintf(fd, "// Process group with 20 CNs \n");
+
+  if (lut_numBnInBnGroups[19] > 0) {
+    // If elements in group move to next address
+    idxBnGroup++;
+
+    // Number of groups of 16 BNs for parallel processing
+    fprintf(fd, "       M = (%d*Z + 15)>>4;\n", lut_numBnInBnGroups[19]);
+
+    // Set the offset to each CN within a group in terms of 16 Byte
+    cnOffsetInGroup = (lut_numBnInBnGroups[19] * NR_LDPC_ZMAX) >> 4;
+
+    // Set pointers to start of group 2
+
+    // Loop over CNs
+    for (k = 0; k < 20; k++) {
+      // Loop over BNs
+      fprintf(fd, "            for (i=0;i<M;i++) {\n");
+      fprintf(fd,
+              "            ((simde__m128i*)bnProcBufRes)[%d + i ] = simde_mm_subs_epi8(((simde__m128i*)llrRes)[%d + i ], ((simde__m128i*) bnProcBuf)[%d + i]);\n",
+              (lut_startAddrBnGroups[idxBnGroup] >> 4) + k * cnOffsetInGroup,
+              (lut_startAddrBnGroupsLlr[idxBnGroup] >> 4),
+              (lut_startAddrBnGroups[idxBnGroup] >> 4) + k * cnOffsetInGroup);
+
+      fprintf(fd, "}\n");
+    }
+  }
+
+  // =====================================================================
+
+  fprintf(fd, "// Process group with 21 CNs \n");
+
+  if (lut_numBnInBnGroups[20] > 0) {
+    // If elements in group move to next address
+    idxBnGroup++;
+
+    // Number of groups of 16 BNs for parallel processing
+    fprintf(fd, "       M = (%d*Z + 15)>>4;\n", lut_numBnInBnGroups[20]);
+
+    // Set the offset to each CN within a group in terms of 16 Byte
+    cnOffsetInGroup = (lut_numBnInBnGroups[20] * NR_LDPC_ZMAX) >> 4;
+
+    // Set pointers to start of group 2
+
+    // Loop over CNs
+    for (k = 0; k < 21; k++) {
+      // Loop over BNs
+      fprintf(fd, "            for (i=0;i<M;i++) {\n");
+      fprintf(fd,
+              "            ((simde__m128i*)bnProcBufRes)[%d + i ] = simde_mm_subs_epi8(((simde__m128i*)llrRes)[%d + i ], ((simde__m128i*) bnProcBuf)[%d + i]);\n",
+              (lut_startAddrBnGroups[idxBnGroup] >> 4) + k * cnOffsetInGroup,
+              (lut_startAddrBnGroupsLlr[idxBnGroup] >> 4),
+              (lut_startAddrBnGroups[idxBnGroup] >> 4) + k * cnOffsetInGroup);
+
+      fprintf(fd, "}\n");
+    }
+  }
+  // =====================================================================
+
+  fprintf(fd, "// Process group with 22 CNs \n");
+
+  if (lut_numBnInBnGroups[21] > 0) {
+    // If elements in group move to next address
+    idxBnGroup++;
+
+    // Number of groups of 16 BNs for parallel processing
+    fprintf(fd, "       M = (%d*Z + 15)>>4;\n", lut_numBnInBnGroups[21]);
+
+    // Set the offset to each CN within a group in terms of 16 Byte
+    cnOffsetInGroup = (lut_numBnInBnGroups[21] * NR_LDPC_ZMAX) >> 4;
+
+    // Set pointers to start of group 2
+
+    // Loop over CNs
+    for (k = 0; k < 22; k++) {
+      // Loop over BNs
+      fprintf(fd, "            for (i=0;i<M;i++) {\n");
+      fprintf(fd,
+              "            ((simde__m128i*)bnProcBufRes)[%d + i ] = simde_mm_subs_epi8(((simde__m128i*)llrRes)[%d + i ], ((simde__m128i*) bnProcBuf)[%d + i]);\n",
+              (lut_startAddrBnGroups[idxBnGroup] >> 4) + k * cnOffsetInGroup,
+              (lut_startAddrBnGroupsLlr[idxBnGroup] >> 4),
+              (lut_startAddrBnGroups[idxBnGroup] >> 4) + k * cnOffsetInGroup);
+
+      fprintf(fd, "}\n");
+    }
+  }
+
+  // =====================================================================
+
+  fprintf(fd, "// Process group with <23 CNs \n");
+
+  if (lut_numBnInBnGroups[22] > 0) {
+    // If elements in group move to next address
+    idxBnGroup++;
+
+    // Number of groups of 16 BNs for parallel processing
+    fprintf(fd, "       M = (%d*Z + 15)>>4;\n", lut_numBnInBnGroups[22]);
+
+    // Set the offset to each CN within a group in terms of 16 Byte
+    cnOffsetInGroup = (lut_numBnInBnGroups[22] * NR_LDPC_ZMAX) >> 4;
+
+    // Set pointers to start of group 2
+
+    // Loop over CNs
+    for (k = 0; k < 23; k++) {
+      // Loop over BNs
+      fprintf(fd, "            for (i=0;i<M;i++) {\n");
+      fprintf(fd,
+              "            ((simde__m128i*)bnProcBufRes)[%d + i ] = simde_mm_subs_epi8(((simde__m128i*)llrRes)[%d + i ], ((simde__m128i*) bnProcBuf)[%d + i]);\n",
+              (lut_startAddrBnGroups[idxBnGroup] >> 4) + k * cnOffsetInGroup,
+              (lut_startAddrBnGroupsLlr[idxBnGroup] >> 4),
+              (lut_startAddrBnGroups[idxBnGroup] >> 4) + k * cnOffsetInGroup);
+
+      fprintf(fd, "}\n");
+    }
+  }
+
+  // =====================================================================
+
+  fprintf(fd, "// Process group with 24 CNs \n");
+
+  // Process group with 24 CNs
+
+  if (lut_numBnInBnGroups[23] > 0) {
+    // If elements in group move to next address
+    idxBnGroup++;
+
+    // Number of groups of 16 BNs for parallel processing
+    fprintf(fd, "       M = (%d*Z + 15)>>4;\n", lut_numBnInBnGroups[23]);
+
+    // Set the offset to each CN within a group in terms of 16 Byte
+    cnOffsetInGroup = (lut_numBnInBnGroups[23] * NR_LDPC_ZMAX) >> 4;
+
+    // Set pointers to start of group 2
+
+    // Loop over CNs
+    for (k = 0; k < 24; k++) {
+      // Loop over BNs
+      fprintf(fd, "            for (i=0;i<M;i++) {\n");
+      fprintf(fd,
+              "            ((simde__m128i*)bnProcBufRes)[%d + i ] = simde_mm_subs_epi8(((simde__m128i*)llrRes)[%d + i ], ((simde__m128i*) bnProcBuf)[%d + i]);\n",
+              (lut_startAddrBnGroups[idxBnGroup] >> 4) + k * cnOffsetInGroup,
+              (lut_startAddrBnGroupsLlr[idxBnGroup] >> 4),
+              (lut_startAddrBnGroups[idxBnGroup] >> 4) + k * cnOffsetInGroup);
+
+      fprintf(fd, "}\n");
+    }
+  }
+
+  // =====================================================================
+
+  fprintf(fd, "// Process group with 25 CNs \n");
+
+  if (lut_numBnInBnGroups[24] > 0) {
+    // If elements in group move to next address
+    idxBnGroup++;
+
+    // Number of groups of 16 BNs for parallel processing
+    fprintf(fd, "       M = (%d*Z + 15)>>4;\n", lut_numBnInBnGroups[24]);
+
+    // Set the offset to each CN within a group in terms of 16 Byte
+    cnOffsetInGroup = (lut_numBnInBnGroups[24] * NR_LDPC_ZMAX) >> 4;
+
+    // Set pointers to start of group 2
+
+    // Loop over CNs
+    for (k = 0; k < 25; k++) {
+      // Loop over BNs
+      fprintf(fd, "            for (i=0;i<M;i++) {\n");
+      fprintf(fd,
+              "            ((simde__m128i*)bnProcBufRes)[%d + i ] = simde_mm_subs_epi8(((simde__m128i*)llrRes)[%d + i ], ((simde__m128i*) bnProcBuf)[%d + i]);\n",
+              (lut_startAddrBnGroups[idxBnGroup] >> 4) + k * cnOffsetInGroup,
+              (lut_startAddrBnGroupsLlr[idxBnGroup] >> 4),
+              (lut_startAddrBnGroups[idxBnGroup] >> 4) + k * cnOffsetInGroup);
+
+      fprintf(fd, "}\n");
+    }
+  }
+
+  // =====================================================================
+
+  fprintf(fd, "// Process group with 26 CNs \n");
+
+  if (lut_numBnInBnGroups[25] > 0) {
+    // If elements in group move to next address
+    idxBnGroup++;
+
+    // Number of groups of 16 BNs for parallel processing
+    fprintf(fd, "       M = (%d*Z + 15)>>4;\n", lut_numBnInBnGroups[25]);
+
+    // Set the offset to each CN within a group in terms of 16 Byte
+    cnOffsetInGroup = (lut_numBnInBnGroups[25] * NR_LDPC_ZMAX) >> 4;
+
+    // Set pointers to start of group 2
+
+    // Loop over CNs
+    for (k = 0; k < 26; k++) {
+      // Loop over BNs
+      fprintf(fd, "            for (i=0;i<M;i++) {\n");
+      fprintf(fd,
+              "            ((simde__m128i*)bnProcBufRes)[%d + i ] = simde_mm_subs_epi8(((simde__m128i*)llrRes)[%d + i ], ((simde__m128i*) bnProcBuf)[%d + i]);\n",
+              (lut_startAddrBnGroups[idxBnGroup] >> 4) + k * cnOffsetInGroup,
+              (lut_startAddrBnGroupsLlr[idxBnGroup] >> 4),
+              (lut_startAddrBnGroups[idxBnGroup] >> 4) + k * cnOffsetInGroup);
+
+      fprintf(fd, "}\n");
+    }
+  }
+
+  // =====================================================================
+
+  fprintf(fd, "// Process group with 27 CNs \n");
+
+  // Process group with 27 CNs
+
+  if (lut_numBnInBnGroups[26] > 0) {
+    // If elements in group move to next address
+    idxBnGroup++;
+
+    // Number of groups of 16 BNs for parallel processing
+    fprintf(fd, "       M = (%d*Z + 15)>>4;\n", lut_numBnInBnGroups[26]);
+
+    // Set the offset to each CN within a group in terms of 16 Byte
+    cnOffsetInGroup = (lut_numBnInBnGroups[26] * NR_LDPC_ZMAX) >> 4;
+
+    // Set pointers to start of group 2
+
+    // Loop over CNs
+    for (k = 0; k < 27; k++) {
+      // Loop over BNs
+      fprintf(fd, "            for (i=0;i<M;i++) {\n");
+      fprintf(fd,
+              "            ((simde__m128i*)bnProcBufRes)[%d + i ] = simde_mm_subs_epi8(((simde__m128i*)llrRes)[%d + i ], ((simde__m128i*) bnProcBuf)[%d + i]);\n",
+              (lut_startAddrBnGroups[idxBnGroup] >> 4) + k * cnOffsetInGroup,
+              (lut_startAddrBnGroupsLlr[idxBnGroup] >> 4),
+              (lut_startAddrBnGroups[idxBnGroup] >> 4) + k * cnOffsetInGroup);
+
+      fprintf(fd, "}\n");
+    }
+  }
+
+  // =====================================================================
+
+  fprintf(fd, "// Process group with 28 CNs \n");
+
+  // Process group with 28 CNs
+
+  if (lut_numBnInBnGroups[27] > 0) {
+    // If elements in group move to next address
+    idxBnGroup++;
+
+    // Number of groups of 16 BNs for parallel processing
+    fprintf(fd, "       M = (%d*Z + 15)>>4;\n", lut_numBnInBnGroups[27]);
+
+    // Set the offset to each CN within a group in terms of 16 Byte
+    cnOffsetInGroup = (lut_numBnInBnGroups[27] * NR_LDPC_ZMAX) >> 4;
+
+    // Set pointers to start of group 2
+
+    // Loop over CNs
+    for (k = 0; k < 28; k++) {
+      // Loop over BNs
+      fprintf(fd, "            for (i=0;i<M;i++) {\n");
+      fprintf(fd,
+              "            ((simde__m128i*)bnProcBufRes)[%d + i ] = simde_mm_subs_epi8(((simde__m128i*)llrRes)[%d + i ], ((simde__m128i*) bnProcBuf)[%d + i]);\n",
+              (lut_startAddrBnGroups[idxBnGroup] >> 4) + k * cnOffsetInGroup,
+              (lut_startAddrBnGroupsLlr[idxBnGroup] >> 4),
+              (lut_startAddrBnGroups[idxBnGroup] >> 4) + k * cnOffsetInGroup);
+
+      fprintf(fd, "}\n");
+    }
+  }
+
+  // =====================================================================
+
+  fprintf(fd, "// Process group with 29 CNs \n");
+
+  // Process group with 29 CNs
+
+  if (lut_numBnInBnGroups[28] > 0) {
+    // If elements in group move to next address
+    idxBnGroup++;
+
+    // Number of groups of 16 BNs for parallel processing
+    fprintf(fd, "       M = (%d*Z + 15)>>4;\n", lut_numBnInBnGroups[28]);
+
+    // Set the offset to each CN within a group in terms of 16 Byte
+    cnOffsetInGroup = (lut_numBnInBnGroups[28] * NR_LDPC_ZMAX) >> 4;
+
+    // Set pointers to start of group 2
+
+    // Loop over CNs
+    for (k = 0; k < 29; k++) {
+      // Loop over BNs
+      fprintf(fd, "            for (i=0;i<M;i++) {\n");
+      fprintf(fd,
+              "            ((simde__m128i*)bnProcBufRes)[%d + i ] = simde_mm_subs_epi8(((simde__m128i*)llrRes)[%d + i ], ((simde__m128i*) bnProcBuf)[%d + i]);\n",
+              (lut_startAddrBnGroups[idxBnGroup] >> 4) + k * cnOffsetInGroup,
+              (lut_startAddrBnGroupsLlr[idxBnGroup] >> 4),
+              (lut_startAddrBnGroups[idxBnGroup] >> 4) + k * cnOffsetInGroup);
+
+      fprintf(fd, "}\n");
+    }
+  }
+
+  // =====================================================================
+
+  fprintf(fd, "// Process group with 30 CNs \n");
+
+  // Process group with 30 CNs
+
+  if (lut_numBnInBnGroups[29] > 0) {
+    // If elements in group move to next address
+    idxBnGroup++;
+
+    // Number of groups of 16 BNs for parallel processing
+    fprintf(fd, "       M = (%d*Z + 15)>>4;\n", lut_numBnInBnGroups[29]);
+
+    // Set the offset to each CN within a group in terms of 16 Byte
+    cnOffsetInGroup = (lut_numBnInBnGroups[29] * NR_LDPC_ZMAX) >> 4;
+
+    // Set pointers to start of group 2
+
+    // Loop over CNs
+    for (k = 0; k < 30; k++) {
+      // Loop over BNs
+      fprintf(fd, "            for (i=0;i<M;i++) {\n");
+      fprintf(fd,
+              "            ((simde__m128i*)bnProcBufRes)[%d + i ] = simde_mm_subs_epi8(((simde__m128i*)llrRes)[%d + i ], ((simde__m128i*) bnProcBuf)[%d + i]);\n",
+              (lut_startAddrBnGroups[idxBnGroup] >> 4) + k * cnOffsetInGroup,
+              (lut_startAddrBnGroupsLlr[idxBnGroup] >> 4),
+              (lut_startAddrBnGroups[idxBnGroup] >> 4) + k * cnOffsetInGroup);
+
+      fprintf(fd, "}\n");
+    }
+  }
+
+  fprintf(fd, "}\n");
+  fclose(fd);
+} // end of nrLDPC_bnProc_BG2_generator_128
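The new file above mirrors the AVX2 generators but emits simde__m128i / simde_mm_subs_epi8 code that walks the buffers in 16-byte steps, which SIMDE lowers to NEON on ARM. A hypothetical stand-alone driver, for illustration only: the real build wires the generators in through CMake, so the main() and argument handling below are assumptions, and dir must already contain a bnProc128/ subdirectory because the generator does not create it.

    #include <stdio.h>

    /* Defined in bnProc_gen_BG2_128.c above. */
    void nrLDPC_bnProc_BG2_generator_128(const char *dir, int R);

    int main(int argc, char **argv)
    {
      const char *dir = (argc > 1) ? argv[1] : ".";
      for (int R = 0; R < 3; R++)   /* R = 0,1,2 -> rates 1/5, 1/3, 2/3 */
        nrLDPC_bnProc_BG2_generator_128(dir, R);
      return 0;
    }
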
diff --git a/openair1/PHY/CODING/nrLDPC_decoder/nrLDPC_tools/generator_bnProc/bnProc_gen_BG2_avx2.c b/openair1/PHY/CODING/nrLDPC_decoder/nrLDPC_tools/generator_bnProc/bnProc_gen_BG2_avx2.c
index 278b8883192de127d880e500d3ce644c6218f6bc..39c1d35c3b45e5e2924a07f7dc5ace598191ca49 100644
--- a/openair1/PHY/CODING/nrLDPC_decoder/nrLDPC_tools/generator_bnProc/bnProc_gen_BG2_avx2.c
+++ b/openair1/PHY/CODING/nrLDPC_decoder/nrLDPC_tools/generator_bnProc/bnProc_gen_BG2_avx2.c
@@ -102,7 +102,7 @@ void nrLDPC_bnProc_BG2_generator_AVX2(const char* dir, int R)
       // Loop over BNs
       fprintf(fd, "            for (i=0;i<M;i++) {\n");
       fprintf(fd,
-              "            ((__m256i*)bnProcBufRes)[%d + i ] = simde_mm256_subs_epi8(((__m256i*)llrRes)[%d + i ], ((__m256i*) bnProcBuf)[%d + i]);\n",
+              "            ((simde__m256i*)bnProcBufRes)[%d + i ] = simde_mm256_subs_epi8(((simde__m256i*)llrRes)[%d + i ], ((simde__m256i*) bnProcBuf)[%d + i]);\n",
               (lut_startAddrBnGroups[idxBnGroup] >> 5) + k * cnOffsetInGroup,
               (lut_startAddrBnGroupsLlr[idxBnGroup] >> 5),
               (lut_startAddrBnGroups[idxBnGroup] >> 5) + k * cnOffsetInGroup);
@@ -126,13 +126,13 @@ void nrLDPC_bnProc_BG2_generator_AVX2(const char* dir, int R)
     cnOffsetInGroup = (lut_numBnInBnGroups[2] * NR_LDPC_ZMAX) >> 5;
 
     // Set pointers to start of group 2
-    // fprintf(fd,"    ((__m256i*) bnProcBuf)     = ((__m256i*) &bnProcBuf)    [%d];\n",lut_startAddrBnGroups[idxBnGroup]);
+    // fprintf(fd,"    ((simde__m256i*) bnProcBuf)     = ((simde__m256i*) &bnProcBuf)    [%d];\n",lut_startAddrBnGroups[idxBnGroup]);
 
     for (k = 0; k < 3; k++) {
       // Loop over BNs
       fprintf(fd, "            for (i=0;i<M;i++) {\n");
       fprintf(fd,
-              "            ((__m256i*)bnProcBufRes)[%d + i ] = simde_mm256_subs_epi8(((__m256i*)llrRes)[%d + i ], ((__m256i*) bnProcBuf)[%d + i]);\n",
+              "            ((simde__m256i*)bnProcBufRes)[%d + i ] = simde_mm256_subs_epi8(((simde__m256i*)llrRes)[%d + i ], ((simde__m256i*) bnProcBuf)[%d + i]);\n",
               (lut_startAddrBnGroups[idxBnGroup] >> 5) + k * cnOffsetInGroup,
               (lut_startAddrBnGroupsLlr[idxBnGroup] >> 5),
               (lut_startAddrBnGroups[idxBnGroup] >> 5) + k * cnOffsetInGroup);
@@ -161,7 +161,7 @@ void nrLDPC_bnProc_BG2_generator_AVX2(const char* dir, int R)
       // Loop over BNs
       fprintf(fd, "            for (i=0;i<M;i++) {\n");
       fprintf(fd,
-              "            ((__m256i*)bnProcBufRes)[%d + i ] = simde_mm256_subs_epi8(((__m256i*)llrRes)[%d + i ], ((__m256i*) bnProcBuf)[%d + i]);\n",
+              "            ((simde__m256i*)bnProcBufRes)[%d + i ] = simde_mm256_subs_epi8(((simde__m256i*)llrRes)[%d + i ], ((simde__m256i*) bnProcBuf)[%d + i]);\n",
               ((lut_startAddrBnGroups[idxBnGroup] >> 5) + k * cnOffsetInGroup),
               (lut_startAddrBnGroupsLlr[idxBnGroup] >> 5),
               ((lut_startAddrBnGroups[idxBnGroup] >> 5) + k * cnOffsetInGroup));
@@ -191,7 +191,7 @@ void nrLDPC_bnProc_BG2_generator_AVX2(const char* dir, int R)
       // Loop over BNs
       fprintf(fd, "            for (i=0;i<M;i++) {\n");
       fprintf(fd,
-              "            ((__m256i*)bnProcBufRes)[%d + i ] = simde_mm256_subs_epi8(((__m256i*)llrRes)[%d + i ], ((__m256i*) bnProcBuf)[%d + i]);\n",
+              "            ((simde__m256i*)bnProcBufRes)[%d + i ] = simde_mm256_subs_epi8(((simde__m256i*)llrRes)[%d + i ], ((simde__m256i*) bnProcBuf)[%d + i]);\n",
               (lut_startAddrBnGroups[idxBnGroup] >> 5) + k * cnOffsetInGroup,
               (lut_startAddrBnGroupsLlr[idxBnGroup] >> 5),
               (lut_startAddrBnGroups[idxBnGroup] >> 5) + k * cnOffsetInGroup);
@@ -223,7 +223,7 @@ void nrLDPC_bnProc_BG2_generator_AVX2(const char* dir, int R)
       // Loop over BNs
       fprintf(fd, "            for (i=0;i<M;i++) {\n");
       fprintf(fd,
-              "            ((__m256i*)bnProcBufRes)[%d + i ] = simde_mm256_subs_epi8(((__m256i*)llrRes)[%d + i ], ((__m256i*) bnProcBuf)[%d + i]);\n",
+              "            ((simde__m256i*)bnProcBufRes)[%d + i ] = simde_mm256_subs_epi8(((simde__m256i*)llrRes)[%d + i ], ((simde__m256i*) bnProcBuf)[%d + i]);\n",
               (lut_startAddrBnGroups[idxBnGroup] >> 5) + k * cnOffsetInGroup,
               (lut_startAddrBnGroupsLlr[idxBnGroup] >> 5),
               (lut_startAddrBnGroups[idxBnGroup] >> 5) + k * cnOffsetInGroup);
@@ -255,7 +255,7 @@ void nrLDPC_bnProc_BG2_generator_AVX2(const char* dir, int R)
       // Loop over BNs
       fprintf(fd, "            for (i=0;i<M;i++) {\n");
       fprintf(fd,
-              "            ((__m256i*)bnProcBufRes)[%d + i ] = simde_mm256_subs_epi8(((__m256i*)llrRes)[%d + i ], ((__m256i*) bnProcBuf)[%d + i]);\n",
+              "            ((simde__m256i*)bnProcBufRes)[%d + i ] = simde_mm256_subs_epi8(((simde__m256i*)llrRes)[%d + i ], ((simde__m256i*) bnProcBuf)[%d + i]);\n",
               (lut_startAddrBnGroups[idxBnGroup] >> 5) + k * cnOffsetInGroup,
               (lut_startAddrBnGroupsLlr[idxBnGroup] >> 5),
               (lut_startAddrBnGroups[idxBnGroup] >> 5) + k * cnOffsetInGroup);
@@ -287,7 +287,7 @@ void nrLDPC_bnProc_BG2_generator_AVX2(const char* dir, int R)
       // Loop over BNs
       fprintf(fd, "            for (i=0;i<M;i++) {\n");
       fprintf(fd,
-              "            ((__m256i*)bnProcBufRes)[%d + i ] = simde_mm256_subs_epi8(((__m256i*)llrRes)[%d + i ], ((__m256i*) bnProcBuf)[%d + i]);\n",
+              "            ((simde__m256i*)bnProcBufRes)[%d + i ] = simde_mm256_subs_epi8(((simde__m256i*)llrRes)[%d + i ], ((simde__m256i*) bnProcBuf)[%d + i]);\n",
               (lut_startAddrBnGroups[idxBnGroup] >> 5) + k * cnOffsetInGroup,
               (lut_startAddrBnGroupsLlr[idxBnGroup] >> 5),
               (lut_startAddrBnGroups[idxBnGroup] >> 5) + k * cnOffsetInGroup);
@@ -319,7 +319,7 @@ void nrLDPC_bnProc_BG2_generator_AVX2(const char* dir, int R)
       // Loop over BNs
       fprintf(fd, "            for (i=0;i<M;i++) {\n");
       fprintf(fd,
-              "            ((__m256i*)bnProcBufRes)[%d + i ] = simde_mm256_subs_epi8(((__m256i*)llrRes)[%d + i ], ((__m256i*) bnProcBuf)[%d + i]);\n",
+              "            ((simde__m256i*)bnProcBufRes)[%d + i ] = simde_mm256_subs_epi8(((simde__m256i*)llrRes)[%d + i ], ((simde__m256i*) bnProcBuf)[%d + i]);\n",
               (lut_startAddrBnGroups[idxBnGroup] >> 5) + k * cnOffsetInGroup,
               (lut_startAddrBnGroupsLlr[idxBnGroup] >> 5),
               (lut_startAddrBnGroups[idxBnGroup] >> 5) + k * cnOffsetInGroup);
@@ -351,7 +351,7 @@ void nrLDPC_bnProc_BG2_generator_AVX2(const char* dir, int R)
       // Loop over BNs
       fprintf(fd, "            for (i=0;i<M;i++) {\n");
       fprintf(fd,
-              "            ((__m256i*)bnProcBufRes)[%d + i ] = simde_mm256_subs_epi8(((__m256i*)llrRes)[%d + i ], ((__m256i*) bnProcBuf)[%d + i]);\n",
+              "            ((simde__m256i*)bnProcBufRes)[%d + i ] = simde_mm256_subs_epi8(((simde__m256i*)llrRes)[%d + i ], ((simde__m256i*) bnProcBuf)[%d + i]);\n",
               (lut_startAddrBnGroups[idxBnGroup] >> 5) + k * cnOffsetInGroup,
               (lut_startAddrBnGroupsLlr[idxBnGroup] >> 5),
               (lut_startAddrBnGroups[idxBnGroup] >> 5) + k * cnOffsetInGroup);
@@ -382,7 +382,7 @@ void nrLDPC_bnProc_BG2_generator_AVX2(const char* dir, int R)
       // Loop over BNs
       fprintf(fd, "            for (i=0;i<M;i++) {\n");
       fprintf(fd,
-              "            ((__m256i*)bnProcBufRes)[%d + i ] = simde_mm256_subs_epi8(((__m256i*)llrRes)[%d + i ], ((__m256i*) bnProcBuf)[%d + i]);\n",
+              "            ((simde__m256i*)bnProcBufRes)[%d + i ] = simde_mm256_subs_epi8(((simde__m256i*)llrRes)[%d + i ], ((simde__m256i*) bnProcBuf)[%d + i]);\n",
               (lut_startAddrBnGroups[idxBnGroup] >> 5) + k * cnOffsetInGroup,
               (lut_startAddrBnGroupsLlr[idxBnGroup] >> 5),
               (lut_startAddrBnGroups[idxBnGroup] >> 5) + k * cnOffsetInGroup);
@@ -412,7 +412,7 @@ void nrLDPC_bnProc_BG2_generator_AVX2(const char* dir, int R)
       // Loop over BNs
       fprintf(fd, "            for (i=0;i<M;i++) {\n");
       fprintf(fd,
-              "            ((__m256i*)bnProcBufRes)[%d + i ] = simde_mm256_subs_epi8(((__m256i*)llrRes)[%d + i ], ((__m256i*) bnProcBuf)[%d + i]);\n",
+              "            ((simde__m256i*)bnProcBufRes)[%d + i ] = simde_mm256_subs_epi8(((simde__m256i*)llrRes)[%d + i ], ((simde__m256i*) bnProcBuf)[%d + i]);\n",
               (lut_startAddrBnGroups[idxBnGroup] >> 5) + k * cnOffsetInGroup,
               (lut_startAddrBnGroupsLlr[idxBnGroup] >> 5),
               (lut_startAddrBnGroups[idxBnGroup] >> 5) + k * cnOffsetInGroup);
@@ -443,7 +443,7 @@ void nrLDPC_bnProc_BG2_generator_AVX2(const char* dir, int R)
       // Loop over BNs
       fprintf(fd, "            for (i=0;i<M;i++) {\n");
       fprintf(fd,
-              "            ((__m256i*)bnProcBufRes)[%d + i ] = simde_mm256_subs_epi8(((__m256i*)llrRes)[%d + i ], ((__m256i*) bnProcBuf)[%d + i]);\n",
+              "            ((simde__m256i*)bnProcBufRes)[%d + i ] = simde_mm256_subs_epi8(((simde__m256i*)llrRes)[%d + i ], ((simde__m256i*) bnProcBuf)[%d + i]);\n",
               (lut_startAddrBnGroups[idxBnGroup] >> 5) + k * cnOffsetInGroup,
               (lut_startAddrBnGroupsLlr[idxBnGroup] >> 5),
               (lut_startAddrBnGroups[idxBnGroup] >> 5) + k * cnOffsetInGroup);
@@ -474,7 +474,7 @@ void nrLDPC_bnProc_BG2_generator_AVX2(const char* dir, int R)
       // Loop over BNs
       fprintf(fd, "            for (i=0;i<M;i++) {\n");
       fprintf(fd,
-              "            ((__m256i*)bnProcBufRes)[%d + i ] = simde_mm256_subs_epi8(((__m256i*)llrRes)[%d + i ], ((__m256i*) bnProcBuf)[%d + i]);\n",
+              "            ((simde__m256i*)bnProcBufRes)[%d + i ] = simde_mm256_subs_epi8(((simde__m256i*)llrRes)[%d + i ], ((simde__m256i*) bnProcBuf)[%d + i]);\n",
               (lut_startAddrBnGroups[idxBnGroup] >> 5) + k * cnOffsetInGroup,
               (lut_startAddrBnGroupsLlr[idxBnGroup] >> 5),
               (lut_startAddrBnGroups[idxBnGroup] >> 5) + k * cnOffsetInGroup);
@@ -505,7 +505,7 @@ void nrLDPC_bnProc_BG2_generator_AVX2(const char* dir, int R)
       // Loop over BNs
       fprintf(fd, "            for (i=0;i<M;i++) {\n");
       fprintf(fd,
-              "            ((__m256i*)bnProcBufRes)[%d + i ] = simde_mm256_subs_epi8(((__m256i*)llrRes)[%d + i ], ((__m256i*) bnProcBuf)[%d + i]);\n",
+              "            ((simde__m256i*)bnProcBufRes)[%d + i ] = simde_mm256_subs_epi8(((simde__m256i*)llrRes)[%d + i ], ((simde__m256i*) bnProcBuf)[%d + i]);\n",
               (lut_startAddrBnGroups[idxBnGroup] >> 5) + k * cnOffsetInGroup,
               (lut_startAddrBnGroupsLlr[idxBnGroup] >> 5),
               (lut_startAddrBnGroups[idxBnGroup] >> 5) + k * cnOffsetInGroup);
@@ -536,7 +536,7 @@ void nrLDPC_bnProc_BG2_generator_AVX2(const char* dir, int R)
       // Loop over BNs
       fprintf(fd, "            for (i=0;i<M;i++) {\n");
       fprintf(fd,
-              "            ((__m256i*)bnProcBufRes)[%d + i ] = simde_mm256_subs_epi8(((__m256i*)llrRes)[%d + i ], ((__m256i*) bnProcBuf)[%d + i]);\n",
+              "            ((simde__m256i*)bnProcBufRes)[%d + i ] = simde_mm256_subs_epi8(((simde__m256i*)llrRes)[%d + i ], ((simde__m256i*) bnProcBuf)[%d + i]);\n",
               (lut_startAddrBnGroups[idxBnGroup] >> 5) + k * cnOffsetInGroup,
               (lut_startAddrBnGroupsLlr[idxBnGroup] >> 5),
               (lut_startAddrBnGroups[idxBnGroup] >> 5) + k * cnOffsetInGroup);
@@ -570,7 +570,7 @@ void nrLDPC_bnProc_BG2_generator_AVX2(const char* dir, int R)
       // Loop over BNs
       fprintf(fd, "            for (i=0;i<M;i++) {\n");
       fprintf(fd,
-              "            ((__m256i*)bnProcBufRes)[%d + i ] = simde_mm256_subs_epi8(((__m256i*)llrRes)[%d + i ], ((__m256i*) bnProcBuf)[%d + i]);\n",
+              "            ((simde__m256i*)bnProcBufRes)[%d + i ] = simde_mm256_subs_epi8(((simde__m256i*)llrRes)[%d + i ], ((simde__m256i*) bnProcBuf)[%d + i]);\n",
               (lut_startAddrBnGroups[idxBnGroup] >> 5) + k * cnOffsetInGroup,
               (lut_startAddrBnGroupsLlr[idxBnGroup] >> 5),
               (lut_startAddrBnGroups[idxBnGroup] >> 5) + k * cnOffsetInGroup);
@@ -603,7 +603,7 @@ void nrLDPC_bnProc_BG2_generator_AVX2(const char* dir, int R)
       // Loop over BNs
       fprintf(fd, "            for (i=0;i<M;i++) {\n");
       fprintf(fd,
-              "            ((__m256i*)bnProcBufRes)[%d + i ] = simde_mm256_subs_epi8(((__m256i*)llrRes)[%d + i ], ((__m256i*) bnProcBuf)[%d + i]);\n",
+              "            ((simde__m256i*)bnProcBufRes)[%d + i ] = simde_mm256_subs_epi8(((simde__m256i*)llrRes)[%d + i ], ((simde__m256i*) bnProcBuf)[%d + i]);\n",
               (lut_startAddrBnGroups[idxBnGroup] >> 5) + k * cnOffsetInGroup,
               (lut_startAddrBnGroupsLlr[idxBnGroup] >> 5),
               (lut_startAddrBnGroups[idxBnGroup] >> 5) + k * cnOffsetInGroup);
@@ -634,7 +634,7 @@ void nrLDPC_bnProc_BG2_generator_AVX2(const char* dir, int R)
       // Loop over BNs
       fprintf(fd, "            for (i=0;i<M;i++) {\n");
       fprintf(fd,
-              "            ((__m256i*)bnProcBufRes)[%d + i ] = simde_mm256_subs_epi8(((__m256i*)llrRes)[%d + i ], ((__m256i*) bnProcBuf)[%d + i]);\n",
+              "            ((simde__m256i*)bnProcBufRes)[%d + i ] = simde_mm256_subs_epi8(((simde__m256i*)llrRes)[%d + i ], ((simde__m256i*) bnProcBuf)[%d + i]);\n",
               (lut_startAddrBnGroups[idxBnGroup] >> 5) + k * cnOffsetInGroup,
               (lut_startAddrBnGroupsLlr[idxBnGroup] >> 5),
               (lut_startAddrBnGroups[idxBnGroup] >> 5) + k * cnOffsetInGroup);
@@ -665,7 +665,7 @@ void nrLDPC_bnProc_BG2_generator_AVX2(const char* dir, int R)
       // Loop over BNs
       fprintf(fd, "            for (i=0;i<M;i++) {\n");
       fprintf(fd,
-              "            ((__m256i*)bnProcBufRes)[%d + i ] = simde_mm256_subs_epi8(((__m256i*)llrRes)[%d + i ], ((__m256i*) bnProcBuf)[%d + i]);\n",
+              "            ((simde__m256i*)bnProcBufRes)[%d + i ] = simde_mm256_subs_epi8(((simde__m256i*)llrRes)[%d + i ], ((simde__m256i*) bnProcBuf)[%d + i]);\n",
               (lut_startAddrBnGroups[idxBnGroup] >> 5) + k * cnOffsetInGroup,
               (lut_startAddrBnGroupsLlr[idxBnGroup] >> 5),
               (lut_startAddrBnGroups[idxBnGroup] >> 5) + k * cnOffsetInGroup);
@@ -696,7 +696,7 @@ void nrLDPC_bnProc_BG2_generator_AVX2(const char* dir, int R)
       // Loop over BNs
       fprintf(fd, "            for (i=0;i<M;i++) {\n");
       fprintf(fd,
-              "            ((__m256i*)bnProcBufRes)[%d + i ] = simde_mm256_subs_epi8(((__m256i*)llrRes)[%d + i ], ((__m256i*) bnProcBuf)[%d + i]);\n",
+              "            ((simde__m256i*)bnProcBufRes)[%d + i ] = simde_mm256_subs_epi8(((simde__m256i*)llrRes)[%d + i ], ((simde__m256i*) bnProcBuf)[%d + i]);\n",
               (lut_startAddrBnGroups[idxBnGroup] >> 5) + k * cnOffsetInGroup,
               (lut_startAddrBnGroupsLlr[idxBnGroup] >> 5),
               (lut_startAddrBnGroups[idxBnGroup] >> 5) + k * cnOffsetInGroup);
@@ -726,7 +726,7 @@ void nrLDPC_bnProc_BG2_generator_AVX2(const char* dir, int R)
       // Loop over BNs
       fprintf(fd, "            for (i=0;i<M;i++) {\n");
       fprintf(fd,
-              "            ((__m256i*)bnProcBufRes)[%d + i ] = simde_mm256_subs_epi8(((__m256i*)llrRes)[%d + i ], ((__m256i*) bnProcBuf)[%d + i]);\n",
+              "            ((simde__m256i*)bnProcBufRes)[%d + i ] = simde_mm256_subs_epi8(((simde__m256i*)llrRes)[%d + i ], ((simde__m256i*) bnProcBuf)[%d + i]);\n",
               (lut_startAddrBnGroups[idxBnGroup] >> 5) + k * cnOffsetInGroup,
               (lut_startAddrBnGroupsLlr[idxBnGroup] >> 5),
               (lut_startAddrBnGroups[idxBnGroup] >> 5) + k * cnOffsetInGroup);
@@ -757,7 +757,7 @@ void nrLDPC_bnProc_BG2_generator_AVX2(const char* dir, int R)
       // Loop over BNs
       fprintf(fd, "            for (i=0;i<M;i++) {\n");
       fprintf(fd,
-              "            ((__m256i*)bnProcBufRes)[%d + i ] = simde_mm256_subs_epi8(((__m256i*)llrRes)[%d + i ], ((__m256i*) bnProcBuf)[%d + i]);\n",
+              "            ((simde__m256i*)bnProcBufRes)[%d + i ] = simde_mm256_subs_epi8(((simde__m256i*)llrRes)[%d + i ], ((simde__m256i*) bnProcBuf)[%d + i]);\n",
               (lut_startAddrBnGroups[idxBnGroup] >> 5) + k * cnOffsetInGroup,
               (lut_startAddrBnGroupsLlr[idxBnGroup] >> 5),
               (lut_startAddrBnGroups[idxBnGroup] >> 5) + k * cnOffsetInGroup);
@@ -790,7 +790,7 @@ void nrLDPC_bnProc_BG2_generator_AVX2(const char* dir, int R)
       // Loop over BNs
       fprintf(fd, "            for (i=0;i<M;i++) {\n");
       fprintf(fd,
-              "            ((__m256i*)bnProcBufRes)[%d + i ] = simde_mm256_subs_epi8(((__m256i*)llrRes)[%d + i ], ((__m256i*) bnProcBuf)[%d + i]);\n",
+              "            ((simde__m256i*)bnProcBufRes)[%d + i ] = simde_mm256_subs_epi8(((simde__m256i*)llrRes)[%d + i ], ((simde__m256i*) bnProcBuf)[%d + i]);\n",
               (lut_startAddrBnGroups[idxBnGroup] >> 5) + k * cnOffsetInGroup,
               (lut_startAddrBnGroupsLlr[idxBnGroup] >> 5),
               (lut_startAddrBnGroups[idxBnGroup] >> 5) + k * cnOffsetInGroup);
@@ -821,7 +821,7 @@ void nrLDPC_bnProc_BG2_generator_AVX2(const char* dir, int R)
       // Loop over BNs
       fprintf(fd, "            for (i=0;i<M;i++) {\n");
       fprintf(fd,
-              "            ((__m256i*)bnProcBufRes)[%d + i ] = simde_mm256_subs_epi8(((__m256i*)llrRes)[%d + i ], ((__m256i*) bnProcBuf)[%d + i]);\n",
+              "            ((simde__m256i*)bnProcBufRes)[%d + i ] = simde_mm256_subs_epi8(((simde__m256i*)llrRes)[%d + i ], ((simde__m256i*) bnProcBuf)[%d + i]);\n",
               (lut_startAddrBnGroups[idxBnGroup] >> 5) + k * cnOffsetInGroup,
               (lut_startAddrBnGroupsLlr[idxBnGroup] >> 5),
               (lut_startAddrBnGroups[idxBnGroup] >> 5) + k * cnOffsetInGroup);
@@ -852,7 +852,7 @@ void nrLDPC_bnProc_BG2_generator_AVX2(const char* dir, int R)
       // Loop over BNs
       fprintf(fd, "            for (i=0;i<M;i++) {\n");
       fprintf(fd,
-              "            ((__m256i*)bnProcBufRes)[%d + i ] = simde_mm256_subs_epi8(((__m256i*)llrRes)[%d + i ], ((__m256i*) bnProcBuf)[%d + i]);\n",
+              "            ((simde__m256i*)bnProcBufRes)[%d + i ] = simde_mm256_subs_epi8(((simde__m256i*)llrRes)[%d + i ], ((simde__m256i*) bnProcBuf)[%d + i]);\n",
               (lut_startAddrBnGroups[idxBnGroup] >> 5) + k * cnOffsetInGroup,
               (lut_startAddrBnGroupsLlr[idxBnGroup] >> 5),
               (lut_startAddrBnGroups[idxBnGroup] >> 5) + k * cnOffsetInGroup);
@@ -885,7 +885,7 @@ void nrLDPC_bnProc_BG2_generator_AVX2(const char* dir, int R)
       // Loop over BNs
       fprintf(fd, "            for (i=0;i<M;i++) {\n");
       fprintf(fd,
-              "            ((__m256i*)bnProcBufRes)[%d + i ] = simde_mm256_subs_epi8(((__m256i*)llrRes)[%d + i ], ((__m256i*) bnProcBuf)[%d + i]);\n",
+              "            ((simde__m256i*)bnProcBufRes)[%d + i ] = simde_mm256_subs_epi8(((simde__m256i*)llrRes)[%d + i ], ((simde__m256i*) bnProcBuf)[%d + i]);\n",
               (lut_startAddrBnGroups[idxBnGroup] >> 5) + k * cnOffsetInGroup,
               (lut_startAddrBnGroupsLlr[idxBnGroup] >> 5),
               (lut_startAddrBnGroups[idxBnGroup] >> 5) + k * cnOffsetInGroup);
@@ -918,7 +918,7 @@ void nrLDPC_bnProc_BG2_generator_AVX2(const char* dir, int R)
       // Loop over BNs
       fprintf(fd, "            for (i=0;i<M;i++) {\n");
       fprintf(fd,
-              "            ((__m256i*)bnProcBufRes)[%d + i ] = simde_mm256_subs_epi8(((__m256i*)llrRes)[%d + i ], ((__m256i*) bnProcBuf)[%d + i]);\n",
+              "            ((simde__m256i*)bnProcBufRes)[%d + i ] = simde_mm256_subs_epi8(((simde__m256i*)llrRes)[%d + i ], ((simde__m256i*) bnProcBuf)[%d + i]);\n",
               (lut_startAddrBnGroups[idxBnGroup] >> 5) + k * cnOffsetInGroup,
               (lut_startAddrBnGroupsLlr[idxBnGroup] >> 5),
               (lut_startAddrBnGroups[idxBnGroup] >> 5) + k * cnOffsetInGroup);
@@ -951,7 +951,7 @@ void nrLDPC_bnProc_BG2_generator_AVX2(const char* dir, int R)
       // Loop over BNs
       fprintf(fd, "            for (i=0;i<M;i++) {\n");
       fprintf(fd,
-              "            ((__m256i*)bnProcBufRes)[%d + i ] = simde_mm256_subs_epi8(((__m256i*)llrRes)[%d + i ], ((__m256i*) bnProcBuf)[%d + i]);\n",
+              "            ((simde__m256i*)bnProcBufRes)[%d + i ] = simde_mm256_subs_epi8(((simde__m256i*)llrRes)[%d + i ], ((simde__m256i*) bnProcBuf)[%d + i]);\n",
               (lut_startAddrBnGroups[idxBnGroup] >> 5) + k * cnOffsetInGroup,
               (lut_startAddrBnGroupsLlr[idxBnGroup] >> 5),
               (lut_startAddrBnGroups[idxBnGroup] >> 5) + k * cnOffsetInGroup);
@@ -984,7 +984,7 @@ void nrLDPC_bnProc_BG2_generator_AVX2(const char* dir, int R)
       // Loop over BNs
       fprintf(fd, "            for (i=0;i<M;i++) {\n");
       fprintf(fd,
-              "            ((__m256i*)bnProcBufRes)[%d + i ] = simde_mm256_subs_epi8(((__m256i*)llrRes)[%d + i ], ((__m256i*) bnProcBuf)[%d + i]);\n",
+              "            ((simde__m256i*)bnProcBufRes)[%d + i ] = simde_mm256_subs_epi8(((simde__m256i*)llrRes)[%d + i ], ((simde__m256i*) bnProcBuf)[%d + i]);\n",
               (lut_startAddrBnGroups[idxBnGroup] >> 5) + k * cnOffsetInGroup,
               (lut_startAddrBnGroupsLlr[idxBnGroup] >> 5),
               (lut_startAddrBnGroups[idxBnGroup] >> 5) + k * cnOffsetInGroup);
diff --git a/openair1/PHY/CODING/nrLDPC_decoder/nrLDPC_tools/generator_bnProc/main128.c b/openair1/PHY/CODING/nrLDPC_decoder/nrLDPC_tools/generator_bnProc/main128.c
new file mode 100644
index 0000000000000000000000000000000000000000..95541220a43184f1ac91ca91c795bf33a039ce05
--- /dev/null
+++ b/openair1/PHY/CODING/nrLDPC_decoder/nrLDPC_tools/generator_bnProc/main128.c
@@ -0,0 +1,59 @@
+/*
+ * Licensed to the OpenAirInterface (OAI) Software Alliance under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The OpenAirInterface Software Alliance licenses this file to You under
+ * the OAI Public License, Version 1.1  (the "License"); you may not use this file
+ * except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.openairinterface.org/?page_id=698
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *-------------------------------------------------------------------------------
+ * For more information about the OpenAirInterface (OAI) Software Alliance:
+ *      contact@openairinterface.org
+ */
+
+#include <stdio.h>
+#include <stdint.h>
+#define NB_R  3
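+/* Rate-specific generators, implemented in the companion sources of this
+   tool; each call writes unrolled decoder code for the _128 path. */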
+void nrLDPC_bnProc_BG1_generator_128(const char*, int);
+void nrLDPC_bnProc_BG2_generator_128(const char*, int);
+void nrLDPC_bnProcPc_BG1_generator_128(const char*, int);
+void nrLDPC_bnProcPc_BG2_generator_128(const char*, int);
+
+const char *__asan_default_options()
+{
+  /* don't do leak checking in this generator, it creates problems in the CI */
+  return "detect_leaks=0";
+}
+
+int main(int argc, char *argv[])
+{
+  if (argc != 2) {
+    fprintf(stderr, "usage: %s <output-dir>\n", argv[0]);
+    return 1;
+  }
+  const char *dir = argv[1];
+
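+  /* Invoke every generator once per rate index; the generated headers
+     are written into <output-dir>. */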
+  int R[NB_R] = {0, 1, 2};
+  for (int i = 0; i < NB_R; i++) {
+    nrLDPC_bnProc_BG1_generator_128(dir, R[i]);
+    nrLDPC_bnProc_BG2_generator_128(dir, R[i]);
+
+    nrLDPC_bnProcPc_BG1_generator_128(dir, R[i]);
+    nrLDPC_bnProcPc_BG2_generator_128(dir, R[i]);
+  }
+
+  return 0;
+}
+
diff --git a/openair1/PHY/CODING/nrLDPC_decoder/nrLDPC_tools/generator_bnProc_avx512/CMakeLists.txt b/openair1/PHY/CODING/nrLDPC_decoder/nrLDPC_tools/generator_bnProc_avx512/CMakeLists.txt
index f0b3d49b1e6e28bb2f8d824ff88a3417db0777b6..f8da0c9c2484624635333eef5c3c130d641777bf 100644
--- a/openair1/PHY/CODING/nrLDPC_decoder/nrLDPC_tools/generator_bnProc_avx512/CMakeLists.txt
+++ b/openair1/PHY/CODING/nrLDPC_decoder/nrLDPC_tools/generator_bnProc_avx512/CMakeLists.txt
@@ -4,7 +4,8 @@ add_executable(bnProc_gen_avx512
                bnProcPc_gen_BG1_avx512.c
                bnProcPc_gen_BG2_avx512.c
                main.c)
-target_compile_options(bnProc_gen_avx512 PRIVATE -W -Wall -mavx2)
+target_compile_options(bnProc_gen_avx512 PRIVATE -W -Wall)
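+# the generator itself only prints SIMDE-based source, so it should no longer need -mavx2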
 
 #set(bnProc_avx512_headers
 #    bnProc_avx512/rLDPC_bnProc_BG1_R13_AVX512.h
diff --git a/openair1/PHY/CODING/nrLDPC_decoder/nrLDPC_tools/generator_bnProc_avx512/bnProcPc_gen_BG1_avx512.c b/openair1/PHY/CODING/nrLDPC_decoder/nrLDPC_tools/generator_bnProc_avx512/bnProcPc_gen_BG1_avx512.c
index e4a7fb67ebaffd0e8e6326642d585b691cd779b5..e45a1d77c17e80dca120e072f46cba63b5d9e568 100644
--- a/openair1/PHY/CODING/nrLDPC_decoder/nrLDPC_tools/generator_bnProc_avx512/bnProcPc_gen_BG1_avx512.c
+++ b/openair1/PHY/CODING/nrLDPC_decoder/nrLDPC_tools/generator_bnProc_avx512/bnProcPc_gen_BG1_avx512.c
@@ -80,12 +80,11 @@ void nrLDPC_bnProcPc_BG1_generator_AVX512(const char *dir, int R)
     uint32_t cnOffsetInGroup;
     uint8_t idxBnGroup = 0;
 
-    fprintf(fd,"   __m512i zmm0, zmm1, zmmRes0, zmmRes1;  \n");
+    fprintf(fd, "   simde__m512i zmm0, zmm1, zmmRes0, zmmRes1;  \n");
 
-
-    fprintf(fd,"        __m256i* p_bnProcBuf; \n");
-    fprintf(fd,"        __m256i* p_llrProcBuf;\n");
-    fprintf(fd,"        __m512i* p_llrRes; \n");
+    fprintf(fd, "        simde__m256i* p_bnProcBuf; \n");
+    fprintf(fd, "        simde__m256i* p_llrProcBuf;\n");
+    fprintf(fd, "        simde__m512i* p_llrRes; \n");
     fprintf(fd,"         uint32_t M ;\n");
 
 
@@ -106,40 +105,39 @@ fprintf(fd,  "// Process group with 1 CNs \n");
         cnOffsetInGroup = (lut_numBnInBnGroups[0]*NR_LDPC_ZMAX)>>5;
 
         // Set pointers to start of group 2
-        fprintf(fd,"    p_bnProcBuf     = (__m256i*) &bnProcBuf    [%u];\n",lut_startAddrBnGroups[idxBnGroup]);
-        fprintf(fd,"    p_llrProcBuf    = (__m256i*) &llrProcBuf   [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
-        fprintf(fd,"    p_llrRes        = (__m512i*) &llrRes       [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd, "    p_bnProcBuf     = (simde__m256i*) &bnProcBuf    [%u];\n", lut_startAddrBnGroups[idxBnGroup]);
+        fprintf(fd, "    p_llrProcBuf    = (simde__m256i*) &llrProcBuf   [%d];\n", lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd, "    p_llrRes        = (simde__m512i*) &llrRes       [%d];\n", lut_startAddrBnGroupsLlr[idxBnGroup]);
 
         // Loop over BNs
         fprintf(fd,"            for (int i=0,j=0;i<M;i++,j+=2) {\n");
             // First 16 LLRs of first CN
-        fprintf(fd,"            zmm0 = _mm512_cvtepi8_epi16(p_bnProcBuf [j]);\n");
-        fprintf(fd,"            zmm1 = _mm512_cvtepi8_epi16(p_bnProcBuf[j + 1]);\n");
+        fprintf(fd, "            zmm0 = simde_mm512_cvtepi8_epi16(p_bnProcBuf [j]);\n");
+        fprintf(fd, "            zmm1 = simde_mm512_cvtepi8_epi16(p_bnProcBuf[j + 1]);\n");
 
-            // Loop over CNs
+        // Loop over CNs
         for (k=1; k<1; k++)
         {
-        fprintf(fd,"            zmm0 = _mm512_cvtepi8_epi16(p_bnProcBuf[%u + j]);\n", k*cnOffsetInGroup);
-        fprintf(fd,"            zmmRes0 = _mm512_adds_epi16(zmmRes0, zmm0);\n");
+          fprintf(fd, "            zmm0 = simde_mm512_cvtepi8_epi16(p_bnProcBuf[%u + j]);\n", k * cnOffsetInGroup);
+          fprintf(fd, "            zmmRes0 = simde_mm512_adds_epi16(zmmRes0, zmm0);\n");
 
-        fprintf(fd,"            zmm1 = _mm512_cvtepi8_epi16(p_bnProcBuf[%u + j +1]);\n", k*cnOffsetInGroup);
+          fprintf(fd, "            zmm1 = simde_mm512_cvtepi8_epi16(p_bnProcBuf[%d + j +1]);\n", k * cnOffsetInGroup);
 
-        fprintf(fd, "           zmmRes1 = _mm512_adds_epi16(zmmRes1, zmm1); \n");
+          fprintf(fd, "           zmmRes1 = simde_mm512_adds_epi16(zmmRes1, zmm1); \n");
         }
 
             // Add LLR from receiver input
-        fprintf(fd,"            zmm0    = _mm512_cvtepi8_epi16(p_llrProcBuf[j]);\n");
-        fprintf(fd,"            zmmRes0 = _mm512_adds_epi16(zmmRes0, zmm0);\n");
-
-        fprintf(fd,"            zmm1    = _mm512_cvtepi8_epi16(p_llrProcBuf[j +1 ]);\n");
-        fprintf(fd,"            zmmRes1 = _mm512_adds_epi16(zmmRes1, zmm1);\n");
+        fprintf(fd, "            zmm0    = simde_mm512_cvtepi8_epi16(p_llrProcBuf[j]);\n");
+        fprintf(fd, "            zmmRes0 = simde_mm512_adds_epi16(zmmRes0, zmm0);\n");
 
-            // Pack results back to epi8
-        fprintf(fd,"            zmm0 = _mm512_packs_epi16(zmmRes0, zmmRes1);\n");
-            // zmm0     = [zmmRes1[255:256] zmmRes0[255:256] zmmRes1[127:0] zmmRes0[127:0]]
-            // p_llrRes = [zmmRes1[255:256] zmmRes1[127:0] zmmRes0[255:256] zmmRes0[127:0]]
-        fprintf(fd,"            p_llrRes[i] = _mm512_permutex_epi64(zmm0, 0xD8);\n");
+        fprintf(fd, "            zmm1    = simde_mm512_cvtepi8_epi16(p_llrProcBuf[j +1 ]);\n");
+        fprintf(fd, "            zmmRes1 = simde_mm512_adds_epi16(zmmRes1, zmm1);\n");
 
+        // Pack results back to epi8
+        fprintf(fd, "            zmm0 = simde_mm512_packs_epi16(zmmRes0, zmmRes1);\n");
+        // zmm0     = [zmmRes1[255:256] zmmRes0[255:256] zmmRes1[127:0] zmmRes0[127:0]]
+        // p_llrRes = [zmmRes1[255:256] zmmRes1[127:0] zmmRes0[255:256] zmmRes0[127:0]]
+        fprintf(fd, "            p_llrRes[i] = simde_mm512_permutex_epi64(zmm0, 0xD8);\n");
 
         fprintf(fd,"}\n");
     }
@@ -163,40 +161,39 @@ fprintf(fd,  "// Process group with 2 CNs \n");
         cnOffsetInGroup = (lut_numBnInBnGroups[1]*NR_LDPC_ZMAX)>>5;
 
         // Set pointers to start of group 2
-        fprintf(fd,"    p_bnProcBuf     = (__m256i*) &bnProcBuf    [%u];\n",lut_startAddrBnGroups[idxBnGroup]);
-        fprintf(fd,"    p_llrProcBuf    = (__m256i*) &llrProcBuf   [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
-        fprintf(fd,"    p_llrRes        = (__m512i*) &llrRes       [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd, "    p_bnProcBuf     = (simde__m256i*) &bnProcBuf    [%u];\n", lut_startAddrBnGroups[idxBnGroup]);
+        fprintf(fd, "    p_llrProcBuf    = (simde__m256i*) &llrProcBuf   [%d];\n", lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd, "    p_llrRes        = (simde__m512i*) &llrRes       [%d];\n", lut_startAddrBnGroupsLlr[idxBnGroup]);
 
         // Loop over BNs
         fprintf(fd,"            for (int i=0,j=0;i<M;i++,j+=2) {\n");
             // First 16 LLRs of first CN
-        fprintf(fd,"            zmmRes0 = _mm512_cvtepi8_epi16(p_bnProcBuf [j]);\n");
-        fprintf(fd,"            zmmRes1 = _mm512_cvtepi8_epi16(p_bnProcBuf[j + 1]);\n");
+        fprintf(fd, "            zmmRes0 = simde_mm512_cvtepi8_epi16(p_bnProcBuf [j]);\n");
+        fprintf(fd, "            zmmRes1 = simde_mm512_cvtepi8_epi16(p_bnProcBuf[j + 1]);\n");
 
-            // Loop over CNs
+        // Loop over CNs
         for (k=1; k<2; k++)
         {
-        fprintf(fd,"            zmm0 = _mm512_cvtepi8_epi16(p_bnProcBuf[%u + j]);\n", k*cnOffsetInGroup);
-        fprintf(fd,"            zmmRes0 = _mm512_adds_epi16(zmmRes0, zmm0);\n");
+          fprintf(fd, "            zmm0 = simde_mm512_cvtepi8_epi16(p_bnProcBuf[%u + j]);\n", k * cnOffsetInGroup);
+          fprintf(fd, "            zmmRes0 = simde_mm512_adds_epi16(zmmRes0, zmm0);\n");
 
-        fprintf(fd,"            zmm1 = _mm512_cvtepi8_epi16(p_bnProcBuf[%u + j +1]);\n", k*cnOffsetInGroup);
+          fprintf(fd, "            zmm1 = simde_mm512_cvtepi8_epi16(p_bnProcBuf[%d + j +1]);\n", k * cnOffsetInGroup);
 
-        fprintf(fd, "           zmmRes1 = _mm512_adds_epi16(zmmRes1, zmm1); \n");
+          fprintf(fd, "           zmmRes1 = simde_mm512_adds_epi16(zmmRes1, zmm1); \n");
         }
 
             // Add LLR from receiver input
-        fprintf(fd,"            zmm0    = _mm512_cvtepi8_epi16(p_llrProcBuf[j]);\n");
-        fprintf(fd,"            zmmRes0 = _mm512_adds_epi16(zmmRes0, zmm0);\n");
+        fprintf(fd, "            zmm0    = simde_mm512_cvtepi8_epi16(p_llrProcBuf[j]);\n");
+        fprintf(fd, "            zmmRes0 = simde_mm512_adds_epi16(zmmRes0, zmm0);\n");
 
-        fprintf(fd,"            zmm1    = _mm512_cvtepi8_epi16(p_llrProcBuf[j +1 ]);\n");
-        fprintf(fd,"            zmmRes1 = _mm512_adds_epi16(zmmRes1, zmm1);\n");
-
-            // Pack results back to epi8
-        fprintf(fd,"            zmm0 = _mm512_packs_epi16(zmmRes0, zmmRes1);\n");
-            // zmm0     = [zmmRes1[255:256] zmmRes0[255:256] zmmRes1[127:0] zmmRes0[127:0]]
-            // p_llrRes = [zmmRes1[255:256] zmmRes1[127:0] zmmRes0[255:256] zmmRes0[127:0]]
-        fprintf(fd,"            p_llrRes[i] = _mm512_permutex_epi64(zmm0, 0xD8);\n");
+        fprintf(fd, "            zmm1    = simde_mm512_cvtepi8_epi16(p_llrProcBuf[j +1 ]);\n");
+        fprintf(fd, "            zmmRes1 = simde_mm512_adds_epi16(zmmRes1, zmm1);\n");
 
+        // Pack results back to epi8
+        fprintf(fd, "            zmm0 = simde_mm512_packs_epi16(zmmRes0, zmmRes1);\n");
+        // zmm0     = [zmmRes1[255:256] zmmRes0[255:256] zmmRes1[127:0] zmmRes0[127:0]]
+        // p_llrRes = [zmmRes1[255:256] zmmRes1[127:0] zmmRes0[255:256] zmmRes0[127:0]]
+        fprintf(fd, "            p_llrRes[i] = simde_mm512_permutex_epi64(zmm0, 0xD8);\n");
 
         fprintf(fd,"}\n");
     }
@@ -221,39 +218,39 @@ fprintf(fd,  "// Process group with 3 CNs \n");
         cnOffsetInGroup = (lut_numBnInBnGroups[2]*NR_LDPC_ZMAX)>>5;
 
         // Set pointers to start of group 2
-        fprintf(fd,"    p_bnProcBuf     = (__m256i*) &bnProcBuf    [%u];\n",lut_startAddrBnGroups[idxBnGroup]);
-        fprintf(fd,"    p_llrProcBuf    = (__m256i*) &llrProcBuf   [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
-        fprintf(fd,"    p_llrRes        = (__m512i*) &llrRes       [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd, "    p_bnProcBuf     = (simde__m256i*) &bnProcBuf    [%u];\n", lut_startAddrBnGroups[idxBnGroup]);
+        fprintf(fd, "    p_llrProcBuf    = (simde__m256i*) &llrProcBuf   [%d];\n", lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd, "    p_llrRes        = (simde__m512i*) &llrRes       [%d];\n", lut_startAddrBnGroupsLlr[idxBnGroup]);
         // Loop over BNs
         fprintf(fd,"        for (int i=0,j=0;i<M;i++,j+=2) {\n");
             // First 16 LLRs of first CN
-        fprintf(fd,"        zmmRes0 = _mm512_cvtepi8_epi16(p_bnProcBuf [j]);\n");
-        fprintf(fd,"        zmmRes1 = _mm512_cvtepi8_epi16(p_bnProcBuf [j +1]);\n");
+        fprintf(fd, "        zmmRes0 = simde_mm512_cvtepi8_epi16(p_bnProcBuf [j]);\n");
+        fprintf(fd, "        zmmRes1 = simde_mm512_cvtepi8_epi16(p_bnProcBuf [j +1]);\n");
 
-            // Loop over CNs
+        // Loop over CNs
         for (k=1; k<3; k++)
         {
-        fprintf(fd,"        zmm0 = _mm512_cvtepi8_epi16(p_bnProcBuf[%u + j]);\n", k*cnOffsetInGroup);
-        fprintf(fd,"        zmmRes0 = _mm512_adds_epi16(zmmRes0, zmm0);\n");
+          fprintf(fd, "        zmm0 = simde_mm512_cvtepi8_epi16(p_bnProcBuf[%u + j]);\n", k * cnOffsetInGroup);
+          fprintf(fd, "        zmmRes0 = simde_mm512_adds_epi16(zmmRes0, zmm0);\n");
 
-        fprintf(fd,"        zmm1 = _mm512_cvtepi8_epi16(p_bnProcBuf[%u + j +1]);\n", k*cnOffsetInGroup);
+          fprintf(fd, "        zmm1 = simde_mm512_cvtepi8_epi16(p_bnProcBuf[%u + j +1]);\n", k * cnOffsetInGroup);
 
-        fprintf(fd,"        zmmRes1 = _mm512_adds_epi16(zmmRes1, zmm1); \n");
+          fprintf(fd, "        zmmRes1 = simde_mm512_adds_epi16(zmmRes1, zmm1); \n");
             }
 
             // Add LLR from receiver input
-        fprintf(fd,"        zmm0    = _mm512_cvtepi8_epi16(p_llrProcBuf[j]);\n");
-        fprintf(fd,"        zmmRes0 = _mm512_adds_epi16(zmmRes0, zmm0);\n");
+            fprintf(fd, "        zmm0    = simde_mm512_cvtepi8_epi16(p_llrProcBuf[j]);\n");
+            fprintf(fd, "        zmmRes0 = simde_mm512_adds_epi16(zmmRes0, zmm0);\n");
 
-        fprintf(fd,"        zmm1    = _mm512_cvtepi8_epi16(p_llrProcBuf[j +1 ]);\n");
-        fprintf(fd,"        zmmRes1 = _mm512_adds_epi16(zmmRes1, zmm1);\n");
+            fprintf(fd, "        zmm1    = simde_mm512_cvtepi8_epi16(p_llrProcBuf[j +1 ]);\n");
+            fprintf(fd, "        zmmRes1 = simde_mm512_adds_epi16(zmmRes1, zmm1);\n");
 
             // Pack results back to epi8
-        fprintf(fd,"        zmm0 = _mm512_packs_epi16(zmmRes0, zmmRes1);\n");
+            fprintf(fd, "        zmm0 = simde_mm512_packs_epi16(zmmRes0, zmmRes1);\n");
             // zmm0     = [zmmRes1[255:256] zmmRes0[255:256] zmmRes1[127:0] zmmRes0[127:0]]
-        fprintf(fd,"            p_llrRes[i] = _mm512_permutex_epi64(zmm0, 0xD8);\n");
+            fprintf(fd, "            p_llrRes[i] = simde_mm512_permutex_epi64(zmm0, 0xD8);\n");
 
-        fprintf(fd,"}\n");
+            fprintf(fd, "}\n");
     }
 
 
@@ -277,39 +274,39 @@ fprintf(fd,  "// Process group with 4 CNs \n");
         cnOffsetInGroup = (lut_numBnInBnGroups[3]*NR_LDPC_ZMAX)>>5;
 
         // Set pointers to start of group 2
-        fprintf(fd,"    p_bnProcBuf     = (__m256i*) &bnProcBuf    [%u];\n",lut_startAddrBnGroups[idxBnGroup]);
-        fprintf(fd,"    p_llrProcBuf    = (__m256i*) &llrProcBuf   [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
-        fprintf(fd,"    p_llrRes        = (__m512i*) &llrRes       [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd, "    p_bnProcBuf     = (simde__m256i*) &bnProcBuf    [%u];\n", lut_startAddrBnGroups[idxBnGroup]);
+        fprintf(fd, "    p_llrProcBuf    = (simde__m256i*) &llrProcBuf   [%d];\n", lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd, "    p_llrRes        = (simde__m512i*) &llrRes       [%d];\n", lut_startAddrBnGroupsLlr[idxBnGroup]);
 
         // Loop over BNs
         fprintf(fd,"        for (int i=0,j=0;i<M;i++,j+=2) {\n");
             // First 16 LLRs of first CN
-        fprintf(fd,"        zmmRes0 = _mm512_cvtepi8_epi16(p_bnProcBuf [j]);\n");
-        fprintf(fd,"        zmmRes1 = _mm512_cvtepi8_epi16(p_bnProcBuf [j +1]);\n");
+        fprintf(fd, "        zmmRes0 = simde_mm512_cvtepi8_epi16(p_bnProcBuf [j]);\n");
+        fprintf(fd, "        zmmRes1 = simde_mm512_cvtepi8_epi16(p_bnProcBuf [j +1]);\n");
 
-            // Loop over CNs
+        // Loop over CNs
         for (k=1; k<4; k++)
         {
-        fprintf(fd,"        zmm0 = _mm512_cvtepi8_epi16(p_bnProcBuf[%u + j]);\n", k*cnOffsetInGroup);
-        fprintf(fd,"        zmmRes0 = _mm512_adds_epi16(zmmRes0, zmm0);\n");
+          fprintf(fd, "        zmm0 = simde_mm512_cvtepi8_epi16(p_bnProcBuf[%u + j]);\n", k * cnOffsetInGroup);
+          fprintf(fd, "        zmmRes0 = simde_mm512_adds_epi16(zmmRes0, zmm0);\n");
 
-        fprintf(fd,"        zmm1 = _mm512_cvtepi8_epi16(p_bnProcBuf[%u + j +1]);\n", k*cnOffsetInGroup);
+          fprintf(fd, "        zmm1 = simde_mm512_cvtepi8_epi16(p_bnProcBuf[%d + j +1]);\n", k * cnOffsetInGroup);
 
-        fprintf(fd, "       zmmRes1 = _mm512_adds_epi16(zmmRes1, zmm1); \n");
+          fprintf(fd, "       zmmRes1 = simde_mm512_adds_epi16(zmmRes1, zmm1); \n");
         }
 
             // Add LLR from receiver input
-        fprintf(fd,"        zmm0    = _mm512_cvtepi8_epi16(p_llrProcBuf[j]);\n");
-        fprintf(fd,"        zmmRes0 = _mm512_adds_epi16(zmmRes0, zmm0);\n");
+        fprintf(fd, "        zmm0    = simde_mm512_cvtepi8_epi16(p_llrProcBuf[j]);\n");
+        fprintf(fd, "        zmmRes0 = simde_mm512_adds_epi16(zmmRes0, zmm0);\n");
 
-        fprintf(fd,"        zmm1    = _mm512_cvtepi8_epi16(p_llrProcBuf[j +1 ]);\n");
-        fprintf(fd,"        zmmRes1 = _mm512_adds_epi16(zmmRes1, zmm1);\n");
+        fprintf(fd, "        zmm1    = simde_mm512_cvtepi8_epi16(p_llrProcBuf[j +1 ]);\n");
+        fprintf(fd, "        zmmRes1 = simde_mm512_adds_epi16(zmmRes1, zmm1);\n");
 
-            // Pack results back to epi8
-        fprintf(fd,"        zmm0 = _mm512_packs_epi16(zmmRes0, zmmRes1);\n");
-            // zmm0     = [zmmRes1[255:256] zmmRes0[255:256] zmmRes1[127:0] zmmRes0[127:0]]
-            // p_llrRes = [zmmRes1[255:256] zmmRes1[127:0] zmmRes0[255:256] zmmRes0[127:0]]
-        fprintf(fd,"            p_llrRes[i] = _mm512_permutex_epi64(zmm0, 0xD8);\n");
+        // Pack results back to epi8
+        fprintf(fd, "        zmm0 = simde_mm512_packs_epi16(zmmRes0, zmmRes1);\n");
+        // zmm0     = [zmmRes1[255:256] zmmRes0[255:256] zmmRes1[127:0] zmmRes0[127:0]]
+        // p_llrRes = [zmmRes1[255:256] zmmRes1[127:0] zmmRes0[255:256] zmmRes0[127:0]]
+        fprintf(fd, "            p_llrRes[i] = simde_mm512_permutex_epi64(zmm0, 0xD8);\n");
 
         fprintf(fd,"}\n");
     }
@@ -334,38 +331,38 @@ fprintf(fd,  "// Process group with 5 CNs \n");
         cnOffsetInGroup = (lut_numBnInBnGroups[4]*NR_LDPC_ZMAX)>>5;
 
         // Set pointers to start of group 2
-        fprintf(fd,"    p_bnProcBuf     = (__m256i*) &bnProcBuf    [%u];\n",lut_startAddrBnGroups[idxBnGroup]);
-        fprintf(fd,"    p_llrProcBuf    = (__m256i*) &llrProcBuf   [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
-        fprintf(fd,"    p_llrRes        = (__m512i*) &llrRes       [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd, "    p_bnProcBuf     = (simde__m256i*) &bnProcBuf    [%u];\n", lut_startAddrBnGroups[idxBnGroup]);
+        fprintf(fd, "    p_llrProcBuf    = (simde__m256i*) &llrProcBuf   [%d];\n", lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd, "    p_llrRes        = (simde__m512i*) &llrRes       [%d];\n", lut_startAddrBnGroupsLlr[idxBnGroup]);
         // Loop over BNs
         fprintf(fd,"        for (int i=0,j=0;i<M;i++,j+=2) {\n");
             // First 16 LLRs of first CN
-        fprintf(fd,"        zmmRes0 = _mm512_cvtepi8_epi16(p_bnProcBuf [j]);\n");
-        fprintf(fd,"        zmmRes1 = _mm512_cvtepi8_epi16(p_bnProcBuf [j +1]);\n");
+        fprintf(fd, "        zmmRes0 = simde_mm512_cvtepi8_epi16(p_bnProcBuf [j]);\n");
+        fprintf(fd, "        zmmRes1 = simde_mm512_cvtepi8_epi16(p_bnProcBuf [j +1]);\n");
 
-            // Loop over CNs
+        // Loop over CNs
         for (k=1; k<5; k++)
         {
-        fprintf(fd,"        zmm0 = _mm512_cvtepi8_epi16(p_bnProcBuf[%u + j]);\n", k*cnOffsetInGroup);
-        fprintf(fd,"        zmmRes0 = _mm512_adds_epi16(zmmRes0, zmm0);\n");
+          fprintf(fd, "        zmm0 = simde_mm512_cvtepi8_epi16(p_bnProcBuf[%u + j]);\n", k * cnOffsetInGroup);
+          fprintf(fd, "        zmmRes0 = simde_mm512_adds_epi16(zmmRes0, zmm0);\n");
 
-        fprintf(fd,"        zmm1 = _mm512_cvtepi8_epi16(p_bnProcBuf[%u + j +1]);\n", k*cnOffsetInGroup);
+          fprintf(fd, "        zmm1 = simde_mm512_cvtepi8_epi16(p_bnProcBuf[%d + j +1]);\n", k * cnOffsetInGroup);
 
-        fprintf(fd, "       zmmRes1 = _mm512_adds_epi16(zmmRes1, zmm1); \n");
+          fprintf(fd, "       zmmRes1 = simde_mm512_adds_epi16(zmmRes1, zmm1); \n");
         }
 
             // Add LLR from receiver input
-        fprintf(fd,"        zmm0    = _mm512_cvtepi8_epi16(p_llrProcBuf[j]);\n");
-        fprintf(fd,"        zmmRes0 = _mm512_adds_epi16(zmmRes0, zmm0);\n");
+        fprintf(fd, "        zmm0    = simde_mm512_cvtepi8_epi16(p_llrProcBuf[j]);\n");
+        fprintf(fd, "        zmmRes0 = simde_mm512_adds_epi16(zmmRes0, zmm0);\n");
 
-        fprintf(fd,"        zmm1    = _mm512_cvtepi8_epi16(p_llrProcBuf[j +1 ]);\n");
-        fprintf(fd,"        zmmRes1 = _mm512_adds_epi16(zmmRes1, zmm1);\n");
+        fprintf(fd, "        zmm1    = simde_mm512_cvtepi8_epi16(p_llrProcBuf[j +1 ]);\n");
+        fprintf(fd, "        zmmRes1 = simde_mm512_adds_epi16(zmmRes1, zmm1);\n");
 
-            // Pack results back to epi8
-        fprintf(fd,"        zmm0 = _mm512_packs_epi16(zmmRes0, zmmRes1);\n");
-            // zmm0     = [zmmRes1[255:256] zmmRes0[255:256] zmmRes1[127:0] zmmRes0[127:0]]
-            // p_llrRes = [zmmRes1[255:256] zmmRes1[127:0] zmmRes0[255:256] zmmRes0[127:0]]
-        fprintf(fd,"            p_llrRes[i] = _mm512_permutex_epi64(zmm0, 0xD8);\n");
+        // Pack results back to epi8
+        fprintf(fd, "        zmm0 = simde_mm512_packs_epi16(zmmRes0, zmmRes1);\n");
+        // zmm0     = [zmmRes1[255:256] zmmRes0[255:256] zmmRes1[127:0] zmmRes0[127:0]]
+        // p_llrRes = [zmmRes1[255:256] zmmRes1[127:0] zmmRes0[255:256] zmmRes0[127:0]]
+        fprintf(fd, "            p_llrRes[i] = simde_mm512_permutex_epi64(zmm0, 0xD8);\n");
 
         fprintf(fd,"}\n");
     }
@@ -391,38 +388,38 @@ fprintf(fd,  "// Process group with 6 CNs \n");
         cnOffsetInGroup = (lut_numBnInBnGroups[5]*NR_LDPC_ZMAX)>>5;
 
         // Set pointers to start of group 2
-        fprintf(fd,"    p_bnProcBuf     = (__m256i*) &bnProcBuf    [%u];\n",lut_startAddrBnGroups[idxBnGroup]);
-        fprintf(fd,"    p_llrProcBuf    = (__m256i*) &llrProcBuf   [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
-        fprintf(fd,"    p_llrRes        = (__m512i*) &llrRes       [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd, "    p_bnProcBuf     = (simde__m256i*) &bnProcBuf    [%u];\n", lut_startAddrBnGroups[idxBnGroup]);
+        fprintf(fd, "    p_llrProcBuf    = (simde__m256i*) &llrProcBuf   [%d];\n", lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd, "    p_llrRes        = (simde__m512i*) &llrRes       [%d];\n", lut_startAddrBnGroupsLlr[idxBnGroup]);
         // Loop over BNs
         fprintf(fd,"        for (int i=0,j=0;i<M;i++,j+=2) {\n");
             // First 16 LLRs of first CN
-        fprintf(fd,"        zmmRes0 = _mm512_cvtepi8_epi16(p_bnProcBuf [j]);\n");
-        fprintf(fd,"        zmmRes1 = _mm512_cvtepi8_epi16(p_bnProcBuf [j +1]);\n");
+        fprintf(fd, "        zmmRes0 = simde_mm512_cvtepi8_epi16(p_bnProcBuf [j]);\n");
+        fprintf(fd, "        zmmRes1 = simde_mm512_cvtepi8_epi16(p_bnProcBuf [j +1]);\n");
 
-            // Loop over CNs
+        // Loop over CNs
         for (k=1; k<6; k++)
         {
-        fprintf(fd,"        zmm0 = _mm512_cvtepi8_epi16(p_bnProcBuf[%u + j]);\n", k*cnOffsetInGroup);
-        fprintf(fd,"        zmmRes0 = _mm512_adds_epi16(zmmRes0, zmm0);\n");
+          fprintf(fd, "        zmm0 = simde_mm512_cvtepi8_epi16(p_bnProcBuf[%u + j]);\n", k * cnOffsetInGroup);
+          fprintf(fd, "        zmmRes0 = simde_mm512_adds_epi16(zmmRes0, zmm0);\n");
 
-        fprintf(fd,"        zmm1 = _mm512_cvtepi8_epi16(p_bnProcBuf[%u + j +1]);\n", k*cnOffsetInGroup);
+          fprintf(fd, "        zmm1 = simde_mm512_cvtepi8_epi16(p_bnProcBuf[%u + j +1]);\n", k * cnOffsetInGroup);
 
-        fprintf(fd, "       zmmRes1 = _mm512_adds_epi16(zmmRes1, zmm1); \n");
+          fprintf(fd, "       zmmRes1 = simde_mm512_adds_epi16(zmmRes1, zmm1); \n");
         }
 
             // Add LLR from receiver input
-        fprintf(fd,"        zmm0    = _mm512_cvtepi8_epi16(p_llrProcBuf[j]);\n");
-        fprintf(fd,"        zmmRes0 = _mm512_adds_epi16(zmmRes0, zmm0);\n");
+        fprintf(fd, "        zmm0    = simde_mm512_cvtepi8_epi16(p_llrProcBuf[j]);\n");
+        fprintf(fd, "        zmmRes0 = simde_mm512_adds_epi16(zmmRes0, zmm0);\n");
 
-        fprintf(fd,"        zmm1    = _mm512_cvtepi8_epi16(p_llrProcBuf[j +1 ]);\n");
-        fprintf(fd,"        zmmRes1 = _mm512_adds_epi16(zmmRes1, zmm1);\n");
+        fprintf(fd, "        zmm1    = simde_mm512_cvtepi8_epi16(p_llrProcBuf[j +1 ]);\n");
+        fprintf(fd, "        zmmRes1 = simde_mm512_adds_epi16(zmmRes1, zmm1);\n");
 
-            // Pack results back to epi8
-        fprintf(fd,"        zmm0 = _mm512_packs_epi16(zmmRes0, zmmRes1);\n");
-            // zmm0     = [zmmRes1[255:256] zmmRes0[255:256] zmmRes1[127:0] zmmRes0[127:0]]
-            // p_llrRes = [zmmRes1[255:256] zmmRes1[127:0] zmmRes0[255:256] zmmRes0[127:0]]
-        fprintf(fd,"            p_llrRes[i] = _mm512_permutex_epi64(zmm0, 0xD8);\n");
+        // Pack results back to epi8
+        fprintf(fd, "        zmm0 = simde_mm512_packs_epi16(zmmRes0, zmmRes1);\n");
+        // zmm0     = [zmmRes1[255:256] zmmRes0[255:256] zmmRes1[127:0] zmmRes0[127:0]]
+        // p_llrRes = [zmmRes1[255:256] zmmRes1[127:0] zmmRes0[255:256] zmmRes0[127:0]]
+        fprintf(fd, "            p_llrRes[i] = simde_mm512_permutex_epi64(zmm0, 0xD8);\n");
 
         fprintf(fd,"}\n");
     }
@@ -447,39 +444,40 @@ fprintf(fd,  "// Process group with 7 CNs \n");
         cnOffsetInGroup = (lut_numBnInBnGroups[6]*NR_LDPC_ZMAX)>>5;
 
         // Set pointers to start of group 2
-        fprintf(fd,"    p_bnProcBuf     = (__m256i*) &bnProcBuf    [%u];\n",lut_startAddrBnGroups[idxBnGroup]);
-        fprintf(fd,"    p_llrProcBuf    = (__m256i*) &llrProcBuf   [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
-        fprintf(fd,"    p_llrRes        = (__m512i*) &llrRes       [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd, "    p_bnProcBuf     = (simde__m256i*) &bnProcBuf    [%u];\n", lut_startAddrBnGroups[idxBnGroup]);
+        fprintf(fd, "    p_llrProcBuf    = (simde__m256i*) &llrProcBuf   [%d];\n", lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd, "    p_llrRes        = (simde__m512i*) &llrRes       [%d];\n", lut_startAddrBnGroupsLlr[idxBnGroup]);
         // Loop over BNs
         fprintf(fd,"        for (int i=0,j=0;i<M;i++,j+=2) {\n");
             // First 16 LLRs of first CN
-        fprintf(fd,"        zmmRes0 = _mm512_cvtepi8_epi16(p_bnProcBuf [j]);\n");
-        fprintf(fd,"        zmmRes1 = _mm512_cvtepi8_epi16(p_bnProcBuf [j +1]);\n");
+        fprintf(fd, "        zmmRes0 = simde_mm512_cvtepi8_epi16(p_bnProcBuf [j]);\n");
+        fprintf(fd, "        zmmRes1 = simde_mm512_cvtepi8_epi16(p_bnProcBuf [j +1]);\n");
 
-            // Loop over CNs
+        // Loop over CNs
         for (k=1; k<7; k++)
         {
-        fprintf(fd,"        zmm0 = _mm512_cvtepi8_epi16(p_bnProcBuf[%u + j]);\n", k*cnOffsetInGroup);
-        fprintf(fd,"        zmmRes0 = _mm512_adds_epi16(zmmRes0, zmm0);\n");
+          fprintf(fd, "        zmm0 = simde_mm512_cvtepi8_epi16(p_bnProcBuf[%u + j]);\n", k * cnOffsetInGroup);
+          fprintf(fd, "        zmmRes0 = simde_mm512_adds_epi16(zmmRes0, zmm0);\n");
 
-        fprintf(fd,"        zmm1 = _mm512_cvtepi8_epi16(p_bnProcBuf[%u + j +1]);\n", k*cnOffsetInGroup);
+          fprintf(fd, "        zmm1 = simde_mm512_cvtepi8_epi16(p_bnProcBuf[%d + j +1]);\n", k * cnOffsetInGroup);
 
-        fprintf(fd, "       zmmRes1 = _mm512_adds_epi16(zmmRes1, zmm1); \n");
+          fprintf(fd, "       zmmRes1 = simde_mm512_adds_epi16(zmmRes1, zmm1); \n");
         }
 
             // Add LLR from receiver input
-        fprintf(fd,"        zmm0    = _mm512_cvtepi8_epi16(p_llrProcBuf[j]);\n");
-        fprintf(fd,"        zmmRes0 = _mm512_adds_epi16(zmmRes0, zmm0);\n");
+        fprintf(fd, "        zmm0    = simde_mm512_cvtepi8_epi16(p_llrProcBuf[j]);\n");
+        fprintf(fd, "        zmmRes0 = simde_mm512_adds_epi16(zmmRes0, zmm0);\n");
 
-        fprintf(fd,"        zmm1    = _mm512_cvtepi8_epi16(p_llrProcBuf[j +1 ]);\n");
-        fprintf(fd,"        zmmRes1 = _mm512_adds_epi16(zmmRes1, zmm1);\n");
+        fprintf(fd, "        zmm1    = simde_mm512_cvtepi8_epi16(p_llrProcBuf[j +1 ]);\n");
+        fprintf(fd, "        zmmRes1 = simde_mm512_adds_epi16(zmmRes1, zmm1);\n");
 
-            // Pack results back to epi8
-        fprintf(fd,"        zmm0 = _mm512_packs_epi16(zmmRes0, zmmRes1);\n");
-            // zmm0     = [zmmRes1[255:256] zmmRes0[255:256] zmmRes1[127:0] zmmRes0[127:0]]
-            // p_llrRes = [zmmRes1[255:256] zmmRes1[127:0] zmmRes0[255:256] zmmRes0[127:0]]
-        //fprintf(fd,"         (__m512i*) &llrRes[%d + i]    = _mm512_permutex_epi64(zmm0, 0xD8);\n",lut_startAddrBnGroupsLlr[idxBnGroup]>>6 );
-        fprintf(fd,"            p_llrRes[i] = _mm512_permutex_epi64(zmm0, 0xD8);\n");
+        // Pack results back to epi8
+        fprintf(fd, "        zmm0 = simde_mm512_packs_epi16(zmmRes0, zmmRes1);\n");
+        // zmm0     = [zmmRes1[255:256] zmmRes0[255:256] zmmRes1[127:0] zmmRes0[127:0]]
+        // p_llrRes = [zmmRes1[255:256] zmmRes1[127:0] zmmRes0[255:256] zmmRes0[127:0]]
+        // fprintf(fd,"         (simde__m512i*) &llrRes[%d + i]    = simde_mm512_permutex_epi64(zmm0,
+        // 0xD8);\n",lut_startAddrBnGroupsLlr[idxBnGroup]>>6 );
+        fprintf(fd, "            p_llrRes[i] = simde_mm512_permutex_epi64(zmm0, 0xD8);\n");
 
         fprintf(fd,"}\n");
     }
@@ -504,40 +502,41 @@ fprintf(fd,  "// Process group with 8 CNs \n");
         cnOffsetInGroup = (lut_numBnInBnGroups[7]*NR_LDPC_ZMAX)>>5;
 
         // Set pointers to start of group 2
-        fprintf(fd,"    p_bnProcBuf     = (__m256i*) &bnProcBuf    [%u];\n",lut_startAddrBnGroups[idxBnGroup]);
-        fprintf(fd,"    p_llrProcBuf    = (__m256i*) &llrProcBuf   [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
-        fprintf(fd,"    p_llrRes        = (__m512i*) &llrRes       [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd, "    p_bnProcBuf     = (simde__m256i*) &bnProcBuf    [%u];\n", lut_startAddrBnGroups[idxBnGroup]);
+        fprintf(fd, "    p_llrProcBuf    = (simde__m256i*) &llrProcBuf   [%d];\n", lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd, "    p_llrRes        = (simde__m512i*) &llrRes       [%d];\n", lut_startAddrBnGroupsLlr[idxBnGroup]);
         // Loop over BNs
         fprintf(fd,"        for (int i=0,j=0;i<M;i++,j+=2) {\n");
             // First 16 LLRs of first CN
-        fprintf(fd,"        zmmRes0 = _mm512_cvtepi8_epi16(p_bnProcBuf [j]);\n");
-        fprintf(fd,"        zmmRes1 = _mm512_cvtepi8_epi16(p_bnProcBuf [j +1]);\n");
+        fprintf(fd, "        zmmRes0 = simde_mm512_cvtepi8_epi16(p_bnProcBuf [j]);\n");
+        fprintf(fd, "        zmmRes1 = simde_mm512_cvtepi8_epi16(p_bnProcBuf [j +1]);\n");
 
-            // Loop over CNs
+        // Loop over CNs
         for (k=1; k<8; k++)
         {
-        fprintf(fd,"        zmm0 = _mm512_cvtepi8_epi16(p_bnProcBuf[%u + j]);\n", k*cnOffsetInGroup);
-        fprintf(fd,"        zmmRes0 = _mm512_adds_epi16(zmmRes0, zmm0);\n");
+          fprintf(fd, "        zmm0 = simde_mm512_cvtepi8_epi16(p_bnProcBuf[%u + j]);\n", k * cnOffsetInGroup);
+          fprintf(fd, "        zmmRes0 = simde_mm512_adds_epi16(zmmRes0, zmm0);\n");
 
-        fprintf(fd,"        zmm1 = _mm512_cvtepi8_epi16(p_bnProcBuf[%u + j +1]);\n", k*cnOffsetInGroup);
+          fprintf(fd, "        zmm1 = simde_mm512_cvtepi8_epi16(p_bnProcBuf[%u + j +1]);\n", k * cnOffsetInGroup);
 
-        fprintf(fd, "       zmmRes1 = _mm512_adds_epi16(zmmRes1, zmm1); \n");
+          fprintf(fd, "       zmmRes1 = simde_mm512_adds_epi16(zmmRes1, zmm1); \n");
         }
 
             // Add LLR from receiver input
-        fprintf(fd,"        zmm0    = _mm512_cvtepi8_epi16(p_llrProcBuf[j]);\n");
-        fprintf(fd,"        zmmRes0 = _mm512_adds_epi16(zmmRes0, zmm0);\n");
+        fprintf(fd, "        zmm0    = simde_mm512_cvtepi8_epi16(p_llrProcBuf[j]);\n");
+        fprintf(fd, "        zmmRes0 = simde_mm512_adds_epi16(zmmRes0, zmm0);\n");
 
-        fprintf(fd,"        zmm1    = _mm512_cvtepi8_epi16(p_llrProcBuf[j +1 ]);\n");
-        fprintf(fd,"        zmmRes1 = _mm512_adds_epi16(zmmRes1, zmm1);\n");
+        fprintf(fd, "        zmm1    = simde_mm512_cvtepi8_epi16(p_llrProcBuf[j +1 ]);\n");
+        fprintf(fd, "        zmmRes1 = simde_mm512_adds_epi16(zmmRes1, zmm1);\n");
 
-            // Pack results back to epi8
-        fprintf(fd,"        zmm0 = _mm512_packs_epi16(zmmRes0, zmmRes1);\n");
-            // zmm0     = [zmmRes1[255:256] zmmRes0[255:256] zmmRes1[127:0] zmmRes0[127:0]]
-            // p_llrRes = [zmmRes1[255:256] zmmRes1[127:0] zmmRes0[255:256] zmmRes0[127:0]]
-        //fprintf(fd,"         (__m512i*) &llrRes[%d + i]    = _mm512_permutex_epi64(zmm0, 0xD8);\n",lut_startAddrBnGroupsLlr[idxBnGroup]>>6 );
+        // Pack results back to epi8
+        fprintf(fd, "        zmm0 = simde_mm512_packs_epi16(zmmRes0, zmmRes1);\n");
+        // zmm0     = [zmmRes1[255:256] zmmRes0[255:256] zmmRes1[127:0] zmmRes0[127:0]]
+        // p_llrRes = [zmmRes1[255:256] zmmRes1[127:0] zmmRes0[255:256] zmmRes0[127:0]]
+        // fprintf(fd,"         (simde__m512i*) &llrRes[%d + i]    = simde_mm512_permutex_epi64(zmm0,
+        // 0xD8);\n",lut_startAddrBnGroupsLlr[idxBnGroup]>>6 );
 
-        fprintf(fd,"            p_llrRes[i] = _mm512_permutex_epi64(zmm0, 0xD8);\n");
+        fprintf(fd, "            p_llrRes[i] = simde_mm512_permutex_epi64(zmm0, 0xD8);\n");
 
         fprintf(fd,"}\n");
     }
@@ -561,39 +560,40 @@ fprintf(fd,  "// Process group with 9 CNs \n");
         cnOffsetInGroup = (lut_numBnInBnGroups[8]*NR_LDPC_ZMAX)>>5;
 
         // Set pointers to start of group 2
-        fprintf(fd,"    p_bnProcBuf     = (__m256i*) &bnProcBuf    [%u];\n",lut_startAddrBnGroups[idxBnGroup]);
-        fprintf(fd,"    p_llrProcBuf    = (__m256i*) &llrProcBuf   [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
-        fprintf(fd,"    p_llrRes        = (__m512i*) &llrRes       [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd, "    p_bnProcBuf     = (simde__m256i*) &bnProcBuf    [%u];\n", lut_startAddrBnGroups[idxBnGroup]);
+        fprintf(fd, "    p_llrProcBuf    = (simde__m256i*) &llrProcBuf   [%d];\n", lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd, "    p_llrRes        = (simde__m512i*) &llrRes       [%d];\n", lut_startAddrBnGroupsLlr[idxBnGroup]);
         // Loop over BNs
         fprintf(fd,"        for (int i=0,j=0;i<M;i++,j+=2) {\n");
             // First 16 LLRs of first CN
-        fprintf(fd,"        zmmRes0 = _mm512_cvtepi8_epi16(p_bnProcBuf [j]);\n");
-        fprintf(fd,"        zmmRes1 = _mm512_cvtepi8_epi16(p_bnProcBuf [j +1]);\n");
+        fprintf(fd, "        zmmRes0 = simde_mm512_cvtepi8_epi16(p_bnProcBuf [j]);\n");
+        fprintf(fd, "        zmmRes1 = simde_mm512_cvtepi8_epi16(p_bnProcBuf [j +1]);\n");
 
-            // Loop over CNs
+        // Loop over CNs
         for (k=1; k<9; k++)
         {
-        fprintf(fd,"        zmm0 = _mm512_cvtepi8_epi16(p_bnProcBuf[%u + j]);\n", k*cnOffsetInGroup);
-        fprintf(fd,"        zmmRes0 = _mm512_adds_epi16(zmmRes0, zmm0);\n");
+          fprintf(fd, "        zmm0 = simde_mm512_cvtepi8_epi16(p_bnProcBuf[%u + j]);\n", k * cnOffsetInGroup);
+          fprintf(fd, "        zmmRes0 = simde_mm512_adds_epi16(zmmRes0, zmm0);\n");
 
-        fprintf(fd,"        zmm1 = _mm512_cvtepi8_epi16(p_bnProcBuf[%u + j +1]);\n", k*cnOffsetInGroup);
+          fprintf(fd, "        zmm1 = simde_mm512_cvtepi8_epi16(p_bnProcBuf[%u + j +1]);\n", k * cnOffsetInGroup);
 
-        fprintf(fd, "       zmmRes1 = _mm512_adds_epi16(zmmRes1, zmm1); \n");
+          fprintf(fd, "       zmmRes1 = simde_mm512_adds_epi16(zmmRes1, zmm1); \n");
         }
 
             // Add LLR from receiver input
-        fprintf(fd,"        zmm0    = _mm512_cvtepi8_epi16(p_llrProcBuf[j]);\n");
-        fprintf(fd,"        zmmRes0 = _mm512_adds_epi16(zmmRes0, zmm0);\n");
+        fprintf(fd, "        zmm0    = simde_mm512_cvtepi8_epi16(p_llrProcBuf[j]);\n");
+        fprintf(fd, "        zmmRes0 = simde_mm512_adds_epi16(zmmRes0, zmm0);\n");
 
-        fprintf(fd,"        zmm1    = _mm512_cvtepi8_epi16(p_llrProcBuf[j +1 ]);\n");
-        fprintf(fd,"        zmmRes1 = _mm512_adds_epi16(zmmRes1, zmm1);\n");
+        fprintf(fd, "        zmm1    = simde_mm512_cvtepi8_epi16(p_llrProcBuf[j +1 ]);\n");
+        fprintf(fd, "        zmmRes1 = simde_mm512_adds_epi16(zmmRes1, zmm1);\n");
 
-            // Pack results back to epi8
-        fprintf(fd,"        zmm0 = _mm512_packs_epi16(zmmRes0, zmmRes1);\n");
-            // zmm0     = [zmmRes1[255:256] zmmRes0[255:256] zmmRes1[127:0] zmmRes0[127:0]]
-            // p_llrRes = [zmmRes1[255:256] zmmRes1[127:0] zmmRes0[255:256] zmmRes0[127:0]]
-        //fprintf(fd,"         (__m512i*) &llrRes[%d + i]    = _mm512_permutex_epi64(zmm0, 0xD8);\n",lut_startAddrBnGroupsLlr[idxBnGroup]>>6 );
-        fprintf(fd,"            p_llrRes[i] = _mm512_permutex_epi64(zmm0, 0xD8);\n");
+        // Pack results back to epi8
+        fprintf(fd, "        zmm0 = simde_mm512_packs_epi16(zmmRes0, zmmRes1);\n");
+        // zmm0     = [zmmRes1[255:256] zmmRes0[255:256] zmmRes1[127:0] zmmRes0[127:0]]
+        // p_llrRes = [zmmRes1[255:256] zmmRes1[127:0] zmmRes0[255:256] zmmRes0[127:0]]
+        // fprintf(fd,"         (simde__m512i*) &llrRes[%d + i]    = simde_mm512_permutex_epi64(zmm0,
+        // 0xD8);\n",lut_startAddrBnGroupsLlr[idxBnGroup]>>6 );
+        fprintf(fd, "            p_llrRes[i] = simde_mm512_permutex_epi64(zmm0, 0xD8);\n");
 
         fprintf(fd,"}\n");
     }
@@ -618,38 +618,38 @@ fprintf(fd,  "// Process group with 10 CNs \n");
         cnOffsetInGroup = (lut_numBnInBnGroups[9]*NR_LDPC_ZMAX)>>5;
 
         // Set pointers to start of group 2
-        fprintf(fd,"    p_bnProcBuf     = (__m256i*) &bnProcBuf    [%u];\n",lut_startAddrBnGroups[idxBnGroup]);
-        fprintf(fd,"    p_llrProcBuf    = (__m256i*) &llrProcBuf   [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
-        fprintf(fd,"    p_llrRes        = (__m512i*) &llrRes       [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd, "    p_bnProcBuf     = (simde__m256i*) &bnProcBuf    [%u];\n", lut_startAddrBnGroups[idxBnGroup]);
+        fprintf(fd, "    p_llrProcBuf    = (simde__m256i*) &llrProcBuf   [%d];\n", lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd, "    p_llrRes        = (simde__m512i*) &llrRes       [%d];\n", lut_startAddrBnGroupsLlr[idxBnGroup]);
         // Loop over BNs
         fprintf(fd,"        for (int i=0,j=0;i<M;i++,j+=2) {\n");
             // First 16 LLRs of first CN
-        fprintf(fd,"        zmmRes0 = _mm512_cvtepi8_epi16(p_bnProcBuf [j]);\n");
-        fprintf(fd,"        zmmRes1 = _mm512_cvtepi8_epi16(p_bnProcBuf [j +1]);\n");
+        fprintf(fd, "        zmmRes0 = simde_mm512_cvtepi8_epi16(p_bnProcBuf [j]);\n");
+        fprintf(fd, "        zmmRes1 = simde_mm512_cvtepi8_epi16(p_bnProcBuf [j +1]);\n");
 
-            // Loop over CNs
+        // Loop over CNs
         for (k=1; k<10; k++)
         {
-        fprintf(fd,"        zmm0 = _mm512_cvtepi8_epi16(p_bnProcBuf[%u + j]);\n", k*cnOffsetInGroup);
-        fprintf(fd,"        zmmRes0 = _mm512_adds_epi16(zmmRes0, zmm0);\n");
+          fprintf(fd, "        zmm0 = simde_mm512_cvtepi8_epi16(p_bnProcBuf[%u + j]);\n", k * cnOffsetInGroup);
+          fprintf(fd, "        zmmRes0 = simde_mm512_adds_epi16(zmmRes0, zmm0);\n");
 
-        fprintf(fd,"        zmm1 = _mm512_cvtepi8_epi16(p_bnProcBuf[%u + j +1]);\n", k*cnOffsetInGroup);
+          fprintf(fd, "        zmm1 = simde_mm512_cvtepi8_epi16(p_bnProcBuf[%u + j +1]);\n", k * cnOffsetInGroup);
 
-        fprintf(fd, "       zmmRes1 = _mm512_adds_epi16(zmmRes1, zmm1); \n");
+          fprintf(fd, "       zmmRes1 = simde_mm512_adds_epi16(zmmRes1, zmm1); \n");
         }
 
             // Add LLR from receiver input
-        fprintf(fd,"        zmm0    = _mm512_cvtepi8_epi16(p_llrProcBuf[j]);\n");
-        fprintf(fd,"        zmmRes0 = _mm512_adds_epi16(zmmRes0, zmm0);\n");
+        fprintf(fd, "        zmm0    = simde_mm512_cvtepi8_epi16(p_llrProcBuf[j]);\n");
+        fprintf(fd, "        zmmRes0 = simde_mm512_adds_epi16(zmmRes0, zmm0);\n");
 
-        fprintf(fd,"        zmm1    = _mm512_cvtepi8_epi16(p_llrProcBuf[j +1 ]);\n");
-        fprintf(fd,"        zmmRes1 = _mm512_adds_epi16(zmmRes1, zmm1);\n");
+        fprintf(fd, "        zmm1    = simde_mm512_cvtepi8_epi16(p_llrProcBuf[j +1 ]);\n");
+        fprintf(fd, "        zmmRes1 = simde_mm512_adds_epi16(zmmRes1, zmm1);\n");
 
-            // Pack results back to epi8
-        fprintf(fd,"        zmm0 = _mm512_packs_epi16(zmmRes0, zmmRes1);\n");
-            // zmm0     = [zmmRes1[255:256] zmmRes0[255:256] zmmRes1[127:0] zmmRes0[127:0]]
-            // p_llrRes = [zmmRes1[255:256] zmmRes1[127:0] zmmRes0[255:256] zmmRes0[127:0]]
-        fprintf(fd,"            p_llrRes[i] = _mm512_permutex_epi64(zmm0, 0xD8);\n");
+        // Pack results back to epi8
+        fprintf(fd, "        zmm0 = simde_mm512_packs_epi16(zmmRes0, zmmRes1);\n");
+        // zmm0     = [zmmRes1[255:256] zmmRes0[255:256] zmmRes1[127:0] zmmRes0[127:0]]
+        // p_llrRes = [zmmRes1[255:256] zmmRes1[127:0] zmmRes0[255:256] zmmRes0[127:0]]
+        fprintf(fd, "            p_llrRes[i] = simde_mm512_permutex_epi64(zmm0, 0xD8);\n");
 
         fprintf(fd,"}\n");
     }
@@ -676,38 +676,38 @@ fprintf(fd,  "// Process group with 11 CNs \n");
         cnOffsetInGroup = (lut_numBnInBnGroups[10]*NR_LDPC_ZMAX)>>5;
 
         // Set pointers to start of group 2
-        fprintf(fd,"    p_bnProcBuf     = (__m256i*) &bnProcBuf    [%u];\n",lut_startAddrBnGroups[idxBnGroup]);
-        fprintf(fd,"    p_llrProcBuf    = (__m256i*) &llrProcBuf   [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
-        fprintf(fd,"    p_llrRes        = (__m512i*) &llrRes       [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd, "    p_bnProcBuf     = (simde__m256i*) &bnProcBuf    [%u];\n", lut_startAddrBnGroups[idxBnGroup]);
+        fprintf(fd, "    p_llrProcBuf    = (simde__m256i*) &llrProcBuf   [%d];\n", lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd, "    p_llrRes        = (simde__m512i*) &llrRes       [%d];\n", lut_startAddrBnGroupsLlr[idxBnGroup]);
         // Loop over BNs
         fprintf(fd,"            for (int i=0,j=0;i<M;i++,j+=2) {\n");
             // First 16 LLRs of first CN
-        fprintf(fd,"            zmmRes0 = _mm512_cvtepi8_epi16(p_bnProcBuf [j]);\n");
-        fprintf(fd,"            zmmRes1 = _mm512_cvtepi8_epi16(p_bnProcBuf [j +1]);\n");
+        fprintf(fd, "            zmmRes0 = simde_mm512_cvtepi8_epi16(p_bnProcBuf [j]);\n");
+        fprintf(fd, "            zmmRes1 = simde_mm512_cvtepi8_epi16(p_bnProcBuf [j +1]);\n");
 
-            // Loop over CNs
+        // Loop over CNs
         for (k=1; k<11; k++)
         {
-        fprintf(fd,"            zmm0 = _mm512_cvtepi8_epi16(p_bnProcBuf[%u + j]);\n", k*cnOffsetInGroup);
-        fprintf(fd,"            zmmRes0 = _mm512_adds_epi16(zmmRes0, zmm0);\n");
+          fprintf(fd, "            zmm0 = simde_mm512_cvtepi8_epi16(p_bnProcBuf[%u + j]);\n", k * cnOffsetInGroup);
+          fprintf(fd, "            zmmRes0 = simde_mm512_adds_epi16(zmmRes0, zmm0);\n");
 
-        fprintf(fd,"            zmm1 = _mm512_cvtepi8_epi16(p_bnProcBuf[%u + j +1]);\n", k*cnOffsetInGroup);
+          fprintf(fd, "            zmm1 = simde_mm512_cvtepi8_epi16(p_bnProcBuf[%u + j +1]);\n", k * cnOffsetInGroup);
 
-        fprintf(fd, "           zmmRes1 = _mm512_adds_epi16(zmmRes1, zmm1); \n");
+          fprintf(fd, "           zmmRes1 = simde_mm512_adds_epi16(zmmRes1, zmm1); \n");
         }
 
             // Add LLR from receiver input
-        fprintf(fd,"            zmm0    = _mm512_cvtepi8_epi16(p_llrProcBuf[j]);\n");
-        fprintf(fd,"            zmmRes0 = _mm512_adds_epi16(zmmRes0, zmm0);\n");
+        fprintf(fd, "            zmm0    = simde_mm512_cvtepi8_epi16(p_llrProcBuf[j]);\n");
+        fprintf(fd, "            zmmRes0 = simde_mm512_adds_epi16(zmmRes0, zmm0);\n");
 
-        fprintf(fd,"            zmm1    = _mm512_cvtepi8_epi16(p_llrProcBuf[j +1 ]);\n");
-        fprintf(fd,"            zmmRes1 = _mm512_adds_epi16(zmmRes1, zmm1);\n");
+        fprintf(fd, "            zmm1    = simde_mm512_cvtepi8_epi16(p_llrProcBuf[j +1 ]);\n");
+        fprintf(fd, "            zmmRes1 = simde_mm512_adds_epi16(zmmRes1, zmm1);\n");
 
-            // Pack results back to epi8
-        fprintf(fd,"            zmm0 = _mm512_packs_epi16(zmmRes0, zmmRes1);\n");
-            // zmm0     = [zmmRes1[255:256] zmmRes0[255:256] zmmRes1[127:0] zmmRes0[127:0]]
-            // p_llrRes = [zmmRes1[255:256] zmmRes1[127:0] zmmRes0[255:256] zmmRes0[127:0]]
-        fprintf(fd,"            p_llrRes[i] = _mm512_permutex_epi64(zmm0, 0xD8);\n");
+        // Pack results back to epi8
+        fprintf(fd, "            zmm0 = simde_mm512_packs_epi16(zmmRes0, zmmRes1);\n");
+        // zmm0     = [zmmRes1[255:128] zmmRes0[255:128] zmmRes1[127:0] zmmRes0[127:0]]
+        // p_llrRes = [zmmRes1[255:128] zmmRes1[127:0] zmmRes0[255:128] zmmRes0[127:0]]
+        fprintf(fd, "            p_llrRes[i] = simde_mm512_permutex_epi64(zmm0, 0xD8);\n");
 
         fprintf(fd,"}\n");
     }
@@ -731,38 +731,38 @@ fprintf(fd,  "// Process group with 12 CNs \n");
         cnOffsetInGroup = (lut_numBnInBnGroups[11]*NR_LDPC_ZMAX)>>5;
 
         // Set pointers to start of group 2
-        fprintf(fd,"    p_bnProcBuf     = (__m256i*) &bnProcBuf    [%u];\n",lut_startAddrBnGroups[idxBnGroup]);
-        fprintf(fd,"    p_llrProcBuf    = (__m256i*) &llrProcBuf   [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
-        fprintf(fd,"    p_llrRes        = (__m512i*) &llrRes       [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd, "    p_bnProcBuf     = (simde__m256i*) &bnProcBuf    [%u];\n", lut_startAddrBnGroups[idxBnGroup]);
+        fprintf(fd, "    p_llrProcBuf    = (simde__m256i*) &llrProcBuf   [%d];\n", lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd, "    p_llrRes        = (simde__m512i*) &llrRes       [%d];\n", lut_startAddrBnGroupsLlr[idxBnGroup]);
         // Loop over BNs
         fprintf(fd,"            for (int i=0,j=0;i<M;i++,j+=2) {\n");
             // First 16 LLRs of first CN
-        fprintf(fd,"            zmmRes0 = _mm512_cvtepi8_epi16(p_bnProcBuf [j]);\n");
-        fprintf(fd,"            zmmRes1 = _mm512_cvtepi8_epi16(p_bnProcBuf [j +1]);\n");
+        fprintf(fd, "            zmmRes0 = simde_mm512_cvtepi8_epi16(p_bnProcBuf [j]);\n");
+        fprintf(fd, "            zmmRes1 = simde_mm512_cvtepi8_epi16(p_bnProcBuf [j +1]);\n");
 
-            // Loop over CNs
+        // Loop over CNs
         for (k=1; k<12; k++)
         {
-        fprintf(fd,"            zmm0 = _mm512_cvtepi8_epi16(p_bnProcBuf[%u + j]);\n", k*cnOffsetInGroup);
-        fprintf(fd,"            zmmRes0 = _mm512_adds_epi16(zmmRes0, zmm0);\n");
+          fprintf(fd, "            zmm0 = simde_mm512_cvtepi8_epi16(p_bnProcBuf[%u + j]);\n", k * cnOffsetInGroup);
+          fprintf(fd, "            zmmRes0 = simde_mm512_adds_epi16(zmmRes0, zmm0);\n");
 
-        fprintf(fd,"            zmm1 = _mm512_cvtepi8_epi16(p_bnProcBuf[%u + j +1]);\n", k*cnOffsetInGroup);
+          fprintf(fd, "            zmm1 = simde_mm512_cvtepi8_epi16(p_bnProcBuf[%u + j +1]);\n", k * cnOffsetInGroup);
 
-        fprintf(fd, "           zmmRes1 = _mm512_adds_epi16(zmmRes1, zmm1); \n");
+          fprintf(fd, "           zmmRes1 = simde_mm512_adds_epi16(zmmRes1, zmm1); \n");
         }
 
             // Add LLR from receiver input
-        fprintf(fd,"            zmm0    = _mm512_cvtepi8_epi16(p_llrProcBuf[j]);\n");
-        fprintf(fd,"            zmmRes0 = _mm512_adds_epi16(zmmRes0, zmm0);\n");
+        fprintf(fd, "            zmm0    = simde_mm512_cvtepi8_epi16(p_llrProcBuf[j]);\n");
+        fprintf(fd, "            zmmRes0 = simde_mm512_adds_epi16(zmmRes0, zmm0);\n");
 
-        fprintf(fd,"            zmm1    = _mm512_cvtepi8_epi16(p_llrProcBuf[j +1 ]);\n");
-        fprintf(fd,"            zmmRes1 = _mm512_adds_epi16(zmmRes1, zmm1);\n");
+        fprintf(fd, "            zmm1    = simde_mm512_cvtepi8_epi16(p_llrProcBuf[j +1 ]);\n");
+        fprintf(fd, "            zmmRes1 = simde_mm512_adds_epi16(zmmRes1, zmm1);\n");
 
-            // Pack results back to epi8
-        fprintf(fd,"            zmm0 = _mm512_packs_epi16(zmmRes0, zmmRes1);\n");
-            // zmm0     = [zmmRes1[255:256] zmmRes0[255:256] zmmRes1[127:0] zmmRes0[127:0]]
-            // p_llrRes = [zmmRes1[255:256] zmmRes1[127:0] zmmRes0[255:256] zmmRes0[127:0]]
-        fprintf(fd,"            p_llrRes[i] = _mm512_permutex_epi64(zmm0, 0xD8);\n");
+        // Pack results back to epi8
+        fprintf(fd, "            zmm0 = simde_mm512_packs_epi16(zmmRes0, zmmRes1);\n");
+        // zmm0     = [zmmRes1[255:128] zmmRes0[255:128] zmmRes1[127:0] zmmRes0[127:0]]
+        // p_llrRes = [zmmRes1[255:128] zmmRes1[127:0] zmmRes0[255:128] zmmRes0[127:0]]
+        fprintf(fd, "            p_llrRes[i] = simde_mm512_permutex_epi64(zmm0, 0xD8);\n");
 
         fprintf(fd,"}\n");
     }
@@ -787,40 +787,40 @@ fprintf(fd,  "// Process group with 13 CNs \n");
         cnOffsetInGroup = (lut_numBnInBnGroups[12]*NR_LDPC_ZMAX)>>5;
 
         // Set pointers to start of group 2
-        fprintf(fd,"    p_bnProcBuf     = (__m256i*) &bnProcBuf    [%u];\n",lut_startAddrBnGroups[idxBnGroup]);
-        fprintf(fd,"    p_llrProcBuf    = (__m256i*) &llrProcBuf   [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
-        fprintf(fd,"    p_llrRes        = (__m512i*) &llrRes       [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd, "    p_bnProcBuf     = (simde__m256i*) &bnProcBuf    [%u];\n", lut_startAddrBnGroups[idxBnGroup]);
+        fprintf(fd, "    p_llrProcBuf    = (simde__m256i*) &llrProcBuf   [%d];\n", lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd, "    p_llrRes        = (simde__m512i*) &llrRes       [%d];\n", lut_startAddrBnGroupsLlr[idxBnGroup]);
         // Loop over BNs
         fprintf(fd,"        for (int i=0,j=0;i<M;i++,j+=2) {\n");
             // First 16 LLRs of first CN
-        fprintf(fd,"        zmmRes0 = _mm512_cvtepi8_epi16(p_bnProcBuf [j]);\n");
-        fprintf(fd,"        zmmRes1 = _mm512_cvtepi8_epi16(p_bnProcBuf [j +1]);\n");
+        fprintf(fd, "        zmmRes0 = simde_mm512_cvtepi8_epi16(p_bnProcBuf [j]);\n");
+        fprintf(fd, "        zmmRes1 = simde_mm512_cvtepi8_epi16(p_bnProcBuf [j +1]);\n");
 
-            // Loop over CNs
+        // Loop over CNs
         for (k=1; k<13; k++)
         {
-        fprintf(fd,"        zmm0 = _mm512_cvtepi8_epi16(p_bnProcBuf[%u + j]);\n", k*cnOffsetInGroup);
-        fprintf(fd,"        zmmRes0 = _mm512_adds_epi16(zmmRes0, zmm0);\n");
+          fprintf(fd, "        zmm0 = simde_mm512_cvtepi8_epi16(p_bnProcBuf[%u + j]);\n", k * cnOffsetInGroup);
+          fprintf(fd, "        zmmRes0 = simde_mm512_adds_epi16(zmmRes0, zmm0);\n");
 
-        fprintf(fd,"        zmm1 = _mm512_cvtepi8_epi16(p_bnProcBuf[%u + j +1]);\n", k*cnOffsetInGroup);
+          fprintf(fd, "        zmm1 = simde_mm512_cvtepi8_epi16(p_bnProcBuf[%u + j +1]);\n", k * cnOffsetInGroup);
 
-        fprintf(fd,"        zmmRes1 = _mm512_adds_epi16(zmmRes1, zmm1); \n");
+          fprintf(fd, "        zmmRes1 = simde_mm512_adds_epi16(zmmRes1, zmm1); \n");
             }
 
             // Add LLR from receiver input
-        fprintf(fd,"        zmm0    = _mm512_cvtepi8_epi16(p_llrProcBuf[j]);\n");
-        fprintf(fd,"        zmmRes0 = _mm512_adds_epi16(zmmRes0, zmm0);\n");
+            fprintf(fd, "        zmm0    = simde_mm512_cvtepi8_epi16(p_llrProcBuf[j]);\n");
+            fprintf(fd, "        zmmRes0 = simde_mm512_adds_epi16(zmmRes0, zmm0);\n");
 
-        fprintf(fd,"        zmm1    = _mm512_cvtepi8_epi16(p_llrProcBuf[j +1 ]);\n");
-        fprintf(fd,"        zmmRes1 = _mm512_adds_epi16(zmmRes1, zmm1);\n");
+            fprintf(fd, "        zmm1    = simde_mm512_cvtepi8_epi16(p_llrProcBuf[j +1 ]);\n");
+            fprintf(fd, "        zmmRes1 = simde_mm512_adds_epi16(zmmRes1, zmm1);\n");
 
             // Pack results back to epi8
-        fprintf(fd,"        zmm0 = _mm512_packs_epi16(zmmRes0, zmmRes1);\n");
+            fprintf(fd, "        zmm0 = simde_mm512_packs_epi16(zmmRes0, zmmRes1);\n");
             // zmm0     = [zmmRes1[255:256] zmmRes0[255:256] zmmRes1[127:0] zmmRes0[127:0]]
             // p_llrRes = [zmmRes1[255:256] zmmRes1[127:0] zmmRes0[255:256] zmmRes0[127:0]]
-        fprintf(fd,"            p_llrRes[i] = _mm512_permutex_epi64(zmm0, 0xD8);\n");
+            fprintf(fd, "            p_llrRes[i] = simde_mm512_permutex_epi64(zmm0, 0xD8);\n");
 
-        fprintf(fd,"}\n");
+            fprintf(fd, "}\n");
     }
 
 
@@ -844,38 +844,38 @@ fprintf(fd,  "// Process group with 14 CNs \n");
         cnOffsetInGroup = (lut_numBnInBnGroups[13]*NR_LDPC_ZMAX)>>5;
 
         // Set pointers to start of group 2
-        fprintf(fd,"    p_bnProcBuf     = (__m256i*) &bnProcBuf    [%u];\n",lut_startAddrBnGroups[idxBnGroup]);
-        fprintf(fd,"    p_llrProcBuf    = (__m256i*) &llrProcBuf   [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
-        fprintf(fd,"    p_llrRes        = (__m512i*) &llrRes       [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd, "    p_bnProcBuf     = (simde__m256i*) &bnProcBuf    [%u];\n", lut_startAddrBnGroups[idxBnGroup]);
+        fprintf(fd, "    p_llrProcBuf    = (simde__m256i*) &llrProcBuf   [%d];\n", lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd, "    p_llrRes        = (simde__m512i*) &llrRes       [%d];\n", lut_startAddrBnGroupsLlr[idxBnGroup]);
         // Loop over BNs
         fprintf(fd,"        for (int i=0,j=0;i<M;i++,j+=2) {\n");
             // First 16 LLRs of first CN
-        fprintf(fd,"        zmmRes0 = _mm512_cvtepi8_epi16(p_bnProcBuf [j]);\n");
-        fprintf(fd,"        zmmRes1 = _mm512_cvtepi8_epi16(p_bnProcBuf [j +1]);\n");
+        fprintf(fd, "        zmmRes0 = simde_mm512_cvtepi8_epi16(p_bnProcBuf [j]);\n");
+        fprintf(fd, "        zmmRes1 = simde_mm512_cvtepi8_epi16(p_bnProcBuf [j +1]);\n");
 
-            // Loop over CNs
+        // Loop over CNs
         for (k=1; k<14; k++)
         {
-        fprintf(fd,"        zmm0 = _mm512_cvtepi8_epi16(p_bnProcBuf[%u + j]);\n", k*cnOffsetInGroup);
-        fprintf(fd,"        zmmRes0 = _mm512_adds_epi16(zmmRes0, zmm0);\n");
+          fprintf(fd, "        zmm0 = simde_mm512_cvtepi8_epi16(p_bnProcBuf[%u + j]);\n", k * cnOffsetInGroup);
+          fprintf(fd, "        zmmRes0 = simde_mm512_adds_epi16(zmmRes0, zmm0);\n");
 
-        fprintf(fd,"        zmm1 = _mm512_cvtepi8_epi16(p_bnProcBuf[%u + j +1]);\n", k*cnOffsetInGroup);
+          fprintf(fd, "        zmm1 = simde_mm512_cvtepi8_epi16(p_bnProcBuf[%u + j +1]);\n", k * cnOffsetInGroup);
 
-        fprintf(fd, "       zmmRes1 = _mm512_adds_epi16(zmmRes1, zmm1); \n");
+          fprintf(fd, "       zmmRes1 = simde_mm512_adds_epi16(zmmRes1, zmm1); \n");
         }
 
             // Add LLR from receiver input
-        fprintf(fd,"        zmm0    = _mm512_cvtepi8_epi16(p_llrProcBuf[j]);\n");
-        fprintf(fd,"        zmmRes0 = _mm512_adds_epi16(zmmRes0, zmm0);\n");
+        fprintf(fd, "        zmm0    = simde_mm512_cvtepi8_epi16(p_llrProcBuf[j]);\n");
+        fprintf(fd, "        zmmRes0 = simde_mm512_adds_epi16(zmmRes0, zmm0);\n");
 
-        fprintf(fd,"        zmm1    = _mm512_cvtepi8_epi16(p_llrProcBuf[j +1 ]);\n");
-        fprintf(fd,"        zmmRes1 = _mm512_adds_epi16(zmmRes1, zmm1);\n");
+        fprintf(fd, "        zmm1    = simde_mm512_cvtepi8_epi16(p_llrProcBuf[j +1 ]);\n");
+        fprintf(fd, "        zmmRes1 = simde_mm512_adds_epi16(zmmRes1, zmm1);\n");
 
-            // Pack results back to epi8
-        fprintf(fd,"        zmm0 = _mm512_packs_epi16(zmmRes0, zmmRes1);\n");
-            // zmm0     = [zmmRes1[255:256] zmmRes0[255:256] zmmRes1[127:0] zmmRes0[127:0]]
-            // p_llrRes = [zmmRes1[255:256] zmmRes1[127:0] zmmRes0[255:256] zmmRes0[127:0]]
-        fprintf(fd,"            p_llrRes[i] = _mm512_permutex_epi64(zmm0, 0xD8);\n");
+        // Pack results back to epi8
+        fprintf(fd, "        zmm0 = simde_mm512_packs_epi16(zmmRes0, zmmRes1);\n");
+        // zmm0     = [zmmRes1[255:128] zmmRes0[255:128] zmmRes1[127:0] zmmRes0[127:0]]
+        // p_llrRes = [zmmRes1[255:128] zmmRes1[127:0] zmmRes0[255:128] zmmRes0[127:0]]
+        fprintf(fd, "            p_llrRes[i] = simde_mm512_permutex_epi64(zmm0, 0xD8);\n");
 
         fprintf(fd,"}\n");
     }
@@ -900,38 +900,38 @@ fprintf(fd,  "// Process group with 15 CNs \n");
         cnOffsetInGroup = (lut_numBnInBnGroups[14]*NR_LDPC_ZMAX)>>5;
 
         // Set pointers to start of group 2
-        fprintf(fd,"    p_bnProcBuf     = (__m256i*) &bnProcBuf    [%u];\n",lut_startAddrBnGroups[idxBnGroup]);
-        fprintf(fd,"    p_llrProcBuf    = (__m256i*) &llrProcBuf   [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
-        fprintf(fd,"    p_llrRes        = (__m512i*) &llrRes       [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd, "    p_bnProcBuf     = (simde__m256i*) &bnProcBuf    [%u];\n", lut_startAddrBnGroups[idxBnGroup]);
+        fprintf(fd, "    p_llrProcBuf    = (simde__m256i*) &llrProcBuf   [%d];\n", lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd, "    p_llrRes        = (simde__m512i*) &llrRes       [%d];\n", lut_startAddrBnGroupsLlr[idxBnGroup]);
         // Loop over BNs
         fprintf(fd,"        for (int i=0,j=0;i<M;i++,j+=2) {\n");
             // First 16 LLRs of first CN
-        fprintf(fd,"        zmmRes0 = _mm512_cvtepi8_epi16(p_bnProcBuf [j]);\n");
-        fprintf(fd,"        zmmRes1 = _mm512_cvtepi8_epi16(p_bnProcBuf [j +1]);\n");
+        fprintf(fd, "        zmmRes0 = simde_mm512_cvtepi8_epi16(p_bnProcBuf [j]);\n");
+        fprintf(fd, "        zmmRes1 = simde_mm512_cvtepi8_epi16(p_bnProcBuf [j +1]);\n");
 
-            // Loop over CNs
+        // Loop over CNs
         for (k=1; k<15; k++)
         {
-        fprintf(fd,"        zmm0 = _mm512_cvtepi8_epi16(p_bnProcBuf[%u + j]);\n", k*cnOffsetInGroup);
-        fprintf(fd,"        zmmRes0 = _mm512_adds_epi16(zmmRes0, zmm0);\n");
+          fprintf(fd, "        zmm0 = simde_mm512_cvtepi8_epi16(p_bnProcBuf[%u + j]);\n", k * cnOffsetInGroup);
+          fprintf(fd, "        zmmRes0 = simde_mm512_adds_epi16(zmmRes0, zmm0);\n");
 
-        fprintf(fd,"        zmm1 = _mm512_cvtepi8_epi16(p_bnProcBuf[%u + j +1]);\n", k*cnOffsetInGroup);
+          fprintf(fd, "        zmm1 = simde_mm512_cvtepi8_epi16(p_bnProcBuf[%u + j +1]);\n", k * cnOffsetInGroup);
 
-        fprintf(fd, "       zmmRes1 = _mm512_adds_epi16(zmmRes1, zmm1); \n");
+          fprintf(fd, "       zmmRes1 = simde_mm512_adds_epi16(zmmRes1, zmm1); \n");
         }
 
             // Add LLR from receiver input
-        fprintf(fd,"        zmm0    = _mm512_cvtepi8_epi16(p_llrProcBuf[j]);\n");
-        fprintf(fd,"        zmmRes0 = _mm512_adds_epi16(zmmRes0, zmm0);\n");
+        fprintf(fd, "        zmm0    = simde_mm512_cvtepi8_epi16(p_llrProcBuf[j]);\n");
+        fprintf(fd, "        zmmRes0 = simde_mm512_adds_epi16(zmmRes0, zmm0);\n");
 
-        fprintf(fd,"        zmm1    = _mm512_cvtepi8_epi16(p_llrProcBuf[j +1 ]);\n");
-        fprintf(fd,"        zmmRes1 = _mm512_adds_epi16(zmmRes1, zmm1);\n");
+        fprintf(fd, "        zmm1    = simde_mm512_cvtepi8_epi16(p_llrProcBuf[j +1 ]);\n");
+        fprintf(fd, "        zmmRes1 = simde_mm512_adds_epi16(zmmRes1, zmm1);\n");
 
-            // Pack results back to epi8
-        fprintf(fd,"        zmm0 = _mm512_packs_epi16(zmmRes0, zmmRes1);\n");
-            // zmm0     = [zmmRes1[255:256] zmmRes0[255:256] zmmRes1[127:0] zmmRes0[127:0]]
-            // p_llrRes = [zmmRes1[255:256] zmmRes1[127:0] zmmRes0[255:256] zmmRes0[127:0]]
-         fprintf(fd,"            p_llrRes[i] = _mm512_permutex_epi64(zmm0, 0xD8);\n");
+        // Pack results back to epi8
+        fprintf(fd, "        zmm0 = simde_mm512_packs_epi16(zmmRes0, zmmRes1);\n");
+        // zmm0     = [zmmRes1[255:128] zmmRes0[255:128] zmmRes1[127:0] zmmRes0[127:0]]
+        // p_llrRes = [zmmRes1[255:128] zmmRes1[127:0] zmmRes0[255:128] zmmRes0[127:0]]
+        fprintf(fd, "            p_llrRes[i] = simde_mm512_permutex_epi64(zmm0, 0xD8);\n");
 
         fprintf(fd,"}\n");
     }
@@ -957,38 +957,38 @@ fprintf(fd,  "// Process group with 16 CNs \n");
         cnOffsetInGroup = (lut_numBnInBnGroups[15]*NR_LDPC_ZMAX)>>5;
 
         // Set pointers to start of group 2
-        fprintf(fd,"    p_bnProcBuf     = (__m256i*) &bnProcBuf    [%u];\n",lut_startAddrBnGroups[idxBnGroup]);
-        fprintf(fd,"    p_llrProcBuf    = (__m256i*) &llrProcBuf   [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
-        fprintf(fd,"    p_llrRes        = (__m512i*) &llrRes       [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd, "    p_bnProcBuf     = (simde__m256i*) &bnProcBuf    [%u];\n", lut_startAddrBnGroups[idxBnGroup]);
+        fprintf(fd, "    p_llrProcBuf    = (simde__m256i*) &llrProcBuf   [%d];\n", lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd, "    p_llrRes        = (simde__m512i*) &llrRes       [%d];\n", lut_startAddrBnGroupsLlr[idxBnGroup]);
         // Loop over BNs
         fprintf(fd,"        for (int i=0,j=0;i<M;i++,j+=2) {\n");
             // First 16 LLRs of first CN
-        fprintf(fd,"        zmmRes0 = _mm512_cvtepi8_epi16(p_bnProcBuf [j]);\n");
-        fprintf(fd,"        zmmRes1 = _mm512_cvtepi8_epi16(p_bnProcBuf [j +1]);\n");
+        fprintf(fd, "        zmmRes0 = simde_mm512_cvtepi8_epi16(p_bnProcBuf [j]);\n");
+        fprintf(fd, "        zmmRes1 = simde_mm512_cvtepi8_epi16(p_bnProcBuf [j +1]);\n");
 
-            // Loop over CNs
+        // Loop over CNs
         for (k=1; k<16; k++)
         {
-        fprintf(fd,"        zmm0 = _mm512_cvtepi8_epi16(p_bnProcBuf[%u + j]);\n", k*cnOffsetInGroup);
-        fprintf(fd,"        zmmRes0 = _mm512_adds_epi16(zmmRes0, zmm0);\n");
+          fprintf(fd, "        zmm0 = simde_mm512_cvtepi8_epi16(p_bnProcBuf[%u + j]);\n", k * cnOffsetInGroup);
+          fprintf(fd, "        zmmRes0 = simde_mm512_adds_epi16(zmmRes0, zmm0);\n");
 
-        fprintf(fd,"        zmm1 = _mm512_cvtepi8_epi16(p_bnProcBuf[%u + j +1]);\n", k*cnOffsetInGroup);
+          fprintf(fd, "        zmm1 = simde_mm512_cvtepi8_epi16(p_bnProcBuf[%u + j +1]);\n", k * cnOffsetInGroup);
 
-        fprintf(fd, "       zmmRes1 = _mm512_adds_epi16(zmmRes1, zmm1); \n");
+          fprintf(fd, "       zmmRes1 = simde_mm512_adds_epi16(zmmRes1, zmm1); \n");
         }
 
             // Add LLR from receiver input
-        fprintf(fd,"        zmm0    = _mm512_cvtepi8_epi16(p_llrProcBuf[j]);\n");
-        fprintf(fd,"        zmmRes0 = _mm512_adds_epi16(zmmRes0, zmm0);\n");
+        fprintf(fd, "        zmm0    = simde_mm512_cvtepi8_epi16(p_llrProcBuf[j]);\n");
+        fprintf(fd, "        zmmRes0 = simde_mm512_adds_epi16(zmmRes0, zmm0);\n");
 
-        fprintf(fd,"        zmm1    = _mm512_cvtepi8_epi16(p_llrProcBuf[j +1 ]);\n");
-        fprintf(fd,"        zmmRes1 = _mm512_adds_epi16(zmmRes1, zmm1);\n");
+        fprintf(fd, "        zmm1    = simde_mm512_cvtepi8_epi16(p_llrProcBuf[j +1 ]);\n");
+        fprintf(fd, "        zmmRes1 = simde_mm512_adds_epi16(zmmRes1, zmm1);\n");
 
-            // Pack results back to epi8
-        fprintf(fd,"        zmm0 = _mm512_packs_epi16(zmmRes0, zmmRes1);\n");
-            // zmm0     = [zmmRes1[255:256] zmmRes0[255:256] zmmRes1[127:0] zmmRes0[127:0]]
-            // p_llrRes = [zmmRes1[255:256] zmmRes1[127:0] zmmRes0[255:256] zmmRes0[127:0]]
-        fprintf(fd,"            p_llrRes[i] = _mm512_permutex_epi64(zmm0, 0xD8);\n");
+        // Pack results back to epi8
+        fprintf(fd, "        zmm0 = simde_mm512_packs_epi16(zmmRes0, zmmRes1);\n");
+        // zmm0     = [zmmRes1[255:128] zmmRes0[255:128] zmmRes1[127:0] zmmRes0[127:0]]
+        // p_llrRes = [zmmRes1[255:128] zmmRes1[127:0] zmmRes0[255:128] zmmRes0[127:0]]
+        fprintf(fd, "            p_llrRes[i] = simde_mm512_permutex_epi64(zmm0, 0xD8);\n");
 
         fprintf(fd,"}\n");
     }
@@ -1013,38 +1013,38 @@ fprintf(fd,  "// Process group with 17 CNs \n");
         cnOffsetInGroup = (lut_numBnInBnGroups[16]*NR_LDPC_ZMAX)>>5;
 
         // Set pointers to start of group 2
-        fprintf(fd,"    p_bnProcBuf     = (__m256i*) &bnProcBuf    [%u];\n",lut_startAddrBnGroups[idxBnGroup]);
-        fprintf(fd,"    p_llrProcBuf    = (__m256i*) &llrProcBuf   [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
-        fprintf(fd,"    p_llrRes        = (__m512i*) &llrRes       [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd, "    p_bnProcBuf     = (simde__m256i*) &bnProcBuf    [%u];\n", lut_startAddrBnGroups[idxBnGroup]);
+        fprintf(fd, "    p_llrProcBuf    = (simde__m256i*) &llrProcBuf   [%d];\n", lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd, "    p_llrRes        = (simde__m512i*) &llrRes       [%d];\n", lut_startAddrBnGroupsLlr[idxBnGroup]);
         // Loop over BNs
         fprintf(fd,"        for (int i=0,j=0;i<M;i++,j+=2) {\n");
             // First 16 LLRs of first CN
-        fprintf(fd,"        zmmRes0 = _mm512_cvtepi8_epi16(p_bnProcBuf [j]);\n");
-        fprintf(fd,"        zmmRes1 = _mm512_cvtepi8_epi16(p_bnProcBuf [j +1]);\n");
+        fprintf(fd, "        zmmRes0 = simde_mm512_cvtepi8_epi16(p_bnProcBuf [j]);\n");
+        fprintf(fd, "        zmmRes1 = simde_mm512_cvtepi8_epi16(p_bnProcBuf [j +1]);\n");
 
-            // Loop over CNs
+        // Loop over CNs
         for (k=1; k<17; k++)
         {
-        fprintf(fd,"        zmm0 = _mm512_cvtepi8_epi16(p_bnProcBuf[%u + j]);\n", k*cnOffsetInGroup);
-        fprintf(fd,"        zmmRes0 = _mm512_adds_epi16(zmmRes0, zmm0);\n");
+          fprintf(fd, "        zmm0 = simde_mm512_cvtepi8_epi16(p_bnProcBuf[%u + j]);\n", k * cnOffsetInGroup);
+          fprintf(fd, "        zmmRes0 = simde_mm512_adds_epi16(zmmRes0, zmm0);\n");
 
-        fprintf(fd,"        zmm1 = _mm512_cvtepi8_epi16(p_bnProcBuf[%u + j +1]);\n", k*cnOffsetInGroup);
+          fprintf(fd, "        zmm1 = simde_mm512_cvtepi8_epi16(p_bnProcBuf[%u + j +1]);\n", k * cnOffsetInGroup);
 
-        fprintf(fd, "       zmmRes1 = _mm512_adds_epi16(zmmRes1, zmm1); \n");
+          fprintf(fd, "       zmmRes1 = simde_mm512_adds_epi16(zmmRes1, zmm1); \n");
         }
 
             // Add LLR from receiver input
-        fprintf(fd,"        zmm0    = _mm512_cvtepi8_epi16(p_llrProcBuf[j]);\n");
-        fprintf(fd,"        zmmRes0 = _mm512_adds_epi16(zmmRes0, zmm0);\n");
+        fprintf(fd, "        zmm0    = simde_mm512_cvtepi8_epi16(p_llrProcBuf[j]);\n");
+        fprintf(fd, "        zmmRes0 = simde_mm512_adds_epi16(zmmRes0, zmm0);\n");
 
-        fprintf(fd,"        zmm1    = _mm512_cvtepi8_epi16(p_llrProcBuf[j +1 ]);\n");
-        fprintf(fd,"        zmmRes1 = _mm512_adds_epi16(zmmRes1, zmm1);\n");
+        fprintf(fd, "        zmm1    = simde_mm512_cvtepi8_epi16(p_llrProcBuf[j +1 ]);\n");
+        fprintf(fd, "        zmmRes1 = simde_mm512_adds_epi16(zmmRes1, zmm1);\n");
 
-            // Pack results back to epi8
-        fprintf(fd,"        zmm0 = _mm512_packs_epi16(zmmRes0, zmmRes1);\n");
-            // zmm0     = [zmmRes1[255:256] zmmRes0[255:256] zmmRes1[127:0] zmmRes0[127:0]]
-            // p_llrRes = [zmmRes1[255:256] zmmRes1[127:0] zmmRes0[255:256] zmmRes0[127:0]]
-        fprintf(fd,"            p_llrRes[i] = _mm512_permutex_epi64(zmm0, 0xD8);\n");
+        // Pack results back to epi8
+        fprintf(fd, "        zmm0 = simde_mm512_packs_epi16(zmmRes0, zmmRes1);\n");
+        // zmm0     = [zmmRes1[255:128] zmmRes0[255:128] zmmRes1[127:0] zmmRes0[127:0]]
+        // p_llrRes = [zmmRes1[255:128] zmmRes1[127:0] zmmRes0[255:128] zmmRes0[127:0]]
+        fprintf(fd, "            p_llrRes[i] = simde_mm512_permutex_epi64(zmm0, 0xD8);\n");
 
         fprintf(fd,"}\n");
     }
@@ -1069,38 +1069,38 @@ fprintf(fd,  "// Process group with 18 CNs \n");
         cnOffsetInGroup = (lut_numBnInBnGroups[17]*NR_LDPC_ZMAX)>>5;
 
         // Set pointers to start of group 2
-        fprintf(fd,"    p_bnProcBuf     = (__m256i*) &bnProcBuf    [%u];\n",lut_startAddrBnGroups[idxBnGroup]);
-        fprintf(fd,"    p_llrProcBuf    = (__m256i*) &llrProcBuf   [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
-        fprintf(fd,"    p_llrRes        = (__m512i*) &llrRes       [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd, "    p_bnProcBuf     = (simde__m256i*) &bnProcBuf    [%u];\n", lut_startAddrBnGroups[idxBnGroup]);
+        fprintf(fd, "    p_llrProcBuf    = (simde__m256i*) &llrProcBuf   [%d];\n", lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd, "    p_llrRes        = (simde__m512i*) &llrRes       [%d];\n", lut_startAddrBnGroupsLlr[idxBnGroup]);
         // Loop over BNs
         fprintf(fd,"        for (int i=0,j=0;i<M;i++,j+=2) {\n");
             // First 16 LLRs of first CN
-        fprintf(fd,"        zmmRes0 = _mm512_cvtepi8_epi16(p_bnProcBuf [j]);\n");
-        fprintf(fd,"        zmmRes1 = _mm512_cvtepi8_epi16(p_bnProcBuf [j +1]);\n");
+        fprintf(fd, "        zmmRes0 = simde_mm512_cvtepi8_epi16(p_bnProcBuf [j]);\n");
+        fprintf(fd, "        zmmRes1 = simde_mm512_cvtepi8_epi16(p_bnProcBuf [j +1]);\n");
 
-            // Loop over CNs
+        // Loop over CNs
         for (k=1; k<18; k++)
         {
-        fprintf(fd,"        zmm0 = _mm512_cvtepi8_epi16(p_bnProcBuf[%u + j]);\n", k*cnOffsetInGroup);
-        fprintf(fd,"        zmmRes0 = _mm512_adds_epi16(zmmRes0, zmm0);\n");
+          fprintf(fd, "        zmm0 = simde_mm512_cvtepi8_epi16(p_bnProcBuf[%u + j]);\n", k * cnOffsetInGroup);
+          fprintf(fd, "        zmmRes0 = simde_mm512_adds_epi16(zmmRes0, zmm0);\n");
 
-        fprintf(fd,"        zmm1 = _mm512_cvtepi8_epi16(p_bnProcBuf[%u + j +1]);\n", k*cnOffsetInGroup);
+          fprintf(fd, "        zmm1 = simde_mm512_cvtepi8_epi16(p_bnProcBuf[%u + j +1]);\n", k * cnOffsetInGroup);
 
-        fprintf(fd, "       zmmRes1 = _mm512_adds_epi16(zmmRes1, zmm1); \n");
+          fprintf(fd, "       zmmRes1 = simde_mm512_adds_epi16(zmmRes1, zmm1); \n");
         }
 
             // Add LLR from receiver input
-        fprintf(fd,"        zmm0    = _mm512_cvtepi8_epi16(p_llrProcBuf[j]);\n");
-        fprintf(fd,"        zmmRes0 = _mm512_adds_epi16(zmmRes0, zmm0);\n");
+        fprintf(fd, "        zmm0    = simde_mm512_cvtepi8_epi16(p_llrProcBuf[j]);\n");
+        fprintf(fd, "        zmmRes0 = simde_mm512_adds_epi16(zmmRes0, zmm0);\n");
 
-        fprintf(fd,"        zmm1    = _mm512_cvtepi8_epi16(p_llrProcBuf[j +1 ]);\n");
-        fprintf(fd,"        zmmRes1 = _mm512_adds_epi16(zmmRes1, zmm1);\n");
+        fprintf(fd, "        zmm1    = simde_mm512_cvtepi8_epi16(p_llrProcBuf[j +1 ]);\n");
+        fprintf(fd, "        zmmRes1 = simde_mm512_adds_epi16(zmmRes1, zmm1);\n");
 
-            // Pack results back to epi8
-        fprintf(fd,"        zmm0 = _mm512_packs_epi16(zmmRes0, zmmRes1);\n");
-            // zmm0     = [zmmRes1[255:256] zmmRes0[255:256] zmmRes1[127:0] zmmRes0[127:0]]
-            // p_llrRes = [zmmRes1[255:256] zmmRes1[127:0] zmmRes0[255:256] zmmRes0[127:0]]
-        fprintf(fd,"            p_llrRes[i] = _mm512_permutex_epi64(zmm0, 0xD8);\n");
+        // Pack results back to epi8
+        fprintf(fd, "        zmm0 = simde_mm512_packs_epi16(zmmRes0, zmmRes1);\n");
+        // zmm0     = [zmmRes1[255:128] zmmRes0[255:128] zmmRes1[127:0] zmmRes0[127:0]]
+        // p_llrRes = [zmmRes1[255:128] zmmRes1[127:0] zmmRes0[255:128] zmmRes0[127:0]]
+        fprintf(fd, "            p_llrRes[i] = simde_mm512_permutex_epi64(zmm0, 0xD8);\n");
 
         fprintf(fd,"}\n");
     }
@@ -1124,38 +1124,38 @@ fprintf(fd,  "// Process group with 19 CNs \n");
         cnOffsetInGroup = (lut_numBnInBnGroups[18]*NR_LDPC_ZMAX)>>5;
 
         // Set pointers to start of group 2
-        fprintf(fd,"    p_bnProcBuf     = (__m256i*) &bnProcBuf    [%u];\n",lut_startAddrBnGroups[idxBnGroup]);
-        fprintf(fd,"    p_llrProcBuf    = (__m256i*) &llrProcBuf   [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
-        fprintf(fd,"    p_llrRes        = (__m512i*) &llrRes       [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd, "    p_bnProcBuf     = (simde__m256i*) &bnProcBuf    [%u];\n", lut_startAddrBnGroups[idxBnGroup]);
+        fprintf(fd, "    p_llrProcBuf    = (simde__m256i*) &llrProcBuf   [%d];\n", lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd, "    p_llrRes        = (simde__m512i*) &llrRes       [%d];\n", lut_startAddrBnGroupsLlr[idxBnGroup]);
         // Loop over BNs
         fprintf(fd,"        for (int i=0,j=0;i<M;i++,j+=2) {\n");
             // First 16 LLRs of first CN
-        fprintf(fd,"        zmmRes0 = _mm512_cvtepi8_epi16(p_bnProcBuf [j]);\n");
-        fprintf(fd,"        zmmRes1 = _mm512_cvtepi8_epi16(p_bnProcBuf [j +1]);\n");
+        fprintf(fd, "        zmmRes0 = simde_mm512_cvtepi8_epi16(p_bnProcBuf [j]);\n");
+        fprintf(fd, "        zmmRes1 = simde_mm512_cvtepi8_epi16(p_bnProcBuf [j +1]);\n");
 
-            // Loop over CNs
+        // Loop over CNs
         for (k=1; k<19; k++)
         {
-        fprintf(fd,"        zmm0 = _mm512_cvtepi8_epi16(p_bnProcBuf[%u + j]);\n", k*cnOffsetInGroup);
-        fprintf(fd,"        zmmRes0 = _mm512_adds_epi16(zmmRes0, zmm0);\n");
+          fprintf(fd, "        zmm0 = simde_mm512_cvtepi8_epi16(p_bnProcBuf[%u + j]);\n", k * cnOffsetInGroup);
+          fprintf(fd, "        zmmRes0 = simde_mm512_adds_epi16(zmmRes0, zmm0);\n");
 
-        fprintf(fd,"        zmm1 = _mm512_cvtepi8_epi16(p_bnProcBuf[%u + j +1]);\n", k*cnOffsetInGroup);
+          fprintf(fd, "        zmm1 = simde_mm512_cvtepi8_epi16(p_bnProcBuf[%u + j +1]);\n", k * cnOffsetInGroup);
 
-        fprintf(fd, "       zmmRes1 = _mm512_adds_epi16(zmmRes1, zmm1); \n");
+          fprintf(fd, "       zmmRes1 = simde_mm512_adds_epi16(zmmRes1, zmm1); \n");
         }
 
             // Add LLR from receiver input
-        fprintf(fd,"        zmm0    = _mm512_cvtepi8_epi16(p_llrProcBuf[j]);\n");
-        fprintf(fd,"        zmmRes0 = _mm512_adds_epi16(zmmRes0, zmm0);\n");
+        fprintf(fd, "        zmm0    = simde_mm512_cvtepi8_epi16(p_llrProcBuf[j]);\n");
+        fprintf(fd, "        zmmRes0 = simde_mm512_adds_epi16(zmmRes0, zmm0);\n");
 
-        fprintf(fd,"        zmm1    = _mm512_cvtepi8_epi16(p_llrProcBuf[j +1 ]);\n");
-        fprintf(fd,"        zmmRes1 = _mm512_adds_epi16(zmmRes1, zmm1);\n");
+        fprintf(fd, "        zmm1    = simde_mm512_cvtepi8_epi16(p_llrProcBuf[j +1 ]);\n");
+        fprintf(fd, "        zmmRes1 = simde_mm512_adds_epi16(zmmRes1, zmm1);\n");
 
-            // Pack results back to epi8
-        fprintf(fd,"        zmm0 = _mm512_packs_epi16(zmmRes0, zmmRes1);\n");
-            // zmm0     = [zmmRes1[255:256] zmmRes0[255:256] zmmRes1[127:0] zmmRes0[127:0]]
-            // p_llrRes = [zmmRes1[255:256] zmmRes1[127:0] zmmRes0[255:256] zmmRes0[127:0]]
-        fprintf(fd,"            p_llrRes[i] = _mm512_permutex_epi64(zmm0, 0xD8);\n");
+        // Pack results back to epi8
+        fprintf(fd, "        zmm0 = simde_mm512_packs_epi16(zmmRes0, zmmRes1);\n");
+        // zmm0     = [zmmRes1[255:128] zmmRes0[255:128] zmmRes1[127:0] zmmRes0[127:0]]
+        // p_llrRes = [zmmRes1[255:128] zmmRes1[127:0] zmmRes0[255:128] zmmRes0[127:0]]
+        fprintf(fd, "            p_llrRes[i] = simde_mm512_permutex_epi64(zmm0, 0xD8);\n");
 
         fprintf(fd,"}\n");
     }
@@ -1180,38 +1180,38 @@ fprintf(fd,  "// Process group with 20 CNs \n");
         cnOffsetInGroup = (lut_numBnInBnGroups[19]*NR_LDPC_ZMAX)>>5;
 
         // Set pointers to start of group 2
-        fprintf(fd,"    p_bnProcBuf     = (__m256i*) &bnProcBuf    [%u];\n",lut_startAddrBnGroups[idxBnGroup]);
-        fprintf(fd,"    p_llrProcBuf    = (__m256i*) &llrProcBuf   [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
-        fprintf(fd,"    p_llrRes        = (__m512i*) &llrRes       [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd, "    p_bnProcBuf     = (simde__m256i*) &bnProcBuf    [%u];\n", lut_startAddrBnGroups[idxBnGroup]);
+        fprintf(fd, "    p_llrProcBuf    = (simde__m256i*) &llrProcBuf   [%d];\n", lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd, "    p_llrRes        = (simde__m512i*) &llrRes       [%d];\n", lut_startAddrBnGroupsLlr[idxBnGroup]);
         // Loop over BNs
         fprintf(fd,"        for (int i=0,j=0;i<M;i++,j+=2) {\n");
             // First 16 LLRs of first CN
-        fprintf(fd,"        zmmRes0 = _mm512_cvtepi8_epi16(p_bnProcBuf [j]);\n");
-        fprintf(fd,"        zmmRes1 = _mm512_cvtepi8_epi16(p_bnProcBuf [j +1]);\n");
+        fprintf(fd, "        zmmRes0 = simde_mm512_cvtepi8_epi16(p_bnProcBuf [j]);\n");
+        fprintf(fd, "        zmmRes1 = simde_mm512_cvtepi8_epi16(p_bnProcBuf [j +1]);\n");
 
-            // Loop over CNs
+        // Loop over CNs
         for (k=1; k<20; k++)
         {
-        fprintf(fd,"        zmm0 = _mm512_cvtepi8_epi16(p_bnProcBuf[%u + j]);\n", k*cnOffsetInGroup);
-        fprintf(fd,"        zmmRes0 = _mm512_adds_epi16(zmmRes0, zmm0);\n");
+          fprintf(fd, "        zmm0 = simde_mm512_cvtepi8_epi16(p_bnProcBuf[%u + j]);\n", k * cnOffsetInGroup);
+          fprintf(fd, "        zmmRes0 = simde_mm512_adds_epi16(zmmRes0, zmm0);\n");
 
-        fprintf(fd,"        zmm1 = _mm512_cvtepi8_epi16(p_bnProcBuf[%u + j +1]);\n", k*cnOffsetInGroup);
+          fprintf(fd, "        zmm1 = simde_mm512_cvtepi8_epi16(p_bnProcBuf[%u + j +1]);\n", k * cnOffsetInGroup);
 
-        fprintf(fd, "       zmmRes1 = _mm512_adds_epi16(zmmRes1, zmm1); \n");
+          fprintf(fd, "       zmmRes1 = simde_mm512_adds_epi16(zmmRes1, zmm1); \n");
         }
 
             // Add LLR from receiver input
-        fprintf(fd,"        zmm0    = _mm512_cvtepi8_epi16(p_llrProcBuf[j]);\n");
-        fprintf(fd,"        zmmRes0 = _mm512_adds_epi16(zmmRes0, zmm0);\n");
+        fprintf(fd, "        zmm0    = simde_mm512_cvtepi8_epi16(p_llrProcBuf[j]);\n");
+        fprintf(fd, "        zmmRes0 = simde_mm512_adds_epi16(zmmRes0, zmm0);\n");
 
-        fprintf(fd,"        zmm1    = _mm512_cvtepi8_epi16(p_llrProcBuf[j +1 ]);\n");
-        fprintf(fd,"        zmmRes1 = _mm512_adds_epi16(zmmRes1, zmm1);\n");
+        fprintf(fd, "        zmm1    = simde_mm512_cvtepi8_epi16(p_llrProcBuf[j +1 ]);\n");
+        fprintf(fd, "        zmmRes1 = simde_mm512_adds_epi16(zmmRes1, zmm1);\n");
 
-            // Pack results back to epi8
-        fprintf(fd,"        zmm0 = _mm512_packs_epi16(zmmRes0, zmmRes1);\n");
-            // zmm0     = [zmmRes1[255:256] zmmRes0[255:256] zmmRes1[127:0] zmmRes0[127:0]]
-            // p_llrRes = [zmmRes1[255:256] zmmRes1[127:0] zmmRes0[255:256] zmmRes0[127:0]]
-        fprintf(fd,"            p_llrRes[i] = _mm512_permutex_epi64(zmm0, 0xD8);\n");
+        // Pack results back to epi8
+        fprintf(fd, "        zmm0 = simde_mm512_packs_epi16(zmmRes0, zmmRes1);\n");
+        // zmm0     = [zmmRes1[255:128] zmmRes0[255:128] zmmRes1[127:0] zmmRes0[127:0]]
+        // p_llrRes = [zmmRes1[255:128] zmmRes1[127:0] zmmRes0[255:128] zmmRes0[127:0]]
+        fprintf(fd, "            p_llrRes[i] = simde_mm512_permutex_epi64(zmm0, 0xD8);\n");
 
         fprintf(fd,"}\n");
     }
@@ -1240,38 +1240,38 @@ fprintf(fd,  "// Process group with 21 CNs \n");
         cnOffsetInGroup = (lut_numBnInBnGroups[20]*NR_LDPC_ZMAX)>>5;
 
         // Set pointers to start of group 2
-        fprintf(fd,"    p_bnProcBuf     = (__m256i*) &bnProcBuf    [%u];\n",lut_startAddrBnGroups[idxBnGroup]);
-        fprintf(fd,"    p_llrProcBuf    = (__m256i*) &llrProcBuf   [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
-        fprintf(fd,"    p_llrRes        = (__m512i*) &llrRes       [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd, "    p_bnProcBuf     = (simde__m256i*) &bnProcBuf    [%u];\n", lut_startAddrBnGroups[idxBnGroup]);
+        fprintf(fd, "    p_llrProcBuf    = (simde__m256i*) &llrProcBuf   [%d];\n", lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd, "    p_llrRes        = (simde__m512i*) &llrRes       [%d];\n", lut_startAddrBnGroupsLlr[idxBnGroup]);
         // Loop over BNs
         fprintf(fd,"            for (int i=0,j=0;i<M;i++,j+=2) {\n");
             // First 16 LLRs of first CN
-        fprintf(fd,"            zmmRes0 = _mm512_cvtepi8_epi16(p_bnProcBuf [j]);\n");
-        fprintf(fd,"            zmmRes1 = _mm512_cvtepi8_epi16(p_bnProcBuf [j +1]);\n");
+        fprintf(fd, "            zmmRes0 = simde_mm512_cvtepi8_epi16(p_bnProcBuf [j]);\n");
+        fprintf(fd, "            zmmRes1 = simde_mm512_cvtepi8_epi16(p_bnProcBuf [j +1]);\n");
 
-            // Loop over CNs
+        // Loop over CNs
         for (k=1; k<21; k++)
         {
-        fprintf(fd,"            zmm0 = _mm512_cvtepi8_epi16(p_bnProcBuf[%u + j]);\n", k*cnOffsetInGroup);
-        fprintf(fd,"            zmmRes0 = _mm512_adds_epi16(zmmRes0, zmm0);\n");
+          fprintf(fd, "            zmm0 = simde_mm512_cvtepi8_epi16(p_bnProcBuf[%u + j]);\n", k * cnOffsetInGroup);
+          fprintf(fd, "            zmmRes0 = simde_mm512_adds_epi16(zmmRes0, zmm0);\n");
 
-        fprintf(fd,"            zmm1 = _mm512_cvtepi8_epi16(p_bnProcBuf[%u + j +1]);\n", k*cnOffsetInGroup);
+          fprintf(fd, "            zmm1 = simde_mm512_cvtepi8_epi16(p_bnProcBuf[%u + j +1]);\n", k * cnOffsetInGroup);
 
-        fprintf(fd, "           zmmRes1 = _mm512_adds_epi16(zmmRes1, zmm1); \n");
+          fprintf(fd, "           zmmRes1 = simde_mm512_adds_epi16(zmmRes1, zmm1); \n");
         }
 
             // Add LLR from receiver input
-        fprintf(fd,"            zmm0    = _mm512_cvtepi8_epi16(p_llrProcBuf[j]);\n");
-        fprintf(fd,"            zmmRes0 = _mm512_adds_epi16(zmmRes0, zmm0);\n");
+        fprintf(fd, "            zmm0    = simde_mm512_cvtepi8_epi16(p_llrProcBuf[j]);\n");
+        fprintf(fd, "            zmmRes0 = simde_mm512_adds_epi16(zmmRes0, zmm0);\n");
 
-        fprintf(fd,"            zmm1    = _mm512_cvtepi8_epi16(p_llrProcBuf[j +1 ]);\n");
-        fprintf(fd,"            zmmRes1 = _mm512_adds_epi16(zmmRes1, zmm1);\n");
+        fprintf(fd, "            zmm1    = simde_mm512_cvtepi8_epi16(p_llrProcBuf[j +1 ]);\n");
+        fprintf(fd, "            zmmRes1 = simde_mm512_adds_epi16(zmmRes1, zmm1);\n");
 
-            // Pack results back to epi8
-        fprintf(fd,"            zmm0 = _mm512_packs_epi16(zmmRes0, zmmRes1);\n");
-            // zmm0     = [zmmRes1[255:256] zmmRes0[255:256] zmmRes1[127:0] zmmRes0[127:0]]
-            // p_llrRes = [zmmRes1[255:256] zmmRes1[127:0] zmmRes0[255:256] zmmRes0[127:0]]
-        fprintf(fd,"            p_llrRes[i] = _mm512_permutex_epi64(zmm0, 0xD8);\n");
+        // Pack results back to epi8
+        fprintf(fd, "            zmm0 = simde_mm512_packs_epi16(zmmRes0, zmmRes1);\n");
+        // zmm0     = [zmmRes1[255:128] zmmRes0[255:128] zmmRes1[127:0] zmmRes0[127:0]]
+        // p_llrRes = [zmmRes1[255:128] zmmRes1[127:0] zmmRes0[255:128] zmmRes0[127:0]]
+        fprintf(fd, "            p_llrRes[i] = simde_mm512_permutex_epi64(zmm0, 0xD8);\n");
 
         fprintf(fd,"}\n");
     }
@@ -1295,38 +1295,38 @@ fprintf(fd,  "// Process group with 22 CNs \n");
         cnOffsetInGroup = (lut_numBnInBnGroups[21]*NR_LDPC_ZMAX)>>5;
 
         // Set pointers to start of group 2
-        fprintf(fd,"    p_bnProcBuf     = (__m256i*) &bnProcBuf    [%u];\n",lut_startAddrBnGroups[idxBnGroup]);
-        fprintf(fd,"    p_llrProcBuf    = (__m256i*) &llrProcBuf   [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
-        fprintf(fd,"    p_llrRes        = (__m512i*) &llrRes       [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd, "    p_bnProcBuf     = (simde__m256i*) &bnProcBuf    [%u];\n", lut_startAddrBnGroups[idxBnGroup]);
+        fprintf(fd, "    p_llrProcBuf    = (simde__m256i*) &llrProcBuf   [%d];\n", lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd, "    p_llrRes        = (simde__m512i*) &llrRes       [%d];\n", lut_startAddrBnGroupsLlr[idxBnGroup]);
         // Loop over BNs
         fprintf(fd,"            for (int i=0,j=0;i<M;i++,j+=2) {\n");
             // First 16 LLRs of first CN
-        fprintf(fd,"            zmmRes0 = _mm512_cvtepi8_epi16(p_bnProcBuf [j]);\n");
-        fprintf(fd,"            zmmRes1 = _mm512_cvtepi8_epi16(p_bnProcBuf [j +1]);\n");
+        fprintf(fd, "            zmmRes0 = simde_mm512_cvtepi8_epi16(p_bnProcBuf [j]);\n");
+        fprintf(fd, "            zmmRes1 = simde_mm512_cvtepi8_epi16(p_bnProcBuf [j +1]);\n");
 
-            // Loop over CNs
+        // Loop over CNs
         for (k=1; k<22; k++)
         {
-        fprintf(fd,"            zmm0 = _mm512_cvtepi8_epi16(p_bnProcBuf[%u + j]);\n", k*cnOffsetInGroup);
-        fprintf(fd,"            zmmRes0 = _mm512_adds_epi16(zmmRes0, zmm0);\n");
+          fprintf(fd, "            zmm0 = simde_mm512_cvtepi8_epi16(p_bnProcBuf[%u + j]);\n", k * cnOffsetInGroup);
+          fprintf(fd, "            zmmRes0 = simde_mm512_adds_epi16(zmmRes0, zmm0);\n");
 
-        fprintf(fd,"            zmm1 = _mm512_cvtepi8_epi16(p_bnProcBuf[%u + j +1]);\n", k*cnOffsetInGroup);
+          fprintf(fd, "            zmm1 = simde_mm512_cvtepi8_epi16(p_bnProcBuf[%u + j +1]);\n", k * cnOffsetInGroup);
 
-        fprintf(fd, "           zmmRes1 = _mm512_adds_epi16(zmmRes1, zmm1); \n");
+          fprintf(fd, "           zmmRes1 = simde_mm512_adds_epi16(zmmRes1, zmm1); \n");
         }
 
             // Add LLR from receiver input
-        fprintf(fd,"            zmm0    = _mm512_cvtepi8_epi16(p_llrProcBuf[j]);\n");
-        fprintf(fd,"            zmmRes0 = _mm512_adds_epi16(zmmRes0, zmm0);\n");
+        fprintf(fd, "            zmm0    = simde_mm512_cvtepi8_epi16(p_llrProcBuf[j]);\n");
+        fprintf(fd, "            zmmRes0 = simde_mm512_adds_epi16(zmmRes0, zmm0);\n");
 
-        fprintf(fd,"            zmm1    = _mm512_cvtepi8_epi16(p_llrProcBuf[j +1 ]);\n");
-        fprintf(fd,"            zmmRes1 = _mm512_adds_epi16(zmmRes1, zmm1);\n");
+        fprintf(fd, "            zmm1    = simde_mm512_cvtepi8_epi16(p_llrProcBuf[j +1 ]);\n");
+        fprintf(fd, "            zmmRes1 = simde_mm512_adds_epi16(zmmRes1, zmm1);\n");
 
-            // Pack results back to epi8
-        fprintf(fd,"            zmm0 = _mm512_packs_epi16(zmmRes0, zmmRes1);\n");
-            // zmm0     = [zmmRes1[255:256] zmmRes0[255:256] zmmRes1[127:0] zmmRes0[127:0]]
-            // p_llrRes = [zmmRes1[255:256] zmmRes1[127:0] zmmRes0[255:256] zmmRes0[127:0]]
-         fprintf(fd,"            p_llrRes[i] = _mm512_permutex_epi64(zmm0, 0xD8);\n");
+        // Pack results back to epi8
+        fprintf(fd, "            zmm0 = simde_mm512_packs_epi16(zmmRes0, zmmRes1);\n");
+        // zmm0     = [zmmRes1[255:128] zmmRes0[255:128] zmmRes1[127:0] zmmRes0[127:0]]
+        // p_llrRes = [zmmRes1[255:128] zmmRes1[127:0] zmmRes0[255:128] zmmRes0[127:0]]
+        fprintf(fd, "            p_llrRes[i] = simde_mm512_permutex_epi64(zmm0, 0xD8);\n");
 
         fprintf(fd,"}\n");
     }
@@ -1351,40 +1351,40 @@ fprintf(fd,  "// Process group with <23 CNs \n");
         cnOffsetInGroup = (lut_numBnInBnGroups[22]*NR_LDPC_ZMAX)>>5;
 
         // Set pointers to start of group 2
-        fprintf(fd,"    p_bnProcBuf     = (__m256i*) &bnProcBuf    [%u];\n",lut_startAddrBnGroups[idxBnGroup]);
-        fprintf(fd,"    p_llrProcBuf    = (__m256i*) &llrProcBuf   [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
-        fprintf(fd,"    p_llrRes        = (__m512i*) &llrRes       [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd, "    p_bnProcBuf     = (simde__m256i*) &bnProcBuf    [%u];\n", lut_startAddrBnGroups[idxBnGroup]);
+        fprintf(fd, "    p_llrProcBuf    = (simde__m256i*) &llrProcBuf   [%d];\n", lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd, "    p_llrRes        = (simde__m512i*) &llrRes       [%d];\n", lut_startAddrBnGroupsLlr[idxBnGroup]);
         // Loop over BNs
         fprintf(fd,"        for (int i=0,j=0;i<M;i++,j+=2) {\n");
             // First 16 LLRs of first CN
-        fprintf(fd,"        zmmRes0 = _mm512_cvtepi8_epi16(p_bnProcBuf [j]);\n");
-        fprintf(fd,"        zmmRes1 = _mm512_cvtepi8_epi16(p_bnProcBuf [j +1]);\n");
+        fprintf(fd, "        zmmRes0 = simde_mm512_cvtepi8_epi16(p_bnProcBuf [j]);\n");
+        fprintf(fd, "        zmmRes1 = simde_mm512_cvtepi8_epi16(p_bnProcBuf [j +1]);\n");
 
-            // Loop over CNs
+        // Loop over CNs
         for (k=1; k<23; k++)
         {
-        fprintf(fd,"        zmm0 = _mm512_cvtepi8_epi16(p_bnProcBuf[%u + j]);\n", k*cnOffsetInGroup);
-        fprintf(fd,"        zmmRes0 = _mm512_adds_epi16(zmmRes0, zmm0);\n");
+          fprintf(fd, "        zmm0 = simde_mm512_cvtepi8_epi16(p_bnProcBuf[%u + j]);\n", k * cnOffsetInGroup);
+          fprintf(fd, "        zmmRes0 = simde_mm512_adds_epi16(zmmRes0, zmm0);\n");
 
-        fprintf(fd,"        zmm1 = _mm512_cvtepi8_epi16(p_bnProcBuf[%u + j +1]);\n", k*cnOffsetInGroup);
+          fprintf(fd, "        zmm1 = simde_mm512_cvtepi8_epi16(p_bnProcBuf[%u + j +1]);\n", k * cnOffsetInGroup);
 
-        fprintf(fd,"        zmmRes1 = _mm512_adds_epi16(zmmRes1, zmm1); \n");
+          fprintf(fd, "        zmmRes1 = simde_mm512_adds_epi16(zmmRes1, zmm1); \n");
             }
 
             // Add LLR from receiver input
-        fprintf(fd,"        zmm0    = _mm512_cvtepi8_epi16(p_llrProcBuf[j]);\n");
-        fprintf(fd,"        zmmRes0 = _mm512_adds_epi16(zmmRes0, zmm0);\n");
+            fprintf(fd, "        zmm0    = simde_mm512_cvtepi8_epi16(p_llrProcBuf[j]);\n");
+            fprintf(fd, "        zmmRes0 = simde_mm512_adds_epi16(zmmRes0, zmm0);\n");
 
-        fprintf(fd,"        zmm1    = _mm512_cvtepi8_epi16(p_llrProcBuf[j +1 ]);\n");
-        fprintf(fd,"        zmmRes1 = _mm512_adds_epi16(zmmRes1, zmm1);\n");
+            fprintf(fd, "        zmm1    = simde_mm512_cvtepi8_epi16(p_llrProcBuf[j +1 ]);\n");
+            fprintf(fd, "        zmmRes1 = simde_mm512_adds_epi16(zmmRes1, zmm1);\n");
 
             // Pack results back to epi8
-        fprintf(fd,"        zmm0 = _mm512_packs_epi16(zmmRes0, zmmRes1);\n");
+            fprintf(fd, "        zmm0 = simde_mm512_packs_epi16(zmmRes0, zmmRes1);\n");
             // zmm0     = [zmmRes1[255:256] zmmRes0[255:256] zmmRes1[127:0] zmmRes0[127:0]]
             // p_llrRes = [zmmRes1[255:256] zmmRes1[127:0] zmmRes0[255:256] zmmRes0[127:0]]
-        fprintf(fd,"            p_llrRes[i] = _mm512_permutex_epi64(zmm0, 0xD8);\n");
+            fprintf(fd, "            p_llrRes[i] = simde_mm512_permutex_epi64(zmm0, 0xD8);\n");
 
-        fprintf(fd,"}\n");
+            fprintf(fd, "}\n");
     }
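
A note on the pack/permute pair that closes every kernel above: simde_mm512_packs_epi16 narrows per 128-bit lane, so the 64-bit chunks of its two inputs come out interleaved, and the 0xD8 control (element order 0,2,1,3) regroups them within each 256-bit half. A standalone check of just that step, again assuming SIMDE headers are available; everything below is illustrative, not generator output:

    #include <stdio.h>
    #include <stdint.h>
    #include <simde/x86/avx512.h>

    int main(void)
    {
      /* r0 carries int16 values 0..31, r1 carries 32..63. */
      int16_t lo[32], hi[32];
      for (int n = 0; n < 32; n++) { lo[n] = n; hi[n] = 32 + n; }
      simde__m512i r0 = simde_mm512_loadu_si512(lo);
      simde__m512i r1 = simde_mm512_loadu_si512(hi);
      /* packs: per-lane 64-bit chunk order is r0.l0 r1.l0 r0.l1 r1.l1 ... */
      simde__m512i packed = simde_mm512_packs_epi16(r0, r1);
      /* 0xD8 regroups each 256-bit half to r0.l0 r0.l1 r1.l0 r1.l1 */
      simde__m512i fixed = simde_mm512_permutex_epi64(packed, 0xD8);
      int8_t out[64];
      simde_mm512_storeu_si512(out, fixed);
      for (int n = 0; n < 64; n++)
        printf("%d%c", out[n], n == 63 ? '\n' : ' ');
      return 0;
    }

The program prints 0..15, 32..47, 16..31, 48..63: the permutation de-interleaves the packed chunks within each 256-bit half, consistent with the zmm0/p_llrRes layout comments above when read per half.
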
 
 
@@ -1408,38 +1408,38 @@ fprintf(fd,  "// Process group with 24 CNs \n");
         cnOffsetInGroup = (lut_numBnInBnGroups[23]*NR_LDPC_ZMAX)>>5;
 
         // Set pointers to start of group 2
-        fprintf(fd,"    p_bnProcBuf     = (__m256i*) &bnProcBuf    [%u];\n",lut_startAddrBnGroups[idxBnGroup]);
-        fprintf(fd,"    p_llrProcBuf    = (__m256i*) &llrProcBuf   [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
-        fprintf(fd,"    p_llrRes        = (__m512i*) &llrRes       [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd, "    p_bnProcBuf     = (simde__m256i*) &bnProcBuf    [%u];\n", lut_startAddrBnGroups[idxBnGroup]);
+        fprintf(fd, "    p_llrProcBuf    = (simde__m256i*) &llrProcBuf   [%d];\n", lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd, "    p_llrRes        = (simde__m512i*) &llrRes       [%d];\n", lut_startAddrBnGroupsLlr[idxBnGroup]);
         // Loop over BNs
         fprintf(fd,"        for (int i=0,j=0;i<M;i++,j+=2) {\n");
             // First 16 LLRs of first CN
-        fprintf(fd,"        zmmRes0 = _mm512_cvtepi8_epi16(p_bnProcBuf [j]);\n");
-        fprintf(fd,"        zmmRes1 = _mm512_cvtepi8_epi16(p_bnProcBuf [j +1]);\n");
+        fprintf(fd, "        zmmRes0 = simde_mm512_cvtepi8_epi16(p_bnProcBuf [j]);\n");
+        fprintf(fd, "        zmmRes1 = simde_mm512_cvtepi8_epi16(p_bnProcBuf [j +1]);\n");
 
-            // Loop over CNs
+        // Loop over CNs
         for (k=1; k<24; k++)
         {
-        fprintf(fd,"        zmm0 = _mm512_cvtepi8_epi16(p_bnProcBuf[%u + j]);\n", k*cnOffsetInGroup);
-        fprintf(fd,"        zmmRes0 = _mm512_adds_epi16(zmmRes0, zmm0);\n");
+          fprintf(fd, "        zmm0 = simde_mm512_cvtepi8_epi16(p_bnProcBuf[%u + j]);\n", k * cnOffsetInGroup);
+          fprintf(fd, "        zmmRes0 = simde_mm512_adds_epi16(zmmRes0, zmm0);\n");
 
-        fprintf(fd,"        zmm1 = _mm512_cvtepi8_epi16(p_bnProcBuf[%u + j +1]);\n", k*cnOffsetInGroup);
+          fprintf(fd, "        zmm1 = simde_mm512_cvtepi8_epi16(p_bnProcBuf[%u + j +1]);\n", k * cnOffsetInGroup);
 
-        fprintf(fd, "       zmmRes1 = _mm512_adds_epi16(zmmRes1, zmm1); \n");
+          fprintf(fd, "       zmmRes1 = simde_mm512_adds_epi16(zmmRes1, zmm1); \n");
         }
 
             // Add LLR from receiver input
-        fprintf(fd,"        zmm0    = _mm512_cvtepi8_epi16(p_llrProcBuf[j]);\n");
-        fprintf(fd,"        zmmRes0 = _mm512_adds_epi16(zmmRes0, zmm0);\n");
+        fprintf(fd, "        zmm0    = simde_mm512_cvtepi8_epi16(p_llrProcBuf[j]);\n");
+        fprintf(fd, "        zmmRes0 = simde_mm512_adds_epi16(zmmRes0, zmm0);\n");
 
-        fprintf(fd,"        zmm1    = _mm512_cvtepi8_epi16(p_llrProcBuf[j +1 ]);\n");
-        fprintf(fd,"        zmmRes1 = _mm512_adds_epi16(zmmRes1, zmm1);\n");
+        fprintf(fd, "        zmm1    = simde_mm512_cvtepi8_epi16(p_llrProcBuf[j +1 ]);\n");
+        fprintf(fd, "        zmmRes1 = simde_mm512_adds_epi16(zmmRes1, zmm1);\n");
 
-            // Pack results back to epi8
-        fprintf(fd,"        zmm0 = _mm512_packs_epi16(zmmRes0, zmmRes1);\n");
-            // zmm0     = [zmmRes1[255:256] zmmRes0[255:256] zmmRes1[127:0] zmmRes0[127:0]]
-            // p_llrRes = [zmmRes1[255:256] zmmRes1[127:0] zmmRes0[255:256] zmmRes0[127:0]]
-        fprintf(fd,"            p_llrRes[i] = _mm512_permutex_epi64(zmm0, 0xD8);\n");
+        // Pack results back to epi8
+        fprintf(fd, "        zmm0 = simde_mm512_packs_epi16(zmmRes0, zmmRes1);\n");
+        // zmm0     = [zmmRes1[255:128] zmmRes0[255:128] zmmRes1[127:0] zmmRes0[127:0]]
+        // p_llrRes = [zmmRes1[255:128] zmmRes1[127:0] zmmRes0[255:128] zmmRes0[127:0]]
+        fprintf(fd, "            p_llrRes[i] = simde_mm512_permutex_epi64(zmm0, 0xD8);\n");
 
         fprintf(fd,"}\n");
     }
@@ -1464,38 +1464,38 @@ fprintf(fd,  "// Process group with 25 CNs \n");
         cnOffsetInGroup = (lut_numBnInBnGroups[24]*NR_LDPC_ZMAX)>>5;
 
         // Set pointers to start of group 2
-        fprintf(fd,"    p_bnProcBuf     = (__m256i*) &bnProcBuf    [%u];\n",lut_startAddrBnGroups[idxBnGroup]);
-        fprintf(fd,"    p_llrProcBuf    = (__m256i*) &llrProcBuf   [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
-        fprintf(fd,"    p_llrRes        = (__m512i*) &llrRes       [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd, "    p_bnProcBuf     = (simde__m256i*) &bnProcBuf    [%u];\n", lut_startAddrBnGroups[idxBnGroup]);
+        fprintf(fd, "    p_llrProcBuf    = (simde__m256i*) &llrProcBuf   [%d];\n", lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd, "    p_llrRes        = (simde__m512i*) &llrRes       [%d];\n", lut_startAddrBnGroupsLlr[idxBnGroup]);
         // Loop over BNs
         fprintf(fd,"        for (int i=0,j=0;i<M;i++,j+=2) {\n");
             // First 16 LLRs of first CN
-        fprintf(fd,"        zmmRes0 = _mm512_cvtepi8_epi16(p_bnProcBuf [j]);\n");
-        fprintf(fd,"        zmmRes1 = _mm512_cvtepi8_epi16(p_bnProcBuf [j +1]);\n");
+        fprintf(fd, "        zmmRes0 = simde_mm512_cvtepi8_epi16(p_bnProcBuf [j]);\n");
+        fprintf(fd, "        zmmRes1 = simde_mm512_cvtepi8_epi16(p_bnProcBuf [j +1]);\n");
 
-            // Loop over CNs
+        // Loop over CNs
         for (k=1; k<25; k++)
         {
-        fprintf(fd,"        zmm0 = _mm512_cvtepi8_epi16(p_bnProcBuf[%u + j]);\n", k*cnOffsetInGroup);
-        fprintf(fd,"        zmmRes0 = _mm512_adds_epi16(zmmRes0, zmm0);\n");
+          fprintf(fd, "        zmm0 = simde_mm512_cvtepi8_epi16(p_bnProcBuf[%u + j]);\n", k * cnOffsetInGroup);
+          fprintf(fd, "        zmmRes0 = simde_mm512_adds_epi16(zmmRes0, zmm0);\n");
 
-        fprintf(fd,"        zmm1 = _mm512_cvtepi8_epi16(p_bnProcBuf[%u + j +1]);\n", k*cnOffsetInGroup);
+          fprintf(fd, "        zmm1 = simde_mm512_cvtepi8_epi16(p_bnProcBuf[%d + j +1]);\n", k * cnOffsetInGroup);
 
-        fprintf(fd, "       zmmRes1 = _mm512_adds_epi16(zmmRes1, zmm1); \n");
+          fprintf(fd, "       zmmRes1 = simde_mm512_adds_epi16(zmmRes1, zmm1); \n");
         }
 
             // Add LLR from receiver input
-        fprintf(fd,"        zmm0    = _mm512_cvtepi8_epi16(p_llrProcBuf[j]);\n");
-        fprintf(fd,"        zmmRes0 = _mm512_adds_epi16(zmmRes0, zmm0);\n");
+        fprintf(fd, "        zmm0    = simde_mm512_cvtepi8_epi16(p_llrProcBuf[j]);\n");
+        fprintf(fd, "        zmmRes0 = simde_mm512_adds_epi16(zmmRes0, zmm0);\n");
 
-        fprintf(fd,"        zmm1    = _mm512_cvtepi8_epi16(p_llrProcBuf[j +1 ]);\n");
-        fprintf(fd,"        zmmRes1 = _mm512_adds_epi16(zmmRes1, zmm1);\n");
+        fprintf(fd, "        zmm1    = simde_mm512_cvtepi8_epi16(p_llrProcBuf[j +1 ]);\n");
+        fprintf(fd, "        zmmRes1 = simde_mm512_adds_epi16(zmmRes1, zmm1);\n");
 
-            // Pack results back to epi8
-        fprintf(fd,"        zmm0 = _mm512_packs_epi16(zmmRes0, zmmRes1);\n");
-            // zmm0     = [zmmRes1[255:256] zmmRes0[255:256] zmmRes1[127:0] zmmRes0[127:0]]
-            // p_llrRes = [zmmRes1[255:256] zmmRes1[127:0] zmmRes0[255:256] zmmRes0[127:0]]
-        fprintf(fd,"            p_llrRes[i] = _mm512_permutex_epi64(zmm0, 0xD8);\n");
+        // Pack results back to epi8
+        fprintf(fd, "        zmm0 = simde_mm512_packs_epi16(zmmRes0, zmmRes1);\n");
+        // zmm0     = [zmmRes1[255:128] zmmRes0[255:128] zmmRes1[127:0] zmmRes0[127:0]]
+        // p_llrRes = [zmmRes1[255:128] zmmRes1[127:0] zmmRes0[255:128] zmmRes0[127:0]]
+        fprintf(fd, "            p_llrRes[i] = simde_mm512_permutex_epi64(zmm0, 0xD8);\n");
 
         fprintf(fd,"}\n");
     }
@@ -1521,38 +1521,38 @@ fprintf(fd,  "// Process group with 26 CNs \n");
         cnOffsetInGroup = (lut_numBnInBnGroups[25]*NR_LDPC_ZMAX)>>5;
 
         // Set pointers to start of group 2
-        fprintf(fd,"    p_bnProcBuf     = (__m256i*) &bnProcBuf    [%u];\n",lut_startAddrBnGroups[idxBnGroup]);
-        fprintf(fd,"    p_llrProcBuf    = (__m256i*) &llrProcBuf   [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
-        fprintf(fd,"    p_llrRes        = (__m512i*) &llrRes       [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd, "    p_bnProcBuf     = (simde__m256i*) &bnProcBuf    [%u];\n", lut_startAddrBnGroups[idxBnGroup]);
+        fprintf(fd, "    p_llrProcBuf    = (simde__m256i*) &llrProcBuf   [%d];\n", lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd, "    p_llrRes        = (simde__m512i*) &llrRes       [%d];\n", lut_startAddrBnGroupsLlr[idxBnGroup]);
         // Loop over BNs
         fprintf(fd,"        for (int i=0,j=0;i<M;i++,j+=2) {\n");
             // First 16 LLRs of first CN
-        fprintf(fd,"        zmmRes0 = _mm512_cvtepi8_epi16(p_bnProcBuf [j]);\n");
-        fprintf(fd,"        zmmRes1 = _mm512_cvtepi8_epi16(p_bnProcBuf [j +1]);\n");
+        fprintf(fd, "        zmmRes0 = simde_mm512_cvtepi8_epi16(p_bnProcBuf [j]);\n");
+        fprintf(fd, "        zmmRes1 = simde_mm512_cvtepi8_epi16(p_bnProcBuf [j +1]);\n");
 
-            // Loop over CNs
+        // Loop over CNs
         for (k=1; k<26; k++)
         {
-        fprintf(fd,"        zmm0 = _mm512_cvtepi8_epi16(p_bnProcBuf[%u + j]);\n", k*cnOffsetInGroup);
-        fprintf(fd,"        zmmRes0 = _mm512_adds_epi16(zmmRes0, zmm0);\n");
+          fprintf(fd, "        zmm0 = simde_mm512_cvtepi8_epi16(p_bnProcBuf[%u + j]);\n", k * cnOffsetInGroup);
+          fprintf(fd, "        zmmRes0 = simde_mm512_adds_epi16(zmmRes0, zmm0);\n");
 
-        fprintf(fd,"        zmm1 = _mm512_cvtepi8_epi16(p_bnProcBuf[%u + j +1]);\n", k*cnOffsetInGroup);
+          fprintf(fd, "        zmm1 = simde_mm512_cvtepi8_epi16(p_bnProcBuf[%u + j +1]);\n", k * cnOffsetInGroup);
 
-        fprintf(fd, "       zmmRes1 = _mm512_adds_epi16(zmmRes1, zmm1); \n");
+          fprintf(fd, "       zmmRes1 = simde_mm512_adds_epi16(zmmRes1, zmm1); \n");
         }
 
             // Add LLR from receiver input
-        fprintf(fd,"        zmm0    = _mm512_cvtepi8_epi16(p_llrProcBuf[j]);\n");
-        fprintf(fd,"        zmmRes0 = _mm512_adds_epi16(zmmRes0, zmm0);\n");
+        fprintf(fd, "        zmm0    = simde_mm512_cvtepi8_epi16(p_llrProcBuf[j]);\n");
+        fprintf(fd, "        zmmRes0 = simde_mm512_adds_epi16(zmmRes0, zmm0);\n");
 
-        fprintf(fd,"        zmm1    = _mm512_cvtepi8_epi16(p_llrProcBuf[j +1 ]);\n");
-        fprintf(fd,"        zmmRes1 = _mm512_adds_epi16(zmmRes1, zmm1);\n");
+        fprintf(fd, "        zmm1    = simde_mm512_cvtepi8_epi16(p_llrProcBuf[j +1 ]);\n");
+        fprintf(fd, "        zmmRes1 = simde_mm512_adds_epi16(zmmRes1, zmm1);\n");
 
-            // Pack results back to epi8
-        fprintf(fd,"        zmm0 = _mm512_packs_epi16(zmmRes0, zmmRes1);\n");
-            // zmm0     = [zmmRes1[255:256] zmmRes0[255:256] zmmRes1[127:0] zmmRes0[127:0]]
-            // p_llrRes = [zmmRes1[255:256] zmmRes1[127:0] zmmRes0[255:256] zmmRes0[127:0]]
-        fprintf(fd,"            p_llrRes[i] = _mm512_permutex_epi64(zmm0, 0xD8);\n");
+        // Pack results back to epi8
+        fprintf(fd, "        zmm0 = simde_mm512_packs_epi16(zmmRes0, zmmRes1);\n");
+        // packs_epi16 interleaves the packed 64-bit chunks of zmmRes0/zmmRes1 within each 128-bit lane
+        // the 0xD8 permute regroups them per 256-bit lane before the store to p_llrRes
+        fprintf(fd, "            p_llrRes[i] = simde_mm512_permutex_epi64(zmm0, 0xD8);\n");
 
         fprintf(fd,"}\n");
     }
@@ -1577,38 +1577,38 @@ fprintf(fd,  "// Process group with 27 CNs \n");
         cnOffsetInGroup = (lut_numBnInBnGroups[26]*NR_LDPC_ZMAX)>>5;
 
         // Set pointers to start of group 2
-        fprintf(fd,"    p_bnProcBuf     = (__m256i*) &bnProcBuf    [%u];\n",lut_startAddrBnGroups[idxBnGroup]);
-        fprintf(fd,"    p_llrProcBuf    = (__m256i*) &llrProcBuf   [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
-        fprintf(fd,"    p_llrRes        = (__m512i*) &llrRes       [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd, "    p_bnProcBuf     = (simde__m256i*) &bnProcBuf    [%u];\n", lut_startAddrBnGroups[idxBnGroup]);
+        fprintf(fd, "    p_llrProcBuf    = (simde__m256i*) &llrProcBuf   [%d];\n", lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd, "    p_llrRes        = (simde__m512i*) &llrRes       [%d];\n", lut_startAddrBnGroupsLlr[idxBnGroup]);
         // Loop over BNs
         fprintf(fd,"        for (int i=0,j=0;i<M;i++,j+=2) {\n");
             // First 16 LLRs of first CN
-        fprintf(fd,"        zmmRes0 = _mm512_cvtepi8_epi16(p_bnProcBuf [j]);\n");
-        fprintf(fd,"        zmmRes1 = _mm512_cvtepi8_epi16(p_bnProcBuf [j +1]);\n");
+        fprintf(fd, "        zmmRes0 = simde_mm512_cvtepi8_epi16(p_bnProcBuf [j]);\n");
+        fprintf(fd, "        zmmRes1 = simde_mm512_cvtepi8_epi16(p_bnProcBuf [j +1]);\n");
 
-            // Loop over CNs
+        // Loop over CNs
         for (k=1; k<27; k++)
         {
-        fprintf(fd,"        zmm0 = _mm512_cvtepi8_epi16(p_bnProcBuf[%u + j]);\n", k*cnOffsetInGroup);
-        fprintf(fd,"        zmmRes0 = _mm512_adds_epi16(zmmRes0, zmm0);\n");
+          fprintf(fd, "        zmm0 = simde_mm512_cvtepi8_epi16(p_bnProcBuf[%u + j]);\n", k * cnOffsetInGroup);
+          fprintf(fd, "        zmmRes0 = simde_mm512_adds_epi16(zmmRes0, zmm0);\n");
 
-        fprintf(fd,"        zmm1 = _mm512_cvtepi8_epi16(p_bnProcBuf[%u + j +1]);\n", k*cnOffsetInGroup);
+          fprintf(fd, "        zmm1 = simde_mm512_cvtepi8_epi16(p_bnProcBuf[%u + j +1]);\n", k * cnOffsetInGroup);
 
-        fprintf(fd, "       zmmRes1 = _mm512_adds_epi16(zmmRes1, zmm1); \n");
+          fprintf(fd, "       zmmRes1 = simde_mm512_adds_epi16(zmmRes1, zmm1); \n");
         }
 
             // Add LLR from receiver input
-        fprintf(fd,"        zmm0    = _mm512_cvtepi8_epi16(p_llrProcBuf[j]);\n");
-        fprintf(fd,"        zmmRes0 = _mm512_adds_epi16(zmmRes0, zmm0);\n");
+        fprintf(fd, "        zmm0    = simde_mm512_cvtepi8_epi16(p_llrProcBuf[j]);\n");
+        fprintf(fd, "        zmmRes0 = simde_mm512_adds_epi16(zmmRes0, zmm0);\n");
 
-        fprintf(fd,"        zmm1    = _mm512_cvtepi8_epi16(p_llrProcBuf[j +1 ]);\n");
-        fprintf(fd,"        zmmRes1 = _mm512_adds_epi16(zmmRes1, zmm1);\n");
+        fprintf(fd, "        zmm1    = simde_mm512_cvtepi8_epi16(p_llrProcBuf[j +1 ]);\n");
+        fprintf(fd, "        zmmRes1 = simde_mm512_adds_epi16(zmmRes1, zmm1);\n");
 
-            // Pack results back to epi8
-        fprintf(fd,"        zmm0 = _mm512_packs_epi16(zmmRes0, zmmRes1);\n");
-            // zmm0     = [zmmRes1[255:256] zmmRes0[255:256] zmmRes1[127:0] zmmRes0[127:0]]
-            // p_llrRes = [zmmRes1[255:256] zmmRes1[127:0] zmmRes0[255:256] zmmRes0[127:0]]
-        fprintf(fd,"            p_llrRes[i] = _mm512_permutex_epi64(zmm0, 0xD8);\n");
+        // Pack results back to epi8
+        fprintf(fd, "        zmm0 = simde_mm512_packs_epi16(zmmRes0, zmmRes1);\n");
+        // packs_epi16 interleaves the packed 64-bit chunks of zmmRes0/zmmRes1 within each 128-bit lane
+        // the 0xD8 permute regroups them per 256-bit lane before the store to p_llrRes
+        fprintf(fd, "            p_llrRes[i] = simde_mm512_permutex_epi64(zmm0, 0xD8);\n");
 
         fprintf(fd,"}\n");
     }
@@ -1633,38 +1633,38 @@ fprintf(fd,  "// Process group with 28 CNs \n");
         cnOffsetInGroup = (lut_numBnInBnGroups[27]*NR_LDPC_ZMAX)>>5;
 
         // Set pointers to start of group 2
-        fprintf(fd,"    p_bnProcBuf     = (__m256i*) &bnProcBuf    [%u];\n",lut_startAddrBnGroups[idxBnGroup]);
-        fprintf(fd,"    p_llrProcBuf    = (__m256i*) &llrProcBuf   [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
-        fprintf(fd,"    p_llrRes        = (__m512i*) &llrRes       [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd, "    p_bnProcBuf     = (simde__m256i*) &bnProcBuf    [%u];\n", lut_startAddrBnGroups[idxBnGroup]);
+        fprintf(fd, "    p_llrProcBuf    = (simde__m256i*) &llrProcBuf   [%d];\n", lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd, "    p_llrRes        = (simde__m512i*) &llrRes       [%d];\n", lut_startAddrBnGroupsLlr[idxBnGroup]);
         // Loop over BNs
         fprintf(fd,"        for (int i=0,j=0;i<M;i++,j+=2) {\n");
             // First 16 LLRs of first CN
-        fprintf(fd,"        zmmRes0 = _mm512_cvtepi8_epi16(p_bnProcBuf [j]);\n");
-        fprintf(fd,"        zmmRes1 = _mm512_cvtepi8_epi16(p_bnProcBuf [j +1]);\n");
+        fprintf(fd, "        zmmRes0 = simde_mm512_cvtepi8_epi16(p_bnProcBuf [j]);\n");
+        fprintf(fd, "        zmmRes1 = simde_mm512_cvtepi8_epi16(p_bnProcBuf [j +1]);\n");
 
-            // Loop over CNs
+        // Loop over CNs
         for (k=1; k<28; k++)
         {
-        fprintf(fd,"        zmm0 = _mm512_cvtepi8_epi16(p_bnProcBuf[%u + j]);\n", k*cnOffsetInGroup);
-        fprintf(fd,"        zmmRes0 = _mm512_adds_epi16(zmmRes0, zmm0);\n");
+          fprintf(fd, "        zmm0 = simde_mm512_cvtepi8_epi16(p_bnProcBuf[%u + j]);\n", k * cnOffsetInGroup);
+          fprintf(fd, "        zmmRes0 = simde_mm512_adds_epi16(zmmRes0, zmm0);\n");
 
-        fprintf(fd,"        zmm1 = _mm512_cvtepi8_epi16(p_bnProcBuf[%u + j +1]);\n", k*cnOffsetInGroup);
+          fprintf(fd, "        zmm1 = simde_mm512_cvtepi8_epi16(p_bnProcBuf[%u + j +1]);\n", k * cnOffsetInGroup);
 
-        fprintf(fd, "       zmmRes1 = _mm512_adds_epi16(zmmRes1, zmm1); \n");
+          fprintf(fd, "       zmmRes1 = simde_mm512_adds_epi16(zmmRes1, zmm1); \n");
         }
 
             // Add LLR from receiver input
-        fprintf(fd,"        zmm0    = _mm512_cvtepi8_epi16(p_llrProcBuf[j]);\n");
-        fprintf(fd,"        zmmRes0 = _mm512_adds_epi16(zmmRes0, zmm0);\n");
+        fprintf(fd, "        zmm0    = simde_mm512_cvtepi8_epi16(p_llrProcBuf[j]);\n");
+        fprintf(fd, "        zmmRes0 = simde_mm512_adds_epi16(zmmRes0, zmm0);\n");
 
-        fprintf(fd,"        zmm1    = _mm512_cvtepi8_epi16(p_llrProcBuf[j +1 ]);\n");
-        fprintf(fd,"        zmmRes1 = _mm512_adds_epi16(zmmRes1, zmm1);\n");
+        fprintf(fd, "        zmm1    = simde_mm512_cvtepi8_epi16(p_llrProcBuf[j +1 ]);\n");
+        fprintf(fd, "        zmmRes1 = simde_mm512_adds_epi16(zmmRes1, zmm1);\n");
 
-            // Pack results back to epi8
-        fprintf(fd,"        zmm0 = _mm512_packs_epi16(zmmRes0, zmmRes1);\n");
-            // zmm0     = [zmmRes1[255:256] zmmRes0[255:256] zmmRes1[127:0] zmmRes0[127:0]]
-            // p_llrRes = [zmmRes1[255:256] zmmRes1[127:0] zmmRes0[255:256] zmmRes0[127:0]]
-        fprintf(fd,"            p_llrRes[i] = _mm512_permutex_epi64(zmm0, 0xD8);\n");
+        // Pack results back to epi8
+        fprintf(fd, "        zmm0 = simde_mm512_packs_epi16(zmmRes0, zmmRes1);\n");
+        // packs_epi16 interleaves the packed 64-bit chunks of zmmRes0/zmmRes1 within each 128-bit lane
+        // the 0xD8 permute regroups them per 256-bit lane before the store to p_llrRes
+        fprintf(fd, "            p_llrRes[i] = simde_mm512_permutex_epi64(zmm0, 0xD8);\n");
 
         fprintf(fd,"}\n");
     }
@@ -1688,38 +1688,38 @@ fprintf(fd,  "// Process group with 29 CNs \n");
         cnOffsetInGroup = (lut_numBnInBnGroups[28]*NR_LDPC_ZMAX)>>5;
 
         // Set pointers to start of group 2
-        fprintf(fd,"    p_bnProcBuf     = (__m256i*) &bnProcBuf    [%u];\n",lut_startAddrBnGroups[idxBnGroup]);
-        fprintf(fd,"    p_llrProcBuf    = (__m256i*) &llrProcBuf   [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
-        fprintf(fd,"    p_llrRes        = (__m512i*) &llrRes       [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd, "    p_bnProcBuf     = (simde__m256i*) &bnProcBuf    [%u];\n", lut_startAddrBnGroups[idxBnGroup]);
+        fprintf(fd, "    p_llrProcBuf    = (simde__m256i*) &llrProcBuf   [%d];\n", lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd, "    p_llrRes        = (simde__m512i*) &llrRes       [%d];\n", lut_startAddrBnGroupsLlr[idxBnGroup]);
         // Loop over BNs
         fprintf(fd,"        for (int i=0,j=0;i<M;i++,j+=2) {\n");
             // First 16 LLRs of first CN
-        fprintf(fd,"        zmmRes0 = _mm512_cvtepi8_epi16(p_bnProcBuf [j]);\n");
-        fprintf(fd,"        zmmRes1 = _mm512_cvtepi8_epi16(p_bnProcBuf [j +1]);\n");
+        fprintf(fd, "        zmmRes0 = simde_mm512_cvtepi8_epi16(p_bnProcBuf [j]);\n");
+        fprintf(fd, "        zmmRes1 = simde_mm512_cvtepi8_epi16(p_bnProcBuf [j +1]);\n");
 
-            // Loop over CNs
+        // Loop over CNs
         for (k=1; k<29; k++)
         {
-        fprintf(fd,"        zmm0 = _mm512_cvtepi8_epi16(p_bnProcBuf[%u + j]);\n", k*cnOffsetInGroup);
-        fprintf(fd,"        zmmRes0 = _mm512_adds_epi16(zmmRes0, zmm0);\n");
+          fprintf(fd, "        zmm0 = simde_mm512_cvtepi8_epi16(p_bnProcBuf[%u + j]);\n", k * cnOffsetInGroup);
+          fprintf(fd, "        zmmRes0 = simde_mm512_adds_epi16(zmmRes0, zmm0);\n");
 
-        fprintf(fd,"        zmm1 = _mm512_cvtepi8_epi16(p_bnProcBuf[%u + j +1]);\n", k*cnOffsetInGroup);
+          fprintf(fd, "        zmm1 = simde_mm512_cvtepi8_epi16(p_bnProcBuf[%u + j +1]);\n", k * cnOffsetInGroup);
 
-        fprintf(fd, "       zmmRes1 = _mm512_adds_epi16(zmmRes1, zmm1); \n");
+          fprintf(fd, "       zmmRes1 = simde_mm512_adds_epi16(zmmRes1, zmm1); \n");
         }
 
             // Add LLR from receiver input
-        fprintf(fd,"        zmm0    = _mm512_cvtepi8_epi16(p_llrProcBuf[j]);\n");
-        fprintf(fd,"        zmmRes0 = _mm512_adds_epi16(zmmRes0, zmm0);\n");
+        fprintf(fd, "        zmm0    = simde_mm512_cvtepi8_epi16(p_llrProcBuf[j]);\n");
+        fprintf(fd, "        zmmRes0 = simde_mm512_adds_epi16(zmmRes0, zmm0);\n");
 
-        fprintf(fd,"        zmm1    = _mm512_cvtepi8_epi16(p_llrProcBuf[j +1 ]);\n");
-        fprintf(fd,"        zmmRes1 = _mm512_adds_epi16(zmmRes1, zmm1);\n");
+        fprintf(fd, "        zmm1    = simde_mm512_cvtepi8_epi16(p_llrProcBuf[j +1 ]);\n");
+        fprintf(fd, "        zmmRes1 = simde_mm512_adds_epi16(zmmRes1, zmm1);\n");
 
-            // Pack results back to epi8
-        fprintf(fd,"        zmm0 = _mm512_packs_epi16(zmmRes0, zmmRes1);\n");
-            // zmm0     = [zmmRes1[255:256] zmmRes0[255:256] zmmRes1[127:0] zmmRes0[127:0]]
-            // p_llrRes = [zmmRes1[255:256] zmmRes1[127:0] zmmRes0[255:256] zmmRes0[127:0]]
-        fprintf(fd,"            p_llrRes[i] = _mm512_permutex_epi64(zmm0, 0xD8);\n");
+        // Pack results back to epi8
+        fprintf(fd, "        zmm0 = simde_mm512_packs_epi16(zmmRes0, zmmRes1);\n");
+        // packs_epi16 interleaves the packed 64-bit chunks of zmmRes0/zmmRes1 within each 128-bit lane
+        // the 0xD8 permute regroups them per 256-bit lane before the store to p_llrRes
+        fprintf(fd, "            p_llrRes[i] = simde_mm512_permutex_epi64(zmm0, 0xD8);\n");
 
         fprintf(fd,"}\n");
     }
@@ -1744,38 +1744,38 @@ fprintf(fd,  "// Process group with 30 CNs \n");
         cnOffsetInGroup = (lut_numBnInBnGroups[29]*NR_LDPC_ZMAX)>>5;
 
         // Set pointers to start of group 2
-        fprintf(fd,"    p_bnProcBuf     = (__m256i*) &bnProcBuf    [%u];\n",lut_startAddrBnGroups[idxBnGroup]);
-        fprintf(fd,"    p_llrProcBuf    = (__m256i*) &llrProcBuf   [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
-        fprintf(fd,"    p_llrRes        = (__m512i*) &llrRes       [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd, "    p_bnProcBuf     = (simde__m256i*) &bnProcBuf    [%u];\n", lut_startAddrBnGroups[idxBnGroup]);
+        fprintf(fd, "    p_llrProcBuf    = (simde__m256i*) &llrProcBuf   [%d];\n", lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd, "    p_llrRes        = (simde__m512i*) &llrRes       [%d];\n", lut_startAddrBnGroupsLlr[idxBnGroup]);
         // Loop over BNs
         fprintf(fd,"        for (int i=0,j=0;i<M;i++,j+=2) {\n");
             // First 16 LLRs of first CN
-        fprintf(fd,"        zmmRes0 = _mm512_cvtepi8_epi16(p_bnProcBuf [j]);\n");
-        fprintf(fd,"        zmmRes1 = _mm512_cvtepi8_epi16(p_bnProcBuf [j +1]);\n");
+        fprintf(fd, "        zmmRes0 = simde_mm512_cvtepi8_epi16(p_bnProcBuf [j]);\n");
+        fprintf(fd, "        zmmRes1 = simde_mm512_cvtepi8_epi16(p_bnProcBuf [j +1]);\n");
 
-            // Loop over CNs
+        // Loop over CNs
         for (k=1; k<30; k++)
         {
-        fprintf(fd,"        zmm0 = _mm512_cvtepi8_epi16(p_bnProcBuf[%u + j]);\n", k*cnOffsetInGroup);
-        fprintf(fd,"        zmmRes0 = _mm512_adds_epi16(zmmRes0, zmm0);\n");
+          fprintf(fd, "        zmm0 = simde_mm512_cvtepi8_epi16(p_bnProcBuf[%u + j]);\n", k * cnOffsetInGroup);
+          fprintf(fd, "        zmmRes0 = simde_mm512_adds_epi16(zmmRes0, zmm0);\n");
 
-        fprintf(fd,"        zmm1 = _mm512_cvtepi8_epi16(p_bnProcBuf[%u + j +1]);\n", k*cnOffsetInGroup);
+          fprintf(fd, "        zmm1 = simde_mm512_cvtepi8_epi16(p_bnProcBuf[%u + j +1]);\n", k * cnOffsetInGroup);
 
-        fprintf(fd, "       zmmRes1 = _mm512_adds_epi16(zmmRes1, zmm1); \n");
+          fprintf(fd, "       zmmRes1 = simde_mm512_adds_epi16(zmmRes1, zmm1); \n");
         }
 
             // Add LLR from receiver input
-        fprintf(fd,"        zmm0    = _mm512_cvtepi8_epi16(p_llrProcBuf[j]);\n");
-        fprintf(fd,"        zmmRes0 = _mm512_adds_epi16(zmmRes0, zmm0);\n");
+        fprintf(fd, "        zmm0    = simde_mm512_cvtepi8_epi16(p_llrProcBuf[j]);\n");
+        fprintf(fd, "        zmmRes0 = simde_mm512_adds_epi16(zmmRes0, zmm0);\n");
 
-        fprintf(fd,"        zmm1    = _mm512_cvtepi8_epi16(p_llrProcBuf[j +1 ]);\n");
-        fprintf(fd,"        zmmRes1 = _mm512_adds_epi16(zmmRes1, zmm1);\n");
+        fprintf(fd, "        zmm1    = simde_mm512_cvtepi8_epi16(p_llrProcBuf[j +1 ]);\n");
+        fprintf(fd, "        zmmRes1 = simde_mm512_adds_epi16(zmmRes1, zmm1);\n");
 
-            // Pack results back to epi8
-        fprintf(fd,"        zmm0 = _mm512_packs_epi16(zmmRes0, zmmRes1);\n");
-            // zmm0     = [zmmRes1[255:256] zmmRes0[255:256] zmmRes1[127:0] zmmRes0[127:0]]
-            // p_llrRes = [zmmRes1[255:256] zmmRes1[127:0] zmmRes0[255:256] zmmRes0[127:0]]
-        fprintf(fd,"            p_llrRes[i] = _mm512_permutex_epi64(zmm0, 0xD8);\n");
+        // Pack results back to epi8
+        fprintf(fd, "        zmm0 = simde_mm512_packs_epi16(zmmRes0, zmmRes1);\n");
+        // packs_epi16 interleaves the packed 64-bit chunks of zmmRes0/zmmRes1 within each 128-bit lane
+        // the 0xD8 permute regroups them per 256-bit lane before the store to p_llrRes
+        fprintf(fd, "            p_llrRes[i] = simde_mm512_permutex_epi64(zmm0, 0xD8);\n");
 
         fprintf(fd,"}\n");
     }
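For reference, the code these generators emit reduces, for a group of C check nodes, to
the following shape. This is a hand-written sketch using the SIMDE names this patch
introduces; the buffer and loop names mirror the generated code, but the function itself
and its parameters are illustrative, not part of the patch:

#include <stdint.h>
#include <simde/x86/avx512.h>

/* One BN-group pass: sum C check-node messages plus the channel LLR,
 * with int16 intermediate precision and a saturating repack to int8. */
static void bnProcPc_group_sketch(const simde__m256i *p_bnProcBuf,
                                  const simde__m256i *p_llrProcBuf,
                                  simde__m512i *p_llrRes,
                                  uint32_t M, uint32_t C, uint32_t cnOffsetInGroup)
{
  simde__m512i zmm0, zmm1, zmmRes0, zmmRes1;
  for (uint32_t i = 0, j = 0; i < M; i++, j += 2) {
    /* Widen the first CN's 64 int8 LLRs to int16 so the sums cannot overflow. */
    zmmRes0 = simde_mm512_cvtepi8_epi16(p_bnProcBuf[j]);
    zmmRes1 = simde_mm512_cvtepi8_epi16(p_bnProcBuf[j + 1]);
    /* Accumulate the remaining CNs with saturation (the generators unroll this loop). */
    for (uint32_t k = 1; k < C; k++) {
      zmm0 = simde_mm512_cvtepi8_epi16(p_bnProcBuf[k * cnOffsetInGroup + j]);
      zmmRes0 = simde_mm512_adds_epi16(zmmRes0, zmm0);
      zmm1 = simde_mm512_cvtepi8_epi16(p_bnProcBuf[k * cnOffsetInGroup + j + 1]);
      zmmRes1 = simde_mm512_adds_epi16(zmmRes1, zmm1);
    }
    /* Add the LLRs from the receiver input. */
    zmm0 = simde_mm512_cvtepi8_epi16(p_llrProcBuf[j]);
    zmmRes0 = simde_mm512_adds_epi16(zmmRes0, zmm0);
    zmm1 = simde_mm512_cvtepi8_epi16(p_llrProcBuf[j + 1]);
    zmmRes1 = simde_mm512_adds_epi16(zmmRes1, zmm1);
    /* Pack back to int8 and undo the per-lane interleaving of packs_epi16. */
    zmm0 = simde_mm512_packs_epi16(zmmRes0, zmmRes1);
    p_llrRes[i] = simde_mm512_permutex_epi64(zmm0, 0xD8);
  }
}

On x86 with AVX-512 enabled these calls map 1:1 to the native intrinsics; on other
targets (e.g. the aarch64 builds this series enables) SIMDE emulates them, which is the
point of the rename.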
diff --git a/openair1/PHY/CODING/nrLDPC_decoder/nrLDPC_tools/generator_bnProc_avx512/bnProcPc_gen_BG2_avx512.c b/openair1/PHY/CODING/nrLDPC_decoder/nrLDPC_tools/generator_bnProc_avx512/bnProcPc_gen_BG2_avx512.c
index 93584246e822abd07db84f53681d13e89bb9cbaf..d2ec9ec5a9275ae7972fec6c7478b54dc3348069 100644
--- a/openair1/PHY/CODING/nrLDPC_decoder/nrLDPC_tools/generator_bnProc_avx512/bnProcPc_gen_BG2_avx512.c
+++ b/openair1/PHY/CODING/nrLDPC_decoder/nrLDPC_tools/generator_bnProc_avx512/bnProcPc_gen_BG2_avx512.c
@@ -79,12 +79,11 @@ void nrLDPC_bnProcPc_BG2_generator_AVX512(const char *dir, int R)
     uint32_t cnOffsetInGroup;
     uint8_t idxBnGroup = 0;
 
-    fprintf(fd,"   __m512i zmm0,zmm1,zmmRes0,zmmRes1;  \n");
+    fprintf(fd, "   simde__m512i zmm0,zmm1,zmmRes0,zmmRes1;  \n");
 
-
-    fprintf(fd,"        __m256i* p_bnProcBuf; \n");
-    fprintf(fd,"        __m256i* p_llrProcBuf;\n");
-    fprintf(fd,"        __m512i* p_llrRes; \n");
+    fprintf(fd, "        simde__m256i* p_bnProcBuf; \n");
+    fprintf(fd, "        simde__m256i* p_llrProcBuf;\n");
+    fprintf(fd, "        simde__m512i* p_llrRes; \n");
     fprintf(fd,"         uint32_t M ;\n");
 
 
@@ -105,40 +104,39 @@ fprintf(fd,  "// Process group with 1 CNs \n");
         cnOffsetInGroup = (lut_numBnInBnGroups[0]*NR_LDPC_ZMAX)>>5;
 
         // Set pointers to start of group 2
-        fprintf(fd,"    p_bnProcBuf     = (__m256i*) &bnProcBuf    [%u];\n",lut_startAddrBnGroups[idxBnGroup]);
-        fprintf(fd,"    p_llrProcBuf    = (__m256i*) &llrProcBuf   [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
-        fprintf(fd,"    p_llrRes        = (__m512i*) &llrRes       [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd, "    p_bnProcBuf     = (simde__m256i*) &bnProcBuf    [%u];\n", lut_startAddrBnGroups[idxBnGroup]);
+        fprintf(fd, "    p_llrProcBuf    = (simde__m256i*) &llrProcBuf   [%d];\n", lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd, "    p_llrRes        = (simde__m512i*) &llrRes       [%d];\n", lut_startAddrBnGroupsLlr[idxBnGroup]);
 
         // Loop over BNs
         fprintf(fd,"            for (int i=0,j=0;i<M;i++,j+=2) {\n");
             // First 16 LLRs of first CN
-        fprintf(fd,"           zmm0 = _mm512_cvtepi8_epi16(p_bnProcBuf [j]);\n");
-        fprintf(fd,"           zmm1 = _mm512_cvtepi8_epi16(p_llrProcBuf[j]);\n");
+        fprintf(fd, "           zmm0 = simde_mm512_cvtepi8_epi16(p_bnProcBuf [j]);\n");
+        fprintf(fd, "           zmm1 = simde_mm512_cvtepi8_epi16(p_llrProcBuf[j]);\n");
 
-            // Loop over CNs
+        // Loop over CNs
         /*for (k=1; k<1; k++)
         {
-        fprintf(fd,"           zmm0 = _mm512_cvtepi8_epi16(p_bnProcBuf[%d + j]);\n", k*cnOffsetInGroup);
-        fprintf(fd,"           zmmRes0 = _mm512_adds_epi16(zmmRes0,zmm0);\n");
+        fprintf(fd,"           zmm0 = simde_mm512_cvtepi8_epi16(p_bnProcBuf[%d + j]);\n", k*cnOffsetInGroup);
+        fprintf(fd,"           zmmRes0 = simde_mm512_adds_epi16(zmmRes0,zmm0);\n");
 
-        fprintf(fd,"           zmm1 = _mm512_cvtepi8_epi16(p_bnProcBuf[%d + j +1]);\n", k*cnOffsetInGroup);
+        fprintf(fd,"           zmm1 = simde_mm512_cvtepi8_epi16(p_bnProcBuf[%d + j +1]);\n", k*cnOffsetInGroup);
 
-        fprintf(fd, "          zmmRes1 = _mm512_adds_epi16(zmmRes1,zmm1); \n");
+        fprintf(fd, "          zmmRes1 = simde_mm512_adds_epi16(zmmRes1,zmm1); \n");
         }
 */
-            // Add LLR from receiver input
-        fprintf(fd,"           zmm0    = _mm512_cvtepi8_epi16(p_bnProcBuf[j+1]);\n");
-        fprintf(fd,"           zmmRes0 = _mm512_adds_epi16(zmmRes0,zmm0);\n");
-
-        fprintf(fd,"           zmm1    = _mm512_cvtepi8_epi16(p_llrProcBuf[j +1 ]);\n");
-        fprintf(fd,"           zmmRes1 = _mm512_adds_epi16(zmmRes1,zmm1);\n");
+        // Add LLR from receiver input
+        fprintf(fd, "           zmm0    = simde_mm512_cvtepi8_epi16(p_bnProcBuf[j+1]);\n");
+        fprintf(fd, "           zmmRes0 = simde_mm512_adds_epi16(zmmRes0,zmm0);\n");
 
-            // Pack results back to epi8
-        fprintf(fd,"           zmm0 = _mm512_packs_epi16(zmmRes0,zmmRes1);\n");
-            //zmm0     = [zmmRes1[255:256]zmmRes0[255:256]zmmRes1[127:0]zmmRes0[127:0]]
-            // p_llrRes = [zmmRes1[255:256]zmmRes1[127:0]zmmRes0[255:256]zmmRes0[127:0]]
-        fprintf(fd,"            p_llrRes[i] = _mm512_permutex_epi64(zmm0, 0xD8);\n");
+        fprintf(fd, "           zmm1    = simde_mm512_cvtepi8_epi16(p_llrProcBuf[j +1 ]);\n");
+        fprintf(fd, "           zmmRes1 = simde_mm512_adds_epi16(zmmRes1,zmm1);\n");
 
+        // Pack results back to epi8
+        fprintf(fd, "           zmm0 = simde_mm512_packs_epi16(zmmRes0,zmmRes1);\n");
+        // packs_epi16 interleaves the packed 64-bit chunks of zmmRes0/zmmRes1 within each 128-bit lane
+        // the 0xD8 permute regroups them per 256-bit lane before the store to p_llrRes
+        fprintf(fd, "            p_llrRes[i] = simde_mm512_permutex_epi64(zmm0, 0xD8);\n");
 
         fprintf(fd,"}\n");
     }
@@ -162,40 +160,39 @@ fprintf(fd,  "// Process group with 2 CNs \n");
         cnOffsetInGroup = (lut_numBnInBnGroups[1]*NR_LDPC_ZMAX)>>5;
 
         // Set pointers to start of group 2
-        fprintf(fd,"    p_bnProcBuf     = (__m256i*) &bnProcBuf    [%u];\n",lut_startAddrBnGroups[idxBnGroup]);
-        fprintf(fd,"    p_llrProcBuf    = (__m256i*) &llrProcBuf   [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
-        fprintf(fd,"    p_llrRes        = (__m512i*) &llrRes       [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd, "    p_bnProcBuf     = (simde__m256i*) &bnProcBuf    [%u];\n", lut_startAddrBnGroups[idxBnGroup]);
+        fprintf(fd, "    p_llrProcBuf    = (simde__m256i*) &llrProcBuf   [%d];\n", lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd, "    p_llrRes        = (simde__m512i*) &llrRes       [%d];\n", lut_startAddrBnGroupsLlr[idxBnGroup]);
 
         // Loop over BNs
         fprintf(fd,"            for (int i=0,j=0;i<M;i++,j+=2) {\n");
             // First 16 LLRs of first CN
-        fprintf(fd,"           zmmRes0 = _mm512_cvtepi8_epi16(p_bnProcBuf [j]);\n");
-        fprintf(fd,"           zmmRes1 = _mm512_cvtepi8_epi16(p_bnProcBuf[j + 1]);\n");
+        fprintf(fd, "           zmmRes0 = simde_mm512_cvtepi8_epi16(p_bnProcBuf [j]);\n");
+        fprintf(fd, "           zmmRes1 = simde_mm512_cvtepi8_epi16(p_bnProcBuf[j + 1]);\n");
 
-            // Loop over CNs
+        // Loop over CNs
         for (k=1; k<2; k++)
         {
-        fprintf(fd,"           zmm0 = _mm512_cvtepi8_epi16(p_bnProcBuf[%u + j]);\n", k*cnOffsetInGroup);
-        fprintf(fd,"           zmmRes0 = _mm512_adds_epi16(zmmRes0,zmm0);\n");
+          fprintf(fd, "           zmm0 = simde_mm512_cvtepi8_epi16(p_bnProcBuf[%u + j]);\n", k * cnOffsetInGroup);
+          fprintf(fd, "           zmmRes0 = simde_mm512_adds_epi16(zmmRes0,zmm0);\n");
 
-        fprintf(fd,"           zmm1 = _mm512_cvtepi8_epi16(p_bnProcBuf[%u + j +1]);\n", k*cnOffsetInGroup);
+          fprintf(fd, "           zmm1 = simde_mm512_cvtepi8_epi16(p_bnProcBuf[%u + j +1]);\n", k * cnOffsetInGroup);
 
-        fprintf(fd, "          zmmRes1 = _mm512_adds_epi16(zmmRes1,zmm1); \n");
+          fprintf(fd, "          zmmRes1 = simde_mm512_adds_epi16(zmmRes1,zmm1); \n");
         }
 
             // Add LLR from receiver input
-        fprintf(fd,"           zmm0    = _mm512_cvtepi8_epi16(p_llrProcBuf[j]);\n");
-        fprintf(fd,"           zmmRes0 = _mm512_adds_epi16(zmmRes0,zmm0);\n");
+        fprintf(fd, "           zmm0    = simde_mm512_cvtepi8_epi16(p_llrProcBuf[j]);\n");
+        fprintf(fd, "           zmmRes0 = simde_mm512_adds_epi16(zmmRes0,zmm0);\n");
 
-        fprintf(fd,"           zmm1    = _mm512_cvtepi8_epi16(p_llrProcBuf[j +1 ]);\n");
-        fprintf(fd,"           zmmRes1 = _mm512_adds_epi16(zmmRes1,zmm1);\n");
-
-            // Pack results back to epi8
-        fprintf(fd,"           zmm0 = _mm512_packs_epi16(zmmRes0,zmmRes1);\n");
-            //zmm0     = [zmmRes1[255:256]zmmRes0[255:256]zmmRes1[127:0]zmmRes0[127:0]]
-            // p_llrRes = [zmmRes1[255:256]zmmRes1[127:0]zmmRes0[255:256]zmmRes0[127:0]]
-        fprintf(fd,"            p_llrRes[i] = _mm512_permutex_epi64(zmm0, 0xD8);\n");
+        fprintf(fd, "           zmm1    = simde_mm512_cvtepi8_epi16(p_llrProcBuf[j +1 ]);\n");
+        fprintf(fd, "           zmmRes1 = simde_mm512_adds_epi16(zmmRes1,zmm1);\n");
 
+        // Pack results back to epi8
+        fprintf(fd, "           zmm0 = simde_mm512_packs_epi16(zmmRes0,zmmRes1);\n");
+        // packs_epi16 interleaves the packed 64-bit chunks of zmmRes0/zmmRes1 within each 128-bit lane
+        // the 0xD8 permute regroups them per 256-bit lane before the store to p_llrRes
+        fprintf(fd, "            p_llrRes[i] = simde_mm512_permutex_epi64(zmm0, 0xD8);\n");
 
         fprintf(fd,"}\n");
     }
@@ -220,39 +217,40 @@ fprintf(fd,  "// Process group with 3 CNs \n");
         cnOffsetInGroup = (lut_numBnInBnGroups[2]*NR_LDPC_ZMAX)>>5;
 
         // Set pointers to start of group 2
-        fprintf(fd,"    p_bnProcBuf     = (__m256i*) &bnProcBuf    [%u];\n",lut_startAddrBnGroups[idxBnGroup]);
-        fprintf(fd,"    p_llrProcBuf    = (__m256i*) &llrProcBuf   [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
-        fprintf(fd,"    p_llrRes        = (__m512i*) &llrRes       [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd, "    p_bnProcBuf     = (simde__m256i*) &bnProcBuf    [%u];\n", lut_startAddrBnGroups[idxBnGroup]);
+        fprintf(fd, "    p_llrProcBuf    = (simde__m256i*) &llrProcBuf   [%d];\n", lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd, "    p_llrRes        = (simde__m512i*) &llrRes       [%d];\n", lut_startAddrBnGroupsLlr[idxBnGroup]);
         // Loop over BNs
         fprintf(fd,"        for (int i=0,j=0;i<M;i++,j+=2) {\n");
             // First 16 LLRs of first CN
-        fprintf(fd,"       zmmRes0 = _mm512_cvtepi8_epi16(p_bnProcBuf [j]);\n");
-        fprintf(fd,"       zmmRes1 = _mm512_cvtepi8_epi16(p_bnProcBuf [j +1]);\n");
+        fprintf(fd, "       zmmRes0 = simde_mm512_cvtepi8_epi16(p_bnProcBuf [j]);\n");
+        fprintf(fd, "       zmmRes1 = simde_mm512_cvtepi8_epi16(p_bnProcBuf [j +1]);\n");
 
-            // Loop over CNs
+        // Loop over CNs
         for (k=1; k<3; k++)
         {
-        fprintf(fd,"       zmm0 = _mm512_cvtepi8_epi16(p_bnProcBuf[%u + j]);\n", k*cnOffsetInGroup);
-        fprintf(fd,"       zmmRes0 = _mm512_adds_epi16(zmmRes0,zmm0);\n");
+          fprintf(fd, "       zmm0 = simde_mm512_cvtepi8_epi16(p_bnProcBuf[%u + j]);\n", k * cnOffsetInGroup);
+          fprintf(fd, "       zmmRes0 = simde_mm512_adds_epi16(zmmRes0,zmm0);\n");
+
+          fprintf(fd, "       zmm1 = simde_mm512_cvtepi8_epi16(p_bnProcBuf[%u + j +1]);\n", k * cnOffsetInGroup);
 
-        fprintf(fd,"       zmm1 = _mm512_cvtepi8_epi16(p_bnProcBuf[%u + j +1]);\n", k*cnOffsetInGroup);
 
-        fprintf(fd,"       zmmRes1 = _mm512_adds_epi16(zmmRes1,zmm1); \n");
+          fprintf(fd, "       zmmRes1 = simde_mm512_adds_epi16(zmmRes1,zmm1); \n");
             }
 
             // Add LLR from receiver input
-        fprintf(fd,"       zmm0    = _mm512_cvtepi8_epi16(p_llrProcBuf[j]);\n");
-        fprintf(fd,"       zmmRes0 = _mm512_adds_epi16(zmmRes0,zmm0);\n");
+            fprintf(fd, "       zmm0    = simde_mm512_cvtepi8_epi16(p_llrProcBuf[j]);\n");
+            fprintf(fd, "       zmmRes0 = simde_mm512_adds_epi16(zmmRes0,zmm0);\n");
 
-        fprintf(fd,"       zmm1    = _mm512_cvtepi8_epi16(p_llrProcBuf[j +1 ]);\n");
-        fprintf(fd,"       zmmRes1 = _mm512_adds_epi16(zmmRes1,zmm1);\n");
+            fprintf(fd, "       zmm1    = simde_mm512_cvtepi8_epi16(p_llrProcBuf[j +1 ]);\n");
+            fprintf(fd, "       zmmRes1 = simde_mm512_adds_epi16(zmmRes1,zmm1);\n");
 
             // Pack results back to epi8
-        fprintf(fd,"       zmm0 = _mm512_packs_epi16(zmmRes0,zmmRes1);\n");
+            fprintf(fd, "       zmm0 = simde_mm512_packs_epi16(zmmRes0,zmmRes1);\n");
             //zmm0     = [zmmRes1[255:256]zmmRes0[255:256]zmmRes1[127:0]zmmRes0[127:0]]
-        fprintf(fd,"            p_llrRes[i] = _mm512_permutex_epi64(zmm0, 0xD8);\n");
+            fprintf(fd, "            p_llrRes[i] = simde_mm512_permutex_epi64(zmm0, 0xD8);\n");
 
-        fprintf(fd,"}\n");
+            fprintf(fd, "}\n");
     }
 
 
@@ -276,39 +274,39 @@ fprintf(fd,  "// Process group with 4 CNs \n");
         cnOffsetInGroup = (lut_numBnInBnGroups[3]*NR_LDPC_ZMAX)>>5;
 
         // Set pointers to start of group 2
-        fprintf(fd,"    p_bnProcBuf     = (__m256i*) &bnProcBuf    [%u];\n",lut_startAddrBnGroups[idxBnGroup]);
-        fprintf(fd,"    p_llrProcBuf    = (__m256i*) &llrProcBuf   [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
-        fprintf(fd,"    p_llrRes        = (__m512i*) &llrRes       [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd, "    p_bnProcBuf     = (simde__m256i*) &bnProcBuf    [%u];\n", lut_startAddrBnGroups[idxBnGroup]);
+        fprintf(fd, "    p_llrProcBuf    = (simde__m256i*) &llrProcBuf   [%d];\n", lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd, "    p_llrRes        = (simde__m512i*) &llrRes       [%d];\n", lut_startAddrBnGroupsLlr[idxBnGroup]);
 
         // Loop over BNs
         fprintf(fd,"        for (int i=0,j=0;i<M;i++,j+=2) {\n");
             // First 16 LLRs of first CN
-        fprintf(fd,"       zmmRes0 = _mm512_cvtepi8_epi16(p_bnProcBuf [j]);\n");
-        fprintf(fd,"       zmmRes1 = _mm512_cvtepi8_epi16(p_bnProcBuf [j +1]);\n");
+        fprintf(fd, "       zmmRes0 = simde_mm512_cvtepi8_epi16(p_bnProcBuf [j]);\n");
+        fprintf(fd, "       zmmRes1 = simde_mm512_cvtepi8_epi16(p_bnProcBuf [j +1]);\n");
 
-            // Loop over CNs
+        // Loop over CNs
         for (k=1; k<4; k++)
         {
-        fprintf(fd,"       zmm0 = _mm512_cvtepi8_epi16(p_bnProcBuf[%u + j]);\n", k*cnOffsetInGroup);
-        fprintf(fd,"       zmmRes0 = _mm512_adds_epi16(zmmRes0,zmm0);\n");
+          fprintf(fd, "       zmm0 = simde_mm512_cvtepi8_epi16(p_bnProcBuf[%u + j]);\n", k * cnOffsetInGroup);
+          fprintf(fd, "       zmmRes0 = simde_mm512_adds_epi16(zmmRes0,zmm0);\n");
 
-        fprintf(fd,"       zmm1 = _mm512_cvtepi8_epi16(p_bnProcBuf[%u + j +1]);\n", k*cnOffsetInGroup);
+          fprintf(fd, "       zmm1 = simde_mm512_cvtepi8_epi16(p_bnProcBuf[%u + j +1]);\n", k * cnOffsetInGroup);
 
-        fprintf(fd, "      zmmRes1 = _mm512_adds_epi16(zmmRes1,zmm1); \n");
+          fprintf(fd, "      zmmRes1 = simde_mm512_adds_epi16(zmmRes1,zmm1); \n");
         }
 
             // Add LLR from receiver input
-        fprintf(fd,"       zmm0    = _mm512_cvtepi8_epi16(p_llrProcBuf[j]);\n");
-        fprintf(fd,"       zmmRes0 = _mm512_adds_epi16(zmmRes0,zmm0);\n");
+        fprintf(fd, "       zmm0    = simde_mm512_cvtepi8_epi16(p_llrProcBuf[j]);\n");
+        fprintf(fd, "       zmmRes0 = simde_mm512_adds_epi16(zmmRes0,zmm0);\n");
 
-        fprintf(fd,"       zmm1    = _mm512_cvtepi8_epi16(p_llrProcBuf[j +1 ]);\n");
-        fprintf(fd,"       zmmRes1 = _mm512_adds_epi16(zmmRes1,zmm1);\n");
+        fprintf(fd, "       zmm1    = simde_mm512_cvtepi8_epi16(p_llrProcBuf[j +1 ]);\n");
+        fprintf(fd, "       zmmRes1 = simde_mm512_adds_epi16(zmmRes1,zmm1);\n");
 
-            // Pack results back to epi8
-        fprintf(fd,"       zmm0 = _mm512_packs_epi16(zmmRes0,zmmRes1);\n");
-            //zmm0     = [zmmRes1[255:256]zmmRes0[255:256]zmmRes1[127:0]zmmRes0[127:0]]
-            // p_llrRes = [zmmRes1[255:256]zmmRes1[127:0]zmmRes0[255:256]zmmRes0[127:0]]
-        fprintf(fd,"            p_llrRes[i] = _mm512_permutex_epi64(zmm0, 0xD8);\n");
+        // Pack results back to epi8
+        fprintf(fd, "       zmm0 = simde_mm512_packs_epi16(zmmRes0,zmmRes1);\n");
+        // packs_epi16 interleaves the packed 64-bit chunks of zmmRes0/zmmRes1 within each 128-bit lane
+        // the 0xD8 permute regroups them per 256-bit lane before the store to p_llrRes
+        fprintf(fd, "            p_llrRes[i] = simde_mm512_permutex_epi64(zmm0, 0xD8);\n");
 
         fprintf(fd,"}\n");
     }
@@ -333,38 +331,38 @@ fprintf(fd,  "// Process group with 5 CNs \n");
         cnOffsetInGroup = (lut_numBnInBnGroups[4]*NR_LDPC_ZMAX)>>5;
 
         // Set pointers to start of group 2
-        fprintf(fd,"    p_bnProcBuf     = (__m256i*) &bnProcBuf    [%u];\n",lut_startAddrBnGroups[idxBnGroup]);
-        fprintf(fd,"    p_llrProcBuf    = (__m256i*) &llrProcBuf   [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
-        fprintf(fd,"    p_llrRes        = (__m512i*) &llrRes       [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd, "    p_bnProcBuf     = (simde__m256i*) &bnProcBuf    [%u];\n", lut_startAddrBnGroups[idxBnGroup]);
+        fprintf(fd, "    p_llrProcBuf    = (simde__m256i*) &llrProcBuf   [%d];\n", lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd, "    p_llrRes        = (simde__m512i*) &llrRes       [%d];\n", lut_startAddrBnGroupsLlr[idxBnGroup]);
         // Loop over BNs
         fprintf(fd,"        for (int i=0,j=0;i<M;i++,j+=2) {\n");
             // First 16 LLRs of first CN
-        fprintf(fd,"       zmmRes0 = _mm512_cvtepi8_epi16(p_bnProcBuf [j]);\n");
-        fprintf(fd,"       zmmRes1 = _mm512_cvtepi8_epi16(p_bnProcBuf [j +1]);\n");
+        fprintf(fd, "       zmmRes0 = simde_mm512_cvtepi8_epi16(p_bnProcBuf [j]);\n");
+        fprintf(fd, "       zmmRes1 = simde_mm512_cvtepi8_epi16(p_bnProcBuf [j +1]);\n");
 
-            // Loop over CNs
+        // Loop over CNs
         for (k=1; k<5; k++)
         {
-        fprintf(fd,"       zmm0 = _mm512_cvtepi8_epi16(p_bnProcBuf[%u + j]);\n", k*cnOffsetInGroup);
-        fprintf(fd,"       zmmRes0 = _mm512_adds_epi16(zmmRes0,zmm0);\n");
+          fprintf(fd, "       zmm0 = simde_mm512_cvtepi8_epi16(p_bnProcBuf[%u + j]);\n", k * cnOffsetInGroup);
+          fprintf(fd, "       zmmRes0 = simde_mm512_adds_epi16(zmmRes0,zmm0);\n");
 
-        fprintf(fd,"       zmm1 = _mm512_cvtepi8_epi16(p_bnProcBuf[%u + j +1]);\n", k*cnOffsetInGroup);
+          fprintf(fd, "       zmm1 = simde_mm512_cvtepi8_epi16(p_bnProcBuf[%u + j +1]);\n", k * cnOffsetInGroup);
 
-        fprintf(fd, "      zmmRes1 = _mm512_adds_epi16(zmmRes1,zmm1); \n");
+          fprintf(fd, "      zmmRes1 = simde_mm512_adds_epi16(zmmRes1,zmm1); \n");
         }
 
             // Add LLR from receiver input
-        fprintf(fd,"       zmm0    = _mm512_cvtepi8_epi16(p_llrProcBuf[j]);\n");
-        fprintf(fd,"       zmmRes0 = _mm512_adds_epi16(zmmRes0,zmm0);\n");
+        fprintf(fd, "       zmm0    = simde_mm512_cvtepi8_epi16(p_llrProcBuf[j]);\n");
+        fprintf(fd, "       zmmRes0 = simde_mm512_adds_epi16(zmmRes0,zmm0);\n");
 
-        fprintf(fd,"       zmm1    = _mm512_cvtepi8_epi16(p_llrProcBuf[j +1 ]);\n");
-        fprintf(fd,"       zmmRes1 = _mm512_adds_epi16(zmmRes1,zmm1);\n");
+        fprintf(fd, "       zmm1    = simde_mm512_cvtepi8_epi16(p_llrProcBuf[j +1 ]);\n");
+        fprintf(fd, "       zmmRes1 = simde_mm512_adds_epi16(zmmRes1,zmm1);\n");
 
-            // Pack results back to epi8
-        fprintf(fd,"       zmm0 = _mm512_packs_epi16(zmmRes0,zmmRes1);\n");
-            //zmm0     = [zmmRes1[255:256]zmmRes0[255:256]zmmRes1[127:0]zmmRes0[127:0]]
-            // p_llrRes = [zmmRes1[255:256]zmmRes1[127:0]zmmRes0[255:256]zmmRes0[127:0]]
-        fprintf(fd,"            p_llrRes[i] = _mm512_permutex_epi64(zmm0, 0xD8);\n");
+        // Pack results back to epi8
+        fprintf(fd, "       zmm0 = simde_mm512_packs_epi16(zmmRes0,zmmRes1);\n");
+        // packs_epi16 interleaves the packed 64-bit chunks of zmmRes0/zmmRes1 within each 128-bit lane
+        // the 0xD8 permute regroups them per 256-bit lane before the store to p_llrRes
+        fprintf(fd, "            p_llrRes[i] = simde_mm512_permutex_epi64(zmm0, 0xD8);\n");
 
         fprintf(fd,"}\n");
     }
@@ -390,38 +388,38 @@ fprintf(fd,  "// Process group with 6 CNs \n");
         cnOffsetInGroup = (lut_numBnInBnGroups[5]*NR_LDPC_ZMAX)>>5;
 
         // Set pointers to start of group 2
-        fprintf(fd,"    p_bnProcBuf     = (__m256i*) &bnProcBuf    [%u];\n",lut_startAddrBnGroups[idxBnGroup]);
-        fprintf(fd,"    p_llrProcBuf    = (__m256i*) &llrProcBuf   [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
-        fprintf(fd,"    p_llrRes        = (__m512i*) &llrRes       [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd, "    p_bnProcBuf     = (simde__m256i*) &bnProcBuf    [%u];\n", lut_startAddrBnGroups[idxBnGroup]);
+        fprintf(fd, "    p_llrProcBuf    = (simde__m256i*) &llrProcBuf   [%d];\n", lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd, "    p_llrRes        = (simde__m512i*) &llrRes       [%d];\n", lut_startAddrBnGroupsLlr[idxBnGroup]);
         // Loop over BNs
         fprintf(fd,"        for (int i=0,j=0;i<M;i++,j+=2) {\n");
             // First 16 LLRs of first CN
-        fprintf(fd,"       zmmRes0 = _mm512_cvtepi8_epi16(p_bnProcBuf [j]);\n");
-        fprintf(fd,"       zmmRes1 = _mm512_cvtepi8_epi16(p_bnProcBuf [j +1]);\n");
+        fprintf(fd, "       zmmRes0 = simde_mm512_cvtepi8_epi16(p_bnProcBuf [j]);\n");
+        fprintf(fd, "       zmmRes1 = simde_mm512_cvtepi8_epi16(p_bnProcBuf [j +1]);\n");
 
-            // Loop over CNs
+        // Loop over CNs
         for (k=1; k<6; k++)
         {
-        fprintf(fd,"       zmm0 = _mm512_cvtepi8_epi16(p_bnProcBuf[%u + j]);\n", k*cnOffsetInGroup);
-        fprintf(fd,"       zmmRes0 = _mm512_adds_epi16(zmmRes0,zmm0);\n");
+          fprintf(fd, "       zmm0 = simde_mm512_cvtepi8_epi16(p_bnProcBuf[%u + j]);\n", k * cnOffsetInGroup);
+          fprintf(fd, "       zmmRes0 = simde_mm512_adds_epi16(zmmRes0,zmm0);\n");
 
-        fprintf(fd,"       zmm1 = _mm512_cvtepi8_epi16(p_bnProcBuf[%u + j +1]);\n", k*cnOffsetInGroup);
+          fprintf(fd, "       zmm1 = simde_mm512_cvtepi8_epi16(p_bnProcBuf[%u + j +1]);\n", k * cnOffsetInGroup);
 
-        fprintf(fd, "      zmmRes1 = _mm512_adds_epi16(zmmRes1,zmm1); \n");
+          fprintf(fd, "      zmmRes1 = simde_mm512_adds_epi16(zmmRes1,zmm1); \n");
         }
 
             // Add LLR from receiver input
-        fprintf(fd,"       zmm0    = _mm512_cvtepi8_epi16(p_llrProcBuf[j]);\n");
-        fprintf(fd,"       zmmRes0 = _mm512_adds_epi16(zmmRes0,zmm0);\n");
+        fprintf(fd, "       zmm0    = simde_mm512_cvtepi8_epi16(p_llrProcBuf[j]);\n");
+        fprintf(fd, "       zmmRes0 = simde_mm512_adds_epi16(zmmRes0,zmm0);\n");
 
-        fprintf(fd,"       zmm1    = _mm512_cvtepi8_epi16(p_llrProcBuf[j +1 ]);\n");
-        fprintf(fd,"       zmmRes1 = _mm512_adds_epi16(zmmRes1,zmm1);\n");
+        fprintf(fd, "       zmm1    = simde_mm512_cvtepi8_epi16(p_llrProcBuf[j +1 ]);\n");
+        fprintf(fd, "       zmmRes1 = simde_mm512_adds_epi16(zmmRes1,zmm1);\n");
 
-            // Pack results back to epi8
-        fprintf(fd,"       zmm0 = _mm512_packs_epi16(zmmRes0,zmmRes1);\n");
-            //zmm0     = [zmmRes1[255:256]zmmRes0[255:256]zmmRes1[127:0]zmmRes0[127:0]]
-            // p_llrRes = [zmmRes1[255:256]zmmRes1[127:0]zmmRes0[255:256]zmmRes0[127:0]]
-        fprintf(fd,"            p_llrRes[i] = _mm512_permutex_epi64(zmm0, 0xD8);\n");
+        // Pack results back to epi8
+        fprintf(fd, "       zmm0 = simde_mm512_packs_epi16(zmmRes0,zmmRes1);\n");
+        // packs_epi16 interleaves the packed 64-bit chunks of zmmRes0/zmmRes1 within each 128-bit lane
+        // the 0xD8 permute regroups them per 256-bit lane before the store to p_llrRes
+        fprintf(fd, "            p_llrRes[i] = simde_mm512_permutex_epi64(zmm0, 0xD8);\n");
 
         fprintf(fd,"}\n");
     }
@@ -446,39 +444,40 @@ fprintf(fd,  "// Process group with 7 CNs \n");
         cnOffsetInGroup = (lut_numBnInBnGroups[6]*NR_LDPC_ZMAX)>>5;
 
         // Set pointers to start of group 2
-        fprintf(fd,"    p_bnProcBuf     = (__m256i*) &bnProcBuf    [%u];\n",lut_startAddrBnGroups[idxBnGroup]);
-        fprintf(fd,"    p_llrProcBuf    = (__m256i*) &llrProcBuf   [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
-        fprintf(fd,"    p_llrRes        = (__m512i*) &llrRes       [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd, "    p_bnProcBuf     = (simde__m256i*) &bnProcBuf    [%u];\n", lut_startAddrBnGroups[idxBnGroup]);
+        fprintf(fd, "    p_llrProcBuf    = (simde__m256i*) &llrProcBuf   [%d];\n", lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd, "    p_llrRes        = (simde__m512i*) &llrRes       [%d];\n", lut_startAddrBnGroupsLlr[idxBnGroup]);
         // Loop over BNs
         fprintf(fd,"        for (int i=0,j=0;i<M;i++,j+=2) {\n");
             // First 16 LLRs of first CN
-        fprintf(fd,"       zmmRes0 = _mm512_cvtepi8_epi16(p_bnProcBuf [j]);\n");
-        fprintf(fd,"       zmmRes1 = _mm512_cvtepi8_epi16(p_bnProcBuf [j +1]);\n");
+        fprintf(fd, "       zmmRes0 = simde_mm512_cvtepi8_epi16(p_bnProcBuf [j]);\n");
+        fprintf(fd, "       zmmRes1 = simde_mm512_cvtepi8_epi16(p_bnProcBuf [j +1]);\n");
 
-            // Loop over CNs
+        // Loop over CNs
         for (k=1; k<7; k++)
         {
-        fprintf(fd,"       zmm0 = _mm512_cvtepi8_epi16(p_bnProcBuf[%u + j]);\n", k*cnOffsetInGroup);
-        fprintf(fd,"       zmmRes0 = _mm512_adds_epi16(zmmRes0,zmm0);\n");
+          fprintf(fd, "       zmm0 = simde_mm512_cvtepi8_epi16(p_bnProcBuf[%u + j]);\n", k * cnOffsetInGroup);
+          fprintf(fd, "       zmmRes0 = simde_mm512_adds_epi16(zmmRes0,zmm0);\n");
 
-        fprintf(fd,"       zmm1 = _mm512_cvtepi8_epi16(p_bnProcBuf[%u + j +1]);\n", k*cnOffsetInGroup);
+          fprintf(fd, "       zmm1 = simde_mm512_cvtepi8_epi16(p_bnProcBuf[%u + j +1]);\n", k * cnOffsetInGroup);
 
-        fprintf(fd, "      zmmRes1 = _mm512_adds_epi16(zmmRes1,zmm1); \n");
+          fprintf(fd, "      zmmRes1 = simde_mm512_adds_epi16(zmmRes1,zmm1); \n");
         }
 
             // Add LLR from receiver input
-        fprintf(fd,"       zmm0    = _mm512_cvtepi8_epi16(p_llrProcBuf[j]);\n");
-        fprintf(fd,"       zmmRes0 = _mm512_adds_epi16(zmmRes0,zmm0);\n");
+        fprintf(fd, "       zmm0    = simde_mm512_cvtepi8_epi16(p_llrProcBuf[j]);\n");
+        fprintf(fd, "       zmmRes0 = simde_mm512_adds_epi16(zmmRes0,zmm0);\n");
 
-        fprintf(fd,"       zmm1    = _mm512_cvtepi8_epi16(p_llrProcBuf[j +1 ]);\n");
-        fprintf(fd,"       zmmRes1 = _mm512_adds_epi16(zmmRes1,zmm1);\n");
+        fprintf(fd, "       zmm1    = simde_mm512_cvtepi8_epi16(p_llrProcBuf[j +1 ]);\n");
+        fprintf(fd, "       zmmRes1 = simde_mm512_adds_epi16(zmmRes1,zmm1);\n");
 
-            // Pack results back to epi8
-        fprintf(fd,"       zmm0 = _mm512_packs_epi16(zmmRes0,zmmRes1);\n");
-            //zmm0     = [zmmRes1[255:256]zmmRes0[255:256]zmmRes1[127:0]zmmRes0[127:0]]
-            // p_llrRes = [zmmRes1[255:256]zmmRes1[127:0]zmmRes0[255:256]zmmRes0[127:0]]
-        //fprintf(fd,"         (__m512i*) &llrRes[%d + i]    = _mm512_permutex_epi64(zmm0, 0xD8);\n",lut_startAddrBnGroupsLlr[idxBnGroup]>>5 );
-        fprintf(fd,"            p_llrRes[i] = _mm512_permutex_epi64(zmm0, 0xD8);\n");
+        // Pack results back to epi8
+        fprintf(fd, "       zmm0 = simde_mm512_packs_epi16(zmmRes0,zmmRes1);\n");
+        // packs_epi16 interleaves the packed 64-bit chunks of zmmRes0/zmmRes1 within each 128-bit lane
+        // the 0xD8 permute regroups them per 256-bit lane before the store to p_llrRes
+        // fprintf(fd,"         (simde__m512i*) &llrRes[%d + i]    = simde_mm512_permutex_epi64(zmm0,
+        // 0xD8);\n",lut_startAddrBnGroupsLlr[idxBnGroup]>>5 );
+        fprintf(fd, "            p_llrRes[i] = simde_mm512_permutex_epi64(zmm0, 0xD8);\n");
 
         fprintf(fd,"}\n");
     }
@@ -503,40 +502,41 @@ fprintf(fd,  "// Process group with 8 CNs \n");
         cnOffsetInGroup = (lut_numBnInBnGroups[7]*NR_LDPC_ZMAX)>>5;
 
         // Set pointers to start of group 2
-        fprintf(fd,"    p_bnProcBuf     = (__m256i*) &bnProcBuf    [%u];\n",lut_startAddrBnGroups[idxBnGroup]);
-        fprintf(fd,"    p_llrProcBuf    = (__m256i*) &llrProcBuf   [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
-        fprintf(fd,"    p_llrRes        = (__m512i*) &llrRes       [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd, "    p_bnProcBuf     = (simde__m256i*) &bnProcBuf    [%u];\n", lut_startAddrBnGroups[idxBnGroup]);
+        fprintf(fd, "    p_llrProcBuf    = (simde__m256i*) &llrProcBuf   [%d];\n", lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd, "    p_llrRes        = (simde__m512i*) &llrRes       [%d];\n", lut_startAddrBnGroupsLlr[idxBnGroup]);
         // Loop over BNs
         fprintf(fd,"        for (int i=0,j=0;i<M;i++,j+=2) {\n");
             // First 16 LLRs of first CN
-        fprintf(fd,"       zmmRes0 = _mm512_cvtepi8_epi16(p_bnProcBuf [j]);\n");
-        fprintf(fd,"       zmmRes1 = _mm512_cvtepi8_epi16(p_bnProcBuf [j +1]);\n");
+        fprintf(fd, "       zmmRes0 = simde_mm512_cvtepi8_epi16(p_bnProcBuf [j]);\n");
+        fprintf(fd, "       zmmRes1 = simde_mm512_cvtepi8_epi16(p_bnProcBuf [j +1]);\n");
 
-            // Loop over CNs
+        // Loop over CNs
         for (k=1; k<8; k++)
         {
-        fprintf(fd,"       zmm0 = _mm512_cvtepi8_epi16(p_bnProcBuf[%u + j]);\n", k*cnOffsetInGroup);
-        fprintf(fd,"       zmmRes0 = _mm512_adds_epi16(zmmRes0,zmm0);\n");
+          fprintf(fd, "       zmm0 = simde_mm512_cvtepi8_epi16(p_bnProcBuf[%u + j]);\n", k * cnOffsetInGroup);
+          fprintf(fd, "       zmmRes0 = simde_mm512_adds_epi16(zmmRes0,zmm0);\n");
 
-        fprintf(fd,"       zmm1 = _mm512_cvtepi8_epi16(p_bnProcBuf[%u + j +1]);\n", k*cnOffsetInGroup);
+          fprintf(fd, "       zmm1 = simde_mm512_cvtepi8_epi16(p_bnProcBuf[%u + j +1]);\n", k * cnOffsetInGroup);
 
-        fprintf(fd, "      zmmRes1 = _mm512_adds_epi16(zmmRes1,zmm1); \n");
+          fprintf(fd, "      zmmRes1 = simde_mm512_adds_epi16(zmmRes1,zmm1); \n");
         }
 
             // Add LLR from receiver input
-        fprintf(fd,"       zmm0    = _mm512_cvtepi8_epi16(p_llrProcBuf[j]);\n");
-        fprintf(fd,"       zmmRes0 = _mm512_adds_epi16(zmmRes0,zmm0);\n");
+        fprintf(fd, "       zmm0    = simde_mm512_cvtepi8_epi16(p_llrProcBuf[j]);\n");
+        fprintf(fd, "       zmmRes0 = simde_mm512_adds_epi16(zmmRes0,zmm0);\n");
 
-        fprintf(fd,"       zmm1    = _mm512_cvtepi8_epi16(p_llrProcBuf[j +1 ]);\n");
-        fprintf(fd,"       zmmRes1 = _mm512_adds_epi16(zmmRes1,zmm1);\n");
+        fprintf(fd, "       zmm1    = simde_mm512_cvtepi8_epi16(p_llrProcBuf[j +1 ]);\n");
+        fprintf(fd, "       zmmRes1 = simde_mm512_adds_epi16(zmmRes1,zmm1);\n");
 
-            // Pack results back to epi8
-        fprintf(fd,"       zmm0 = _mm512_packs_epi16(zmmRes0,zmmRes1);\n");
-            //zmm0     = [zmmRes1[255:256]zmmRes0[255:256]zmmRes1[127:0]zmmRes0[127:0]]
-            // p_llrRes = [zmmRes1[255:256]zmmRes1[127:0]zmmRes0[255:256]zmmRes0[127:0]]
-        //fprintf(fd,"         (__m512i*) &llrRes[%d + i]    = _mm512_permutex_epi64(zmm0, 0xD8);\n",lut_startAddrBnGroupsLlr[idxBnGroup]>>5 );
+        // Pack results back to epi8
+        fprintf(fd, "       zmm0 = simde_mm512_packs_epi16(zmmRes0,zmmRes1);\n");
+        // packs_epi16 interleaves the packed 64-bit chunks of zmmRes0/zmmRes1 within each 128-bit lane
+        // the 0xD8 permute regroups them per 256-bit lane before the store to p_llrRes
+        // fprintf(fd,"         (simde__m512i*) &llrRes[%d + i]    = simde_mm512_permutex_epi64(zmm0,
+        // 0xD8);\n",lut_startAddrBnGroupsLlr[idxBnGroup]>>5 );
 
-        fprintf(fd,"            p_llrRes[i] = _mm512_permutex_epi64(zmm0, 0xD8);\n");
+        fprintf(fd, "            p_llrRes[i] = simde_mm512_permutex_epi64(zmm0, 0xD8);\n");
 
         fprintf(fd,"}\n");
     }
@@ -560,39 +560,40 @@ fprintf(fd,  "// Process group with 9 CNs \n");
         cnOffsetInGroup = (lut_numBnInBnGroups[8]*NR_LDPC_ZMAX)>>5;
 
         // Set pointers to start of group 2
-        fprintf(fd,"    p_bnProcBuf     = (__m256i*) &bnProcBuf    [%u];\n",lut_startAddrBnGroups[idxBnGroup]);
-        fprintf(fd,"    p_llrProcBuf    = (__m256i*) &llrProcBuf   [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
-        fprintf(fd,"    p_llrRes        = (__m512i*) &llrRes       [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd, "    p_bnProcBuf     = (simde__m256i*) &bnProcBuf    [%u];\n", lut_startAddrBnGroups[idxBnGroup]);
+        fprintf(fd, "    p_llrProcBuf    = (simde__m256i*) &llrProcBuf   [%d];\n", lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd, "    p_llrRes        = (simde__m512i*) &llrRes       [%d];\n", lut_startAddrBnGroupsLlr[idxBnGroup]);
         // Loop over BNs
         fprintf(fd,"        for (int i=0,j=0;i<M;i++,j+=2) {\n");
             // First 16 LLRs of first CN
-        fprintf(fd,"       zmmRes0 = _mm512_cvtepi8_epi16(p_bnProcBuf [j]);\n");
-        fprintf(fd,"       zmmRes1 = _mm512_cvtepi8_epi16(p_bnProcBuf [j +1]);\n");
+        fprintf(fd, "       zmmRes0 = simde_mm512_cvtepi8_epi16(p_bnProcBuf [j]);\n");
+        fprintf(fd, "       zmmRes1 = simde_mm512_cvtepi8_epi16(p_bnProcBuf [j +1]);\n");
 
-            // Loop over CNs
+        // Loop over CNs
         for (k=1; k<9; k++)
         {
-        fprintf(fd,"       zmm0 = _mm512_cvtepi8_epi16(p_bnProcBuf[%u + j]);\n", k*cnOffsetInGroup);
-        fprintf(fd,"       zmmRes0 = _mm512_adds_epi16(zmmRes0,zmm0);\n");
+          fprintf(fd, "       zmm0 = simde_mm512_cvtepi8_epi16(p_bnProcBuf[%u + j]);\n", k * cnOffsetInGroup);
+          fprintf(fd, "       zmmRes0 = simde_mm512_adds_epi16(zmmRes0,zmm0);\n");
 
-        fprintf(fd,"       zmm1 = _mm512_cvtepi8_epi16(p_bnProcBuf[%u + j +1]);\n", k*cnOffsetInGroup);
+          fprintf(fd, "       zmm1 = simde_mm512_cvtepi8_epi16(p_bnProcBuf[%d + j +1]);\n", k * cnOffsetInGroup);
 
-        fprintf(fd, "      zmmRes1 = _mm512_adds_epi16(zmmRes1,zmm1); \n");
+          fprintf(fd, "      zmmRes1 = simde_mm512_adds_epi16(zmmRes1,zmm1); \n");
         }
 
             // Add LLR from receiver input
-        fprintf(fd,"       zmm0    = _mm512_cvtepi8_epi16(p_llrProcBuf[j]);\n");
-        fprintf(fd,"       zmmRes0 = _mm512_adds_epi16(zmmRes0,zmm0);\n");
+        fprintf(fd, "       zmm0    = simde_mm512_cvtepi8_epi16(p_llrProcBuf[j]);\n");
+        fprintf(fd, "       zmmRes0 = simde_mm512_adds_epi16(zmmRes0,zmm0);\n");
 
-        fprintf(fd,"       zmm1    = _mm512_cvtepi8_epi16(p_llrProcBuf[j +1 ]);\n");
-        fprintf(fd,"       zmmRes1 = _mm512_adds_epi16(zmmRes1,zmm1);\n");
+        fprintf(fd, "       zmm1    = simde_mm512_cvtepi8_epi16(p_llrProcBuf[j +1 ]);\n");
+        fprintf(fd, "       zmmRes1 = simde_mm512_adds_epi16(zmmRes1,zmm1);\n");
 
-            // Pack results back to epi8
-        fprintf(fd,"       zmm0 = _mm512_packs_epi16(zmmRes0,zmmRes1);\n");
-            //zmm0     = [zmmRes1[255:256]zmmRes0[255:256]zmmRes1[127:0]zmmRes0[127:0]]
-            // p_llrRes = [zmmRes1[255:256]zmmRes1[127:0]zmmRes0[255:256]zmmRes0[127:0]]
-        //fprintf(fd,"         (__m512i*) &llrRes[%d + i]    = _mm512_permutex_epi64(zmm0, 0xD8);\n",lut_startAddrBnGroupsLlr[idxBnGroup]>>5 );
-        fprintf(fd,"            p_llrRes[i] = _mm512_permutex_epi64(zmm0, 0xD8);\n");
+        // Pack results back to epi8
+        fprintf(fd, "       zmm0 = simde_mm512_packs_epi16(zmmRes0,zmmRes1);\n");
+        // packs_epi16 interleaves the packed 64-bit chunks of zmmRes0/zmmRes1 within each 128-bit lane
+        // the 0xD8 permute regroups them per 256-bit lane before the store to p_llrRes
+        // fprintf(fd,"         (simde__m512i*) &llrRes[%d + i]    = simde_mm512_permutex_epi64(zmm0,
+        // 0xD8);\n",lut_startAddrBnGroupsLlr[idxBnGroup]>>5 );
+        fprintf(fd, "            p_llrRes[i] = simde_mm512_permutex_epi64(zmm0, 0xD8);\n");
 
         fprintf(fd,"}\n");
     }
@@ -617,38 +618,38 @@ fprintf(fd,  "// Process group with 10 CNs \n");
         cnOffsetInGroup = (lut_numBnInBnGroups[9]*NR_LDPC_ZMAX)>>5;
 
         // Set pointers to start of group 2
-        fprintf(fd,"    p_bnProcBuf     = (__m256i*) &bnProcBuf    [%u];\n",lut_startAddrBnGroups[idxBnGroup]);
-        fprintf(fd,"    p_llrProcBuf    = (__m256i*) &llrProcBuf   [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
-        fprintf(fd,"    p_llrRes        = (__m512i*) &llrRes       [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd, "    p_bnProcBuf     = (simde__m256i*) &bnProcBuf    [%u];\n", lut_startAddrBnGroups[idxBnGroup]);
+        fprintf(fd, "    p_llrProcBuf    = (simde__m256i*) &llrProcBuf   [%d];\n", lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd, "    p_llrRes        = (simde__m512i*) &llrRes       [%d];\n", lut_startAddrBnGroupsLlr[idxBnGroup]);
         // Loop over BNs
         fprintf(fd,"        for (int i=0,j=0;i<M;i++,j+=2) {\n");
             // First 16 LLRs of first CN
-        fprintf(fd,"       zmmRes0 = _mm512_cvtepi8_epi16(p_bnProcBuf [j]);\n");
-        fprintf(fd,"       zmmRes1 = _mm512_cvtepi8_epi16(p_bnProcBuf [j +1]);\n");
+        fprintf(fd, "       zmmRes0 = simde_mm512_cvtepi8_epi16(p_bnProcBuf [j]);\n");
+        fprintf(fd, "       zmmRes1 = simde_mm512_cvtepi8_epi16(p_bnProcBuf [j +1]);\n");
 
-            // Loop over CNs
+        // Loop over CNs
         for (k=1; k<10; k++)
         {
-        fprintf(fd,"       zmm0 = _mm512_cvtepi8_epi16(p_bnProcBuf[%u + j]);\n", k*cnOffsetInGroup);
-        fprintf(fd,"       zmmRes0 = _mm512_adds_epi16(zmmRes0,zmm0);\n");
+          fprintf(fd, "       zmm0 = simde_mm512_cvtepi8_epi16(p_bnProcBuf[%u + j]);\n", k * cnOffsetInGroup);
+          fprintf(fd, "       zmmRes0 = simde_mm512_adds_epi16(zmmRes0,zmm0);\n");
 
-        fprintf(fd,"       zmm1 = _mm512_cvtepi8_epi16(p_bnProcBuf[%u + j +1]);\n", k*cnOffsetInGroup);
+          fprintf(fd, "       zmm1 = simde_mm512_cvtepi8_epi16(p_bnProcBuf[%u + j +1]);\n", k * cnOffsetInGroup);
 
-        fprintf(fd, "      zmmRes1 = _mm512_adds_epi16(zmmRes1,zmm1); \n");
+          fprintf(fd, "      zmmRes1 = simde_mm512_adds_epi16(zmmRes1,zmm1); \n");
         }
 
             // Add LLR from receiver input
-        fprintf(fd,"       zmm0    = _mm512_cvtepi8_epi16(p_llrProcBuf[j]);\n");
-        fprintf(fd,"       zmmRes0 = _mm512_adds_epi16(zmmRes0,zmm0);\n");
+        fprintf(fd, "       zmm0    = simde_mm512_cvtepi8_epi16(p_llrProcBuf[j]);\n");
+        fprintf(fd, "       zmmRes0 = simde_mm512_adds_epi16(zmmRes0,zmm0);\n");
 
-        fprintf(fd,"       zmm1    = _mm512_cvtepi8_epi16(p_llrProcBuf[j +1 ]);\n");
-        fprintf(fd,"       zmmRes1 = _mm512_adds_epi16(zmmRes1,zmm1);\n");
+        fprintf(fd, "       zmm1    = simde_mm512_cvtepi8_epi16(p_llrProcBuf[j +1 ]);\n");
+        fprintf(fd, "       zmmRes1 = simde_mm512_adds_epi16(zmmRes1,zmm1);\n");
 
-            // Pack results back to epi8
-        fprintf(fd,"       zmm0 = _mm512_packs_epi16(zmmRes0,zmmRes1);\n");
-            //zmm0     = [zmmRes1[255:256]zmmRes0[255:256]zmmRes1[127:0]zmmRes0[127:0]]
-            // p_llrRes = [zmmRes1[255:256]zmmRes1[127:0]zmmRes0[255:256]zmmRes0[127:0]]
-        fprintf(fd,"            p_llrRes[i] = _mm512_permutex_epi64(zmm0, 0xD8);\n");
+        // Pack results back to epi8
+        fprintf(fd, "       zmm0 = simde_mm512_packs_epi16(zmmRes0,zmmRes1);\n");
+        // zmm0     = [zmmRes1[255:128]zmmRes0[255:128]zmmRes1[127:0]zmmRes0[127:0]]
+        // p_llrRes = [zmmRes1[255:128]zmmRes1[127:0]zmmRes0[255:128]zmmRes0[127:0]]
+        fprintf(fd, "            p_llrRes[i] = simde_mm512_permutex_epi64(zmm0, 0xD8);\n");
 
         fprintf(fd,"}\n");
     }
@@ -675,38 +676,38 @@ fprintf(fd,  "// Process group with 11 CNs \n");
         cnOffsetInGroup = (lut_numBnInBnGroups[10]*NR_LDPC_ZMAX)>>5;
 
         // Set pointers to start of group 2
-        fprintf(fd,"    p_bnProcBuf     = (__m256i*) &bnProcBuf    [%u];\n",lut_startAddrBnGroups[idxBnGroup]);
-        fprintf(fd,"    p_llrProcBuf    = (__m256i*) &llrProcBuf   [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
-        fprintf(fd,"    p_llrRes        = (__m512i*) &llrRes       [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd, "    p_bnProcBuf     = (simde__m256i*) &bnProcBuf    [%u];\n", lut_startAddrBnGroups[idxBnGroup]);
+        fprintf(fd, "    p_llrProcBuf    = (simde__m256i*) &llrProcBuf   [%d];\n", lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd, "    p_llrRes        = (simde__m512i*) &llrRes       [%d];\n", lut_startAddrBnGroupsLlr[idxBnGroup]);
         // Loop over BNs
         fprintf(fd,"            for (int i=0,j=0;i<M;i++,j+=2) {\n");
             // First 16 LLRs of first CN
-        fprintf(fd,"           zmmRes0 = _mm512_cvtepi8_epi16(p_bnProcBuf [j]);\n");
-        fprintf(fd,"           zmmRes1 = _mm512_cvtepi8_epi16(p_bnProcBuf [j +1]);\n");
+        fprintf(fd, "           zmmRes0 = simde_mm512_cvtepi8_epi16(p_bnProcBuf [j]);\n");
+        fprintf(fd, "           zmmRes1 = simde_mm512_cvtepi8_epi16(p_bnProcBuf [j +1]);\n");
 
-            // Loop over CNs
+        // Loop over CNs
         for (k=1; k<11; k++)
         {
-        fprintf(fd,"           zmm0 = _mm512_cvtepi8_epi16(p_bnProcBuf[%u + j]);\n", k*cnOffsetInGroup);
-        fprintf(fd,"           zmmRes0 = _mm512_adds_epi16(zmmRes0,zmm0);\n");
+          fprintf(fd, "           zmm0 = simde_mm512_cvtepi8_epi16(p_bnProcBuf[%u + j]);\n", k * cnOffsetInGroup);
+          fprintf(fd, "           zmmRes0 = simde_mm512_adds_epi16(zmmRes0,zmm0);\n");
 
-        fprintf(fd,"           zmm1 = _mm512_cvtepi8_epi16(p_bnProcBuf[%u + j +1]);\n", k*cnOffsetInGroup);
+          fprintf(fd, "           zmm1 = simde_mm512_cvtepi8_epi16(p_bnProcBuf[%u + j +1]);\n", k * cnOffsetInGroup);
 
-        fprintf(fd, "          zmmRes1 = _mm512_adds_epi16(zmmRes1,zmm1); \n");
+          fprintf(fd, "          zmmRes1 = simde_mm512_adds_epi16(zmmRes1,zmm1); \n");
         }
 
             // Add LLR from receiver input
-        fprintf(fd,"           zmm0    = _mm512_cvtepi8_epi16(p_llrProcBuf[j]);\n");
-        fprintf(fd,"           zmmRes0 = _mm512_adds_epi16(zmmRes0,zmm0);\n");
+        fprintf(fd, "           zmm0    = simde_mm512_cvtepi8_epi16(p_llrProcBuf[j]);\n");
+        fprintf(fd, "           zmmRes0 = simde_mm512_adds_epi16(zmmRes0,zmm0);\n");
 
-        fprintf(fd,"           zmm1    = _mm512_cvtepi8_epi16(p_llrProcBuf[j +1 ]);\n");
-        fprintf(fd,"           zmmRes1 = _mm512_adds_epi16(zmmRes1,zmm1);\n");
+        fprintf(fd, "           zmm1    = simde_mm512_cvtepi8_epi16(p_llrProcBuf[j +1 ]);\n");
+        fprintf(fd, "           zmmRes1 = simde_mm512_adds_epi16(zmmRes1,zmm1);\n");
 
-            // Pack results back to epi8
-        fprintf(fd,"           zmm0 = _mm512_packs_epi16(zmmRes0,zmmRes1);\n");
-            //zmm0     = [zmmRes1[255:256]zmmRes0[255:256]zmmRes1[127:0]zmmRes0[127:0]]
-            // p_llrRes = [zmmRes1[255:256]zmmRes1[127:0]zmmRes0[255:256]zmmRes0[127:0]]
-        fprintf(fd,"            p_llrRes[i] = _mm512_permutex_epi64(zmm0, 0xD8);\n");
+        // Pack results back to epi8
+        fprintf(fd, "           zmm0 = simde_mm512_packs_epi16(zmmRes0,zmmRes1);\n");
+        // zmm0     = [zmmRes1[255:128]zmmRes0[255:128]zmmRes1[127:0]zmmRes0[127:0]]
+        // p_llrRes = [zmmRes1[255:128]zmmRes1[127:0]zmmRes0[255:128]zmmRes0[127:0]]
+        fprintf(fd, "            p_llrRes[i] = simde_mm512_permutex_epi64(zmm0, 0xD8);\n");
 
         fprintf(fd,"}\n");
     }
@@ -730,38 +731,38 @@ fprintf(fd,  "// Process group with 12 CNs \n");
         cnOffsetInGroup = (lut_numBnInBnGroups[11]*NR_LDPC_ZMAX)>>5;
 
         // Set pointers to start of group 2
-        fprintf(fd,"    p_bnProcBuf     = (__m256i*) &bnProcBuf    [%u];\n",lut_startAddrBnGroups[idxBnGroup]);
-        fprintf(fd,"    p_llrProcBuf    = (__m256i*) &llrProcBuf   [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
-        fprintf(fd,"    p_llrRes        = (__m512i*) &llrRes       [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd, "    p_bnProcBuf     = (simde__m256i*) &bnProcBuf    [%u];\n", lut_startAddrBnGroups[idxBnGroup]);
+        fprintf(fd, "    p_llrProcBuf    = (simde__m256i*) &llrProcBuf   [%d];\n", lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd, "    p_llrRes        = (simde__m512i*) &llrRes       [%d];\n", lut_startAddrBnGroupsLlr[idxBnGroup]);
         // Loop over BNs
         fprintf(fd,"            for (int i=0,j=0;i<M;i++,j+=2) {\n");
             // First 16 LLRs of first CN
-        fprintf(fd,"           zmmRes0 = _mm512_cvtepi8_epi16(p_bnProcBuf [j]);\n");
-        fprintf(fd,"           zmmRes1 = _mm512_cvtepi8_epi16(p_bnProcBuf [j +1]);\n");
+        fprintf(fd, "           zmmRes0 = simde_mm512_cvtepi8_epi16(p_bnProcBuf [j]);\n");
+        fprintf(fd, "           zmmRes1 = simde_mm512_cvtepi8_epi16(p_bnProcBuf [j +1]);\n");
 
-            // Loop over CNs
+        // Loop over CNs
         for (k=1; k<12; k++)
         {
-        fprintf(fd,"           zmm0 = _mm512_cvtepi8_epi16(p_bnProcBuf[%u + j]);\n", k*cnOffsetInGroup);
-        fprintf(fd,"           zmmRes0 = _mm512_adds_epi16(zmmRes0,zmm0);\n");
+          fprintf(fd, "           zmm0 = simde_mm512_cvtepi8_epi16(p_bnProcBuf[%u + j]);\n", k * cnOffsetInGroup);
+          fprintf(fd, "           zmmRes0 = simde_mm512_adds_epi16(zmmRes0,zmm0);\n");
 
-        fprintf(fd,"           zmm1 = _mm512_cvtepi8_epi16(p_bnProcBuf[%u + j +1]);\n", k*cnOffsetInGroup);
+          fprintf(fd, "           zmm1 = simde_mm512_cvtepi8_epi16(p_bnProcBuf[%u + j +1]);\n", k * cnOffsetInGroup);
 
-        fprintf(fd, "          zmmRes1 = _mm512_adds_epi16(zmmRes1,zmm1); \n");
+          fprintf(fd, "          zmmRes1 = simde_mm512_adds_epi16(zmmRes1,zmm1); \n");
         }
 
             // Add LLR from receiver input
-        fprintf(fd,"           zmm0    = _mm512_cvtepi8_epi16(p_llrProcBuf[j]);\n");
-        fprintf(fd,"           zmmRes0 = _mm512_adds_epi16(zmmRes0,zmm0);\n");
+        fprintf(fd, "           zmm0    = simde_mm512_cvtepi8_epi16(p_llrProcBuf[j]);\n");
+        fprintf(fd, "           zmmRes0 = simde_mm512_adds_epi16(zmmRes0,zmm0);\n");
 
-        fprintf(fd,"           zmm1    = _mm512_cvtepi8_epi16(p_llrProcBuf[j +1 ]);\n");
-        fprintf(fd,"           zmmRes1 = _mm512_adds_epi16(zmmRes1,zmm1);\n");
+        fprintf(fd, "           zmm1    = simde_mm512_cvtepi8_epi16(p_llrProcBuf[j +1 ]);\n");
+        fprintf(fd, "           zmmRes1 = simde_mm512_adds_epi16(zmmRes1,zmm1);\n");
 
-            // Pack results back to epi8
-        fprintf(fd,"           zmm0 = _mm512_packs_epi16(zmmRes0,zmmRes1);\n");
-            //zmm0     = [zmmRes1[255:256]zmmRes0[255:256]zmmRes1[127:0]zmmRes0[127:0]]
-            // p_llrRes = [zmmRes1[255:256]zmmRes1[127:0]zmmRes0[255:256]zmmRes0[127:0]]
-        fprintf(fd,"            p_llrRes[i] = _mm512_permutex_epi64(zmm0, 0xD8);\n");
+        // Pack results back to epi8
+        fprintf(fd, "           zmm0 = simde_mm512_packs_epi16(zmmRes0,zmmRes1);\n");
+        // zmm0     = [zmmRes1[255:128]zmmRes0[255:128]zmmRes1[127:0]zmmRes0[127:0]]
+        // p_llrRes = [zmmRes1[255:128]zmmRes1[127:0]zmmRes0[255:128]zmmRes0[127:0]]
+        fprintf(fd, "            p_llrRes[i] = simde_mm512_permutex_epi64(zmm0, 0xD8);\n");
 
         fprintf(fd,"}\n");
     }
@@ -786,40 +787,40 @@ fprintf(fd,  "// Process group with 13 CNs \n");
         cnOffsetInGroup = (lut_numBnInBnGroups[12]*NR_LDPC_ZMAX)>>5;
 
         // Set pointers to start of group 2
-        fprintf(fd,"    p_bnProcBuf     = (__m256i*) &bnProcBuf    [%u];\n",lut_startAddrBnGroups[idxBnGroup]);
-        fprintf(fd,"    p_llrProcBuf    = (__m256i*) &llrProcBuf   [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
-        fprintf(fd,"    p_llrRes        = (__m512i*) &llrRes       [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd, "    p_bnProcBuf     = (simde__m256i*) &bnProcBuf    [%u];\n", lut_startAddrBnGroups[idxBnGroup]);
+        fprintf(fd, "    p_llrProcBuf    = (simde__m256i*) &llrProcBuf   [%d];\n", lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd, "    p_llrRes        = (simde__m512i*) &llrRes       [%d];\n", lut_startAddrBnGroupsLlr[idxBnGroup]);
         // Loop over BNs
         fprintf(fd,"        for (int i=0,j=0;i<M;i++,j+=2) {\n");
             // First 16 LLRs of first CN
-        fprintf(fd,"       zmmRes0 = _mm512_cvtepi8_epi16(p_bnProcBuf [j]);\n");
-        fprintf(fd,"       zmmRes1 = _mm512_cvtepi8_epi16(p_bnProcBuf [j +1]);\n");
+        fprintf(fd, "       zmmRes0 = simde_mm512_cvtepi8_epi16(p_bnProcBuf [j]);\n");
+        fprintf(fd, "       zmmRes1 = simde_mm512_cvtepi8_epi16(p_bnProcBuf [j +1]);\n");
 
-            // Loop over CNs
+        // Loop over CNs
         for (k=1; k<13; k++)
         {
-        fprintf(fd,"       zmm0 = _mm512_cvtepi8_epi16(p_bnProcBuf[%u + j]);\n", k*cnOffsetInGroup);
-        fprintf(fd,"       zmmRes0 = _mm512_adds_epi16(zmmRes0,zmm0);\n");
+          fprintf(fd, "       zmm0 = simde_mm512_cvtepi8_epi16(p_bnProcBuf[%u + j]);\n", k * cnOffsetInGroup);
+          fprintf(fd, "       zmmRes0 = simde_mm512_adds_epi16(zmmRes0,zmm0);\n");
 
-        fprintf(fd,"       zmm1 = _mm512_cvtepi8_epi16(p_bnProcBuf[%u + j +1]);\n", k*cnOffsetInGroup);
+          fprintf(fd, "       zmm1 = simde_mm512_cvtepi8_epi16(p_bnProcBuf[%u + j +1]);\n", k * cnOffsetInGroup);
 
-        fprintf(fd,"       zmmRes1 = _mm512_adds_epi16(zmmRes1,zmm1); \n");
+          fprintf(fd, "       zmmRes1 = simde_mm512_adds_epi16(zmmRes1,zmm1); \n");
             }
 
             // Add LLR from receiver input
-        fprintf(fd,"       zmm0    = _mm512_cvtepi8_epi16(p_llrProcBuf[j]);\n");
-        fprintf(fd,"       zmmRes0 = _mm512_adds_epi16(zmmRes0,zmm0);\n");
+            fprintf(fd, "       zmm0    = simde_mm512_cvtepi8_epi16(p_llrProcBuf[j]);\n");
+            fprintf(fd, "       zmmRes0 = simde_mm512_adds_epi16(zmmRes0,zmm0);\n");
 
-        fprintf(fd,"       zmm1    = _mm512_cvtepi8_epi16(p_llrProcBuf[j +1 ]);\n");
-        fprintf(fd,"       zmmRes1 = _mm512_adds_epi16(zmmRes1,zmm1);\n");
+            fprintf(fd, "       zmm1    = simde_mm512_cvtepi8_epi16(p_llrProcBuf[j +1 ]);\n");
+            fprintf(fd, "       zmmRes1 = simde_mm512_adds_epi16(zmmRes1,zmm1);\n");
 
             // Pack results back to epi8
-        fprintf(fd,"       zmm0 = _mm512_packs_epi16(zmmRes0,zmmRes1);\n");
+            fprintf(fd, "       zmm0 = simde_mm512_packs_epi16(zmmRes0,zmmRes1);\n");
             //zmm0     = [zmmRes1[255:256]zmmRes0[255:256]zmmRes1[127:0]zmmRes0[127:0]]
             // p_llrRes = [zmmRes1[255:256]zmmRes1[127:0]zmmRes0[255:256]zmmRes0[127:0]]
-        fprintf(fd,"            p_llrRes[i] = _mm512_permutex_epi64(zmm0, 0xD8);\n");
+            fprintf(fd, "            p_llrRes[i] = simde_mm512_permutex_epi64(zmm0, 0xD8);\n");
 
-        fprintf(fd,"}\n");
+            fprintf(fd, "}\n");
     }
 
 
@@ -843,38 +844,38 @@ fprintf(fd,  "// Process group with 14 CNs \n");
         cnOffsetInGroup = (lut_numBnInBnGroups[13]*NR_LDPC_ZMAX)>>5;
 
         // Set pointers to start of group 2
-        fprintf(fd,"    p_bnProcBuf     = (__m256i*) &bnProcBuf    [%u];\n",lut_startAddrBnGroups[idxBnGroup]);
-        fprintf(fd,"    p_llrProcBuf    = (__m256i*) &llrProcBuf   [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
-        fprintf(fd,"    p_llrRes        = (__m512i*) &llrRes       [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd, "    p_bnProcBuf     = (simde__m256i*) &bnProcBuf    [%u];\n", lut_startAddrBnGroups[idxBnGroup]);
+        fprintf(fd, "    p_llrProcBuf    = (simde__m256i*) &llrProcBuf   [%d];\n", lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd, "    p_llrRes        = (simde__m512i*) &llrRes       [%d];\n", lut_startAddrBnGroupsLlr[idxBnGroup]);
         // Loop over BNs
         fprintf(fd,"        for (int i=0,j=0;i<M;i++,j+=2) {\n");
             // First 16 LLRs of first CN
-        fprintf(fd,"       zmmRes0 = _mm512_cvtepi8_epi16(p_bnProcBuf [j]);\n");
-        fprintf(fd,"       zmmRes1 = _mm512_cvtepi8_epi16(p_bnProcBuf [j +1]);\n");
+        fprintf(fd, "       zmmRes0 = simde_mm512_cvtepi8_epi16(p_bnProcBuf [j]);\n");
+        fprintf(fd, "       zmmRes1 = simde_mm512_cvtepi8_epi16(p_bnProcBuf [j +1]);\n");
 
-            // Loop over CNs
+        // Loop over CNs
         for (k=1; k<14; k++)
         {
-        fprintf(fd,"       zmm0 = _mm512_cvtepi8_epi16(p_bnProcBuf[%u + j]);\n", k*cnOffsetInGroup);
-        fprintf(fd,"       zmmRes0 = _mm512_adds_epi16(zmmRes0,zmm0);\n");
+          fprintf(fd, "       zmm0 = simde_mm512_cvtepi8_epi16(p_bnProcBuf[%u + j]);\n", k * cnOffsetInGroup);
+          fprintf(fd, "       zmmRes0 = simde_mm512_adds_epi16(zmmRes0,zmm0);\n");
 
-        fprintf(fd,"       zmm1 = _mm512_cvtepi8_epi16(p_bnProcBuf[%u + j +1]);\n", k*cnOffsetInGroup);
+          fprintf(fd, "       zmm1 = simde_mm512_cvtepi8_epi16(p_bnProcBuf[%u + j +1]);\n", k * cnOffsetInGroup);
 
-        fprintf(fd, "      zmmRes1 = _mm512_adds_epi16(zmmRes1,zmm1); \n");
+          fprintf(fd, "      zmmRes1 = simde_mm512_adds_epi16(zmmRes1,zmm1); \n");
         }
 
             // Add LLR from receiver input
-        fprintf(fd,"       zmm0    = _mm512_cvtepi8_epi16(p_llrProcBuf[j]);\n");
-        fprintf(fd,"       zmmRes0 = _mm512_adds_epi16(zmmRes0,zmm0);\n");
+        fprintf(fd, "       zmm0    = simde_mm512_cvtepi8_epi16(p_llrProcBuf[j]);\n");
+        fprintf(fd, "       zmmRes0 = simde_mm512_adds_epi16(zmmRes0,zmm0);\n");
 
-        fprintf(fd,"       zmm1    = _mm512_cvtepi8_epi16(p_llrProcBuf[j +1 ]);\n");
-        fprintf(fd,"       zmmRes1 = _mm512_adds_epi16(zmmRes1,zmm1);\n");
+        fprintf(fd, "       zmm1    = simde_mm512_cvtepi8_epi16(p_llrProcBuf[j +1 ]);\n");
+        fprintf(fd, "       zmmRes1 = simde_mm512_adds_epi16(zmmRes1,zmm1);\n");
 
-            // Pack results back to epi8
-        fprintf(fd,"       zmm0 = _mm512_packs_epi16(zmmRes0,zmmRes1);\n");
-            //zmm0     = [zmmRes1[255:256]zmmRes0[255:256]zmmRes1[127:0]zmmRes0[127:0]]
-            // p_llrRes = [zmmRes1[255:256]zmmRes1[127:0]zmmRes0[255:256]zmmRes0[127:0]]
-        fprintf(fd,"            p_llrRes[i] = _mm512_permutex_epi64(zmm0, 0xD8);\n");
+        // Pack results back to epi8
+        fprintf(fd, "       zmm0 = simde_mm512_packs_epi16(zmmRes0,zmmRes1);\n");
+        // zmm0     = [zmmRes1[255:128]zmmRes0[255:128]zmmRes1[127:0]zmmRes0[127:0]]
+        // p_llrRes = [zmmRes1[255:128]zmmRes1[127:0]zmmRes0[255:128]zmmRes0[127:0]]
+        fprintf(fd, "            p_llrRes[i] = simde_mm512_permutex_epi64(zmm0, 0xD8);\n");
 
         fprintf(fd,"}\n");
     }
@@ -899,38 +900,38 @@ fprintf(fd,  "// Process group with 15 CNs \n");
         cnOffsetInGroup = (lut_numBnInBnGroups[14]*NR_LDPC_ZMAX)>>5;
 
         // Set pointers to start of group 2
-        fprintf(fd,"    p_bnProcBuf     = (__m256i*) &bnProcBuf    [%u];\n",lut_startAddrBnGroups[idxBnGroup]);
-        fprintf(fd,"    p_llrProcBuf    = (__m256i*) &llrProcBuf   [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
-        fprintf(fd,"    p_llrRes        = (__m512i*) &llrRes       [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd, "    p_bnProcBuf     = (simde__m256i*) &bnProcBuf    [%u];\n", lut_startAddrBnGroups[idxBnGroup]);
+        fprintf(fd, "    p_llrProcBuf    = (simde__m256i*) &llrProcBuf   [%d];\n", lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd, "    p_llrRes        = (simde__m512i*) &llrRes       [%d];\n", lut_startAddrBnGroupsLlr[idxBnGroup]);
         // Loop over BNs
         fprintf(fd,"        for (int i=0,j=0;i<M;i++,j+=2) {\n");
             // First 16 LLRs of first CN
-        fprintf(fd,"       zmmRes0 = _mm512_cvtepi8_epi16(p_bnProcBuf [j]);\n");
-        fprintf(fd,"       zmmRes1 = _mm512_cvtepi8_epi16(p_bnProcBuf [j +1]);\n");
+        fprintf(fd, "       zmmRes0 = simde_mm512_cvtepi8_epi16(p_bnProcBuf [j]);\n");
+        fprintf(fd, "       zmmRes1 = simde_mm512_cvtepi8_epi16(p_bnProcBuf [j +1]);\n");
 
-            // Loop over CNs
+        // Loop over CNs
         for (k=1; k<15; k++)
         {
-        fprintf(fd,"       zmm0 = _mm512_cvtepi8_epi16(p_bnProcBuf[%u + j]);\n", k*cnOffsetInGroup);
-        fprintf(fd,"       zmmRes0 = _mm512_adds_epi16(zmmRes0,zmm0);\n");
+          fprintf(fd, "       zmm0 = simde_mm512_cvtepi8_epi16(p_bnProcBuf[%u + j]);\n", k * cnOffsetInGroup);
+          fprintf(fd, "       zmmRes0 = simde_mm512_adds_epi16(zmmRes0,zmm0);\n");
 
-        fprintf(fd,"       zmm1 = _mm512_cvtepi8_epi16(p_bnProcBuf[%u + j +1]);\n", k*cnOffsetInGroup);
+          fprintf(fd, "       zmm1 = simde_mm512_cvtepi8_epi16(p_bnProcBuf[%u + j +1]);\n", k * cnOffsetInGroup);
 
-        fprintf(fd, "      zmmRes1 = _mm512_adds_epi16(zmmRes1,zmm1); \n");
+          fprintf(fd, "      zmmRes1 = simde_mm512_adds_epi16(zmmRes1,zmm1); \n");
         }
 
             // Add LLR from receiver input
-        fprintf(fd,"       zmm0    = _mm512_cvtepi8_epi16(p_llrProcBuf[j]);\n");
-        fprintf(fd,"       zmmRes0 = _mm512_adds_epi16(zmmRes0,zmm0);\n");
+        fprintf(fd, "       zmm0    = simde_mm512_cvtepi8_epi16(p_llrProcBuf[j]);\n");
+        fprintf(fd, "       zmmRes0 = simde_mm512_adds_epi16(zmmRes0,zmm0);\n");
 
-        fprintf(fd,"       zmm1    = _mm512_cvtepi8_epi16(p_llrProcBuf[j +1 ]);\n");
-        fprintf(fd,"       zmmRes1 = _mm512_adds_epi16(zmmRes1,zmm1);\n");
+        fprintf(fd, "       zmm1    = simde_mm512_cvtepi8_epi16(p_llrProcBuf[j +1 ]);\n");
+        fprintf(fd, "       zmmRes1 = simde_mm512_adds_epi16(zmmRes1,zmm1);\n");
 
-            // Pack results back to epi8
-        fprintf(fd,"       zmm0 = _mm512_packs_epi16(zmmRes0,zmmRes1);\n");
-            //zmm0     = [zmmRes1[255:256]zmmRes0[255:256]zmmRes1[127:0]zmmRes0[127:0]]
-            // p_llrRes = [zmmRes1[255:256]zmmRes1[127:0]zmmRes0[255:256]zmmRes0[127:0]]
-         fprintf(fd,"            p_llrRes[i] = _mm512_permutex_epi64(zmm0, 0xD8);\n");
+        // Pack results back to epi8
+        fprintf(fd, "       zmm0 = simde_mm512_packs_epi16(zmmRes0,zmmRes1);\n");
+        // zmm0     = [zmmRes1[255:128]zmmRes0[255:128]zmmRes1[127:0]zmmRes0[127:0]]
+        // p_llrRes = [zmmRes1[255:128]zmmRes1[127:0]zmmRes0[255:128]zmmRes0[127:0]]
+        fprintf(fd, "            p_llrRes[i] = simde_mm512_permutex_epi64(zmm0, 0xD8);\n");
 
         fprintf(fd,"}\n");
     }
@@ -956,38 +957,38 @@ fprintf(fd,  "// Process group with 16 CNs \n");
         cnOffsetInGroup = (lut_numBnInBnGroups[15]*NR_LDPC_ZMAX)>>5;
 
         // Set pointers to start of group 2
-        fprintf(fd,"    p_bnProcBuf     = (__m256i*) &bnProcBuf    [%u];\n",lut_startAddrBnGroups[idxBnGroup]);
-        fprintf(fd,"    p_llrProcBuf    = (__m256i*) &llrProcBuf   [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
-        fprintf(fd,"    p_llrRes        = (__m512i*) &llrRes       [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd, "    p_bnProcBuf     = (simde__m256i*) &bnProcBuf    [%u];\n", lut_startAddrBnGroups[idxBnGroup]);
+        fprintf(fd, "    p_llrProcBuf    = (simde__m256i*) &llrProcBuf   [%d];\n", lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd, "    p_llrRes        = (simde__m512i*) &llrRes       [%d];\n", lut_startAddrBnGroupsLlr[idxBnGroup]);
         // Loop over BNs
         fprintf(fd,"        for (int i=0,j=0;i<M;i++,j+=2) {\n");
             // First 16 LLRs of first CN
-        fprintf(fd,"       zmmRes0 = _mm512_cvtepi8_epi16(p_bnProcBuf [j]);\n");
-        fprintf(fd,"       zmmRes1 = _mm512_cvtepi8_epi16(p_bnProcBuf [j +1]);\n");
+        fprintf(fd, "       zmmRes0 = simde_mm512_cvtepi8_epi16(p_bnProcBuf [j]);\n");
+        fprintf(fd, "       zmmRes1 = simde_mm512_cvtepi8_epi16(p_bnProcBuf [j +1]);\n");
 
-            // Loop over CNs
+        // Loop over CNs
         for (k=1; k<16; k++)
         {
-        fprintf(fd,"       zmm0 = _mm512_cvtepi8_epi16(p_bnProcBuf[%u + j]);\n", k*cnOffsetInGroup);
-        fprintf(fd,"       zmmRes0 = _mm512_adds_epi16(zmmRes0,zmm0);\n");
+          fprintf(fd, "       zmm0 = simde_mm512_cvtepi8_epi16(p_bnProcBuf[%u + j]);\n", k * cnOffsetInGroup);
+          fprintf(fd, "       zmmRes0 = simde_mm512_adds_epi16(zmmRes0,zmm0);\n");
 
-        fprintf(fd,"       zmm1 = _mm512_cvtepi8_epi16(p_bnProcBuf[%u + j +1]);\n", k*cnOffsetInGroup);
+          fprintf(fd, "       zmm1 = simde_mm512_cvtepi8_epi16(p_bnProcBuf[%u + j +1]);\n", k * cnOffsetInGroup);
 
-        fprintf(fd, "      zmmRes1 = _mm512_adds_epi16(zmmRes1,zmm1); \n");
+          fprintf(fd, "      zmmRes1 = simde_mm512_adds_epi16(zmmRes1,zmm1); \n");
         }
 
             // Add LLR from receiver input
-        fprintf(fd,"       zmm0    = _mm512_cvtepi8_epi16(p_llrProcBuf[j]);\n");
-        fprintf(fd,"       zmmRes0 = _mm512_adds_epi16(zmmRes0,zmm0);\n");
+        fprintf(fd, "       zmm0    = simde_mm512_cvtepi8_epi16(p_llrProcBuf[j]);\n");
+        fprintf(fd, "       zmmRes0 = simde_mm512_adds_epi16(zmmRes0,zmm0);\n");
 
-        fprintf(fd,"       zmm1    = _mm512_cvtepi8_epi16(p_llrProcBuf[j +1 ]);\n");
-        fprintf(fd,"       zmmRes1 = _mm512_adds_epi16(zmmRes1,zmm1);\n");
+        fprintf(fd, "       zmm1    = simde_mm512_cvtepi8_epi16(p_llrProcBuf[j +1 ]);\n");
+        fprintf(fd, "       zmmRes1 = simde_mm512_adds_epi16(zmmRes1,zmm1);\n");
 
-            // Pack results back to epi8
-        fprintf(fd,"       zmm0 = _mm512_packs_epi16(zmmRes0,zmmRes1);\n");
-            //zmm0     = [zmmRes1[255:256]zmmRes0[255:256]zmmRes1[127:0]zmmRes0[127:0]]
-            // p_llrRes = [zmmRes1[255:256]zmmRes1[127:0]zmmRes0[255:256]zmmRes0[127:0]]
-        fprintf(fd,"            p_llrRes[i] = _mm512_permutex_epi64(zmm0, 0xD8);\n");
+        // Pack results back to epi8
+        fprintf(fd, "       zmm0 = simde_mm512_packs_epi16(zmmRes0,zmmRes1);\n");
+        // zmm0     = [zmmRes1[255:128]zmmRes0[255:128]zmmRes1[127:0]zmmRes0[127:0]]
+        // p_llrRes = [zmmRes1[255:128]zmmRes1[127:0]zmmRes0[255:128]zmmRes0[127:0]]
+        fprintf(fd, "            p_llrRes[i] = simde_mm512_permutex_epi64(zmm0, 0xD8);\n");
 
         fprintf(fd,"}\n");
     }
@@ -1012,38 +1013,38 @@ fprintf(fd,  "// Process group with 17 CNs \n");
         cnOffsetInGroup = (lut_numBnInBnGroups[16]*NR_LDPC_ZMAX)>>5;
 
         // Set pointers to start of group 2
-        fprintf(fd,"    p_bnProcBuf     = (__m256i*) &bnProcBuf    [%u];\n",lut_startAddrBnGroups[idxBnGroup]);
-        fprintf(fd,"    p_llrProcBuf    = (__m256i*) &llrProcBuf   [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
-        fprintf(fd,"    p_llrRes        = (__m512i*) &llrRes       [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd, "    p_bnProcBuf     = (simde__m256i*) &bnProcBuf    [%u];\n", lut_startAddrBnGroups[idxBnGroup]);
+        fprintf(fd, "    p_llrProcBuf    = (simde__m256i*) &llrProcBuf   [%d];\n", lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd, "    p_llrRes        = (simde__m512i*) &llrRes       [%d];\n", lut_startAddrBnGroupsLlr[idxBnGroup]);
         // Loop over BNs
         fprintf(fd,"        for (int i=0,j=0;i<M;i++,j+=2) {\n");
             // First 16 LLRs of first CN
-        fprintf(fd,"       zmmRes0 = _mm512_cvtepi8_epi16(p_bnProcBuf [j]);\n");
-        fprintf(fd,"       zmmRes1 = _mm512_cvtepi8_epi16(p_bnProcBuf [j +1]);\n");
+        fprintf(fd, "       zmmRes0 = simde_mm512_cvtepi8_epi16(p_bnProcBuf [j]);\n");
+        fprintf(fd, "       zmmRes1 = simde_mm512_cvtepi8_epi16(p_bnProcBuf [j +1]);\n");
 
-            // Loop over CNs
+        // Loop over CNs
         for (k=1; k<17; k++)
         {
-        fprintf(fd,"       zmm0 = _mm512_cvtepi8_epi16(p_bnProcBuf[%u + j]);\n", k*cnOffsetInGroup);
-        fprintf(fd,"       zmmRes0 = _mm512_adds_epi16(zmmRes0,zmm0);\n");
+          fprintf(fd, "       zmm0 = simde_mm512_cvtepi8_epi16(p_bnProcBuf[%u + j]);\n", k * cnOffsetInGroup);
+          fprintf(fd, "       zmmRes0 = simde_mm512_adds_epi16(zmmRes0,zmm0);\n");
 
-        fprintf(fd,"       zmm1 = _mm512_cvtepi8_epi16(p_bnProcBuf[%u + j +1]);\n", k*cnOffsetInGroup);
+          fprintf(fd, "       zmm1 = simde_mm512_cvtepi8_epi16(p_bnProcBuf[%u + j +1]);\n", k * cnOffsetInGroup);
 
-        fprintf(fd, "      zmmRes1 = _mm512_adds_epi16(zmmRes1,zmm1); \n");
+          fprintf(fd, "      zmmRes1 = simde_mm512_adds_epi16(zmmRes1,zmm1); \n");
         }
 
             // Add LLR from receiver input
-        fprintf(fd,"       zmm0    = _mm512_cvtepi8_epi16(p_llrProcBuf[j]);\n");
-        fprintf(fd,"       zmmRes0 = _mm512_adds_epi16(zmmRes0,zmm0);\n");
+        fprintf(fd, "       zmm0    = simde_mm512_cvtepi8_epi16(p_llrProcBuf[j]);\n");
+        fprintf(fd, "       zmmRes0 = simde_mm512_adds_epi16(zmmRes0,zmm0);\n");
 
-        fprintf(fd,"       zmm1    = _mm512_cvtepi8_epi16(p_llrProcBuf[j +1 ]);\n");
-        fprintf(fd,"       zmmRes1 = _mm512_adds_epi16(zmmRes1,zmm1);\n");
+        fprintf(fd, "       zmm1    = simde_mm512_cvtepi8_epi16(p_llrProcBuf[j +1 ]);\n");
+        fprintf(fd, "       zmmRes1 = simde_mm512_adds_epi16(zmmRes1,zmm1);\n");
 
-            // Pack results back to epi8
-        fprintf(fd,"       zmm0 = _mm512_packs_epi16(zmmRes0,zmmRes1);\n");
-            //zmm0     = [zmmRes1[255:256]zmmRes0[255:256]zmmRes1[127:0]zmmRes0[127:0]]
-            // p_llrRes = [zmmRes1[255:256]zmmRes1[127:0]zmmRes0[255:256]zmmRes0[127:0]]
-        fprintf(fd,"            p_llrRes[i] = _mm512_permutex_epi64(zmm0, 0xD8);\n");
+        // Pack results back to epi8
+        fprintf(fd, "       zmm0 = simde_mm512_packs_epi16(zmmRes0,zmmRes1);\n");
+        // zmm0     = [zmmRes1[255:128]zmmRes0[255:128]zmmRes1[127:0]zmmRes0[127:0]]
+        // p_llrRes = [zmmRes1[255:128]zmmRes1[127:0]zmmRes0[255:128]zmmRes0[127:0]]
+        fprintf(fd, "            p_llrRes[i] = simde_mm512_permutex_epi64(zmm0, 0xD8);\n");
 
         fprintf(fd,"}\n");
     }
@@ -1068,38 +1069,38 @@ fprintf(fd,  "// Process group with 18 CNs \n");
         cnOffsetInGroup = (lut_numBnInBnGroups[17]*NR_LDPC_ZMAX)>>5;
 
         // Set pointers to start of group 2
-        fprintf(fd,"    p_bnProcBuf     = (__m256i*) &bnProcBuf    [%u];\n",lut_startAddrBnGroups[idxBnGroup]);
-        fprintf(fd,"    p_llrProcBuf    = (__m256i*) &llrProcBuf   [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
-        fprintf(fd,"    p_llrRes        = (__m512i*) &llrRes       [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd, "    p_bnProcBuf     = (simde__m256i*) &bnProcBuf    [%u];\n", lut_startAddrBnGroups[idxBnGroup]);
+        fprintf(fd, "    p_llrProcBuf    = (simde__m256i*) &llrProcBuf   [%d];\n", lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd, "    p_llrRes        = (simde__m512i*) &llrRes       [%d];\n", lut_startAddrBnGroupsLlr[idxBnGroup]);
         // Loop over BNs
         fprintf(fd,"        for (int i=0,j=0;i<M;i++,j+=2) {\n");
             // First 16 LLRs of first CN
-        fprintf(fd,"       zmmRes0 = _mm512_cvtepi8_epi16(p_bnProcBuf [j]);\n");
-        fprintf(fd,"       zmmRes1 = _mm512_cvtepi8_epi16(p_bnProcBuf [j +1]);\n");
+        fprintf(fd, "       zmmRes0 = simde_mm512_cvtepi8_epi16(p_bnProcBuf [j]);\n");
+        fprintf(fd, "       zmmRes1 = simde_mm512_cvtepi8_epi16(p_bnProcBuf [j +1]);\n");
 
-            // Loop over CNs
+        // Loop over CNs
         for (k=1; k<18; k++)
         {
-        fprintf(fd,"       zmm0 = _mm512_cvtepi8_epi16(p_bnProcBuf[%u + j]);\n", k*cnOffsetInGroup);
-        fprintf(fd,"       zmmRes0 = _mm512_adds_epi16(zmmRes0,zmm0);\n");
+          fprintf(fd, "       zmm0 = simde_mm512_cvtepi8_epi16(p_bnProcBuf[%u + j]);\n", k * cnOffsetInGroup);
+          fprintf(fd, "       zmmRes0 = simde_mm512_adds_epi16(zmmRes0,zmm0);\n");
 
-        fprintf(fd,"       zmm1 = _mm512_cvtepi8_epi16(p_bnProcBuf[%u + j +1]);\n", k*cnOffsetInGroup);
+          fprintf(fd, "       zmm1 = simde_mm512_cvtepi8_epi16(p_bnProcBuf[%u + j +1]);\n", k * cnOffsetInGroup);
 
-        fprintf(fd, "      zmmRes1 = _mm512_adds_epi16(zmmRes1,zmm1); \n");
+          fprintf(fd, "      zmmRes1 = simde_mm512_adds_epi16(zmmRes1,zmm1); \n");
         }
 
             // Add LLR from receiver input
-        fprintf(fd,"       zmm0    = _mm512_cvtepi8_epi16(p_llrProcBuf[j]);\n");
-        fprintf(fd,"       zmmRes0 = _mm512_adds_epi16(zmmRes0,zmm0);\n");
+        fprintf(fd, "       zmm0    = simde_mm512_cvtepi8_epi16(p_llrProcBuf[j]);\n");
+        fprintf(fd, "       zmmRes0 = simde_mm512_adds_epi16(zmmRes0,zmm0);\n");
 
-        fprintf(fd,"       zmm1    = _mm512_cvtepi8_epi16(p_llrProcBuf[j +1 ]);\n");
-        fprintf(fd,"       zmmRes1 = _mm512_adds_epi16(zmmRes1,zmm1);\n");
+        fprintf(fd, "       zmm1    = simde_mm512_cvtepi8_epi16(p_llrProcBuf[j +1 ]);\n");
+        fprintf(fd, "       zmmRes1 = simde_mm512_adds_epi16(zmmRes1,zmm1);\n");
 
-            // Pack results back to epi8
-        fprintf(fd,"       zmm0 = _mm512_packs_epi16(zmmRes0,zmmRes1);\n");
-            //zmm0     = [zmmRes1[255:256]zmmRes0[255:256]zmmRes1[127:0]zmmRes0[127:0]]
-            // p_llrRes = [zmmRes1[255:256]zmmRes1[127:0]zmmRes0[255:256]zmmRes0[127:0]]
-        fprintf(fd,"            p_llrRes[i] = _mm512_permutex_epi64(zmm0, 0xD8);\n");
+        // Pack results back to epi8
+        fprintf(fd, "       zmm0 = simde_mm512_packs_epi16(zmmRes0,zmmRes1);\n");
+        // zmm0     = [zmmRes1[255:128]zmmRes0[255:128]zmmRes1[127:0]zmmRes0[127:0]]
+        // p_llrRes = [zmmRes1[255:128]zmmRes1[127:0]zmmRes0[255:128]zmmRes0[127:0]]
+        fprintf(fd, "            p_llrRes[i] = simde_mm512_permutex_epi64(zmm0, 0xD8);\n");
 
         fprintf(fd,"}\n");
     }
@@ -1123,38 +1124,38 @@ fprintf(fd,  "// Process group with 19 CNs \n");
         cnOffsetInGroup = (lut_numBnInBnGroups[18]*NR_LDPC_ZMAX)>>5;
 
         // Set pointers to start of group 2
-        fprintf(fd,"    p_bnProcBuf     = (__m256i*) &bnProcBuf    [%u];\n",lut_startAddrBnGroups[idxBnGroup]);
-        fprintf(fd,"    p_llrProcBuf    = (__m256i*) &llrProcBuf   [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
-        fprintf(fd,"    p_llrRes        = (__m512i*) &llrRes       [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd, "    p_bnProcBuf     = (simde__m256i*) &bnProcBuf    [%u];\n", lut_startAddrBnGroups[idxBnGroup]);
+        fprintf(fd, "    p_llrProcBuf    = (simde__m256i*) &llrProcBuf   [%d];\n", lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd, "    p_llrRes        = (simde__m512i*) &llrRes       [%d];\n", lut_startAddrBnGroupsLlr[idxBnGroup]);
         // Loop over BNs
         fprintf(fd,"        for (int i=0,j=0;i<M;i++,j+=2) {\n");
             // First 16 LLRs of first CN
-        fprintf(fd,"       zmmRes0 = _mm512_cvtepi8_epi16(p_bnProcBuf [j]);\n");
-        fprintf(fd,"       zmmRes1 = _mm512_cvtepi8_epi16(p_bnProcBuf [j +1]);\n");
+        fprintf(fd, "       zmmRes0 = simde_mm512_cvtepi8_epi16(p_bnProcBuf [j]);\n");
+        fprintf(fd, "       zmmRes1 = simde_mm512_cvtepi8_epi16(p_bnProcBuf [j +1]);\n");
 
-            // Loop over CNs
+        // Loop over CNs
         for (k=1; k<19; k++)
         {
-        fprintf(fd,"       zmm0 = _mm512_cvtepi8_epi16(p_bnProcBuf[%u + j]);\n", k*cnOffsetInGroup);
-        fprintf(fd,"       zmmRes0 = _mm512_adds_epi16(zmmRes0,zmm0);\n");
+          fprintf(fd, "       zmm0 = simde_mm512_cvtepi8_epi16(p_bnProcBuf[%u + j]);\n", k * cnOffsetInGroup);
+          fprintf(fd, "       zmmRes0 = simde_mm512_adds_epi16(zmmRes0,zmm0);\n");
 
-        fprintf(fd,"       zmm1 = _mm512_cvtepi8_epi16(p_bnProcBuf[%u + j +1]);\n", k*cnOffsetInGroup);
+          fprintf(fd, "       zmm1 = simde_mm512_cvtepi8_epi16(p_bnProcBuf[%u + j +1]);\n", k * cnOffsetInGroup);
 
-        fprintf(fd, "      zmmRes1 = _mm512_adds_epi16(zmmRes1,zmm1); \n");
+          fprintf(fd, "      zmmRes1 = simde_mm512_adds_epi16(zmmRes1,zmm1); \n");
         }
 
             // Add LLR from receiver input
-        fprintf(fd,"       zmm0    = _mm512_cvtepi8_epi16(p_llrProcBuf[j]);\n");
-        fprintf(fd,"       zmmRes0 = _mm512_adds_epi16(zmmRes0,zmm0);\n");
+        fprintf(fd, "       zmm0    = simde_mm512_cvtepi8_epi16(p_llrProcBuf[j]);\n");
+        fprintf(fd, "       zmmRes0 = simde_mm512_adds_epi16(zmmRes0,zmm0);\n");
 
-        fprintf(fd,"       zmm1    = _mm512_cvtepi8_epi16(p_llrProcBuf[j +1 ]);\n");
-        fprintf(fd,"       zmmRes1 = _mm512_adds_epi16(zmmRes1,zmm1);\n");
+        fprintf(fd, "       zmm1    = simde_mm512_cvtepi8_epi16(p_llrProcBuf[j +1 ]);\n");
+        fprintf(fd, "       zmmRes1 = simde_mm512_adds_epi16(zmmRes1,zmm1);\n");
 
-            // Pack results back to epi8
-        fprintf(fd,"       zmm0 = _mm512_packs_epi16(zmmRes0,zmmRes1);\n");
-            //zmm0     = [zmmRes1[255:256]zmmRes0[255:256]zmmRes1[127:0]zmmRes0[127:0]]
-            // p_llrRes = [zmmRes1[255:256]zmmRes1[127:0]zmmRes0[255:256]zmmRes0[127:0]]
-        fprintf(fd,"            p_llrRes[i] = _mm512_permutex_epi64(zmm0, 0xD8);\n");
+        // Pack results back to epi8
+        fprintf(fd, "       zmm0 = simde_mm512_packs_epi16(zmmRes0,zmmRes1);\n");
+        // zmm0     = [zmmRes1[255:128]zmmRes0[255:128]zmmRes1[127:0]zmmRes0[127:0]]
+        // p_llrRes = [zmmRes1[255:128]zmmRes1[127:0]zmmRes0[255:128]zmmRes0[127:0]]
+        fprintf(fd, "            p_llrRes[i] = simde_mm512_permutex_epi64(zmm0, 0xD8);\n");
 
         fprintf(fd,"}\n");
     }
@@ -1179,38 +1180,38 @@ fprintf(fd,  "// Process group with 20 CNs \n");
         cnOffsetInGroup = (lut_numBnInBnGroups[19]*NR_LDPC_ZMAX)>>5;
 
         // Set pointers to start of group 2
-        fprintf(fd,"    p_bnProcBuf     = (__m256i*) &bnProcBuf    [%u];\n",lut_startAddrBnGroups[idxBnGroup]);
-        fprintf(fd,"    p_llrProcBuf    = (__m256i*) &llrProcBuf   [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
-        fprintf(fd,"    p_llrRes        = (__m512i*) &llrRes       [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd, "    p_bnProcBuf     = (simde__m256i*) &bnProcBuf    [%u];\n", lut_startAddrBnGroups[idxBnGroup]);
+        fprintf(fd, "    p_llrProcBuf    = (simde__m256i*) &llrProcBuf   [%d];\n", lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd, "    p_llrRes        = (simde__m512i*) &llrRes       [%d];\n", lut_startAddrBnGroupsLlr[idxBnGroup]);
         // Loop over BNs
         fprintf(fd,"        for (int i=0,j=0;i<M;i++,j+=2) {\n");
             // First 16 LLRs of first CN
-        fprintf(fd,"       zmmRes0 = _mm512_cvtepi8_epi16(p_bnProcBuf [j]);\n");
-        fprintf(fd,"       zmmRes1 = _mm512_cvtepi8_epi16(p_bnProcBuf [j +1]);\n");
+        fprintf(fd, "       zmmRes0 = simde_mm512_cvtepi8_epi16(p_bnProcBuf [j]);\n");
+        fprintf(fd, "       zmmRes1 = simde_mm512_cvtepi8_epi16(p_bnProcBuf [j +1]);\n");
 
-            // Loop over CNs
+        // Loop over CNs
         for (k=1; k<20; k++)
         {
-        fprintf(fd,"       zmm0 = _mm512_cvtepi8_epi16(p_bnProcBuf[%u + j]);\n", k*cnOffsetInGroup);
-        fprintf(fd,"       zmmRes0 = _mm512_adds_epi16(zmmRes0,zmm0);\n");
+          fprintf(fd, "       zmm0 = simde_mm512_cvtepi8_epi16(p_bnProcBuf[%u + j]);\n", k * cnOffsetInGroup);
+          fprintf(fd, "       zmmRes0 = simde_mm512_adds_epi16(zmmRes0,zmm0);\n");
 
-        fprintf(fd,"       zmm1 = _mm512_cvtepi8_epi16(p_bnProcBuf[%u + j +1]);\n", k*cnOffsetInGroup);
+          fprintf(fd, "       zmm1 = simde_mm512_cvtepi8_epi16(p_bnProcBuf[%d + j +1]);\n", k * cnOffsetInGroup);
 
-        fprintf(fd, "      zmmRes1 = _mm512_adds_epi16(zmmRes1,zmm1); \n");
+          fprintf(fd, "      zmmRes1 = simde_mm512_adds_epi16(zmmRes1,zmm1); \n");
         }
 
             // Add LLR from receiver input
-        fprintf(fd,"       zmm0    = _mm512_cvtepi8_epi16(p_llrProcBuf[j]);\n");
-        fprintf(fd,"       zmmRes0 = _mm512_adds_epi16(zmmRes0,zmm0);\n");
+        fprintf(fd, "       zmm0    = simde_mm512_cvtepi8_epi16(p_llrProcBuf[j]);\n");
+        fprintf(fd, "       zmmRes0 = simde_mm512_adds_epi16(zmmRes0,zmm0);\n");
 
-        fprintf(fd,"       zmm1    = _mm512_cvtepi8_epi16(p_llrProcBuf[j +1 ]);\n");
-        fprintf(fd,"       zmmRes1 = _mm512_adds_epi16(zmmRes1,zmm1);\n");
+        fprintf(fd, "       zmm1    = simde_mm512_cvtepi8_epi16(p_llrProcBuf[j +1 ]);\n");
+        fprintf(fd, "       zmmRes1 = simde_mm512_adds_epi16(zmmRes1,zmm1);\n");
 
-            // Pack results back to epi8
-        fprintf(fd,"       zmm0 = _mm512_packs_epi16(zmmRes0,zmmRes1);\n");
-            //zmm0     = [zmmRes1[255:256]zmmRes0[255:256]zmmRes1[127:0]zmmRes0[127:0]]
-            // p_llrRes = [zmmRes1[255:256]zmmRes1[127:0]zmmRes0[255:256]zmmRes0[127:0]]
-        fprintf(fd,"            p_llrRes[i] = _mm512_permutex_epi64(zmm0, 0xD8);\n");
+        // Pack results back to epi8
+        fprintf(fd, "       zmm0 = simde_mm512_packs_epi16(zmmRes0,zmmRes1);\n");
+        // zmm0     = [zmmRes1[255:128]zmmRes0[255:128]zmmRes1[127:0]zmmRes0[127:0]]
+        // p_llrRes = [zmmRes1[255:128]zmmRes1[127:0]zmmRes0[255:128]zmmRes0[127:0]]
+        fprintf(fd, "            p_llrRes[i] = simde_mm512_permutex_epi64(zmm0, 0xD8);\n");
 
         fprintf(fd,"}\n");
     }
@@ -1239,38 +1240,38 @@ fprintf(fd,  "// Process group with 21 CNs \n");
         cnOffsetInGroup = (lut_numBnInBnGroups[20]*NR_LDPC_ZMAX)>>5;
 
         // Set pointers to start of group 2
-        fprintf(fd,"    p_bnProcBuf     = (__m256i*) &bnProcBuf    [%u];\n",lut_startAddrBnGroups[idxBnGroup]);
-        fprintf(fd,"    p_llrProcBuf    = (__m256i*) &llrProcBuf   [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
-        fprintf(fd,"    p_llrRes        = (__m512i*) &llrRes       [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd, "    p_bnProcBuf     = (simde__m256i*) &bnProcBuf    [%u];\n", lut_startAddrBnGroups[idxBnGroup]);
+        fprintf(fd, "    p_llrProcBuf    = (simde__m256i*) &llrProcBuf   [%d];\n", lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd, "    p_llrRes        = (simde__m512i*) &llrRes       [%d];\n", lut_startAddrBnGroupsLlr[idxBnGroup]);
         // Loop over BNs
         fprintf(fd,"            for (int i=0,j=0;i<M;i++,j+=2) {\n");
             // First 16 LLRs of first CN
-        fprintf(fd,"           zmmRes0 = _mm512_cvtepi8_epi16(p_bnProcBuf [j]);\n");
-        fprintf(fd,"           zmmRes1 = _mm512_cvtepi8_epi16(p_bnProcBuf [j +1]);\n");
+        fprintf(fd, "           zmmRes0 = simde_mm512_cvtepi8_epi16(p_bnProcBuf [j]);\n");
+        fprintf(fd, "           zmmRes1 = simde_mm512_cvtepi8_epi16(p_bnProcBuf [j +1]);\n");
 
-            // Loop over CNs
+        // Loop over CNs
         for (k=1; k<21; k++)
         {
-        fprintf(fd,"           zmm0 = _mm512_cvtepi8_epi16(p_bnProcBuf[%u + j]);\n", k*cnOffsetInGroup);
-        fprintf(fd,"           zmmRes0 = _mm512_adds_epi16(zmmRes0,zmm0);\n");
+          fprintf(fd, "           zmm0 = simde_mm512_cvtepi8_epi16(p_bnProcBuf[%u + j]);\n", k * cnOffsetInGroup);
+          fprintf(fd, "           zmmRes0 = simde_mm512_adds_epi16(zmmRes0,zmm0);\n");
 
-        fprintf(fd,"           zmm1 = _mm512_cvtepi8_epi16(p_bnProcBuf[%u + j +1]);\n", k*cnOffsetInGroup);
+          fprintf(fd, "           zmm1 = simde_mm512_cvtepi8_epi16(p_bnProcBuf[%u + j +1]);\n", k * cnOffsetInGroup);
 
-        fprintf(fd, "          zmmRes1 = _mm512_adds_epi16(zmmRes1,zmm1); \n");
+          fprintf(fd, "          zmmRes1 = simde_mm512_adds_epi16(zmmRes1,zmm1); \n");
         }
 
             // Add LLR from receiver input
-        fprintf(fd,"           zmm0    = _mm512_cvtepi8_epi16(p_llrProcBuf[j]);\n");
-        fprintf(fd,"           zmmRes0 = _mm512_adds_epi16(zmmRes0,zmm0);\n");
+        fprintf(fd, "           zmm0    = simde_mm512_cvtepi8_epi16(p_llrProcBuf[j]);\n");
+        fprintf(fd, "           zmmRes0 = simde_mm512_adds_epi16(zmmRes0,zmm0);\n");
 
-        fprintf(fd,"           zmm1    = _mm512_cvtepi8_epi16(p_llrProcBuf[j +1 ]);\n");
-        fprintf(fd,"           zmmRes1 = _mm512_adds_epi16(zmmRes1,zmm1);\n");
+        fprintf(fd, "           zmm1    = simde_mm512_cvtepi8_epi16(p_llrProcBuf[j +1 ]);\n");
+        fprintf(fd, "           zmmRes1 = simde_mm512_adds_epi16(zmmRes1,zmm1);\n");
 
-            // Pack results back to epi8
-        fprintf(fd,"           zmm0 = _mm512_packs_epi16(zmmRes0,zmmRes1);\n");
-            //zmm0     = [zmmRes1[255:256]zmmRes0[255:256]zmmRes1[127:0]zmmRes0[127:0]]
-            // p_llrRes = [zmmRes1[255:256]zmmRes1[127:0]zmmRes0[255:256]zmmRes0[127:0]]
-        fprintf(fd,"            p_llrRes[i] = _mm512_permutex_epi64(zmm0, 0xD8);\n");
+        // Pack results back to epi8
+        fprintf(fd, "           zmm0 = simde_mm512_packs_epi16(zmmRes0,zmmRes1);\n");
+        // zmm0     = [zmmRes1[255:128]zmmRes0[255:128]zmmRes1[127:0]zmmRes0[127:0]]
+        // p_llrRes = [zmmRes1[255:128]zmmRes1[127:0]zmmRes0[255:128]zmmRes0[127:0]]
+        fprintf(fd, "            p_llrRes[i] = simde_mm512_permutex_epi64(zmm0, 0xD8);\n");
 
         fprintf(fd,"}\n");
     }
@@ -1294,38 +1295,38 @@ fprintf(fd,  "// Process group with 22 CNs \n");
         cnOffsetInGroup = (lut_numBnInBnGroups[21]*NR_LDPC_ZMAX)>>5;
 
         // Set pointers to start of group 2
-        fprintf(fd,"    p_bnProcBuf     = (__m256i*) &bnProcBuf    [%u];\n",lut_startAddrBnGroups[idxBnGroup]);
-        fprintf(fd,"    p_llrProcBuf    = (__m256i*) &llrProcBuf   [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
-        fprintf(fd,"    p_llrRes        = (__m512i*) &llrRes       [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd, "    p_bnProcBuf     = (simde__m256i*) &bnProcBuf    [%u];\n", lut_startAddrBnGroups[idxBnGroup]);
+        fprintf(fd, "    p_llrProcBuf    = (simde__m256i*) &llrProcBuf   [%d];\n", lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd, "    p_llrRes        = (simde__m512i*) &llrRes       [%d];\n", lut_startAddrBnGroupsLlr[idxBnGroup]);
         // Loop over BNs
         fprintf(fd,"            for (int i=0,j=0;i<M;i++,j+=2) {\n");
             // First 16 LLRs of first CN
-        fprintf(fd,"           zmmRes0 = _mm512_cvtepi8_epi16(p_bnProcBuf [j]);\n");
-        fprintf(fd,"           zmmRes1 = _mm512_cvtepi8_epi16(p_bnProcBuf [j +1]);\n");
+        fprintf(fd, "           zmmRes0 = simde_mm512_cvtepi8_epi16(p_bnProcBuf [j]);\n");
+        fprintf(fd, "           zmmRes1 = simde_mm512_cvtepi8_epi16(p_bnProcBuf [j +1]);\n");
 
-            // Loop over CNs
+        // Loop over CNs
         for (k=1; k<22; k++)
         {
-        fprintf(fd,"           zmm0 = _mm512_cvtepi8_epi16(p_bnProcBuf[%u + j]);\n", k*cnOffsetInGroup);
-        fprintf(fd,"           zmmRes0 = _mm512_adds_epi16(zmmRes0,zmm0);\n");
+          fprintf(fd, "           zmm0 = simde_mm512_cvtepi8_epi16(p_bnProcBuf[%u + j]);\n", k * cnOffsetInGroup);
+          fprintf(fd, "           zmmRes0 = simde_mm512_adds_epi16(zmmRes0,zmm0);\n");
 
-        fprintf(fd,"           zmm1 = _mm512_cvtepi8_epi16(p_bnProcBuf[%u + j +1]);\n", k*cnOffsetInGroup);
+          fprintf(fd, "           zmm1 = simde_mm512_cvtepi8_epi16(p_bnProcBuf[%u + j +1]);\n", k * cnOffsetInGroup);
 
-        fprintf(fd, "          zmmRes1 = _mm512_adds_epi16(zmmRes1,zmm1); \n");
+          fprintf(fd, "          zmmRes1 = simde_mm512_adds_epi16(zmmRes1,zmm1); \n");
         }
 
             // Add LLR from receiver input
-        fprintf(fd,"           zmm0    = _mm512_cvtepi8_epi16(p_llrProcBuf[j]);\n");
-        fprintf(fd,"           zmmRes0 = _mm512_adds_epi16(zmmRes0,zmm0);\n");
+        fprintf(fd, "           zmm0    = simde_mm512_cvtepi8_epi16(p_llrProcBuf[j]);\n");
+        fprintf(fd, "           zmmRes0 = simde_mm512_adds_epi16(zmmRes0,zmm0);\n");
 
-        fprintf(fd,"           zmm1    = _mm512_cvtepi8_epi16(p_llrProcBuf[j +1 ]);\n");
-        fprintf(fd,"           zmmRes1 = _mm512_adds_epi16(zmmRes1,zmm1);\n");
+        fprintf(fd, "           zmm1    = simde_mm512_cvtepi8_epi16(p_llrProcBuf[j +1 ]);\n");
+        fprintf(fd, "           zmmRes1 = simde_mm512_adds_epi16(zmmRes1,zmm1);\n");
 
-            // Pack results back to epi8
-        fprintf(fd,"           zmm0 = _mm512_packs_epi16(zmmRes0,zmmRes1);\n");
-            //zmm0     = [zmmRes1[255:256]zmmRes0[255:256]zmmRes1[127:0]zmmRes0[127:0]]
-            // p_llrRes = [zmmRes1[255:256]zmmRes1[127:0]zmmRes0[255:256]zmmRes0[127:0]]
-         fprintf(fd,"            p_llrRes[i] = _mm512_permutex_epi64(zmm0, 0xD8);\n");
+        // Pack results back to epi8
+        fprintf(fd, "           zmm0 = simde_mm512_packs_epi16(zmmRes0,zmmRes1);\n");
+        // zmm0     = [zmmRes1[255:128]zmmRes0[255:128]zmmRes1[127:0]zmmRes0[127:0]]
+        // p_llrRes = [zmmRes1[255:128]zmmRes1[127:0]zmmRes0[255:128]zmmRes0[127:0]]
+        fprintf(fd, "            p_llrRes[i] = simde_mm512_permutex_epi64(zmm0, 0xD8);\n");
 
         fprintf(fd,"}\n");
     }
@@ -1350,40 +1351,40 @@ fprintf(fd,  "// Process group with <23 CNs \n");
         cnOffsetInGroup = (lut_numBnInBnGroups[22]*NR_LDPC_ZMAX)>>5;
 
         // Set pointers to start of group 2
-        fprintf(fd,"    p_bnProcBuf     = (__m256i*) &bnProcBuf    [%u];\n",lut_startAddrBnGroups[idxBnGroup]);
-        fprintf(fd,"    p_llrProcBuf    = (__m256i*) &llrProcBuf   [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
-        fprintf(fd,"    p_llrRes        = (__m512i*) &llrRes       [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd, "    p_bnProcBuf     = (simde__m256i*) &bnProcBuf    [%u];\n", lut_startAddrBnGroups[idxBnGroup]);
+        fprintf(fd, "    p_llrProcBuf    = (simde__m256i*) &llrProcBuf   [%d];\n", lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd, "    p_llrRes        = (simde__m512i*) &llrRes       [%d];\n", lut_startAddrBnGroupsLlr[idxBnGroup]);
         // Loop over BNs
         fprintf(fd,"        for (int i=0,j=0;i<M;i++,j+=2) {\n");
             // First 16 LLRs of first CN
-        fprintf(fd,"       zmmRes0 = _mm512_cvtepi8_epi16(p_bnProcBuf [j]);\n");
-        fprintf(fd,"       zmmRes1 = _mm512_cvtepi8_epi16(p_bnProcBuf [j +1]);\n");
+        fprintf(fd, "       zmmRes0 = simde_mm512_cvtepi8_epi16(p_bnProcBuf [j]);\n");
+        fprintf(fd, "       zmmRes1 = simde_mm512_cvtepi8_epi16(p_bnProcBuf [j +1]);\n");
 
-            // Loop over CNs
+        // Loop over CNs
         for (k=1; k<23; k++)
         {
-        fprintf(fd,"       zmm0 = _mm512_cvtepi8_epi16(p_bnProcBuf[%u + j]);\n", k*cnOffsetInGroup);
-        fprintf(fd,"       zmmRes0 = _mm512_adds_epi16(zmmRes0,zmm0);\n");
+          fprintf(fd, "       zmm0 = simde_mm512_cvtepi8_epi16(p_bnProcBuf[%u + j]);\n", k * cnOffsetInGroup);
+          fprintf(fd, "       zmmRes0 = simde_mm512_adds_epi16(zmmRes0,zmm0);\n");
 
-        fprintf(fd,"       zmm1 = _mm512_cvtepi8_epi16(p_bnProcBuf[%u + j +1]);\n", k*cnOffsetInGroup);
+          fprintf(fd, "       zmm1 = simde_mm512_cvtepi8_epi16(p_bnProcBuf[%u + j +1]);\n", k * cnOffsetInGroup);
 
-        fprintf(fd,"       zmmRes1 = _mm512_adds_epi16(zmmRes1,zmm1); \n");
+          fprintf(fd, "       zmmRes1 = simde_mm512_adds_epi16(zmmRes1,zmm1); \n");
             }
 
             // Add LLR from receiver input
-        fprintf(fd,"       zmm0    = _mm512_cvtepi8_epi16(p_llrProcBuf[j]);\n");
-        fprintf(fd,"       zmmRes0 = _mm512_adds_epi16(zmmRes0,zmm0);\n");
+            fprintf(fd, "       zmm0    = simde_mm512_cvtepi8_epi16(p_llrProcBuf[j]);\n");
+            fprintf(fd, "       zmmRes0 = simde_mm512_adds_epi16(zmmRes0,zmm0);\n");
 
-        fprintf(fd,"       zmm1    = _mm512_cvtepi8_epi16(p_llrProcBuf[j +1 ]);\n");
-        fprintf(fd,"       zmmRes1 = _mm512_adds_epi16(zmmRes1,zmm1);\n");
+            fprintf(fd, "       zmm1    = simde_mm512_cvtepi8_epi16(p_llrProcBuf[j +1 ]);\n");
+            fprintf(fd, "       zmmRes1 = simde_mm512_adds_epi16(zmmRes1,zmm1);\n");
 
             // Pack results back to epi8
-        fprintf(fd,"       zmm0 = _mm512_packs_epi16(zmmRes0,zmmRes1);\n");
+            fprintf(fd, "       zmm0 = simde_mm512_packs_epi16(zmmRes0,zmmRes1);\n");
             //zmm0     = [zmmRes1[255:256]zmmRes0[255:256]zmmRes1[127:0]zmmRes0[127:0]]
             // p_llrRes = [zmmRes1[255:256]zmmRes1[127:0]zmmRes0[255:256]zmmRes0[127:0]]
-        fprintf(fd,"            p_llrRes[i] = _mm512_permutex_epi64(zmm0, 0xD8);\n");
+            fprintf(fd, "            p_llrRes[i] = simde_mm512_permutex_epi64(zmm0, 0xD8);\n");
 
-        fprintf(fd,"}\n");
+            fprintf(fd, "}\n");
     }
 
 
@@ -1407,38 +1408,38 @@ fprintf(fd,  "// Process group with 24 CNs \n");
         cnOffsetInGroup = (lut_numBnInBnGroups[23]*NR_LDPC_ZMAX)>>5;
 
         // Set pointers to start of group 2
-        fprintf(fd,"    p_bnProcBuf     = (__m256i*) &bnProcBuf    [%u];\n",lut_startAddrBnGroups[idxBnGroup]);
-        fprintf(fd,"    p_llrProcBuf    = (__m256i*) &llrProcBuf   [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
-        fprintf(fd,"    p_llrRes        = (__m512i*) &llrRes       [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd, "    p_bnProcBuf     = (simde__m256i*) &bnProcBuf    [%u];\n", lut_startAddrBnGroups[idxBnGroup]);
+        fprintf(fd, "    p_llrProcBuf    = (simde__m256i*) &llrProcBuf   [%d];\n", lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd, "    p_llrRes        = (simde__m512i*) &llrRes       [%d];\n", lut_startAddrBnGroupsLlr[idxBnGroup]);
         // Loop over BNs
         fprintf(fd,"        for (int i=0,j=0;i<M;i++,j+=2) {\n");
             // First 16 LLRs of first CN
-        fprintf(fd,"       zmmRes0 = _mm512_cvtepi8_epi16(p_bnProcBuf [j]);\n");
-        fprintf(fd,"       zmmRes1 = _mm512_cvtepi8_epi16(p_bnProcBuf [j +1]);\n");
+        fprintf(fd, "       zmmRes0 = simde_mm512_cvtepi8_epi16(p_bnProcBuf [j]);\n");
+        fprintf(fd, "       zmmRes1 = simde_mm512_cvtepi8_epi16(p_bnProcBuf [j +1]);\n");
 
-            // Loop over CNs
+        // Loop over CNs
         for (k=1; k<24; k++)
         {
-        fprintf(fd,"       zmm0 = _mm512_cvtepi8_epi16(p_bnProcBuf[%u + j]);\n", k*cnOffsetInGroup);
-        fprintf(fd,"       zmmRes0 = _mm512_adds_epi16(zmmRes0,zmm0);\n");
+          fprintf(fd, "       zmm0 = simde_mm512_cvtepi8_epi16(p_bnProcBuf[%u + j]);\n", k * cnOffsetInGroup);
+          fprintf(fd, "       zmmRes0 = simde_mm512_adds_epi16(zmmRes0,zmm0);\n");
 
-        fprintf(fd,"       zmm1 = _mm512_cvtepi8_epi16(p_bnProcBuf[%u + j +1]);\n", k*cnOffsetInGroup);
+          fprintf(fd, "       zmm1 = simde_mm512_cvtepi8_epi16(p_bnProcBuf[%u + j +1]);\n", k * cnOffsetInGroup);
 
-        fprintf(fd, "      zmmRes1 = _mm512_adds_epi16(zmmRes1,zmm1); \n");
+          fprintf(fd, "      zmmRes1 = simde_mm512_adds_epi16(zmmRes1,zmm1); \n");
         }
 
             // Add LLR from receiver input
-        fprintf(fd,"       zmm0    = _mm512_cvtepi8_epi16(p_llrProcBuf[j]);\n");
-        fprintf(fd,"       zmmRes0 = _mm512_adds_epi16(zmmRes0,zmm0);\n");
+        fprintf(fd, "       zmm0    = simde_mm512_cvtepi8_epi16(p_llrProcBuf[j]);\n");
+        fprintf(fd, "       zmmRes0 = simde_mm512_adds_epi16(zmmRes0,zmm0);\n");
 
-        fprintf(fd,"       zmm1    = _mm512_cvtepi8_epi16(p_llrProcBuf[j +1 ]);\n");
-        fprintf(fd,"       zmmRes1 = _mm512_adds_epi16(zmmRes1,zmm1);\n");
+        fprintf(fd, "       zmm1    = simde_mm512_cvtepi8_epi16(p_llrProcBuf[j +1 ]);\n");
+        fprintf(fd, "       zmmRes1 = simde_mm512_adds_epi16(zmmRes1,zmm1);\n");
 
-            // Pack results back to epi8
-        fprintf(fd,"       zmm0 = _mm512_packs_epi16(zmmRes0,zmmRes1);\n");
-            //zmm0     = [zmmRes1[255:256]zmmRes0[255:256]zmmRes1[127:0]zmmRes0[127:0]]
-            // p_llrRes = [zmmRes1[255:256]zmmRes1[127:0]zmmRes0[255:256]zmmRes0[127:0]]
-        fprintf(fd,"            p_llrRes[i] = _mm512_permutex_epi64(zmm0, 0xD8);\n");
+        // Pack results back to epi8
+        fprintf(fd, "       zmm0 = simde_mm512_packs_epi16(zmmRes0,zmmRes1);\n");
+        // zmm0     = [zmmRes1[255:128]zmmRes0[255:128]zmmRes1[127:0]zmmRes0[127:0]]
+        // p_llrRes = [zmmRes1[255:128]zmmRes1[127:0]zmmRes0[255:128]zmmRes0[127:0]]
+        fprintf(fd, "            p_llrRes[i] = simde_mm512_permutex_epi64(zmm0, 0xD8);\n");
 
         fprintf(fd,"}\n");
     }
@@ -1463,38 +1464,38 @@ fprintf(fd,  "// Process group with 25 CNs \n");
         cnOffsetInGroup = (lut_numBnInBnGroups[24]*NR_LDPC_ZMAX)>>5;
 
         // Set pointers to start of group 2
-        fprintf(fd,"    p_bnProcBuf     = (__m256i*) &bnProcBuf    [%u];\n",lut_startAddrBnGroups[idxBnGroup]);
-        fprintf(fd,"    p_llrProcBuf    = (__m256i*) &llrProcBuf   [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
-        fprintf(fd,"    p_llrRes        = (__m512i*) &llrRes       [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd, "    p_bnProcBuf     = (simde__m256i*) &bnProcBuf    [%u];\n", lut_startAddrBnGroups[idxBnGroup]);
+        fprintf(fd, "    p_llrProcBuf    = (simde__m256i*) &llrProcBuf   [%d];\n", lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd, "    p_llrRes        = (simde__m512i*) &llrRes       [%d];\n", lut_startAddrBnGroupsLlr[idxBnGroup]);
         // Loop over BNs
         fprintf(fd,"        for (int i=0,j=0;i<M;i++,j+=2) {\n");
             // First 16 LLRs of first CN
-        fprintf(fd,"       zmmRes0 = _mm512_cvtepi8_epi16(p_bnProcBuf [j]);\n");
-        fprintf(fd,"       zmmRes1 = _mm512_cvtepi8_epi16(p_bnProcBuf [j +1]);\n");
+        fprintf(fd, "       zmmRes0 = simde_mm512_cvtepi8_epi16(p_bnProcBuf [j]);\n");
+        fprintf(fd, "       zmmRes1 = simde_mm512_cvtepi8_epi16(p_bnProcBuf [j +1]);\n");
 
-            // Loop over CNs
+        // Loop over CNs
         for (k=1; k<25; k++)
         {
-        fprintf(fd,"       zmm0 = _mm512_cvtepi8_epi16(p_bnProcBuf[%u + j]);\n", k*cnOffsetInGroup);
-        fprintf(fd,"       zmmRes0 = _mm512_adds_epi16(zmmRes0,zmm0);\n");
+          fprintf(fd, "       zmm0 = simde_mm512_cvtepi8_epi16(p_bnProcBuf[%u + j]);\n", k * cnOffsetInGroup);
+          fprintf(fd, "       zmmRes0 = simde_mm512_adds_epi16(zmmRes0,zmm0);\n");
 
-        fprintf(fd,"       zmm1 = _mm512_cvtepi8_epi16(p_bnProcBuf[%u + j +1]);\n", k*cnOffsetInGroup);
+          fprintf(fd, "       zmm1 = simde_mm512_cvtepi8_epi16(p_bnProcBuf[%u + j +1]);\n", k * cnOffsetInGroup);
 
-        fprintf(fd, "      zmmRes1 = _mm512_adds_epi16(zmmRes1,zmm1); \n");
+          fprintf(fd, "      zmmRes1 = simde_mm512_adds_epi16(zmmRes1,zmm1); \n");
         }
 
             // Add LLR from receiver input
-        fprintf(fd,"       zmm0    = _mm512_cvtepi8_epi16(p_llrProcBuf[j]);\n");
-        fprintf(fd,"       zmmRes0 = _mm512_adds_epi16(zmmRes0,zmm0);\n");
+        fprintf(fd, "       zmm0    = simde_mm512_cvtepi8_epi16(p_llrProcBuf[j]);\n");
+        fprintf(fd, "       zmmRes0 = simde_mm512_adds_epi16(zmmRes0,zmm0);\n");
 
-        fprintf(fd,"       zmm1    = _mm512_cvtepi8_epi16(p_llrProcBuf[j +1 ]);\n");
-        fprintf(fd,"       zmmRes1 = _mm512_adds_epi16(zmmRes1,zmm1);\n");
+        fprintf(fd, "       zmm1    = simde_mm512_cvtepi8_epi16(p_llrProcBuf[j +1 ]);\n");
+        fprintf(fd, "       zmmRes1 = simde_mm512_adds_epi16(zmmRes1,zmm1);\n");
 
-            // Pack results back to epi8
-        fprintf(fd,"       zmm0 = _mm512_packs_epi16(zmmRes0,zmmRes1);\n");
-            //zmm0     = [zmmRes1[255:256]zmmRes0[255:256]zmmRes1[127:0]zmmRes0[127:0]]
-            // p_llrRes = [zmmRes1[255:256]zmmRes1[127:0]zmmRes0[255:256]zmmRes0[127:0]]
-        fprintf(fd,"            p_llrRes[i] = _mm512_permutex_epi64(zmm0, 0xD8);\n");
+        // Pack results back to epi8
+        fprintf(fd, "       zmm0 = simde_mm512_packs_epi16(zmmRes0,zmmRes1);\n");
+        // zmm0     = [zmmRes1[255:128]zmmRes0[255:128]zmmRes1[127:0]zmmRes0[127:0]]
+        // p_llrRes = [zmmRes1[255:128]zmmRes1[127:0]zmmRes0[255:128]zmmRes0[127:0]]
+        fprintf(fd, "            p_llrRes[i] = simde_mm512_permutex_epi64(zmm0, 0xD8);\n");
 
         fprintf(fd,"}\n");
     }
@@ -1520,38 +1521,38 @@ fprintf(fd,  "// Process group with 26 CNs \n");
         cnOffsetInGroup = (lut_numBnInBnGroups[25]*NR_LDPC_ZMAX)>>5;
 
         // Set pointers to start of group 2
-        fprintf(fd,"    p_bnProcBuf     = (__m256i*) &bnProcBuf    [%u];\n",lut_startAddrBnGroups[idxBnGroup]);
-        fprintf(fd,"    p_llrProcBuf    = (__m256i*) &llrProcBuf   [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
-        fprintf(fd,"    p_llrRes        = (__m512i*) &llrRes       [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd, "    p_bnProcBuf     = (simde__m256i*) &bnProcBuf    [%u];\n", lut_startAddrBnGroups[idxBnGroup]);
+        fprintf(fd, "    p_llrProcBuf    = (simde__m256i*) &llrProcBuf   [%d];\n", lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd, "    p_llrRes        = (simde__m512i*) &llrRes       [%d];\n", lut_startAddrBnGroupsLlr[idxBnGroup]);
         // Loop over BNs
         fprintf(fd,"        for (int i=0,j=0;i<M;i++,j+=2) {\n");
             // First 16 LLRs of first CN
-        fprintf(fd,"       zmmRes0 = _mm512_cvtepi8_epi16(p_bnProcBuf [j]);\n");
-        fprintf(fd,"       zmmRes1 = _mm512_cvtepi8_epi16(p_bnProcBuf [j +1]);\n");
+        fprintf(fd, "       zmmRes0 = simde_mm512_cvtepi8_epi16(p_bnProcBuf [j]);\n");
+        fprintf(fd, "       zmmRes1 = simde_mm512_cvtepi8_epi16(p_bnProcBuf [j +1]);\n");
 
-            // Loop over CNs
+        // Loop over CNs
         for (k=1; k<26; k++)
         {
-        fprintf(fd,"       zmm0 = _mm512_cvtepi8_epi16(p_bnProcBuf[%u + j]);\n", k*cnOffsetInGroup);
-        fprintf(fd,"       zmmRes0 = _mm512_adds_epi16(zmmRes0,zmm0);\n");
+          fprintf(fd, "       zmm0 = simde_mm512_cvtepi8_epi16(p_bnProcBuf[%u + j]);\n", k * cnOffsetInGroup);
+          fprintf(fd, "       zmmRes0 = simde_mm512_adds_epi16(zmmRes0,zmm0);\n");
 
-        fprintf(fd,"       zmm1 = _mm512_cvtepi8_epi16(p_bnProcBuf[%u + j +1]);\n", k*cnOffsetInGroup);
+          fprintf(fd, "       zmm1 = simde_mm512_cvtepi8_epi16(p_bnProcBuf[%u + j +1]);\n", k * cnOffsetInGroup);
 
-        fprintf(fd, "      zmmRes1 = _mm512_adds_epi16(zmmRes1,zmm1); \n");
+          fprintf(fd, "      zmmRes1 = simde_mm512_adds_epi16(zmmRes1,zmm1); \n");
         }
 
             // Add LLR from receiver input
-        fprintf(fd,"       zmm0    = _mm512_cvtepi8_epi16(p_llrProcBuf[j]);\n");
-        fprintf(fd,"       zmmRes0 = _mm512_adds_epi16(zmmRes0,zmm0);\n");
+        fprintf(fd, "       zmm0    = simde_mm512_cvtepi8_epi16(p_llrProcBuf[j]);\n");
+        fprintf(fd, "       zmmRes0 = simde_mm512_adds_epi16(zmmRes0,zmm0);\n");
 
-        fprintf(fd,"       zmm1    = _mm512_cvtepi8_epi16(p_llrProcBuf[j +1 ]);\n");
-        fprintf(fd,"       zmmRes1 = _mm512_adds_epi16(zmmRes1,zmm1);\n");
+        fprintf(fd, "       zmm1    = simde_mm512_cvtepi8_epi16(p_llrProcBuf[j +1 ]);\n");
+        fprintf(fd, "       zmmRes1 = simde_mm512_adds_epi16(zmmRes1,zmm1);\n");
 
-            // Pack results back to epi8
-        fprintf(fd,"       zmm0 = _mm512_packs_epi16(zmmRes0,zmmRes1);\n");
-            //zmm0     = [zmmRes1[255:256]zmmRes0[255:256]zmmRes1[127:0]zmmRes0[127:0]]
-            // p_llrRes = [zmmRes1[255:256]zmmRes1[127:0]zmmRes0[255:256]zmmRes0[127:0]]
-        fprintf(fd,"            p_llrRes[i] = _mm512_permutex_epi64(zmm0, 0xD8);\n");
+        // Pack results back to epi8
+        fprintf(fd, "       zmm0 = simde_mm512_packs_epi16(zmmRes0,zmmRes1);\n");
+        // zmm0     = [zmmRes1[255:128]zmmRes0[255:128]zmmRes1[127:0]zmmRes0[127:0]]
+        // p_llrRes = [zmmRes1[255:128]zmmRes1[127:0]zmmRes0[255:128]zmmRes0[127:0]]
+        fprintf(fd, "            p_llrRes[i] = simde_mm512_permutex_epi64(zmm0, 0xD8);\n");
 
         fprintf(fd,"}\n");
     }
@@ -1576,38 +1577,38 @@ fprintf(fd,  "// Process group with 27 CNs \n");
         cnOffsetInGroup = (lut_numBnInBnGroups[26]*NR_LDPC_ZMAX)>>5;
 
         // Set pointers to start of group 2
-        fprintf(fd,"    p_bnProcBuf     = (__m256i*) &bnProcBuf    [%u];\n",lut_startAddrBnGroups[idxBnGroup]);
-        fprintf(fd,"    p_llrProcBuf    = (__m256i*) &llrProcBuf   [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
-        fprintf(fd,"    p_llrRes        = (__m512i*) &llrRes       [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd, "    p_bnProcBuf     = (simde__m256i*) &bnProcBuf    [%u];\n", lut_startAddrBnGroups[idxBnGroup]);
+        fprintf(fd, "    p_llrProcBuf    = (simde__m256i*) &llrProcBuf   [%d];\n", lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd, "    p_llrRes        = (simde__m512i*) &llrRes       [%d];\n", lut_startAddrBnGroupsLlr[idxBnGroup]);
         // Loop over BNs
         fprintf(fd,"        for (int i=0,j=0;i<M;i++,j+=2) {\n");
             // First 16 LLRs of first CN
-        fprintf(fd,"       zmmRes0 = _mm512_cvtepi8_epi16(p_bnProcBuf [j]);\n");
-        fprintf(fd,"       zmmRes1 = _mm512_cvtepi8_epi16(p_bnProcBuf [j +1]);\n");
+        fprintf(fd, "       zmmRes0 = simde_mm512_cvtepi8_epi16(p_bnProcBuf [j]);\n");
+        fprintf(fd, "       zmmRes1 = simde_mm512_cvtepi8_epi16(p_bnProcBuf [j +1]);\n");
 
-            // Loop over CNs
+        // Loop over CNs
         for (k=1; k<27; k++)
         {
-        fprintf(fd,"       zmm0 = _mm512_cvtepi8_epi16(p_bnProcBuf[%u + j]);\n", k*cnOffsetInGroup);
-        fprintf(fd,"       zmmRes0 = _mm512_adds_epi16(zmmRes0,zmm0);\n");
+          fprintf(fd, "       zmm0 = simde_mm512_cvtepi8_epi16(p_bnProcBuf[%u + j]);\n", k * cnOffsetInGroup);
+          fprintf(fd, "       zmmRes0 = simde_mm512_adds_epi16(zmmRes0,zmm0);\n");
 
-        fprintf(fd,"       zmm1 = _mm512_cvtepi8_epi16(p_bnProcBuf[%u + j +1]);\n", k*cnOffsetInGroup);
+          fprintf(fd, "       zmm1 = simde_mm512_cvtepi8_epi16(p_bnProcBuf[%u + j +1]);\n", k * cnOffsetInGroup);
 
-        fprintf(fd, "      zmmRes1 = _mm512_adds_epi16(zmmRes1,zmm1); \n");
+          fprintf(fd, "      zmmRes1 = simde_mm512_adds_epi16(zmmRes1,zmm1); \n");
         }
 
             // Add LLR from receiver input
-        fprintf(fd,"       zmm0    = _mm512_cvtepi8_epi16(p_llrProcBuf[j]);\n");
-        fprintf(fd,"       zmmRes0 = _mm512_adds_epi16(zmmRes0,zmm0);\n");
+        fprintf(fd, "       zmm0    = simde_mm512_cvtepi8_epi16(p_llrProcBuf[j]);\n");
+        fprintf(fd, "       zmmRes0 = simde_mm512_adds_epi16(zmmRes0,zmm0);\n");
 
-        fprintf(fd,"       zmm1    = _mm512_cvtepi8_epi16(p_llrProcBuf[j +1 ]);\n");
-        fprintf(fd,"       zmmRes1 = _mm512_adds_epi16(zmmRes1,zmm1);\n");
+        fprintf(fd, "       zmm1    = simde_mm512_cvtepi8_epi16(p_llrProcBuf[j +1 ]);\n");
+        fprintf(fd, "       zmmRes1 = simde_mm512_adds_epi16(zmmRes1,zmm1);\n");
 
-            // Pack results back to epi8
-        fprintf(fd,"       zmm0 = _mm512_packs_epi16(zmmRes0,zmmRes1);\n");
-            //zmm0     = [zmmRes1[255:256]zmmRes0[255:256]zmmRes1[127:0]zmmRes0[127:0]]
-            // p_llrRes = [zmmRes1[255:256]zmmRes1[127:0]zmmRes0[255:256]zmmRes0[127:0]]
-        fprintf(fd,"            p_llrRes[i] = _mm512_permutex_epi64(zmm0, 0xD8);\n");
+        // Pack results back to epi8
+        fprintf(fd, "       zmm0 = simde_mm512_packs_epi16(zmmRes0,zmmRes1);\n");
+        // zmm0     = [zmmRes1[255:128]zmmRes0[255:128]zmmRes1[127:0]zmmRes0[127:0]]
+        // p_llrRes = [zmmRes1[255:128]zmmRes1[127:0]zmmRes0[255:128]zmmRes0[127:0]]
+        fprintf(fd, "            p_llrRes[i] = simde_mm512_permutex_epi64(zmm0, 0xD8);\n");
 
         fprintf(fd,"}\n");
     }
@@ -1632,38 +1633,38 @@ fprintf(fd,  "// Process group with 28 CNs \n");
         cnOffsetInGroup = (lut_numBnInBnGroups[27]*NR_LDPC_ZMAX)>>5;
 
         // Set pointers to start of group 2
-        fprintf(fd,"    p_bnProcBuf     = (__m256i*) &bnProcBuf    [%u];\n",lut_startAddrBnGroups[idxBnGroup]);
-        fprintf(fd,"    p_llrProcBuf    = (__m256i*) &llrProcBuf   [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
-        fprintf(fd,"    p_llrRes        = (__m512i*) &llrRes       [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd, "    p_bnProcBuf     = (simde__m256i*) &bnProcBuf    [%u];\n", lut_startAddrBnGroups[idxBnGroup]);
+        fprintf(fd, "    p_llrProcBuf    = (simde__m256i*) &llrProcBuf   [%d];\n", lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd, "    p_llrRes        = (simde__m512i*) &llrRes       [%d];\n", lut_startAddrBnGroupsLlr[idxBnGroup]);
         // Loop over BNs
         fprintf(fd,"        for (int i=0,j=0;i<M;i++,j+=2) {\n");
             // First 16 LLRs of first CN
-        fprintf(fd,"       zmmRes0 = _mm512_cvtepi8_epi16(p_bnProcBuf [j]);\n");
-        fprintf(fd,"       zmmRes1 = _mm512_cvtepi8_epi16(p_bnProcBuf [j +1]);\n");
+        fprintf(fd, "       zmmRes0 = simde_mm512_cvtepi8_epi16(p_bnProcBuf [j]);\n");
+        fprintf(fd, "       zmmRes1 = simde_mm512_cvtepi8_epi16(p_bnProcBuf [j +1]);\n");
 
-            // Loop over CNs
+        // Loop over CNs
         for (k=1; k<28; k++)
         {
-        fprintf(fd,"       zmm0 = _mm512_cvtepi8_epi16(p_bnProcBuf[%u + j]);\n", k*cnOffsetInGroup);
-        fprintf(fd,"       zmmRes0 = _mm512_adds_epi16(zmmRes0,zmm0);\n");
+          fprintf(fd, "       zmm0 = simde_mm512_cvtepi8_epi16(p_bnProcBuf[%u + j]);\n", k * cnOffsetInGroup);
+          fprintf(fd, "       zmmRes0 = simde_mm512_adds_epi16(zmmRes0,zmm0);\n");
 
-        fprintf(fd,"       zmm1 = _mm512_cvtepi8_epi16(p_bnProcBuf[%u + j +1]);\n", k*cnOffsetInGroup);
+          fprintf(fd, "       zmm1 = simde_mm512_cvtepi8_epi16(p_bnProcBuf[%u + j +1]);\n", k * cnOffsetInGroup);
 
-        fprintf(fd, "      zmmRes1 = _mm512_adds_epi16(zmmRes1,zmm1); \n");
+          fprintf(fd, "      zmmRes1 = simde_mm512_adds_epi16(zmmRes1,zmm1); \n");
         }
 
             // Add LLR from receiver input
-        fprintf(fd,"       zmm0    = _mm512_cvtepi8_epi16(p_llrProcBuf[j]);\n");
-        fprintf(fd,"       zmmRes0 = _mm512_adds_epi16(zmmRes0,zmm0);\n");
+        fprintf(fd, "       zmm0    = simde_mm512_cvtepi8_epi16(p_llrProcBuf[j]);\n");
+        fprintf(fd, "       zmmRes0 = simde_mm512_adds_epi16(zmmRes0,zmm0);\n");
 
-        fprintf(fd,"       zmm1    = _mm512_cvtepi8_epi16(p_llrProcBuf[j +1 ]);\n");
-        fprintf(fd,"       zmmRes1 = _mm512_adds_epi16(zmmRes1,zmm1);\n");
+        fprintf(fd, "       zmm1    = simde_mm512_cvtepi8_epi16(p_llrProcBuf[j +1 ]);\n");
+        fprintf(fd, "       zmmRes1 = simde_mm512_adds_epi16(zmmRes1,zmm1);\n");
 
-            // Pack results back to epi8
-        fprintf(fd,"       zmm0 = _mm512_packs_epi16(zmmRes0,zmmRes1);\n");
-            //zmm0     = [zmmRes1[255:256]zmmRes0[255:256]zmmRes1[127:0]zmmRes0[127:0]]
-            // p_llrRes = [zmmRes1[255:256]zmmRes1[127:0]zmmRes0[255:256]zmmRes0[127:0]]
-        fprintf(fd,"            p_llrRes[i] = _mm512_permutex_epi64(zmm0, 0xD8);\n");
+        // Pack results back to epi8
+        fprintf(fd, "       zmm0 = simde_mm512_packs_epi16(zmmRes0,zmmRes1);\n");
+        // zmm0     = [zmmRes1[255:128]zmmRes0[255:128]zmmRes1[127:0]zmmRes0[127:0]]
+        // p_llrRes = [zmmRes1[255:128]zmmRes1[127:0]zmmRes0[255:128]zmmRes0[127:0]]
+        fprintf(fd, "            p_llrRes[i] = simde_mm512_permutex_epi64(zmm0, 0xD8);\n");
 
         fprintf(fd,"}\n");
     }
@@ -1687,38 +1688,38 @@ fprintf(fd,  "// Process group with 29 CNs \n");
         cnOffsetInGroup = (lut_numBnInBnGroups[28]*NR_LDPC_ZMAX)>>5;
 
         // Set pointers to start of group 2
-        fprintf(fd,"    p_bnProcBuf     = (__m256i*) &bnProcBuf    [%u];\n",lut_startAddrBnGroups[idxBnGroup]);
-        fprintf(fd,"    p_llrProcBuf    = (__m256i*) &llrProcBuf   [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
-        fprintf(fd,"    p_llrRes        = (__m512i*) &llrRes       [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd, "    p_bnProcBuf     = (simde__m256i*) &bnProcBuf    [%u];\n", lut_startAddrBnGroups[idxBnGroup]);
+        fprintf(fd, "    p_llrProcBuf    = (simde__m256i*) &llrProcBuf   [%d];\n", lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd, "    p_llrRes        = (simde__m512i*) &llrRes       [%d];\n", lut_startAddrBnGroupsLlr[idxBnGroup]);
         // Loop over BNs
         fprintf(fd,"        for (int i=0,j=0;i<M;i++,j+=2) {\n");
             // First 16 LLRs of first CN
-        fprintf(fd,"       zmmRes0 = _mm512_cvtepi8_epi16(p_bnProcBuf [j]);\n");
-        fprintf(fd,"       zmmRes1 = _mm512_cvtepi8_epi16(p_bnProcBuf [j +1]);\n");
+        fprintf(fd, "       zmmRes0 = simde_mm512_cvtepi8_epi16(p_bnProcBuf [j]);\n");
+        fprintf(fd, "       zmmRes1 = simde_mm512_cvtepi8_epi16(p_bnProcBuf [j +1]);\n");
 
-            // Loop over CNs
+        // Loop over CNs
         for (k=1; k<29; k++)
         {
-        fprintf(fd,"       zmm0 = _mm512_cvtepi8_epi16(p_bnProcBuf[%u + j]);\n", k*cnOffsetInGroup);
-        fprintf(fd,"       zmmRes0 = _mm512_adds_epi16(zmmRes0,zmm0);\n");
+          fprintf(fd, "       zmm0 = simde_mm512_cvtepi8_epi16(p_bnProcBuf[%u + j]);\n", k * cnOffsetInGroup);
+          fprintf(fd, "       zmmRes0 = simde_mm512_adds_epi16(zmmRes0,zmm0);\n");
 
-        fprintf(fd,"       zmm1 = _mm512_cvtepi8_epi16(p_bnProcBuf[%u + j +1]);\n", k*cnOffsetInGroup);
+          fprintf(fd, "       zmm1 = simde_mm512_cvtepi8_epi16(p_bnProcBuf[%u + j +1]);\n", k * cnOffsetInGroup);
 
-        fprintf(fd, "      zmmRes1 = _mm512_adds_epi16(zmmRes1,zmm1); \n");
+          fprintf(fd, "      zmmRes1 = simde_mm512_adds_epi16(zmmRes1,zmm1); \n");
         }
 
             // Add LLR from receiver input
-        fprintf(fd,"       zmm0    = _mm512_cvtepi8_epi16(p_llrProcBuf[j]);\n");
-        fprintf(fd,"       zmmRes0 = _mm512_adds_epi16(zmmRes0,zmm0);\n");
+        fprintf(fd, "       zmm0    = simde_mm512_cvtepi8_epi16(p_llrProcBuf[j]);\n");
+        fprintf(fd, "       zmmRes0 = simde_mm512_adds_epi16(zmmRes0,zmm0);\n");
 
-        fprintf(fd,"       zmm1    = _mm512_cvtepi8_epi16(p_llrProcBuf[j +1 ]);\n");
-        fprintf(fd,"       zmmRes1 = _mm512_adds_epi16(zmmRes1,zmm1);\n");
+        fprintf(fd, "       zmm1    = simde_mm512_cvtepi8_epi16(p_llrProcBuf[j +1 ]);\n");
+        fprintf(fd, "       zmmRes1 = simde_mm512_adds_epi16(zmmRes1,zmm1);\n");
 
-            // Pack results back to epi8
-        fprintf(fd,"       zmm0 = _mm512_packs_epi16(zmmRes0,zmmRes1);\n");
-            //zmm0     = [zmmRes1[255:256]zmmRes0[255:256]zmmRes1[127:0]zmmRes0[127:0]]
-            // p_llrRes = [zmmRes1[255:256]zmmRes1[127:0]zmmRes0[255:256]zmmRes0[127:0]]
-        fprintf(fd,"            p_llrRes[i] = _mm512_permutex_epi64(zmm0, 0xD8);\n");
+        // Pack results back to epi8
+        fprintf(fd, "       zmm0 = simde_mm512_packs_epi16(zmmRes0,zmmRes1);\n");
+        // zmm0     = [zmmRes1[255:128]zmmRes0[255:128]zmmRes1[127:0]zmmRes0[127:0]]
+        // p_llrRes = [zmmRes1[255:128]zmmRes1[127:0]zmmRes0[255:128]zmmRes0[127:0]]
+        fprintf(fd, "            p_llrRes[i] = simde_mm512_permutex_epi64(zmm0, 0xD8);\n");
 
         fprintf(fd,"}\n");
     }
@@ -1743,38 +1744,38 @@ fprintf(fd,  "// Process group with 30 CNs \n");
         cnOffsetInGroup = (lut_numBnInBnGroups[29]*NR_LDPC_ZMAX)>>5;
 
         // Set pointers to start of group 2
-        fprintf(fd,"    p_bnProcBuf     = (__m256i*) &bnProcBuf    [%u];\n",lut_startAddrBnGroups[idxBnGroup]);
-        fprintf(fd,"    p_llrProcBuf    = (__m256i*) &llrProcBuf   [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
-        fprintf(fd,"    p_llrRes        = (__m512i*) &llrRes       [%d];\n",lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd, "    p_bnProcBuf     = (simde__m256i*) &bnProcBuf    [%u];\n", lut_startAddrBnGroups[idxBnGroup]);
+        fprintf(fd, "    p_llrProcBuf    = (simde__m256i*) &llrProcBuf   [%d];\n", lut_startAddrBnGroupsLlr[idxBnGroup]);
+        fprintf(fd, "    p_llrRes        = (simde__m512i*) &llrRes       [%d];\n", lut_startAddrBnGroupsLlr[idxBnGroup]);
         // Loop over BNs
         fprintf(fd,"        for (int i=0,j=0;i<M;i++,j+=2) {\n");
             // First 16 LLRs of first CN
-        fprintf(fd,"       zmmRes0 = _mm512_cvtepi8_epi16(p_bnProcBuf [j]);\n");
-        fprintf(fd,"       zmmRes1 = _mm512_cvtepi8_epi16(p_bnProcBuf [j +1]);\n");
+        fprintf(fd, "       zmmRes0 = simde_mm512_cvtepi8_epi16(p_bnProcBuf [j]);\n");
+        fprintf(fd, "       zmmRes1 = simde_mm512_cvtepi8_epi16(p_bnProcBuf [j +1]);\n");
 
-            // Loop over CNs
+        // Loop over CNs
         for (k=1; k<30; k++)
         {
-        fprintf(fd,"       zmm0 = _mm512_cvtepi8_epi16(p_bnProcBuf[%u + j]);\n", k*cnOffsetInGroup);
-        fprintf(fd,"       zmmRes0 = _mm512_adds_epi16(zmmRes0,zmm0);\n");
+          fprintf(fd, "       zmm0 = simde_mm512_cvtepi8_epi16(p_bnProcBuf[%u + j]);\n", k * cnOffsetInGroup);
+          fprintf(fd, "       zmmRes0 = simde_mm512_adds_epi16(zmmRes0,zmm0);\n");
 
-        fprintf(fd,"       zmm1 = _mm512_cvtepi8_epi16(p_bnProcBuf[%u + j +1]);\n", k*cnOffsetInGroup);
+          fprintf(fd, "       zmm1 = simde_mm512_cvtepi8_epi16(p_bnProcBuf[%d + j +1]);\n", k * cnOffsetInGroup);
 
-        fprintf(fd, "      zmmRes1 = _mm512_adds_epi16(zmmRes1,zmm1); \n");
+          fprintf(fd, "      zmmRes1 = simde_mm512_adds_epi16(zmmRes1,zmm1); \n");
         }
 
             // Add LLR from receiver input
-        fprintf(fd,"       zmm0    = _mm512_cvtepi8_epi16(p_llrProcBuf[j]);\n");
-        fprintf(fd,"       zmmRes0 = _mm512_adds_epi16(zmmRes0,zmm0);\n");
+        fprintf(fd, "       zmm0    = simde_mm512_cvtepi8_epi16(p_llrProcBuf[j]);\n");
+        fprintf(fd, "       zmmRes0 = simde_mm512_adds_epi16(zmmRes0,zmm0);\n");
 
-        fprintf(fd,"       zmm1    = _mm512_cvtepi8_epi16(p_llrProcBuf[j +1 ]);\n");
-        fprintf(fd,"       zmmRes1 = _mm512_adds_epi16(zmmRes1,zmm1);\n");
+        fprintf(fd, "       zmm1    = simde_mm512_cvtepi8_epi16(p_llrProcBuf[j +1 ]);\n");
+        fprintf(fd, "       zmmRes1 = simde_mm512_adds_epi16(zmmRes1,zmm1);\n");
 
-            // Pack results back to epi8
-        fprintf(fd,"       zmm0 = _mm512_packs_epi16(zmmRes0,zmmRes1);\n");
-            //zmm0     = [zmmRes1[255:256]zmmRes0[255:256]zmmRes1[127:0]zmmRes0[127:0]]
-            // p_llrRes = [zmmRes1[255:256]zmmRes1[127:0]zmmRes0[255:256]zmmRes0[127:0]]
-        fprintf(fd,"            p_llrRes[i] = _mm512_permutex_epi64(zmm0, 0xD8);\n");
+        // Pack results back to epi8
+        fprintf(fd, "       zmm0 = simde_mm512_packs_epi16(zmmRes0,zmmRes1);\n");
+        // zmm0     = [zmmRes1[255:128]zmmRes0[255:128]zmmRes1[127:0]zmmRes0[127:0]]
+        // p_llrRes = [zmmRes1[255:128]zmmRes1[127:0]zmmRes0[255:128]zmmRes0[127:0]]
+        fprintf(fd, "            p_llrRes[i] = simde_mm512_permutex_epi64(zmm0, 0xD8);\n");
 
         fprintf(fd,"}\n");
     }
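
The hunks above all port the same emitted kernel, one copy per CN group size (25 through 30): widen two 32-byte blocks of int8 LLRs to int16 for headroom, saturating-add the contribution of every CN in the group plus the channel LLR, pack back down to int8, and repair the lane interleave that the pack step introduces. A minimal hand-written sketch of that kernel is given below for reference; the wrapper function, its name, and the parameters M, nCNs and cnOffset are illustrative assumptions, not code from this patch:

    #include <simde/x86/avx512.h>

    /* Sketch only: one BN-group update, as emitted by the generators above. */
    static void bn_group_accumulate_sketch(const simde__m256i *p_bnProcBuf,
                                           const simde__m256i *p_llrProcBuf,
                                           simde__m512i *p_llrRes,
                                           int M, int nCNs, unsigned cnOffset)
    {
        for (int i = 0, j = 0; i < M; i++, j += 2) {
            /* first CN of the group: widen 32 int8 LLRs to int16 */
            simde__m512i zmmRes0 = simde_mm512_cvtepi8_epi16(p_bnProcBuf[j]);
            simde__m512i zmmRes1 = simde_mm512_cvtepi8_epi16(p_bnProcBuf[j + 1]);
            /* remaining CNs of the group: saturating 16-bit accumulation */
            for (int k = 1; k < nCNs; k++) {
                simde__m512i zmm0 = simde_mm512_cvtepi8_epi16(p_bnProcBuf[k * cnOffset + j]);
                simde__m512i zmm1 = simde_mm512_cvtepi8_epi16(p_bnProcBuf[k * cnOffset + j + 1]);
                zmmRes0 = simde_mm512_adds_epi16(zmmRes0, zmm0);
                zmmRes1 = simde_mm512_adds_epi16(zmmRes1, zmm1);
            }
            /* add the LLRs from the receiver input */
            zmmRes0 = simde_mm512_adds_epi16(zmmRes0, simde_mm512_cvtepi8_epi16(p_llrProcBuf[j]));
            zmmRes1 = simde_mm512_adds_epi16(zmmRes1, simde_mm512_cvtepi8_epi16(p_llrProcBuf[j + 1]));
            /* packs_epi16 interleaves the 128-bit lanes of its two sources;
               permutex_epi64 with 0xD8 reorders the qwords within each 256-bit
               lane so the bytes from zmmRes0 and zmmRes1 each form one
               contiguous 128-bit block again */
            simde__m512i zmm0 = simde_mm512_packs_epi16(zmmRes0, zmmRes1);
            p_llrRes[i] = simde_mm512_permutex_epi64(zmm0, 0xD8);
        }
    }

With the simde_ spellings this source builds unchanged on x86, where SIMDE resolves each call to the original AVX-512 instruction when the matching SIMDE_X86_AVX512*_NATIVE macros are defined, and on armv7l/aarch64, where SIMDE substitutes NEON or scalar implementations; that portability is the point of the renames in these hunks.
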
diff --git a/openair1/PHY/CODING/nrLDPC_decoder/nrLDPC_tools/generator_bnProc_avx512/bnProc_gen_BG1_avx512.c b/openair1/PHY/CODING/nrLDPC_decoder/nrLDPC_tools/generator_bnProc_avx512/bnProc_gen_BG1_avx512.c
index 533ebc95f33d7e4dcb5da2c81748b62a7a7ef556..d3f7dd5002b6d7b8f712f352c8fe3f9efe4a0faf 100644
--- a/openair1/PHY/CODING/nrLDPC_decoder/nrLDPC_tools/generator_bnProc_avx512/bnProc_gen_BG1_avx512.c
+++ b/openair1/PHY/CODING/nrLDPC_decoder/nrLDPC_tools/generator_bnProc_avx512/bnProc_gen_BG1_avx512.c
@@ -115,10 +115,14 @@ fprintf(fd,  "// Process group with 2 CNs \n");
     
           // Loop over BNs
         fprintf(fd,"            for (i=0;i<M;i++) {\n");
-        fprintf(fd,"            ((__m512i*)bnProcBufRes)[%d + i ] = _mm512_subs_epi8(((__m512i*)llrRes)[%d + i ], ((__m512i*) bnProcBuf)[%d + i]);\n",(lut_startAddrBnGroups[idxBnGroup]>>6)+ k*cnOffsetInGroup,(lut_startAddrBnGroupsLlr[idxBnGroup]>>6), (lut_startAddrBnGroups[idxBnGroup]>>6)+ k*cnOffsetInGroup);
-
-         fprintf(fd,"}\n");
-
+        fprintf(fd,
+                "            ((simde__m512i*)bnProcBufRes)[%d + i ] = simde_mm512_subs_epi8(((simde__m512i*)llrRes)[%d + i ], "
+                "((simde__m512i*) bnProcBuf)[%d + i]);\n",
+                (lut_startAddrBnGroups[idxBnGroup] >> 6) + k * cnOffsetInGroup,
+                (lut_startAddrBnGroupsLlr[idxBnGroup] >> 6),
+                (lut_startAddrBnGroups[idxBnGroup] >> 6) + k * cnOffsetInGroup);
+
+        fprintf(fd, "}\n");
         }
 
 
@@ -144,8 +148,8 @@ fprintf(fd,  "// Process group with 3 CNs \n");
         cnOffsetInGroup = (lut_numBnInBnGroups[2]*NR_LDPC_ZMAX)>>6;
 
         // Set pointers to start of group 2
-        //fprintf(fd,"    ((__m512i*) bnProcBuf)     = ((__m512i*) &bnProcBuf)    [%d];\n",lut_startAddrBnGroups[idxBnGroup]);
-        
+        // fprintf(fd,"    ((simde__m512i*) bnProcBuf)     = ((simde__m512i*) &bnProcBuf)
+        // [%d];\n",lut_startAddrBnGroups[idxBnGroup]);
 
         for (k=0; k<3; k++)
         {
@@ -153,9 +157,14 @@ fprintf(fd,  "// Process group with 3 CNs \n");
 
           // Loop over BNs
        fprintf(fd,"            for (i=0;i<M;i++) {\n");
-        fprintf(fd,"            ((__m512i*)bnProcBufRes)[%d + i ] = _mm512_subs_epi8(((__m512i*)llrRes)[%d + i ], ((__m512i*) bnProcBuf)[%d + i]);\n",(lut_startAddrBnGroups[idxBnGroup]>>6)+ k*cnOffsetInGroup,(lut_startAddrBnGroupsLlr[idxBnGroup]>>6), (lut_startAddrBnGroups[idxBnGroup]>>6)+ k*cnOffsetInGroup);
-
-         fprintf(fd,"}\n");
+       fprintf(fd,
+               "            ((simde__m512i*)bnProcBufRes)[%d + i ] = simde_mm512_subs_epi8(((simde__m512i*)llrRes)[%d + i ], "
+               "((simde__m512i*) bnProcBuf)[%d + i]);\n",
+               (lut_startAddrBnGroups[idxBnGroup] >> 6) + k * cnOffsetInGroup,
+               (lut_startAddrBnGroupsLlr[idxBnGroup] >> 6),
+               (lut_startAddrBnGroups[idxBnGroup] >> 6) + k * cnOffsetInGroup);
+
+       fprintf(fd, "}\n");
         }
     }
 
@@ -188,9 +197,14 @@ fprintf(fd,  "// Process group with 4 CNs \n");
   
           // Loop over BNs
         fprintf(fd,"            for (i=0;i<M;i++) {\n");
-        fprintf(fd,"            ((__m512i*)bnProcBufRes)[%d + i ] = _mm512_subs_epi8(((__m512i*)llrRes)[%d + i ], ((__m512i*) bnProcBuf)[%d + i]);\n",((lut_startAddrBnGroups[idxBnGroup]>>6)+ k*cnOffsetInGroup),(lut_startAddrBnGroupsLlr[idxBnGroup]>>6), ((lut_startAddrBnGroups[idxBnGroup]>>6)+ k*cnOffsetInGroup));
-
-         fprintf(fd,"}\n");
+        fprintf(fd,
+                "            ((simde__m512i*)bnProcBufRes)[%d + i ] = simde_mm512_subs_epi8(((simde__m512i*)llrRes)[%d + i ], "
+                "((simde__m512i*) bnProcBuf)[%d + i]);\n",
+                ((lut_startAddrBnGroups[idxBnGroup] >> 6) + k * cnOffsetInGroup),
+                (lut_startAddrBnGroupsLlr[idxBnGroup] >> 6),
+                ((lut_startAddrBnGroups[idxBnGroup] >> 6) + k * cnOffsetInGroup));
+
+        fprintf(fd, "}\n");
         }
     }
 
@@ -222,10 +236,14 @@ fprintf(fd,  "// Process group with 4 CNs \n");
 
           // Loop over BNs
        fprintf(fd,"            for (i=0;i<M;i++) {\n");
-        fprintf(fd,"            ((__m512i*)bnProcBufRes)[%d + i ] = _mm512_subs_epi8(((__m512i*)llrRes)[%d + i ], ((__m512i*) bnProcBuf)[%d + i]);\n",(lut_startAddrBnGroups[idxBnGroup]>>6)+ k*cnOffsetInGroup,(lut_startAddrBnGroupsLlr[idxBnGroup]>>6), (lut_startAddrBnGroups[idxBnGroup]>>6)+ k*cnOffsetInGroup);
-
-         fprintf(fd,"}\n");
-
+       fprintf(fd,
+               "            ((simde__m512i*)bnProcBufRes)[%d + i ] = simde_mm512_subs_epi8(((simde__m512i*)llrRes)[%d + i ], "
+               "((simde__m512i*) bnProcBuf)[%d + i]);\n",
+               (lut_startAddrBnGroups[idxBnGroup] >> 6) + k * cnOffsetInGroup,
+               (lut_startAddrBnGroupsLlr[idxBnGroup] >> 6),
+               (lut_startAddrBnGroups[idxBnGroup] >> 6) + k * cnOffsetInGroup);
+
+       fprintf(fd, "}\n");
         }
     }
 
@@ -258,10 +276,14 @@ fprintf(fd,  "// Process group with 6 CNs \n");
 
           // Loop over BNs
        fprintf(fd,"            for (i=0;i<M;i++) {\n");
-        fprintf(fd,"            ((__m512i*)bnProcBufRes)[%d + i ] = _mm512_subs_epi8(((__m512i*)llrRes)[%d + i ], ((__m512i*) bnProcBuf)[%d + i]);\n",(lut_startAddrBnGroups[idxBnGroup]>>6)+ k*cnOffsetInGroup,(lut_startAddrBnGroupsLlr[idxBnGroup]>>6), (lut_startAddrBnGroups[idxBnGroup]>>6)+ k*cnOffsetInGroup);
-
-         fprintf(fd,"}\n");
-
+       fprintf(fd,
+               "            ((simde__m512i*)bnProcBufRes)[%d + i ] = simde_mm512_subs_epi8(((simde__m512i*)llrRes)[%d + i ], "
+               "((simde__m512i*) bnProcBuf)[%d + i]);\n",
+               (lut_startAddrBnGroups[idxBnGroup] >> 6) + k * cnOffsetInGroup,
+               (lut_startAddrBnGroupsLlr[idxBnGroup] >> 6),
+               (lut_startAddrBnGroups[idxBnGroup] >> 6) + k * cnOffsetInGroup);
+
+       fprintf(fd, "}\n");
         }
     }
 
@@ -293,10 +315,14 @@ fprintf(fd,  "// Process group with 7 CNs \n");
 
           // Loop over BNs
        fprintf(fd,"            for (i=0;i<M;i++) {\n");
-        fprintf(fd,"            ((__m512i*)bnProcBufRes)[%d + i ] = _mm512_subs_epi8(((__m512i*)llrRes)[%d + i ], ((__m512i*) bnProcBuf)[%d + i]);\n",(lut_startAddrBnGroups[idxBnGroup]>>6)+ k*cnOffsetInGroup,(lut_startAddrBnGroupsLlr[idxBnGroup]>>6), (lut_startAddrBnGroups[idxBnGroup]>>6)+ k*cnOffsetInGroup);
-
-         fprintf(fd,"}\n");
-
+       fprintf(fd,
+               "            ((simde__m512i*)bnProcBufRes)[%d + i ] = simde_mm512_subs_epi8(((simde__m512i*)llrRes)[%d + i ], "
+               "((simde__m512i*) bnProcBuf)[%d + i]);\n",
+               (lut_startAddrBnGroups[idxBnGroup] >> 6) + k * cnOffsetInGroup,
+               (lut_startAddrBnGroupsLlr[idxBnGroup] >> 6),
+               (lut_startAddrBnGroups[idxBnGroup] >> 6) + k * cnOffsetInGroup);
+
+       fprintf(fd, "}\n");
         }
     }
 
@@ -328,10 +354,14 @@ fprintf(fd,  "// Process group with 8 CNs \n");
 
           // Loop over BNs
        fprintf(fd,"            for (i=0;i<M;i++) {\n");
-        fprintf(fd,"            ((__m512i*)bnProcBufRes)[%d + i ] = _mm512_subs_epi8(((__m512i*)llrRes)[%d + i ], ((__m512i*) bnProcBuf)[%d + i]);\n",(lut_startAddrBnGroups[idxBnGroup]>>6)+ k*cnOffsetInGroup,(lut_startAddrBnGroupsLlr[idxBnGroup]>>6), (lut_startAddrBnGroups[idxBnGroup]>>6)+ k*cnOffsetInGroup);
-
-         fprintf(fd,"}\n");
-
+       fprintf(fd,
+               "            ((simde__m512i*)bnProcBufRes)[%d + i ] = simde_mm512_subs_epi8(((simde__m512i*)llrRes)[%d + i ], "
+               "((simde__m512i*) bnProcBuf)[%d + i]);\n",
+               (lut_startAddrBnGroups[idxBnGroup] >> 6) + k * cnOffsetInGroup,
+               (lut_startAddrBnGroupsLlr[idxBnGroup] >> 6),
+               (lut_startAddrBnGroups[idxBnGroup] >> 6) + k * cnOffsetInGroup);
+
+       fprintf(fd, "}\n");
         }
     }
 
@@ -362,10 +392,14 @@ fprintf(fd,  "// Process group with 9 CNs \n");
 
           // Loop over BNs
        fprintf(fd,"            for (i=0;i<M;i++) {\n");
-        fprintf(fd,"            ((__m512i*)bnProcBufRes)[%d + i ] = _mm512_subs_epi8(((__m512i*)llrRes)[%d + i ], ((__m512i*) bnProcBuf)[%d + i]);\n",(lut_startAddrBnGroups[idxBnGroup]>>6)+ k*cnOffsetInGroup,(lut_startAddrBnGroupsLlr[idxBnGroup]>>6), (lut_startAddrBnGroups[idxBnGroup]>>6)+ k*cnOffsetInGroup);
-
-         fprintf(fd,"}\n");
-
+       fprintf(fd,
+               "            ((simde__m512i*)bnProcBufRes)[%d + i ] = simde_mm512_subs_epi8(((simde__m512i*)llrRes)[%d + i ], "
+               "((simde__m512i*) bnProcBuf)[%d + i]);\n",
+               (lut_startAddrBnGroups[idxBnGroup] >> 6) + k * cnOffsetInGroup,
+               (lut_startAddrBnGroupsLlr[idxBnGroup] >> 6),
+               (lut_startAddrBnGroups[idxBnGroup] >> 6) + k * cnOffsetInGroup);
+
+       fprintf(fd, "}\n");
         }
     }
 
@@ -396,10 +430,14 @@ fprintf(fd,  "// Process group with 10 CNs \n");
 
           // Loop over BNs
        fprintf(fd,"            for (i=0;i<M;i++) {\n");
-        fprintf(fd,"            ((__m512i*)bnProcBufRes)[%d + i ] = _mm512_subs_epi8(((__m512i*)llrRes)[%d + i ], ((__m512i*) bnProcBuf)[%d + i]);\n",(lut_startAddrBnGroups[idxBnGroup]>>6)+ k*cnOffsetInGroup,(lut_startAddrBnGroupsLlr[idxBnGroup]>>6), (lut_startAddrBnGroups[idxBnGroup]>>6)+ k*cnOffsetInGroup);
-
-         fprintf(fd,"}\n");
-
+       fprintf(fd,
+               "            ((simde__m512i*)bnProcBufRes)[%d + i ] = simde_mm512_subs_epi8(((simde__m512i*)llrRes)[%d + i ], "
+               "((simde__m512i*) bnProcBuf)[%d + i]);\n",
+               (lut_startAddrBnGroups[idxBnGroup] >> 6) + k * cnOffsetInGroup,
+               (lut_startAddrBnGroupsLlr[idxBnGroup] >> 6),
+               (lut_startAddrBnGroups[idxBnGroup] >> 6) + k * cnOffsetInGroup);
+
+       fprintf(fd, "}\n");
         }
     }
 
@@ -429,10 +467,14 @@ fprintf(fd,  "// Process group with 11 CNs \n");
 
           // Loop over BNs
        fprintf(fd,"            for (i=0;i<M;i++) {\n");
-        fprintf(fd,"            ((__m512i*)bnProcBufRes)[%d + i ] = _mm512_subs_epi8(((__m512i*)llrRes)[%d + i ], ((__m512i*) bnProcBuf)[%d + i]);\n",(lut_startAddrBnGroups[idxBnGroup]>>6)+ k*cnOffsetInGroup,(lut_startAddrBnGroupsLlr[idxBnGroup]>>6), (lut_startAddrBnGroups[idxBnGroup]>>6)+ k*cnOffsetInGroup);
-
-         fprintf(fd,"}\n");
-
+       fprintf(fd,
+               "            ((simde__m512i*)bnProcBufRes)[%d + i ] = simde_mm512_subs_epi8(((simde__m512i*)llrRes)[%d + i ], "
+               "((simde__m512i*) bnProcBuf)[%d + i]);\n",
+               (lut_startAddrBnGroups[idxBnGroup] >> 6) + k * cnOffsetInGroup,
+               (lut_startAddrBnGroupsLlr[idxBnGroup] >> 6),
+               (lut_startAddrBnGroups[idxBnGroup] >> 6) + k * cnOffsetInGroup);
+
+       fprintf(fd, "}\n");
         }
     }
       // =====================================================================
@@ -462,10 +504,14 @@ fprintf(fd,  "// Process group with 12 CNs \n");
 
           // Loop over BNs
        fprintf(fd,"            for (i=0;i<M;i++) {\n");
-        fprintf(fd,"            ((__m512i*)bnProcBufRes)[%d + i ] = _mm512_subs_epi8(((__m512i*)llrRes)[%d + i ], ((__m512i*) bnProcBuf)[%d + i]);\n",(lut_startAddrBnGroups[idxBnGroup]>>6)+ k*cnOffsetInGroup,(lut_startAddrBnGroupsLlr[idxBnGroup]>>6), (lut_startAddrBnGroups[idxBnGroup]>>6)+ k*cnOffsetInGroup);
-
-         fprintf(fd,"}\n");
-
+       fprintf(fd,
+               "            ((simde__m512i*)bnProcBufRes)[%d + i ] = simde_mm512_subs_epi8(((simde__m512i*)llrRes)[%d + i ], "
+               "((simde__m512i*) bnProcBuf)[%d + i]);\n",
+               (lut_startAddrBnGroups[idxBnGroup] >> 6) + k * cnOffsetInGroup,
+               (lut_startAddrBnGroupsLlr[idxBnGroup] >> 6),
+               (lut_startAddrBnGroups[idxBnGroup] >> 6) + k * cnOffsetInGroup);
+
+       fprintf(fd, "}\n");
         }
     }
 
@@ -497,10 +543,14 @@ fprintf(fd,  "// Process group with 13 CNs \n");
 
           // Loop over BNs
        fprintf(fd,"            for (i=0;i<M;i++) {\n");
-        fprintf(fd,"            ((__m512i*)bnProcBufRes)[%d + i ] = _mm512_subs_epi8(((__m512i*)llrRes)[%d + i ], ((__m512i*) bnProcBuf)[%d + i]);\n",(lut_startAddrBnGroups[idxBnGroup]>>6)+ k*cnOffsetInGroup,(lut_startAddrBnGroupsLlr[idxBnGroup]>>6), (lut_startAddrBnGroups[idxBnGroup]>>6)+ k*cnOffsetInGroup);
-
-         fprintf(fd,"}\n");
-
+       fprintf(fd,
+               "            ((simde__m512i*)bnProcBufRes)[%d + i ] = simde_mm512_subs_epi8(((simde__m512i*)llrRes)[%d + i ], "
+               "((simde__m512i*) bnProcBuf)[%d + i]);\n",
+               (lut_startAddrBnGroups[idxBnGroup] >> 6) + k * cnOffsetInGroup,
+               (lut_startAddrBnGroupsLlr[idxBnGroup] >> 6),
+               (lut_startAddrBnGroups[idxBnGroup] >> 6) + k * cnOffsetInGroup);
+
+       fprintf(fd, "}\n");
         }
     }
 
@@ -533,10 +583,14 @@ fprintf(fd,  "// Process group with 14 CNs \n");
 
           // Loop over BNs
        fprintf(fd,"            for (i=0;i<M;i++) {\n");
-        fprintf(fd,"            ((__m512i*)bnProcBufRes)[%d + i ] = _mm512_subs_epi8(((__m512i*)llrRes)[%d + i ], ((__m512i*) bnProcBuf)[%d + i]);\n",(lut_startAddrBnGroups[idxBnGroup]>>6)+ k*cnOffsetInGroup,(lut_startAddrBnGroupsLlr[idxBnGroup]>>6), (lut_startAddrBnGroups[idxBnGroup]>>6)+ k*cnOffsetInGroup);
-
-         fprintf(fd,"}\n");
-
+       fprintf(fd,
+               "            ((simde__m512i*)bnProcBufRes)[%d + i ] = simde_mm512_subs_epi8(((simde__m512i*)llrRes)[%d + i ], "
+               "((simde__m512i*) bnProcBuf)[%d + i]);\n",
+               (lut_startAddrBnGroups[idxBnGroup] >> 6) + k * cnOffsetInGroup,
+               (lut_startAddrBnGroupsLlr[idxBnGroup] >> 6),
+               (lut_startAddrBnGroups[idxBnGroup] >> 6) + k * cnOffsetInGroup);
+
+       fprintf(fd, "}\n");
         }
     }
 
@@ -568,10 +622,14 @@ fprintf(fd,  "// Process group with 15 CNs \n");
 
           // Loop over BNs
        fprintf(fd,"            for (i=0;i<M;i++) {\n");
-        fprintf(fd,"            ((__m512i*)bnProcBufRes)[%d + i ] = _mm512_subs_epi8(((__m512i*)llrRes)[%d + i ], ((__m512i*) bnProcBuf)[%d + i]);\n",(lut_startAddrBnGroups[idxBnGroup]>>6)+ k*cnOffsetInGroup,(lut_startAddrBnGroupsLlr[idxBnGroup]>>6), (lut_startAddrBnGroups[idxBnGroup]>>6)+ k*cnOffsetInGroup);
-
-         fprintf(fd,"}\n");
-
+       fprintf(fd,
+               "            ((simde__m512i*)bnProcBufRes)[%d + i ] = simde_mm512_subs_epi8(((simde__m512i*)llrRes)[%d + i ], "
+               "((simde__m512i*) bnProcBuf)[%d + i]);\n",
+               (lut_startAddrBnGroups[idxBnGroup] >> 6) + k * cnOffsetInGroup,
+               (lut_startAddrBnGroupsLlr[idxBnGroup] >> 6),
+               (lut_startAddrBnGroups[idxBnGroup] >> 6) + k * cnOffsetInGroup);
+
+       fprintf(fd, "}\n");
         }
     }
 
@@ -604,10 +662,14 @@ fprintf(fd,  "// Process group with 16 CNs \n");
 
           // Loop over BNs
        fprintf(fd,"            for (i=0;i<M;i++) {\n");
-        fprintf(fd,"            ((__m512i*)bnProcBufRes)[%d + i ] = _mm512_subs_epi8(((__m512i*)llrRes)[%d + i ], ((__m512i*) bnProcBuf)[%d + i]);\n",(lut_startAddrBnGroups[idxBnGroup]>>6)+ k*cnOffsetInGroup,(lut_startAddrBnGroupsLlr[idxBnGroup]>>6), (lut_startAddrBnGroups[idxBnGroup]>>6)+ k*cnOffsetInGroup);
-
-         fprintf(fd,"}\n");
-
+       fprintf(fd,
+               "            ((simde__m512i*)bnProcBufRes)[%d + i ] = simde_mm512_subs_epi8(((simde__m512i*)llrRes)[%d + i ], "
+               "((simde__m512i*) bnProcBuf)[%d + i]);\n",
+               (lut_startAddrBnGroups[idxBnGroup] >> 6) + k * cnOffsetInGroup,
+               (lut_startAddrBnGroupsLlr[idxBnGroup] >> 6),
+               (lut_startAddrBnGroups[idxBnGroup] >> 6) + k * cnOffsetInGroup);
+
+       fprintf(fd, "}\n");
         }
     }
 
@@ -639,10 +701,14 @@ fprintf(fd,  "// Process group with 17 CNs \n");
 
           // Loop over BNs
        fprintf(fd,"            for (i=0;i<M;i++) {\n");
-        fprintf(fd,"            ((__m512i*)bnProcBufRes)[%d + i ] = _mm512_subs_epi8(((__m512i*)llrRes)[%d + i ], ((__m512i*) bnProcBuf)[%d + i]);\n",(lut_startAddrBnGroups[idxBnGroup]>>6)+ k*cnOffsetInGroup,(lut_startAddrBnGroupsLlr[idxBnGroup]>>6), (lut_startAddrBnGroups[idxBnGroup]>>6)+ k*cnOffsetInGroup);
-
-         fprintf(fd,"}\n");
-
+       fprintf(fd,
+               "            ((simde__m512i*)bnProcBufRes)[%d + i ] = simde_mm512_subs_epi8(((simde__m512i*)llrRes)[%d + i ], "
+               "((simde__m512i*) bnProcBuf)[%d + i]);\n",
+               (lut_startAddrBnGroups[idxBnGroup] >> 6) + k * cnOffsetInGroup,
+               (lut_startAddrBnGroupsLlr[idxBnGroup] >> 6),
+               (lut_startAddrBnGroups[idxBnGroup] >> 6) + k * cnOffsetInGroup);
+
+       fprintf(fd, "}\n");
         }
     }
 
@@ -674,10 +740,14 @@ fprintf(fd,  "// Process group with 18 CNs \n");
 
           // Loop over BNs
        fprintf(fd,"            for (i=0;i<M;i++) {\n");
-        fprintf(fd,"            ((__m512i*)bnProcBufRes)[%d + i ] = _mm512_subs_epi8(((__m512i*)llrRes)[%d + i ], ((__m512i*) bnProcBuf)[%d + i]);\n",(lut_startAddrBnGroups[idxBnGroup]>>6)+ k*cnOffsetInGroup,(lut_startAddrBnGroupsLlr[idxBnGroup]>>6), (lut_startAddrBnGroups[idxBnGroup]>>6)+ k*cnOffsetInGroup);
-
-         fprintf(fd,"}\n");
-
+       fprintf(fd,
+               "            ((simde__m512i*)bnProcBufRes)[%d + i ] = simde_mm512_subs_epi8(((simde__m512i*)llrRes)[%d + i ], "
+               "((simde__m512i*) bnProcBuf)[%d + i]);\n",
+               (lut_startAddrBnGroups[idxBnGroup] >> 6) + k * cnOffsetInGroup,
+               (lut_startAddrBnGroupsLlr[idxBnGroup] >> 6),
+               (lut_startAddrBnGroups[idxBnGroup] >> 6) + k * cnOffsetInGroup);
+
+       fprintf(fd, "}\n");
         }
     }
 
@@ -708,10 +778,14 @@ fprintf(fd,  "// Process group with 19 CNs \n");
 
           // Loop over BNs
        fprintf(fd,"            for (i=0;i<M;i++) {\n");
-        fprintf(fd,"            ((__m512i*)bnProcBufRes)[%d + i ] = _mm512_subs_epi8(((__m512i*)llrRes)[%d + i ], ((__m512i*) bnProcBuf)[%d + i]);\n",(lut_startAddrBnGroups[idxBnGroup]>>6)+ k*cnOffsetInGroup,(lut_startAddrBnGroupsLlr[idxBnGroup]>>6), (lut_startAddrBnGroups[idxBnGroup]>>6)+ k*cnOffsetInGroup);
-
-         fprintf(fd,"}\n");
-
+       fprintf(fd,
+               "            ((simde__m512i*)bnProcBufRes)[%d + i ] = simde_mm512_subs_epi8(((simde__m512i*)llrRes)[%d + i ], "
+               "((simde__m512i*) bnProcBuf)[%d + i]);\n",
+               (lut_startAddrBnGroups[idxBnGroup] >> 6) + k * cnOffsetInGroup,
+               (lut_startAddrBnGroupsLlr[idxBnGroup] >> 6),
+               (lut_startAddrBnGroups[idxBnGroup] >> 6) + k * cnOffsetInGroup);
+
+       fprintf(fd, "}\n");
         }
     }
 
@@ -743,10 +817,14 @@ fprintf(fd,  "// Process group with 20 CNs \n");
 
           // Loop over BNs
        fprintf(fd,"            for (i=0;i<M;i++) {\n");
-        fprintf(fd,"            ((__m512i*)bnProcBufRes)[%d + i ] = _mm512_subs_epi8(((__m512i*)llrRes)[%d + i ], ((__m512i*) bnProcBuf)[%d + i]);\n",(lut_startAddrBnGroups[idxBnGroup]>>6)+ k*cnOffsetInGroup,(lut_startAddrBnGroupsLlr[idxBnGroup]>>6), (lut_startAddrBnGroups[idxBnGroup]>>6)+ k*cnOffsetInGroup);
-
-         fprintf(fd,"}\n");
-
+       fprintf(fd,
+               "            ((simde__m512i*)bnProcBufRes)[%d + i ] = simde_mm512_subs_epi8(((simde__m512i*)llrRes)[%d + i ], "
+               "((simde__m512i*) bnProcBuf)[%d + i]);\n",
+               (lut_startAddrBnGroups[idxBnGroup] >> 6) + k * cnOffsetInGroup,
+               (lut_startAddrBnGroupsLlr[idxBnGroup] >> 6),
+               (lut_startAddrBnGroups[idxBnGroup] >> 6) + k * cnOffsetInGroup);
+
+       fprintf(fd, "}\n");
         }
     }
 
@@ -782,10 +860,14 @@ fprintf(fd,  "// Process group with 21 CNs \n");
 
           // Loop over BNs
        fprintf(fd,"            for (i=0;i<M;i++) {\n");
-        fprintf(fd,"            ((__m512i*)bnProcBufRes)[%d + i ] = _mm512_subs_epi8(((__m512i*)llrRes)[%d + i ], ((__m512i*) bnProcBuf)[%d + i]);\n",(lut_startAddrBnGroups[idxBnGroup]>>6)+ k*cnOffsetInGroup,(lut_startAddrBnGroupsLlr[idxBnGroup]>>6), (lut_startAddrBnGroups[idxBnGroup]>>6)+ k*cnOffsetInGroup);
-
-         fprintf(fd,"}\n");
-
+       fprintf(fd,
+               "            ((simde__m512i*)bnProcBufRes)[%d + i ] = simde_mm512_subs_epi8(((simde__m512i*)llrRes)[%d + i ], "
+               "((simde__m512i*) bnProcBuf)[%d + i]);\n",
+               (lut_startAddrBnGroups[idxBnGroup] >> 6) + k * cnOffsetInGroup,
+               (lut_startAddrBnGroupsLlr[idxBnGroup] >> 6),
+               (lut_startAddrBnGroups[idxBnGroup] >> 6) + k * cnOffsetInGroup);
+
+       fprintf(fd, "}\n");
         }
     }
       // =====================================================================
@@ -815,10 +897,14 @@ fprintf(fd,  "// Process group with 22 CNs \n");
 
           // Loop over BNs
        fprintf(fd,"            for (i=0;i<M;i++) {\n");
-        fprintf(fd,"            ((__m512i*)bnProcBufRes)[%d + i ] = _mm512_subs_epi8(((__m512i*)llrRes)[%d + i ], ((__m512i*) bnProcBuf)[%d + i]);\n",(lut_startAddrBnGroups[idxBnGroup]>>6)+ k*cnOffsetInGroup,(lut_startAddrBnGroupsLlr[idxBnGroup]>>6), (lut_startAddrBnGroups[idxBnGroup]>>6)+ k*cnOffsetInGroup);
-
-         fprintf(fd,"}\n");
-
+       fprintf(fd,
+               "            ((simde__m512i*)bnProcBufRes)[%d + i ] = simde_mm512_subs_epi8(((simde__m512i*)llrRes)[%d + i ], "
+               "((simde__m512i*) bnProcBuf)[%d + i]);\n",
+               (lut_startAddrBnGroups[idxBnGroup] >> 6) + k * cnOffsetInGroup,
+               (lut_startAddrBnGroupsLlr[idxBnGroup] >> 6),
+               (lut_startAddrBnGroups[idxBnGroup] >> 6) + k * cnOffsetInGroup);
+
+       fprintf(fd, "}\n");
         }
     }
 
@@ -850,10 +936,14 @@ fprintf(fd,  "// Process group with <23 CNs \n");
 
           // Loop over BNs
        fprintf(fd,"            for (i=0;i<M;i++) {\n");
-        fprintf(fd,"            ((__m512i*)bnProcBufRes)[%d + i ] = _mm512_subs_epi8(((__m512i*)llrRes)[%d + i ], ((__m512i*) bnProcBuf)[%d + i]);\n",(lut_startAddrBnGroups[idxBnGroup]>>6)+ k*cnOffsetInGroup,(lut_startAddrBnGroupsLlr[idxBnGroup]>>6), (lut_startAddrBnGroups[idxBnGroup]>>6)+ k*cnOffsetInGroup);
-
-         fprintf(fd,"}\n");
-
+       fprintf(fd,
+               "            ((simde__m512i*)bnProcBufRes)[%d + i ] = simde_mm512_subs_epi8(((simde__m512i*)llrRes)[%d + i ], "
+               "((simde__m512i*) bnProcBuf)[%d + i]);\n",
+               (lut_startAddrBnGroups[idxBnGroup] >> 6) + k * cnOffsetInGroup,
+               (lut_startAddrBnGroupsLlr[idxBnGroup] >> 6),
+               (lut_startAddrBnGroups[idxBnGroup] >> 6) + k * cnOffsetInGroup);
+
+       fprintf(fd, "}\n");
         }
     }
 
@@ -886,10 +976,14 @@ fprintf(fd,  "// Process group with 24 CNs \n");
 
           // Loop over BNs
        fprintf(fd,"            for (i=0;i<M;i++) {\n");
-        fprintf(fd,"            ((__m512i*)bnProcBufRes)[%d + i ] = _mm512_subs_epi8(((__m512i*)llrRes)[%d + i ], ((__m512i*) bnProcBuf)[%d + i]);\n",(lut_startAddrBnGroups[idxBnGroup]>>6)+ k*cnOffsetInGroup,(lut_startAddrBnGroupsLlr[idxBnGroup]>>6), (lut_startAddrBnGroups[idxBnGroup]>>6)+ k*cnOffsetInGroup);
-
-         fprintf(fd,"}\n");
-
+       fprintf(fd,
+               "            ((simde__m512i*)bnProcBufRes)[%d + i ] = simde_mm512_subs_epi8(((simde__m512i*)llrRes)[%d + i ], "
+               "((simde__m512i*) bnProcBuf)[%d + i]);\n",
+               (lut_startAddrBnGroups[idxBnGroup] >> 6) + k * cnOffsetInGroup,
+               (lut_startAddrBnGroupsLlr[idxBnGroup] >> 6),
+               (lut_startAddrBnGroups[idxBnGroup] >> 6) + k * cnOffsetInGroup);
+
+       fprintf(fd, "}\n");
         }
     }
 
@@ -921,11 +1015,14 @@ fprintf(fd,  "// Process group with 25 CNs \n");
 
           // Loop over BNs
        fprintf(fd,"            for (i=0;i<M;i++) {\n");
-        fprintf(fd,"            ((__m512i*)bnProcBufRes)[%d + i ] = _mm512_subs_epi8(((__m512i*)llrRes)[%d + i ], ((__m512i*) bnProcBuf)[%d + i]);\n",(lut_startAddrBnGroups[idxBnGroup]>>6)+ k*cnOffsetInGroup,(lut_startAddrBnGroupsLlr[idxBnGroup]>>6), (lut_startAddrBnGroups[idxBnGroup]>>6)+ k*cnOffsetInGroup);
-
-
-         fprintf(fd,"}\n");
-
+       fprintf(fd,
+               "            ((simde__m512i*)bnProcBufRes)[%d + i ] = simde_mm512_subs_epi8(((simde__m512i*)llrRes)[%d + i ], "
+               "((simde__m512i*) bnProcBuf)[%d + i]);\n",
+               (lut_startAddrBnGroups[idxBnGroup] >> 6) + k * cnOffsetInGroup,
+               (lut_startAddrBnGroupsLlr[idxBnGroup] >> 6),
+               (lut_startAddrBnGroups[idxBnGroup] >> 6) + k * cnOffsetInGroup);
+
+       fprintf(fd, "}\n");
         }
     }
 
@@ -958,10 +1055,14 @@ fprintf(fd,  "// Process group with 26 CNs \n");
 
           // Loop over BNs
        fprintf(fd,"            for (i=0;i<M;i++) {\n");
-        fprintf(fd,"            ((__m512i*)bnProcBufRes)[%d + i ] = _mm512_subs_epi8(((__m512i*)llrRes)[%d + i ], ((__m512i*) bnProcBuf)[%d + i]);\n",(lut_startAddrBnGroups[idxBnGroup]>>6)+ k*cnOffsetInGroup,(lut_startAddrBnGroupsLlr[idxBnGroup]>>6), (lut_startAddrBnGroups[idxBnGroup]>>6)+ k*cnOffsetInGroup);
-
-         fprintf(fd,"}\n");
-
+       fprintf(fd,
+               "            ((simde__m512i*)bnProcBufRes)[%d + i ] = simde_mm512_subs_epi8(((simde__m512i*)llrRes)[%d + i ], "
+               "((simde__m512i*) bnProcBuf)[%d + i]);\n",
+               (lut_startAddrBnGroups[idxBnGroup] >> 6) + k * cnOffsetInGroup,
+               (lut_startAddrBnGroupsLlr[idxBnGroup] >> 6),
+               (lut_startAddrBnGroups[idxBnGroup] >> 6) + k * cnOffsetInGroup);
+
+       fprintf(fd, "}\n");
         }
     }
 
@@ -993,10 +1094,14 @@ fprintf(fd,  "// Process group with 27 CNs \n");
 
           // Loop over BNs
        fprintf(fd,"            for (i=0;i<M;i++) {\n");
-        fprintf(fd,"            ((__m512i*)bnProcBufRes)[%d + i ] = _mm512_subs_epi8(((__m512i*)llrRes)[%d + i ], ((__m512i*) bnProcBuf)[%d + i]);\n",(lut_startAddrBnGroups[idxBnGroup]>>6)+ k*cnOffsetInGroup,(lut_startAddrBnGroupsLlr[idxBnGroup]>>6), (lut_startAddrBnGroups[idxBnGroup]>>6)+ k*cnOffsetInGroup);
-
-         fprintf(fd,"}\n");
-
+       fprintf(fd,
+               "            ((simde__m512i*)bnProcBufRes)[%d + i ] = simde_mm512_subs_epi8(((simde__m512i*)llrRes)[%d + i ], "
+               "((simde__m512i*) bnProcBuf)[%d + i]);\n",
+               (lut_startAddrBnGroups[idxBnGroup] >> 6) + k * cnOffsetInGroup,
+               (lut_startAddrBnGroupsLlr[idxBnGroup] >> 6),
+               (lut_startAddrBnGroups[idxBnGroup] >> 6) + k * cnOffsetInGroup);
+
+       fprintf(fd, "}\n");
         }
     }
 
@@ -1028,10 +1133,14 @@ fprintf(fd,  "// Process group with 28 CNs \n");
 
           // Loop over BNs
        fprintf(fd,"            for (i=0;i<M;i++) {\n");
-        fprintf(fd,"            ((__m512i*)bnProcBufRes)[%d + i ] = _mm512_subs_epi8(((__m512i*)llrRes)[%d + i ], ((__m512i*) bnProcBuf)[%d + i]);\n",(lut_startAddrBnGroups[idxBnGroup]>>6)+ k*cnOffsetInGroup,(lut_startAddrBnGroupsLlr[idxBnGroup]>>6), (lut_startAddrBnGroups[idxBnGroup]>>6)+ k*cnOffsetInGroup);
-
-         fprintf(fd,"}\n");
-
+       fprintf(fd,
+               "            ((simde__m512i*)bnProcBufRes)[%d + i ] = simde_mm512_subs_epi8(((simde__m512i*)llrRes)[%d + i ], "
+               "((simde__m512i*) bnProcBuf)[%d + i]);\n",
+               (lut_startAddrBnGroups[idxBnGroup] >> 6) + k * cnOffsetInGroup,
+               (lut_startAddrBnGroupsLlr[idxBnGroup] >> 6),
+               (lut_startAddrBnGroups[idxBnGroup] >> 6) + k * cnOffsetInGroup);
+
+       fprintf(fd, "}\n");
         }
     }
 
@@ -1061,10 +1170,14 @@ fprintf(fd,  "// Process group with 29 CNs \n");
 
           // Loop over BNs
        fprintf(fd,"            for (i=0;i<M;i++) {\n");
-        fprintf(fd,"            ((__m512i*)bnProcBufRes)[%d + i ] = _mm512_subs_epi8(((__m512i*)llrRes)[%d + i ], ((__m512i*) bnProcBuf)[%d + i]);\n",(lut_startAddrBnGroups[idxBnGroup]>>6)+ k*cnOffsetInGroup,(lut_startAddrBnGroupsLlr[idxBnGroup]>>6), (lut_startAddrBnGroups[idxBnGroup]>>6)+ k*cnOffsetInGroup);
-
-         fprintf(fd,"}\n");
-
+       fprintf(fd,
+               "            ((simde__m512i*)bnProcBufRes)[%d + i ] = simde_mm512_subs_epi8(((simde__m512i*)llrRes)[%d + i ], "
+               "((simde__m512i*) bnProcBuf)[%d + i]);\n",
+               (lut_startAddrBnGroups[idxBnGroup] >> 6) + k * cnOffsetInGroup,
+               (lut_startAddrBnGroupsLlr[idxBnGroup] >> 6),
+               (lut_startAddrBnGroups[idxBnGroup] >> 6) + k * cnOffsetInGroup);
+
+       fprintf(fd, "}\n");
         }
     }
 
@@ -1095,10 +1208,14 @@ fprintf(fd,  "// Process group with 30 CNs \n");
 
           // Loop over BNs
        fprintf(fd,"            for (i=0;i<M;i++) {\n");
-        fprintf(fd,"            ((__m512i*)bnProcBufRes)[%d + i ] = _mm512_subs_epi8(((__m512i*)llrRes)[%d + i ], ((__m512i*) bnProcBuf)[%d + i]);\n",(lut_startAddrBnGroups[idxBnGroup]>>6)+ k*cnOffsetInGroup,(lut_startAddrBnGroupsLlr[idxBnGroup]>>6), (lut_startAddrBnGroups[idxBnGroup]>>6)+ k*cnOffsetInGroup);
-
-         fprintf(fd,"}\n");
-
+       fprintf(fd,
+               "            ((simde__m512i*)bnProcBufRes)[%d + i ] = simde_mm512_subs_epi8(((simde__m512i*)llrRes)[%d + i ], "
+               "((simde__m512i*) bnProcBuf)[%d + i]);\n",
+               (lut_startAddrBnGroups[idxBnGroup] >> 6) + k * cnOffsetInGroup,
+               (lut_startAddrBnGroupsLlr[idxBnGroup] >> 6),
+               (lut_startAddrBnGroups[idxBnGroup] >> 6) + k * cnOffsetInGroup);
+
+       fprintf(fd, "}\n");
         }
     }
 
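Every hunk in these bnProc generators (BG1 above, BG2 below) is the same mechanical rewrite of a single emitted statement: the extrinsic update bnProcBufRes = llrRes - bnProcBuf with int8 saturation, i.e. each CN's own contribution subtracted back out of the aggregated LLR. A hedged sketch of the loop the emitted code performs follows; the standalone function and its parameter names are illustrative assumptions (the generator instead bakes the offsets into the output as integer constants):

    #include <stdint.h>
    #include <simde/x86/avx512.h>

    /* Sketch only: one emitted extrinsic-update loop for CN index k. */
    static void bn_extrinsic_sketch(int8_t *bnProcBufRes, const int8_t *llrRes,
                                    const int8_t *bnProcBuf, int M,
                                    uint32_t startAddrBn, uint32_t startAddrLlr,
                                    uint32_t cnOffsetInGroup, int k)
    {
        /* the >>6 turns byte offsets into 64-byte simde__m512i indices */
        simde__m512i *res       = (simde__m512i *)bnProcBufRes;
        const simde__m512i *llr = (const simde__m512i *)llrRes;
        const simde__m512i *bn  = (const simde__m512i *)bnProcBuf;
        uint32_t off = (startAddrBn >> 6) + k * cnOffsetInGroup;
        for (int i = 0; i < M; i++)
            /* total LLR minus this CN's contribution, saturating at int8 range */
            res[off + i] = simde_mm512_subs_epi8(llr[(startAddrLlr >> 6) + i],
                                                 bn[off + i]);
    }

Apart from the __m512i -> simde__m512i and _mm512_subs_epi8 -> simde_mm512_subs_epi8 renames, the clang-format-style reflow of these fprintf calls leaves each emitted string byte-identical to what the generator produced before.
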
diff --git a/openair1/PHY/CODING/nrLDPC_decoder/nrLDPC_tools/generator_bnProc_avx512/bnProc_gen_BG2_avx512.c b/openair1/PHY/CODING/nrLDPC_decoder/nrLDPC_tools/generator_bnProc_avx512/bnProc_gen_BG2_avx512.c
index bd7a9455a806ccea6488a84ce7371a208d0a3480..db4c0d618ccfbe1bfcdaa76503c4dbfa1455c8b1 100644
--- a/openair1/PHY/CODING/nrLDPC_decoder/nrLDPC_tools/generator_bnProc_avx512/bnProc_gen_BG2_avx512.c
+++ b/openair1/PHY/CODING/nrLDPC_decoder/nrLDPC_tools/generator_bnProc_avx512/bnProc_gen_BG2_avx512.c
@@ -111,10 +111,14 @@ fprintf(fd,  "// Process group with 2 CNs \n");
     
           // Loop over BNs
         fprintf(fd,"            for (i=0;i<M;i++) {\n");
-        fprintf(fd,"            ((__m512i*)bnProcBufRes)[%d + i ] = _mm512_subs_epi8(((__m512i*)llrRes)[%d + i ], ((__m512i*) bnProcBuf)[%d + i]);\n",(lut_startAddrBnGroups[idxBnGroup]>>6)+ k*cnOffsetInGroup,(lut_startAddrBnGroupsLlr[idxBnGroup]>>6), (lut_startAddrBnGroups[idxBnGroup]>>6)+ k*cnOffsetInGroup);
-
-         fprintf(fd,"}\n");
-
+        fprintf(fd,
+                "            ((simde__m512i*)bnProcBufRes)[%d + i ] = simde_mm512_subs_epi8(((simde__m512i*)llrRes)[%d + i ], "
+                "((simde__m512i*) bnProcBuf)[%d + i]);\n",
+                (lut_startAddrBnGroups[idxBnGroup] >> 6) + k * cnOffsetInGroup,
+                (lut_startAddrBnGroupsLlr[idxBnGroup] >> 6),
+                (lut_startAddrBnGroups[idxBnGroup] >> 6) + k * cnOffsetInGroup);
+
+        fprintf(fd, "}\n");
         }
 
 
@@ -140,8 +144,8 @@ fprintf(fd,  "// Process group with 3 CNs \n");
         cnOffsetInGroup = (lut_numBnInBnGroups[2]*NR_LDPC_ZMAX)>>6;
 
         // Set pointers to start of group 2
-        //fprintf(fd,"    ((__m512i*) bnProcBuf)     = ((__m512i*) &bnProcBuf)    [%d];\n",lut_startAddrBnGroups[idxBnGroup]);
-        
+        // fprintf(fd,"    ((simde__m512i*) bnProcBuf)     = ((simde__m512i*) &bnProcBuf)
+        // [%d];\n",lut_startAddrBnGroups[idxBnGroup]);
 
         for (k=0; k<3; k++)
         {
@@ -149,9 +153,14 @@ fprintf(fd,  "// Process group with 3 CNs \n");
 
           // Loop over BNs
        fprintf(fd,"            for (i=0;i<M;i++) {\n");
-        fprintf(fd,"            ((__m512i*)bnProcBufRes)[%d + i ] = _mm512_subs_epi8(((__m512i*)llrRes)[%d + i ], ((__m512i*) bnProcBuf)[%d + i]);\n",(lut_startAddrBnGroups[idxBnGroup]>>6)+ k*cnOffsetInGroup,(lut_startAddrBnGroupsLlr[idxBnGroup]>>6), (lut_startAddrBnGroups[idxBnGroup]>>6)+ k*cnOffsetInGroup);
-
-         fprintf(fd,"}\n");
+       fprintf(fd,
+               "            ((simde__m512i*)bnProcBufRes)[%d + i ] = simde_mm512_subs_epi8(((simde__m512i*)llrRes)[%d + i ], "
+               "((simde__m512i*) bnProcBuf)[%d + i]);\n",
+               (lut_startAddrBnGroups[idxBnGroup] >> 6) + k * cnOffsetInGroup,
+               (lut_startAddrBnGroupsLlr[idxBnGroup] >> 6),
+               (lut_startAddrBnGroups[idxBnGroup] >> 6) + k * cnOffsetInGroup);
+
+       fprintf(fd, "}\n");
         }
     }
 
@@ -184,9 +193,14 @@ fprintf(fd,  "// Process group with 4 CNs \n");
   
           // Loop over BNs
         fprintf(fd,"            for (i=0;i<M;i++) {\n");
-        fprintf(fd,"            ((__m512i*)bnProcBufRes)[%d + i ] = _mm512_subs_epi8(((__m512i*)llrRes)[%d + i ], ((__m512i*) bnProcBuf)[%d + i]);\n",((lut_startAddrBnGroups[idxBnGroup]>>6)+ k*cnOffsetInGroup),(lut_startAddrBnGroupsLlr[idxBnGroup]>>6), ((lut_startAddrBnGroups[idxBnGroup]>>6)+ k*cnOffsetInGroup));
-
-         fprintf(fd,"}\n");
+        fprintf(fd,
+                "            ((simde__m512i*)bnProcBufRes)[%d + i ] = simde_mm512_subs_epi8(((simde__m512i*)llrRes)[%d + i ], "
+                "((simde__m512i*) bnProcBuf)[%d + i]);\n",
+                ((lut_startAddrBnGroups[idxBnGroup] >> 6) + k * cnOffsetInGroup),
+                (lut_startAddrBnGroupsLlr[idxBnGroup] >> 6),
+                ((lut_startAddrBnGroups[idxBnGroup] >> 6) + k * cnOffsetInGroup));
+
+        fprintf(fd, "}\n");
         }
     }
 
@@ -218,10 +232,14 @@ fprintf(fd,  "// Process group with 4 CNs \n");
 
           // Loop over BNs
        fprintf(fd,"            for (i=0;i<M;i++) {\n");
-        fprintf(fd,"            ((__m512i*)bnProcBufRes)[%d + i ] = _mm512_subs_epi8(((__m512i*)llrRes)[%d + i ], ((__m512i*) bnProcBuf)[%d + i]);\n",(lut_startAddrBnGroups[idxBnGroup]>>6)+ k*cnOffsetInGroup,(lut_startAddrBnGroupsLlr[idxBnGroup]>>6), (lut_startAddrBnGroups[idxBnGroup]>>6)+ k*cnOffsetInGroup);
-
-         fprintf(fd,"}\n");
-
+       fprintf(fd,
+               "            ((simde__m512i*)bnProcBufRes)[%d + i ] = simde_mm512_subs_epi8(((simde__m512i*)llrRes)[%d + i ], "
+               "((simde__m512i*) bnProcBuf)[%d + i]);\n",
+               (lut_startAddrBnGroups[idxBnGroup] >> 6) + k * cnOffsetInGroup,
+               (lut_startAddrBnGroupsLlr[idxBnGroup] >> 6),
+               (lut_startAddrBnGroups[idxBnGroup] >> 6) + k * cnOffsetInGroup);
+
+       fprintf(fd, "}\n");
         }
     }
 
@@ -254,10 +272,14 @@ fprintf(fd,  "// Process group with 6 CNs \n");
 
           // Loop over BNs
        fprintf(fd,"            for (i=0;i<M;i++) {\n");
-        fprintf(fd,"            ((__m512i*)bnProcBufRes)[%d + i ] = _mm512_subs_epi8(((__m512i*)llrRes)[%d + i ], ((__m512i*) bnProcBuf)[%d + i]);\n",(lut_startAddrBnGroups[idxBnGroup]>>6)+ k*cnOffsetInGroup,(lut_startAddrBnGroupsLlr[idxBnGroup]>>6), (lut_startAddrBnGroups[idxBnGroup]>>6)+ k*cnOffsetInGroup);
-
-         fprintf(fd,"}\n");
-
+       fprintf(fd,
+               "            ((simde__m512i*)bnProcBufRes)[%d + i ] = simde_mm512_subs_epi8(((simde__m512i*)llrRes)[%d + i ], "
+               "((simde__m512i*) bnProcBuf)[%d + i]);\n",
+               (lut_startAddrBnGroups[idxBnGroup] >> 6) + k * cnOffsetInGroup,
+               (lut_startAddrBnGroupsLlr[idxBnGroup] >> 6),
+               (lut_startAddrBnGroups[idxBnGroup] >> 6) + k * cnOffsetInGroup);
+
+       fprintf(fd, "}\n");
         }
     }
 
@@ -289,10 +311,14 @@ fprintf(fd,  "// Process group with 7 CNs \n");
 
           // Loop over BNs
        fprintf(fd,"            for (i=0;i<M;i++) {\n");
-        fprintf(fd,"            ((__m512i*)bnProcBufRes)[%d + i ] = _mm512_subs_epi8(((__m512i*)llrRes)[%d + i ], ((__m512i*) bnProcBuf)[%d + i]);\n",(lut_startAddrBnGroups[idxBnGroup]>>6)+ k*cnOffsetInGroup,(lut_startAddrBnGroupsLlr[idxBnGroup]>>6), (lut_startAddrBnGroups[idxBnGroup]>>6)+ k*cnOffsetInGroup);
-
-         fprintf(fd,"}\n");
-
+       fprintf(fd,
+               "            ((simde__m512i*)bnProcBufRes)[%d + i ] = simde_mm512_subs_epi8(((simde__m512i*)llrRes)[%d + i ], "
+               "((simde__m512i*) bnProcBuf)[%d + i]);\n",
+               (lut_startAddrBnGroups[idxBnGroup] >> 6) + k * cnOffsetInGroup,
+               (lut_startAddrBnGroupsLlr[idxBnGroup] >> 6),
+               (lut_startAddrBnGroups[idxBnGroup] >> 6) + k * cnOffsetInGroup);
+
+       fprintf(fd, "}\n");
         }
     }
 
@@ -324,10 +350,14 @@ fprintf(fd,  "// Process group with 8 CNs \n");
 
           // Loop over BNs
        fprintf(fd,"            for (i=0;i<M;i++) {\n");
-        fprintf(fd,"            ((__m512i*)bnProcBufRes)[%d + i ] = _mm512_subs_epi8(((__m512i*)llrRes)[%d + i ], ((__m512i*) bnProcBuf)[%d + i]);\n",(lut_startAddrBnGroups[idxBnGroup]>>6)+ k*cnOffsetInGroup,(lut_startAddrBnGroupsLlr[idxBnGroup]>>6), (lut_startAddrBnGroups[idxBnGroup]>>6)+ k*cnOffsetInGroup);
-
-         fprintf(fd,"}\n");
-
+       fprintf(fd,
+               "            ((simde__m512i*)bnProcBufRes)[%d + i ] = simde_mm512_subs_epi8(((simde__m512i*)llrRes)[%d + i ], "
+               "((simde__m512i*) bnProcBuf)[%d + i]);\n",
+               (lut_startAddrBnGroups[idxBnGroup] >> 6) + k * cnOffsetInGroup,
+               (lut_startAddrBnGroupsLlr[idxBnGroup] >> 6),
+               (lut_startAddrBnGroups[idxBnGroup] >> 6) + k * cnOffsetInGroup);
+
+       fprintf(fd, "}\n");
         }
     }
 
@@ -358,10 +388,14 @@ fprintf(fd,  "// Process group with 9 CNs \n");
 
           // Loop over BNs
        fprintf(fd,"            for (i=0;i<M;i++) {\n");
-        fprintf(fd,"            ((__m512i*)bnProcBufRes)[%d + i ] = _mm512_subs_epi8(((__m512i*)llrRes)[%d + i ], ((__m512i*) bnProcBuf)[%d + i]);\n",(lut_startAddrBnGroups[idxBnGroup]>>6)+ k*cnOffsetInGroup,(lut_startAddrBnGroupsLlr[idxBnGroup]>>6), (lut_startAddrBnGroups[idxBnGroup]>>6)+ k*cnOffsetInGroup);
-
-         fprintf(fd,"}\n");
-
+       fprintf(fd,
+               "            ((simde__m512i*)bnProcBufRes)[%d + i ] = simde_mm512_subs_epi8(((simde__m512i*)llrRes)[%d + i ], "
+               "((simde__m512i*) bnProcBuf)[%d + i]);\n",
+               (lut_startAddrBnGroups[idxBnGroup] >> 6) + k * cnOffsetInGroup,
+               (lut_startAddrBnGroupsLlr[idxBnGroup] >> 6),
+               (lut_startAddrBnGroups[idxBnGroup] >> 6) + k * cnOffsetInGroup);
+
+       fprintf(fd, "}\n");
         }
     }
 
@@ -392,10 +426,14 @@ fprintf(fd,  "// Process group with 10 CNs \n");
 
           // Loop over BNs
        fprintf(fd,"            for (i=0;i<M;i++) {\n");
-        fprintf(fd,"            ((__m512i*)bnProcBufRes)[%d + i ] = _mm512_subs_epi8(((__m512i*)llrRes)[%d + i ], ((__m512i*) bnProcBuf)[%d + i]);\n",(lut_startAddrBnGroups[idxBnGroup]>>6)+ k*cnOffsetInGroup,(lut_startAddrBnGroupsLlr[idxBnGroup]>>6), (lut_startAddrBnGroups[idxBnGroup]>>6)+ k*cnOffsetInGroup);
-
-         fprintf(fd,"}\n");
-
+       fprintf(fd,
+               "            ((simde__m512i*)bnProcBufRes)[%d + i ] = simde_mm512_subs_epi8(((simde__m512i*)llrRes)[%d + i ], "
+               "((simde__m512i*) bnProcBuf)[%d + i]);\n",
+               (lut_startAddrBnGroups[idxBnGroup] >> 6) + k * cnOffsetInGroup,
+               (lut_startAddrBnGroupsLlr[idxBnGroup] >> 6),
+               (lut_startAddrBnGroups[idxBnGroup] >> 6) + k * cnOffsetInGroup);
+
+       fprintf(fd, "}\n");
         }
     }
 
@@ -425,10 +463,14 @@ fprintf(fd,  "// Process group with 11 CNs \n");
 
           // Loop over BNs
        fprintf(fd,"            for (i=0;i<M;i++) {\n");
-        fprintf(fd,"            ((__m512i*)bnProcBufRes)[%d + i ] = _mm512_subs_epi8(((__m512i*)llrRes)[%d + i ], ((__m512i*) bnProcBuf)[%d + i]);\n",(lut_startAddrBnGroups[idxBnGroup]>>6)+ k*cnOffsetInGroup,(lut_startAddrBnGroupsLlr[idxBnGroup]>>6), (lut_startAddrBnGroups[idxBnGroup]>>6)+ k*cnOffsetInGroup);
-
-         fprintf(fd,"}\n");
-
+       fprintf(fd,
+               "            ((simde__m512i*)bnProcBufRes)[%d + i ] = simde_mm512_subs_epi8(((simde__m512i*)llrRes)[%d + i ], "
+               "((simde__m512i*) bnProcBuf)[%d + i]);\n",
+               (lut_startAddrBnGroups[idxBnGroup] >> 6) + k * cnOffsetInGroup,
+               (lut_startAddrBnGroupsLlr[idxBnGroup] >> 6),
+               (lut_startAddrBnGroups[idxBnGroup] >> 6) + k * cnOffsetInGroup);
+
+       fprintf(fd, "}\n");
         }
     }
       // =====================================================================
@@ -458,10 +500,14 @@ fprintf(fd,  "// Process group with 12 CNs \n");
 
           // Loop over BNs
        fprintf(fd,"            for (i=0;i<M;i++) {\n");
-        fprintf(fd,"            ((__m512i*)bnProcBufRes)[%d + i ] = _mm512_subs_epi8(((__m512i*)llrRes)[%d + i ], ((__m512i*) bnProcBuf)[%d + i]);\n",(lut_startAddrBnGroups[idxBnGroup]>>6)+ k*cnOffsetInGroup,(lut_startAddrBnGroupsLlr[idxBnGroup]>>6), (lut_startAddrBnGroups[idxBnGroup]>>6)+ k*cnOffsetInGroup);
-
-         fprintf(fd,"}\n");
-
+       fprintf(fd,
+               "            ((simde__m512i*)bnProcBufRes)[%d + i ] = simde_mm512_subs_epi8(((simde__m512i*)llrRes)[%d + i ], "
+               "((simde__m512i*) bnProcBuf)[%d + i]);\n",
+               (lut_startAddrBnGroups[idxBnGroup] >> 6) + k * cnOffsetInGroup,
+               (lut_startAddrBnGroupsLlr[idxBnGroup] >> 6),
+               (lut_startAddrBnGroups[idxBnGroup] >> 6) + k * cnOffsetInGroup);
+
+       fprintf(fd, "}\n");
         }
     }
 
@@ -493,10 +539,14 @@ fprintf(fd,  "// Process group with 13 CNs \n");
 
           // Loop over BNs
        fprintf(fd,"            for (i=0;i<M;i++) {\n");
-        fprintf(fd,"            ((__m512i*)bnProcBufRes)[%d + i ] = _mm512_subs_epi8(((__m512i*)llrRes)[%d + i ], ((__m512i*) bnProcBuf)[%d + i]);\n",(lut_startAddrBnGroups[idxBnGroup]>>6)+ k*cnOffsetInGroup,(lut_startAddrBnGroupsLlr[idxBnGroup]>>6), (lut_startAddrBnGroups[idxBnGroup]>>6)+ k*cnOffsetInGroup);
-
-         fprintf(fd,"}\n");
-
+       fprintf(fd,
+               "            ((simde__m512i*)bnProcBufRes)[%d + i ] = simde_mm512_subs_epi8(((simde__m512i*)llrRes)[%d + i ], "
+               "((simde__m512i*) bnProcBuf)[%d + i]);\n",
+               (lut_startAddrBnGroups[idxBnGroup] >> 6) + k * cnOffsetInGroup,
+               (lut_startAddrBnGroupsLlr[idxBnGroup] >> 6),
+               (lut_startAddrBnGroups[idxBnGroup] >> 6) + k * cnOffsetInGroup);
+
+       fprintf(fd, "}\n");
         }
     }
 
@@ -529,10 +579,14 @@ fprintf(fd,  "// Process group with 14 CNs \n");
 
           // Loop over BNs
        fprintf(fd,"            for (i=0;i<M;i++) {\n");
-        fprintf(fd,"            ((__m512i*)bnProcBufRes)[%d + i ] = _mm512_subs_epi8(((__m512i*)llrRes)[%d + i ], ((__m512i*) bnProcBuf)[%d + i]);\n",(lut_startAddrBnGroups[idxBnGroup]>>6)+ k*cnOffsetInGroup,(lut_startAddrBnGroupsLlr[idxBnGroup]>>6), (lut_startAddrBnGroups[idxBnGroup]>>6)+ k*cnOffsetInGroup);
-
-         fprintf(fd,"}\n");
-
+       fprintf(fd,
+               "            ((simde__m512i*)bnProcBufRes)[%d + i ] = simde_mm512_subs_epi8(((simde__m512i*)llrRes)[%d + i ], "
+               "((simde__m512i*) bnProcBuf)[%d + i]);\n",
+               (lut_startAddrBnGroups[idxBnGroup] >> 6) + k * cnOffsetInGroup,
+               (lut_startAddrBnGroupsLlr[idxBnGroup] >> 6),
+               (lut_startAddrBnGroups[idxBnGroup] >> 6) + k * cnOffsetInGroup);
+
+       fprintf(fd, "}\n");
         }
     }
 
@@ -564,10 +618,14 @@ fprintf(fd,  "// Process group with 15 CNs \n");
 
           // Loop over BNs
        fprintf(fd,"            for (i=0;i<M;i++) {\n");
-        fprintf(fd,"            ((__m512i*)bnProcBufRes)[%d + i ] = _mm512_subs_epi8(((__m512i*)llrRes)[%d + i ], ((__m512i*) bnProcBuf)[%d + i]);\n",(lut_startAddrBnGroups[idxBnGroup]>>6)+ k*cnOffsetInGroup,(lut_startAddrBnGroupsLlr[idxBnGroup]>>6), (lut_startAddrBnGroups[idxBnGroup]>>6)+ k*cnOffsetInGroup);
-
-         fprintf(fd,"}\n");
-
+       fprintf(fd,
+               "            ((simde__m512i*)bnProcBufRes)[%d + i ] = simde_mm512_subs_epi8(((simde__m512i*)llrRes)[%d + i ], "
+               "((simde__m512i*) bnProcBuf)[%d + i]);\n",
+               (lut_startAddrBnGroups[idxBnGroup] >> 6) + k * cnOffsetInGroup,
+               (lut_startAddrBnGroupsLlr[idxBnGroup] >> 6),
+               (lut_startAddrBnGroups[idxBnGroup] >> 6) + k * cnOffsetInGroup);
+
+       fprintf(fd, "}\n");
         }
     }
 
@@ -600,10 +658,14 @@ fprintf(fd,  "// Process group with 16 CNs \n");
 
           // Loop over BNs
        fprintf(fd,"            for (i=0;i<M;i++) {\n");
-        fprintf(fd,"            ((__m512i*)bnProcBufRes)[%d + i ] = _mm512_subs_epi8(((__m512i*)llrRes)[%d + i ], ((__m512i*) bnProcBuf)[%d + i]);\n",(lut_startAddrBnGroups[idxBnGroup]>>6)+ k*cnOffsetInGroup,(lut_startAddrBnGroupsLlr[idxBnGroup]>>6), (lut_startAddrBnGroups[idxBnGroup]>>6)+ k*cnOffsetInGroup);
-
-         fprintf(fd,"}\n");
-
+       fprintf(fd,
+               "            ((simde__m512i*)bnProcBufRes)[%d + i ] = simde_mm512_subs_epi8(((simde__m512i*)llrRes)[%d + i ], "
+               "((simde__m512i*) bnProcBuf)[%d + i]);\n",
+               (lut_startAddrBnGroups[idxBnGroup] >> 6) + k * cnOffsetInGroup,
+               (lut_startAddrBnGroupsLlr[idxBnGroup] >> 6),
+               (lut_startAddrBnGroups[idxBnGroup] >> 6) + k * cnOffsetInGroup);
+
+       fprintf(fd, "}\n");
         }
     }
 
@@ -635,10 +697,14 @@ fprintf(fd,  "// Process group with 17 CNs \n");
 
           // Loop over BNs
        fprintf(fd,"            for (i=0;i<M;i++) {\n");
-        fprintf(fd,"            ((__m512i*)bnProcBufRes)[%d + i ] = _mm512_subs_epi8(((__m512i*)llrRes)[%d + i ], ((__m512i*) bnProcBuf)[%d + i]);\n",(lut_startAddrBnGroups[idxBnGroup]>>6)+ k*cnOffsetInGroup,(lut_startAddrBnGroupsLlr[idxBnGroup]>>6), (lut_startAddrBnGroups[idxBnGroup]>>6)+ k*cnOffsetInGroup);
-
-         fprintf(fd,"}\n");
-
+       fprintf(fd,
+               "            ((simde__m512i*)bnProcBufRes)[%d + i ] = simde_mm512_subs_epi8(((simde__m512i*)llrRes)[%d + i ], "
+               "((simde__m512i*) bnProcBuf)[%d + i]);\n",
+               (lut_startAddrBnGroups[idxBnGroup] >> 6) + k * cnOffsetInGroup,
+               (lut_startAddrBnGroupsLlr[idxBnGroup] >> 6),
+               (lut_startAddrBnGroups[idxBnGroup] >> 6) + k * cnOffsetInGroup);
+
+       fprintf(fd, "}\n");
         }
     }
 
@@ -670,10 +736,14 @@ fprintf(fd,  "// Process group with 18 CNs \n");
 
           // Loop over BNs
        fprintf(fd,"            for (i=0;i<M;i++) {\n");
-        fprintf(fd,"            ((__m512i*)bnProcBufRes)[%d + i ] = _mm512_subs_epi8(((__m512i*)llrRes)[%d + i ], ((__m512i*) bnProcBuf)[%d + i]);\n",(lut_startAddrBnGroups[idxBnGroup]>>6)+ k*cnOffsetInGroup,(lut_startAddrBnGroupsLlr[idxBnGroup]>>6), (lut_startAddrBnGroups[idxBnGroup]>>6)+ k*cnOffsetInGroup);
-
-         fprintf(fd,"}\n");
-
+       fprintf(fd,
+               "            ((simde__m512i*)bnProcBufRes)[%d + i ] = simde_mm512_subs_epi8(((simde__m512i*)llrRes)[%d + i ], "
+               "((simde__m512i*) bnProcBuf)[%d + i]);\n",
+               (lut_startAddrBnGroups[idxBnGroup] >> 6) + k * cnOffsetInGroup,
+               (lut_startAddrBnGroupsLlr[idxBnGroup] >> 6),
+               (lut_startAddrBnGroups[idxBnGroup] >> 6) + k * cnOffsetInGroup);
+
+       fprintf(fd, "}\n");
         }
     }
 
@@ -704,10 +774,14 @@ fprintf(fd,  "// Process group with 19 CNs \n");
 
           // Loop over BNs
        fprintf(fd,"            for (i=0;i<M;i++) {\n");
-        fprintf(fd,"            ((__m512i*)bnProcBufRes)[%d + i ] = _mm512_subs_epi8(((__m512i*)llrRes)[%d + i ], ((__m512i*) bnProcBuf)[%d + i]);\n",(lut_startAddrBnGroups[idxBnGroup]>>6)+ k*cnOffsetInGroup,(lut_startAddrBnGroupsLlr[idxBnGroup]>>6), (lut_startAddrBnGroups[idxBnGroup]>>6)+ k*cnOffsetInGroup);
-
-         fprintf(fd,"}\n");
-
+       fprintf(fd,
+               "            ((simde__m512i*)bnProcBufRes)[%d + i ] = simde_mm512_subs_epi8(((simde__m512i*)llrRes)[%d + i ], "
+               "((simde__m512i*) bnProcBuf)[%d + i]);\n",
+               (lut_startAddrBnGroups[idxBnGroup] >> 6) + k * cnOffsetInGroup,
+               (lut_startAddrBnGroupsLlr[idxBnGroup] >> 6),
+               (lut_startAddrBnGroups[idxBnGroup] >> 6) + k * cnOffsetInGroup);
+
+       fprintf(fd, "}\n");
         }
     }
 
@@ -739,10 +813,14 @@ fprintf(fd,  "// Process group with 20 CNs \n");
 
           // Loop over BNs
        fprintf(fd,"            for (i=0;i<M;i++) {\n");
-        fprintf(fd,"            ((__m512i*)bnProcBufRes)[%d + i ] = _mm512_subs_epi8(((__m512i*)llrRes)[%d + i ], ((__m512i*) bnProcBuf)[%d + i]);\n",(lut_startAddrBnGroups[idxBnGroup]>>6)+ k*cnOffsetInGroup,(lut_startAddrBnGroupsLlr[idxBnGroup]>>6), (lut_startAddrBnGroups[idxBnGroup]>>6)+ k*cnOffsetInGroup);
-
-         fprintf(fd,"}\n");
-
+       fprintf(fd,
+               "            ((simde__m512i*)bnProcBufRes)[%d + i ] = simde_mm512_subs_epi8(((simde__m512i*)llrRes)[%d + i ], "
+               "((simde__m512i*) bnProcBuf)[%d + i]);\n",
+               (lut_startAddrBnGroups[idxBnGroup] >> 6) + k * cnOffsetInGroup,
+               (lut_startAddrBnGroupsLlr[idxBnGroup] >> 6),
+               (lut_startAddrBnGroups[idxBnGroup] >> 6) + k * cnOffsetInGroup);
+
+       fprintf(fd, "}\n");
         }
     }
 
@@ -778,10 +856,14 @@ fprintf(fd,  "// Process group with 21 CNs \n");
 
           // Loop over BNs
        fprintf(fd,"            for (i=0;i<M;i++) {\n");
-        fprintf(fd,"            ((__m512i*)bnProcBufRes)[%d + i ] = _mm512_subs_epi8(((__m512i*)llrRes)[%d + i ], ((__m512i*) bnProcBuf)[%d + i]);\n",(lut_startAddrBnGroups[idxBnGroup]>>6)+ k*cnOffsetInGroup,(lut_startAddrBnGroupsLlr[idxBnGroup]>>6), (lut_startAddrBnGroups[idxBnGroup]>>6)+ k*cnOffsetInGroup);
-
-         fprintf(fd,"}\n");
-
+       fprintf(fd,
+               "            ((simde__m512i*)bnProcBufRes)[%d + i ] = simde_mm512_subs_epi8(((simde__m512i*)llrRes)[%d + i ], "
+               "((simde__m512i*) bnProcBuf)[%d + i]);\n",
+               (lut_startAddrBnGroups[idxBnGroup] >> 6) + k * cnOffsetInGroup,
+               (lut_startAddrBnGroupsLlr[idxBnGroup] >> 6),
+               (lut_startAddrBnGroups[idxBnGroup] >> 6) + k * cnOffsetInGroup);
+
+       fprintf(fd, "}\n");
         }
     }
       // =====================================================================
@@ -811,10 +893,14 @@ fprintf(fd,  "// Process group with 22 CNs \n");
 
           // Loop over BNs
        fprintf(fd,"            for (i=0;i<M;i++) {\n");
-        fprintf(fd,"            ((__m512i*)bnProcBufRes)[%d + i ] = _mm512_subs_epi8(((__m512i*)llrRes)[%d + i ], ((__m512i*) bnProcBuf)[%d + i]);\n",(lut_startAddrBnGroups[idxBnGroup]>>6)+ k*cnOffsetInGroup,(lut_startAddrBnGroupsLlr[idxBnGroup]>>6), (lut_startAddrBnGroups[idxBnGroup]>>6)+ k*cnOffsetInGroup);
-
-         fprintf(fd,"}\n");
-
+       fprintf(fd,
+               "            ((simde__m512i*)bnProcBufRes)[%d + i ] = simde_mm512_subs_epi8(((simde__m512i*)llrRes)[%d + i ], "
+               "((simde__m512i*) bnProcBuf)[%d + i]);\n",
+               (lut_startAddrBnGroups[idxBnGroup] >> 6) + k * cnOffsetInGroup,
+               (lut_startAddrBnGroupsLlr[idxBnGroup] >> 6),
+               (lut_startAddrBnGroups[idxBnGroup] >> 6) + k * cnOffsetInGroup);
+
+       fprintf(fd, "}\n");
         }
     }
 
@@ -846,10 +932,14 @@ fprintf(fd,  "// Process group with <23 CNs \n");
 
           // Loop over BNs
        fprintf(fd,"            for (i=0;i<M;i++) {\n");
-        fprintf(fd,"            ((__m512i*)bnProcBufRes)[%d + i ] = _mm512_subs_epi8(((__m512i*)llrRes)[%d + i ], ((__m512i*) bnProcBuf)[%d + i]);\n",(lut_startAddrBnGroups[idxBnGroup]>>6)+ k*cnOffsetInGroup,(lut_startAddrBnGroupsLlr[idxBnGroup]>>6), (lut_startAddrBnGroups[idxBnGroup]>>6)+ k*cnOffsetInGroup);
-
-         fprintf(fd,"}\n");
-
+       fprintf(fd,
+               "            ((simde__m512i*)bnProcBufRes)[%d + i ] = simde_mm512_subs_epi8(((simde__m512i*)llrRes)[%d + i ], "
+               "((simde__m512i*) bnProcBuf)[%d + i]);\n",
+               (lut_startAddrBnGroups[idxBnGroup] >> 6) + k * cnOffsetInGroup,
+               (lut_startAddrBnGroupsLlr[idxBnGroup] >> 6),
+               (lut_startAddrBnGroups[idxBnGroup] >> 6) + k * cnOffsetInGroup);
+
+       fprintf(fd, "}\n");
         }
     }
 
@@ -882,10 +972,14 @@ fprintf(fd,  "// Process group with 24 CNs \n");
 
           // Loop over BNs
        fprintf(fd,"            for (i=0;i<M;i++) {\n");
-        fprintf(fd,"            ((__m512i*)bnProcBufRes)[%d + i ] = _mm512_subs_epi8(((__m512i*)llrRes)[%d + i ], ((__m512i*) bnProcBuf)[%d + i]);\n",(lut_startAddrBnGroups[idxBnGroup]>>6)+ k*cnOffsetInGroup,(lut_startAddrBnGroupsLlr[idxBnGroup]>>6), (lut_startAddrBnGroups[idxBnGroup]>>6)+ k*cnOffsetInGroup);
-
-         fprintf(fd,"}\n");
-
+       fprintf(fd,
+               "            ((simde__m512i*)bnProcBufRes)[%d + i ] = simde_mm512_subs_epi8(((simde__m512i*)llrRes)[%d + i ], "
+               "((simde__m512i*) bnProcBuf)[%d + i]);\n",
+               (lut_startAddrBnGroups[idxBnGroup] >> 6) + k * cnOffsetInGroup,
+               (lut_startAddrBnGroupsLlr[idxBnGroup] >> 6),
+               (lut_startAddrBnGroups[idxBnGroup] >> 6) + k * cnOffsetInGroup);
+
+       fprintf(fd, "}\n");
         }
     }
 
@@ -917,11 +1011,14 @@ fprintf(fd,  "// Process group with 25 CNs \n");
 
           // Loop over BNs
        fprintf(fd,"            for (i=0;i<M;i++) {\n");
-        fprintf(fd,"            ((__m512i*)bnProcBufRes)[%d + i ] = _mm512_subs_epi8(((__m512i*)llrRes)[%d + i ], ((__m512i*) bnProcBuf)[%d + i]);\n",(lut_startAddrBnGroups[idxBnGroup]>>6)+ k*cnOffsetInGroup,(lut_startAddrBnGroupsLlr[idxBnGroup]>>6), (lut_startAddrBnGroups[idxBnGroup]>>6)+ k*cnOffsetInGroup);
-
-
-         fprintf(fd,"}\n");
-
+       fprintf(fd,
+               "            ((simde__m512i*)bnProcBufRes)[%d + i ] = simde_mm512_subs_epi8(((simde__m512i*)llrRes)[%d + i ], "
+               "((simde__m512i*) bnProcBuf)[%d + i]);\n",
+               (lut_startAddrBnGroups[idxBnGroup] >> 6) + k * cnOffsetInGroup,
+               (lut_startAddrBnGroupsLlr[idxBnGroup] >> 6),
+               (lut_startAddrBnGroups[idxBnGroup] >> 6) + k * cnOffsetInGroup);
+
+       fprintf(fd, "}\n");
         }
     }
 
@@ -954,10 +1051,14 @@ fprintf(fd,  "// Process group with 26 CNs \n");
 
           // Loop over BNs
        fprintf(fd,"            for (i=0;i<M;i++) {\n");
-        fprintf(fd,"            ((__m512i*)bnProcBufRes)[%d + i ] = _mm512_subs_epi8(((__m512i*)llrRes)[%d + i ], ((__m512i*) bnProcBuf)[%d + i]);\n",(lut_startAddrBnGroups[idxBnGroup]>>6)+ k*cnOffsetInGroup,(lut_startAddrBnGroupsLlr[idxBnGroup]>>6), (lut_startAddrBnGroups[idxBnGroup]>>6)+ k*cnOffsetInGroup);
-
-         fprintf(fd,"}\n");
-
+       fprintf(fd,
+               "            ((simde__m512i*)bnProcBufRes)[%d + i ] = simde_mm512_subs_epi8(((simde__m512i*)llrRes)[%d + i ], "
+               "((simde__m512i*) bnProcBuf)[%d + i]);\n",
+               (lut_startAddrBnGroups[idxBnGroup] >> 6) + k * cnOffsetInGroup,
+               (lut_startAddrBnGroupsLlr[idxBnGroup] >> 6),
+               (lut_startAddrBnGroups[idxBnGroup] >> 6) + k * cnOffsetInGroup);
+
+       fprintf(fd, "}\n");
         }
     }
 
@@ -989,10 +1090,14 @@ fprintf(fd,  "// Process group with 27 CNs \n");
 
           // Loop over BNs
        fprintf(fd,"            for (i=0;i<M;i++) {\n");
-        fprintf(fd,"            ((__m512i*)bnProcBufRes)[%d + i ] = _mm512_subs_epi8(((__m512i*)llrRes)[%d + i ], ((__m512i*) bnProcBuf)[%d + i]);\n",(lut_startAddrBnGroups[idxBnGroup]>>6)+ k*cnOffsetInGroup,(lut_startAddrBnGroupsLlr[idxBnGroup]>>6), (lut_startAddrBnGroups[idxBnGroup]>>6)+ k*cnOffsetInGroup);
-
-         fprintf(fd,"}\n");
-
+       fprintf(fd,
+               "            ((simde__m512i*)bnProcBufRes)[%d + i ] = simde_mm512_subs_epi8(((simde__m512i*)llrRes)[%d + i ], "
+               "((simde__m512i*) bnProcBuf)[%d + i]);\n",
+               (lut_startAddrBnGroups[idxBnGroup] >> 6) + k * cnOffsetInGroup,
+               (lut_startAddrBnGroupsLlr[idxBnGroup] >> 6),
+               (lut_startAddrBnGroups[idxBnGroup] >> 6) + k * cnOffsetInGroup);
+
+       fprintf(fd, "}\n");
         }
     }
 
@@ -1024,10 +1129,14 @@ fprintf(fd,  "// Process group with 28 CNs \n");
 
           // Loop over BNs
        fprintf(fd,"            for (i=0;i<M;i++) {\n");
-        fprintf(fd,"            ((__m512i*)bnProcBufRes)[%d + i ] = _mm512_subs_epi8(((__m512i*)llrRes)[%d + i ], ((__m512i*) bnProcBuf)[%d + i]);\n",(lut_startAddrBnGroups[idxBnGroup]>>6)+ k*cnOffsetInGroup,(lut_startAddrBnGroupsLlr[idxBnGroup]>>6), (lut_startAddrBnGroups[idxBnGroup]>>6)+ k*cnOffsetInGroup);
-
-         fprintf(fd,"}\n");
-
+       fprintf(fd,
+               "            ((simde__m512i*)bnProcBufRes)[%d + i ] = simde_mm512_subs_epi8(((simde__m512i*)llrRes)[%d + i ], "
+               "((simde__m512i*) bnProcBuf)[%d + i]);\n",
+               (lut_startAddrBnGroups[idxBnGroup] >> 6) + k * cnOffsetInGroup,
+               (lut_startAddrBnGroupsLlr[idxBnGroup] >> 6),
+               (lut_startAddrBnGroups[idxBnGroup] >> 6) + k * cnOffsetInGroup);
+
+       fprintf(fd, "}\n");
         }
     }
 
@@ -1057,10 +1166,14 @@ fprintf(fd,  "// Process group with 29 CNs \n");
 
           // Loop over BNs
        fprintf(fd,"            for (i=0;i<M;i++) {\n");
-        fprintf(fd,"            ((__m512i*)bnProcBufRes)[%d + i ] = _mm512_subs_epi8(((__m512i*)llrRes)[%d + i ], ((__m512i*) bnProcBuf)[%d + i]);\n",(lut_startAddrBnGroups[idxBnGroup]>>6)+ k*cnOffsetInGroup,(lut_startAddrBnGroupsLlr[idxBnGroup]>>6), (lut_startAddrBnGroups[idxBnGroup]>>6)+ k*cnOffsetInGroup);
-
-         fprintf(fd,"}\n");
-
+       fprintf(fd,
+               "            ((simde__m512i*)bnProcBufRes)[%d + i ] = simde_mm512_subs_epi8(((simde__m512i*)llrRes)[%d + i ], "
+               "((simde__m512i*) bnProcBuf)[%d + i]);\n",
+               (lut_startAddrBnGroups[idxBnGroup] >> 6) + k * cnOffsetInGroup,
+               (lut_startAddrBnGroupsLlr[idxBnGroup] >> 6),
+               (lut_startAddrBnGroups[idxBnGroup] >> 6) + k * cnOffsetInGroup);
+
+       fprintf(fd, "}\n");
         }
     }
 
@@ -1091,10 +1204,14 @@ fprintf(fd,  "// Process group with 30 CNs \n");
 
           // Loop over BNs
        fprintf(fd,"            for (i=0;i<M;i++) {\n");
-        fprintf(fd,"            ((__m512i*)bnProcBufRes)[%d + i ] = _mm512_subs_epi8(((__m512i*)llrRes)[%d + i ], ((__m512i*) bnProcBuf)[%d + i]);\n",(lut_startAddrBnGroups[idxBnGroup]>>6)+ k*cnOffsetInGroup,(lut_startAddrBnGroupsLlr[idxBnGroup]>>6), (lut_startAddrBnGroups[idxBnGroup]>>6)+ k*cnOffsetInGroup);
-
-         fprintf(fd,"}\n");
-
+       fprintf(fd,
+               "            ((simde__m512i*)bnProcBufRes)[%d + i ] = simde_mm512_subs_epi8(((simde__m512i*)llrRes)[%d + i ], "
+               "((simde__m512i*) bnProcBuf)[%d + i]);\n",
+               (lut_startAddrBnGroups[idxBnGroup] >> 6) + k * cnOffsetInGroup,
+               (lut_startAddrBnGroupsLlr[idxBnGroup] >> 6),
+               (lut_startAddrBnGroups[idxBnGroup] >> 6) + k * cnOffsetInGroup);
+
+       fprintf(fd, "}\n");
         }
     }
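All of the hunks above apply the same mechanical change: the generated BN-processing line is emitted with SIMDE types and calls instead of raw AVX-512 intrinsics, so the resulting header also builds on non-x86 hosts through SIMDE emulation. As a minimal sketch of what each regenerated line computes (the real generator hardcodes the three vector offsets from lut_startAddrBnGroups/lut_startAddrBnGroupsLlr shifted right by 6; the parameterized form below is illustrative only):

    #include <stdint.h>
    #include <simde/x86/avx512.h>

    /* bnProcBufRes = llrRes - bnProcBuf, 64 int8 lanes at a time, with
     * signed saturation; the offsets are simde__m512i indices, i.e.
     * byte offsets shifted right by 6. */
    static inline void bn_res_update(int8_t *bnProcBufRes, const int8_t *llrRes,
                                     const int8_t *bnProcBuf, uint32_t M,
                                     int resOff, int llrOff, int bufOff)
    {
      for (uint32_t i = 0; i < M; i++)
        ((simde__m512i *)bnProcBufRes)[resOff + i] =
            simde_mm512_subs_epi8(((simde__m512i *)llrRes)[llrOff + i],
                                  ((simde__m512i *)bnProcBuf)[bufOff + i]);
    }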
 
diff --git a/openair1/PHY/CODING/nrLDPC_decoder/nrLDPC_tools/generator_cnProc/CMakeLists.txt b/openair1/PHY/CODING/nrLDPC_decoder/nrLDPC_tools/generator_cnProc/CMakeLists.txt
index d78dfc4436c88de79fd9736251579bceefc5eee9..a54349c61dbd826e0258ce75c02d05a602bd4135 100644
--- a/openair1/PHY/CODING/nrLDPC_decoder/nrLDPC_tools/generator_cnProc/CMakeLists.txt
+++ b/openair1/PHY/CODING/nrLDPC_decoder/nrLDPC_tools/generator_cnProc/CMakeLists.txt
@@ -2,7 +2,12 @@ add_executable(cnProc_gen_avx2
                cnProc_gen_BG1_avx2.c
                cnProc_gen_BG2_avx2.c
                main.c)
-target_compile_options(cnProc_gen_avx2 PRIVATE -W -Wall -mavx2)
+add_executable(cnProc_gen_128
+               cnProc_gen_BG1_128.c
+               cnProc_gen_BG2_128.c
+               main128.c)
+target_compile_options(cnProc_gen_avx2 PRIVATE -W -Wall)
+target_compile_options(cnProc_gen_128 PRIVATE -W -Wall)
 
 #set(cnProc_headers
 #    cnProc/rLDPC_cnProc_BG1_R13_AVX2.h
@@ -20,6 +25,17 @@ add_custom_command(TARGET cnProc_gen_avx2 POST_BUILD
   COMMENT "Generating LDPC cnProc header files for AVX2"
 )
 
+add_custom_command(TARGET cnProc_gen_128 POST_BUILD
+  #OUTPUT ${cnProc_headers}
+  COMMAND ${CMAKE_COMMAND} -E make_directory cnProc128
+  COMMAND cnProc_gen_128 .
+  DEPENDS cnProc_gen_128
+  COMMENT "Generating LDPC cnProc header files for 128-bit SIMD"
+)
 add_library(cnProc_gen_avx2_HEADERS INTERFACE)
 target_include_directories(cnProc_gen_avx2_HEADERS INTERFACE ${CMAKE_CURRENT_BINARY_DIR})
 add_dependencies(cnProc_gen_avx2_HEADERS cnProc_gen_avx2)
+
+add_library(cnProc_gen_128_HEADERS INTERFACE)
+target_include_directories(cnProc_gen_128_HEADERS INTERFACE ${CMAKE_CURRENT_BINARY_DIR})
+add_dependencies(cnProc_gen_128_HEADERS cnProc_gen_128)
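main128.c is referenced above but not shown in this patch; a hedged sketch of what it plausibly contains, given that the custom command invokes `cnProc_gen_128 .` and the generator below takes an output directory plus a rate index 0..2 (the BG2 entry-point name is assumed by analogy with the BG1 one and may differ):

    #include <stdio.h>

    /* Assumed prototypes matching cnProc_gen_BG1_128.c below; the BG2
     * generator name is a guess, not confirmed by this patch. */
    void nrLDPC_cnProc_BG1_generator_128(const char *dir, int R);
    void nrLDPC_cnProc_BG2_generator_128(const char *dir, int R);

    int main(int argc, char *argv[])
    {
      const char *dir = (argc > 1) ? argv[1] : ".";
      for (int R = 0; R < 3; R++) {
        nrLDPC_cnProc_BG1_generator_128(dir, R);
        nrLDPC_cnProc_BG2_generator_128(dir, R);
      }
      return 0;
    }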
diff --git a/openair1/PHY/CODING/nrLDPC_decoder/nrLDPC_tools/generator_cnProc/cnProc_gen_BG1_128.c b/openair1/PHY/CODING/nrLDPC_decoder/nrLDPC_tools/generator_cnProc/cnProc_gen_BG1_128.c
new file mode 100644
index 0000000000000000000000000000000000000000..6fb3aae1b981c189cedea09099ae9f7c918d0a3a
--- /dev/null
+++ b/openair1/PHY/CODING/nrLDPC_decoder/nrLDPC_tools/generator_cnProc/cnProc_gen_BG1_128.c
@@ -0,0 +1,785 @@
+/*
+ * Licensed to the OpenAirInterface (OAI) Software Alliance under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The OpenAirInterface Software Alliance licenses this file to You under
+ * the OAI Public License, Version 1.1  (the "License"); you may not use this file
+ * except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.openairinterface.org/?page_id=698
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *-------------------------------------------------------------------------------
+ * For more information about the OpenAirInterface (OAI) Software Alliance:
+ *      contact@openairinterface.org
+ */
+
+#include <stdlib.h>
+#include <stdio.h>
+#include <stdint.h>
+#include "../../nrLDPCdecoder_defs.h"
+
+#define AVOID_MM256_SIGN 1
+#define DROP_MAXLLR 1
+void nrLDPC_cnProc_BG1_generator_128(const char* dir, int R)
+{
+  const char *ratestr[3]={"13","23","89"};
+
+  if (R<0 || R>2) {printf("Illegal R %d\n",R); abort();}
+
+
+ // system("mkdir -p ../ldpc_gen_files");
+
+  char fname[FILENAME_MAX+1];
+  snprintf(fname, sizeof(fname), "%s/cnProc128/nrLDPC_cnProc_BG1_R%s_128.h", dir, ratestr[R]);
+  FILE *fd=fopen(fname,"w");
+  if (fd == NULL) {
+    printf("Cannot create file %s\n", fname);
+    abort();
+  }
+
+  fprintf(fd,"#include <stdint.h>\n");
+  fprintf(fd,"#include \"PHY/sse_intrin.h\"\n");
+
+
+  fprintf(fd,"static inline void nrLDPC_cnProc_BG1_R%s_128(int8_t* cnProcBuf, int8_t* cnProcBufRes, uint16_t Z) {\n",ratestr[R]);
+
+  const uint8_t*  lut_numCnInCnGroups;
+  const uint32_t* lut_startAddrCnGroups = lut_startAddrCnGroups_BG1;
+
+  if (R==0)      lut_numCnInCnGroups = lut_numCnInCnGroups_BG1_R13;
+  else if (R==1) lut_numCnInCnGroups = lut_numCnInCnGroups_BG1_R23;
+  else if (R==2) lut_numCnInCnGroups = lut_numCnInCnGroups_BG1_R89;
+  else { printf("aborting, illegal R %d\n",R); fclose(fd);abort();}
+
+
+
+  //simde__m128i* p_cnProcBuf;
+  //simde__m128i* p_cnProcBufRes;
+
+  // Number of CNs in Groups
+  //uint32_t M;
+  uint32_t j;
+  uint32_t k;
+  // Offset to each bit within a group in terms of 16 Byte
+  uint32_t bitOffsetInGroup;
+
+  //simde__m128i ymm0, min, sgn;
+  //simde__m128i* p_cnProcBufResBit;
+
+  // const simde__m128i* p_ones   = (simde__m128i*) ones256_epi8;
+  // const simde__m128i* p_maxLLR = (simde__m128i*) maxLLR256_epi8;
+
+  // LUT with offsets for bits that need to be processed
+  // Processing bit 1 requires the LLRs of bits 2 and 3, bit 2 those of bits 1 and 3, etc.
+  // Offsets are in units of bitOffsetInGroup (1*384/32)
+  //    const uint8_t lut_idxCnProcG3[3][2] = {{12,24}, {0,24}, {0,12}};
+
+  // =====================================================================
+  // Process group with 3 BNs
+  fprintf(fd,"//Process group with 3 BNs\n");
+  // LUT with offsets for bits that need to be processed
+  // Processing bit 1 requires the LLRs of bits 2 and 3, bit 2 those of bits 1 and 3, etc.
+  // Offsets are in units of bitOffsetInGroup (1*384/32)
+  const uint8_t lut_idxCnProcG3[3][2] = {{12,24}, {0,24}, {0,12}};
+#ifndef DROP_MAXLLR
+  fprintf(fd,"                simde__m128i ymm0, min, sgn,ones,maxLLR;\n");
+#else
+  fprintf(fd,"                simde__m128i ymm0, min, sgn,ones;\n");
+#endif  
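+  /* Editorial note on DROP_MAXLLR: with it defined, the generator no longer
+   * emits the final clamp
+   *     min = simde_mm_min_epu8(min, maxLLR);   // maxLLR = 127 per lane
+   * That clamp only matters for the input value -128, whose
+   * simde_mm_abs_epi8() result stays 0x80 (128 when read as epu8);
+   * dropping it assumes upstream code keeps LLRs within [-127, 127]. */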
+  fprintf(fd,"                ones   = simde_mm_set1_epi8((int8_t)1);\n");
+
+  fprintf(fd,"                uint32_t  M;\n");
+
+  if (lut_numCnInCnGroups[0] > 0)
+    {
+      // Number of groups of 16 CNs for parallel processing
+      // Ceil for values not divisible by 16
+       fprintf(fd," M = (%d*Z + 15)>>4;\n",lut_numCnInCnGroups[0] );
+
+      // Set the offset to each bit within a group in terms of 16 Byte
+      bitOffsetInGroup = (lut_numCnInCnGroups_BG1_R13[0]*NR_LDPC_ZMAX)>>4;
+
+
+      // Set pointers to start of group 3
+      //p_cnProcBuf    = (simde__m128i*) &cnProcBuf   [lut_startAddrCnGroups[0]];
+      //p_cnProcBufRes = (simde__m128i*) &cnProcBufRes[lut_startAddrCnGroups[0]];
+
+      // Loop over every BN
+      
+      for (j=0; j<3; j++)
+        {
+	  // Set results pointer to the correct BN address
+	  //p_cnProcBufResBit = p_cnProcBufRes + (j*bitOffsetInGroup);
+
+	  // Loop over CNs
+	  //	  for (i=0; i<M; i++,iprime++)
+	  //            {
+	  
+	  fprintf(fd,"            for (int i=0;i<M;i++) {\n");
+	  // Abs and sign of 16 CNs (first BN)
+	  //                ymm0 = p_cnProcBuf[lut_idxCnProcG3[j][0] + i];
+	  fprintf(fd,"                ymm0 = ((simde__m128i*)cnProcBuf)[%d+i];\n",(lut_startAddrCnGroups[0]>>4)+lut_idxCnProcG3[j][0]*2);
+	  //                sgn  = simde_mm_sign_epi8(ones, ymm0);
+#ifndef AVOID_MM256_SIGN
+	  fprintf(fd,"                sgn  = simde_mm_sign_epi8(ones, ymm0);\n");
+#else
+	  fprintf(fd,"                sgn  = simde_mm_xor_si128(ones, ymm0);\n");
+#endif	  
+	  //                min  = simde_mm_abs_epi8(ymm0);
+	  fprintf(fd,"                min  = simde_mm_abs_epi8(ymm0);\n");
+	  
+	  // 16 CNs of second BN
+	  //                ymm0 = p_cnProcBuf[lut_idxCnProcG3[j][1] + i];
+	  fprintf(fd,"                ymm0 = ((simde__m128i*)cnProcBuf)[%d+i];\n",(lut_startAddrCnGroups[0]>>4)+lut_idxCnProcG3[j][1]*2);
+	  
+	  //                min  = simde_mm_min_epu8(min, simde_mm_abs_epi8(ymm0));
+	  fprintf(fd,"                min  = simde_mm_min_epu8(min, simde_mm_abs_epi8(ymm0));\n");
+	  
+	  //                sgn  = simde_mm_sign_epi8(sgn, ymm0);
+#ifndef AVOID_MM256_SIGN
+	  fprintf(fd,"                sgn  = simde_mm_sign_epi8(sgn, ymm0);\n");
+#else
+	  fprintf(fd,"                sgn  = simde_mm_xor_si128(sgn, ymm0);\n");
+#endif
+	  // Store result
+	  //                min = simde_mm_min_epu8(min, maxLLR); // 128 in epi8 is -127
+#ifndef DROP_MAXLLR
+	  fprintf(fd,"                min = simde_mm_min_epu8(min, maxLLR);\n");
+#endif
+	  //                *p_cnProcBufResBit = simde_mm_sign_epi8(min, sgn);
+	  //                p_cnProcBufResBit++;
+	  fprintf(fd,"                ((simde__m128i*)cnProcBufRes)[%d+i] = simde_mm_sign_epi8(min, sgn);\n",(lut_startAddrCnGroups[0]>>4)+(j*bitOffsetInGroup));
+	  fprintf(fd,"            }\n");
+        }
+    }
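+  /* Editorial sketch of what the emitted group-3 loop computes: the min-sum
+   * check-node update, i.e. per edge, the product of the signs of the other
+   * LLRs times the minimum of their magnitudes. With AVOID_MM256_SIGN the
+   * sign product is accumulated by XOR-ing the raw bytes into sgn (only the
+   * resulting sign bit per lane matters to the final
+   * simde_mm_sign_epi8(min, sgn)) instead of chaining sign instructions.
+   * Scalar equivalent, assuming int8_t LLRs:
+   *
+   *   int8_t cn_update(const int8_t *llr, int deg, int skip) {
+   *     int sign = 1, min = 127;
+   *     for (int m = 0; m < deg; m++) {
+   *       if (m == skip) continue;        // extrinsic: exclude own edge
+   *       int v = llr[m];
+   *       if (v < 0) { sign = -sign; v = -v; }
+   *       if (v < min) min = v;
+   *     }
+   *     return (int8_t)(sign * min);
+   *   }
+   */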
+  // =====================================================================
+  // Process group with 4 BNs
+  fprintf(fd,"//Process group with 4 BNs\n");
+  // Offset is 5*384/32 = 60
+  const uint8_t lut_idxCnProcG4[4][3] = {{60,120,180}, {0,120,180}, {0,60,180}, {0,60,120}};
+
+  if (lut_numCnInCnGroups[1] > 0)
+    {
+      // Number of groups of 16 CNs for parallel processing
+      // Ceil for values not divisible by 16
+      fprintf(fd," M = (%d*Z + 15)>>4;\n",lut_numCnInCnGroups[1] );
+
+      // Set the offset to each bit within a group in terms of 16 Byte
+      bitOffsetInGroup = (lut_numCnInCnGroups_BG1_R13[1]*NR_LDPC_ZMAX)>>4;
+
+
+      // Set pointers to start of group 4
+      //p_cnProcBuf    = (simde__m128i*) &cnProcBuf   [lut_startAddrCnGroups[1]];
+      //p_cnProcBufRes = (simde__m128i*) &cnProcBufRes[lut_startAddrCnGroups[1]];
+
+      // Loop over every BN
+      
+      for (j=0; j<4; j++)
+        {
+	  // Set results pointer to the correct BN address
+	  //p_cnProcBufResBit = p_cnProcBufRes + (j*bitOffsetInGroup);
+
+	  // Loop over CNs
+	  //	  for (i=0; i<M; i++,iprime++)
+	  //            {
+	  fprintf(fd,"            for (int i=0;i<M;i++) {\n");
+	  // Abs and sign of 16 CNs (first BN)
+	  //                ymm0 = p_cnProcBuf[lut_idxCnProcG4[j][0] + i];
+	  fprintf(fd,"                ymm0 = ((simde__m128i*)cnProcBuf)[%d+i];\n",(lut_startAddrCnGroups[1]>>4)+lut_idxCnProcG4[j][0]*2);
+	  //                sgn  = simde_mm_sign_epi8(ones, ymm0);
+#ifndef AVOID_MM256_SIGN
+	  fprintf(fd,"                sgn  = simde_mm_sign_epi8(ones, ymm0);\n");
+#else
+	  fprintf(fd,"                sgn  = simde_mm_xor_si128(ones, ymm0);\n");
+#endif	  
+	  //                min  = simde_mm_abs_epi8(ymm0);
+	  fprintf(fd,"                min  = simde_mm_abs_epi8(ymm0);\n");
+	  
+	  
+	  // Loop over BNs
+	  for (k=1; k<3; k++)
+	    {
+	      fprintf(fd,"                ymm0 = ((simde__m128i*)cnProcBuf)[%d+i];\n",(lut_startAddrCnGroups[1]>>4)+lut_idxCnProcG4[j][k]*2);
+	      
+	      //                min  = simde_mm_min_epu8(min, simde_mm_abs_epi8(ymm0));
+	      fprintf(fd,"                min  = simde_mm_min_epu8(min, simde_mm_abs_epi8(ymm0));\n");
+	      
+	      //                sgn  = simde_mm_sign_epi8(sgn, ymm0);
+#ifndef AVOID_MM256_SIGN
+  	      fprintf(fd,"                sgn  = simde_mm_sign_epi8(sgn, ymm0);\n");
+#else
+	      fprintf(fd,"                sgn  = simde_mm_xor_si128(sgn, ymm0);\n");
+#endif
+	    }
+	  
+	  // Store result
+	  //                min = simde_mm_min_epu8(min, maxLLR); // 128 in epi8 is -127
+#ifndef DROP_MAXLLR
+	  fprintf(fd,"                min = simde_mm_min_epu8(min, maxLLR);\n");
+#endif	  
+	  //                *p_cnProcBufResBit = simde_mm_sign_epi8(min, sgn);
+	      //                p_cnProcBufResBit++;
+	  fprintf(fd,"                ((simde__m128i*)cnProcBufRes)[%d+i] = simde_mm_sign_epi8(min, sgn);\n",(lut_startAddrCnGroups[1]>>4)+(j*bitOffsetInGroup));
+	  fprintf(fd,"            }\n");
+        }
+    }
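+  /* Editorial note on the address arithmetic used in every group below:
+   * lut_startAddrCnGroups[] holds byte offsets, so >>4 turns them into
+   * simde__m128i indices (the AVX2 generator uses >>5 for simde__m256i).
+   * The per-bit LUTs (lut_idxCnProcG4 etc.) are kept in the AVX2
+   * generator's 32-byte units, hence the *2 when converting them to
+   * 16-byte lanes:
+   *   idx = (lut_startAddrCnGroups[g] >> 4) + lutOffset * 2 + i;
+   */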
+
+
+  // =====================================================================
+  // Process group with 5 BNs
+  fprintf(fd,"//Process group with 5 BNs\n");
+  // Offset is 18*384/32 = 216
+  const uint16_t lut_idxCnProcG5[5][4] = {{216,432,648,864}, {0,432,648,864},
+					  {0,216,648,864}, {0,216,432,864}, {0,216,432,648}};
+
+
+  if (lut_numCnInCnGroups[2] > 0)
+    {
+      // Number of groups of 16 CNs for parallel processing
+      // Ceil for values not divisible by 16
+      fprintf(fd," M = (%d*Z + 15)>>4;\n",lut_numCnInCnGroups[2] );
+
+      // Set the offset to each bit within a group in terms of 16 Byte
+      bitOffsetInGroup = (lut_numCnInCnGroups_BG1_R13[2]*NR_LDPC_ZMAX)>>4;
+
+
+      // Set pointers to start of group 5
+      //p_cnProcBuf    = (simde__m128i*) &cnProcBuf   [lut_startAddrCnGroups[1]];
+      //p_cnProcBufRes = (simde__m128i*) &cnProcBufRes[lut_startAddrCnGroups[1]];
+
+      // Loop over every BN
+      
+      for (j=0; j<5; j++)
+	{
+	  // Set results pointer to the correct BN address
+	  //p_cnProcBufResBit = p_cnProcBufRes + (j*bitOffsetInGroup);
+
+	  // Loop over CNs
+	  //	  for (i=0; i<M; i++,iprime++)
+	  //            {
+	  fprintf(fd,"            for (int i=0;i<M;i++) {\n");
+	  // Abs and sign of 16 CNs (first BN)
+	  //                ymm0 = p_cnProcBuf[lut_idxCnProcG5[j][0] + i];
+	  fprintf(fd,"                ymm0 = ((simde__m128i*)cnProcBuf)[%d+i];\n",(lut_startAddrCnGroups[2]>>4)+lut_idxCnProcG5[j][0]*2);
+	  //                sgn  = simde_mm_sign_epi8(ones, ymm0);
+#ifndef AVOID_MM256_SIGN
+	  fprintf(fd,"                sgn  = simde_mm_sign_epi8(ones, ymm0);\n");
+#else
+	  fprintf(fd,"                sgn  = simde_mm_xor_si128(ones, ymm0);\n");
+#endif	  
+	  //                min  = simde_mm_abs_epi8(ymm0);
+	  fprintf(fd,"                min  = simde_mm_abs_epi8(ymm0);\n");
+	  
+	  
+	  // Loop over BNs
+	  for (k=1; k<4; k++)
+	    {
+	      fprintf(fd,"                ymm0 = ((simde__m128i*)cnProcBuf)[%d+i];\n",(lut_startAddrCnGroups[2]>>4)+lut_idxCnProcG5[j][k]*2);
+	      
+	      //                min  = simde_mm_min_epu8(min, simde_mm_abs_epi8(ymm0));
+	      fprintf(fd,"                min  = simde_mm_min_epu8(min, simde_mm_abs_epi8(ymm0));\n");
+	      
+	      //                sgn  = simde_mm_sign_epi8(sgn, ymm0);
+#ifndef AVOID_MM256_SIGN
+  	      fprintf(fd,"                sgn  = simde_mm_sign_epi8(sgn, ymm0);\n");
+#else
+	      fprintf(fd,"                sgn  = simde_mm_xor_si128(sgn, ymm0);\n");
+#endif
+	    }
+	  
+	  // Store result
+	  //                min = simde_mm_min_epu8(min, maxLLR); // 128 in epi8 is -127
+#ifndef DROP_MAXLLR
+	  fprintf(fd,"                min = simde_mm_min_epu8(min, maxLLR);\n");
+#endif	  
+	  //                *p_cnProcBufResBit = simde_mm_sign_epi8(min, sgn);
+	  //                p_cnProcBufResBit++;
+	  fprintf(fd,"                ((simde__m128i*)cnProcBufRes)[%d+i] = simde_mm_sign_epi8(min, sgn);\n",(lut_startAddrCnGroups[2]>>4)+(j*bitOffsetInGroup));
+	  fprintf(fd,"            }\n");
+        }
+    }
+
+  // =====================================================================
+  // Process group with 6 BNs
+  fprintf(fd,"//Process group with 6 BNs\n");
+  // Offset is 8*384/32 = 96
+  const uint16_t lut_idxCnProcG6[6][5] = {{96,192,288,384,480}, {0,192,288,384,480},
+					  {0,96,288,384,480}, {0,96,192,384,480},
+					  {0,96,192,288,480}, {0,96,192,288,384}};
+
+
+  if (lut_numCnInCnGroups[3] > 0)
+    {
+      // Number of groups of 16 CNs for parallel processing
+      // Ceil for values not divisible by 16
+     fprintf(fd, "M = (%d*Z + 15)>>4;\n",lut_numCnInCnGroups[3] );
+
+      // Set the offset to each bit within a group in terms of 16 Byte
+      bitOffsetInGroup = (lut_numCnInCnGroups_BG1_R13[3]*NR_LDPC_ZMAX)>>4;
+
+
+      // Set pointers to start of group 6
+      //p_cnProcBuf    = (simde__m128i*) &cnProcBuf   [lut_startAddrCnGroups[1]];
+      //p_cnProcBufRes = (simde__m128i*) &cnProcBufRes[lut_startAddrCnGroups[1]];
+
+      // Loop over every BN
+      
+      for (j=0; j<6; j++)
+        {
+	  // Set results pointer to the correct BN address
+	  //p_cnProcBufResBit = p_cnProcBufRes + (j*bitOffsetInGroup);
+
+	  // Loop over CNs
+	  //	  for (i=0; i<M; i++,iprime++)
+	  //            {
+	  fprintf(fd,"            for (int i=0;i<M;i++) {\n");
+	  // Abs and sign of 16 CNs (first BN)
+	  //                ymm0 = p_cnProcBuf[lut_idxCnProcG6[j][0] + i];
+	  fprintf(fd,"                ymm0 = ((simde__m128i*)cnProcBuf)[%d+i];\n",(lut_startAddrCnGroups[3]>>4)+lut_idxCnProcG6[j][0]*2);
+	  //                sgn  = simde_mm_sign_epi8(ones, ymm0);
+#ifndef AVOID_MM256_SIGN
+	  fprintf(fd,"                sgn  = simde_mm_sign_epi8(ones, ymm0);\n");
+#else
+	  fprintf(fd,"                sgn  = simde_mm_xor_si128(ones, ymm0);\n");
+#endif	  
+	  //                min  = simde_mm_abs_epi8(ymm0);
+	  fprintf(fd,"                min  = simde_mm_abs_epi8(ymm0);\n");
+	  
+	  
+	  // Loop over BNs
+	  for (k=1; k<5; k++)
+	    {
+	      fprintf(fd,"                ymm0 = ((simde__m128i*)cnProcBuf)[%d+i];\n",(lut_startAddrCnGroups[3]>>4)+lut_idxCnProcG6[j][k]*2);
+	      
+	      //                min  = simde_mm_min_epu8(min, simde_mm_abs_epi8(ymm0));
+	      fprintf(fd,"                min  = simde_mm_min_epu8(min, simde_mm_abs_epi8(ymm0));\n");
+	      
+	      //                sgn  = simde_mm_sign_epi8(sgn, ymm0);
+#ifndef AVOID_MM256_SIGN
+  	      fprintf(fd,"                sgn  = simde_mm_sign_epi8(sgn, ymm0);\n");
+#else
+	      fprintf(fd,"                sgn  = simde_mm_xor_si128(sgn, ymm0);\n");
+#endif
+	    }
+	  
+	  // Store result
+	  //                min = simde_mm_min_epu8(min, maxLLR); // 128 in epi8 is -127
+#ifndef DROP_MAXLLR
+	  fprintf(fd,"                min = simde_mm_min_epu8(min, maxLLR);\n");
+#endif	  
+	  //                *p_cnProcBufResBit = simde_mm_sign_epi8(min, sgn);
+	  //                p_cnProcBufResBit++;
+	  fprintf(fd,"                ((simde__m128i*)cnProcBufRes)[%d+i] = simde_mm_sign_epi8(min, sgn);\n",(lut_startAddrCnGroups[3]>>4)+(j*bitOffsetInGroup));
+	  fprintf(fd,"            }\n");
+	}
+    }
+
+
+  // =====================================================================
+  // Process group with 7 BNs
+  fprintf(fd,"//Process group with 7 BNs\n");
+  // Offset is 5*384/32 = 60
+  const uint16_t lut_idxCnProcG7[7][6] = {{60,120,180,240,300,360}, {0,120,180,240,300,360},
+					  {0,60,180,240,300,360},   {0,60,120,240,300,360},
+					  {0,60,120,180,300,360},   {0,60,120,180,240,360},
+					  {0,60,120,180,240,300}};
+
+
+
+  if (lut_numCnInCnGroups[4] > 0)
+    {
+      // Number of groups of 16 CNs for parallel processing
+      // Ceil for values not divisible by 16
+      fprintf(fd, "M = (%d*Z + 15)>>4;\n",lut_numCnInCnGroups[4] );
+
+      // Set the offset to each bit within a group in terms of 16 Byte
+      bitOffsetInGroup = (lut_numCnInCnGroups_BG1_R13[4]*NR_LDPC_ZMAX)>>4;
+
+
+      // Set pointers to start of group 7
+      //p_cnProcBuf    = (simde__m128i*) &cnProcBuf   [lut_startAddrCnGroups[1]];
+      //p_cnProcBufRes = (simde__m128i*) &cnProcBufRes[lut_startAddrCnGroups[1]];
+
+      // Loop over every BN
+      
+      for (j=0; j<7; j++)
+        {
+	  // Set results pointer to the correct BN address
+	  //p_cnProcBufResBit = p_cnProcBufRes + (j*bitOffsetInGroup);
+
+	  // Loop over CNs
+	  //	  for (i=0; i<M; i++,iprime++)
+	  //            {
+	  fprintf(fd,"            for (int i=0;i<M;i++) {\n");
+	  // Abs and sign of 16 CNs (first BN)
+	  //                ymm0 = p_cnProcBuf[lut_idxCnProcG7[j][0] + i];
+	  fprintf(fd,"                ymm0 = ((simde__m128i*)cnProcBuf)[%d+i];\n",(lut_startAddrCnGroups[4]>>4)+lut_idxCnProcG7[j][0]*2);
+	  //                sgn  = simde_mm_sign_epi8(ones, ymm0);
+#ifndef AVOID_MM256_SIGN
+	  fprintf(fd,"                sgn  = simde_mm_sign_epi8(ones, ymm0);\n");
+#else
+	  fprintf(fd,"                sgn  = simde_mm_xor_si128(ones, ymm0);\n");
+#endif	  
+	  //                min  = simde_mm_abs_epi8(ymm0);
+	  fprintf(fd,"                min  = simde_mm_abs_epi8(ymm0);\n");
+	  
+	  
+	  // Loop over BNs
+	  for (k=1; k<6; k++)
+	    {
+	      fprintf(fd,"                ymm0 = ((simde__m128i*)cnProcBuf)[%d+i];\n",(lut_startAddrCnGroups[4]>>4)+lut_idxCnProcG7[j][k]*2);
+	      
+	      //                min  = simde_mm_min_epu8(min, simde_mm_abs_epi8(ymm0));
+	      fprintf(fd,"                min  = simde_mm_min_epu8(min, simde_mm_abs_epi8(ymm0));\n");
+	      
+	      //                sgn  = simde_mm_sign_epi8(sgn, ymm0);
+#ifndef AVOID_MM256_SIGN
+  	      fprintf(fd,"                sgn  = simde_mm_sign_epi8(sgn, ymm0);\n");
+#else
+	      fprintf(fd,"                sgn  = simde_mm_xor_si128(sgn, ymm0);\n");
+#endif
+	    }
+	  
+	  // Store result
+	  //                min = simde_mm_min_epu8(min, maxLLR); // 128 in epi8 is -127
+#ifndef DROP_MAXLLR
+	  fprintf(fd,"                min = simde_mm_min_epu8(min, maxLLR);\n");
+#endif	  
+	  //                *p_cnProcBufResBit = simde_mm_sign_epi8(min, sgn);
+	  //                p_cnProcBufResBit++;
+	  fprintf(fd,"                ((simde__m128i*)cnProcBufRes)[%d+i] = simde_mm_sign_epi8(min, sgn);\n",(lut_startAddrCnGroups[4]>>4)+(j*bitOffsetInGroup));
+	  fprintf(fd,"            }\n");
+	}
+    }
+
+
+  // =====================================================================
+  // Process group with 8 BNs
+  fprintf(fd,"//Process group with 8 BNs\n");
+  // Offset is 2*384/32 = 24
+  const uint8_t lut_idxCnProcG8[8][7] = {{24,48,72,96,120,144,168}, {0,48,72,96,120,144,168},
+					 {0,24,72,96,120,144,168}, {0,24,48,96,120,144,168},
+					 {0,24,48,72,120,144,168}, {0,24,48,72,96,144,168},
+					 {0,24,48,72,96,120,168},  {0,24,48,72,96,120,144}};
+
+
+
+
+  if (lut_numCnInCnGroups[5] > 0)
+    {
+      // Number of groups of 16 CNs for parallel processing
+      // Ceil for values not divisible by 16
+     fprintf(fd, "M = (%d*Z + 15)>>4;\n",lut_numCnInCnGroups[5] );
+
+      // Set the offset to each bit within a group in terms of 16 Byte
+      bitOffsetInGroup = (lut_numCnInCnGroups_BG1_R13[5]*NR_LDPC_ZMAX)>>4;
+
+
+      // Set pointers to start of group 8
+      //p_cnProcBuf    = (simde__m128i*) &cnProcBuf   [lut_startAddrCnGroups[1]];
+      //p_cnProcBufRes = (simde__m128i*) &cnProcBufRes[lut_startAddrCnGroups[1]];
+
+      // Loop over every BN
+      
+      for (j=0; j<8; j++)
+        {
+	  // Set results pointer to the correct BN address
+	  //p_cnProcBufResBit = p_cnProcBufRes + (j*bitOffsetInGroup);
+
+	  // Loop over CNs
+	  //	  for (i=0; i<M; i++,iprime++)
+	  //            {
+	  fprintf(fd,"            for (int i=0;i<M;i++) {\n");
+	  // Abs and sign of 16 CNs (first BN)
+	  //                ymm0 = p_cnProcBuf[lut_idxCnProcG8[j][0] + i];
+	  fprintf(fd,"                ymm0 = ((simde__m128i*)cnProcBuf)[%d+i];\n",(lut_startAddrCnGroups[5]>>4)+lut_idxCnProcG8[j][0]*2);
+	  //                sgn  = simde_mm_sign_epi8(ones, ymm0);
+#ifndef AVOID_MM256_SIGN
+	  fprintf(fd,"                sgn  = simde_mm_sign_epi8(ones, ymm0);\n");
+#else
+	  fprintf(fd,"                sgn  = simde_mm_xor_si128(ones, ymm0);\n");
+#endif	  
+	  //                min  = simde_mm_abs_epi8(ymm0);
+	  fprintf(fd,"                min  = simde_mm_abs_epi8(ymm0);\n");
+	  
+	  
+	  // Loop over BNs
+	  for (k=1; k<7; k++)
+	    {
+	      fprintf(fd,"                ymm0 = ((simde__m128i*)cnProcBuf)[%d+i];\n",(lut_startAddrCnGroups[5]>>4)+lut_idxCnProcG8[j][k]*2);
+	      
+	      //                min  = simde_mm_min_epu8(min, simde_mm_abs_epi8(ymm0));
+	      fprintf(fd,"                min  = simde_mm_min_epu8(min, simde_mm_abs_epi8(ymm0));\n");
+	      
+	      //                sgn  = simde_mm_sign_epi8(sgn, ymm0);
+#ifndef AVOID_MM256_SIGN
+  	      fprintf(fd,"                sgn  = simde_mm_sign_epi8(sgn, ymm0);\n");
+#else
+	      fprintf(fd,"                sgn  = simde_mm_xor_si128(sgn, ymm0);\n");
+#endif
+	    }
+	  
+	  // Store result
+	  //                min = simde_mm_min_epu8(min, maxLLR); // 128 in epi8 is -127
+#ifndef DROP_MAXLLR
+	  fprintf(fd,"                min = simde_mm_min_epu8(min, maxLLR);\n");
+#endif
+	  //                *p_cnProcBufResBit = simde_mm_sign_epi8(min, sgn);
+	  //                p_cnProcBufResBit++;
+	  fprintf(fd,"                ((simde__m128i*)cnProcBufRes)[%d+i] = simde_mm_sign_epi8(min, sgn);\n",(lut_startAddrCnGroups[5]>>4)+(j*bitOffsetInGroup));
+	  fprintf(fd,"            }\n");
+        }
+    }
+
+  // =====================================================================
+  // Process group with 9 BNs
+  fprintf(fd,"//Process group with 9 BNs\n");
+  // Offset is 2*384/32 = 24
+  const uint8_t lut_idxCnProcG9[9][8] = {{24,48,72,96,120,144,168,192}, {0,48,72,96,120,144,168,192},
+					 {0,24,72,96,120,144,168,192}, {0,24,48,96,120,144,168,192},
+					 {0,24,48,72,120,144,168,192}, {0,24,48,72,96,144,168,192},
+					 {0,24,48,72,96,120,168,192}, {0,24,48,72,96,120,144,192},
+					 {0,24,48,72,96,120,144,168}};
+
+
+
+
+
+  if (lut_numCnInCnGroups[6] > 0)
+    {
+      // Number of groups of 16 CNs for parallel processing
+      // Ceil for values not divisible by 16
+      fprintf(fd, "M = (%d*Z + 15)>>4;\n",lut_numCnInCnGroups[6] );
+      // Set the offset to each bit within a group in terms of 16 Byte
+      bitOffsetInGroup = (lut_numCnInCnGroups_BG1_R13[6]*NR_LDPC_ZMAX)>>4;
+
+
+      // Set pointers to start of group 9
+      //p_cnProcBuf    = (simde__m128i*) &cnProcBuf   [lut_startAddrCnGroups[1]];
+      //p_cnProcBufRes = (simde__m128i*) &cnProcBufRes[lut_startAddrCnGroups[1]];
+
+      // Loop over every BN
+     
+      for (j=0; j<9; j++)
+        {
+	  // Set results pointer to the correct BN address
+	  //p_cnProcBufResBit = p_cnProcBufRes + (j*bitOffsetInGroup);
+
+	  // Loop over CNs
+	  //	  for (i=0; i<M; i++,iprime++)
+	  //            {
+	  fprintf(fd,"            for (int i=0;i<M;i++) {\n");
+	  // Abs and sign of 16 CNs (first BN)
+	  //                ymm0 = p_cnProcBuf[lut_idxCnProcG9[j][0] + i];
+	  fprintf(fd,"                ymm0 = ((simde__m128i*)cnProcBuf)[%d+i];\n",(lut_startAddrCnGroups[6]>>4)+lut_idxCnProcG9[j][0]*2);
+	  //                sgn  = simde_mm_sign_epi8(ones, ymm0);
+#ifndef AVOID_MM256_SIGN
+	  fprintf(fd,"                sgn  = simde_mm_sign_epi8(ones, ymm0);\n");
+#else
+	  fprintf(fd,"                sgn  = simde_mm_xor_si128(ones, ymm0);\n");
+#endif	  
+	  //                min  = simde_mm_abs_epi8(ymm0);
+	  fprintf(fd,"                min  = simde_mm_abs_epi8(ymm0);\n");
+	  
+	  
+	  // Loop over BNs
+	  for (k=1; k<8; k++)
+	    {
+	      fprintf(fd,"                ymm0 = ((simde__m128i*)cnProcBuf)[%d+i];\n",(lut_startAddrCnGroups[6]>>4)+lut_idxCnProcG9[j][k]*2);
+	      
+	      //                min  = simde_mm_min_epu8(min, simde_mm_abs_epi8(ymm0));
+	      fprintf(fd,"                min  = simde_mm_min_epu8(min, simde_mm_abs_epi8(ymm0));\n");
+	      
+	      //                sgn  = simde_mm_sign_epi8(sgn, ymm0);
+#ifndef AVOID_MM256_SIGN
+  	      fprintf(fd,"                sgn  = simde_mm_sign_epi8(sgn, ymm0);\n");
+#else
+	      fprintf(fd,"                sgn  = simde_mm_xor_si128(sgn, ymm0);\n");
+#endif
+	    }
+	  
+	  // Store result
+	  //                min = simde_mm_min_epu8(min, maxLLR); // 128 in epi8 is -127
+#ifndef DROP_MAXLLR
+	  fprintf(fd,"                min = simde_mm_min_epu8(min, maxLLR);\n");
+#endif
+	  //                *p_cnProcBufResBit = simde_mm_sign_epi8(min, sgn);
+	  //                p_cnProcBufResBit++;
+	  fprintf(fd,"                ((simde__m128i*)cnProcBufRes)[%d+i] = simde_mm_sign_epi8(min, sgn);\n",(lut_startAddrCnGroups[6]>>4)+(j*bitOffsetInGroup));
+	  fprintf(fd,"            }\n");
+	}
+    }
+
+  // =====================================================================
+  // Process group with 10 BNs
+  fprintf(fd,"//Process group with 10 BNs\n");
+  // Offset is 1*384/32 = 12
+  const uint8_t lut_idxCnProcG10[10][9] = {{12,24,36,48,60,72,84,96,108}, {0,24,36,48,60,72,84,96,108},
+					   {0,12,36,48,60,72,84,96,108}, {0,12,24,48,60,72,84,96,108},
+					   {0,12,24,36,60,72,84,96,108}, {0,12,24,36,48,72,84,96,108},
+					   {0,12,24,36,48,60,84,96,108}, {0,12,24,36,48,60,72,96,108},
+					   {0,12,24,36,48,60,72,84,108}, {0,12,24,36,48,60,72,84,96}};
+
+
+
+
+
+  if (lut_numCnInCnGroups[7] > 0)
+    {
+      // Number of groups of 16 CNs for parallel processing
+      // Ceil for values not divisible by 16
+      fprintf(fd, " M = (%d*Z + 15)>>4;\n",lut_numCnInCnGroups[7] );
+
+      // Set the offset to each bit within a group in terms of 16 Byte
+      bitOffsetInGroup = (lut_numCnInCnGroups_BG1_R13[7]*NR_LDPC_ZMAX)>>4;
+
+
+      // Set pointers to start of group 10
+      //p_cnProcBuf    = (simde__m128i*) &cnProcBuf   [lut_startAddrCnGroups[1]];
+      //p_cnProcBufRes = (simde__m128i*) &cnProcBufRes[lut_startAddrCnGroups[1]];
+
+      // Loop over every BN
+      
+      for (j=0; j<10; j++)
+        {
+	  // Set results pointer to the correct BN address
+	  //p_cnProcBufResBit = p_cnProcBufRes + (j*bitOffsetInGroup);
+
+	  // Loop over CNs
+	  //	  for (i=0; i<M; i++,iprime++)
+	  //            {
+	  fprintf(fd,"            for (int i=0;i<M;i++) {\n");
+	  // Abs and sign of 16 CNs (first BN)
+	  //                ymm0 = p_cnProcBuf[lut_idxCnProcG10[j][0] + i];
+	  fprintf(fd,"                ymm0 = ((simde__m128i*)cnProcBuf)[%d+i];\n",(lut_startAddrCnGroups[7]>>4)+lut_idxCnProcG10[j][0]*2);
+	  //                sgn  = simde_mm_sign_epi8(ones, ymm0);
+#ifndef AVOID_MM256_SIGN
+	  fprintf(fd,"                sgn  = simde_mm_sign_epi8(ones, ymm0);\n");
+#else
+	  fprintf(fd,"                sgn  = simde_mm_xor_si128(ones, ymm0);\n");
+#endif	  
+	  //                min  = simde_mm_abs_epi8(ymm0);
+	  fprintf(fd,"                min  = simde_mm_abs_epi8(ymm0);\n");
+	  
+	  
+	  // Loop over BNs
+	  for (k=1; k<9; k++)
+	    {
+	      fprintf(fd,"                ymm0 = ((simde__m128i*)cnProcBuf)[%d+i];\n",(lut_startAddrCnGroups[7]>>4)+lut_idxCnProcG10[j][k]*2);
+	      
+	      //                min  = simde_mm_min_epu8(min, simde_mm_abs_epi8(ymm0));
+	      fprintf(fd,"                min  = simde_mm_min_epu8(min, simde_mm_abs_epi8(ymm0));\n");
+	      
+	      //                sgn  = simde_mm_sign_epi8(sgn, ymm0);
+#ifndef AVOID_MM256_SIGN
+  	      fprintf(fd,"                sgn  = simde_mm_sign_epi8(sgn, ymm0);\n");
+#else
+	      fprintf(fd,"                sgn  = simde_mm_xor_si128(sgn, ymm0);\n");
+#endif
+	    }
+	  
+	  // Store result
+	  //                min = simde_mm_min_epu8(min, maxLLR); // 128 in epi8 is -127
+#ifndef DROP_MAXLLR
+	  fprintf(fd,"                min = simde_mm_min_epu8(min, maxLLR);\n");
+#endif	  
+	  //                *p_cnProcBufResBit = simde_mm_sign_epi8(min, sgn);
+	  //                p_cnProcBufResBit++;
+	  fprintf(fd,"                ((simde__m128i*)cnProcBufRes)[%d+i] = simde_mm_sign_epi8(min, sgn);\n",(lut_startAddrCnGroups[7]>>4)+(j*bitOffsetInGroup));
+	  fprintf(fd,"            }\n");
+        }
+    }
+
+
+  // =====================================================================
+  // Process group with 19 BNs
+  fprintf(fd,"//Process group with 19 BNs\n");
+  // Offset is 4*384/32 = 48
+  const uint16_t lut_idxCnProcG19[19][18] = {{48,96,144,192,240,288,336,384,432,480,528,576,624,672,720,768,816,864}, {0,96,144,192,240,288,336,384,432,480,528,576,624,672,720,768,816,864},
+					     {0,48,144,192,240,288,336,384,432,480,528,576,624,672,720,768,816,864}, {0,48,96,192,240,288,336,384,432,480,528,576,624,672,720,768,816,864},
+					     {0,48,96,144,240,288,336,384,432,480,528,576,624,672,720,768,816,864}, {0,48,96,144,192,288,336,384,432,480,528,576,624,672,720,768,816,864},
+					     {0,48,96,144,192,240,336,384,432,480,528,576,624,672,720,768,816,864}, {0,48,96,144,192,240,288,384,432,480,528,576,624,672,720,768,816,864},
+					     {0,48,96,144,192,240,288,336,432,480,528,576,624,672,720,768,816,864}, {0,48,96,144,192,240,288,336,384,480,528,576,624,672,720,768,816,864},
+					     {0,48,96,144,192,240,288,336,384,432,528,576,624,672,720,768,816,864}, {0,48,96,144,192,240,288,336,384,432,480,576,624,672,720,768,816,864},
+					     {0,48,96,144,192,240,288,336,384,432,480,528,624,672,720,768,816,864}, {0,48,96,144,192,240,288,336,384,432,480,528,576,672,720,768,816,864},
+					     {0,48,96,144,192,240,288,336,384,432,480,528,576,624,720,768,816,864}, {0,48,96,144,192,240,288,336,384,432,480,528,576,624,672,768,816,864},
+					     {0,48,96,144,192,240,288,336,384,432,480,528,576,624,672,720,816,864}, {0,48,96,144,192,240,288,336,384,432,480,528,576,624,672,720,768,864},
+					     {0,48,96,144,192,240,288,336,384,432,480,528,576,624,672,720,768,816}};
+
+ 
+  if (lut_numCnInCnGroups[8] > 0)
+    {
+      // Number of groups of 16 CNs for parallel processing
+      // Ceil for values not divisible by 16
+     fprintf(fd, " M = (%d*Z + 15)>>4;\n",lut_numCnInCnGroups[8] );
+
+      // Set the offset to each bit within a group in terms of 16 Byte
+      bitOffsetInGroup = (lut_numCnInCnGroups_BG1_R13[8]*NR_LDPC_ZMAX)>>4;
+
+
+      // Set pointers to start of group 19
+      //p_cnProcBuf    = (simde__m128i*) &cnProcBuf   [lut_startAddrCnGroups[1]];
+      //p_cnProcBufRes = (simde__m128i*) &cnProcBufRes[lut_startAddrCnGroups[1]];
+
+      // Loop over every BN
+      
+      for (j=0; j<19; j++)
+        {
+	  // Set results pointer to the correct BN address
+	  //p_cnProcBufResBit = p_cnProcBufRes + (j*bitOffsetInGroup);
+
+	  // Loop over CNs
+	  //	  for (i=0; i<M; i++,iprime++)
+	  //            {
+	  fprintf(fd,"            for (int i=0;i<M;i++) {\n");
+	  // Abs and sign of 16 CNs (first BN)
+	  //                ymm0 = p_cnProcBuf[lut_idxCnProcG19[j][0] + i];
+	  fprintf(fd,"                ymm0 = ((simde__m128i*)cnProcBuf)[%d+i];\n",(lut_startAddrCnGroups[8]>>4)+lut_idxCnProcG19[j][0]*2);
+	  //                sgn  = simde_mm_sign_epi8(ones, ymm0);
+#ifndef AVOID_MM256_SIGN
+	  fprintf(fd,"                sgn  = simde_mm_sign_epi8(ones, ymm0);\n");
+#else
+	  fprintf(fd,"                sgn  = simde_mm_xor_si128(ones, ymm0);\n");
+#endif	  
+	  //                min  = simde_mm_abs_epi8(ymm0);
+	  fprintf(fd,"                min  = simde_mm_abs_epi8(ymm0);\n");
+	  
+	  
+	  // Loop over BNs
+	  for (k=1; k<18; k++)
+	    {
+	      fprintf(fd,"                ymm0 = ((simde__m128i*)cnProcBuf)[%d+i];\n",(lut_startAddrCnGroups[8]>>4)+lut_idxCnProcG19[j][k]*2);
+	      
+	      //                min  = simde_mm_min_epu8(min, simde_mm_abs_epi8(ymm0));
+	      fprintf(fd,"                min  = simde_mm_min_epu8(min, simde_mm_abs_epi8(ymm0));\n");
+	      
+	      //                sgn  = simde_mm_sign_epi8(sgn, ymm0);
+#ifndef AVOID_MM256_SIGN
+  	      fprintf(fd,"                sgn  = simde_mm_sign_epi8(sgn, ymm0);\n");
+#else
+	      fprintf(fd,"                sgn  = simde_mm_xor_si128(sgn, ymm0);\n");
+#endif
+	    }
+	  
+	  // Store result
+	  //                min = simde_mm_min_epu8(min, maxLLR); // 128 in epi8 is -127
+#ifndef DROP_MAXLLR
+	  fprintf(fd,"                min = simde_mm_min_epu8(min, maxLLR);\n");
+#endif	  
+	  //                *p_cnProcBufResBit = simde_mm_sign_epi8(min, sgn);
+	  //                p_cnProcBufResBit++;
+	  fprintf(fd,"                ((simde__m128i*)cnProcBufRes)[%d+i] = simde_mm_sign_epi8(min, sgn);\n",(lut_startAddrCnGroups[8]>>4)+(j*bitOffsetInGroup));
+	  fprintf(fd,"            }\n");
+        }
+    }
+
+  fprintf(fd,"}\n");
+  fclose(fd);
+} // end of the BG1 128-bit cnProc generator
+
diff --git a/openair1/PHY/CODING/nrLDPC_decoder/nrLDPC_tools/generator_cnProc/cnProc_gen_BG1_avx2.c b/openair1/PHY/CODING/nrLDPC_decoder/nrLDPC_tools/generator_cnProc/cnProc_gen_BG1_avx2.c
index a4a269a2f639ba1b3e6fc8a5b619901d146d6342..6c21d8b4ea4dc0f59f60dd5235f51bb2d3c1fd73 100644
--- a/openair1/PHY/CODING/nrLDPC_decoder/nrLDPC_tools/generator_cnProc/cnProc_gen_BG1_avx2.c
+++ b/openair1/PHY/CODING/nrLDPC_decoder/nrLDPC_tools/generator_cnProc/cnProc_gen_BG1_avx2.c
@@ -24,6 +24,8 @@
 #include <stdint.h>
 #include "../../nrLDPCdecoder_defs.h"
 
+#define AVOID_MM256_SIGN 1
+#define DROP_MAXLLR 1
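+// The two switches above alter the generated kernels:
+//  - AVOID_MM256_SIGN accumulates the sign product with XOR instead of
+//    simde_mm256_sign_epi8: only the sign bit of "sgn" is consumed by the
+//    final simde_mm256_sign_epi8(min, sgn), and XOR of the raw LLRs carries
+//    the product of their sign bits in the MSB (unlike sign_epi8 it does not
+//    zero lanes whose input is 0).
+//  - DROP_MAXLLR omits the clamp of min to +127 before the sign is applied.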
 void nrLDPC_cnProc_BG1_generator_AVX2(const char* dir, int R)
 {
   const char *ratestr[3]={"13","23","89"};
@@ -57,8 +59,8 @@ void nrLDPC_cnProc_BG1_generator_AVX2(const char* dir, int R)
 
 
 
-  //__m256i* p_cnProcBuf;
-  //__m256i* p_cnProcBufRes;
+  //simde__m256i* p_cnProcBuf;
+  //simde__m256i* p_cnProcBufRes;
 
   // Number of CNs in Groups
   //uint32_t M;
@@ -67,11 +69,11 @@ void nrLDPC_cnProc_BG1_generator_AVX2(const char* dir, int R)
   // Offset to each bit within a group in terms of 32 Byte
   uint32_t bitOffsetInGroup;
 
-  //__m256i ymm0, min, sgn;
-  //__m256i* p_cnProcBufResBit;
+  //simde__m256i ymm0, min, sgn;
+  //simde__m256i* p_cnProcBufResBit;
 
-  // const __m256i* p_ones   = (__m256i*) ones256_epi8;
-  // const __m256i* p_maxLLR = (__m256i*) maxLLR256_epi8;
+  // const simde__m256i* p_ones   = (simde__m256i*) ones256_epi8;
+  // const simde__m256i* p_maxLLR = (simde__m256i*) maxLLR256_epi8;
 
   // LUT with offsets for bits that need to be processed
   // 1. bit proc requires LLRs of 2. and 3. bit, 2.bits of 1. and 3. etc.
@@ -85,11 +87,15 @@ void nrLDPC_cnProc_BG1_generator_AVX2(const char* dir, int R)
   // 1. bit proc requires LLRs of 2. and 3. bit, 2.bits of 1. and 3. etc.
   // Offsets are in units of bitOffsetInGroup (1*384/32)
   const uint8_t lut_idxCnProcG3[3][2] = {{12,24}, {0,24}, {0,12}};
-
-  fprintf(fd,"                __m256i ymm0, min, sgn,ones,maxLLR;\n");
-  fprintf(fd,"                ones   = simde_mm256_set1_epi8((char)1);\n");
-  fprintf(fd,"                maxLLR = simde_mm256_set1_epi8((char)127);\n");
-
+#ifndef DROP_MAXLLR
+  fprintf(fd,"                simde__m256i ymm0, min, sgn,ones,maxLLR;\n");
+#else
+  fprintf(fd,"                simde__m256i ymm0, min, sgn,ones;\n");
+#endif
+  fprintf(fd,"                ones   = simde_mm256_set1_epi8((int8_t)1);\n");
+#ifndef DROP_MAXLLR
+  fprintf(fd,"                maxLLR = simde_mm256_set1_epi8((int8_t)127);\n");
+#endif
   fprintf(fd,"                uint32_t  M;\n");
 
   if (lut_numCnInCnGroups[0] > 0)
@@ -103,8 +109,8 @@ void nrLDPC_cnProc_BG1_generator_AVX2(const char* dir, int R)
 
 
       // Set pointers to start of group 3
-      //p_cnProcBuf    = (__m256i*) &cnProcBuf   [lut_startAddrCnGroups[0]];
-      //p_cnProcBufRes = (__m256i*) &cnProcBufRes[lut_startAddrCnGroups[0]];
+      //p_cnProcBuf    = (simde__m256i*) &cnProcBuf   [lut_startAddrCnGroups[0]];
+      //p_cnProcBufRes = (simde__m256i*) &cnProcBufRes[lut_startAddrCnGroups[0]];
 
       // Loop over every BN
       
@@ -120,32 +126,40 @@ void nrLDPC_cnProc_BG1_generator_AVX2(const char* dir, int R)
 	  fprintf(fd,"            for (int i=0;i<M;i++) {\n");
 	  // Abs and sign of 32 CNs (first BN)
 	  //                ymm0 = p_cnProcBuf[lut_idxCnProcG3[j][0] + i];
-	  fprintf(fd,"                ymm0 = ((__m256i*)cnProcBuf)[%d+i];\n",(lut_startAddrCnGroups[0]>>5)+lut_idxCnProcG3[j][0]);
+	  fprintf(fd,"                ymm0 = ((simde__m256i*)cnProcBuf)[%d+i];\n",(lut_startAddrCnGroups[0]>>5)+lut_idxCnProcG3[j][0]);
 	  //                sgn  = simde_mm256_sign_epi8(ones, ymm0);
+#ifndef AVOID_MM256_SIGN
 	  fprintf(fd,"                sgn  = simde_mm256_sign_epi8(ones, ymm0);\n");
+#else
+	  fprintf(fd,"                sgn  = simde_mm256_xor_si256(ones, ymm0);\n");
+#endif	  
 	  //                min  = simde_mm256_abs_epi8(ymm0);
 	  fprintf(fd,"                min  = simde_mm256_abs_epi8(ymm0);\n");
 	  
 	  // 32 CNs of second BN
 	  //                ymm0 = p_cnProcBuf[lut_idxCnProcG3[j][1] + i];
-	  fprintf(fd,"                ymm0 = ((__m256i*)cnProcBuf)[%d+i];\n",(lut_startAddrCnGroups[0]>>5)+lut_idxCnProcG3[j][1]);
+	  fprintf(fd,"                ymm0 = ((simde__m256i*)cnProcBuf)[%d+i];\n",(lut_startAddrCnGroups[0]>>5)+lut_idxCnProcG3[j][1]);
 	  
 	  //                min  = simde_mm256_min_epu8(min, simde_mm256_abs_epi8(ymm0));
 	  fprintf(fd,"                min  = simde_mm256_min_epu8(min, simde_mm256_abs_epi8(ymm0));\n");
 	  
 	  //                sgn  = simde_mm256_sign_epi8(sgn, ymm0);
+#ifndef AVOID_MM256_SIGN
 	  fprintf(fd,"                sgn  = simde_mm256_sign_epi8(sgn, ymm0);\n");
-	  
+#else
+	  fprintf(fd,"                sgn  = simde_mm256_xor_si256(sgn, ymm0);\n");
+#endif
 	  // Store result
 	  //                min = simde_mm256_min_epu8(min, maxLLR); // 128 in epi8 is -127
+#ifndef DROP_MAXLLR
 	  fprintf(fd,"                min = simde_mm256_min_epu8(min, maxLLR);\n");
+#endif
 	  //                *p_cnProcBufResBit = simde_mm256_sign_epi8(min, sgn);
 	  //                p_cnProcBufResBit++;
-	  fprintf(fd,"                ((__m256i*)cnProcBufRes)[%d+i] = simde_mm256_sign_epi8(min, sgn);\n",(lut_startAddrCnGroups[0]>>5)+(j*bitOffsetInGroup));
+	  fprintf(fd,"                ((simde__m256i*)cnProcBufRes)[%d+i] = simde_mm256_sign_epi8(min, sgn);\n",(lut_startAddrCnGroups[0]>>5)+(j*bitOffsetInGroup));
 	  fprintf(fd,"            }\n");
         }
     }
-
   // =====================================================================
   // Process group with 4 BNs
   fprintf(fd,"//Process group with 4 BNs\n");
@@ -163,8 +177,8 @@ void nrLDPC_cnProc_BG1_generator_AVX2(const char* dir, int R)
 
 
       // Set pointers to start of group 4
-      //p_cnProcBuf    = (__m256i*) &cnProcBuf   [lut_startAddrCnGroups[1]];
-      //p_cnProcBufRes = (__m256i*) &cnProcBufRes[lut_startAddrCnGroups[1]];
+      //p_cnProcBuf    = (simde__m256i*) &cnProcBuf   [lut_startAddrCnGroups[1]];
+      //p_cnProcBufRes = (simde__m256i*) &cnProcBufRes[lut_startAddrCnGroups[1]];
 
       // Loop over every BN
       
@@ -179,9 +193,13 @@ void nrLDPC_cnProc_BG1_generator_AVX2(const char* dir, int R)
 	  fprintf(fd,"            for (int i=0;i<M;i++) {\n");
 	  // Abs and sign of 32 CNs (first BN)
 	  //                ymm0 = p_cnProcBuf[lut_idxCnProcG3[j][0] + i];
-	  fprintf(fd,"                ymm0 = ((__m256i*)cnProcBuf)[%d+i];\n",(lut_startAddrCnGroups[1]>>5)+lut_idxCnProcG4[j][0]);
+	  fprintf(fd,"                ymm0 = ((simde__m256i*)cnProcBuf)[%d+i];\n",(lut_startAddrCnGroups[1]>>5)+lut_idxCnProcG4[j][0]);
 	  //                sgn  = simde_mm256_sign_epi8(ones, ymm0);
+#ifndef AVOID_MM256_SIGN
 	  fprintf(fd,"                sgn  = simde_mm256_sign_epi8(ones, ymm0);\n");
+#else
+	  fprintf(fd,"                sgn  = simde_mm256_xor_si256(ones, ymm0);\n");
+#endif	  
 	  //                min  = simde_mm256_abs_epi8(ymm0);
 	  fprintf(fd,"                min  = simde_mm256_abs_epi8(ymm0);\n");
 	  
@@ -189,21 +207,27 @@ void nrLDPC_cnProc_BG1_generator_AVX2(const char* dir, int R)
 	  // Loop over BNs
 	  for (k=1; k<3; k++)
 	    {
-	      fprintf(fd,"                ymm0 = ((__m256i*)cnProcBuf)[%d+i];\n",(lut_startAddrCnGroups[1]>>5)+lut_idxCnProcG4[j][k]);
+	      fprintf(fd,"                ymm0 = ((simde__m256i*)cnProcBuf)[%d+i];\n",(lut_startAddrCnGroups[1]>>5)+lut_idxCnProcG4[j][k]);
 	      
 	      //                min  = simde_mm256_min_epu8(min, simde_mm256_abs_epi8(ymm0));
 	      fprintf(fd,"                min  = simde_mm256_min_epu8(min, simde_mm256_abs_epi8(ymm0));\n");
 	      
 	      //                sgn  = simde_mm256_sign_epi8(sgn, ymm0);
-	      fprintf(fd,"                sgn  = simde_mm256_sign_epi8(sgn, ymm0);\n");
+#ifndef AVOID_MM256_SIGN
+  	      fprintf(fd,"                sgn  = simde_mm256_sign_epi8(sgn, ymm0);\n");
+#else
+	      fprintf(fd,"                sgn  = simde_mm256_xor_si256(sgn, ymm0);\n");
+#endif
 	    }
 	  
 	  // Store result
 	  //                min = simde_mm256_min_epu8(min, maxLLR); // 128 in epi8 is -127
+#ifndef DROP_MAXLLR
 	  fprintf(fd,"                min = simde_mm256_min_epu8(min, maxLLR);\n");
+#endif	  
 	  //                *p_cnProcBufResBit = simde_mm256_sign_epi8(min, sgn);
 	      //                p_cnProcBufResBit++;
-	  fprintf(fd,"                ((__m256i*)cnProcBufRes)[%d+i] = simde_mm256_sign_epi8(min, sgn);\n",(lut_startAddrCnGroups[1]>>5)+(j*bitOffsetInGroup));
+	  fprintf(fd,"                ((simde__m256i*)cnProcBufRes)[%d+i] = simde_mm256_sign_epi8(min, sgn);\n",(lut_startAddrCnGroups[1]>>5)+(j*bitOffsetInGroup));
 	  fprintf(fd,"            }\n");
         }
     }
@@ -228,8 +252,8 @@ void nrLDPC_cnProc_BG1_generator_AVX2(const char* dir, int R)
 
 
       // Set pointers to start of group 4
-      //p_cnProcBuf    = (__m256i*) &cnProcBuf   [lut_startAddrCnGroups[1]];
-      //p_cnProcBufRes = (__m256i*) &cnProcBufRes[lut_startAddrCnGroups[1]];
+      //p_cnProcBuf    = (simde__m256i*) &cnProcBuf   [lut_startAddrCnGroups[1]];
+      //p_cnProcBufRes = (simde__m256i*) &cnProcBufRes[lut_startAddrCnGroups[1]];
 
       // Loop over every BN
       
@@ -244,9 +268,13 @@ void nrLDPC_cnProc_BG1_generator_AVX2(const char* dir, int R)
 	  fprintf(fd,"            for (int i=0;i<M;i++) {\n");
 	  // Abs and sign of 32 CNs (first BN)
 	  //                ymm0 = p_cnProcBuf[lut_idxCnProcG3[j][0] + i];
-	  fprintf(fd,"                ymm0 = ((__m256i*)cnProcBuf)[%d+i];\n",(lut_startAddrCnGroups[2]>>5)+lut_idxCnProcG5[j][0]);
+	  fprintf(fd,"                ymm0 = ((simde__m256i*)cnProcBuf)[%d+i];\n",(lut_startAddrCnGroups[2]>>5)+lut_idxCnProcG5[j][0]);
 	  //                sgn  = simde_mm256_sign_epi8(ones, ymm0);
+#ifndef AVOID_MM256_SIGN
 	  fprintf(fd,"                sgn  = simde_mm256_sign_epi8(ones, ymm0);\n");
+#else
+	  fprintf(fd,"                sgn  = simde_mm256_xor_si256(ones, ymm0);\n");
+#endif	  
 	  //                min  = simde_mm256_abs_epi8(ymm0);
 	  fprintf(fd,"                min  = simde_mm256_abs_epi8(ymm0);\n");
 	  
@@ -254,21 +282,27 @@ void nrLDPC_cnProc_BG1_generator_AVX2(const char* dir, int R)
 	  // Loop over BNs
 	  for (k=1; k<4; k++)
 	    {
-	      fprintf(fd,"                ymm0 = ((__m256i*)cnProcBuf)[%d+i];\n",(lut_startAddrCnGroups[2]>>5)+lut_idxCnProcG5[j][k]);
+	      fprintf(fd,"                ymm0 = ((simde__m256i*)cnProcBuf)[%d+i];\n",(lut_startAddrCnGroups[2]>>5)+lut_idxCnProcG5[j][k]);
 	      
 	      //                min  = simde_mm256_min_epu8(min, simde_mm256_abs_epi8(ymm0));
 	      fprintf(fd,"                min  = simde_mm256_min_epu8(min, simde_mm256_abs_epi8(ymm0));\n");
 	      
 	      //                sgn  = simde_mm256_sign_epi8(sgn, ymm0);
-	      fprintf(fd,"                sgn  = simde_mm256_sign_epi8(sgn, ymm0);\n");
+#ifndef AVOID_MM256_SIGN
+  	      fprintf(fd,"                sgn  = simde_mm256_sign_epi8(sgn, ymm0);\n");
+#else
+	      fprintf(fd,"                sgn  = simde_mm256_xor_si256(sgn, ymm0);\n");
+#endif
 	    }
 	  
 	  // Store result
 	  //                min = simde_mm256_min_epu8(min, maxLLR); // 128 in epi8 is -127
+#ifndef DROP_MAXLLR
 	  fprintf(fd,"                min = simde_mm256_min_epu8(min, maxLLR);\n");
+#endif	  
 	  //                *p_cnProcBufResBit = simde_mm256_sign_epi8(min, sgn);
 	  //                p_cnProcBufResBit++;
-	  fprintf(fd,"                ((__m256i*)cnProcBufRes)[%d+i] = simde_mm256_sign_epi8(min, sgn);\n",(lut_startAddrCnGroups[2]>>5)+(j*bitOffsetInGroup));
+	  fprintf(fd,"                ((simde__m256i*)cnProcBufRes)[%d+i] = simde_mm256_sign_epi8(min, sgn);\n",(lut_startAddrCnGroups[2]>>5)+(j*bitOffsetInGroup));
 	  fprintf(fd,"           }\n");
         }
     }
@@ -293,8 +327,8 @@ void nrLDPC_cnProc_BG1_generator_AVX2(const char* dir, int R)
 
 
       // Set pointers to start of group 4
-      //p_cnProcBuf    = (__m256i*) &cnProcBuf   [lut_startAddrCnGroups[1]];
-      //p_cnProcBufRes = (__m256i*) &cnProcBufRes[lut_startAddrCnGroups[1]];
+      //p_cnProcBuf    = (simde__m256i*) &cnProcBuf   [lut_startAddrCnGroups[1]];
+      //p_cnProcBufRes = (simde__m256i*) &cnProcBufRes[lut_startAddrCnGroups[1]];
 
       // Loop over every BN
       
@@ -309,9 +343,13 @@ void nrLDPC_cnProc_BG1_generator_AVX2(const char* dir, int R)
 	  fprintf(fd,"            for (int i=0;i<M;i++) {\n");
 	  // Abs and sign of 32 CNs (first BN)
 	  //                ymm0 = p_cnProcBuf[lut_idxCnProcG3[j][0] + i];
-	  fprintf(fd,"                ymm0 = ((__m256i*)cnProcBuf)[%d+i];\n",(lut_startAddrCnGroups[3]>>5)+lut_idxCnProcG6[j][0]);
+	  fprintf(fd,"                ymm0 = ((simde__m256i*)cnProcBuf)[%d+i];\n",(lut_startAddrCnGroups[3]>>5)+lut_idxCnProcG6[j][0]);
 	  //                sgn  = simde_mm256_sign_epi8(ones, ymm0);
+#ifndef AVOID_MM256_SIGN
 	  fprintf(fd,"                sgn  = simde_mm256_sign_epi8(ones, ymm0);\n");
+#else
+	  fprintf(fd,"                sgn  = simde_mm256_xor_si256(ones, ymm0);\n");
+#endif	  
 	  //                min  = simde_mm256_abs_epi8(ymm0);
 	  fprintf(fd,"                min  = simde_mm256_abs_epi8(ymm0);\n");
 	  
@@ -319,21 +357,27 @@ void nrLDPC_cnProc_BG1_generator_AVX2(const char* dir, int R)
 	  // Loop over BNs
 	  for (k=1; k<5; k++)
 	    {
-	      fprintf(fd,"                ymm0 = ((__m256i*)cnProcBuf)[%d+i];\n",(lut_startAddrCnGroups[3]>>5)+lut_idxCnProcG6[j][k]);
+	      fprintf(fd,"                ymm0 = ((simde__m256i*)cnProcBuf)[%d+i];\n",(lut_startAddrCnGroups[3]>>5)+lut_idxCnProcG6[j][k]);
 	      
 	      //                min  = simde_mm256_min_epu8(min, simde_mm256_abs_epi8(ymm0));
 	      fprintf(fd,"                min  = simde_mm256_min_epu8(min, simde_mm256_abs_epi8(ymm0));\n");
 	      
 	      //                sgn  = simde_mm256_sign_epi8(sgn, ymm0);
-	      fprintf(fd,"                sgn  = simde_mm256_sign_epi8(sgn, ymm0);\n");
+#ifndef AVOID_MM256_SIGN
+  	      fprintf(fd,"                sgn  = simde_mm256_sign_epi8(sgn, ymm0);\n");
+#else
+	      fprintf(fd,"                sgn  = simde_mm256_xor_si256(sgn, ymm0);\n");
+#endif
 	    }
 	  
 	  // Store result
 	  //                min = simde_mm256_min_epu8(min, maxLLR); // 128 in epi8 is -127
+#ifndef DROP_MAXLLR
 	  fprintf(fd,"                min = simde_mm256_min_epu8(min, maxLLR);\n");
+#endif	  
 	  //                *p_cnProcBufResBit = simde_mm256_sign_epi8(min, sgn);
 	  //                p_cnProcBufResBit++;
-	  fprintf(fd,"                ((__m256i*)cnProcBufRes)[%d+i] = simde_mm256_sign_epi8(min, sgn);\n",(lut_startAddrCnGroups[3]>>5)+(j*bitOffsetInGroup));
+	  fprintf(fd,"                ((simde__m256i*)cnProcBufRes)[%d+i] = simde_mm256_sign_epi8(min, sgn);\n",(lut_startAddrCnGroups[3]>>5)+(j*bitOffsetInGroup));
 	  fprintf(fd,"            }\n");
 	}
     }
@@ -361,8 +405,8 @@ void nrLDPC_cnProc_BG1_generator_AVX2(const char* dir, int R)
 
 
       // Set pointers to start of group 4
-      //p_cnProcBuf    = (__m256i*) &cnProcBuf   [lut_startAddrCnGroups[1]];
-      //p_cnProcBufRes = (__m256i*) &cnProcBufRes[lut_startAddrCnGroups[1]];
+      //p_cnProcBuf    = (simde__m256i*) &cnProcBuf   [lut_startAddrCnGroups[1]];
+      //p_cnProcBufRes = (simde__m256i*) &cnProcBufRes[lut_startAddrCnGroups[1]];
 
       // Loop over every BN
       
@@ -377,9 +421,13 @@ void nrLDPC_cnProc_BG1_generator_AVX2(const char* dir, int R)
 	  fprintf(fd,"            for (int i=0;i<M;i++) {\n");
 	  // Abs and sign of 32 CNs (first BN)
 	  //                ymm0 = p_cnProcBuf[lut_idxCnProcG3[j][0] + i];
-	  fprintf(fd,"                ymm0 = ((__m256i*)cnProcBuf)[%d+i];\n",(lut_startAddrCnGroups[4]>>5)+lut_idxCnProcG7[j][0]);
+	  fprintf(fd,"                ymm0 = ((simde__m256i*)cnProcBuf)[%d+i];\n",(lut_startAddrCnGroups[4]>>5)+lut_idxCnProcG7[j][0]);
 	  //                sgn  = simde_mm256_sign_epi8(ones, ymm0);
+#ifndef AVOID_MM256_SIGN
 	  fprintf(fd,"                sgn  = simde_mm256_sign_epi8(ones, ymm0);\n");
+#else
+	  fprintf(fd,"                sgn  = simde_mm256_xor_si256(ones, ymm0);\n");
+#endif	  
 	  //                min  = simde_mm256_abs_epi8(ymm0);
 	  fprintf(fd,"                min  = simde_mm256_abs_epi8(ymm0);\n");
 	  
@@ -387,21 +435,27 @@ void nrLDPC_cnProc_BG1_generator_AVX2(const char* dir, int R)
 	  // Loop over BNs
 	  for (k=1; k<6; k++)
 	    {
-	      fprintf(fd,"                ymm0 = ((__m256i*)cnProcBuf)[%d+i];\n",(lut_startAddrCnGroups[4]>>5)+lut_idxCnProcG7[j][k]);
+	      fprintf(fd,"                ymm0 = ((simde__m256i*)cnProcBuf)[%d+i];\n",(lut_startAddrCnGroups[4]>>5)+lut_idxCnProcG7[j][k]);
 	      
 	      //                min  = simde_mm256_min_epu8(min, simde_mm256_abs_epi8(ymm0));
 	      fprintf(fd,"                min  = simde_mm256_min_epu8(min, simde_mm256_abs_epi8(ymm0));\n");
 	      
 	      //                sgn  = simde_mm256_sign_epi8(sgn, ymm0);
-	      fprintf(fd,"                sgn  = simde_mm256_sign_epi8(sgn, ymm0);\n");
+#ifndef AVOID_MM256_SIGN
+  	      fprintf(fd,"                sgn  = simde_mm256_sign_epi8(sgn, ymm0);\n");
+#else
+	      fprintf(fd,"                sgn  = simde_mm256_xor_si256(sgn, ymm0);\n");
+#endif
 	    }
 	  
 	  // Store result
 	  //                min = simde_mm256_min_epu8(min, maxLLR); // 128 in epi8 is -127
+#ifndef DROP_MAXLLR
 	  fprintf(fd,"                min = simde_mm256_min_epu8(min, maxLLR);\n");
+#endif	  
 	  //                *p_cnProcBufResBit = simde_mm256_sign_epi8(min, sgn);
 	  //                p_cnProcBufResBit++;
-	  fprintf(fd,"                ((__m256i*)cnProcBufRes)[%d+i] = simde_mm256_sign_epi8(min, sgn);\n",(lut_startAddrCnGroups[4]>>5)+(j*bitOffsetInGroup));
+	  fprintf(fd,"                ((simde__m256i*)cnProcBufRes)[%d+i] = simde_mm256_sign_epi8(min, sgn);\n",(lut_startAddrCnGroups[4]>>5)+(j*bitOffsetInGroup));
 	  fprintf(fd,"            }\n");
 	}
     }
@@ -430,8 +484,8 @@ void nrLDPC_cnProc_BG1_generator_AVX2(const char* dir, int R)
 
 
       // Set pointers to start of group 4
-      //p_cnProcBuf    = (__m256i*) &cnProcBuf   [lut_startAddrCnGroups[1]];
-      //p_cnProcBufRes = (__m256i*) &cnProcBufRes[lut_startAddrCnGroups[1]];
+      //p_cnProcBuf    = (simde__m256i*) &cnProcBuf   [lut_startAddrCnGroups[1]];
+      //p_cnProcBufRes = (simde__m256i*) &cnProcBufRes[lut_startAddrCnGroups[1]];
 
       // Loop over every BN
       
@@ -446,9 +500,13 @@ void nrLDPC_cnProc_BG1_generator_AVX2(const char* dir, int R)
 	  fprintf(fd,"            for (int i=0;i<M;i++) {\n");
 	  // Abs and sign of 32 CNs (first BN)
 	  //                ymm0 = p_cnProcBuf[lut_idxCnProcG3[j][0] + i];
-	  fprintf(fd,"                ymm0 = ((__m256i*)cnProcBuf)[%d+i];\n",(lut_startAddrCnGroups[5]>>5)+lut_idxCnProcG8[j][0]);
+	  fprintf(fd,"                ymm0 = ((simde__m256i*)cnProcBuf)[%d+i];\n",(lut_startAddrCnGroups[5]>>5)+lut_idxCnProcG8[j][0]);
 	  //                sgn  = simde_mm256_sign_epi8(ones, ymm0);
+#ifndef AVOID_MM256_SIGN
 	  fprintf(fd,"                sgn  = simde_mm256_sign_epi8(ones, ymm0);\n");
+#else
+	  fprintf(fd,"                sgn  = simde_mm256_xor_si256(ones, ymm0);\n");
+#endif	  
 	  //                min  = simde_mm256_abs_epi8(ymm0);
 	  fprintf(fd,"                min  = simde_mm256_abs_epi8(ymm0);\n");
 	  
@@ -456,21 +514,27 @@ void nrLDPC_cnProc_BG1_generator_AVX2(const char* dir, int R)
 	  // Loop over BNs
 	  for (k=1; k<7; k++)
 	    {
-	      fprintf(fd,"                ymm0 = ((__m256i*)cnProcBuf)[%d+i];\n",(lut_startAddrCnGroups[5]>>5)+lut_idxCnProcG8[j][k]);
+	      fprintf(fd,"                ymm0 = ((simde__m256i*)cnProcBuf)[%d+i];\n",(lut_startAddrCnGroups[5]>>5)+lut_idxCnProcG8[j][k]);
 	      
 	      //                min  = simde_mm256_min_epu8(min, simde_mm256_abs_epi8(ymm0));
 	      fprintf(fd,"                min  = simde_mm256_min_epu8(min, simde_mm256_abs_epi8(ymm0));\n");
 	      
 	      //                sgn  = simde_mm256_sign_epi8(sgn, ymm0);
-	      fprintf(fd,"                sgn  = simde_mm256_sign_epi8(sgn, ymm0);\n");
+#ifndef AVOID_MM256_SIGN
+  	      fprintf(fd,"                sgn  = simde_mm256_sign_epi8(sgn, ymm0);\n");
+#else
+	      fprintf(fd,"                sgn  = simde_mm256_xor_si256(sgn, ymm0);\n");
+#endif
 	    }
 	  
 	  // Store result
 	  //                min = simde_mm256_min_epu8(min, maxLLR); // 128 in epi8 is -127
+#ifndef DROP_MAXLLR
 	  fprintf(fd,"                min = simde_mm256_min_epu8(min, maxLLR);\n");
+#endif
 	  //                *p_cnProcBufResBit = simde_mm256_sign_epi8(min, sgn);
 	  //                p_cnProcBufResBit++;
-	  fprintf(fd,"                ((__m256i*)cnProcBufRes)[%d+i] = simde_mm256_sign_epi8(min, sgn);\n",(lut_startAddrCnGroups[5]>>5)+(j*bitOffsetInGroup));
+	  fprintf(fd,"                ((simde__m256i*)cnProcBufRes)[%d+i] = simde_mm256_sign_epi8(min, sgn);\n",(lut_startAddrCnGroups[5]>>5)+(j*bitOffsetInGroup));
 	  fprintf(fd,"              }\n");
         }
     }
@@ -499,8 +563,8 @@ void nrLDPC_cnProc_BG1_generator_AVX2(const char* dir, int R)
 
 
       // Set pointers to start of group 9
-      //p_cnProcBuf    = (__m256i*) &cnProcBuf   [lut_startAddrCnGroups[1]];
-      //p_cnProcBufRes = (__m256i*) &cnProcBufRes[lut_startAddrCnGroups[1]];
+      //p_cnProcBuf    = (simde__m256i*) &cnProcBuf   [lut_startAddrCnGroups[1]];
+      //p_cnProcBufRes = (simde__m256i*) &cnProcBufRes[lut_startAddrCnGroups[1]];
 
       // Loop over every BN
      
@@ -515,9 +579,13 @@ void nrLDPC_cnProc_BG1_generator_AVX2(const char* dir, int R)
 	  fprintf(fd,"            for (int i=0;i<M;i++) {\n");
 	  // Abs and sign of 32 CNs (first BN)
 	  //                ymm0 = p_cnProcBuf[lut_idxCnProcG3[j][0] + i];
-	  fprintf(fd,"                ymm0 = ((__m256i*)cnProcBuf)[%d+i];\n",(lut_startAddrCnGroups[6]>>5)+lut_idxCnProcG9[j][0]);
+	  fprintf(fd,"                ymm0 = ((simde__m256i*)cnProcBuf)[%d+i];\n",(lut_startAddrCnGroups[6]>>5)+lut_idxCnProcG9[j][0]);
 	  //                sgn  = simde_mm256_sign_epi8(ones, ymm0);
+#ifndef AVOID_MM256_SIGN
 	  fprintf(fd,"                sgn  = simde_mm256_sign_epi8(ones, ymm0);\n");
+#else
+	  fprintf(fd,"                sgn  = simde_mm256_xor_si256(ones, ymm0);\n");
+#endif	  
 	  //                min  = simde_mm256_abs_epi8(ymm0);
 	  fprintf(fd,"                min  = simde_mm256_abs_epi8(ymm0);\n");
 	  
@@ -525,21 +593,27 @@ void nrLDPC_cnProc_BG1_generator_AVX2(const char* dir, int R)
 	  // Loop over BNs
 	  for (k=1; k<8; k++)
 	    {
-	      fprintf(fd,"                ymm0 = ((__m256i*)cnProcBuf)[%d+i];\n",(lut_startAddrCnGroups[6]>>5)+lut_idxCnProcG9[j][k]);
+	      fprintf(fd,"                ymm0 = ((simde__m256i*)cnProcBuf)[%d+i];\n",(lut_startAddrCnGroups[6]>>5)+lut_idxCnProcG9[j][k]);
 	      
 	      //                min  = simde_mm256_min_epu8(min, simde_mm256_abs_epi8(ymm0));
 	      fprintf(fd,"                min  = simde_mm256_min_epu8(min, simde_mm256_abs_epi8(ymm0));\n");
 	      
 	      //                sgn  = simde_mm256_sign_epi8(sgn, ymm0);
-	      fprintf(fd,"                sgn  = simde_mm256_sign_epi8(sgn, ymm0);\n");
+#ifndef AVOID_MM256_SIGN
+  	      fprintf(fd,"                sgn  = simde_mm256_sign_epi8(sgn, ymm0);\n");
+#else
+	      fprintf(fd,"                sgn  = simde_mm256_xor_si256(sgn, ymm0);\n");
+#endif
 	    }
 	  
 	  // Store result
 	  //                min = simde_mm256_min_epu8(min, maxLLR); // 128 in epi8 is -127
+#ifndef DROP_MAXLLR
 	  fprintf(fd,"                min = simde_mm256_min_epu8(min, maxLLR);\n");
+#endif
 	  //                *p_cnProcBufResBit = simde_mm256_sign_epi8(min, sgn);
 	  //                p_cnProcBufResBit++;
-	  fprintf(fd,"                ((__m256i*)cnProcBufRes)[%d+i] = simde_mm256_sign_epi8(min, sgn);\n",(lut_startAddrCnGroups[6]>>5)+(j*bitOffsetInGroup));
+	  fprintf(fd,"                ((simde__m256i*)cnProcBufRes)[%d+i] = simde_mm256_sign_epi8(min, sgn);\n",(lut_startAddrCnGroups[6]>>5)+(j*bitOffsetInGroup));
 	  fprintf(fd,"            }\n");
 	}
     }
@@ -569,8 +643,8 @@ void nrLDPC_cnProc_BG1_generator_AVX2(const char* dir, int R)
 
 
       // Set pointers to start of group 10
-      //p_cnProcBuf    = (__m256i*) &cnProcBuf   [lut_startAddrCnGroups[1]];
-      //p_cnProcBufRes = (__m256i*) &cnProcBufRes[lut_startAddrCnGroups[1]];
+      //p_cnProcBuf    = (simde__m256i*) &cnProcBuf   [lut_startAddrCnGroups[1]];
+      //p_cnProcBufRes = (simde__m256i*) &cnProcBufRes[lut_startAddrCnGroups[1]];
 
       // Loop over every BN
       
@@ -585,9 +659,13 @@ void nrLDPC_cnProc_BG1_generator_AVX2(const char* dir, int R)
 	  fprintf(fd,"            for (int i=0;i<M;i++) {\n");
 	  // Abs and sign of 32 CNs (first BN)
 	  //                ymm0 = p_cnProcBuf[lut_idxCnProcG3[j][0] + i];
-	  fprintf(fd,"                ymm0 = ((__m256i*)cnProcBuf)[%d+i];\n",(lut_startAddrCnGroups[7]>>5)+lut_idxCnProcG10[j][0]);
+	  fprintf(fd,"                ymm0 = ((simde__m256i*)cnProcBuf)[%d+i];\n",(lut_startAddrCnGroups[7]>>5)+lut_idxCnProcG10[j][0]);
 	  //                sgn  = simde_mm256_sign_epi8(ones, ymm0);
+#ifndef AVOID_MM256_SIGN
 	  fprintf(fd,"                sgn  = simde_mm256_sign_epi8(ones, ymm0);\n");
+#else
+	  fprintf(fd,"                sgn  = simde_mm256_xor_si256(ones, ymm0);\n");
+#endif	  
 	  //                min  = simde_mm256_abs_epi8(ymm0);
 	  fprintf(fd,"                min  = simde_mm256_abs_epi8(ymm0);\n");
 	  
@@ -595,21 +673,27 @@ void nrLDPC_cnProc_BG1_generator_AVX2(const char* dir, int R)
 	  // Loop over BNs
 	  for (k=1; k<9; k++)
 	    {
-	      fprintf(fd,"                ymm0 = ((__m256i*)cnProcBuf)[%d+i];\n",(lut_startAddrCnGroups[7]>>5)+lut_idxCnProcG10[j][k]);
+	      fprintf(fd,"                ymm0 = ((simde__m256i*)cnProcBuf)[%d+i];\n",(lut_startAddrCnGroups[7]>>5)+lut_idxCnProcG10[j][k]);
 	      
 	      //                min  = simde_mm256_min_epu8(min, simde_mm256_abs_epi8(ymm0));
 	      fprintf(fd,"                min  = simde_mm256_min_epu8(min, simde_mm256_abs_epi8(ymm0));\n");
 	      
 	      //                sgn  = simde_mm256_sign_epi8(sgn, ymm0);
-	      fprintf(fd,"                sgn  = simde_mm256_sign_epi8(sgn, ymm0);\n");
+#ifndef AVOID_MM256_SIGN
+  	      fprintf(fd,"                sgn  = simde_mm256_sign_epi8(sgn, ymm0);\n");
+#else
+	      fprintf(fd,"                sgn  = simde_mm256_xor_si256(sgn, ymm0);\n");
+#endif
 	    }
 	  
 	  // Store result
 	  //                min = simde_mm256_min_epu8(min, maxLLR); // 128 in epi8 is -127
+#ifndef DROP_MAXLLR
 	  fprintf(fd,"                min = simde_mm256_min_epu8(min, maxLLR);\n");
+#endif	  
 	  //                *p_cnProcBufResBit = simde_mm256_sign_epi8(min, sgn);
 	  //                p_cnProcBufResBit++;
-	  fprintf(fd,"                ((__m256i*)cnProcBufRes)[%d+i] = simde_mm256_sign_epi8(min, sgn);\n",(lut_startAddrCnGroups[7]>>5)+(j*bitOffsetInGroup));
+	  fprintf(fd,"                ((simde__m256i*)cnProcBufRes)[%d+i] = simde_mm256_sign_epi8(min, sgn);\n",(lut_startAddrCnGroups[7]>>5)+(j*bitOffsetInGroup));
 	  fprintf(fd,"            }\n");
         }
     }
@@ -630,7 +714,7 @@ void nrLDPC_cnProc_BG1_generator_AVX2(const char* dir, int R)
 					     {0,48,96,144,192,240,288,336,384,432,480,528,576,624,672,720,816,864}, {0,48,96,144,192,240,288,336,384,432,480,528,576,624,672,720,768,864},
 					     {0,48,96,144,192,240,288,336,384,432,480,528,576,624,672,720,768,816}};
 
-
+ 
   if (lut_numCnInCnGroups[8] > 0)
     {
       // Number of groups of 32 CNs for parallel processing
@@ -642,8 +726,8 @@ void nrLDPC_cnProc_BG1_generator_AVX2(const char* dir, int R)
 
 
       // Set pointers to start of group 19
-      //p_cnProcBuf    = (__m256i*) &cnProcBuf   [lut_startAddrCnGroups[1]];
-      //p_cnProcBufRes = (__m256i*) &cnProcBufRes[lut_startAddrCnGroups[1]];
+      //p_cnProcBuf    = (simde__m256i*) &cnProcBuf   [lut_startAddrCnGroups[1]];
+      //p_cnProcBufRes = (simde__m256i*) &cnProcBufRes[lut_startAddrCnGroups[1]];
 
       // Loop over every BN
       
@@ -658,9 +742,13 @@ void nrLDPC_cnProc_BG1_generator_AVX2(const char* dir, int R)
 	  fprintf(fd,"            for (int i=0;i<M;i++) {\n");
 	  // Abs and sign of 32 CNs (first BN)
 	  //                ymm0 = p_cnProcBuf[lut_idxCnProcG3[j][0] + i];
-	  fprintf(fd,"                ymm0 = ((__m256i*)cnProcBuf)[%d+i];\n",(lut_startAddrCnGroups[8]>>5)+lut_idxCnProcG19[j][0]);
+	  fprintf(fd,"                ymm0 = ((simde__m256i*)cnProcBuf)[%d+i];\n",(lut_startAddrCnGroups[8]>>5)+lut_idxCnProcG19[j][0]);
 	  //                sgn  = simde_mm256_sign_epi8(ones, ymm0);
+#ifndef AVOID_MM256_SIGN
 	  fprintf(fd,"                sgn  = simde_mm256_sign_epi8(ones, ymm0);\n");
+#else
+	  fprintf(fd,"                sgn  = simde_mm256_xor_si256(ones, ymm0);\n");
+#endif	  
 	  //                min  = simde_mm256_abs_epi8(ymm0);
 	  fprintf(fd,"                min  = simde_mm256_abs_epi8(ymm0);\n");
 	  
@@ -668,21 +756,27 @@ void nrLDPC_cnProc_BG1_generator_AVX2(const char* dir, int R)
 	  // Loop over BNs
 	  for (k=1; k<18; k++)
 	    {
-	      fprintf(fd,"                ymm0 = ((__m256i*)cnProcBuf)[%d+i];\n",(lut_startAddrCnGroups[8]>>5)+lut_idxCnProcG19[j][k]);
+	      fprintf(fd,"                ymm0 = ((simde__m256i*)cnProcBuf)[%d+i];\n",(lut_startAddrCnGroups[8]>>5)+lut_idxCnProcG19[j][k]);
 	      
 	      //                min  = simde_mm256_min_epu8(min, simde_mm256_abs_epi8(ymm0));
 	      fprintf(fd,"                min  = simde_mm256_min_epu8(min, simde_mm256_abs_epi8(ymm0));\n");
 	      
 	      //                sgn  = simde_mm256_sign_epi8(sgn, ymm0);
-	      fprintf(fd,"                sgn  = simde_mm256_sign_epi8(sgn, ymm0);\n");
+#ifndef AVOID_MM256_SIGN
+  	      fprintf(fd,"                sgn  = simde_mm256_sign_epi8(sgn, ymm0);\n");
+#else
+	      fprintf(fd,"                sgn  = simde_mm256_xor_si256(sgn, ymm0);\n");
+#endif
 	    }
 	  
 	  // Store result
 	  //                min = simde_mm256_min_epu8(min, maxLLR); // 128 in epi8 is -127
+#ifndef DROP_MAXLLR
 	  fprintf(fd,"                min = simde_mm256_min_epu8(min, maxLLR);\n");
+#endif	  
 	  //                *p_cnProcBufResBit = simde_mm256_sign_epi8(min, sgn);
 	  //                p_cnProcBufResBit++;
-	  fprintf(fd,"                ((__m256i*)cnProcBufRes)[%d+i] = simde_mm256_sign_epi8(min, sgn);\n",(lut_startAddrCnGroups[8]>>5)+(j*bitOffsetInGroup));
+	  fprintf(fd,"                ((simde__m256i*)cnProcBufRes)[%d+i] = simde_mm256_sign_epi8(min, sgn);\n",(lut_startAddrCnGroups[8]>>5)+(j*bitOffsetInGroup));
 	  fprintf(fd,"            }\n");
         }
     }
diff --git a/openair1/PHY/CODING/nrLDPC_decoder/nrLDPC_tools/generator_cnProc/cnProc_gen_BG2_128.c b/openair1/PHY/CODING/nrLDPC_decoder/nrLDPC_tools/generator_cnProc/cnProc_gen_BG2_128.c
new file mode 100644
index 0000000000000000000000000000000000000000..fa49d527c35c636b7999042be8da3fe5063fe441
--- /dev/null
+++ b/openair1/PHY/CODING/nrLDPC_decoder/nrLDPC_tools/generator_cnProc/cnProc_gen_BG2_128.c
@@ -0,0 +1,436 @@
+/*
+ * Licensed to the OpenAirInterface (OAI) Software Alliance under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The OpenAirInterface Software Alliance licenses this file to You under
+ * the OAI Public License, Version 1.1  (the "License"); you may not use this file
+ * except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.openairinterface.org/?page_id=698
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *-------------------------------------------------------------------------------
+ * For more information about the OpenAirInterface (OAI) Software Alliance:
+ *      contact@openairinterface.org
+ */
+
+#include <stdio.h>
+#include <stdint.h>
+#include <stdlib.h>
+#include "../../nrLDPCdecoder_defs.h"
+#include "../../nrLDPC_types.h"                                                                                           
+#include "../../nrLDPC_bnProc.h"
+
+
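+/*
+ * Generator for the 128-bit (SIMDE SSE) BG2 check-node kernels. For each CN
+ * group size it emits fully unrolled min-sum code: for every bit of a CN it
+ * takes the minimum |LLR| over all other connected bits, applies the product
+ * of their signs, and clamps the magnitude to +127.
+ */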
+void nrLDPC_cnProc_BG2_generator_128(const char* dir, int R)
+{
+  const char *ratestr[3]={"15","13","23"};
+
+  if (R<0 || R>2) {printf("Illegal R %d\n",R); abort();}
+
+
+//  system("mkdir -p ldpc_gen_files/avx2");
+
+  char fname[FILENAME_MAX+1];
+  snprintf(fname, sizeof(fname), "%s/cnProc128/nrLDPC_cnProc_BG2_R%s_128.h", dir, ratestr[R]);
+  FILE *fd=fopen(fname,"w");
+  if (fd == NULL) {
+    printf("Cannot create file %s\n", fname);
+    abort();
+  }
+
+  fprintf(fd,"#include <stdint.h>\n");
+  fprintf(fd,"#include \"PHY/sse_intrin.h\"\n");
+  fprintf(fd,"static inline void nrLDPC_cnProc_BG2_R%s_128(int8_t* cnProcBuf, int8_t* cnProcBufRes, uint16_t Z) {\n",ratestr[R]);
+
+  const uint8_t*  lut_numCnInCnGroups;
+  const uint32_t* lut_startAddrCnGroups = lut_startAddrCnGroups_BG2;
+
+  if (R==0)      lut_numCnInCnGroups = lut_numCnInCnGroups_BG2_R15;
+  else if (R==1) lut_numCnInCnGroups = lut_numCnInCnGroups_BG2_R13;
+  else if (R==2) lut_numCnInCnGroups = lut_numCnInCnGroups_BG2_R23;
+  else { printf("aborting, illegal R %d\n",R); fclose(fd);abort();}
+
+
+  // Number of CNs in Groups
+  //uint32_t M;
+  uint32_t j;
+  uint32_t k;
+  // Offset to each bit within a group in terms of 16 byte
+  uint32_t bitOffsetInGroup;
+
+  // LUT offsets below are given in units of 32-byte vectors (as in the AVX2
+  // generator); they are doubled wherever they index 16-byte simde__m128i vectors.
+
+  // =====================================================================
+  // Process group with 3 BNs
+  fprintf(fd,"//Process group with 3 BNs\n");
+  // LUT with offsets for bits that need to be processed
+  // 1. bit proc requires LLRs of 2. and 3. bit, 2.bits of 1. and 3. etc.
+    // Offsets are in units of bitOffsetInGroup
+    const uint8_t lut_idxCnProcG3[3][2] = {{72,144}, {0,144}, {0,72}};
+
+
+  fprintf(fd,"                simde__m128i ymm0, min, sgn,ones,maxLLR;\n");
+  fprintf(fd,"                ones   = simde_mm_set1_epi8((char)1);\n");
+  fprintf(fd,"                maxLLR = simde_mm_set1_epi8((char)127);\n");
+  fprintf(fd,"                uint32_t M;\n");
+
+
+  if (lut_numCnInCnGroups[0] > 0)
+    {
+      // Number of groups of 16 CNs for parallel processing
+      // Ceil for values not divisible by 16
+      fprintf(fd," M = (%d*Z + 15)>>4;\n",lut_numCnInCnGroups[0]);
+
+      // Set the offset to each bit within a group in terms of 16 byte
+      bitOffsetInGroup = (lut_numCnInCnGroups_BG2_R15[0]*NR_LDPC_ZMAX)>>4;
+
+      // Loop over every BN
+      
+      for (j=0; j<3; j++)
+        {
+
+            fprintf(fd,"            for (int i=0;i<M;i+=2) {\n");
+            // Abs and sign of 16 CNs (first BN)
+            //                ymm0 = p_cnProcBuf[lut_idxCnProcG3[j][0] + i];
+            fprintf(fd,"                ymm0 = ((simde__m128i*)cnProcBuf)[%d+i];\n",(lut_startAddrCnGroups[0]>>4)+lut_idxCnProcG3[j][0]*2);
+            //                sgn  = simde_mm_sign_epi8(ones, ymm0);
+            fprintf(fd,"                sgn  = simde_mm_sign_epi8(ones, ymm0);\n");
+            //                min  = simde_mm_abs_epi8(ymm0);
+            fprintf(fd,"                min  = simde_mm_abs_epi8(ymm0);\n");
+            
+            // 16 CNs of second BN
+            //                ymm0 = p_cnProcBuf[lut_idxCnProcG3[j][1] + i];
+            fprintf(fd,"                ymm0 = ((simde__m128i*)cnProcBuf)[%d+i];\n",(lut_startAddrCnGroups[0]>>4)+lut_idxCnProcG3[j][1]*2);
+            
+            //                min  = simde_mm_min_epu8(min, simde_mm_abs_epi8(ymm0));
+            fprintf(fd,"                min  = simde_mm_min_epu8(min, simde_mm_abs_epi8(ymm0));\n");
+            
+            //                sgn  = simde_mm_sign_epi8(sgn, ymm0);
+            fprintf(fd,"                sgn  = simde_mm_sign_epi8(sgn, ymm0);\n");
+            
+            // Store result
+            //                min = simde_mm_min_epu8(min, maxLLR); // 128 in epi8 is -127
+            fprintf(fd,"                min = simde_mm_min_epu8(min, maxLLR);\n");
+            //                *p_cnProcBufResBit = simde_mm_sign_epi8(min, sgn);
+            //                p_cnProcBufResBit++;
+            fprintf(fd,"                ((simde__m128i*)cnProcBufRes)[%d+i] = simde_mm_sign_epi8(min, sgn);\n",(lut_startAddrCnGroups[0]>>4)+(j*bitOffsetInGroup));
+
+            // Abs and sign of 16 CNs (first BN, odd-index vector)
+            fprintf(fd,"                ymm0 = ((simde__m128i*)cnProcBuf)[%d+i];\n",(lut_startAddrCnGroups[0]>>4)+lut_idxCnProcG3[j][0]*2+1);
+            fprintf(fd,"                sgn  = simde_mm_sign_epi8(ones, ymm0);\n");
+            fprintf(fd,"                min  = simde_mm_abs_epi8(ymm0);\n");
+
+            // Second BN, odd-index vector: the loop advances i by 2, so the
+            // odd-index result must be computed and stored here as well
+            // (mirrors the even-index half above, which the original left
+            // unfinished)
+            fprintf(fd,"                ymm0 = ((simde__m128i*)cnProcBuf)[%d+i];\n",(lut_startAddrCnGroups[0]>>4)+lut_idxCnProcG3[j][1]*2+1);
+            fprintf(fd,"                min  = simde_mm_min_epu8(min, simde_mm_abs_epi8(ymm0));\n");
+            fprintf(fd,"                sgn  = simde_mm_sign_epi8(sgn, ymm0);\n");
+            fprintf(fd,"                min = simde_mm_min_epu8(min, maxLLR);\n");
+            fprintf(fd,"                ((simde__m128i*)cnProcBufRes)[%d+i] = simde_mm_sign_epi8(min, sgn);\n",(lut_startAddrCnGroups[0]>>4)+(j*bitOffsetInGroup)+1);
+
+            fprintf(fd,"            }\n");
+          }
+      }
+
+  // =====================================================================
+  // Process group with 4 BNs
+  fprintf(fd,"//Process group with 4 BNs\n");
+  
+ // Offset is 20*384/32 = 240
+    const uint16_t lut_idxCnProcG4[4][3] = {{240,480,720}, {0,480,720}, {0,240,720}, {0,240,480}};
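+    // Row j lists, in 32-byte-vector units (doubled below for simde__m128i),
+    // the cnProcBuf offsets of the 3 other BNs feeding bit j.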
+
+    if (lut_numCnInCnGroups[1] > 0)
+    {
+        // Number of groups of 16 CNs for parallel processing
+        // Ceil for values not divisible by 16
+        fprintf(fd," M = (%d*Z + 15)>>4;\n",lut_numCnInCnGroups[1]);
+
+        // Set the offset to each bit within a group in terms of 16 byte
+        bitOffsetInGroup = (lut_numCnInCnGroups_BG2_R15[1]*NR_LDPC_ZMAX)>>4;
+
+            // Loop over every BN
+            
+          for (j=0; j<4; j++)
+          {
+
+          // Loop over CNs
+
+          fprintf(fd,"            for (int i=0;i<M;i++) {\n");
+          // Abs and sign of 16 CNs (first BN)
+          //                ymm0 = p_cnProcBuf[lut_idxCnProcG3[j][0] + i];
+          fprintf(fd,"                ymm0 = ((simde__m128i*)cnProcBuf)[%d+i];\n",(lut_startAddrCnGroups[1]>>4)+lut_idxCnProcG4[j][0]*2);
+          //                sgn  = simde_mm_sign_epi8(ones, ymm0);
+           fprintf(fd,"                sgn  = simde_mm_sign_epi8(ones, ymm0);\n");
+          //                min  = simde_mm_abs_epi8(ymm0);
+          fprintf(fd,"                min  = simde_mm_abs_epi8(ymm0);\n");
+            
+            
+          // Loop over BNs
+            for (k=1; k<3; k++)
+            {
+            fprintf(fd,"                ymm0 = ((simde__m128i*)cnProcBuf)[%d+i];\n",(lut_startAddrCnGroups[1]>>4)+lut_idxCnProcG4[j][k]*2);
+                
+            //                min  = simde_mm_min_epu8(min, simde_mm_abs_epi8(ymm0));
+            fprintf(fd,"                min  = simde_mm_min_epu8(min, simde_mm_abs_epi8(ymm0));\n");
+                
+            //                sgn  = simde_mm_sign_epi8(sgn, ymm0);
+                fprintf(fd,"                sgn  = simde_mm_sign_epi8(sgn, ymm0);\n");
+            }
+            
+            // Store result
+            //                min = simde_mm_min_epu8(min, maxLLR); // 128 in epi8 is -127
+            fprintf(fd,"                min = simde_mm_min_epu8(min, maxLLR);\n");
+            //                *p_cnProcBufResBit = simde_mm_sign_epi8(min, sgn);
+                //                p_cnProcBufResBit++;
+            fprintf(fd,"                ((simde__m128i*)cnProcBufRes)[%d+i] = simde_mm_sign_epi8(min, sgn);\n",(lut_startAddrCnGroups[1]>>4)+(j*bitOffsetInGroup));
+            fprintf(fd,"            }\n");
+          }
+      }
+
+
+  // =====================================================================
+  // Process group with 5 BNs
+    fprintf(fd,"//Process group with 5 BNs\n");
+
+    // Offset is 9*384/32 = 108
+    const uint16_t lut_idxCnProcG5[5][4] = {{108,216,324,432}, {0,216,324,432},
+                                            {0,108,324,432}, {0,108,216,432}, {0,108,216,324}};
+
+
+
+    if (lut_numCnInCnGroups[2] > 0)
+    {
+      // Number of groups of 16 CNs for parallel processing
+      // Ceil for values not divisible by 16
+      fprintf(fd," M = (%d*Z + 15)>>4;\n",lut_numCnInCnGroups[2]);
+
+      // Set the offset to each bit within a group in terms of 16 byte
+      bitOffsetInGroup = (lut_numCnInCnGroups_BG2_R15[2]*NR_LDPC_ZMAX)>>4;
+
+      // Loop over every BN
+      
+      for (j=0; j<5; j++)
+	    {
+
+         
+         fprintf(fd,"            for (int i=0;i<M;i++) {\n");
+        // Abs and sign of 16 CNs (first BN)
+        //                ymm0 = p_cnProcBuf[lut_idxCnProcG3[j][0] + i];
+        fprintf(fd,"                ymm0 = ((simde__m128i*)cnProcBuf)[%d+i];\n",(lut_startAddrCnGroups[2]>>4)+lut_idxCnProcG5[j][0]*2);
+        //                sgn  = simde_mm_sign_epi8(ones, ymm0);
+        fprintf(fd,"                sgn  = simde_mm_sign_epi8(ones, ymm0);\n");
+        //                min  = simde_mm_abs_epi8(ymm0);
+        fprintf(fd,"                min  = simde_mm_abs_epi8(ymm0);\n");
+        
+        
+        // Loop over BNs
+        for (k=1; k<4; k++)
+        {
+          fprintf(fd,"                ymm0 = ((simde__m128i*)cnProcBuf)[%d+i];\n",(lut_startAddrCnGroups[2]>>4)+lut_idxCnProcG5[j][k]*2);
+            
+          //                min  = simde_mm_min_epu8(min, simde_mm_abs_epi8(ymm0));
+          fprintf(fd,"                min  = simde_mm_min_epu8(min, simde_mm_abs_epi8(ymm0));\n");
+            
+          //                sgn  = simde_mm_sign_epi8(sgn, ymm0);
+          fprintf(fd,"                sgn  = simde_mm_sign_epi8(sgn, ymm0);\n");
+        }
+        
+          // Store result
+        //                min = simde_mm_min_epu8(min, maxLLR); // 128 in epi8 is -127
+        fprintf(fd,"                min = simde_mm_min_epu8(min, maxLLR);\n");
+        //                *p_cnProcBufResBit = simde_mm_sign_epi8(min, sgn);
+        fprintf(fd,"                ((simde__m128i*)cnProcBufRes)[%d+i] = simde_mm_sign_epi8(min, sgn);\n",(lut_startAddrCnGroups[2]>>4)+(j*bitOffsetInGroup));
+        fprintf(fd,"            }\n");
+      }
+    }
+
+  // =====================================================================
+  // Process group with 6 BNs
+  fprintf(fd,"//Process group with 6 BNs\n");
+    // Offset is 3*384/32 = 36
+  const uint16_t lut_idxCnProcG6[6][5] = {{36,72,108,144,180}, {0,72,108,144,180},
+                                            {0,36,108,144,180}, {0,36,72,144,180},
+                                            {0,36,72,108,180}, {0,36,72,108,144}};
+
+
+  if (lut_numCnInCnGroups[3] > 0)
+  {
+      // Number of groups of 16 CNs for parallel processing
+      // Ceil for values not divisible by 16
+      fprintf(fd," M = (%d*Z + 15)>>4;\n",lut_numCnInCnGroups[3]);
+
+      // Set the offset to each bit within a group in terms of 16 byte
+      bitOffsetInGroup = (lut_numCnInCnGroups_BG2_R15[3]*NR_LDPC_ZMAX)>>4;
+
+      // Loop over every BN
+      
+    for (j=0; j<6; j++)
+    {
+	
+
+	    // Loop over CNs
+	 
+	    fprintf(fd,"            for (int i=0;i<M;i++) {\n");
+	    // Abs and sign of 16 CNs (first BN)
+	    //                ymm0 = p_cnProcBuf[lut_idxCnProcG3[j][0] + i];
+	    fprintf(fd,"                ymm0 = ((simde__m128i*)cnProcBuf)[%d+i];\n",(lut_startAddrCnGroups[3]>>4)+lut_idxCnProcG6[j][0]*2);
+	    //                sgn  = simde_mm_sign_epi8(ones, ymm0);
+	    fprintf(fd,"                sgn  = simde_mm_sign_epi8(ones, ymm0);\n");
+	   //                min  = simde_mm_abs_epi8(ymm0);
+	    fprintf(fd,"                min  = simde_mm_abs_epi8(ymm0);\n");
+	  
+	  
+	    // Loop over BNs
+	    for (k=1; k<5; k++)
+	    {
+	    fprintf(fd,"                ymm0 = ((simde__m128i*)cnProcBuf)[%d+i];\n",(lut_startAddrCnGroups[3]>>4)+lut_idxCnProcG6[j][k]*2);
+	      
+	    //                min  = simde_mm_min_epu8(min, simde_mm_abs_epi8(ymm0));
+	    fprintf(fd,"                min  = simde_mm_min_epu8(min, simde_mm_abs_epi8(ymm0));\n");
+	      
+	    //                sgn  = simde_mm_sign_epi8(sgn, ymm0);
+	    fprintf(fd,"                sgn  = simde_mm_sign_epi8(sgn, ymm0);\n");
+	    }
+	  
+      // Store result
+      //                min = simde_mm_min_epu8(min, maxLLR); // 128 in epi8 is -127
+      fprintf(fd,"                min = simde_mm_min_epu8(min, maxLLR);\n");
+      //                *p_cnProcBufResBit = simde_mm_sign_epi8(min, sgn);
+      //                p_cnProcBufResBit++;
+      fprintf(fd,"                ((simde__m128i*)cnProcBufRes)[%d+i] = simde_mm_sign_epi8(min, sgn);\n",(lut_startAddrCnGroups[3]>>4)+(j*bitOffsetInGroup));
+      fprintf(fd,"            }\n");
+	  }
+  }
+
+
+
+  // =====================================================================
+  // Process group with 8 BNs
+  fprintf(fd,"//Process group with 8 BNs\n");
+ // Offset is 2*384/32 = 24
+    const uint8_t lut_idxCnProcG8[8][7] = {{24,48,72,96,120,144,168}, {0,48,72,96,120,144,168},
+                                           {0,24,72,96,120,144,168}, {0,24,48,96,120,144,168},
+                                           {0,24,48,72,120,144,168}, {0,24,48,72,96,144,168},
+                                           {0,24,48,72,96,120,168}, {0,24,48,72,96,120,144}};
+
+
+
+
+
+
+    if (lut_numCnInCnGroups[4] > 0)
+    {
+      // Number of groups of 16 CNs for parallel processing
+      // Ceil for values not divisible by 16
+      fprintf(fd," M = (%d*Z + 15)>>4;\n",lut_numCnInCnGroups[4]);
+
+      // Set the offset to each bit within a group in terms of 16 byte
+      bitOffsetInGroup = (lut_numCnInCnGroups_BG2_R15[4]*NR_LDPC_ZMAX)>>4;
+
+      // Loop over every BN
+      
+      for (j=0; j<8; j++)
+      {
+
+	      // Loop over CNs
+        fprintf(fd,"            for (int i=0;i<M;i++) {\n");
+        // Abs and sign of 16 CNs (first BN)
+        //                ymm0 = p_cnProcBuf[lut_idxCnProcG3[j][0] + i];
+        fprintf(fd,"                ymm0 = ((simde__m128i*)cnProcBuf)[%d+i];\n",(lut_startAddrCnGroups[4]>>4)+lut_idxCnProcG8[j][0]*2);
+        //                sgn  = simde_mm_sign_epi8(ones, ymm0);
+        fprintf(fd,"                sgn  = simde_mm_sign_epi8(ones, ymm0);\n");
+        //                min  = simde_mm_abs_epi8(ymm0);
+        fprintf(fd,"                min  = simde_mm_abs_epi8(ymm0);\n");
+        
+	      // Loop over BNs
+          for (k=1; k<7; k++)
+          {
+          fprintf(fd,"                ymm0 = ((simde__m128i*)cnProcBuf)[%d+i];\n",(lut_startAddrCnGroups[4]>>4)+lut_idxCnProcG8[j][k]*2);
+            
+          //                min  = simde_mm_min_epu8(min, simde_mm_abs_epi8(ymm0));
+          fprintf(fd,"                min  = simde_mm_min_epu8(min, simde_mm_abs_epi8(ymm0));\n");
+            
+            //                sgn  = simde_mm_sign_epi8(sgn, ymm0);
+          fprintf(fd,"                sgn  = simde_mm_sign_epi8(sgn, ymm0);\n");
+          
+          }
+	  
+	        // Store result
+          //                min = simde_mm_min_epu8(min, maxLLR); // 128 in epi8 is -127
+          fprintf(fd,"                min = simde_mm_min_epu8(min, maxLLR);\n");
+          //                *p_cnProcBufResBit = simde_mm_sign_epi8(min, sgn);
+          //                p_cnProcBufResBit++;
+          fprintf(fd,"                ((simde__m128i*)cnProcBufRes)[%d+i] = simde_mm_sign_epi8(min, sgn);\n",(lut_startAddrCnGroups[4]>>4)+(j*bitOffsetInGroup));
+          fprintf(fd,"            }\n");
+        }
+    }
+
+
+  // =====================================================================
+  // Process group with 10 BNs
+  fprintf(fd,"//Process group with 10 BNs\n");
+
+    const uint8_t lut_idxCnProcG10[10][9] = {{24,48,72,96,120,144,168,192,216}, {0,48,72,96,120,144,168,192,216},
+                                             {0,24,72,96,120,144,168,192,216}, {0,24,48,96,120,144,168,192,216},
+                                             {0,24,48,72,120,144,168,192,216}, {0,24,48,72,96,144,168,192,216},
+                                             {0,24,48,72,96,120,168,192,216}, {0,24,48,72,96,120,144,192,216},
+                                             {0,24,48,72,96,120,144,168,216}, {0,24,48,72,96,120,144,168,192}};
+
+
+
+
+
+    if (lut_numCnInCnGroups[5] > 0)
+    {
+      // Number of groups of 16 CNs for parallel processing
+      // Ceil for values not divisible by 16
+      fprintf(fd," M = (%d*Z + 15)>>4;\n",lut_numCnInCnGroups[5]);
+
+      // Set the offset to each bit within a group in terms of 16 byte
+      bitOffsetInGroup = (lut_numCnInCnGroups_BG2_R15[5]*NR_LDPC_ZMAX)>>4;
+
+      // Loop over every BN
+      
+      for (j=0; j<10; j++)
+      {
+
+      // Loop over CNs
+
+      fprintf(fd,"            for (int i=0;i<M;i++) {\n");
+      // Abs and sign of 16 CNs (first BN)
+        //                ymm0 = p_cnProcBuf[lut_idxCnProcG3[j][0] + i];
+      fprintf(fd,"                ymm0 = ((simde__m128i*)cnProcBuf)[%d+i];\n",(lut_startAddrCnGroups[5]>>4)+lut_idxCnProcG10[j][0]*2);
+        //                sgn  = simde_mm_sign_epi8(ones, ymm0);
+      fprintf(fd,"                sgn  = simde_mm_sign_epi8(ones, ymm0);\n");
+        //                min  = simde_mm_abs_epi8(ymm0);
+      fprintf(fd,"                min  = simde_mm_abs_epi8(ymm0);\n");
+        
+	  
+	  // Loop over BNs
+	     for (k=1; k<9; k++)
+	     {
+          fprintf(fd,"                ymm0 = ((simde__m128i*)cnProcBuf)[%d+i];\n",(lut_startAddrCnGroups[5]>>4)+lut_idxCnProcG10[j][k]*2);
+            
+            //                min  = simde_mm_min_epu8(min, simde_mm_abs_epi8(ymm0));
+          fprintf(fd,"                min  = simde_mm_min_epu8(min, simde_mm_abs_epi8(ymm0));\n");
+            
+            //                sgn  = simde_mm_sign_epi8(sgn, ymm0);
+          fprintf(fd,"                sgn  = simde_mm_sign_epi8(sgn, ymm0);\n");
+        }
+	  
+          // Store result
+            //                min = simde_mm_min_epu8(min, maxLLR); // 128 in epi8 is -127
+          fprintf(fd,"                min = simde_mm_min_epu8(min, maxLLR);\n");
+            //                *p_cnProcBufResBit = simde_mm_sign_epi8(min, sgn);
+            //                p_cnProcBufResBit++;
+          fprintf(fd,"                ((simde__m128i*)cnProcBufRes)[%d+i] = simde_mm_sign_epi8(min, sgn);\n",(lut_startAddrCnGroups[5]>>4)+(j*bitOffsetInGroup));
+          fprintf(fd,"            }\n");
+      }
+    }
+
+
+  fprintf(fd,"}\n");
+  fclose(fd);
+} // end of the function nrLDPC_cnProc_BG2_generator_128
+
diff --git a/openair1/PHY/CODING/nrLDPC_decoder/nrLDPC_tools/generator_cnProc/cnProc_gen_BG2_avx2.c b/openair1/PHY/CODING/nrLDPC_decoder/nrLDPC_tools/generator_cnProc/cnProc_gen_BG2_avx2.c
index 0d0c1305a04b20266bcf12fec497a2ea240f6fe2..62084b5b93a10e4dad89c75ad97afc8ff2e0f5e7 100644
--- a/openair1/PHY/CODING/nrLDPC_decoder/nrLDPC_tools/generator_cnProc/cnProc_gen_BG2_avx2.c
+++ b/openair1/PHY/CODING/nrLDPC_decoder/nrLDPC_tools/generator_cnProc/cnProc_gen_BG2_avx2.c
@@ -76,7 +76,7 @@ void nrLDPC_cnProc_BG2_generator_AVX2(const char* dir, int R)
     const uint8_t lut_idxCnProcG3[3][2] = {{72,144}, {0,144}, {0,72}};
 
 
-  fprintf(fd,"                __m256i ymm0, min, sgn,ones,maxLLR;\n");
+  fprintf(fd,"                simde__m256i ymm0, min, sgn,ones,maxLLR;\n");
   fprintf(fd,"                ones   = simde_mm256_set1_epi8((char)1);\n");
   fprintf(fd,"                maxLLR = simde_mm256_set1_epi8((char)127);\n");
     fprintf(fd,"                uint32_t M;\n");
@@ -99,7 +99,7 @@ void nrLDPC_cnProc_BG2_generator_AVX2(const char* dir, int R)
             fprintf(fd,"            for (int i=0;i<M;i+=2) {\n");
             // Abs and sign of 32 CNs (first BN)
             //                ymm0 = p_cnProcBuf[lut_idxCnProcG3[j][0] + i];
-            fprintf(fd,"                ymm0 = ((__m256i*)cnProcBuf)[%d+i];\n",(lut_startAddrCnGroups[0]>>5)+lut_idxCnProcG3[j][0]);
+            fprintf(fd,"                ymm0 = ((simde__m256i*)cnProcBuf)[%d+i];\n",(lut_startAddrCnGroups[0]>>5)+lut_idxCnProcG3[j][0]);
             //                sgn  = simde_mm256_sign_epi8(ones, ymm0);
             fprintf(fd,"                sgn  = simde_mm256_sign_epi8(ones, ymm0);\n");
             //                min  = simde_mm256_abs_epi8(ymm0);
@@ -107,7 +107,7 @@ void nrLDPC_cnProc_BG2_generator_AVX2(const char* dir, int R)
             
             // 32 CNs of second BN
             //                ymm0 = p_cnProcBuf[lut_idxCnProcG3[j][1] + i];
-            fprintf(fd,"                ymm0 = ((__m256i*)cnProcBuf)[%d+i];\n",(lut_startAddrCnGroups[0]>>5)+lut_idxCnProcG3[j][1]);
+            fprintf(fd,"                ymm0 = ((simde__m256i*)cnProcBuf)[%d+i];\n",(lut_startAddrCnGroups[0]>>5)+lut_idxCnProcG3[j][1]);
             
             //                min  = simde_mm256_min_epu8(min, simde_mm256_abs_epi8(ymm0));
             fprintf(fd,"                min  = simde_mm256_min_epu8(min, simde_mm256_abs_epi8(ymm0));\n");
@@ -120,11 +120,11 @@ void nrLDPC_cnProc_BG2_generator_AVX2(const char* dir, int R)
             fprintf(fd,"                min = simde_mm256_min_epu8(min, maxLLR);\n");
             //                *p_cnProcBufResBit = simde_mm256_sign_epi8(min, sgn);
             //                p_cnProcBufResBit++;
-            fprintf(fd,"                ((__m256i*)cnProcBufRes)[%d+i] = simde_mm256_sign_epi8(min, sgn);\n",(lut_startAddrCnGroups[0]>>5)+(j*bitOffsetInGroup));
+            fprintf(fd,"                ((simde__m256i*)cnProcBufRes)[%d+i] = simde_mm256_sign_epi8(min, sgn);\n",(lut_startAddrCnGroups[0]>>5)+(j*bitOffsetInGroup));
 
             // Abs and sign of 32 CNs (first BN)
             //                ymm0 = p_cnProcBuf[lut_idxCnProcG3[j][0] + i];
-            fprintf(fd,"                ymm0 = ((__m256i*)cnProcBuf)[%d+i];\n",(lut_startAddrCnGroups[0]>>5)+lut_idxCnProcG3[j][0]+1);
+            fprintf(fd,"                ymm0 = ((simde__m256i*)cnProcBuf)[%d+i];\n",(lut_startAddrCnGroups[0]>>5)+lut_idxCnProcG3[j][0]+1);
             //                sgn  = simde_mm256_sign_epi8(ones, ymm0);
             fprintf(fd,"                sgn  = simde_mm256_sign_epi8(ones, ymm0);\n");
             //                min  = simde_mm256_abs_epi8(ymm0);
@@ -160,7 +160,7 @@ void nrLDPC_cnProc_BG2_generator_AVX2(const char* dir, int R)
           fprintf(fd,"            for (int i=0;i<M;i++) {\n");
           // Abs and sign of 32 CNs (first BN)
           //                ymm0 = p_cnProcBuf[lut_idxCnProcG3[j][0] + i];
-          fprintf(fd,"                ymm0 = ((__m256i*)cnProcBuf)[%d+i];\n",(lut_startAddrCnGroups[1]>>5)+lut_idxCnProcG4[j][0]);
+          fprintf(fd,"                ymm0 = ((simde__m256i*)cnProcBuf)[%d+i];\n",(lut_startAddrCnGroups[1]>>5)+lut_idxCnProcG4[j][0]);
           //                sgn  = simde_mm256_sign_epi8(ones, ymm0);
            fprintf(fd,"                sgn  = simde_mm256_sign_epi8(ones, ymm0);\n");
           //                min  = simde_mm256_abs_epi8(ymm0);
@@ -170,7 +170,7 @@ void nrLDPC_cnProc_BG2_generator_AVX2(const char* dir, int R)
           // Loop over BNs
             for (k=1; k<3; k++)
             {
-            fprintf(fd,"                ymm0 = ((__m256i*)cnProcBuf)[%d+i];\n",(lut_startAddrCnGroups[1]>>5)+lut_idxCnProcG4[j][k]);
+            fprintf(fd,"                ymm0 = ((simde__m256i*)cnProcBuf)[%d+i];\n",(lut_startAddrCnGroups[1]>>5)+lut_idxCnProcG4[j][k]);
                 
             //                min  = simde_mm256_min_epu8(min, simde_mm256_abs_epi8(ymm0));
             fprintf(fd,"                min  = simde_mm256_min_epu8(min, simde_mm256_abs_epi8(ymm0));\n");
@@ -184,7 +184,7 @@ void nrLDPC_cnProc_BG2_generator_AVX2(const char* dir, int R)
             fprintf(fd,"                min = simde_mm256_min_epu8(min, maxLLR);\n");
             //                *p_cnProcBufResBit = simde_mm256_sign_epi8(min, sgn);
                 //                p_cnProcBufResBit++;
-            fprintf(fd,"                ((__m256i*)cnProcBufRes)[%d+i] = simde_mm256_sign_epi8(min, sgn);\n",(lut_startAddrCnGroups[1]>>5)+(j*bitOffsetInGroup));
+            fprintf(fd,"                ((simde__m256i*)cnProcBufRes)[%d+i] = simde_mm256_sign_epi8(min, sgn);\n",(lut_startAddrCnGroups[1]>>5)+(j*bitOffsetInGroup));
             fprintf(fd,"            }\n");
           }
       }
@@ -217,7 +217,7 @@ void nrLDPC_cnProc_BG2_generator_AVX2(const char* dir, int R)
          fprintf(fd,"            for (int i=0;i<M;i++) {\n");
         // Abs and sign of 32 CNs (first BN)
         //                ymm0 = p_cnProcBuf[lut_idxCnProcG3[j][0] + i];
-        fprintf(fd,"                ymm0 = ((__m256i*)cnProcBuf)[%d+i];\n",(lut_startAddrCnGroups[2]>>5)+lut_idxCnProcG5[j][0]);
+        fprintf(fd,"                ymm0 = ((simde__m256i*)cnProcBuf)[%d+i];\n",(lut_startAddrCnGroups[2]>>5)+lut_idxCnProcG5[j][0]);
         //                sgn  = simde_mm256_sign_epi8(ones, ymm0);
         fprintf(fd,"                sgn  = simde_mm256_sign_epi8(ones, ymm0);\n");
         //                min  = simde_mm256_abs_epi8(ymm0);
@@ -227,7 +227,7 @@ void nrLDPC_cnProc_BG2_generator_AVX2(const char* dir, int R)
         // Loop over BNs
         for (k=1; k<4; k++)
         {
-          fprintf(fd,"                ymm0 = ((__m256i*)cnProcBuf)[%d+i];\n",(lut_startAddrCnGroups[2]>>5)+lut_idxCnProcG5[j][k]);
+          fprintf(fd,"                ymm0 = ((simde__m256i*)cnProcBuf)[%d+i];\n",(lut_startAddrCnGroups[2]>>5)+lut_idxCnProcG5[j][k]);
             
           //                min  = simde_mm256_min_epu8(min, simde_mm256_abs_epi8(ymm0));
           fprintf(fd,"                min  = simde_mm256_min_epu8(min, simde_mm256_abs_epi8(ymm0));\n");
@@ -240,7 +240,7 @@ void nrLDPC_cnProc_BG2_generator_AVX2(const char* dir, int R)
         //                min = simde_mm256_min_epu8(min, maxLLR); // 128 in epi8 is -127
         fprintf(fd,"                min = simde_mm256_min_epu8(min, maxLLR);\n");
         //                *p_cnProcBufResBit = simde_mm256_sign_epi8(min, sgn);
-        fprintf(fd,"                ((__m256i*)cnProcBufRes)[%d+i] = simde_mm256_sign_epi8(min, sgn);\n",(lut_startAddrCnGroups[2]>>5)+(j*bitOffsetInGroup));
+        fprintf(fd,"                ((simde__m256i*)cnProcBufRes)[%d+i] = simde_mm256_sign_epi8(min, sgn);\n",(lut_startAddrCnGroups[2]>>5)+(j*bitOffsetInGroup));
         fprintf(fd,"           }\n");
       }
     }
@@ -274,7 +274,7 @@ void nrLDPC_cnProc_BG2_generator_AVX2(const char* dir, int R)
 	    fprintf(fd,"            for (int i=0;i<M;i++) {\n");
 	    // Abs and sign of 32 CNs (first BN)
 	    //                ymm0 = p_cnProcBuf[lut_idxCnProcG3[j][0] + i];
-	    fprintf(fd,"                ymm0 = ((__m256i*)cnProcBuf)[%d+i];\n",(lut_startAddrCnGroups[3]>>5)+lut_idxCnProcG6[j][0]);
+	    fprintf(fd,"                ymm0 = ((simde__m256i*)cnProcBuf)[%d+i];\n",(lut_startAddrCnGroups[3]>>5)+lut_idxCnProcG6[j][0]);
 	    //                sgn  = simde_mm256_sign_epi8(ones, ymm0);
 	    fprintf(fd,"                sgn  = simde_mm256_sign_epi8(ones, ymm0);\n");
 	   //                min  = simde_mm256_abs_epi8(ymm0);
@@ -284,7 +284,7 @@ void nrLDPC_cnProc_BG2_generator_AVX2(const char* dir, int R)
 	    // Loop over BNs
 	    for (k=1; k<5; k++)
 	    {
-	    fprintf(fd,"                ymm0 = ((__m256i*)cnProcBuf)[%d+i];\n",(lut_startAddrCnGroups[3]>>5)+lut_idxCnProcG6[j][k]);
+	    fprintf(fd,"                ymm0 = ((simde__m256i*)cnProcBuf)[%d+i];\n",(lut_startAddrCnGroups[3]>>5)+lut_idxCnProcG6[j][k]);
 	      
 	    //                min  = simde_mm256_min_epu8(min, simde_mm256_abs_epi8(ymm0));
 	    fprintf(fd,"                min  = simde_mm256_min_epu8(min, simde_mm256_abs_epi8(ymm0));\n");
@@ -298,7 +298,7 @@ void nrLDPC_cnProc_BG2_generator_AVX2(const char* dir, int R)
       fprintf(fd,"                min = simde_mm256_min_epu8(min, maxLLR);\n");
       //                *p_cnProcBufResBit = simde_mm256_sign_epi8(min, sgn);
       //                p_cnProcBufResBit++;
-      fprintf(fd,"                ((__m256i*)cnProcBufRes)[%d+i] = simde_mm256_sign_epi8(min, sgn);\n",(lut_startAddrCnGroups[3]>>5)+(j*bitOffsetInGroup));
+      fprintf(fd,"                ((simde__m256i*)cnProcBufRes)[%d+i] = simde_mm256_sign_epi8(min, sgn);\n",(lut_startAddrCnGroups[3]>>5)+(j*bitOffsetInGroup));
       fprintf(fd,"            }\n");
 	  }
   }
@@ -337,7 +337,7 @@ void nrLDPC_cnProc_BG2_generator_AVX2(const char* dir, int R)
         fprintf(fd,"            for (int i=0;i<M;i++) {\n");
         // Abs and sign of 32 CNs (first BN)
         //                ymm0 = p_cnProcBuf[lut_idxCnProcG3[j][0] + i];
-        fprintf(fd,"                ymm0 = ((__m256i*)cnProcBuf)[%d+i];\n",(lut_startAddrCnGroups[4]>>5)+lut_idxCnProcG8[j][0]);
+        fprintf(fd,"                ymm0 = ((simde__m256i*)cnProcBuf)[%d+i];\n",(lut_startAddrCnGroups[4]>>5)+lut_idxCnProcG8[j][0]);
         //                sgn  = simde_mm256_sign_epi8(ones, ymm0);
         fprintf(fd,"                sgn  = simde_mm256_sign_epi8(ones, ymm0);\n");
         //                min  = simde_mm256_abs_epi8(ymm0);
@@ -346,7 +346,7 @@ void nrLDPC_cnProc_BG2_generator_AVX2(const char* dir, int R)
 	      // Loop over BNs
           for (k=1; k<7; k++)
           {
-          fprintf(fd,"                ymm0 = ((__m256i*)cnProcBuf)[%d+i];\n",(lut_startAddrCnGroups[4]>>5)+lut_idxCnProcG8[j][k]);
+          fprintf(fd,"                ymm0 = ((simde__m256i*)cnProcBuf)[%d+i];\n",(lut_startAddrCnGroups[4]>>5)+lut_idxCnProcG8[j][k]);
             
           //                min  = simde_mm256_min_epu8(min, simde_mm256_abs_epi8(ymm0));
           fprintf(fd,"                min  = simde_mm256_min_epu8(min, simde_mm256_abs_epi8(ymm0));\n");
@@ -361,7 +361,7 @@ void nrLDPC_cnProc_BG2_generator_AVX2(const char* dir, int R)
           fprintf(fd,"                min = simde_mm256_min_epu8(min, maxLLR);\n");
           //                *p_cnProcBufResBit = simde_mm256_sign_epi8(min, sgn);
           //                p_cnProcBufResBit++;
-          fprintf(fd,"                ((__m256i*)cnProcBufRes)[%d+i] = simde_mm256_sign_epi8(min, sgn);\n",(lut_startAddrCnGroups[4]>>5)+(j*bitOffsetInGroup));
+          fprintf(fd,"                ((simde__m256i*)cnProcBufRes)[%d+i] = simde_mm256_sign_epi8(min, sgn);\n",(lut_startAddrCnGroups[4]>>5)+(j*bitOffsetInGroup));
           fprintf(fd,"              }\n");
         }
     }
@@ -400,7 +400,7 @@ void nrLDPC_cnProc_BG2_generator_AVX2(const char* dir, int R)
       fprintf(fd,"            for (int i=0;i<M;i++) {\n");
       // Abs and sign of 32 CNs (first BN)
         //                ymm0 = p_cnProcBuf[lut_idxCnProcG3[j][0] + i];
-      fprintf(fd,"                ymm0 = ((__m256i*)cnProcBuf)[%d+i];\n",(lut_startAddrCnGroups[5]>>5)+lut_idxCnProcG10[j][0]);
+      fprintf(fd,"                ymm0 = ((simde__m256i*)cnProcBuf)[%d+i];\n",(lut_startAddrCnGroups[5]>>5)+lut_idxCnProcG10[j][0]);
         //                sgn  = simde_mm256_sign_epi8(ones, ymm0);
       fprintf(fd,"                sgn  = simde_mm256_sign_epi8(ones, ymm0);\n");
         //                min  = simde_mm256_abs_epi8(ymm0);
@@ -410,7 +410,7 @@ void nrLDPC_cnProc_BG2_generator_AVX2(const char* dir, int R)
 	  // Loop over BNs
 	     for (k=1; k<9; k++)
 	     {
-          fprintf(fd,"                ymm0 = ((__m256i*)cnProcBuf)[%d+i];\n",(lut_startAddrCnGroups[5]>>5)+lut_idxCnProcG10[j][k]);
+          fprintf(fd,"                ymm0 = ((simde__m256i*)cnProcBuf)[%d+i];\n",(lut_startAddrCnGroups[5]>>5)+lut_idxCnProcG10[j][k]);
             
             //                min  = simde_mm256_min_epu8(min, simde_mm256_abs_epi8(ymm0));
           fprintf(fd,"                min  = simde_mm256_min_epu8(min, simde_mm256_abs_epi8(ymm0));\n");
@@ -424,7 +424,7 @@ void nrLDPC_cnProc_BG2_generator_AVX2(const char* dir, int R)
           fprintf(fd,"                min = simde_mm256_min_epu8(min, maxLLR);\n");
             //                *p_cnProcBufResBit = simde_mm256_sign_epi8(min, sgn);
             //                p_cnProcBufResBit++;
-          fprintf(fd,"                ((__m256i*)cnProcBufRes)[%d+i] = simde_mm256_sign_epi8(min, sgn);\n",(lut_startAddrCnGroups[5]>>5)+(j*bitOffsetInGroup));
+          fprintf(fd,"                ((simde__m256i*)cnProcBufRes)[%d+i] = simde_mm256_sign_epi8(min, sgn);\n",(lut_startAddrCnGroups[5]>>5)+(j*bitOffsetInGroup));
           fprintf(fd,"            }\n");
       }
     }
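Note on the AVX2 hunks above: the change is purely a re-spelling of native __m256i / _mm256_* names into their SIMDE equivalents, so the same generated kernels compile on x86_64 (where SIMDE resolves to the native intrinsics) and on aarch64 (where SIMDE emulates them, typically on NEON). A minimal sketch of the min-sum step those kernels unroll, assuming SIMDE's avx2.h is reachable on the include path (OAI normally pulls it in through PHY/sse_intrin.h); the function name, buffer split, and loop bound here are illustrative only:

    #include <stdint.h>
    #include <simde/x86/avx2.h>

    /* One CN group with 2 incoming BNs: accumulate the sign product, take the
     * min of the magnitudes, clamp to 127, then re-apply the sign. */
    static void cn_group_sketch(const int8_t *bn0, const int8_t *bn1,
                                int8_t *res, int M)
    {
      const simde__m256i ones   = simde_mm256_set1_epi8(1);
      const simde__m256i maxLLR = simde_mm256_set1_epi8((char)127);
      for (int i = 0; i < M; i++) {
        simde__m256i ymm0 = ((const simde__m256i *)bn0)[i];
        simde__m256i sgn  = simde_mm256_sign_epi8(ones, ymm0);
        simde__m256i min  = simde_mm256_abs_epi8(ymm0);
        ymm0 = ((const simde__m256i *)bn1)[i];
        min  = simde_mm256_min_epu8(min, simde_mm256_abs_epi8(ymm0));
        sgn  = simde_mm256_sign_epi8(sgn, ymm0);
        min  = simde_mm256_min_epu8(min, maxLLR);
        ((simde__m256i *)res)[i] = simde_mm256_sign_epi8(min, sgn);
      }
    }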
diff --git a/openair1/PHY/CODING/nrLDPC_decoder/nrLDPC_tools/generator_cnProc/main128.c b/openair1/PHY/CODING/nrLDPC_decoder/nrLDPC_tools/generator_cnProc/main128.c
new file mode 100644
index 0000000000000000000000000000000000000000..1353ee20eb1c42915e5df4d3d429183b8cc71580
--- /dev/null
+++ b/openair1/PHY/CODING/nrLDPC_decoder/nrLDPC_tools/generator_cnProc/main128.c
@@ -0,0 +1,50 @@
+/*
+ * Licensed to the OpenAirInterface (OAI) Software Alliance under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The OpenAirInterface Software Alliance licenses this file to You under
+ * the OAI Public License, Version 1.1  (the "License"); you may not use this file
+ * except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.openairinterface.org/?page_id=698
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *-------------------------------------------------------------------------------
+ * For more information about the OpenAirInterface (OAI) Software Alliance:
+ *      contact@openairinterface.org
+ */
+
+#include <stdio.h>
+#include <stdint.h>
+#define NB_R  3
+void nrLDPC_cnProc_BG1_generator_128(const char*, int);
+void nrLDPC_cnProc_BG2_generator_128(const char*, int);
+
+const char *__asan_default_options()
+{
+  /* don't do leak checking in nr_ulsim, creates problems in the CI */
+  return "detect_leaks=0";
+}
+
+int main(int argc, char *argv[])
+{
+  if (argc != 2) {
+    fprintf(stderr, "usage: %s <output-dir>\n", argv[0]);
+    return 1;
+  }
+  const char *dir = argv[1];
+
+  int R[NB_R]={0,1,2};
+  for(int i=0; i<NB_R;i++) {
+    nrLDPC_cnProc_BG1_generator_128(dir, R[i]);
+    nrLDPC_cnProc_BG2_generator_128(dir, R[i]);
+  }
+
+  return(0);
+}
+
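main128.c follows the same build-time code-generation scheme as the other generator mains in this tree: the executable is run once during the build with an output directory and writes fully unrolled kernels into headers, one per entry of R[]. A minimal sketch of that fprintf pattern (the file and function names below are invented for illustration):

    #include <stdio.h>

    /* Write a header containing a fully unrolled kernel; the real generators
     * emit SIMD statements indexed from the LDPC lookup tables instead. */
    static int emit_example(const char *dir)
    {
      char path[512];
      snprintf(path, sizeof(path), "%s/example_kernel.h", dir);
      FILE *fd = fopen(path, "w");
      if (fd == NULL)
        return -1;
      fprintf(fd, "static inline void example_kernel(int8_t *in, int8_t *out) {\n");
      for (int i = 0; i < 4; i++) /* this loop runs at generation time */
        fprintf(fd, "  out[%d] = in[%d];\n", i, i);
      fprintf(fd, "}\n");
      fclose(fd);
      return 0;
    }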
diff --git a/openair1/PHY/CODING/nrLDPC_decoder/nrLDPC_tools/generator_cnProc_avx512/CMakeLists.txt b/openair1/PHY/CODING/nrLDPC_decoder/nrLDPC_tools/generator_cnProc_avx512/CMakeLists.txt
index 7ca988f1ca2dc228343b474ce09de58f385ba978..f6da998c851ed9e642206599d03dc4ab26c9a6b6 100644
--- a/openair1/PHY/CODING/nrLDPC_decoder/nrLDPC_tools/generator_cnProc_avx512/CMakeLists.txt
+++ b/openair1/PHY/CODING/nrLDPC_decoder/nrLDPC_tools/generator_cnProc_avx512/CMakeLists.txt
@@ -2,7 +2,7 @@ add_executable(cnProc_gen_avx512
                cnProc_gen_BG1_avx512.c
                cnProc_gen_BG2_avx512.c
                main.c)
-target_compile_options(cnProc_gen_avx512 PRIVATE -W -Wall -mavx2)
+target_compile_options(cnProc_gen_avx512 PRIVATE -W -Wall)
 
 #set(cnProc_avx512_headers
 #    cnProc_avx512/nrLDPC_cnProc_BG1_R13_AVX512.h
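Dropping -mavx2 from cnProc_gen_avx512 is consistent with the rest of the patch: the generator only prints source text and never executes vector instructions itself, and once the intrinsic names it emits are spelled through SIMDE it no longer needs any x86-specific compile flags, which is what lets it build on an aarch64 host.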
diff --git a/openair1/PHY/CODING/nrLDPC_decoder/nrLDPC_tools/generator_cnProc_avx512/cnProc_gen_BG1_avx512.c b/openair1/PHY/CODING/nrLDPC_decoder/nrLDPC_tools/generator_cnProc_avx512/cnProc_gen_BG1_avx512.c
index aafcc987905694c9de76d19e248492cc125b591b..3f7f71e21b790229ef8c8ccbbfece3858bbaf2f4 100644
--- a/openair1/PHY/CODING/nrLDPC_decoder/nrLDPC_tools/generator_cnProc_avx512/cnProc_gen_BG1_avx512.c
+++ b/openair1/PHY/CODING/nrLDPC_decoder/nrLDPC_tools/generator_cnProc_avx512/cnProc_gen_BG1_avx512.c
@@ -46,7 +46,7 @@ void nrLDPC_cnProc_BG1_generator_AVX512(const char *dir, int R)
   // fprintf(fd,"#include <stdint.h>\n");
   // fprintf(fd,"#include \"PHY/sse_intrin.h\"\n");
 
-  // fprintf(fd,   "#define conditional_negate(a,b,z) _mm512_mask_sub_epi8(a,simde_mm512_movepi8_mask(b),z,a)\n");
+  // fprintf(fd,   "#define conditional_negate(a,b,z) simde_mm512_mask_sub_epi8(a,simde_mm512_movepi8_mask(b),z,a)\n");
 
   fprintf(fd, "static inline void nrLDPC_cnProc_BG1_R%s_AVX512(int8_t* cnProcBuf, int8_t* cnProcBufRes, uint16_t Z) {\n", ratestr[R]);
 
@@ -71,11 +71,11 @@ void nrLDPC_cnProc_BG1_generator_AVX512(const char *dir, int R)
   uint32_t bitOffsetInGroup;
 
   fprintf(fd, "                uint32_t M, i;\n");
-  fprintf(fd, "                __m512i zmm0, min, sgn,zeros,maxLLR, ones;\n");
+  fprintf(fd, "                simde__m512i zmm0, min, sgn,zeros,maxLLR, ones;\n");
 
-  fprintf(fd, "                  zeros  = _mm512_setzero_si512();\n");
-  fprintf(fd, "                  maxLLR = _mm512_set1_epi8((char)127);\n");
-  fprintf(fd, "                 ones = _mm512_set1_epi8((char)1);\n");
+  fprintf(fd, "                  zeros  = simde_mm512_setzero_si512();\n");
+  fprintf(fd, "                  maxLLR = simde_mm512_set1_epi8((char)127);\n");
+  fprintf(fd, "                 ones = simde_mm512_set1_epi8((char)1);\n");
 
   // =====================================================================
   // Process group with 3 BNs
@@ -103,27 +103,33 @@ void nrLDPC_cnProc_BG1_generator_AVX512(const char *dir, int R)
       fprintf(fd, "            for (i=0;i<M;i++) {\n");
       // Abs and sign of 64  CNs (first BN)
       //                zmm0 = p_cnProcBuf[lut_idxCnProcG3[j][0] + i];
-      fprintf(fd, "                zmm0 = ((__m512i*)cnProcBuf)[%d+i];\n", (lut_startAddrCnGroups[0] >> 6) + lut_idxCnProcG3[j][0] / 2);
-      fprintf(fd, "                sgn  = _mm512_xor_si512(ones, zmm0);\n");
-      fprintf(fd, "                min  = _mm512_abs_epi8(zmm0);\n");
+      fprintf(fd,
+              "                zmm0 = ((simde__m512i*)cnProcBuf)[%d+i];\n",
+              (lut_startAddrCnGroups[0] >> 6) + lut_idxCnProcG3[j][0] / 2);
+      fprintf(fd, "                sgn  = simde_mm512_xor_si512(ones, zmm0);\n");
+      fprintf(fd, "                min  = simde_mm512_abs_epi8(zmm0);\n");
 
       // for (k=1; k<2; k++)
       //{
-      fprintf(fd, "                zmm0 = ((__m512i*)cnProcBuf)[%d+i];\n", (lut_startAddrCnGroups[0] >> 6) + lut_idxCnProcG3[j][1] / 2);
+      fprintf(fd,
+              "                zmm0 = ((simde__m512i*)cnProcBuf)[%d+i];\n",
+              (lut_startAddrCnGroups[0] >> 6) + lut_idxCnProcG3[j][1] / 2);
 
-      //                min  = _mm512_min_epu8(min, _mm512_abs_epi8(zmm0));
-      fprintf(fd, "                min  = _mm512_min_epu8(min, _mm512_abs_epi8(zmm0));\n");
+      //                min  = simde_mm512_min_epu8(min, simde_mm512_abs_epi8(zmm0));
+      fprintf(fd, "                min  = simde_mm512_min_epu8(min, simde_mm512_abs_epi8(zmm0));\n");
 
-      //                sgn  = _mm512_sign_epi8(*p_ones, zmm0);
-      fprintf(fd, "                sgn  = _mm512_xor_si512(sgn, zmm0);\n");
+      //                sgn  = simde_mm512_sign_epi8(*p_ones, zmm0);
+      fprintf(fd, "                sgn  = simde_mm512_xor_si512(sgn, zmm0);\n");
       // }
 
       // Store result
-      //                min = _mm512_min_epu8(min, *maxLLR); // 128 in epi8 is -127
-      fprintf(fd, "                min = _mm512_min_epu8(min, maxLLR);\n");
-      //                *p_cnProcBufResBit = _mm512_sign_epi8(min, sgn);
+      //                min = simde_mm512_min_epu8(min, *maxLLR); // 128 in epi8 is -127
+      fprintf(fd, "                min = simde_mm512_min_epu8(min, maxLLR);\n");
+      //                *p_cnProcBufResBit = simde_mm512_sign_epi8(min, sgn);
       //                p_cnProcBufResBit++;
-      fprintf(fd, "                ((__m512i*)cnProcBufRes)[%d+i] = conditional_negate(min, sgn,zeros);\n", (lut_startAddrCnGroups[0] >> 6) + (j * bitOffsetInGroup));
+      fprintf(fd,
+              "                ((simde__m512i*)cnProcBufRes)[%d+i] = conditional_negate(min, sgn,zeros);\n",
+              (lut_startAddrCnGroups[0] >> 6) + (j * bitOffsetInGroup));
       fprintf(fd, "            }\n");
     }
   }
@@ -150,27 +156,33 @@ void nrLDPC_cnProc_BG1_generator_AVX512(const char *dir, int R)
       fprintf(fd, "            for (i=0;i<M;i++) {\n");
       // Abs and sign of 64  CNs (first BN)
       //                zmm0 = p_cnProcBuf[lut_idxCnProcG3[j][0] + i];
-      fprintf(fd, "              zmm0 = ((__m512i*)cnProcBuf)[%d+i];\n", (lut_startAddrCnGroups[1] >> 6) + lut_idxCnProcG4[j][0] / 2);
-      fprintf(fd, "                sgn  = _mm512_xor_si512(ones, zmm0);\n");
-      fprintf(fd, "                min  = _mm512_abs_epi8(zmm0);\n");
+      fprintf(fd,
+              "              zmm0 = ((simde__m512i*)cnProcBuf)[%d+i];\n",
+              (lut_startAddrCnGroups[1] >> 6) + lut_idxCnProcG4[j][0] / 2);
+      fprintf(fd, "                sgn  = simde_mm512_xor_si512(ones, zmm0);\n");
+      fprintf(fd, "                min  = simde_mm512_abs_epi8(zmm0);\n");
 
       // Loop over BNs
       for (k = 1; k < 3; k++) {
-        fprintf(fd, "                zmm0 = ((__m512i*)cnProcBuf)[%d+i];\n", (lut_startAddrCnGroups[1] >> 6) + lut_idxCnProcG4[j][k] / 2);
+        fprintf(fd,
+                "                zmm0 = ((simde__m512i*)cnProcBuf)[%d+i];\n",
+                (lut_startAddrCnGroups[1] >> 6) + lut_idxCnProcG4[j][k] / 2);
 
-        //                min  = _mm512_min_epu8(min, _mm512_abs_epi8(zmm0));
-        fprintf(fd, "                min  = _mm512_min_epu8(min, _mm512_abs_epi8(zmm0));\n");
+        //                min  = simde_mm512_min_epu8(min, simde_mm512_abs_epi8(zmm0));
+        fprintf(fd, "                min  = simde_mm512_min_epu8(min, simde_mm512_abs_epi8(zmm0));\n");
 
-        //                sgn  = _mm512_sign_epi8(*p_ones, zmm0);
-        fprintf(fd, "                sgn  = _mm512_xor_si512(sgn, zmm0);\n");
+        //                sgn  = simde_mm512_sign_epi8(*p_ones, zmm0);
+        fprintf(fd, "                sgn  = simde_mm512_xor_si512(sgn, zmm0);\n");
       }
 
       // Store result
-      //                min = _mm512_min_epu8(min, maxLLR); // 128 in epi8 is -127
-      fprintf(fd, "                min = _mm512_min_epu8(min, maxLLR);\n");
-      //                *p_cnProcBufResBit = _mm512_sign_epi8(min, sgn);
+      //                min = simde_mm512_min_epu8(min, maxLLR); // 128 in epi8 is -127
+      fprintf(fd, "                min = simde_mm512_min_epu8(min, maxLLR);\n");
+      //                *p_cnProcBufResBit = simde_mm512_sign_epi8(min, sgn);
       //                p_cnProcBufResBit++;
-      fprintf(fd, "                ((__m512i*)cnProcBufRes)[%d+i] = conditional_negate(min, sgn,zeros);\n", (lut_startAddrCnGroups[1] >> 6) + (j * bitOffsetInGroup));
+      fprintf(fd,
+              "                ((simde__m512i*)cnProcBufRes)[%d+i] = conditional_negate(min, sgn,zeros);\n",
+              (lut_startAddrCnGroups[1] >> 6) + (j * bitOffsetInGroup));
       fprintf(fd, "            }\n");
     }
   }
@@ -196,26 +208,32 @@ void nrLDPC_cnProc_BG1_generator_AVX512(const char *dir, int R)
       fprintf(fd, "            for (i=0;i<M;i++) {\n");
       // Abs and sign of 64  CNs (first BN)
       //                zmm0 = p_cnProcBuf[lut_idxCnProcG3[j][0] + i];
-      fprintf(fd, "                zmm0 = ((__m512i*)cnProcBuf)[%d+i];\n", (lut_startAddrCnGroups[2] >> 6) + lut_idxCnProcG5[j][0] / 2);
-      fprintf(fd, "                sgn  = _mm512_xor_si512(ones, zmm0);\n");
-      fprintf(fd, "                min  = _mm512_abs_epi8(zmm0);\n");
+      fprintf(fd,
+              "                zmm0 = ((simde__m512i*)cnProcBuf)[%d+i];\n",
+              (lut_startAddrCnGroups[2] >> 6) + lut_idxCnProcG5[j][0] / 2);
+      fprintf(fd, "                sgn  = simde_mm512_xor_si512(ones, zmm0);\n");
+      fprintf(fd, "                min  = simde_mm512_abs_epi8(zmm0);\n");
 
       // Loop over BNs
       for (k = 1; k < 4; k++) {
-        fprintf(fd, "                zmm0 = ((__m512i*)cnProcBuf)[%d+i];\n", (lut_startAddrCnGroups[2] >> 6) + lut_idxCnProcG5[j][k] / 2);
+        fprintf(fd,
+                "                zmm0 = ((simde__m512i*)cnProcBuf)[%d+i];\n",
+                (lut_startAddrCnGroups[2] >> 6) + lut_idxCnProcG5[j][k] / 2);
 
-        //                min  = _mm512_min_epu8(min, _mm512_abs_epi8(zmm0));
-        fprintf(fd, "                min  = _mm512_min_epu8(min, _mm512_abs_epi8(zmm0));\n");
+        //                min  = simde_mm512_min_epu8(min, simde_mm512_abs_epi8(zmm0));
+        fprintf(fd, "                min  = simde_mm512_min_epu8(min, simde_mm512_abs_epi8(zmm0));\n");
 
-        //                sgn  = _mm512_sign_epi8(*p_ones, zmm0);
-        fprintf(fd, "                sgn  = _mm512_xor_si512(sgn, zmm0);\n");
+        //                sgn  = simde_mm512_sign_epi8(*p_ones, zmm0);
+        fprintf(fd, "                sgn  = simde_mm512_xor_si512(sgn, zmm0);\n");
       }
 
       // Store result
-      //                min = _mm512_min_epu8(min, maxLLR); // 128 in epi8 is -127
-      fprintf(fd, "                min = _mm512_min_epu8(min, maxLLR);\n");
+      //                min = simde_mm512_min_epu8(min, maxLLR); // 128 in epi8 is -127
+      fprintf(fd, "                min = simde_mm512_min_epu8(min, maxLLR);\n");
 
-      fprintf(fd, "                ((__m512i*)cnProcBufRes)[%d+i] = conditional_negate(min, sgn,zeros);\n", (lut_startAddrCnGroups[2] >> 6) + (j * bitOffsetInGroup));
+      fprintf(fd,
+              "                ((simde__m512i*)cnProcBufRes)[%d+i] = conditional_negate(min, sgn,zeros);\n",
+              (lut_startAddrCnGroups[2] >> 6) + (j * bitOffsetInGroup));
       fprintf(fd, "           }\n");
     }
   }
@@ -242,26 +260,32 @@ void nrLDPC_cnProc_BG1_generator_AVX512(const char *dir, int R)
       fprintf(fd, "            for (i=0;i<M;i++) {\n");
       // Abs and sign of 64  CNs (first BN)
       //                zmm0 = p_cnProcBuf[lut_idxCnProcG3[j][0] + i];
-      fprintf(fd, "                zmm0 = ((__m512i*)cnProcBuf)[%d+i];\n", (lut_startAddrCnGroups[3] >> 6) + lut_idxCnProcG6[j][0] / 2);
-      fprintf(fd, "                sgn  = _mm512_xor_si512(ones, zmm0);\n");
-      fprintf(fd, "                min  = _mm512_abs_epi8(zmm0);\n");
+      fprintf(fd,
+              "                zmm0 = ((simde__m512i*)cnProcBuf)[%d+i];\n",
+              (lut_startAddrCnGroups[3] >> 6) + lut_idxCnProcG6[j][0] / 2);
+      fprintf(fd, "                sgn  = simde_mm512_xor_si512(ones, zmm0);\n");
+      fprintf(fd, "                min  = simde_mm512_abs_epi8(zmm0);\n");
 
       // Loop over BNs
       for (k = 1; k < 5; k++) {
-        fprintf(fd, "                zmm0 = ((__m512i*)cnProcBuf)[%d+i];\n", (lut_startAddrCnGroups[3] >> 6) + lut_idxCnProcG6[j][k] / 2);
+        fprintf(fd,
+                "                zmm0 = ((simde__m512i*)cnProcBuf)[%d+i];\n",
+                (lut_startAddrCnGroups[3] >> 6) + lut_idxCnProcG6[j][k] / 2);
 
-        //                min  = _mm512_min_epu8(min, _mm512_abs_epi8(zmm0));
-        fprintf(fd, "                min  = _mm512_min_epu8(min, _mm512_abs_epi8(zmm0));\n");
+        //                min  = simde_mm512_min_epu8(min, simde_mm512_abs_epi8(zmm0));
+        fprintf(fd, "                min  = simde_mm512_min_epu8(min, simde_mm512_abs_epi8(zmm0));\n");
 
-        //                sgn  = _mm512_sign_epi8(*p_ones, zmm0);
-        fprintf(fd, "                sgn  = _mm512_xor_si512(sgn, zmm0);\n");
+        //                sgn  = simde_mm512_sign_epi8(*p_ones, zmm0);
+        fprintf(fd, "                sgn  = simde_mm512_xor_si512(sgn, zmm0);\n");
       }
 
       // Store result
-      //                min = _mm512_min_epu8(min, maxLLR); // 128 in epi8 is -127
-      fprintf(fd, "                min = _mm512_min_epu8(min, maxLLR);\n");
+      //                min = simde_mm512_min_epu8(min, maxLLR); // 128 in epi8 is -127
+      fprintf(fd, "                min = simde_mm512_min_epu8(min, maxLLR);\n");
 
-      fprintf(fd, "                ((__m512i*)cnProcBufRes)[%d+i] = conditional_negate(min, sgn,zeros);\n", (lut_startAddrCnGroups[3] >> 6) + (j * bitOffsetInGroup));
+      fprintf(fd,
+              "                ((simde__m512i*)cnProcBufRes)[%d+i] = conditional_negate(min, sgn,zeros);\n",
+              (lut_startAddrCnGroups[3] >> 6) + (j * bitOffsetInGroup));
       fprintf(fd, "            }\n");
     }
   }
@@ -296,26 +320,32 @@ void nrLDPC_cnProc_BG1_generator_AVX512(const char *dir, int R)
       fprintf(fd, "            for (i=0;i<M;i++) {\n");
       // Abs and sign of 64  CNs (first BN)
       //                zmm0 = p_cnProcBuf[lut_idxCnProcG3[j][0] + i];
-      fprintf(fd, "                zmm0= ((__m512i*)cnProcBuf)[%d+i];\n", (lut_startAddrCnGroups[4] >> 6) + lut_idxCnProcG7[j][0] / 2);
-      fprintf(fd, "                sgn  = _mm512_xor_si512(ones, zmm0);\n");
-      fprintf(fd, "                min  = _mm512_abs_epi8(zmm0);\n");
+      fprintf(fd,
+              "                zmm0= ((simde__m512i*)cnProcBuf)[%d+i];\n",
+              (lut_startAddrCnGroups[4] >> 6) + lut_idxCnProcG7[j][0] / 2);
+      fprintf(fd, "                sgn  = simde_mm512_xor_si512(ones, zmm0);\n");
+      fprintf(fd, "                min  = simde_mm512_abs_epi8(zmm0);\n");
 
       // Loop over BNs
       for (k = 1; k < 6; k++) {
-        fprintf(fd, "                zmm0 = ((__m512i*)cnProcBuf)[%d+i];\n", (lut_startAddrCnGroups[4] >> 6) + lut_idxCnProcG7[j][k] / 2);
+        fprintf(fd,
+                "                zmm0 = ((simde__m512i*)cnProcBuf)[%d+i];\n",
+                (lut_startAddrCnGroups[4] >> 6) + lut_idxCnProcG7[j][k] / 2);
 
-        //                min  = _mm512_min_epu8(min, _mm512_abs_epi8(zmm0));
-        fprintf(fd, "                min  = _mm512_min_epu8(min, _mm512_abs_epi8(zmm0));\n");
+        //                min  = simde_mm512_min_epu8(min, simde_mm512_abs_epi8(zmm0));
+        fprintf(fd, "                min  = simde_mm512_min_epu8(min, simde_mm512_abs_epi8(zmm0));\n");
 
-        //                sgn  = _mm512_sign_epi8(*p_ones, zmm0);
-        fprintf(fd, "                sgn  = _mm512_xor_si512(sgn, zmm0);\n");
+        //                sgn  = simde_mm512_sign_epi8(*p_ones, zmm0);
+        fprintf(fd, "                sgn  = simde_mm512_xor_si512(sgn, zmm0);\n");
       }
 
       // Store result
-      //                min = _mm512_min_epu8(min, maxLLR); // 128 in epi8 is -127
-      fprintf(fd, "                min = _mm512_min_epu8(min, maxLLR);\n");
+      //                min = simde_mm512_min_epu8(min, maxLLR); // 128 in epi8 is -127
+      fprintf(fd, "                min = simde_mm512_min_epu8(min, maxLLR);\n");
 
-      fprintf(fd, "                ((__m512i*)cnProcBufRes)[%d+i] = conditional_negate(min, sgn,zeros);\n", (lut_startAddrCnGroups[4] >> 6) + (j * bitOffsetInGroup));
+      fprintf(fd,
+              "                ((simde__m512i*)cnProcBufRes)[%d+i] = conditional_negate(min, sgn,zeros);\n",
+              (lut_startAddrCnGroups[4] >> 6) + (j * bitOffsetInGroup));
       fprintf(fd, "            }\n");
     }
   }
@@ -351,26 +381,32 @@ void nrLDPC_cnProc_BG1_generator_AVX512(const char *dir, int R)
       fprintf(fd, "            for (i=0;i<M;i++) {\n");
       // Abs and sign of 64  CNs (first BN)
       //                zmm0 = p_cnProcBuf[lut_idxCnProcG3[j][0] + i];
-      fprintf(fd, "                zmm0 = ((__m512i*)cnProcBuf)[%d+i];\n", (lut_startAddrCnGroups[5] >> 6) + lut_idxCnProcG8[j][0] / 2);
-      fprintf(fd, "                sgn  = _mm512_xor_si512(ones, zmm0);\n");
-      fprintf(fd, "                min  = _mm512_abs_epi8(zmm0);\n");
+      fprintf(fd,
+              "                zmm0 = ((simde__m512i*)cnProcBuf)[%d+i];\n",
+              (lut_startAddrCnGroups[5] >> 6) + lut_idxCnProcG8[j][0] / 2);
+      fprintf(fd, "                sgn  = simde_mm512_xor_si512(ones, zmm0);\n");
+      fprintf(fd, "                min  = simde_mm512_abs_epi8(zmm0);\n");
 
       // Loop over BNs
       for (k = 1; k < 7; k++) {
-        fprintf(fd, "                zmm0 = ((__m512i*)cnProcBuf)[%d+i];\n", (lut_startAddrCnGroups[5] >> 6) + lut_idxCnProcG8[j][k] / 2);
+        fprintf(fd,
+                "                zmm0 = ((simde__m512i*)cnProcBuf)[%d+i];\n",
+                (lut_startAddrCnGroups[5] >> 6) + lut_idxCnProcG8[j][k] / 2);
 
-        //                min  = _mm512_min_epu8(min, _mm512_abs_epi8(zmm0));
-        fprintf(fd, "                min  = _mm512_min_epu8(min, _mm512_abs_epi8(zmm0));\n");
+        //                min  = simde_mm512_min_epu8(min, simde_mm512_abs_epi8(zmm0));
+        fprintf(fd, "                min  = simde_mm512_min_epu8(min, simde_mm512_abs_epi8(zmm0));\n");
 
-        //                sgn  = _mm512_sign_epi8(*p_ones, zmm0);
-        fprintf(fd, "                sgn  = _mm512_xor_si512(sgn, zmm0);\n");
+        //                sgn  = simde_mm512_sign_epi8(*p_ones, zmm0);
+        fprintf(fd, "                sgn  = simde_mm512_xor_si512(sgn, zmm0);\n");
       }
 
       // Store result
-      //                min = _mm512_min_epu8(min, maxLLR); // 128 in epi8 is -127
-      fprintf(fd, "                min = _mm512_min_epu8(min, maxLLR);\n");
+      //                min = simde_mm512_min_epu8(min, maxLLR); // 128 in epi8 is -127
+      fprintf(fd, "                min = simde_mm512_min_epu8(min, maxLLR);\n");
 
-      fprintf(fd, "                ((__m512i*)cnProcBufRes)[%d+i] = conditional_negate(min, sgn,zeros);\n", (lut_startAddrCnGroups[5] >> 6) + (j * bitOffsetInGroup));
+      fprintf(fd,
+              "                ((simde__m512i*)cnProcBufRes)[%d+i] = conditional_negate(min, sgn,zeros);\n",
+              (lut_startAddrCnGroups[5] >> 6) + (j * bitOffsetInGroup));
       fprintf(fd, "              }\n");
     }
   }
@@ -408,26 +444,32 @@ void nrLDPC_cnProc_BG1_generator_AVX512(const char *dir, int R)
       fprintf(fd, "            for (i=0;i<M;i++) {\n");
       // Abs and sign of 64  CNs (first BN)
       //                zmm0 = p_cnProcBuf[lut_idxCnProcG3[j][0] + i];
-      fprintf(fd, "                zmm0 = ((__m512i*)cnProcBuf)[%d+i];\n", (lut_startAddrCnGroups[6] >> 6) + lut_idxCnProcG9[j][0] / 2);
-      fprintf(fd, "                sgn  = _mm512_xor_si512(ones, zmm0);\n");
-      fprintf(fd, "                min  = _mm512_abs_epi8(zmm0);\n");
+      fprintf(fd,
+              "                zmm0 = ((simde__m512i*)cnProcBuf)[%d+i];\n",
+              (lut_startAddrCnGroups[6] >> 6) + lut_idxCnProcG9[j][0] / 2);
+      fprintf(fd, "                sgn  = simde_mm512_xor_si512(ones, zmm0);\n");
+      fprintf(fd, "                min  = simde_mm512_abs_epi8(zmm0);\n");
 
       // Loop over BNs
       for (k = 1; k < 8; k++) {
-        fprintf(fd, "                zmm0 = ((__m512i*)cnProcBuf)[%d+i];\n", (lut_startAddrCnGroups[6] >> 6) + lut_idxCnProcG9[j][k] / 2);
+        fprintf(fd,
+                "                zmm0 = ((simde__m512i*)cnProcBuf)[%d+i];\n",
+                (lut_startAddrCnGroups[6] >> 6) + lut_idxCnProcG9[j][k] / 2);
 
-        //                min  = _mm512_min_epu8(min, _mm512_abs_epi8(zmm0));
-        fprintf(fd, "                min  = _mm512_min_epu8(min, _mm512_abs_epi8(zmm0));\n");
+        //                min  = simde_mm512_min_epu8(min, simde_mm512_abs_epi8(zmm0));
+        fprintf(fd, "                min  = simde_mm512_min_epu8(min, simde_mm512_abs_epi8(zmm0));\n");
 
-        //                sgn  = _mm512_sign_epi8(*p_ones, zmm0);
-        fprintf(fd, "                sgn  = _mm512_xor_si512(sgn, zmm0);\n");
+        //                sgn  = simde_mm512_sign_epi8(*p_ones, zmm0);
+        fprintf(fd, "                sgn  = simde_mm512_xor_si512(sgn, zmm0);\n");
       }
 
       // Store result
-      //                min = _mm512_min_epu8(min, maxLLR); // 128 in epi8 is -127
-      fprintf(fd, "                min = _mm512_min_epu8(min, maxLLR);\n");
+      //                min = simde_mm512_min_epu8(min, maxLLR); // 128 in epi8 is -127
+      fprintf(fd, "                min = simde_mm512_min_epu8(min, maxLLR);\n");
 
-      fprintf(fd, "                ((__m512i*)cnProcBufRes)[%d+i] = conditional_negate(min, sgn,zeros);\n", (lut_startAddrCnGroups[6] >> 6) + (j * bitOffsetInGroup));
+      fprintf(fd,
+              "                ((simde__m512i*)cnProcBufRes)[%d+i] = conditional_negate(min, sgn,zeros);\n",
+              (lut_startAddrCnGroups[6] >> 6) + (j * bitOffsetInGroup));
       fprintf(fd, "              }\n");
     }
   }
@@ -465,26 +507,32 @@ void nrLDPC_cnProc_BG1_generator_AVX512(const char *dir, int R)
       fprintf(fd, "            for (i=0;i<M;i++) {\n");
       // Abs and sign of 64  CNs (first BN)
       //                zmm0 = p_cnProcBuf[lut_idxCnProcG3[j][0] + i];
-      fprintf(fd, "                zmm0 = ((__m512i*)cnProcBuf)[%d+i];\n", (lut_startAddrCnGroups[7] >> 6) + lut_idxCnProcG10[j][0] / 2);
-      fprintf(fd, "                sgn  = _mm512_xor_si512(ones, zmm0);\n");
-      fprintf(fd, "                min  = _mm512_abs_epi8(zmm0);\n");
+      fprintf(fd,
+              "                zmm0 = ((simde__m512i*)cnProcBuf)[%d+i];\n",
+              (lut_startAddrCnGroups[7] >> 6) + lut_idxCnProcG10[j][0] / 2);
+      fprintf(fd, "                sgn  = simde_mm512_xor_si512(ones, zmm0);\n");
+      fprintf(fd, "                min  = simde_mm512_abs_epi8(zmm0);\n");
 
       // Loop over BNs
       for (k = 1; k < 9; k++) {
-        fprintf(fd, "                zmm0 = ((__m512i*)cnProcBuf)[%d+i];\n", (lut_startAddrCnGroups[7] >> 6) + lut_idxCnProcG10[j][k] / 2);
+        fprintf(fd,
+                "                zmm0 = ((simde__m512i*)cnProcBuf)[%d+i];\n",
+                (lut_startAddrCnGroups[7] >> 6) + lut_idxCnProcG10[j][k] / 2);
 
-        //                min  = _mm512_min_epu8(min, _mm512_abs_epi8(zmm0));
-        fprintf(fd, "                min  = _mm512_min_epu8(min, _mm512_abs_epi8(zmm0));\n");
+        //                min  = simde_mm512_min_epu8(min, simde_mm512_abs_epi8(zmm0));
+        fprintf(fd, "                min  = simde_mm512_min_epu8(min, simde_mm512_abs_epi8(zmm0));\n");
 
-        //                sgn  = _mm512_sign_epi8(*p_ones, zmm0);
-        fprintf(fd, "                sgn  = _mm512_xor_si512(sgn, zmm0);\n");
+        //                sgn  = simde_mm512_sign_epi8(*p_ones, zmm0);
+        fprintf(fd, "                sgn  = simde_mm512_xor_si512(sgn, zmm0);\n");
       }
 
       // Store result
-      //                min = _mm512_min_epu8(min, maxLLR); // 128 in epi8 is -127
-      fprintf(fd, "                min = _mm512_min_epu8(min, maxLLR);\n");
+      //                min = simde_mm512_min_epu8(min, maxLLR); // 128 in epi8 is -127
+      fprintf(fd, "                min = simde_mm512_min_epu8(min, maxLLR);\n");
 
-      fprintf(fd, "                ((__m512i*)cnProcBufRes)[%d+i] = conditional_negate(min,sgn,zeros);\n", (lut_startAddrCnGroups[7] >> 6) + (j * bitOffsetInGroup));
+      fprintf(fd,
+              "                ((simde__m512i*)cnProcBufRes)[%d+i] = conditional_negate(min,sgn,zeros);\n",
+              (lut_startAddrCnGroups[7] >> 6) + (j * bitOffsetInGroup));
       fprintf(fd, "            }\n");
     }
   }
@@ -531,26 +579,32 @@ void nrLDPC_cnProc_BG1_generator_AVX512(const char *dir, int R)
       fprintf(fd, "            for (i=0;i<M;i++) {\n");
       // Abs and sign of 64  CNs (first BN)
       //                zmm0 = p_cnProcBuf[lut_idxCnProcG3[j][0] + i];
-      fprintf(fd, "                zmm0 = ((__m512i*)cnProcBuf)[%d+i];\n", (lut_startAddrCnGroups[8] >> 6) + lut_idxCnProcG19[j][0] / 2);
-      fprintf(fd, "                sgn  = _mm512_xor_si512(ones, zmm0);\n");
-      fprintf(fd, "                min  = _mm512_abs_epi8(zmm0);\n");
+      fprintf(fd,
+              "                zmm0 = ((simde__m512i*)cnProcBuf)[%d+i];\n",
+              (lut_startAddrCnGroups[8] >> 6) + lut_idxCnProcG19[j][0] / 2);
+      fprintf(fd, "                sgn  = simde_mm512_xor_si512(ones, zmm0);\n");
+      fprintf(fd, "                min  = simde_mm512_abs_epi8(zmm0);\n");
 
       // Loop over BNs
       for (k = 1; k < 18; k++) {
-        fprintf(fd, "                zmm0 = ((__m512i*)cnProcBuf)[%d+i];\n", (lut_startAddrCnGroups[8] >> 6) + lut_idxCnProcG19[j][k] / 2);
+        fprintf(fd,
+                "                zmm0 = ((simde__m512i*)cnProcBuf)[%d+i];\n",
+                (lut_startAddrCnGroups[8] >> 6) + lut_idxCnProcG19[j][k] / 2);
 
-        //                min  = _mm512_min_epu8(min, _mm512_abs_epi8(zmm0));
-        fprintf(fd, "                min  = _mm512_min_epu8(min, _mm512_abs_epi8(zmm0));\n");
+        //                min  = simde_mm512_min_epu8(min, simde_mm512_abs_epi8(zmm0));
+        fprintf(fd, "                min  = simde_mm512_min_epu8(min, simde_mm512_abs_epi8(zmm0));\n");
 
-        //                sgn  = _mm512_sign_epi8(*p_ones, zmm0);
-        fprintf(fd, "                sgn  = _mm512_xor_si512(sgn, zmm0);\n");
+        //                sgn  = simde_mm512_sign_epi8(*p_ones, zmm0);
+        fprintf(fd, "                sgn  = simde_mm512_xor_si512(sgn, zmm0);\n");
       }
 
       // Store result
-      //                min = _mm512_min_epu8(min, maxLLR); // 128 in epi8 is -127
-      fprintf(fd, "                min = _mm512_min_epu8(min, maxLLR);\n");
+      //                min = simde_mm512_min_epu8(min, maxLLR); // 128 in epi8 is -127
+      fprintf(fd, "                min = simde_mm512_min_epu8(min, maxLLR);\n");
 
-      fprintf(fd, "                ((__m512i*)cnProcBufRes)[%d+i] = conditional_negate(min, sgn,zeros);\n", (lut_startAddrCnGroups[8] >> 6) + (j * bitOffsetInGroup));
+      fprintf(fd,
+              "                ((simde__m512i*)cnProcBufRes)[%d+i] = conditional_negate(min, sgn,zeros);\n",
+              (lut_startAddrCnGroups[8] >> 6) + (j * bitOffsetInGroup));
       fprintf(fd, "            }\n");
     }
   }
diff --git a/openair1/PHY/CODING/nrLDPC_decoder/nrLDPC_tools/generator_cnProc_avx512/cnProc_gen_BG2_avx512.c b/openair1/PHY/CODING/nrLDPC_decoder/nrLDPC_tools/generator_cnProc_avx512/cnProc_gen_BG2_avx512.c
index 70e760cc6a1251dd7cdd33efa41c0ab5a7daced7..562f8714692a5a9d4d133c04765392cc0e742a7f 100644
--- a/openair1/PHY/CODING/nrLDPC_decoder/nrLDPC_tools/generator_cnProc_avx512/cnProc_gen_BG2_avx512.c
+++ b/openair1/PHY/CODING/nrLDPC_decoder/nrLDPC_tools/generator_cnProc_avx512/cnProc_gen_BG2_avx512.c
@@ -43,7 +43,7 @@ void nrLDPC_cnProc_BG2_generator_AVX512(const char *dir, int R)
     abort();
   }
 
-  fprintf(fd, "#define conditional_negate(a,b,z) _mm512_mask_sub_epi8(a,_mm512_movepi8_mask(b),z,a)\n");
+  fprintf(fd, "#define conditional_negate(a,b,z) simde_mm512_mask_sub_epi8(a,_mm512_movepi8_mask(b),z,a)\n");
 
   fprintf(fd, "static inline void nrLDPC_cnProc_BG2_R%s_AVX512(int8_t* cnProcBuf, int8_t* cnProcBufRes, uint16_t Z) {\n", ratestr[R]);
   const uint8_t *lut_numCnInCnGroups;
@@ -69,10 +69,10 @@ void nrLDPC_cnProc_BG2_generator_AVX512(const char *dir, int R)
   uint32_t bitOffsetInGroup;
 
   fprintf(fd, "                uint32_t M;\n");
-  fprintf(fd, "                __m512i zmm0, min, sgn,zeros,ones,maxLLR;\n");
-  fprintf(fd, "                zeros  = _mm512_setzero_si512();\n");
-  fprintf(fd, "                maxLLR = _mm512_set1_epi8((char)127);\n");
-  fprintf(fd, "               ones = _mm512_set1_epi8((char)1);\n");
+  fprintf(fd, "                simde__m512i zmm0, min, sgn,zeros,ones,maxLLR;\n");
+  fprintf(fd, "                zeros  = simde_mm512_setzero_si512();\n");
+  fprintf(fd, "                maxLLR = simde_mm512_set1_epi8((char)127);\n");
+  fprintf(fd, "               ones = simde_mm512_set1_epi8((char)1);\n");
   // =====================================================================
   // Process group with 3 BNs
   fprintf(fd, "//Process group with 3 BNs\n");
@@ -95,27 +95,33 @@ void nrLDPC_cnProc_BG2_generator_AVX512(const char *dir, int R)
       fprintf(fd, "            for (int i=0;i<M;i++) {\n");
       // Abs and sign of 64  CNs (first BN)
       //                zmm0 = p_cnProcBuf[lut_idxCnProcG3[j][0] + i];
-      fprintf(fd, "                zmm0 = ((__m512i*)cnProcBuf)[%d+i];\n", (lut_startAddrCnGroups[0] >> 6) + lut_idxCnProcG3[j][0] / 2);
-      fprintf(fd, "                sgn  = _mm512_xor_si512(ones, zmm0);\n");
-      fprintf(fd, "                min  = _mm512_abs_epi8(zmm0);\n");
+      fprintf(fd,
+              "                zmm0 = ((simde__m512i*)cnProcBuf)[%d+i];\n",
+              (lut_startAddrCnGroups[0] >> 6) + lut_idxCnProcG3[j][0] / 2);
+      fprintf(fd, "                sgn  = simde_mm512_xor_si512(ones, zmm0);\n");
+      fprintf(fd, "                min  = simde_mm512_abs_epi8(zmm0);\n");
 
       // for (k=1; k<2; k++)
       //{
-      fprintf(fd, "                zmm0 = ((__m512i*)cnProcBuf)[%d+i];\n", (lut_startAddrCnGroups[0] >> 6) + lut_idxCnProcG3[j][1] / 2);
+      fprintf(fd,
+              "                zmm0 = ((simde__m512i*)cnProcBuf)[%d+i];\n",
+              (lut_startAddrCnGroups[0] >> 6) + lut_idxCnProcG3[j][1] / 2);
 
-      //                min  = _mm512_min_epu8(min, _mm512_abs_epi8(zmm0));
-      fprintf(fd, "                min  = _mm512_min_epu8(min, _mm512_abs_epi8(zmm0));\n");
+      //                min  = simde_mm512_min_epu8(min, simde_mm512_abs_epi8(zmm0));
+      fprintf(fd, "                min  = simde_mm512_min_epu8(min, simde_mm512_abs_epi8(zmm0));\n");
 
-      //                sgn  = _mm512_sign_epi8(*p_ones, zmm0);
-      fprintf(fd, "                sgn  = _mm512_xor_si512(sgn, zmm0);\n");
+      //                sgn  = simde_mm512_sign_epi8(*p_ones, zmm0);
+      fprintf(fd, "                sgn  = simde_mm512_xor_si512(sgn, zmm0);\n");
       // }
 
       // Store result
-      //                min = _mm512_min_epu8(min, *maxLLR); // 128 in epi8 is -127
-      fprintf(fd, "                min = _mm512_min_epu8(min, maxLLR);\n");
-      //                *p_cnProcBufResBit = _mm512_sign_epi8(min, sgn);
+      //                min = simde_mm512_min_epu8(min, *maxLLR); // 128 in epi8 is -127
+      fprintf(fd, "                min = simde_mm512_min_epu8(min, maxLLR);\n");
+      //                *p_cnProcBufResBit = simde_mm512_sign_epi8(min, sgn);
       //                p_cnProcBufResBit++;
-      fprintf(fd, "                ((__m512i*)cnProcBufRes)[%d+i] = conditional_negate(min, sgn,zeros);\n", (lut_startAddrCnGroups[0] >> 6) + (j * bitOffsetInGroup));
+      fprintf(fd,
+              "                ((simde__m512i*)cnProcBufRes)[%d+i] = conditional_negate(min, sgn,zeros);\n",
+              (lut_startAddrCnGroups[0] >> 6) + (j * bitOffsetInGroup));
       fprintf(fd, "            }\n");
     }
   }
@@ -139,27 +145,33 @@ void nrLDPC_cnProc_BG2_generator_AVX512(const char *dir, int R)
       fprintf(fd, "            for (int i=0;i<M;i++) {\n");
       // Abs and sign of 64  CNs (first BN)
       //                zmm0 = p_cnProcBuf[lut_idxCnProcG3[j][0] + i];
-      fprintf(fd, "                zmm0 = ((__m512i*)cnProcBuf)[%d+i];\n", (lut_startAddrCnGroups[1] >> 6) + lut_idxCnProcG4[j][0] / 2);
-      fprintf(fd, "                sgn  = _mm512_xor_si512(ones, zmm0);\n");
-      fprintf(fd, "                min  = _mm512_abs_epi8(zmm0);\n");
+      fprintf(fd,
+              "                zmm0 = ((simde__m512i*)cnProcBuf)[%d+i];\n",
+              (lut_startAddrCnGroups[1] >> 6) + lut_idxCnProcG4[j][0] / 2);
+      fprintf(fd, "                sgn  = simde_mm512_xor_si512(ones, zmm0);\n");
+      fprintf(fd, "                min  = simde_mm512_abs_epi8(zmm0);\n");
 
       // Loop over BNs
       for (k = 1; k < 3; k++) {
-        fprintf(fd, "                zmm0 = ((__m512i*)cnProcBuf)[%d+i];\n", (lut_startAddrCnGroups[1] >> 6) + lut_idxCnProcG4[j][k] / 2);
+        fprintf(fd,
+                "                zmm0 = ((simde__m512i*)cnProcBuf)[%d+i];\n",
+                (lut_startAddrCnGroups[1] >> 6) + lut_idxCnProcG4[j][k] / 2);
 
-        //                min  = _mm512_min_epu8(min, _mm512_abs_epi8(zmm0));
-        fprintf(fd, "                min  = _mm512_min_epu8(min, _mm512_abs_epi8(zmm0));\n");
+        //                min  = simde_mm512_min_epu8(min, simde_mm512_abs_epi8(zmm0));
+        fprintf(fd, "                min  = simde_mm512_min_epu8(min, simde_mm512_abs_epi8(zmm0));\n");
 
-        //                sgn  = _mm512_sign_epi8(sgn, zmm0);
-        fprintf(fd, "                sgn  = _mm512_xor_si512(sgn, zmm0);\n");
+        //                sgn  = simde_mm512_sign_epi8(sgn, zmm0);
+        fprintf(fd, "                sgn  = simde_mm512_xor_si512(sgn, zmm0);\n");
       }
 
       // Store result
-      //                min = _mm512_min_epu8(min, maxLLR); // 128 in epi8 is -127
-      fprintf(fd, "                min = _mm512_min_epu8(min, maxLLR);\n");
-      //                *p_cnProcBufResBit = _mm512_sign_epi8(min, sgn);
+      //                min = simde_mm512_min_epu8(min, maxLLR); // 128 in epi8 is -127
+      fprintf(fd, "                min = simde_mm512_min_epu8(min, maxLLR);\n");
+      //                *p_cnProcBufResBit = simde_mm512_sign_epi8(min, sgn);
       //                p_cnProcBufResBit++;
-      fprintf(fd, "                ((__m512i*)cnProcBufRes)[%d+i] = conditional_negate(min, sgn,zeros);\n", (lut_startAddrCnGroups[1] >> 6) + (j * bitOffsetInGroup));
+      fprintf(fd,
+              "                ((simde__m512i*)cnProcBufRes)[%d+i] = conditional_negate(min, sgn,zeros);\n",
+              (lut_startAddrCnGroups[1] >> 6) + (j * bitOffsetInGroup));
       fprintf(fd, "            }\n");
     }
   }
@@ -184,26 +196,32 @@ void nrLDPC_cnProc_BG2_generator_AVX512(const char *dir, int R)
       fprintf(fd, "            for (int i=0;i<M;i++) {\n");
       // Abs and sign of 64  CNs (first BN)
       //                zmm0 = p_cnProcBuf[lut_idxCnProcG3[j][0] + i];
-      fprintf(fd, "                zmm0 = ((__m512i*)cnProcBuf)[%d+i];\n", (lut_startAddrCnGroups[2] >> 6) + lut_idxCnProcG5[j][0] / 2);
-      fprintf(fd, "                sgn  = _mm512_xor_si512(ones, zmm0);\n");
-      fprintf(fd, "                min  = _mm512_abs_epi8(zmm0);\n");
+      fprintf(fd,
+              "                zmm0 = ((simde__m512i*)cnProcBuf)[%d+i];\n",
+              (lut_startAddrCnGroups[2] >> 6) + lut_idxCnProcG5[j][0] / 2);
+      fprintf(fd, "                sgn  = simde_mm512_xor_si512(ones, zmm0);\n");
+      fprintf(fd, "                min  = simde_mm512_abs_epi8(zmm0);\n");
 
       // Loop over BNs
       for (k = 1; k < 4; k++) {
-        fprintf(fd, "                zmm0 = ((__m512i*)cnProcBuf)[%d+i];\n", (lut_startAddrCnGroups[2] >> 6) + lut_idxCnProcG5[j][k] / 2);
+        fprintf(fd,
+                "                zmm0 = ((simde__m512i*)cnProcBuf)[%d+i];\n",
+                (lut_startAddrCnGroups[2] >> 6) + lut_idxCnProcG5[j][k] / 2);
 
-        //                min  = _mm512_min_epu8(min, _mm512_abs_epi8(zmm0));
-        fprintf(fd, "                min  = _mm512_min_epu8(min, _mm512_abs_epi8(zmm0));\n");
+        //                min  = simde_mm512_min_epu8(min, simde_mm512_abs_epi8(zmm0));
+        fprintf(fd, "                min  = simde_mm512_min_epu8(min, simde_mm512_abs_epi8(zmm0));\n");
 
-        //                sgn  = _mm512_sign_epi8(sgn, zmm0);
-        fprintf(fd, "                sgn  = _mm512_xor_si512(sgn, zmm0);\n");
+        //                sgn  = simde_mm512_sign_epi8(sgn, zmm0);
+        fprintf(fd, "                sgn  = simde_mm512_xor_si512(sgn, zmm0);\n");
       }
 
       // Store result
-      //                min = _mm512_min_epu8(min, maxLLR); // 128 in epi8 is -127
-      fprintf(fd, "                min = _mm512_min_epu8(min, maxLLR);\n");
+      //                min = simde_mm512_min_epu8(min, maxLLR); // 128 in epi8 is -127
+      fprintf(fd, "                min = simde_mm512_min_epu8(min, maxLLR);\n");
 
-      fprintf(fd, "                ((__m512i*)cnProcBufRes)[%d+i] = conditional_negate(min, sgn,zeros);\n", (lut_startAddrCnGroups[2] >> 6) + (j * bitOffsetInGroup));
+      fprintf(fd,
+              "                ((simde__m512i*)cnProcBufRes)[%d+i] = conditional_negate(min, sgn,zeros);\n",
+              (lut_startAddrCnGroups[2] >> 6) + (j * bitOffsetInGroup));
       fprintf(fd, "           }\n");
     }
   }
@@ -228,26 +246,32 @@ void nrLDPC_cnProc_BG2_generator_AVX512(const char *dir, int R)
       fprintf(fd, "            for (int i=0;i<M;i++) {\n");
       // Abs and sign of 64  CNs (first BN)
       //                zmm0 = p_cnProcBuf[lut_idxCnProcG3[j][0] + i];
-      fprintf(fd, "                zmm0 = ((__m512i*)cnProcBuf)[%d+i];\n", (lut_startAddrCnGroups[3] >> 6) + lut_idxCnProcG6[j][0] / 2);
-      fprintf(fd, "                sgn  = _mm512_xor_si512(ones, zmm0);\n");
-      fprintf(fd, "                min  = _mm512_abs_epi8(zmm0);\n");
+      fprintf(fd,
+              "                zmm0 = ((simde__m512i*)cnProcBuf)[%d+i];\n",
+              (lut_startAddrCnGroups[3] >> 6) + lut_idxCnProcG6[j][0] / 2);
+      fprintf(fd, "                sgn  = simde_mm512_xor_si512(ones, zmm0);\n");
+      fprintf(fd, "                min  = simde_mm512_abs_epi8(zmm0);\n");
 
       // Loop over BNs
       for (k = 1; k < 5; k++) {
-        fprintf(fd, "                zmm0 = ((__m512i*)cnProcBuf)[%d+i];\n", (lut_startAddrCnGroups[3] >> 6) + lut_idxCnProcG6[j][k] / 2);
+        fprintf(fd,
+                "                zmm0 = ((simde__m512i*)cnProcBuf)[%d+i];\n",
+                (lut_startAddrCnGroups[3] >> 6) + lut_idxCnProcG6[j][k] / 2);
 
-        //                min  = _mm512_min_epu8(min, _mm512_abs_epi8(zmm0));
-        fprintf(fd, "                min  = _mm512_min_epu8(min, _mm512_abs_epi8(zmm0));\n");
+        //                min  = simde_mm512_min_epu8(min, simde_mm512_abs_epi8(zmm0));
+        fprintf(fd, "                min  = simde_mm512_min_epu8(min, simde_mm512_abs_epi8(zmm0));\n");
 
-        //                sgn  = _mm512_sign_epi8(sgn, zmm0);
-        fprintf(fd, "                sgn  = _mm512_xor_si512(sgn, zmm0);\n");
+        //                sgn  = simde_mm512_sign_epi8(sgn, zmm0);
+        fprintf(fd, "                sgn  = simde_mm512_xor_si512(sgn, zmm0);\n");
       }
 
       // Store result
-      //                min = _mm512_min_epu8(min, maxLLR); // 128 in epi8 is -127
-      fprintf(fd, "                min = _mm512_min_epu8(min, maxLLR);\n");
+      //                min = simde_mm512_min_epu8(min, maxLLR); // 128 in epi8 is -127
+      fprintf(fd, "                min = simde_mm512_min_epu8(min, maxLLR);\n");
 
-      fprintf(fd, "                ((__m512i*)cnProcBufRes)[%d+i] = conditional_negate(min, sgn,zeros);\n", (lut_startAddrCnGroups[3] >> 6) + (j * bitOffsetInGroup));
+      fprintf(fd,
+              "                ((simde__m512i*)cnProcBufRes)[%d+i] = conditional_negate(min, sgn,zeros);\n",
+              (lut_startAddrCnGroups[3] >> 6) + (j * bitOffsetInGroup));
       fprintf(fd, "            }\n");
     }
   }
@@ -279,26 +303,32 @@ void nrLDPC_cnProc_BG2_generator_AVX512(const char *dir, int R)
       fprintf(fd, "            for (int i=0;i<M;i++) {\n");
       // Abs and sign of 64  CNs (first BN)
       //                zmm0 = p_cnProcBuf[lut_idxCnProcG3[j][0] + i];
-      fprintf(fd, "                zmm0 = ((__m512i*)cnProcBuf)[%d+i];\n", (lut_startAddrCnGroups[4] >> 6) + lut_idxCnProcG8[j][0] / 2);
-      fprintf(fd, "                sgn  = _mm512_xor_si512(ones, zmm0);\n");
-      fprintf(fd, "                min  = _mm512_abs_epi8(zmm0);\n");
+      fprintf(fd,
+              "                zmm0 = ((simde__m512i*)cnProcBuf)[%d+i];\n",
+              (lut_startAddrCnGroups[4] >> 6) + lut_idxCnProcG8[j][0] / 2);
+      fprintf(fd, "                sgn  = simde_mm512_xor_si512(ones, zmm0);\n");
+      fprintf(fd, "                min  = simde_mm512_abs_epi8(zmm0);\n");
 
       // Loop over BNs
       for (k = 1; k < 7; k++) {
-        fprintf(fd, "                zmm0 = ((__m512i*)cnProcBuf)[%d+i];\n", (lut_startAddrCnGroups[4] >> 6) + lut_idxCnProcG8[j][k] / 2);
+        fprintf(fd,
+                "                zmm0 = ((simde__m512i*)cnProcBuf)[%d+i];\n",
+                (lut_startAddrCnGroups[4] >> 6) + lut_idxCnProcG8[j][k] / 2);
 
-        //                min  = _mm512_min_epu8(min, _mm512_abs_epi8(zmm0));
-        fprintf(fd, "                min  = _mm512_min_epu8(min, _mm512_abs_epi8(zmm0));\n");
+        //                min  = simde_mm512_min_epu8(min, simde_mm512_abs_epi8(zmm0));
+        fprintf(fd, "                min  = simde_mm512_min_epu8(min, simde_mm512_abs_epi8(zmm0));\n");
 
-        //                sgn  = _mm512_sign_epi8(sgn, zmm0);
-        fprintf(fd, "                sgn  = _mm512_xor_si512(sgn, zmm0);\n");
+        //                sgn  = simde_mm512_sign_epi8(sgn, zmm0);
+        fprintf(fd, "                sgn  = simde_mm512_xor_si512(sgn, zmm0);\n");
       }
 
       // Store result
-      //                min = _mm512_min_epu8(min, maxLLR); // 128 in epi8 is -127
-      fprintf(fd, "                min = _mm512_min_epu8(min, maxLLR);\n");
+      //                min = simde_mm512_min_epu8(min, maxLLR); // 128 in epi8 is -127
+      fprintf(fd, "                min = simde_mm512_min_epu8(min, maxLLR);\n");
 
-      fprintf(fd, "                ((__m512i*)cnProcBufRes)[%d+i] = conditional_negate(min, sgn,zeros);\n", (lut_startAddrCnGroups[4] >> 6) + (j * bitOffsetInGroup));
+      fprintf(fd,
+              "                ((simde__m512i*)cnProcBufRes)[%d+i] = conditional_negate(min, sgn,zeros);\n",
+              (lut_startAddrCnGroups[4] >> 6) + (j * bitOffsetInGroup));
       fprintf(fd, "              }\n");
     }
   }
@@ -332,26 +362,32 @@ void nrLDPC_cnProc_BG2_generator_AVX512(const char *dir, int R)
       fprintf(fd, "            for (int i=0;i<M;i++) {\n");
       // Abs and sign of 64  CNs (first BN)
       //                zmm0 = p_cnProcBuf[lut_idxCnProcG3[j][0] + i];
-      fprintf(fd, "                zmm0 = ((__m512i*)cnProcBuf)[%d+i];\n", (lut_startAddrCnGroups[5] >> 6) + lut_idxCnProcG10[j][0] / 2);
-      fprintf(fd, "                sgn  = _mm512_xor_si512(ones, zmm0);\n");
-      fprintf(fd, "                min  = _mm512_abs_epi8(zmm0);\n");
+      fprintf(fd,
+              "                zmm0 = ((simde__m512i*)cnProcBuf)[%d+i];\n",
+              (lut_startAddrCnGroups[5] >> 6) + lut_idxCnProcG10[j][0] / 2);
+      fprintf(fd, "                sgn  = simde_mm512_xor_si512(ones, zmm0);\n");
+      fprintf(fd, "                min  = simde_mm512_abs_epi8(zmm0);\n");
 
       // Loop over BNs
       for (k = 1; k < 9; k++) {
-        fprintf(fd, "                zmm0 = ((__m512i*)cnProcBuf)[%d+i];\n", (lut_startAddrCnGroups[5] >> 6) + lut_idxCnProcG10[j][k] / 2);
+        fprintf(fd,
+                "                zmm0 = ((simde__m512i*)cnProcBuf)[%d+i];\n",
+                (lut_startAddrCnGroups[5] >> 6) + lut_idxCnProcG10[j][k] / 2);
 
-        //                min  = _mm512_min_epu8(min, _mm512_abs_epi8(zmm0));
-        fprintf(fd, "                min  = _mm512_min_epu8(min, _mm512_abs_epi8(zmm0));\n");
+        //                min  = simde_mm512_min_epu8(min, simde_mm512_abs_epi8(zmm0));
+        fprintf(fd, "                min  = simde_mm512_min_epu8(min, simde_mm512_abs_epi8(zmm0));\n");
 
-        //                sgn  = _mm512_sign_epi8(sgn, zmm0);
-        fprintf(fd, "                sgn  = _mm512_xor_si512(sgn, zmm0);\n");
+        //                sgn  = simde_mm512_sign_epi8(sgn, zmm0);
+        fprintf(fd, "                sgn  = simde_mm512_xor_si512(sgn, zmm0);\n");
       }
 
       // Store result
-      //                min = _mm512_min_epu8(min, maxLLR); // 128 in epi8 is -127
-      fprintf(fd, "                min = _mm512_min_epu8(min, maxLLR);\n");
+      //                min = simde_mm512_min_epu8(min, maxLLR); // 128 in epi8 is -127
+      fprintf(fd, "                min = simde_mm512_min_epu8(min, maxLLR);\n");
 
-      fprintf(fd, "                ((__m512i*)cnProcBufRes)[%d+i] = conditional_negate(min,sgn,zeros);\n", (lut_startAddrCnGroups[5] >> 6) + (j * bitOffsetInGroup));
+      fprintf(fd,
+              "                ((simde__m512i*)cnProcBufRes)[%d+i] = conditional_negate(min,sgn,zeros);\n",
+              (lut_startAddrCnGroups[5] >> 6) + (j * bitOffsetInGroup));
       fprintf(fd, "            }\n");
     }
   }
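The AVX512 kernels track the sign differently from the AVX2 ones because AVX512BW has no sign_epi8 instruction: the generated code XORs the raw LLR words to accumulate the product of their sign bits and applies the result once at the end through conditional_negate, a masked subtract from zero. A small self-contained sketch of that trick, assuming a SIMDE release recent enough to provide the 512-bit mask operations:

    #include <simde/x86/avx512.h>

    /* As defined in the generated headers: negate the bytes of a wherever the
     * corresponding byte of b has its sign bit set. */
    #define conditional_negate(a, b, z) \
      simde_mm512_mask_sub_epi8(a, simde_mm512_movepi8_mask(b), z, a)

    static simde__m512i apply_sign_sketch(simde__m512i min, simde__m512i sgn)
    {
      simde__m512i zeros = simde_mm512_setzero_si512();
      return conditional_negate(min, sgn, zeros); /* zeros - min where sgn < 0 */
    }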
diff --git a/openair1/PHY/CODING/nrLDPC_decoder/nrLDPC_tools/nrLDPC_debug.h b/openair1/PHY/CODING/nrLDPC_decoder/nrLDPC_tools/nrLDPC_debug.h
index 41c0a474a4457ad6ea975f3b15c5fa4ed50ceed9..13f533781eb841e2b5d4ac52c99954e388097a08 100644
--- a/openair1/PHY/CODING/nrLDPC_decoder/nrLDPC_tools/nrLDPC_debug.h
+++ b/openair1/PHY/CODING/nrLDPC_decoder/nrLDPC_tools/nrLDPC_debug.h
@@ -169,7 +169,7 @@ static inline void nrLDPC_debug_initBuffer2File(e_nrLDPC_buffers buffer)
   \brief Prints 256-bit data type
    \param in Input to print
 */
-static inline void nrLDPC_debug_print256i_epi8(__m256i* in)
+static inline void nrLDPC_debug_print256i_epi8(simde__m256i* in)
 {
     uint32_t i;
 
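
Note: only the parameter type changes here; the printer's body is untouched by this diff. For reference, such a byte-wise dump can be written portably with SIMDE alone, e.g. (a sketch under that assumption, not the file's actual body):

    static inline void print_m256i_epi8(simde__m256i* in)
    {
      int8_t b[32];
      simde_mm256_storeu_si256((simde__m256i*)b, *in); // spill the 32 bytes to memory
      for (int i = 0; i < 32; i++)
        printf("%4d", b[i]);
      printf("\n");
    }
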
diff --git a/openair1/PHY/CODING/nrLDPC_encoder/ldpc176_byte.c b/openair1/PHY/CODING/nrLDPC_encoder/ldpc176_byte.c
index 6f56bbe844e28da1f91f4dba6aff5943c4c5a3a5..a4b266915519cd2136d498184a26e9979d7cf769 100644
--- a/openair1/PHY/CODING/nrLDPC_encoder/ldpc176_byte.c
+++ b/openair1/PHY/CODING/nrLDPC_encoder/ldpc176_byte.c
@@ -1,9 +1,9 @@
 #include "PHY/sse_intrin.h"
 // generated code for Zc=176, byte encoding
 static inline void ldpc176_byte(uint8_t *c,uint8_t *d) {
-  __m128i *csimd=(__m128i *)c,*dsimd=(__m128i *)d;
+  simde__m128i *csimd=(simde__m128i *)c,*dsimd=(simde__m128i *)d;
 
-  __m128i *c2,*d2;
+  simde__m128i *c2,*d2;
 
   int i2;
   for (i2=0; i2<11; i2++) {
@@ -11,141 +11,141 @@ static inline void ldpc176_byte(uint8_t *c,uint8_t *d) {
      d2=&dsimd[i2];
 
 //row: 0
-     d2[0]=_mm_xor_si128(c2[2911],_mm_xor_si128(c2[6296],_mm_xor_si128(c2[971],_mm_xor_si128(c2[490],_mm_xor_si128(c2[2933],_mm_xor_si128(c2[1476],_mm_xor_si128(c2[6803],_mm_xor_si128(c2[3442],_mm_xor_si128(c2[531],_mm_xor_si128(c2[2466],_mm_xor_si128(c2[4915],_mm_xor_si128(c2[72],_mm_xor_si128(c2[553],_mm_xor_si128(c2[1066],_mm_xor_si128(c2[6871],_mm_xor_si128(c2[3482],_mm_xor_si128(c2[7371],_mm_xor_si128(c2[5923],_mm_xor_si128(c2[7376],_mm_xor_si128(c2[2562],_mm_xor_si128(c2[4493],_mm_xor_si128(c2[5464],_mm_xor_si128(c2[161],_mm_xor_si128(c2[162],_mm_xor_si128(c2[3068],_mm_xor_si128(c2[6959],_mm_xor_si128(c2[6471],_mm_xor_si128(c2[5505],_mm_xor_si128(c2[1654],_mm_xor_si128(c2[1169],_mm_xor_si128(c2[3109],_mm_xor_si128(c2[5064],_mm_xor_si128(c2[7487],_mm_xor_si128(c2[5066],_mm_xor_si128(c2[726],_mm_xor_si128(c2[3640],_mm_xor_si128(c2[3150],_mm_xor_si128(c2[1726],_mm_xor_si128(c2[6560],_mm_xor_si128(c2[4630],_mm_xor_si128(c2[4643],_mm_xor_si128(c2[7068],_mm_xor_si128(c2[6583],_mm_xor_si128(c2[7093],_mm_xor_si128(c2[7090],_mm_xor_si128(c2[4664],_mm_xor_si128(c2[2753],_mm_xor_si128(c2[1302],_mm_xor_si128(c2[6629],_mm_xor_si128(c2[5686],_mm_xor_si128(c2[841],_mm_xor_si128(c2[4712],_mm_xor_si128(c2[2802],_mm_xor_si128(c2[1834],_mm_xor_si128(c2[3764],_mm_xor_si128(c2[404],_mm_xor_si128(c2[1858],_mm_xor_si128(c2[7664],_mm_xor_si128(c2[3816],_mm_xor_si128(c2[4774],_mm_xor_si128(c2[1388],_mm_xor_si128(c2[7702],_mm_xor_si128(c2[3828],_mm_xor_si128(c2[4803],_mm_xor_si128(c2[6270],_mm_xor_si128(c2[7245],c2[4825]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+     d2[0]=simde_mm_xor_si128(c2[2911],simde_mm_xor_si128(c2[6296],simde_mm_xor_si128(c2[971],simde_mm_xor_si128(c2[490],simde_mm_xor_si128(c2[2933],simde_mm_xor_si128(c2[1476],simde_mm_xor_si128(c2[6803],simde_mm_xor_si128(c2[3442],simde_mm_xor_si128(c2[531],simde_mm_xor_si128(c2[2466],simde_mm_xor_si128(c2[4915],simde_mm_xor_si128(c2[72],simde_mm_xor_si128(c2[553],simde_mm_xor_si128(c2[1066],simde_mm_xor_si128(c2[6871],simde_mm_xor_si128(c2[3482],simde_mm_xor_si128(c2[7371],simde_mm_xor_si128(c2[5923],simde_mm_xor_si128(c2[7376],simde_mm_xor_si128(c2[2562],simde_mm_xor_si128(c2[4493],simde_mm_xor_si128(c2[5464],simde_mm_xor_si128(c2[161],simde_mm_xor_si128(c2[162],simde_mm_xor_si128(c2[3068],simde_mm_xor_si128(c2[6959],simde_mm_xor_si128(c2[6471],simde_mm_xor_si128(c2[5505],simde_mm_xor_si128(c2[1654],simde_mm_xor_si128(c2[1169],simde_mm_xor_si128(c2[3109],simde_mm_xor_si128(c2[5064],simde_mm_xor_si128(c2[7487],simde_mm_xor_si128(c2[5066],simde_mm_xor_si128(c2[726],simde_mm_xor_si128(c2[3640],simde_mm_xor_si128(c2[3150],simde_mm_xor_si128(c2[1726],simde_mm_xor_si128(c2[6560],simde_mm_xor_si128(c2[4630],simde_mm_xor_si128(c2[4643],simde_mm_xor_si128(c2[7068],simde_mm_xor_si128(c2[6583],simde_mm_xor_si128(c2[7093],simde_mm_xor_si128(c2[7090],simde_mm_xor_si128(c2[4664],simde_mm_xor_si128(c2[2753],simde_mm_xor_si128(c2[1302],simde_mm_xor_si128(c2[6629],simde_mm_xor_si128(c2[5686],simde_mm_xor_si128(c2[841],simde_mm_xor_si128(c2[4712],simde_mm_xor_si128(c2[2802],simde_mm_xor_si128(c2[1834],simde_mm_xor_si128(c2[3764],simde_mm_xor_si128(c2[404],simde_mm_xor_si128(c2[1858],simde_mm_xor_si128(c2[7664],simde_mm_xor_si128(c2[3816],simde_mm_xor_si128(c2[4774],simde_mm_xor_si128(c2[1388],simde_mm_xor_si128(c2[7702],simde_mm_xor_si128(c2[3828],simde_mm_xor_si128(c2[4803],simde_mm_xor_si128(c2[6270],simde_mm_xor_si128(c2[7245],c2[4825]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
 
 //row: 1
-     d2[11]=_mm_xor_si128(c2[2911],_mm_xor_si128(c2[3395],_mm_xor_si128(c2[6780],_mm_xor_si128(c2[1455],_mm_xor_si128(c2[974],_mm_xor_si128(c2[2933],_mm_xor_si128(c2[3417],_mm_xor_si128(c2[1960],_mm_xor_si128(c2[7287],_mm_xor_si128(c2[3442],_mm_xor_si128(c2[3926],_mm_xor_si128(c2[1015],_mm_xor_si128(c2[2950],_mm_xor_si128(c2[4915],_mm_xor_si128(c2[5399],_mm_xor_si128(c2[556],_mm_xor_si128(c2[1037],_mm_xor_si128(c2[1550],_mm_xor_si128(c2[7355],_mm_xor_si128(c2[3966],_mm_xor_si128(c2[7371],_mm_xor_si128(c2[112],_mm_xor_si128(c2[6407],_mm_xor_si128(c2[117],_mm_xor_si128(c2[2562],_mm_xor_si128(c2[3046],_mm_xor_si128(c2[4977],_mm_xor_si128(c2[5948],_mm_xor_si128(c2[645],_mm_xor_si128(c2[646],_mm_xor_si128(c2[3552],_mm_xor_si128(c2[7443],_mm_xor_si128(c2[6955],_mm_xor_si128(c2[5989],_mm_xor_si128(c2[1654],_mm_xor_si128(c2[2138],_mm_xor_si128(c2[1653],_mm_xor_si128(c2[3593],_mm_xor_si128(c2[5064],_mm_xor_si128(c2[5548],_mm_xor_si128(c2[228],_mm_xor_si128(c2[5550],_mm_xor_si128(c2[726],_mm_xor_si128(c2[1210],_mm_xor_si128(c2[4124],_mm_xor_si128(c2[3634],_mm_xor_si128(c2[1726],_mm_xor_si128(c2[2210],_mm_xor_si128(c2[7044],_mm_xor_si128(c2[5114],_mm_xor_si128(c2[4643],_mm_xor_si128(c2[5127],_mm_xor_si128(c2[7552],_mm_xor_si128(c2[7067],_mm_xor_si128(c2[7577],_mm_xor_si128(c2[7574],_mm_xor_si128(c2[5148],_mm_xor_si128(c2[2753],_mm_xor_si128(c2[3237],_mm_xor_si128(c2[1786],_mm_xor_si128(c2[7113],_mm_xor_si128(c2[5686],_mm_xor_si128(c2[6170],_mm_xor_si128(c2[1325],_mm_xor_si128(c2[5196],_mm_xor_si128(c2[3286],_mm_xor_si128(c2[2318],_mm_xor_si128(c2[4248],_mm_xor_si128(c2[404],_mm_xor_si128(c2[888],_mm_xor_si128(c2[2342],_mm_xor_si128(c2[405],_mm_xor_si128(c2[3816],_mm_xor_si128(c2[4300],_mm_xor_si128(c2[5258],_mm_xor_si128(c2[1872],_mm_xor_si128(c2[7702],_mm_xor_si128(c2[443],_mm_xor_si128(c2[4312],_mm_xor_si128(c2[5287],_mm_xor_si128(c2[6270],_mm_xor_si128(c2[6754],_mm_xor_si128(c2[7729],c2[5309])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+     d2[11]=simde_mm_xor_si128(c2[2911],simde_mm_xor_si128(c2[3395],simde_mm_xor_si128(c2[6780],simde_mm_xor_si128(c2[1455],simde_mm_xor_si128(c2[974],simde_mm_xor_si128(c2[2933],simde_mm_xor_si128(c2[3417],simde_mm_xor_si128(c2[1960],simde_mm_xor_si128(c2[7287],simde_mm_xor_si128(c2[3442],simde_mm_xor_si128(c2[3926],simde_mm_xor_si128(c2[1015],simde_mm_xor_si128(c2[2950],simde_mm_xor_si128(c2[4915],simde_mm_xor_si128(c2[5399],simde_mm_xor_si128(c2[556],simde_mm_xor_si128(c2[1037],simde_mm_xor_si128(c2[1550],simde_mm_xor_si128(c2[7355],simde_mm_xor_si128(c2[3966],simde_mm_xor_si128(c2[7371],simde_mm_xor_si128(c2[112],simde_mm_xor_si128(c2[6407],simde_mm_xor_si128(c2[117],simde_mm_xor_si128(c2[2562],simde_mm_xor_si128(c2[3046],simde_mm_xor_si128(c2[4977],simde_mm_xor_si128(c2[5948],simde_mm_xor_si128(c2[645],simde_mm_xor_si128(c2[646],simde_mm_xor_si128(c2[3552],simde_mm_xor_si128(c2[7443],simde_mm_xor_si128(c2[6955],simde_mm_xor_si128(c2[5989],simde_mm_xor_si128(c2[1654],simde_mm_xor_si128(c2[2138],simde_mm_xor_si128(c2[1653],simde_mm_xor_si128(c2[3593],simde_mm_xor_si128(c2[5064],simde_mm_xor_si128(c2[5548],simde_mm_xor_si128(c2[228],simde_mm_xor_si128(c2[5550],simde_mm_xor_si128(c2[726],simde_mm_xor_si128(c2[1210],simde_mm_xor_si128(c2[4124],simde_mm_xor_si128(c2[3634],simde_mm_xor_si128(c2[1726],simde_mm_xor_si128(c2[2210],simde_mm_xor_si128(c2[7044],simde_mm_xor_si128(c2[5114],simde_mm_xor_si128(c2[4643],simde_mm_xor_si128(c2[5127],simde_mm_xor_si128(c2[7552],simde_mm_xor_si128(c2[7067],simde_mm_xor_si128(c2[7577],simde_mm_xor_si128(c2[7574],simde_mm_xor_si128(c2[5148],simde_mm_xor_si128(c2[2753],simde_mm_xor_si128(c2[3237],simde_mm_xor_si128(c2[1786],simde_mm_xor_si128(c2[7113],simde_mm_xor_si128(c2[5686],simde_mm_xor_si128(c2[6170],simde_mm_xor_si128(c2[1325],simde_mm_xor_si128(c2[5196],simde_mm_xor_si128(c2[3286],simde_mm_xor_si128(c2[2318],simde_mm_xor_si128(c2[4248],simde_mm_xor_si128(c2[404],simde_mm_xor_si128(c2[888],simde_mm_xor_si128(c2[2342],simde_mm_xor_si128(c2[405],simde_mm_xor_si128(c2[3816],simde_mm_xor_si128(c2[4300],simde_mm_xor_si128(c2[5258],simde_mm_xor_si128(c2[1872],simde_mm_xor_si128(c2[7702],simde_mm_xor_si128(c2[443],simde_mm_xor_si128(c2[4312],simde_mm_xor_si128(c2[5287],simde_mm_xor_si128(c2[6270],simde_mm_xor_si128(c2[6754],simde_mm_xor_si128(c2[7729],c2[5309])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
 
 //row: 2
-     d2[22]=_mm_xor_si128(c2[3395],_mm_xor_si128(c2[6780],_mm_xor_si128(c2[971],_mm_xor_si128(c2[1455],_mm_xor_si128(c2[490],_mm_xor_si128(c2[974],_mm_xor_si128(c2[3417],_mm_xor_si128(c2[1476],_mm_xor_si128(c2[1960],_mm_xor_si128(c2[6803],_mm_xor_si128(c2[7287],_mm_xor_si128(c2[3926],_mm_xor_si128(c2[1015],_mm_xor_si128(c2[2466],_mm_xor_si128(c2[2950],_mm_xor_si128(c2[5399],_mm_xor_si128(c2[556],_mm_xor_si128(c2[553],_mm_xor_si128(c2[1037],_mm_xor_si128(c2[1550],_mm_xor_si128(c2[6871],_mm_xor_si128(c2[7355],_mm_xor_si128(c2[3482],_mm_xor_si128(c2[3966],_mm_xor_si128(c2[112],_mm_xor_si128(c2[6407],_mm_xor_si128(c2[7376],_mm_xor_si128(c2[117],_mm_xor_si128(c2[3046],_mm_xor_si128(c2[4493],_mm_xor_si128(c2[4977],_mm_xor_si128(c2[5464],_mm_xor_si128(c2[5948],_mm_xor_si128(c2[645],_mm_xor_si128(c2[162],_mm_xor_si128(c2[646],_mm_xor_si128(c2[3068],_mm_xor_si128(c2[3552],_mm_xor_si128(c2[7443],_mm_xor_si128(c2[6471],_mm_xor_si128(c2[6955],_mm_xor_si128(c2[5505],_mm_xor_si128(c2[5989],_mm_xor_si128(c2[2138],_mm_xor_si128(c2[1653],_mm_xor_si128(c2[3109],_mm_xor_si128(c2[3593],_mm_xor_si128(c2[5548],_mm_xor_si128(c2[7487],_mm_xor_si128(c2[228],_mm_xor_si128(c2[5066],_mm_xor_si128(c2[5550],_mm_xor_si128(c2[1210],_mm_xor_si128(c2[4124],_mm_xor_si128(c2[3150],_mm_xor_si128(c2[3634],_mm_xor_si128(c2[2210],_mm_xor_si128(c2[7044],_mm_xor_si128(c2[4630],_mm_xor_si128(c2[5114],_mm_xor_si128(c2[5127],_mm_xor_si128(c2[7068],_mm_xor_si128(c2[7552],_mm_xor_si128(c2[6583],_mm_xor_si128(c2[7067],_mm_xor_si128(c2[7577],_mm_xor_si128(c2[7090],_mm_xor_si128(c2[7574],_mm_xor_si128(c2[4664],_mm_xor_si128(c2[5148],_mm_xor_si128(c2[3237],_mm_xor_si128(c2[1786],_mm_xor_si128(c2[6629],_mm_xor_si128(c2[7113],_mm_xor_si128(c2[6170],_mm_xor_si128(c2[1325],_mm_xor_si128(c2[4712],_mm_xor_si128(c2[5196],_mm_xor_si128(c2[3286],_mm_xor_si128(c2[1834],_mm_xor_si128(c2[2318],_mm_xor_si128(c2[3764],_mm_xor_si128(c2[4248],_mm_xor_si128(c2[888],_mm_xor_si128(c2[1858],_mm_xor_si128(c2[2342],_mm_xor_si128(c2[7664],_mm_xor_si128(c2[405],_mm_xor_si128(c2[4300],_mm_xor_si128(c2[5258],_mm_xor_si128(c2[1388],_mm_xor_si128(c2[1872],_mm_xor_si128(c2[443],_mm_xor_si128(c2[3828],_mm_xor_si128(c2[4312],_mm_xor_si128(c2[4803],_mm_xor_si128(c2[5287],_mm_xor_si128(c2[6754],_mm_xor_si128(c2[7729],_mm_xor_si128(c2[4825],c2[5309]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+     d2[22]=simde_mm_xor_si128(c2[3395],simde_mm_xor_si128(c2[6780],simde_mm_xor_si128(c2[971],simde_mm_xor_si128(c2[1455],simde_mm_xor_si128(c2[490],simde_mm_xor_si128(c2[974],simde_mm_xor_si128(c2[3417],simde_mm_xor_si128(c2[1476],simde_mm_xor_si128(c2[1960],simde_mm_xor_si128(c2[6803],simde_mm_xor_si128(c2[7287],simde_mm_xor_si128(c2[3926],simde_mm_xor_si128(c2[1015],simde_mm_xor_si128(c2[2466],simde_mm_xor_si128(c2[2950],simde_mm_xor_si128(c2[5399],simde_mm_xor_si128(c2[556],simde_mm_xor_si128(c2[553],simde_mm_xor_si128(c2[1037],simde_mm_xor_si128(c2[1550],simde_mm_xor_si128(c2[6871],simde_mm_xor_si128(c2[7355],simde_mm_xor_si128(c2[3482],simde_mm_xor_si128(c2[3966],simde_mm_xor_si128(c2[112],simde_mm_xor_si128(c2[6407],simde_mm_xor_si128(c2[7376],simde_mm_xor_si128(c2[117],simde_mm_xor_si128(c2[3046],simde_mm_xor_si128(c2[4493],simde_mm_xor_si128(c2[4977],simde_mm_xor_si128(c2[5464],simde_mm_xor_si128(c2[5948],simde_mm_xor_si128(c2[645],simde_mm_xor_si128(c2[162],simde_mm_xor_si128(c2[646],simde_mm_xor_si128(c2[3068],simde_mm_xor_si128(c2[3552],simde_mm_xor_si128(c2[7443],simde_mm_xor_si128(c2[6471],simde_mm_xor_si128(c2[6955],simde_mm_xor_si128(c2[5505],simde_mm_xor_si128(c2[5989],simde_mm_xor_si128(c2[2138],simde_mm_xor_si128(c2[1653],simde_mm_xor_si128(c2[3109],simde_mm_xor_si128(c2[3593],simde_mm_xor_si128(c2[5548],simde_mm_xor_si128(c2[7487],simde_mm_xor_si128(c2[228],simde_mm_xor_si128(c2[5066],simde_mm_xor_si128(c2[5550],simde_mm_xor_si128(c2[1210],simde_mm_xor_si128(c2[4124],simde_mm_xor_si128(c2[3150],simde_mm_xor_si128(c2[3634],simde_mm_xor_si128(c2[2210],simde_mm_xor_si128(c2[7044],simde_mm_xor_si128(c2[4630],simde_mm_xor_si128(c2[5114],simde_mm_xor_si128(c2[5127],simde_mm_xor_si128(c2[7068],simde_mm_xor_si128(c2[7552],simde_mm_xor_si128(c2[6583],simde_mm_xor_si128(c2[7067],simde_mm_xor_si128(c2[7577],simde_mm_xor_si128(c2[7090],simde_mm_xor_si128(c2[7574],simde_mm_xor_si128(c2[4664],simde_mm_xor_si128(c2[5148],simde_mm_xor_si128(c2[3237],simde_mm_xor_si128(c2[1786],simde_mm_xor_si128(c2[6629],simde_mm_xor_si128(c2[7113],simde_mm_xor_si128(c2[6170],simde_mm_xor_si128(c2[1325],simde_mm_xor_si128(c2[4712],simde_mm_xor_si128(c2[5196],simde_mm_xor_si128(c2[3286],simde_mm_xor_si128(c2[1834],simde_mm_xor_si128(c2[2318],simde_mm_xor_si128(c2[3764],simde_mm_xor_si128(c2[4248],simde_mm_xor_si128(c2[888],simde_mm_xor_si128(c2[1858],simde_mm_xor_si128(c2[2342],simde_mm_xor_si128(c2[7664],simde_mm_xor_si128(c2[405],simde_mm_xor_si128(c2[4300],simde_mm_xor_si128(c2[5258],simde_mm_xor_si128(c2[1388],simde_mm_xor_si128(c2[1872],simde_mm_xor_si128(c2[443],simde_mm_xor_si128(c2[3828],simde_mm_xor_si128(c2[4312],simde_mm_xor_si128(c2[4803],simde_mm_xor_si128(c2[5287],simde_mm_xor_si128(c2[6754],simde_mm_xor_si128(c2[7729],simde_mm_xor_si128(c2[4825],c2[5309]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
 
 //row: 3
-     d2[33]=_mm_xor_si128(c2[3395],_mm_xor_si128(c2[6780],_mm_xor_si128(c2[1455],_mm_xor_si128(c2[490],_mm_xor_si128(c2[974],_mm_xor_si128(c2[3417],_mm_xor_si128(c2[1960],_mm_xor_si128(c2[6803],_mm_xor_si128(c2[7287],_mm_xor_si128(c2[3926],_mm_xor_si128(c2[1015],_mm_xor_si128(c2[2950],_mm_xor_si128(c2[5399],_mm_xor_si128(c2[556],_mm_xor_si128(c2[553],_mm_xor_si128(c2[1037],_mm_xor_si128(c2[1550],_mm_xor_si128(c2[7355],_mm_xor_si128(c2[3482],_mm_xor_si128(c2[3966],_mm_xor_si128(c2[112],_mm_xor_si128(c2[6407],_mm_xor_si128(c2[117],_mm_xor_si128(c2[3046],_mm_xor_si128(c2[4977],_mm_xor_si128(c2[5464],_mm_xor_si128(c2[5948],_mm_xor_si128(c2[645],_mm_xor_si128(c2[646],_mm_xor_si128(c2[3068],_mm_xor_si128(c2[3552],_mm_xor_si128(c2[7443],_mm_xor_si128(c2[6955],_mm_xor_si128(c2[5505],_mm_xor_si128(c2[5989],_mm_xor_si128(c2[2138],_mm_xor_si128(c2[1653],_mm_xor_si128(c2[3593],_mm_xor_si128(c2[5548],_mm_xor_si128(c2[228],_mm_xor_si128(c2[5066],_mm_xor_si128(c2[5550],_mm_xor_si128(c2[1210],_mm_xor_si128(c2[4124],_mm_xor_si128(c2[3150],_mm_xor_si128(c2[3634],_mm_xor_si128(c2[2210],_mm_xor_si128(c2[7044],_mm_xor_si128(c2[4630],_mm_xor_si128(c2[5114],_mm_xor_si128(c2[5127],_mm_xor_si128(c2[7552],_mm_xor_si128(c2[6583],_mm_xor_si128(c2[7067],_mm_xor_si128(c2[7577],_mm_xor_si128(c2[7574],_mm_xor_si128(c2[4664],_mm_xor_si128(c2[5148],_mm_xor_si128(c2[3237],_mm_xor_si128(c2[1786],_mm_xor_si128(c2[7113],_mm_xor_si128(c2[6170],_mm_xor_si128(c2[1325],_mm_xor_si128(c2[4712],_mm_xor_si128(c2[5196],_mm_xor_si128(c2[3286],_mm_xor_si128(c2[2318],_mm_xor_si128(c2[3764],_mm_xor_si128(c2[4248],_mm_xor_si128(c2[888],_mm_xor_si128(c2[2342],_mm_xor_si128(c2[7664],_mm_xor_si128(c2[405],_mm_xor_si128(c2[4300],_mm_xor_si128(c2[5258],_mm_xor_si128(c2[1872],_mm_xor_si128(c2[443],_mm_xor_si128(c2[4312],_mm_xor_si128(c2[4803],_mm_xor_si128(c2[5287],_mm_xor_si128(c2[6754],_mm_xor_si128(c2[7729],_mm_xor_si128(c2[4825],c2[5309])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+     d2[33]=simde_mm_xor_si128(c2[3395],simde_mm_xor_si128(c2[6780],simde_mm_xor_si128(c2[1455],simde_mm_xor_si128(c2[490],simde_mm_xor_si128(c2[974],simde_mm_xor_si128(c2[3417],simde_mm_xor_si128(c2[1960],simde_mm_xor_si128(c2[6803],simde_mm_xor_si128(c2[7287],simde_mm_xor_si128(c2[3926],simde_mm_xor_si128(c2[1015],simde_mm_xor_si128(c2[2950],simde_mm_xor_si128(c2[5399],simde_mm_xor_si128(c2[556],simde_mm_xor_si128(c2[553],simde_mm_xor_si128(c2[1037],simde_mm_xor_si128(c2[1550],simde_mm_xor_si128(c2[7355],simde_mm_xor_si128(c2[3482],simde_mm_xor_si128(c2[3966],simde_mm_xor_si128(c2[112],simde_mm_xor_si128(c2[6407],simde_mm_xor_si128(c2[117],simde_mm_xor_si128(c2[3046],simde_mm_xor_si128(c2[4977],simde_mm_xor_si128(c2[5464],simde_mm_xor_si128(c2[5948],simde_mm_xor_si128(c2[645],simde_mm_xor_si128(c2[646],simde_mm_xor_si128(c2[3068],simde_mm_xor_si128(c2[3552],simde_mm_xor_si128(c2[7443],simde_mm_xor_si128(c2[6955],simde_mm_xor_si128(c2[5505],simde_mm_xor_si128(c2[5989],simde_mm_xor_si128(c2[2138],simde_mm_xor_si128(c2[1653],simde_mm_xor_si128(c2[3593],simde_mm_xor_si128(c2[5548],simde_mm_xor_si128(c2[228],simde_mm_xor_si128(c2[5066],simde_mm_xor_si128(c2[5550],simde_mm_xor_si128(c2[1210],simde_mm_xor_si128(c2[4124],simde_mm_xor_si128(c2[3150],simde_mm_xor_si128(c2[3634],simde_mm_xor_si128(c2[2210],simde_mm_xor_si128(c2[7044],simde_mm_xor_si128(c2[4630],simde_mm_xor_si128(c2[5114],simde_mm_xor_si128(c2[5127],simde_mm_xor_si128(c2[7552],simde_mm_xor_si128(c2[6583],simde_mm_xor_si128(c2[7067],simde_mm_xor_si128(c2[7577],simde_mm_xor_si128(c2[7574],simde_mm_xor_si128(c2[4664],simde_mm_xor_si128(c2[5148],simde_mm_xor_si128(c2[3237],simde_mm_xor_si128(c2[1786],simde_mm_xor_si128(c2[7113],simde_mm_xor_si128(c2[6170],simde_mm_xor_si128(c2[1325],simde_mm_xor_si128(c2[4712],simde_mm_xor_si128(c2[5196],simde_mm_xor_si128(c2[3286],simde_mm_xor_si128(c2[2318],simde_mm_xor_si128(c2[3764],simde_mm_xor_si128(c2[4248],simde_mm_xor_si128(c2[888],simde_mm_xor_si128(c2[2342],simde_mm_xor_si128(c2[7664],simde_mm_xor_si128(c2[405],simde_mm_xor_si128(c2[4300],simde_mm_xor_si128(c2[5258],simde_mm_xor_si128(c2[1872],simde_mm_xor_si128(c2[443],simde_mm_xor_si128(c2[4312],simde_mm_xor_si128(c2[4803],simde_mm_xor_si128(c2[5287],simde_mm_xor_si128(c2[6754],simde_mm_xor_si128(c2[7729],simde_mm_xor_si128(c2[4825],c2[5309])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
 
 //row: 4
-     d2[44]=_mm_xor_si128(c2[4842],c2[27]);
+     d2[44]=simde_mm_xor_si128(c2[4842],c2[27]);
 
 //row: 5
-     d2[55]=_mm_xor_si128(c2[487],_mm_xor_si128(c2[3872],_mm_xor_si128(c2[6301],_mm_xor_si128(c2[5809],_mm_xor_si128(c2[5326],_mm_xor_si128(c2[509],_mm_xor_si128(c2[6806],_mm_xor_si128(c2[4379],_mm_xor_si128(c2[998],_mm_xor_si128(c2[1018],_mm_xor_si128(c2[5861],_mm_xor_si128(c2[53],_mm_xor_si128(c2[2491],_mm_xor_si128(c2[5391],_mm_xor_si128(c2[5883],_mm_xor_si128(c2[5394],_mm_xor_si128(c2[6385],_mm_xor_si128(c2[4447],_mm_xor_si128(c2[1058],_mm_xor_si128(c2[4958],_mm_xor_si128(c2[3499],_mm_xor_si128(c2[4952],_mm_xor_si128(c2[138],_mm_xor_si128(c2[2069],_mm_xor_si128(c2[3040],_mm_xor_si128(c2[5480],_mm_xor_si128(c2[5481],_mm_xor_si128(c2[644],_mm_xor_si128(c2[4535],_mm_xor_si128(c2[4058],_mm_xor_si128(c2[3081],_mm_xor_si128(c2[6984],_mm_xor_si128(c2[6499],_mm_xor_si128(c2[685],_mm_xor_si128(c2[2640],_mm_xor_si128(c2[5063],_mm_xor_si128(c2[2642],_mm_xor_si128(c2[6056],_mm_xor_si128(c2[1216],_mm_xor_si128(c2[726],_mm_xor_si128(c2[7045],_mm_xor_si128(c2[4136],_mm_xor_si128(c2[2206],_mm_xor_si128(c2[1241],_mm_xor_si128(c2[2230],_mm_xor_si128(c2[4644],_mm_xor_si128(c2[4159],_mm_xor_si128(c2[4669],_mm_xor_si128(c2[4666],_mm_xor_si128(c2[2251],_mm_xor_si128(c2[340],_mm_xor_si128(c2[6632],_mm_xor_si128(c2[4205],_mm_xor_si128(c2[3262],_mm_xor_si128(c2[6160],_mm_xor_si128(c2[2288],_mm_xor_si128(c2[3747],_mm_xor_si128(c2[378],_mm_xor_si128(c2[7153],_mm_xor_si128(c2[1351],_mm_xor_si128(c2[5723],_mm_xor_si128(c2[7177],_mm_xor_si128(c2[5240],_mm_xor_si128(c2[1392],_mm_xor_si128(c2[2361],_mm_xor_si128(c2[6718],_mm_xor_si128(c2[5289],_mm_xor_si128(c2[1415],_mm_xor_si128(c2[2379],_mm_xor_si128(c2[3857],_mm_xor_si128(c2[4821],_mm_xor_si128(c2[2401],c2[1435]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+     d2[55]=simde_mm_xor_si128(c2[487],simde_mm_xor_si128(c2[3872],simde_mm_xor_si128(c2[6301],simde_mm_xor_si128(c2[5809],simde_mm_xor_si128(c2[5326],simde_mm_xor_si128(c2[509],simde_mm_xor_si128(c2[6806],simde_mm_xor_si128(c2[4379],simde_mm_xor_si128(c2[998],simde_mm_xor_si128(c2[1018],simde_mm_xor_si128(c2[5861],simde_mm_xor_si128(c2[53],simde_mm_xor_si128(c2[2491],simde_mm_xor_si128(c2[5391],simde_mm_xor_si128(c2[5883],simde_mm_xor_si128(c2[5394],simde_mm_xor_si128(c2[6385],simde_mm_xor_si128(c2[4447],simde_mm_xor_si128(c2[1058],simde_mm_xor_si128(c2[4958],simde_mm_xor_si128(c2[3499],simde_mm_xor_si128(c2[4952],simde_mm_xor_si128(c2[138],simde_mm_xor_si128(c2[2069],simde_mm_xor_si128(c2[3040],simde_mm_xor_si128(c2[5480],simde_mm_xor_si128(c2[5481],simde_mm_xor_si128(c2[644],simde_mm_xor_si128(c2[4535],simde_mm_xor_si128(c2[4058],simde_mm_xor_si128(c2[3081],simde_mm_xor_si128(c2[6984],simde_mm_xor_si128(c2[6499],simde_mm_xor_si128(c2[685],simde_mm_xor_si128(c2[2640],simde_mm_xor_si128(c2[5063],simde_mm_xor_si128(c2[2642],simde_mm_xor_si128(c2[6056],simde_mm_xor_si128(c2[1216],simde_mm_xor_si128(c2[726],simde_mm_xor_si128(c2[7045],simde_mm_xor_si128(c2[4136],simde_mm_xor_si128(c2[2206],simde_mm_xor_si128(c2[1241],simde_mm_xor_si128(c2[2230],simde_mm_xor_si128(c2[4644],simde_mm_xor_si128(c2[4159],simde_mm_xor_si128(c2[4669],simde_mm_xor_si128(c2[4666],simde_mm_xor_si128(c2[2251],simde_mm_xor_si128(c2[340],simde_mm_xor_si128(c2[6632],simde_mm_xor_si128(c2[4205],simde_mm_xor_si128(c2[3262],simde_mm_xor_si128(c2[6160],simde_mm_xor_si128(c2[2288],simde_mm_xor_si128(c2[3747],simde_mm_xor_si128(c2[378],simde_mm_xor_si128(c2[7153],simde_mm_xor_si128(c2[1351],simde_mm_xor_si128(c2[5723],simde_mm_xor_si128(c2[7177],simde_mm_xor_si128(c2[5240],simde_mm_xor_si128(c2[1392],simde_mm_xor_si128(c2[2361],simde_mm_xor_si128(c2[6718],simde_mm_xor_si128(c2[5289],simde_mm_xor_si128(c2[1415],simde_mm_xor_si128(c2[2379],simde_mm_xor_si128(c2[3857],simde_mm_xor_si128(c2[4821],simde_mm_xor_si128(c2[2401],c2[1435]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
 
 //row: 6
-     d2[66]=_mm_xor_si128(c2[2911],_mm_xor_si128(c2[4492],_mm_xor_si128(c2[5069],_mm_xor_si128(c2[1700],_mm_xor_si128(c2[6104],_mm_xor_si128(c2[3287],_mm_xor_si128(c2[6210],c2[932])))))));
+     d2[66]=simde_mm_xor_si128(c2[2911],simde_mm_xor_si128(c2[4492],simde_mm_xor_si128(c2[5069],simde_mm_xor_si128(c2[1700],simde_mm_xor_si128(c2[6104],simde_mm_xor_si128(c2[3287],simde_mm_xor_si128(c2[6210],c2[932])))))));
 
 //row: 7
-     d2[77]=_mm_xor_si128(c2[1452],_mm_xor_si128(c2[3416],_mm_xor_si128(c2[91],_mm_xor_si128(c2[4517],_mm_xor_si128(c2[3566],c2[3698])))));
+     d2[77]=simde_mm_xor_si128(c2[1452],simde_mm_xor_si128(c2[3416],simde_mm_xor_si128(c2[91],simde_mm_xor_si128(c2[4517],simde_mm_xor_si128(c2[3566],c2[3698])))));
 
 //row: 8
-     d2[88]=_mm_xor_si128(c2[5],_mm_xor_si128(c2[5324],_mm_xor_si128(c2[3390],_mm_xor_si128(c2[977],_mm_xor_si128(c2[5808],_mm_xor_si128(c2[2911],_mm_xor_si128(c2[3395],_mm_xor_si128(c2[5327],_mm_xor_si128(c2[2430],_mm_xor_si128(c2[2914],_mm_xor_si128(c2[5818],_mm_xor_si128(c2[27],_mm_xor_si128(c2[5346],_mm_xor_si128(c2[6324],_mm_xor_si128(c2[3416],_mm_xor_si128(c2[3900],_mm_xor_si128(c2[3897],_mm_xor_si128(c2[1000],_mm_xor_si128(c2[1484],_mm_xor_si128(c2[5350],_mm_xor_si128(c2[536],_mm_xor_si128(c2[5855],_mm_xor_si128(c2[5368],_mm_xor_si128(c2[2955],_mm_xor_si128(c2[7314],_mm_xor_si128(c2[4406],_mm_xor_si128(c2[4890],_mm_xor_si128(c2[2009],_mm_xor_si128(c2[7328],_mm_xor_si128(c2[4909],_mm_xor_si128(c2[2496],_mm_xor_si128(c2[5390],_mm_xor_si128(c2[2493],_mm_xor_si128(c2[2977],_mm_xor_si128(c2[2971],_mm_xor_si128(c2[5903],_mm_xor_si128(c2[3479],_mm_xor_si128(c2[3965],_mm_xor_si128(c2[1057],_mm_xor_si128(c2[1541],_mm_xor_si128(c2[576],_mm_xor_si128(c2[5422],_mm_xor_si128(c2[5906],_mm_xor_si128(c2[4476],_mm_xor_si128(c2[2052],_mm_xor_si128(c2[3017],_mm_xor_si128(c2[604],_mm_xor_si128(c2[4470],_mm_xor_si128(c2[1562],_mm_xor_si128(c2[2046],_mm_xor_si128(c2[7399],_mm_xor_si128(c2[4975],_mm_xor_si128(c2[1587],_mm_xor_si128(c2[6433],_mm_xor_si128(c2[6917],_mm_xor_si128(c2[2558],_mm_xor_si128(c2[7393],_mm_xor_si128(c2[134],_mm_xor_si128(c2[4998],_mm_xor_si128(c2[2574],_mm_xor_si128(c2[4999],_mm_xor_si128(c2[2091],_mm_xor_si128(c2[2575],_mm_xor_si128(c2[162],_mm_xor_si128(c2[4997],_mm_xor_si128(c2[5481],_mm_xor_si128(c2[4053],_mm_xor_si128(c2[1629],_mm_xor_si128(c2[3565],_mm_xor_si128(c2[668],_mm_xor_si128(c2[1152],_mm_xor_si128(c2[2599],_mm_xor_si128(c2[7445],_mm_xor_si128(c2[186],_mm_xor_si128(c2[6491],_mm_xor_si128(c2[4078],_mm_xor_si128(c2[6006],_mm_xor_si128(c2[3593],_mm_xor_si128(c2[203],_mm_xor_si128(c2[5038],_mm_xor_si128(c2[5522],_mm_xor_si128(c2[2158],_mm_xor_si128(c2[7488],_mm_xor_si128(c2[4581],_mm_xor_si128(c2[1673],_mm_xor_si128(c2[2157],_mm_xor_si128(c2[2160],_mm_xor_si128(c2[7006],_mm_xor_si128(c2[7490],_mm_xor_si128(c2[5574],_mm_xor_si128(c2[3150],_mm_xor_si128(c2[734],_mm_xor_si128(c2[6053],_mm_xor_si128(c2[244],_mm_xor_si128(c2[5090],_mm_xor_si128(c2[5574],_mm_xor_si128(c2[6563],_mm_xor_si128(c2[4139],_mm_xor_si128(c2[3654],_mm_xor_si128(c2[1241],_mm_xor_si128(c2[1724],_mm_xor_si128(c2[6559],_mm_xor_si128(c2[7043],_mm_xor_si128(c2[4144],_mm_xor_si128(c2[1748],_mm_xor_si128(c2[7067],_mm_xor_si128(c2[4162],_mm_xor_si128(c2[1254],_mm_xor_si128(c2[1738],_mm_xor_si128(c2[3677],_mm_xor_si128(c2[780],_mm_xor_si128(c2[1264],_mm_xor_si128(c2[4187],_mm_xor_si128(c2[1763],_mm_xor_si128(c2[4184],_mm_xor_si128(c2[1276],_mm_xor_si128(c2[1760],_mm_xor_si128(c2[1769],_mm_xor_si128(c2[6604],_mm_xor_si128(c2[7088],_mm_xor_si128(c2[7590],_mm_xor_si128(c2[5177],_mm_xor_si128(c2[6139],_mm_xor_si128(c2[3726],_mm_xor_si128(c2[3723],_mm_xor_si128(c2[815],_mm_xor_si128(c2[1299],_mm_xor_si128(c2[2780],_mm_xor_si128(c2[356],_mm_xor_si128(c2[5678],_mm_xor_si128(c2[3265],_mm_xor_si128(c2[1806],_mm_xor_si128(c2[6652],_mm_xor_si128(c2[7136],_mm_xor_si128(c2[355],_mm_xor_si128(c2[7639],_mm_xor_si128(c2[5215],_mm_xor_si128(c2[6671],_mm_xor_si128(c2[3763],_mm_xor_si128(c2[4247],_mm_xor_si128(c2[858],_mm_xor_si128(c2[5704],_mm_xor_si128(c2[6188],_mm_xor_si128(c2[5241],_mm_xor_si128(c2[2817],_mm_xor_si128(c2[6695],_mm_xor_si128(c2[3787],_mm_xor_si128(c2[4271],_mm_xor_si128(c2[4758],_mm_xor_si128(c2[1850],_mm_xor_si128(c2[2334],_mm_xor_si128(c2[910],_mm_xor_si128(c2[6229],_mm_xor_si128(c2[1879],_mm_xor_si128(c2[7198],_mm_xor_si128(c2[6236],_mm_xor_si128(c2[3328],_mm_xor_si128(c2[3812],_mm_xor_si128(c2[903],_mm_xor_si128(c2[4796],_mm_xor_si128(c2[2383],_mm_xor_si128(c2[933],_mm_xor_si128(c2[5768],_mm_xor_si128(c2[6252],_mm_xor_si128(c2[1897],_mm_xor_si128(c2[6732],_mm_xor_si128(c2[7216],_mm_xor_si128(c2[3375],_mm_xor_si128(c2[951],_mm_xor_si128(c2[4339],_mm_xor_si128(c2[1915],_mm_xor_si128(c2[1919],_mm_xor_si128(c2[6754],_mm_xor_si128(c2[7238],c2[5789]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+     d2[88]=simde_mm_xor_si128(c2[5],simde_mm_xor_si128(c2[5324],simde_mm_xor_si128(c2[3390],simde_mm_xor_si128(c2[977],simde_mm_xor_si128(c2[5808],simde_mm_xor_si128(c2[2911],simde_mm_xor_si128(c2[3395],simde_mm_xor_si128(c2[5327],simde_mm_xor_si128(c2[2430],simde_mm_xor_si128(c2[2914],simde_mm_xor_si128(c2[5818],simde_mm_xor_si128(c2[27],simde_mm_xor_si128(c2[5346],simde_mm_xor_si128(c2[6324],simde_mm_xor_si128(c2[3416],simde_mm_xor_si128(c2[3900],simde_mm_xor_si128(c2[3897],simde_mm_xor_si128(c2[1000],simde_mm_xor_si128(c2[1484],simde_mm_xor_si128(c2[5350],simde_mm_xor_si128(c2[536],simde_mm_xor_si128(c2[5855],simde_mm_xor_si128(c2[5368],simde_mm_xor_si128(c2[2955],simde_mm_xor_si128(c2[7314],simde_mm_xor_si128(c2[4406],simde_mm_xor_si128(c2[4890],simde_mm_xor_si128(c2[2009],simde_mm_xor_si128(c2[7328],simde_mm_xor_si128(c2[4909],simde_mm_xor_si128(c2[2496],simde_mm_xor_si128(c2[5390],simde_mm_xor_si128(c2[2493],simde_mm_xor_si128(c2[2977],simde_mm_xor_si128(c2[2971],simde_mm_xor_si128(c2[5903],simde_mm_xor_si128(c2[3479],simde_mm_xor_si128(c2[3965],simde_mm_xor_si128(c2[1057],simde_mm_xor_si128(c2[1541],simde_mm_xor_si128(c2[576],simde_mm_xor_si128(c2[5422],simde_mm_xor_si128(c2[5906],simde_mm_xor_si128(c2[4476],simde_mm_xor_si128(c2[2052],simde_mm_xor_si128(c2[3017],simde_mm_xor_si128(c2[604],simde_mm_xor_si128(c2[4470],simde_mm_xor_si128(c2[1562],simde_mm_xor_si128(c2[2046],simde_mm_xor_si128(c2[7399],simde_mm_xor_si128(c2[4975],simde_mm_xor_si128(c2[1587],simde_mm_xor_si128(c2[6433],simde_mm_xor_si128(c2[6917],simde_mm_xor_si128(c2[2558],simde_mm_xor_si128(c2[7393],simde_mm_xor_si128(c2[134],simde_mm_xor_si128(c2[4998],simde_mm_xor_si128(c2[2574],simde_mm_xor_si128(c2[4999],simde_mm_xor_si128(c2[2091],simde_mm_xor_si128(c2[2575],simde_mm_xor_si128(c2[162],simde_mm_xor_si128(c2[4997],simde_mm_xor_si128(c2[5481],simde_mm_xor_si128(c2[4053],simde_mm_xor_si128(c2[1629],simde_mm_xor_si128(c2[3565],simde_mm_xor_si128(c2[668],simde_mm_xor_si128(c2[1152],simde_mm_xor_si128(c2[2599],simde_mm_xor_si128(c2[7445],simde_mm_xor_si128(c2[186],simde_mm_xor_si128(c2[6491],simde_mm_xor_si128(c2[4078],simde_mm_xor_si128(c2[6006],simde_mm_xor_si128(c2[3593],simde_mm_xor_si128(c2[203],simde_mm_xor_si128(c2[5038],simde_mm_xor_si128(c2[5522],simde_mm_xor_si128(c2[2158],simde_mm_xor_si128(c2[7488],simde_mm_xor_si128(c2[4581],simde_mm_xor_si128(c2[1673],simde_mm_xor_si128(c2[2157],simde_mm_xor_si128(c2[2160],simde_mm_xor_si128(c2[7006],simde_mm_xor_si128(c2[7490],simde_mm_xor_si128(c2[5574],simde_mm_xor_si128(c2[3150],simde_mm_xor_si128(c2[734],simde_mm_xor_si128(c2[6053],simde_mm_xor_si128(c2[244],simde_mm_xor_si128(c2[5090],simde_mm_xor_si128(c2[5574],simde_mm_xor_si128(c2[6563],simde_mm_xor_si128(c2[4139],simde_mm_xor_si128(c2[3654],simde_mm_xor_si128(c2[1241],simde_mm_xor_si128(c2[1724],simde_mm_xor_si128(c2[6559],simde_mm_xor_si128(c2[7043],simde_mm_xor_si128(c2[4144],simde_mm_xor_si128(c2[1748],simde_mm_xor_si128(c2[7067],simde_mm_xor_si128(c2[4162],simde_mm_xor_si128(c2[1254],simde_mm_xor_si128(c2[1738],simde_mm_xor_si128(c2[3677],simde_mm_xor_si128(c2[780],simde_mm_xor_si128(c2[1264],simde_mm_xor_si128(c2[4187],simde_mm_xor_si128(c2[1763],simde_mm_xor_si128(c2[4184],simde_mm_xor_si128(c2[1276],simde_mm_xor_si128(c2[1760],simde_mm_xor_si128(c2[1769],simde_mm_xor_si128(c2[6604],simde_mm_xor_si128(c2[7088],simde_mm_xor_si128(c2[7590],simde_mm_xor_si128(c2[5177],simde_mm_xor_si128(c2[6139],simde_mm_xor_si128(c2[3726],simde_mm_xor_si128(c2[3723],simde_mm_xor_si128(c2[815],simde_mm_xor_si128(c2[1299],simde_mm_xor_si128(c2[2780],simde_mm_xor_si128(c2[356],simde_mm_xor_si128(c2[5678],simde_mm_xor_si128(c2[3265],simde_mm_xor_si128(c2[1806],simde_mm_xor_si128(c2[6652],simde_mm_xor_si128(c2[7136],simde_mm_xor_si128(c2[355],simde_mm_xor_si128(c2[7639],simde_mm_xor_si128(c2[5215],simde_mm_xor_si128(c2[6671],simde_mm_xor_si128(c2[3763],simde_mm_xor_si128(c2[4247],simde_mm_xor_si128(c2[858],simde_mm_xor_si128(c2[5704],simde_mm_xor_si128(c2[6188],simde_mm_xor_si128(c2[5241],simde_mm_xor_si128(c2[2817],simde_mm_xor_si128(c2[6695],simde_mm_xor_si128(c2[3787],simde_mm_xor_si128(c2[4271],simde_mm_xor_si128(c2[4758],simde_mm_xor_si128(c2[1850],simde_mm_xor_si128(c2[2334],simde_mm_xor_si128(c2[910],simde_mm_xor_si128(c2[6229],simde_mm_xor_si128(c2[1879],simde_mm_xor_si128(c2[7198],simde_mm_xor_si128(c2[6236],simde_mm_xor_si128(c2[3328],simde_mm_xor_si128(c2[3812],simde_mm_xor_si128(c2[903],simde_mm_xor_si128(c2[4796],simde_mm_xor_si128(c2[2383],simde_mm_xor_si128(c2[933],simde_mm_xor_si128(c2[5768],simde_mm_xor_si128(c2[6252],simde_mm_xor_si128(c2[1897],simde_mm_xor_si128(c2[6732],simde_mm_xor_si128(c2[7216],simde_mm_xor_si128(c2[3375],simde_mm_xor_si128(c2[951],simde_mm_xor_si128(c2[4339],simde_mm_xor_si128(c2[1915],simde_mm_xor_si128(c2[1919],simde_mm_xor_si128(c2[6754],simde_mm_xor_si128(c2[7238],c2[5789]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
 
 //row: 9
-     d2[99]=_mm_xor_si128(c2[5817],_mm_xor_si128(c2[3899],_mm_xor_si128(c2[2647],_mm_xor_si128(c2[7508],_mm_xor_si128(c2[6099],_mm_xor_si128(c2[4255],_mm_xor_si128(c2[3785],c2[934])))))));
+     d2[99]=simde_mm_xor_si128(c2[5817],simde_mm_xor_si128(c2[3899],simde_mm_xor_si128(c2[2647],simde_mm_xor_si128(c2[7508],simde_mm_xor_si128(c2[6099],simde_mm_xor_si128(c2[4255],simde_mm_xor_si128(c2[3785],c2[934])))))));
 
 //row: 10
-     d2[110]=_mm_xor_si128(c2[7292],_mm_xor_si128(c2[6340],_mm_xor_si128(c2[5413],_mm_xor_si128(c2[3545],_mm_xor_si128(c2[663],c2[5637])))));
+     d2[110]=simde_mm_xor_si128(c2[7292],simde_mm_xor_si128(c2[6340],simde_mm_xor_si128(c2[5413],simde_mm_xor_si128(c2[3545],simde_mm_xor_si128(c2[663],c2[5637])))));
 
 //row: 11
-     d2[121]=_mm_xor_si128(c2[3878],_mm_xor_si128(c2[2912],_mm_xor_si128(c2[3396],_mm_xor_si128(c2[7263],_mm_xor_si128(c2[6781],_mm_xor_si128(c2[1938],_mm_xor_si128(c2[1456],_mm_xor_si128(c2[1457],_mm_xor_si128(c2[975],_mm_xor_si128(c2[4357],_mm_xor_si128(c2[3900],_mm_xor_si128(c2[2934],_mm_xor_si128(c2[3418],_mm_xor_si128(c2[2443],_mm_xor_si128(c2[1961],_mm_xor_si128(c2[27],_mm_xor_si128(c2[7288],_mm_xor_si128(c2[999],_mm_xor_si128(c2[4409],_mm_xor_si128(c2[3432],_mm_xor_si128(c2[3916],_mm_xor_si128(c2[1498],_mm_xor_si128(c2[1016],_mm_xor_si128(c2[3433],_mm_xor_si128(c2[2951],_mm_xor_si128(c2[5882],_mm_xor_si128(c2[4916],_mm_xor_si128(c2[5400],_mm_xor_si128(c2[1039],_mm_xor_si128(c2[557],_mm_xor_si128(c2[1520],_mm_xor_si128(c2[1038],_mm_xor_si128(c2[2033],_mm_xor_si128(c2[1540],_mm_xor_si128(c2[95],_mm_xor_si128(c2[7356],_mm_xor_si128(c2[4449],_mm_xor_si128(c2[3967],_mm_xor_si128(c2[595],_mm_xor_si128(c2[7372],_mm_xor_si128(c2[113],_mm_xor_si128(c2[6890],_mm_xor_si128(c2[6408],_mm_xor_si128(c2[600],_mm_xor_si128(c2[118],_mm_xor_si128(c2[3529],_mm_xor_si128(c2[2552],_mm_xor_si128(c2[3036],_mm_xor_si128(c2[5460],_mm_xor_si128(c2[4978],_mm_xor_si128(c2[6431],_mm_xor_si128(c2[5949],_mm_xor_si128(c2[1128],_mm_xor_si128(c2[646],_mm_xor_si128(c2[1129],_mm_xor_si128(c2[647],_mm_xor_si128(c2[4035],_mm_xor_si128(c2[3542],_mm_xor_si128(c2[183],_mm_xor_si128(c2[7444],_mm_xor_si128(c2[7438],_mm_xor_si128(c2[6956],_mm_xor_si128(c2[6472],_mm_xor_si128(c2[5990],_mm_xor_si128(c2[2621],_mm_xor_si128(c2[1655],_mm_xor_si128(c2[2139],_mm_xor_si128(c2[2136],_mm_xor_si128(c2[1654],_mm_xor_si128(c2[4076],_mm_xor_si128(c2[3594],_mm_xor_si128(c2[6031],_mm_xor_si128(c2[5065],_mm_xor_si128(c2[5549],_mm_xor_si128(c2[711],_mm_xor_si128(c2[229],_mm_xor_si128(c2[6033],_mm_xor_si128(c2[5551],_mm_xor_si128(c2[1704],_mm_xor_si128(c2[727],_mm_xor_si128(c2[1211],_mm_xor_si128(c2[4607],_mm_xor_si128(c2[4114],_mm_xor_si128(c2[4117],_mm_xor_si128(c2[3635],_mm_xor_si128(c2[2693],_mm_xor_si128(c2[1716],_mm_xor_si128(c2[2200],_mm_xor_si128(c2[7527],_mm_xor_si128(c2[7045],_mm_xor_si128(c2[5597],_mm_xor_si128(c2[5104],_mm_xor_si128(c2[4137],_mm_xor_si128(c2[5610],_mm_xor_si128(c2[4644],_mm_xor_si128(c2[5128],_mm_xor_si128(c2[292],_mm_xor_si128(c2[7553],_mm_xor_si128(c2[7550],_mm_xor_si128(c2[7068],_mm_xor_si128(c2[317],_mm_xor_si128(c2[7578],_mm_xor_si128(c2[314],_mm_xor_si128(c2[7575],_mm_xor_si128(c2[5642],_mm_xor_si128(c2[5149],_mm_xor_si128(c2[3720],_mm_xor_si128(c2[2754],_mm_xor_si128(c2[3238],_mm_xor_si128(c2[2269],_mm_xor_si128(c2[1787],_mm_xor_si128(c2[7596],_mm_xor_si128(c2[7114],_mm_xor_si128(c2[6653],_mm_xor_si128(c2[5676],_mm_xor_si128(c2[6160],_mm_xor_si128(c2[1808],_mm_xor_si128(c2[1326],_mm_xor_si128(c2[5679],_mm_xor_si128(c2[5197],_mm_xor_si128(c2[7622],_mm_xor_si128(c2[3769],_mm_xor_si128(c2[3287],_mm_xor_si128(c2[2801],_mm_xor_si128(c2[2319],_mm_xor_si128(c2[4731],_mm_xor_si128(c2[4249],_mm_xor_si128(c2[1371],_mm_xor_si128(c2[405],_mm_xor_si128(c2[889],_mm_xor_si128(c2[2825],_mm_xor_si128(c2[2332],_mm_xor_si128(c2[888],_mm_xor_si128(c2[406],_mm_xor_si128(c2[4783],_mm_xor_si128(c2[3806],_mm_xor_si128(c2[4290],_mm_xor_si128(c2[5752],_mm_xor_si128(c2[5259],_mm_xor_si128(c2[2355],_mm_xor_si128(c2[1873],_mm_xor_si128(c2[926],_mm_xor_si128(c2[7703],_mm_xor_si128(c2[444],_mm_xor_si128(c2[4806],_mm_xor_si128(c2[4313],_mm_xor_si128(c2[5770],_mm_xor_si128(c2[5288],_mm_xor_si128(c2[7248],_mm_xor_si128(c2[6271],_mm_xor_si128(c2[6755],_mm_xor_si128(c2[469],_mm_xor_si128(c2[7730],_mm_xor_si128(c2[5792],_mm_xor_si128(c2[5310],c2[3376])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+     d2[121]=simde_mm_xor_si128(c2[3878],simde_mm_xor_si128(c2[2912],simde_mm_xor_si128(c2[3396],simde_mm_xor_si128(c2[7263],simde_mm_xor_si128(c2[6781],simde_mm_xor_si128(c2[1938],simde_mm_xor_si128(c2[1456],simde_mm_xor_si128(c2[1457],simde_mm_xor_si128(c2[975],simde_mm_xor_si128(c2[4357],simde_mm_xor_si128(c2[3900],simde_mm_xor_si128(c2[2934],simde_mm_xor_si128(c2[3418],simde_mm_xor_si128(c2[2443],simde_mm_xor_si128(c2[1961],simde_mm_xor_si128(c2[27],simde_mm_xor_si128(c2[7288],simde_mm_xor_si128(c2[999],simde_mm_xor_si128(c2[4409],simde_mm_xor_si128(c2[3432],simde_mm_xor_si128(c2[3916],simde_mm_xor_si128(c2[1498],simde_mm_xor_si128(c2[1016],simde_mm_xor_si128(c2[3433],simde_mm_xor_si128(c2[2951],simde_mm_xor_si128(c2[5882],simde_mm_xor_si128(c2[4916],simde_mm_xor_si128(c2[5400],simde_mm_xor_si128(c2[1039],simde_mm_xor_si128(c2[557],simde_mm_xor_si128(c2[1520],simde_mm_xor_si128(c2[1038],simde_mm_xor_si128(c2[2033],simde_mm_xor_si128(c2[1540],simde_mm_xor_si128(c2[95],simde_mm_xor_si128(c2[7356],simde_mm_xor_si128(c2[4449],simde_mm_xor_si128(c2[3967],simde_mm_xor_si128(c2[595],simde_mm_xor_si128(c2[7372],simde_mm_xor_si128(c2[113],simde_mm_xor_si128(c2[6890],simde_mm_xor_si128(c2[6408],simde_mm_xor_si128(c2[600],simde_mm_xor_si128(c2[118],simde_mm_xor_si128(c2[3529],simde_mm_xor_si128(c2[2552],simde_mm_xor_si128(c2[3036],simde_mm_xor_si128(c2[5460],simde_mm_xor_si128(c2[4978],simde_mm_xor_si128(c2[6431],simde_mm_xor_si128(c2[5949],simde_mm_xor_si128(c2[1128],simde_mm_xor_si128(c2[646],simde_mm_xor_si128(c2[1129],simde_mm_xor_si128(c2[647],simde_mm_xor_si128(c2[4035],simde_mm_xor_si128(c2[3542],simde_mm_xor_si128(c2[183],simde_mm_xor_si128(c2[7444],simde_mm_xor_si128(c2[7438],simde_mm_xor_si128(c2[6956],simde_mm_xor_si128(c2[6472],simde_mm_xor_si128(c2[5990],simde_mm_xor_si128(c2[2621],simde_mm_xor_si128(c2[1655],simde_mm_xor_si128(c2[2139],simde_mm_xor_si128(c2[2136],simde_mm_xor_si128(c2[1654],simde_mm_xor_si128(c2[4076],simde_mm_xor_si128(c2[3594],simde_mm_xor_si128(c2[6031],simde_mm_xor_si128(c2[5065],simde_mm_xor_si128(c2[5549],simde_mm_xor_si128(c2[711],simde_mm_xor_si128(c2[229],simde_mm_xor_si128(c2[6033],simde_mm_xor_si128(c2[5551],simde_mm_xor_si128(c2[1704],simde_mm_xor_si128(c2[727],simde_mm_xor_si128(c2[1211],simde_mm_xor_si128(c2[4607],simde_mm_xor_si128(c2[4114],simde_mm_xor_si128(c2[4117],simde_mm_xor_si128(c2[3635],simde_mm_xor_si128(c2[2693],simde_mm_xor_si128(c2[1716],simde_mm_xor_si128(c2[2200],simde_mm_xor_si128(c2[7527],simde_mm_xor_si128(c2[7045],simde_mm_xor_si128(c2[5597],simde_mm_xor_si128(c2[5104],simde_mm_xor_si128(c2[4137],simde_mm_xor_si128(c2[5610],simde_mm_xor_si128(c2[4644],simde_mm_xor_si128(c2[5128],simde_mm_xor_si128(c2[292],simde_mm_xor_si128(c2[7553],simde_mm_xor_si128(c2[7550],simde_mm_xor_si128(c2[7068],simde_mm_xor_si128(c2[317],simde_mm_xor_si128(c2[7578],simde_mm_xor_si128(c2[314],simde_mm_xor_si128(c2[7575],simde_mm_xor_si128(c2[5642],simde_mm_xor_si128(c2[5149],simde_mm_xor_si128(c2[3720],simde_mm_xor_si128(c2[2754],simde_mm_xor_si128(c2[3238],simde_mm_xor_si128(c2[2269],simde_mm_xor_si128(c2[1787],simde_mm_xor_si128(c2[7596],simde_mm_xor_si128(c2[7114],simde_mm_xor_si128(c2[6653],simde_mm_xor_si128(c2[5676],simde_mm_xor_si128(c2[6160],simde_mm_xor_si128(c2[1808],simde_mm_xor_si128(c2[1326],simde_mm_xor_si128(c2[5679],simde_mm_xor_si128(c2[5197],simde_mm_xor_si128(c2[7622],simde_mm_xor_si128(c2[3769],simde_mm_xor_si128(c2[3287],simde_mm_xor_si128(c2[2801],simde_mm_xor_si128(c2[2319],simde_mm_xor_si128(c2[4731],simde_mm_xor_si128(c2[4249],simde_mm_xor_si128(c2[1371],simde_mm_xor_si128(c2[405],simde_mm_xor_si128(c2[889],simde_mm_xor_si128(c2[2825],simde_mm_xor_si128(c2[2332],simde_mm_xor_si128(c2[888],simde_mm_xor_si128(c2[406],simde_mm_xor_si128(c2[4783],simde_mm_xor_si128(c2[3806],simde_mm_xor_si128(c2[4290],simde_mm_xor_si128(c2[5752],simde_mm_xor_si128(c2[5259],simde_mm_xor_si128(c2[2355],simde_mm_xor_si128(c2[1873],simde_mm_xor_si128(c2[926],simde_mm_xor_si128(c2[7703],simde_mm_xor_si128(c2[444],simde_mm_xor_si128(c2[4806],simde_mm_xor_si128(c2[4313],simde_mm_xor_si128(c2[5770],simde_mm_xor_si128(c2[5288],simde_mm_xor_si128(c2[7248],simde_mm_xor_si128(c2[6271],simde_mm_xor_si128(c2[6755],simde_mm_xor_si128(c2[469],simde_mm_xor_si128(c2[7730],simde_mm_xor_si128(c2[5792],simde_mm_xor_si128(c2[5310],c2[3376])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
 
 //row: 12
-     d2[132]=_mm_xor_si128(c2[5331],_mm_xor_si128(c2[4380],_mm_xor_si128(c2[7004],_mm_xor_si128(c2[7024],_mm_xor_si128(c2[286],c2[6208])))));
+     d2[132]=simde_mm_xor_si128(c2[5331],simde_mm_xor_si128(c2[4380],simde_mm_xor_si128(c2[7004],simde_mm_xor_si128(c2[7024],simde_mm_xor_si128(c2[286],c2[6208])))));
 
 //row: 13
-     d2[143]=_mm_xor_si128(c2[3389],_mm_xor_si128(c2[3873],_mm_xor_si128(c2[7269],_mm_xor_si128(c2[1944],_mm_xor_si128(c2[1452],_mm_xor_si128(c2[3396],_mm_xor_si128(c2[3411],_mm_xor_si128(c2[3895],_mm_xor_si128(c2[2449],_mm_xor_si128(c2[22],_mm_xor_si128(c2[3920],_mm_xor_si128(c2[4404],_mm_xor_si128(c2[1504],_mm_xor_si128(c2[3439],_mm_xor_si128(c2[5393],_mm_xor_si128(c2[5877],_mm_xor_si128(c2[1034],_mm_xor_si128(c2[1526],_mm_xor_si128(c2[5394],_mm_xor_si128(c2[2028],_mm_xor_si128(c2[90],_mm_xor_si128(c2[4444],_mm_xor_si128(c2[117],_mm_xor_si128(c2[601],_mm_xor_si128(c2[6896],_mm_xor_si128(c2[595],_mm_xor_si128(c2[3040],_mm_xor_si128(c2[3524],_mm_xor_si128(c2[5466],_mm_xor_si128(c2[6426],_mm_xor_si128(c2[1123],_mm_xor_si128(c2[1124],_mm_xor_si128(c2[4030],_mm_xor_si128(c2[4515],_mm_xor_si128(c2[178],_mm_xor_si128(c2[7444],_mm_xor_si128(c2[6478],_mm_xor_si128(c2[2143],_mm_xor_si128(c2[2627],_mm_xor_si128(c2[2142],_mm_xor_si128(c2[4071],_mm_xor_si128(c2[5553],_mm_xor_si128(c2[6037],_mm_xor_si128(c2[706],_mm_xor_si128(c2[6028],_mm_xor_si128(c2[1215],_mm_xor_si128(c2[1699],_mm_xor_si128(c2[4602],_mm_xor_si128(c2[4123],_mm_xor_si128(c2[2204],_mm_xor_si128(c2[2688],_mm_xor_si128(c2[7533],_mm_xor_si128(c2[5592],_mm_xor_si128(c2[5132],_mm_xor_si128(c2[5616],_mm_xor_si128(c2[287],_mm_xor_si128(c2[7556],_mm_xor_si128(c2[312],_mm_xor_si128(c2[309],_mm_xor_si128(c2[5637],_mm_xor_si128(c2[3242],_mm_xor_si128(c2[3726],_mm_xor_si128(c2[2275],_mm_xor_si128(c2[7591],_mm_xor_si128(c2[6164],_mm_xor_si128(c2[6648],_mm_xor_si128(c2[1814],_mm_xor_si128(c2[5685],_mm_xor_si128(c2[3764],_mm_xor_si128(c2[2796],_mm_xor_si128(c2[4737],_mm_xor_si128(c2[882],_mm_xor_si128(c2[1366],_mm_xor_si128(c2[2820],_mm_xor_si128(c2[883],_mm_xor_si128(c2[4294],_mm_xor_si128(c2[4778],_mm_xor_si128(c2[5747],_mm_xor_si128(c2[2361],_mm_xor_si128(c2[448],_mm_xor_si128(c2[932],_mm_xor_si128(c2[4801],_mm_xor_si128(c2[5765],_mm_xor_si128(c2[7221],_mm_xor_si128(c2[6759],_mm_xor_si128(c2[7243],_mm_xor_si128(c2[464],c2[5787])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+     d2[143]=simde_mm_xor_si128(c2[3389],simde_mm_xor_si128(c2[3873],simde_mm_xor_si128(c2[7269],simde_mm_xor_si128(c2[1944],simde_mm_xor_si128(c2[1452],simde_mm_xor_si128(c2[3396],simde_mm_xor_si128(c2[3411],simde_mm_xor_si128(c2[3895],simde_mm_xor_si128(c2[2449],simde_mm_xor_si128(c2[22],simde_mm_xor_si128(c2[3920],simde_mm_xor_si128(c2[4404],simde_mm_xor_si128(c2[1504],simde_mm_xor_si128(c2[3439],simde_mm_xor_si128(c2[5393],simde_mm_xor_si128(c2[5877],simde_mm_xor_si128(c2[1034],simde_mm_xor_si128(c2[1526],simde_mm_xor_si128(c2[5394],simde_mm_xor_si128(c2[2028],simde_mm_xor_si128(c2[90],simde_mm_xor_si128(c2[4444],simde_mm_xor_si128(c2[117],simde_mm_xor_si128(c2[601],simde_mm_xor_si128(c2[6896],simde_mm_xor_si128(c2[595],simde_mm_xor_si128(c2[3040],simde_mm_xor_si128(c2[3524],simde_mm_xor_si128(c2[5466],simde_mm_xor_si128(c2[6426],simde_mm_xor_si128(c2[1123],simde_mm_xor_si128(c2[1124],simde_mm_xor_si128(c2[4030],simde_mm_xor_si128(c2[4515],simde_mm_xor_si128(c2[178],simde_mm_xor_si128(c2[7444],simde_mm_xor_si128(c2[6478],simde_mm_xor_si128(c2[2143],simde_mm_xor_si128(c2[2627],simde_mm_xor_si128(c2[2142],simde_mm_xor_si128(c2[4071],simde_mm_xor_si128(c2[5553],simde_mm_xor_si128(c2[6037],simde_mm_xor_si128(c2[706],simde_mm_xor_si128(c2[6028],simde_mm_xor_si128(c2[1215],simde_mm_xor_si128(c2[1699],simde_mm_xor_si128(c2[4602],simde_mm_xor_si128(c2[4123],simde_mm_xor_si128(c2[2204],simde_mm_xor_si128(c2[2688],simde_mm_xor_si128(c2[7533],simde_mm_xor_si128(c2[5592],simde_mm_xor_si128(c2[5132],simde_mm_xor_si128(c2[5616],simde_mm_xor_si128(c2[287],simde_mm_xor_si128(c2[7556],simde_mm_xor_si128(c2[312],simde_mm_xor_si128(c2[309],simde_mm_xor_si128(c2[5637],simde_mm_xor_si128(c2[3242],simde_mm_xor_si128(c2[3726],simde_mm_xor_si128(c2[2275],simde_mm_xor_si128(c2[7591],simde_mm_xor_si128(c2[6164],simde_mm_xor_si128(c2[6648],simde_mm_xor_si128(c2[1814],simde_mm_xor_si128(c2[5685],simde_mm_xor_si128(c2[3764],simde_mm_xor_si128(c2[2796],simde_mm_xor_si128(c2[4737],simde_mm_xor_si128(c2[882],simde_mm_xor_si128(c2[1366],simde_mm_xor_si128(c2[2820],simde_mm_xor_si128(c2[883],simde_mm_xor_si128(c2[4294],simde_mm_xor_si128(c2[4778],simde_mm_xor_si128(c2[5747],simde_mm_xor_si128(c2[2361],simde_mm_xor_si128(c2[448],simde_mm_xor_si128(c2[932],simde_mm_xor_si128(c2[4801],simde_mm_xor_si128(c2[5765],simde_mm_xor_si128(c2[7221],simde_mm_xor_si128(c2[6759],simde_mm_xor_si128(c2[7243],simde_mm_xor_si128(c2[464],c2[5787])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
 
 //row: 14
-     d2[154]=_mm_xor_si128(c2[2905],_mm_xor_si128(c2[1241],_mm_xor_si128(c2[2756],_mm_xor_si128(c2[6169],_mm_xor_si128(c2[1346],c2[7242])))));
+     d2[154]=simde_mm_xor_si128(c2[2905],simde_mm_xor_si128(c2[1241],simde_mm_xor_si128(c2[2756],simde_mm_xor_si128(c2[6169],simde_mm_xor_si128(c2[1346],c2[7242])))));
 
 //row: 15
-     d2[165]=_mm_xor_si128(c2[3875],_mm_xor_si128(c2[7260],_mm_xor_si128(c2[1946],_mm_xor_si128(c2[970],_mm_xor_si128(c2[1454],_mm_xor_si128(c2[0],_mm_xor_si128(c2[3897],_mm_xor_si128(c2[2451],_mm_xor_si128(c2[7283],_mm_xor_si128(c2[24],_mm_xor_si128(c2[5840],_mm_xor_si128(c2[4406],_mm_xor_si128(c2[1506],_mm_xor_si128(c2[3441],_mm_xor_si128(c2[5879],_mm_xor_si128(c2[1036],_mm_xor_si128(c2[1044],_mm_xor_si128(c2[1528],_mm_xor_si128(c2[2030],_mm_xor_si128(c2[92],_mm_xor_si128(c2[3962],_mm_xor_si128(c2[4446],_mm_xor_si128(c2[603],_mm_xor_si128(c2[6887],_mm_xor_si128(c2[597],_mm_xor_si128(c2[3526],_mm_xor_si128(c2[5457],_mm_xor_si128(c2[5944],_mm_xor_si128(c2[6428],_mm_xor_si128(c2[1125],_mm_xor_si128(c2[1126],_mm_xor_si128(c2[3548],_mm_xor_si128(c2[4032],_mm_xor_si128(c2[180],_mm_xor_si128(c2[7446],_mm_xor_si128(c2[5985],_mm_xor_si128(c2[6469],_mm_xor_si128(c2[2618],_mm_xor_si128(c2[2144],_mm_xor_si128(c2[4073],_mm_xor_si128(c2[6028],_mm_xor_si128(c2[708],_mm_xor_si128(c2[5546],_mm_xor_si128(c2[6030],_mm_xor_si128(c2[7480],_mm_xor_si128(c2[1701],_mm_xor_si128(c2[4604],_mm_xor_si128(c2[3630],_mm_xor_si128(c2[4114],_mm_xor_si128(c2[2690],_mm_xor_si128(c2[7524],_mm_xor_si128(c2[5110],_mm_xor_si128(c2[5594],_mm_xor_si128(c2[5618],_mm_xor_si128(c2[289],_mm_xor_si128(c2[7063],_mm_xor_si128(c2[7547],_mm_xor_si128(c2[775],_mm_xor_si128(c2[314],_mm_xor_si128(c2[311],_mm_xor_si128(c2[5155],_mm_xor_si128(c2[5639],_mm_xor_si128(c2[3728],_mm_xor_si128(c2[2266],_mm_xor_si128(c2[7593],_mm_xor_si128(c2[6650],_mm_xor_si128(c2[1805],_mm_xor_si128(c2[5192],_mm_xor_si128(c2[5676],_mm_xor_si128(c2[3766],_mm_xor_si128(c2[2798],_mm_xor_si128(c2[4255],_mm_xor_si128(c2[4739],_mm_xor_si128(c2[1368],_mm_xor_si128(c2[2822],_mm_xor_si128(c2[401],_mm_xor_si128(c2[885],_mm_xor_si128(c2[396],_mm_xor_si128(c2[4780],_mm_xor_si128(c2[5749],_mm_xor_si128(c2[2363],_mm_xor_si128(c2[934],_mm_xor_si128(c2[4803],_mm_xor_si128(c2[5283],_mm_xor_si128(c2[5767],_mm_xor_si128(c2[7245],_mm_xor_si128(c2[466],_mm_xor_si128(c2[5305],c2[5789]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+     d2[165]=simde_mm_xor_si128(c2[3875],simde_mm_xor_si128(c2[7260],simde_mm_xor_si128(c2[1946],simde_mm_xor_si128(c2[970],simde_mm_xor_si128(c2[1454],simde_mm_xor_si128(c2[0],simde_mm_xor_si128(c2[3897],simde_mm_xor_si128(c2[2451],simde_mm_xor_si128(c2[7283],simde_mm_xor_si128(c2[24],simde_mm_xor_si128(c2[5840],simde_mm_xor_si128(c2[4406],simde_mm_xor_si128(c2[1506],simde_mm_xor_si128(c2[3441],simde_mm_xor_si128(c2[5879],simde_mm_xor_si128(c2[1036],simde_mm_xor_si128(c2[1044],simde_mm_xor_si128(c2[1528],simde_mm_xor_si128(c2[2030],simde_mm_xor_si128(c2[92],simde_mm_xor_si128(c2[3962],simde_mm_xor_si128(c2[4446],simde_mm_xor_si128(c2[603],simde_mm_xor_si128(c2[6887],simde_mm_xor_si128(c2[597],simde_mm_xor_si128(c2[3526],simde_mm_xor_si128(c2[5457],simde_mm_xor_si128(c2[5944],simde_mm_xor_si128(c2[6428],simde_mm_xor_si128(c2[1125],simde_mm_xor_si128(c2[1126],simde_mm_xor_si128(c2[3548],simde_mm_xor_si128(c2[4032],simde_mm_xor_si128(c2[180],simde_mm_xor_si128(c2[7446],simde_mm_xor_si128(c2[5985],simde_mm_xor_si128(c2[6469],simde_mm_xor_si128(c2[2618],simde_mm_xor_si128(c2[2144],simde_mm_xor_si128(c2[4073],simde_mm_xor_si128(c2[6028],simde_mm_xor_si128(c2[708],simde_mm_xor_si128(c2[5546],simde_mm_xor_si128(c2[6030],simde_mm_xor_si128(c2[7480],simde_mm_xor_si128(c2[1701],simde_mm_xor_si128(c2[4604],simde_mm_xor_si128(c2[3630],simde_mm_xor_si128(c2[4114],simde_mm_xor_si128(c2[2690],simde_mm_xor_si128(c2[7524],simde_mm_xor_si128(c2[5110],simde_mm_xor_si128(c2[5594],simde_mm_xor_si128(c2[5618],simde_mm_xor_si128(c2[289],simde_mm_xor_si128(c2[7063],simde_mm_xor_si128(c2[7547],simde_mm_xor_si128(c2[775],simde_mm_xor_si128(c2[314],simde_mm_xor_si128(c2[311],simde_mm_xor_si128(c2[5155],simde_mm_xor_si128(c2[5639],simde_mm_xor_si128(c2[3728],simde_mm_xor_si128(c2[2266],simde_mm_xor_si128(c2[7593],simde_mm_xor_si128(c2[6650],simde_mm_xor_si128(c2[1805],simde_mm_xor_si128(c2[5192],simde_mm_xor_si128(c2[5676],simde_mm_xor_si128(c2[3766],simde_mm_xor_si128(c2[2798],simde_mm_xor_si128(c2[4255],simde_mm_xor_si128(c2[4739],simde_mm_xor_si128(c2[1368],simde_mm_xor_si128(c2[2822],simde_mm_xor_si128(c2[401],simde_mm_xor_si128(c2[885],simde_mm_xor_si128(c2[396],simde_mm_xor_si128(c2[4780],simde_mm_xor_si128(c2[5749],simde_mm_xor_si128(c2[2363],simde_mm_xor_si128(c2[934],simde_mm_xor_si128(c2[4803],simde_mm_xor_si128(c2[5283],simde_mm_xor_si128(c2[5767],simde_mm_xor_si128(c2[7245],simde_mm_xor_si128(c2[466],simde_mm_xor_si128(c2[5305],c2[5789]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
 
 //row: 16
-     d2[176]=_mm_xor_si128(c2[3875],_mm_xor_si128(c2[7260],_mm_xor_si128(c2[1946],_mm_xor_si128(c2[1454],_mm_xor_si128(c2[3897],_mm_xor_si128(c2[2451],_mm_xor_si128(c2[24],_mm_xor_si128(c2[6798],_mm_xor_si128(c2[4406],_mm_xor_si128(c2[1506],_mm_xor_si128(c2[3441],_mm_xor_si128(c2[5879],_mm_xor_si128(c2[1036],_mm_xor_si128(c2[1528],_mm_xor_si128(c2[2493],_mm_xor_si128(c2[2030],_mm_xor_si128(c2[92],_mm_xor_si128(c2[4446],_mm_xor_si128(c2[603],_mm_xor_si128(c2[6887],_mm_xor_si128(c2[597],_mm_xor_si128(c2[3526],_mm_xor_si128(c2[5457],_mm_xor_si128(c2[6428],_mm_xor_si128(c2[1125],_mm_xor_si128(c2[1126],_mm_xor_si128(c2[4032],_mm_xor_si128(c2[180],_mm_xor_si128(c2[7446],_mm_xor_si128(c2[6469],_mm_xor_si128(c2[2618],_mm_xor_si128(c2[2144],_mm_xor_si128(c2[4073],_mm_xor_si128(c2[6028],_mm_xor_si128(c2[708],_mm_xor_si128(c2[6030],_mm_xor_si128(c2[1701],_mm_xor_si128(c2[4604],_mm_xor_si128(c2[4114],_mm_xor_si128(c2[6059],_mm_xor_si128(c2[2690],_mm_xor_si128(c2[7524],_mm_xor_si128(c2[5594],_mm_xor_si128(c2[5618],_mm_xor_si128(c2[289],_mm_xor_si128(c2[7547],_mm_xor_si128(c2[314],_mm_xor_si128(c2[311],_mm_xor_si128(c2[5639],_mm_xor_si128(c2[3728],_mm_xor_si128(c2[2266],_mm_xor_si128(c2[7593],_mm_xor_si128(c2[6650],_mm_xor_si128(c2[1805],_mm_xor_si128(c2[5676],_mm_xor_si128(c2[3766],_mm_xor_si128(c2[2798],_mm_xor_si128(c2[4739],_mm_xor_si128(c2[1368],_mm_xor_si128(c2[2822],_mm_xor_si128(c2[885],_mm_xor_si128(c2[4780],_mm_xor_si128(c2[5749],_mm_xor_si128(c2[2363],_mm_xor_si128(c2[934],_mm_xor_si128(c2[4803],_mm_xor_si128(c2[5767],_mm_xor_si128(c2[5773],_mm_xor_si128(c2[7245],_mm_xor_si128(c2[466],c2[5789]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+     d2[176]=simde_mm_xor_si128(c2[3875],simde_mm_xor_si128(c2[7260],simde_mm_xor_si128(c2[1946],simde_mm_xor_si128(c2[1454],simde_mm_xor_si128(c2[3897],simde_mm_xor_si128(c2[2451],simde_mm_xor_si128(c2[24],simde_mm_xor_si128(c2[6798],simde_mm_xor_si128(c2[4406],simde_mm_xor_si128(c2[1506],simde_mm_xor_si128(c2[3441],simde_mm_xor_si128(c2[5879],simde_mm_xor_si128(c2[1036],simde_mm_xor_si128(c2[1528],simde_mm_xor_si128(c2[2493],simde_mm_xor_si128(c2[2030],simde_mm_xor_si128(c2[92],simde_mm_xor_si128(c2[4446],simde_mm_xor_si128(c2[603],simde_mm_xor_si128(c2[6887],simde_mm_xor_si128(c2[597],simde_mm_xor_si128(c2[3526],simde_mm_xor_si128(c2[5457],simde_mm_xor_si128(c2[6428],simde_mm_xor_si128(c2[1125],simde_mm_xor_si128(c2[1126],simde_mm_xor_si128(c2[4032],simde_mm_xor_si128(c2[180],simde_mm_xor_si128(c2[7446],simde_mm_xor_si128(c2[6469],simde_mm_xor_si128(c2[2618],simde_mm_xor_si128(c2[2144],simde_mm_xor_si128(c2[4073],simde_mm_xor_si128(c2[6028],simde_mm_xor_si128(c2[708],simde_mm_xor_si128(c2[6030],simde_mm_xor_si128(c2[1701],simde_mm_xor_si128(c2[4604],simde_mm_xor_si128(c2[4114],simde_mm_xor_si128(c2[6059],simde_mm_xor_si128(c2[2690],simde_mm_xor_si128(c2[7524],simde_mm_xor_si128(c2[5594],simde_mm_xor_si128(c2[5618],simde_mm_xor_si128(c2[289],simde_mm_xor_si128(c2[7547],simde_mm_xor_si128(c2[314],simde_mm_xor_si128(c2[311],simde_mm_xor_si128(c2[5639],simde_mm_xor_si128(c2[3728],simde_mm_xor_si128(c2[2266],simde_mm_xor_si128(c2[7593],simde_mm_xor_si128(c2[6650],simde_mm_xor_si128(c2[1805],simde_mm_xor_si128(c2[5676],simde_mm_xor_si128(c2[3766],simde_mm_xor_si128(c2[2798],simde_mm_xor_si128(c2[4739],simde_mm_xor_si128(c2[1368],simde_mm_xor_si128(c2[2822],simde_mm_xor_si128(c2[885],simde_mm_xor_si128(c2[4780],simde_mm_xor_si128(c2[5749],simde_mm_xor_si128(c2[2363],simde_mm_xor_si128(c2[934],simde_mm_xor_si128(c2[4803],simde_mm_xor_si128(c2[5767],simde_mm_xor_si128(c2[5773],simde_mm_xor_si128(c2[7245],simde_mm_xor_si128(c2[466],c2[5789]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
 
 //row: 17
-     d2[187]=_mm_xor_si128(c2[6782],_mm_xor_si128(c2[2247],_mm_xor_si128(c2[3744],_mm_xor_si128(c2[2317],c2[7238]))));
+     d2[187]=simde_mm_xor_si128(c2[6782],simde_mm_xor_si128(c2[2247],simde_mm_xor_si128(c2[3744],simde_mm_xor_si128(c2[2317],c2[7238]))));
 
 //row: 18
-     d2[198]=_mm_xor_si128(c2[7284],_mm_xor_si128(c2[7046],_mm_xor_si128(c2[3194],_mm_xor_si128(c2[2816],c2[4778]))));
+     d2[198]=simde_mm_xor_si128(c2[7284],simde_mm_xor_si128(c2[7046],simde_mm_xor_si128(c2[3194],simde_mm_xor_si128(c2[2816],c2[4778]))));
 
 //row: 19
-     d2[209]=_mm_xor_si128(c2[3393],_mm_xor_si128(c2[6804],_mm_xor_si128(c2[1615],_mm_xor_si128(c2[1149],c2[6029]))));
+     d2[209]=simde_mm_xor_si128(c2[3393],simde_mm_xor_si128(c2[6804],simde_mm_xor_si128(c2[1615],simde_mm_xor_si128(c2[1149],c2[6029]))));
 
 //row: 20
-     d2[220]=_mm_xor_si128(c2[1945],_mm_xor_si128(c2[5330],_mm_xor_si128(c2[5],_mm_xor_si128(c2[7267],_mm_xor_si128(c2[4361],_mm_xor_si128(c2[1967],_mm_xor_si128(c2[510],_mm_xor_si128(c2[5837],_mm_xor_si128(c2[2465],_mm_xor_si128(c2[7308],_mm_xor_si128(c2[1500],_mm_xor_si128(c2[3938],_mm_xor_si128(c2[6849],_mm_xor_si128(c2[7330],_mm_xor_si128(c2[554],_mm_xor_si128(c2[89],_mm_xor_si128(c2[5905],_mm_xor_si128(c2[2516],_mm_xor_si128(c2[6405],_mm_xor_si128(c2[4957],_mm_xor_si128(c2[6410],_mm_xor_si128(c2[1585],_mm_xor_si128(c2[3527],_mm_xor_si128(c2[4498],_mm_xor_si128(c2[6938],_mm_xor_si128(c2[6939],_mm_xor_si128(c2[2091],_mm_xor_si128(c2[5993],_mm_xor_si128(c2[5505],_mm_xor_si128(c2[4539],_mm_xor_si128(c2[688],_mm_xor_si128(c2[203],_mm_xor_si128(c2[2143],_mm_xor_si128(c2[5531],_mm_xor_si128(c2[4098],_mm_xor_si128(c2[6521],_mm_xor_si128(c2[4100],_mm_xor_si128(c2[7503],_mm_xor_si128(c2[2663],_mm_xor_si128(c2[2184],_mm_xor_si128(c2[2182],_mm_xor_si128(c2[749],_mm_xor_si128(c2[5594],_mm_xor_si128(c2[3653],_mm_xor_si128(c2[3677],_mm_xor_si128(c2[6102],_mm_xor_si128(c2[5617],_mm_xor_si128(c2[6116],_mm_xor_si128(c2[6124],_mm_xor_si128(c2[3698],_mm_xor_si128(c2[1787],_mm_xor_si128(c2[336],_mm_xor_si128(c2[5663],_mm_xor_si128(c2[4709],_mm_xor_si128(c2[7618],_mm_xor_si128(c2[3746],_mm_xor_si128(c2[1836],_mm_xor_si128(c2[868],_mm_xor_si128(c2[2798],_mm_xor_si128(c2[7181],_mm_xor_si128(c2[881],_mm_xor_si128(c2[6698],_mm_xor_si128(c2[2839],_mm_xor_si128(c2[3808],_mm_xor_si128(c2[422],_mm_xor_si128(c2[6736],_mm_xor_si128(c2[2862],_mm_xor_si128(c2[3837],_mm_xor_si128(c2[5304],_mm_xor_si128(c2[6279],c2[3859]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+     d2[220]=simde_mm_xor_si128(c2[1945],simde_mm_xor_si128(c2[5330],simde_mm_xor_si128(c2[5],simde_mm_xor_si128(c2[7267],simde_mm_xor_si128(c2[4361],simde_mm_xor_si128(c2[1967],simde_mm_xor_si128(c2[510],simde_mm_xor_si128(c2[5837],simde_mm_xor_si128(c2[2465],simde_mm_xor_si128(c2[7308],simde_mm_xor_si128(c2[1500],simde_mm_xor_si128(c2[3938],simde_mm_xor_si128(c2[6849],simde_mm_xor_si128(c2[7330],simde_mm_xor_si128(c2[554],simde_mm_xor_si128(c2[89],simde_mm_xor_si128(c2[5905],simde_mm_xor_si128(c2[2516],simde_mm_xor_si128(c2[6405],simde_mm_xor_si128(c2[4957],simde_mm_xor_si128(c2[6410],simde_mm_xor_si128(c2[1585],simde_mm_xor_si128(c2[3527],simde_mm_xor_si128(c2[4498],simde_mm_xor_si128(c2[6938],simde_mm_xor_si128(c2[6939],simde_mm_xor_si128(c2[2091],simde_mm_xor_si128(c2[5993],simde_mm_xor_si128(c2[5505],simde_mm_xor_si128(c2[4539],simde_mm_xor_si128(c2[688],simde_mm_xor_si128(c2[203],simde_mm_xor_si128(c2[2143],simde_mm_xor_si128(c2[5531],simde_mm_xor_si128(c2[4098],simde_mm_xor_si128(c2[6521],simde_mm_xor_si128(c2[4100],simde_mm_xor_si128(c2[7503],simde_mm_xor_si128(c2[2663],simde_mm_xor_si128(c2[2184],simde_mm_xor_si128(c2[2182],simde_mm_xor_si128(c2[749],simde_mm_xor_si128(c2[5594],simde_mm_xor_si128(c2[3653],simde_mm_xor_si128(c2[3677],simde_mm_xor_si128(c2[6102],simde_mm_xor_si128(c2[5617],simde_mm_xor_si128(c2[6116],simde_mm_xor_si128(c2[6124],simde_mm_xor_si128(c2[3698],simde_mm_xor_si128(c2[1787],simde_mm_xor_si128(c2[336],simde_mm_xor_si128(c2[5663],simde_mm_xor_si128(c2[4709],simde_mm_xor_si128(c2[7618],simde_mm_xor_si128(c2[3746],simde_mm_xor_si128(c2[1836],simde_mm_xor_si128(c2[868],simde_mm_xor_si128(c2[2798],simde_mm_xor_si128(c2[7181],simde_mm_xor_si128(c2[881],simde_mm_xor_si128(c2[6698],simde_mm_xor_si128(c2[2839],simde_mm_xor_si128(c2[3808],simde_mm_xor_si128(c2[422],simde_mm_xor_si128(c2[6736],simde_mm_xor_si128(c2[2862],simde_mm_xor_si128(c2[3837],simde_mm_xor_si128(c2[5304],simde_mm_xor_si128(c2[6279],c2[3859]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
 
 //row: 21
-     d2[231]=_mm_xor_si128(c2[1000],_mm_xor_si128(c2[3987],_mm_xor_si128(c2[5202],_mm_xor_si128(c2[7708],c2[6760]))));
+     d2[231]=simde_mm_xor_si128(c2[1000],simde_mm_xor_si128(c2[3987],simde_mm_xor_si128(c2[5202],simde_mm_xor_si128(c2[7708],c2[6760]))));
 
 //row: 22
-     d2[242]=_mm_xor_si128(c2[3878],_mm_xor_si128(c2[6565],_mm_xor_si128(c2[6097],c2[865])));
+     d2[242]=simde_mm_xor_si128(c2[3878],simde_mm_xor_si128(c2[6565],simde_mm_xor_si128(c2[6097],c2[865])));
 
 //row: 23
-     d2[253]=_mm_xor_si128(c2[991],_mm_xor_si128(c2[2948],_mm_xor_si128(c2[2640],c2[404])));
+     d2[253]=simde_mm_xor_si128(c2[991],simde_mm_xor_si128(c2[2948],simde_mm_xor_si128(c2[2640],c2[404])));
 
 //row: 24
-     d2[264]=_mm_xor_si128(c2[3880],_mm_xor_si128(c2[7265],_mm_xor_si128(c2[1940],_mm_xor_si128(c2[1459],_mm_xor_si128(c2[2906],_mm_xor_si128(c2[3902],_mm_xor_si128(c2[2445],_mm_xor_si128(c2[29],_mm_xor_si128(c2[4400],_mm_xor_si128(c2[1500],_mm_xor_si128(c2[3435],_mm_xor_si128(c2[5884],_mm_xor_si128(c2[1041],_mm_xor_si128(c2[1522],_mm_xor_si128(c2[4916],_mm_xor_si128(c2[2024],_mm_xor_si128(c2[97],_mm_xor_si128(c2[4451],_mm_xor_si128(c2[4448],_mm_xor_si128(c2[597],_mm_xor_si128(c2[6892],_mm_xor_si128(c2[602],_mm_xor_si128(c2[3520],_mm_xor_si128(c2[5462],_mm_xor_si128(c2[6433],_mm_xor_si128(c2[1130],_mm_xor_si128(c2[1131],_mm_xor_si128(c2[4026],_mm_xor_si128(c2[185],_mm_xor_si128(c2[7440],_mm_xor_si128(c2[6474],_mm_xor_si128(c2[2623],_mm_xor_si128(c2[2138],_mm_xor_si128(c2[4078],_mm_xor_si128(c2[6033],_mm_xor_si128(c2[713],_mm_xor_si128(c2[6035],_mm_xor_si128(c2[1695],_mm_xor_si128(c2[4598],_mm_xor_si128(c2[4119],_mm_xor_si128(c2[249],_mm_xor_si128(c2[2684],_mm_xor_si128(c2[7529],_mm_xor_si128(c2[5588],_mm_xor_si128(c2[5612],_mm_xor_si128(c2[294],_mm_xor_si128(c2[7552],_mm_xor_si128(c2[308],_mm_xor_si128(c2[316],_mm_xor_si128(c2[5633],_mm_xor_si128(c2[3722],_mm_xor_si128(c2[2271],_mm_xor_si128(c2[7598],_mm_xor_si128(c2[6644],_mm_xor_si128(c2[1810],_mm_xor_si128(c2[5681],_mm_xor_si128(c2[3771],_mm_xor_si128(c2[2803],_mm_xor_si128(c2[4733],_mm_xor_si128(c2[1373],_mm_xor_si128(c2[2816],_mm_xor_si128(c2[890],_mm_xor_si128(c2[4774],_mm_xor_si128(c2[5743],_mm_xor_si128(c2[2357],_mm_xor_si128(c2[928],_mm_xor_si128(c2[4797],_mm_xor_si128(c2[5772],_mm_xor_si128(c2[7239],_mm_xor_si128(c2[471],c2[5794]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+     d2[264]=simde_mm_xor_si128(c2[3880],simde_mm_xor_si128(c2[7265],simde_mm_xor_si128(c2[1940],simde_mm_xor_si128(c2[1459],simde_mm_xor_si128(c2[2906],simde_mm_xor_si128(c2[3902],simde_mm_xor_si128(c2[2445],simde_mm_xor_si128(c2[29],simde_mm_xor_si128(c2[4400],simde_mm_xor_si128(c2[1500],simde_mm_xor_si128(c2[3435],simde_mm_xor_si128(c2[5884],simde_mm_xor_si128(c2[1041],simde_mm_xor_si128(c2[1522],simde_mm_xor_si128(c2[4916],simde_mm_xor_si128(c2[2024],simde_mm_xor_si128(c2[97],simde_mm_xor_si128(c2[4451],simde_mm_xor_si128(c2[4448],simde_mm_xor_si128(c2[597],simde_mm_xor_si128(c2[6892],simde_mm_xor_si128(c2[602],simde_mm_xor_si128(c2[3520],simde_mm_xor_si128(c2[5462],simde_mm_xor_si128(c2[6433],simde_mm_xor_si128(c2[1130],simde_mm_xor_si128(c2[1131],simde_mm_xor_si128(c2[4026],simde_mm_xor_si128(c2[185],simde_mm_xor_si128(c2[7440],simde_mm_xor_si128(c2[6474],simde_mm_xor_si128(c2[2623],simde_mm_xor_si128(c2[2138],simde_mm_xor_si128(c2[4078],simde_mm_xor_si128(c2[6033],simde_mm_xor_si128(c2[713],simde_mm_xor_si128(c2[6035],simde_mm_xor_si128(c2[1695],simde_mm_xor_si128(c2[4598],simde_mm_xor_si128(c2[4119],simde_mm_xor_si128(c2[249],simde_mm_xor_si128(c2[2684],simde_mm_xor_si128(c2[7529],simde_mm_xor_si128(c2[5588],simde_mm_xor_si128(c2[5612],simde_mm_xor_si128(c2[294],simde_mm_xor_si128(c2[7552],simde_mm_xor_si128(c2[308],simde_mm_xor_si128(c2[316],simde_mm_xor_si128(c2[5633],simde_mm_xor_si128(c2[3722],simde_mm_xor_si128(c2[2271],simde_mm_xor_si128(c2[7598],simde_mm_xor_si128(c2[6644],simde_mm_xor_si128(c2[1810],simde_mm_xor_si128(c2[5681],simde_mm_xor_si128(c2[3771],simde_mm_xor_si128(c2[2803],simde_mm_xor_si128(c2[4733],simde_mm_xor_si128(c2[1373],simde_mm_xor_si128(c2[2816],simde_mm_xor_si128(c2[890],simde_mm_xor_si128(c2[4774],simde_mm_xor_si128(c2[5743],simde_mm_xor_si128(c2[2357],simde_mm_xor_si128(c2[928],simde_mm_xor_si128(c2[4797],simde_mm_xor_si128(c2[5772],simde_mm_xor_si128(c2[7239],simde_mm_xor_si128(c2[471],c2[5794]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
 
 //row: 25
-     d2[275]=_mm_xor_si128(c2[3416],_mm_xor_si128(c2[7396],_mm_xor_si128(c2[7420],c2[3215])));
+     d2[275]=simde_mm_xor_si128(c2[3416],simde_mm_xor_si128(c2[7396],simde_mm_xor_si128(c2[7420],c2[3215])));
 
 //row: 26
-     d2[286]=_mm_xor_si128(c2[2429],_mm_xor_si128(c2[2957],_mm_xor_si128(c2[1059],c2[1788])));
+     d2[286]=simde_mm_xor_si128(c2[2429],simde_mm_xor_si128(c2[2957],simde_mm_xor_si128(c2[1059],c2[1788])));
 
 //row: 27
-     d2[297]=_mm_xor_si128(c2[5351],_mm_xor_si128(c2[3045],c2[5989]));
+     d2[297]=simde_mm_xor_si128(c2[5351],simde_mm_xor_si128(c2[3045],c2[5989]));
 
 //row: 28
-     d2[308]=_mm_xor_si128(c2[2910],_mm_xor_si128(c2[572],_mm_xor_si128(c2[4292],c2[3860])));
+     d2[308]=simde_mm_xor_si128(c2[2910],simde_mm_xor_si128(c2[572],simde_mm_xor_si128(c2[4292],c2[3860])));
 
 //row: 29
-     d2[319]=_mm_xor_si128(c2[971],_mm_xor_si128(c2[4356],_mm_xor_si128(c2[6785],_mm_xor_si128(c2[5809],_mm_xor_si128(c2[6293],_mm_xor_si128(c2[993],_mm_xor_si128(c2[7290],_mm_xor_si128(c2[4379],_mm_xor_si128(c2[4863],_mm_xor_si128(c2[512],_mm_xor_si128(c2[1502],_mm_xor_si128(c2[6345],_mm_xor_si128(c2[537],_mm_xor_si128(c2[2975],_mm_xor_si128(c2[5875],_mm_xor_si128(c2[5883],_mm_xor_si128(c2[6367],_mm_xor_si128(c2[6869],_mm_xor_si128(c2[4931],_mm_xor_si128(c2[1058],_mm_xor_si128(c2[1542],_mm_xor_si128(c2[5442],_mm_xor_si128(c2[3983],_mm_xor_si128(c2[5436],_mm_xor_si128(c2[622],_mm_xor_si128(c2[2553],_mm_xor_si128(c2[3040],_mm_xor_si128(c2[3524],_mm_xor_si128(c2[5964],_mm_xor_si128(c2[5965],_mm_xor_si128(c2[644],_mm_xor_si128(c2[1128],_mm_xor_si128(c2[5019],_mm_xor_si128(c2[4542],_mm_xor_si128(c2[3081],_mm_xor_si128(c2[3565],_mm_xor_si128(c2[7468],_mm_xor_si128(c2[6983],_mm_xor_si128(c2[1169],_mm_xor_si128(c2[3124],_mm_xor_si128(c2[5547],_mm_xor_si128(c2[2642],_mm_xor_si128(c2[3126],_mm_xor_si128(c2[6540],_mm_xor_si128(c2[1700],_mm_xor_si128(c2[726],_mm_xor_si128(c2[1210],_mm_xor_si128(c2[7529],_mm_xor_si128(c2[4620],_mm_xor_si128(c2[2206],_mm_xor_si128(c2[2690],_mm_xor_si128(c2[2714],_mm_xor_si128(c2[5128],_mm_xor_si128(c2[4159],_mm_xor_si128(c2[4643],_mm_xor_si128(c2[5153],_mm_xor_si128(c2[5150],_mm_xor_si128(c2[2251],_mm_xor_si128(c2[2735],_mm_xor_si128(c2[4186],_mm_xor_si128(c2[824],_mm_xor_si128(c2[7116],_mm_xor_si128(c2[4689],_mm_xor_si128(c2[3746],_mm_xor_si128(c2[6644],_mm_xor_si128(c2[2288],_mm_xor_si128(c2[2772],_mm_xor_si128(c2[862],_mm_xor_si128(c2[7637],_mm_xor_si128(c2[1351],_mm_xor_si128(c2[1835],_mm_xor_si128(c2[6207],_mm_xor_si128(c2[7661],_mm_xor_si128(c2[5240],_mm_xor_si128(c2[5724],_mm_xor_si128(c2[1852],_mm_xor_si128(c2[1876],_mm_xor_si128(c2[2845],_mm_xor_si128(c2[7202],_mm_xor_si128(c2[5773],_mm_xor_si128(c2[1899],_mm_xor_si128(c2[2379],_mm_xor_si128(c2[2863],_mm_xor_si128(c2[4341],_mm_xor_si128(c2[5305],_mm_xor_si128(c2[2401],c2[2885]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+     d2[319]=simde_mm_xor_si128(c2[971],simde_mm_xor_si128(c2[4356],simde_mm_xor_si128(c2[6785],simde_mm_xor_si128(c2[5809],simde_mm_xor_si128(c2[6293],simde_mm_xor_si128(c2[993],simde_mm_xor_si128(c2[7290],simde_mm_xor_si128(c2[4379],simde_mm_xor_si128(c2[4863],simde_mm_xor_si128(c2[512],simde_mm_xor_si128(c2[1502],simde_mm_xor_si128(c2[6345],simde_mm_xor_si128(c2[537],simde_mm_xor_si128(c2[2975],simde_mm_xor_si128(c2[5875],simde_mm_xor_si128(c2[5883],simde_mm_xor_si128(c2[6367],simde_mm_xor_si128(c2[6869],simde_mm_xor_si128(c2[4931],simde_mm_xor_si128(c2[1058],simde_mm_xor_si128(c2[1542],simde_mm_xor_si128(c2[5442],simde_mm_xor_si128(c2[3983],simde_mm_xor_si128(c2[5436],simde_mm_xor_si128(c2[622],simde_mm_xor_si128(c2[2553],simde_mm_xor_si128(c2[3040],simde_mm_xor_si128(c2[3524],simde_mm_xor_si128(c2[5964],simde_mm_xor_si128(c2[5965],simde_mm_xor_si128(c2[644],simde_mm_xor_si128(c2[1128],simde_mm_xor_si128(c2[5019],simde_mm_xor_si128(c2[4542],simde_mm_xor_si128(c2[3081],simde_mm_xor_si128(c2[3565],simde_mm_xor_si128(c2[7468],simde_mm_xor_si128(c2[6983],simde_mm_xor_si128(c2[1169],simde_mm_xor_si128(c2[3124],simde_mm_xor_si128(c2[5547],simde_mm_xor_si128(c2[2642],simde_mm_xor_si128(c2[3126],simde_mm_xor_si128(c2[6540],simde_mm_xor_si128(c2[1700],simde_mm_xor_si128(c2[726],simde_mm_xor_si128(c2[1210],simde_mm_xor_si128(c2[7529],simde_mm_xor_si128(c2[4620],simde_mm_xor_si128(c2[2206],simde_mm_xor_si128(c2[2690],simde_mm_xor_si128(c2[2714],simde_mm_xor_si128(c2[5128],simde_mm_xor_si128(c2[4159],simde_mm_xor_si128(c2[4643],simde_mm_xor_si128(c2[5153],simde_mm_xor_si128(c2[5150],simde_mm_xor_si128(c2[2251],simde_mm_xor_si128(c2[2735],simde_mm_xor_si128(c2[4186],simde_mm_xor_si128(c2[824],simde_mm_xor_si128(c2[7116],simde_mm_xor_si128(c2[4689],simde_mm_xor_si128(c2[3746],simde_mm_xor_si128(c2[6644],simde_mm_xor_si128(c2[2288],simde_mm_xor_si128(c2[2772],simde_mm_xor_si128(c2[862],simde_mm_xor_si128(c2[7637],simde_mm_xor_si128(c2[1351],simde_mm_xor_si128(c2[1835],simde_mm_xor_si128(c2[6207],simde_mm_xor_si128(c2[7661],simde_mm_xor_si128(c2[5240],simde_mm_xor_si128(c2[5724],simde_mm_xor_si128(c2[1852],simde_mm_xor_si128(c2[1876],simde_mm_xor_si128(c2[2845],simde_mm_xor_si128(c2[7202],simde_mm_xor_si128(c2[5773],simde_mm_xor_si128(c2[1899],simde_mm_xor_si128(c2[2379],simde_mm_xor_si128(c2[2863],simde_mm_xor_si128(c2[4341],simde_mm_xor_si128(c2[5305],simde_mm_xor_si128(c2[2401],c2[2885]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
 
 //row: 30
-     d2[330]=_mm_xor_si128(c2[5333],_mm_xor_si128(c2[975],_mm_xor_si128(c2[2909],_mm_xor_si128(c2[3393],_mm_xor_si128(c2[2428],_mm_xor_si128(c2[2912],_mm_xor_si128(c2[5334],_mm_xor_si128(c2[5355],_mm_xor_si128(c2[3414],_mm_xor_si128(c2[3898],_mm_xor_si128(c2[998],_mm_xor_si128(c2[1482],_mm_xor_si128(c2[5853],_mm_xor_si128(c2[2953],_mm_xor_si128(c2[4404],_mm_xor_si128(c2[4888],_mm_xor_si128(c2[7326],_mm_xor_si128(c2[2494],_mm_xor_si128(c2[2491],_mm_xor_si128(c2[2975],_mm_xor_si128(c2[3477],_mm_xor_si128(c2[1066],_mm_xor_si128(c2[1550],_mm_xor_si128(c2[5420],_mm_xor_si128(c2[5904],_mm_xor_si128(c2[2050],_mm_xor_si128(c2[602],_mm_xor_si128(c2[1571],_mm_xor_si128(c2[2055],_mm_xor_si128(c2[4973],_mm_xor_si128(c2[6431],_mm_xor_si128(c2[6915],_mm_xor_si128(c2[7402],_mm_xor_si128(c2[132],_mm_xor_si128(c2[2583],_mm_xor_si128(c2[2100],_mm_xor_si128(c2[2584],_mm_xor_si128(c2[4995],_mm_xor_si128(c2[5479],_mm_xor_si128(c2[1638],_mm_xor_si128(c2[666],_mm_xor_si128(c2[1150],_mm_xor_si128(c2[7443],_mm_xor_si128(c2[184],_mm_xor_si128(c2[4076],_mm_xor_si128(c2[3591],_mm_xor_si128(c2[5047],_mm_xor_si128(c2[5531],_mm_xor_si128(c2[7486],_mm_xor_si128(c2[1682],_mm_xor_si128(c2[2166],_mm_xor_si128(c2[7004],_mm_xor_si128(c2[7488],_mm_xor_si128(c2[221],_mm_xor_si128(c2[3148],_mm_xor_si128(c2[6051],_mm_xor_si128(c2[5088],_mm_xor_si128(c2[5572],_mm_xor_si128(c2[4137],_mm_xor_si128(c2[1239],_mm_xor_si128(c2[6557],_mm_xor_si128(c2[7041],_mm_xor_si128(c2[7065],_mm_xor_si128(c2[1263],_mm_xor_si128(c2[1747],_mm_xor_si128(c2[778],_mm_xor_si128(c2[1262],_mm_xor_si128(c2[7551],_mm_xor_si128(c2[1761],_mm_xor_si128(c2[1285],_mm_xor_si128(c2[1769],_mm_xor_si128(c2[6602],_mm_xor_si128(c2[7086],_mm_xor_si128(c2[5175],_mm_xor_si128(c2[3724],_mm_xor_si128(c2[824],_mm_xor_si128(c2[1308],_mm_xor_si128(c2[354],_mm_xor_si128(c2[3263],_mm_xor_si128(c2[6650],_mm_xor_si128(c2[7134],_mm_xor_si128(c2[5224],_mm_xor_si128(c2[3772],_mm_xor_si128(c2[4256],_mm_xor_si128(c2[5702],_mm_xor_si128(c2[6186],_mm_xor_si128(c2[2826],_mm_xor_si128(c2[3785],_mm_xor_si128(c2[4269],_mm_xor_si128(c2[1848],_mm_xor_si128(c2[2332],_mm_xor_si128(c2[6227],_mm_xor_si128(c2[7196],_mm_xor_si128(c2[3326],_mm_xor_si128(c2[3810],_mm_xor_si128(c2[2381],_mm_xor_si128(c2[5766],_mm_xor_si128(c2[6250],_mm_xor_si128(c2[6741],_mm_xor_si128(c2[7225],_mm_xor_si128(c2[949],_mm_xor_si128(c2[1924],_mm_xor_si128(c2[6763],c2[7247])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+     d2[330]=simde_mm_xor_si128(c2[5333],simde_mm_xor_si128(c2[975],simde_mm_xor_si128(c2[2909],simde_mm_xor_si128(c2[3393],simde_mm_xor_si128(c2[2428],simde_mm_xor_si128(c2[2912],simde_mm_xor_si128(c2[5334],simde_mm_xor_si128(c2[5355],simde_mm_xor_si128(c2[3414],simde_mm_xor_si128(c2[3898],simde_mm_xor_si128(c2[998],simde_mm_xor_si128(c2[1482],simde_mm_xor_si128(c2[5853],simde_mm_xor_si128(c2[2953],simde_mm_xor_si128(c2[4404],simde_mm_xor_si128(c2[4888],simde_mm_xor_si128(c2[7326],simde_mm_xor_si128(c2[2494],simde_mm_xor_si128(c2[2491],simde_mm_xor_si128(c2[2975],simde_mm_xor_si128(c2[3477],simde_mm_xor_si128(c2[1066],simde_mm_xor_si128(c2[1550],simde_mm_xor_si128(c2[5420],simde_mm_xor_si128(c2[5904],simde_mm_xor_si128(c2[2050],simde_mm_xor_si128(c2[602],simde_mm_xor_si128(c2[1571],simde_mm_xor_si128(c2[2055],simde_mm_xor_si128(c2[4973],simde_mm_xor_si128(c2[6431],simde_mm_xor_si128(c2[6915],simde_mm_xor_si128(c2[7402],simde_mm_xor_si128(c2[132],simde_mm_xor_si128(c2[2583],simde_mm_xor_si128(c2[2100],simde_mm_xor_si128(c2[2584],simde_mm_xor_si128(c2[4995],simde_mm_xor_si128(c2[5479],simde_mm_xor_si128(c2[1638],simde_mm_xor_si128(c2[666],simde_mm_xor_si128(c2[1150],simde_mm_xor_si128(c2[7443],simde_mm_xor_si128(c2[184],simde_mm_xor_si128(c2[4076],simde_mm_xor_si128(c2[3591],simde_mm_xor_si128(c2[5047],simde_mm_xor_si128(c2[5531],simde_mm_xor_si128(c2[7486],simde_mm_xor_si128(c2[1682],simde_mm_xor_si128(c2[2166],simde_mm_xor_si128(c2[7004],simde_mm_xor_si128(c2[7488],simde_mm_xor_si128(c2[221],simde_mm_xor_si128(c2[3148],simde_mm_xor_si128(c2[6051],simde_mm_xor_si128(c2[5088],simde_mm_xor_si128(c2[5572],simde_mm_xor_si128(c2[4137],simde_mm_xor_si128(c2[1239],simde_mm_xor_si128(c2[6557],simde_mm_xor_si128(c2[7041],simde_mm_xor_si128(c2[7065],simde_mm_xor_si128(c2[1263],simde_mm_xor_si128(c2[1747],simde_mm_xor_si128(c2[778],simde_mm_xor_si128(c2[1262],simde_mm_xor_si128(c2[7551],simde_mm_xor_si128(c2[1761],simde_mm_xor_si128(c2[1285],simde_mm_xor_si128(c2[1769],simde_mm_xor_si128(c2[6602],simde_mm_xor_si128(c2[7086],simde_mm_xor_si128(c2[5175],simde_mm_xor_si128(c2[3724],simde_mm_xor_si128(c2[824],simde_mm_xor_si128(c2[1308],simde_mm_xor_si128(c2[354],simde_mm_xor_si128(c2[3263],simde_mm_xor_si128(c2[6650],simde_mm_xor_si128(c2[7134],simde_mm_xor_si128(c2[5224],simde_mm_xor_si128(c2[3772],simde_mm_xor_si128(c2[4256],simde_mm_xor_si128(c2[5702],simde_mm_xor_si128(c2[6186],simde_mm_xor_si128(c2[2826],simde_mm_xor_si128(c2[3785],simde_mm_xor_si128(c2[4269],simde_mm_xor_si128(c2[1848],simde_mm_xor_si128(c2[2332],simde_mm_xor_si128(c2[6227],simde_mm_xor_si128(c2[7196],simde_mm_xor_si128(c2[3326],simde_mm_xor_si128(c2[3810],simde_mm_xor_si128(c2[2381],simde_mm_xor_si128(c2[5766],simde_mm_xor_si128(c2[6250],simde_mm_xor_si128(c2[6741],simde_mm_xor_si128(c2[7225],simde_mm_xor_si128(c2[949],simde_mm_xor_si128(c2[1924],simde_mm_xor_si128(c2[6763],c2[7247])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
 
 //row: 31
-     d2[341]=_mm_xor_si128(c2[6294],_mm_xor_si128(c2[2430],_mm_xor_si128(c2[1936],_mm_xor_si128(c2[5815],_mm_xor_si128(c2[4365],_mm_xor_si128(c2[490],_mm_xor_si128(c2[3873],_mm_xor_si128(c2[7268],_mm_xor_si128(c2[9],_mm_xor_si128(c2[6316],_mm_xor_si128(c2[2452],_mm_xor_si128(c2[4870],_mm_xor_si128(c2[995],_mm_xor_si128(c2[2443],_mm_xor_si128(c2[5838],_mm_xor_si128(c2[6322],_mm_xor_si128(c2[2448],_mm_xor_si128(c2[6825],_mm_xor_si128(c2[2950],_mm_xor_si128(c2[3925],_mm_xor_si128(c2[50],_mm_xor_si128(c2[5860],_mm_xor_si128(c2[1985],_mm_xor_si128(c2[555],_mm_xor_si128(c2[4423],_mm_xor_si128(c2[3455],_mm_xor_si128(c2[7334],_mm_xor_si128(c2[3947],_mm_xor_si128(c2[7331],_mm_xor_si128(c2[72],_mm_xor_si128(c2[4449],_mm_xor_si128(c2[574],_mm_xor_si128(c2[2511],_mm_xor_si128(c2[6390],_mm_xor_si128(c2[6865],_mm_xor_si128(c2[2517],_mm_xor_si128(c2[3001],_mm_xor_si128(c2[3022],_mm_xor_si128(c2[6890],_mm_xor_si128(c2[1563],_mm_xor_si128(c2[5442],_mm_xor_si128(c2[3016],_mm_xor_si128(c2[6895],_mm_xor_si128(c2[5945],_mm_xor_si128(c2[2070],_mm_xor_si128(c2[133],_mm_xor_si128(c2[4012],_mm_xor_si128(c2[1104],_mm_xor_si128(c2[4488],_mm_xor_si128(c2[4972],_mm_xor_si128(c2[3544],_mm_xor_si128(c2[7423],_mm_xor_si128(c2[3545],_mm_xor_si128(c2[7424],_mm_xor_si128(c2[6451],_mm_xor_si128(c2[2092],_mm_xor_si128(c2[2576],_mm_xor_si128(c2[4517],_mm_xor_si128(c2[2599],_mm_xor_si128(c2[6478],_mm_xor_si128(c2[2122],_mm_xor_si128(c2[5990],_mm_xor_si128(c2[1145],_mm_xor_si128(c2[4540],_mm_xor_si128(c2[5024],_mm_xor_si128(c2[5048],_mm_xor_si128(c2[1173],_mm_xor_si128(c2[4563],_mm_xor_si128(c2[688],_mm_xor_si128(c2[6492],_mm_xor_si128(c2[2628],_mm_xor_si128(c2[704],_mm_xor_si128(c2[4583],_mm_xor_si128(c2[3127],_mm_xor_si128(c2[7006],_mm_xor_si128(c2[706],_mm_xor_si128(c2[4101],_mm_xor_si128(c2[4585],_mm_xor_si128(c2[4120],_mm_xor_si128(c2[245],_mm_xor_si128(c2[7023],_mm_xor_si128(c2[3148],_mm_xor_si128(c2[6544],_mm_xor_si128(c2[2185],_mm_xor_si128(c2[2669],_mm_xor_si128(c2[5109],_mm_xor_si128(c2[1234],_mm_xor_si128(c2[2200],_mm_xor_si128(c2[6079],_mm_xor_si128(c2[270],_mm_xor_si128(c2[3654],_mm_xor_si128(c2[4138],_mm_xor_si128(c2[294],_mm_xor_si128(c2[4162],_mm_xor_si128(c2[2708],_mm_xor_si128(c2[6587],_mm_xor_si128(c2[2223],_mm_xor_si128(c2[5618],_mm_xor_si128(c2[6102],_mm_xor_si128(c2[2733],_mm_xor_si128(c2[6601],_mm_xor_si128(c2[2730],_mm_xor_si128(c2[6609],_mm_xor_si128(c2[315],_mm_xor_si128(c2[3699],_mm_xor_si128(c2[4183],_mm_xor_si128(c2[6147],_mm_xor_si128(c2[2272],_mm_xor_si128(c2[4696],_mm_xor_si128(c2[821],_mm_xor_si128(c2[2269],_mm_xor_si128(c2[6148],_mm_xor_si128(c2[1326],_mm_xor_si128(c2[5194],_mm_xor_si128(c2[4224],_mm_xor_si128(c2[360],_mm_xor_si128(c2[352],_mm_xor_si128(c2[3747],_mm_xor_si128(c2[4231],_mm_xor_si128(c2[6185],_mm_xor_si128(c2[2310],_mm_xor_si128(c2[5217],_mm_xor_si128(c2[1342],_mm_xor_si128(c2[7158],_mm_xor_si128(c2[2799],_mm_xor_si128(c2[3283],_mm_xor_si128(c2[3787],_mm_xor_si128(c2[7666],_mm_xor_si128(c2[5241],_mm_xor_si128(c2[1366],_mm_xor_si128(c2[3304],_mm_xor_si128(c2[6688],_mm_xor_si128(c2[7172],_mm_xor_si128(c2[7199],_mm_xor_si128(c2[3324],_mm_xor_si128(c2[425],_mm_xor_si128(c2[4293],_mm_xor_si128(c2[4782],_mm_xor_si128(c2[907],_mm_xor_si128(c2[3353],_mm_xor_si128(c2[7221],_mm_xor_si128(c2[7222],_mm_xor_si128(c2[3347],_mm_xor_si128(c2[443],_mm_xor_si128(c2[3838],_mm_xor_si128(c2[4322],_mm_xor_si128(c2[1921],_mm_xor_si128(c2[5789],_mm_xor_si128(c2[2885],_mm_xor_si128(c2[6764],_mm_xor_si128(c2[465],_mm_xor_si128(c2[3860],c2[4344]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+     d2[341]=simde_mm_xor_si128(c2[6294],simde_mm_xor_si128(c2[2430],simde_mm_xor_si128(c2[1936],simde_mm_xor_si128(c2[5815],simde_mm_xor_si128(c2[4365],simde_mm_xor_si128(c2[490],simde_mm_xor_si128(c2[3873],simde_mm_xor_si128(c2[7268],simde_mm_xor_si128(c2[9],simde_mm_xor_si128(c2[6316],simde_mm_xor_si128(c2[2452],simde_mm_xor_si128(c2[4870],simde_mm_xor_si128(c2[995],simde_mm_xor_si128(c2[2443],simde_mm_xor_si128(c2[5838],simde_mm_xor_si128(c2[6322],simde_mm_xor_si128(c2[2448],simde_mm_xor_si128(c2[6825],simde_mm_xor_si128(c2[2950],simde_mm_xor_si128(c2[3925],simde_mm_xor_si128(c2[50],simde_mm_xor_si128(c2[5860],simde_mm_xor_si128(c2[1985],simde_mm_xor_si128(c2[555],simde_mm_xor_si128(c2[4423],simde_mm_xor_si128(c2[3455],simde_mm_xor_si128(c2[7334],simde_mm_xor_si128(c2[3947],simde_mm_xor_si128(c2[7331],simde_mm_xor_si128(c2[72],simde_mm_xor_si128(c2[4449],simde_mm_xor_si128(c2[574],simde_mm_xor_si128(c2[2511],simde_mm_xor_si128(c2[6390],simde_mm_xor_si128(c2[6865],simde_mm_xor_si128(c2[2517],simde_mm_xor_si128(c2[3001],simde_mm_xor_si128(c2[3022],simde_mm_xor_si128(c2[6890],simde_mm_xor_si128(c2[1563],simde_mm_xor_si128(c2[5442],simde_mm_xor_si128(c2[3016],simde_mm_xor_si128(c2[6895],simde_mm_xor_si128(c2[5945],simde_mm_xor_si128(c2[2070],simde_mm_xor_si128(c2[133],simde_mm_xor_si128(c2[4012],simde_mm_xor_si128(c2[1104],simde_mm_xor_si128(c2[4488],simde_mm_xor_si128(c2[4972],simde_mm_xor_si128(c2[3544],simde_mm_xor_si128(c2[7423],simde_mm_xor_si128(c2[3545],simde_mm_xor_si128(c2[7424],simde_mm_xor_si128(c2[6451],simde_mm_xor_si128(c2[2092],simde_mm_xor_si128(c2[2576],simde_mm_xor_si128(c2[4517],simde_mm_xor_si128(c2[2599],simde_mm_xor_si128(c2[6478],simde_mm_xor_si128(c2[2122],simde_mm_xor_si128(c2[5990],simde_mm_xor_si128(c2[1145],simde_mm_xor_si128(c2[4540],simde_mm_xor_si128(c2[5024],simde_mm_xor_si128(c2[5048],simde_mm_xor_si128(c2[1173],simde_mm_xor_si128(c2[4563],simde_mm_xor_si128(c2[688],simde_mm_xor_si128(c2[6492],simde_mm_xor_si128(c2[2628],simde_mm_xor_si128(c2[704],simde_mm_xor_si128(c2[4583],simde_mm_xor_si128(c2[3127],simde_mm_xor_si128(c2[7006],simde_mm_xor_si128(c2[706],simde_mm_xor_si128(c2[4101],simde_mm_xor_si128(c2[4585],simde_mm_xor_si128(c2[4120],simde_mm_xor_si128(c2[245],simde_mm_xor_si128(c2[7023],simde_mm_xor_si128(c2[3148],simde_mm_xor_si128(c2[6544],simde_mm_xor_si128(c2[2185],simde_mm_xor_si128(c2[2669],simde_mm_xor_si128(c2[5109],simde_mm_xor_si128(c2[1234],simde_mm_xor_si128(c2[2200],simde_mm_xor_si128(c2[6079],simde_mm_xor_si128(c2[270],simde_mm_xor_si128(c2[3654],simde_mm_xor_si128(c2[4138],simde_mm_xor_si128(c2[294],simde_mm_xor_si128(c2[4162],simde_mm_xor_si128(c2[2708],simde_mm_xor_si128(c2[6587],simde_mm_xor_si128(c2[2223],simde_mm_xor_si128(c2[5618],simde_mm_xor_si128(c2[6102],simde_mm_xor_si128(c2[2733],simde_mm_xor_si128(c2[6601],simde_mm_xor_si128(c2[2730],simde_mm_xor_si128(c2[6609],simde_mm_xor_si128(c2[315],simde_mm_xor_si128(c2[3699],simde_mm_xor_si128(c2[4183],simde_mm_xor_si128(c2[6147],simde_mm_xor_si128(c2[2272],simde_mm_xor_si128(c2[4696],simde_mm_xor_si128(c2[821],simde_mm_xor_si128(c2[2269],simde_mm_xor_si128(c2[6148],simde_mm_xor_si128(c2[1326],simde_mm_xor_si128(c2[5194],simde_mm_xor_si128(c2[4224],simde_mm_xor_si128(c2[360],simde_mm_xor_si128(c2[352],simde_mm_xor_si128(c2[3747],simde_mm_xor_si128(c2[4231],simde_mm_xor_si128(c2[6185],simde_mm_xor_si128(c2[2310],simde_mm_xor_si128(c2[5217],simde_mm_xor_si128(c2[1342],simde_mm_xor_si128(c2[7158],simde_mm_xor_si128(c2[2799],simde_mm_xor_si128(c2[3283],simde_mm_xor_si128(c2[3787],simde_mm_xor_si128(c2[7666],simde_mm_xor_si128(c2[5241],simde_mm_xor_si128(c2[1366],simde_mm_xor_si128(c2[3304],simde_mm_xor_si128(c2[6688],simde_mm_xor_si128(c2[7172],simde_mm_xor_si128(c2[7199],simde_mm_xor_si128(c2[3324],simde_mm_xor_si128(c2[425],simde_mm_xor_si128(c2[4293],simde_mm_xor_si128(c2[4782],simde_mm_xor_si128(c2[907],simde_mm_xor_si128(c2[3353],simde_mm_xor_si128(c2[7221],simde_mm_xor_si128(c2[7222],simde_mm_xor_si128(c2[3347],simde_mm_xor_si128(c2[443],simde_mm_xor_si128(c2[3838],simde_mm_xor_si128(c2[4322],simde_mm_xor_si128(c2[1921],simde_mm_xor_si128(c2[5789],simde_mm_xor_si128(c2[2885],simde_mm_xor_si128(c2[6764],simde_mm_xor_si128(c2[465],simde_mm_xor_si128(c2[3860],c2[4344]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
 
 //row: 32
-     d2[352]=_mm_xor_si128(c2[4841],_mm_xor_si128(c2[494],_mm_xor_si128(c2[2428],_mm_xor_si128(c2[2912],_mm_xor_si128(c2[1936],_mm_xor_si128(c2[2420],_mm_xor_si128(c2[7270],_mm_xor_si128(c2[4863],_mm_xor_si128(c2[2933],_mm_xor_si128(c2[3417],_mm_xor_si128(c2[506],_mm_xor_si128(c2[990],_mm_xor_si128(c2[5372],_mm_xor_si128(c2[2472],_mm_xor_si128(c2[3923],_mm_xor_si128(c2[4407],_mm_xor_si128(c2[6845],_mm_xor_si128(c2[2002],_mm_xor_si128(c2[2010],_mm_xor_si128(c2[2494],_mm_xor_si128(c2[2996],_mm_xor_si128(c2[574],_mm_xor_si128(c2[1058],_mm_xor_si128(c2[4928],_mm_xor_si128(c2[5412],_mm_xor_si128(c2[1569],_mm_xor_si128(c2[110],_mm_xor_si128(c2[1079],_mm_xor_si128(c2[1563],_mm_xor_si128(c2[4492],_mm_xor_si128(c2[5950],_mm_xor_si128(c2[6434],_mm_xor_si128(c2[6910],_mm_xor_si128(c2[7394],_mm_xor_si128(c2[2091],_mm_xor_si128(c2[1608],_mm_xor_si128(c2[2092],_mm_xor_si128(c2[4514],_mm_xor_si128(c2[4998],_mm_xor_si128(c2[1146],_mm_xor_si128(c2[185],_mm_xor_si128(c2[669],_mm_xor_si128(c2[6962],_mm_xor_si128(c2[7446],_mm_xor_si128(c2[3595],_mm_xor_si128(c2[3110],_mm_xor_si128(c2[4555],_mm_xor_si128(c2[5039],_mm_xor_si128(c2[7005],_mm_xor_si128(c2[1190],_mm_xor_si128(c2[1674],_mm_xor_si128(c2[6512],_mm_xor_si128(c2[6996],_mm_xor_si128(c2[2667],_mm_xor_si128(c2[5570],_mm_xor_si128(c2[4607],_mm_xor_si128(c2[5091],_mm_xor_si128(c2[3656],_mm_xor_si128(c2[758],_mm_xor_si128(c2[6076],_mm_xor_si128(c2[6560],_mm_xor_si128(c2[4625],_mm_xor_si128(c2[6584],_mm_xor_si128(c2[771],_mm_xor_si128(c2[1255],_mm_xor_si128(c2[286],_mm_xor_si128(c2[770],_mm_xor_si128(c2[1280],_mm_xor_si128(c2[793],_mm_xor_si128(c2[1277],_mm_xor_si128(c2[6121],_mm_xor_si128(c2[6605],_mm_xor_si128(c2[1286],_mm_xor_si128(c2[4694],_mm_xor_si128(c2[3243],_mm_xor_si128(c2[332],_mm_xor_si128(c2[816],_mm_xor_si128(c2[7616],_mm_xor_si128(c2[2782],_mm_xor_si128(c2[6169],_mm_xor_si128(c2[6653],_mm_xor_si128(c2[4732],_mm_xor_si128(c2[3280],_mm_xor_si128(c2[3764],_mm_xor_si128(c2[5221],_mm_xor_si128(c2[5705],_mm_xor_si128(c2[2334],_mm_xor_si128(c2[3304],_mm_xor_si128(c2[3788],_mm_xor_si128(c2[1367],_mm_xor_si128(c2[1851],_mm_xor_si128(c2[5746],_mm_xor_si128(c2[6715],_mm_xor_si128(c2[2845],_mm_xor_si128(c2[3329],_mm_xor_si128(c2[1900],_mm_xor_si128(c2[5285],_mm_xor_si128(c2[5769],_mm_xor_si128(c2[6249],_mm_xor_si128(c2[6733],_mm_xor_si128(c2[468],_mm_xor_si128(c2[1432],_mm_xor_si128(c2[6271],c2[6755])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+     d2[352]=simde_mm_xor_si128(c2[4841],simde_mm_xor_si128(c2[494],simde_mm_xor_si128(c2[2428],simde_mm_xor_si128(c2[2912],simde_mm_xor_si128(c2[1936],simde_mm_xor_si128(c2[2420],simde_mm_xor_si128(c2[7270],simde_mm_xor_si128(c2[4863],simde_mm_xor_si128(c2[2933],simde_mm_xor_si128(c2[3417],simde_mm_xor_si128(c2[506],simde_mm_xor_si128(c2[990],simde_mm_xor_si128(c2[5372],simde_mm_xor_si128(c2[2472],simde_mm_xor_si128(c2[3923],simde_mm_xor_si128(c2[4407],simde_mm_xor_si128(c2[6845],simde_mm_xor_si128(c2[2002],simde_mm_xor_si128(c2[2010],simde_mm_xor_si128(c2[2494],simde_mm_xor_si128(c2[2996],simde_mm_xor_si128(c2[574],simde_mm_xor_si128(c2[1058],simde_mm_xor_si128(c2[4928],simde_mm_xor_si128(c2[5412],simde_mm_xor_si128(c2[1569],simde_mm_xor_si128(c2[110],simde_mm_xor_si128(c2[1079],simde_mm_xor_si128(c2[1563],simde_mm_xor_si128(c2[4492],simde_mm_xor_si128(c2[5950],simde_mm_xor_si128(c2[6434],simde_mm_xor_si128(c2[6910],simde_mm_xor_si128(c2[7394],simde_mm_xor_si128(c2[2091],simde_mm_xor_si128(c2[1608],simde_mm_xor_si128(c2[2092],simde_mm_xor_si128(c2[4514],simde_mm_xor_si128(c2[4998],simde_mm_xor_si128(c2[1146],simde_mm_xor_si128(c2[185],simde_mm_xor_si128(c2[669],simde_mm_xor_si128(c2[6962],simde_mm_xor_si128(c2[7446],simde_mm_xor_si128(c2[3595],simde_mm_xor_si128(c2[3110],simde_mm_xor_si128(c2[4555],simde_mm_xor_si128(c2[5039],simde_mm_xor_si128(c2[7005],simde_mm_xor_si128(c2[1190],simde_mm_xor_si128(c2[1674],simde_mm_xor_si128(c2[6512],simde_mm_xor_si128(c2[6996],simde_mm_xor_si128(c2[2667],simde_mm_xor_si128(c2[5570],simde_mm_xor_si128(c2[4607],simde_mm_xor_si128(c2[5091],simde_mm_xor_si128(c2[3656],simde_mm_xor_si128(c2[758],simde_mm_xor_si128(c2[6076],simde_mm_xor_si128(c2[6560],simde_mm_xor_si128(c2[4625],simde_mm_xor_si128(c2[6584],simde_mm_xor_si128(c2[771],simde_mm_xor_si128(c2[1255],simde_mm_xor_si128(c2[286],simde_mm_xor_si128(c2[770],simde_mm_xor_si128(c2[1280],simde_mm_xor_si128(c2[793],simde_mm_xor_si128(c2[1277],simde_mm_xor_si128(c2[6121],simde_mm_xor_si128(c2[6605],simde_mm_xor_si128(c2[1286],simde_mm_xor_si128(c2[4694],simde_mm_xor_si128(c2[3243],simde_mm_xor_si128(c2[332],simde_mm_xor_si128(c2[816],simde_mm_xor_si128(c2[7616],simde_mm_xor_si128(c2[2782],simde_mm_xor_si128(c2[6169],simde_mm_xor_si128(c2[6653],simde_mm_xor_si128(c2[4732],simde_mm_xor_si128(c2[3280],simde_mm_xor_si128(c2[3764],simde_mm_xor_si128(c2[5221],simde_mm_xor_si128(c2[5705],simde_mm_xor_si128(c2[2334],simde_mm_xor_si128(c2[3304],simde_mm_xor_si128(c2[3788],simde_mm_xor_si128(c2[1367],simde_mm_xor_si128(c2[1851],simde_mm_xor_si128(c2[5746],simde_mm_xor_si128(c2[6715],simde_mm_xor_si128(c2[2845],simde_mm_xor_si128(c2[3329],simde_mm_xor_si128(c2[1900],simde_mm_xor_si128(c2[5285],simde_mm_xor_si128(c2[5769],simde_mm_xor_si128(c2[6249],simde_mm_xor_si128(c2[6733],simde_mm_xor_si128(c2[468],simde_mm_xor_si128(c2[1432],simde_mm_xor_si128(c2[6271],c2[6755])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
 
 //row: 33
-     d2[363]=_mm_xor_si128(c2[3897],_mm_xor_si128(c2[52],_mm_xor_si128(c2[6542],c2[2888])));
+     d2[363]=simde_mm_xor_si128(c2[3897],simde_mm_xor_si128(c2[52],simde_mm_xor_si128(c2[6542],c2[2888])));
 
 //row: 34
-     d2[374]=_mm_xor_si128(c2[5811],_mm_xor_si128(c2[163],_mm_xor_si128(c2[337],c2[3281])));
+     d2[374]=simde_mm_xor_si128(c2[5811],simde_mm_xor_si128(c2[163],simde_mm_xor_si128(c2[337],c2[3281])));
 
 //row: 35
-     d2[385]=_mm_xor_si128(c2[970],_mm_xor_si128(c2[4366],_mm_xor_si128(c2[6784],_mm_xor_si128(c2[6292],_mm_xor_si128(c2[992],_mm_xor_si128(c2[7289],_mm_xor_si128(c2[4862],_mm_xor_si128(c2[1964],_mm_xor_si128(c2[1501],_mm_xor_si128(c2[6344],_mm_xor_si128(c2[536],_mm_xor_si128(c2[2974],_mm_xor_si128(c2[5874],_mm_xor_si128(c2[6366],_mm_xor_si128(c2[6868],_mm_xor_si128(c2[4930],_mm_xor_si128(c2[1541],_mm_xor_si128(c2[5441],_mm_xor_si128(c2[3982],_mm_xor_si128(c2[5435],_mm_xor_si128(c2[621],_mm_xor_si128(c2[2552],_mm_xor_si128(c2[3523],_mm_xor_si128(c2[1102],_mm_xor_si128(c2[5963],_mm_xor_si128(c2[5964],_mm_xor_si128(c2[1127],_mm_xor_si128(c2[5018],_mm_xor_si128(c2[4541],_mm_xor_si128(c2[3564],_mm_xor_si128(c2[7467],_mm_xor_si128(c2[6982],_mm_xor_si128(c2[1168],_mm_xor_si128(c2[3134],_mm_xor_si128(c2[5546],_mm_xor_si128(c2[3125],_mm_xor_si128(c2[6539],_mm_xor_si128(c2[1699],_mm_xor_si128(c2[1220],_mm_xor_si128(c2[7528],_mm_xor_si128(c2[4630],_mm_xor_si128(c2[2689],_mm_xor_si128(c2[1717],_mm_xor_si128(c2[2713],_mm_xor_si128(c2[5127],_mm_xor_si128(c2[4642],_mm_xor_si128(c2[5152],_mm_xor_si128(c2[5149],_mm_xor_si128(c2[2734],_mm_xor_si128(c2[823],_mm_xor_si128(c2[7115],_mm_xor_si128(c2[4688],_mm_xor_si128(c2[3745],_mm_xor_si128(c2[6654],_mm_xor_si128(c2[2782],_mm_xor_si128(c2[861],_mm_xor_si128(c2[7636],_mm_xor_si128(c2[1834],_mm_xor_si128(c2[6206],_mm_xor_si128(c2[7660],_mm_xor_si128(c2[5723],_mm_xor_si128(c2[1875],_mm_xor_si128(c2[2844],_mm_xor_si128(c2[7201],_mm_xor_si128(c2[5772],_mm_xor_si128(c2[1898],_mm_xor_si128(c2[2862],_mm_xor_si128(c2[4340],_mm_xor_si128(c2[5304],c2[2884])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+     d2[385]=simde_mm_xor_si128(c2[970],simde_mm_xor_si128(c2[4366],simde_mm_xor_si128(c2[6784],simde_mm_xor_si128(c2[6292],simde_mm_xor_si128(c2[992],simde_mm_xor_si128(c2[7289],simde_mm_xor_si128(c2[4862],simde_mm_xor_si128(c2[1964],simde_mm_xor_si128(c2[1501],simde_mm_xor_si128(c2[6344],simde_mm_xor_si128(c2[536],simde_mm_xor_si128(c2[2974],simde_mm_xor_si128(c2[5874],simde_mm_xor_si128(c2[6366],simde_mm_xor_si128(c2[6868],simde_mm_xor_si128(c2[4930],simde_mm_xor_si128(c2[1541],simde_mm_xor_si128(c2[5441],simde_mm_xor_si128(c2[3982],simde_mm_xor_si128(c2[5435],simde_mm_xor_si128(c2[621],simde_mm_xor_si128(c2[2552],simde_mm_xor_si128(c2[3523],simde_mm_xor_si128(c2[1102],simde_mm_xor_si128(c2[5963],simde_mm_xor_si128(c2[5964],simde_mm_xor_si128(c2[1127],simde_mm_xor_si128(c2[5018],simde_mm_xor_si128(c2[4541],simde_mm_xor_si128(c2[3564],simde_mm_xor_si128(c2[7467],simde_mm_xor_si128(c2[6982],simde_mm_xor_si128(c2[1168],simde_mm_xor_si128(c2[3134],simde_mm_xor_si128(c2[5546],simde_mm_xor_si128(c2[3125],simde_mm_xor_si128(c2[6539],simde_mm_xor_si128(c2[1699],simde_mm_xor_si128(c2[1220],simde_mm_xor_si128(c2[7528],simde_mm_xor_si128(c2[4630],simde_mm_xor_si128(c2[2689],simde_mm_xor_si128(c2[1717],simde_mm_xor_si128(c2[2713],simde_mm_xor_si128(c2[5127],simde_mm_xor_si128(c2[4642],simde_mm_xor_si128(c2[5152],simde_mm_xor_si128(c2[5149],simde_mm_xor_si128(c2[2734],simde_mm_xor_si128(c2[823],simde_mm_xor_si128(c2[7115],simde_mm_xor_si128(c2[4688],simde_mm_xor_si128(c2[3745],simde_mm_xor_si128(c2[6654],simde_mm_xor_si128(c2[2782],simde_mm_xor_si128(c2[861],simde_mm_xor_si128(c2[7636],simde_mm_xor_si128(c2[1834],simde_mm_xor_si128(c2[6206],simde_mm_xor_si128(c2[7660],simde_mm_xor_si128(c2[5723],simde_mm_xor_si128(c2[1875],simde_mm_xor_si128(c2[2844],simde_mm_xor_si128(c2[7201],simde_mm_xor_si128(c2[5772],simde_mm_xor_si128(c2[1898],simde_mm_xor_si128(c2[2862],simde_mm_xor_si128(c2[4340],simde_mm_xor_si128(c2[5304],c2[2884])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
 
 //row: 36
-     d2[396]=_mm_xor_si128(c2[3396],_mm_xor_si128(c2[7568],_mm_xor_si128(c2[1784],c2[6204])));
+     d2[396]=simde_mm_xor_si128(c2[3396],simde_mm_xor_si128(c2[7568],simde_mm_xor_si128(c2[1784],c2[6204])));
 
 //row: 37
-     d2[407]=_mm_xor_si128(c2[5325],_mm_xor_si128(c2[5809],_mm_xor_si128(c2[1462],_mm_xor_si128(c2[3880],_mm_xor_si128(c2[3388],_mm_xor_si128(c2[5347],_mm_xor_si128(c2[5831],_mm_xor_si128(c2[4385],_mm_xor_si128(c2[1958],_mm_xor_si128(c2[7290],_mm_xor_si128(c2[5856],_mm_xor_si128(c2[6340],_mm_xor_si128(c2[3440],_mm_xor_si128(c2[5375],_mm_xor_si128(c2[7329],_mm_xor_si128(c2[70],_mm_xor_si128(c2[2970],_mm_xor_si128(c2[3462],_mm_xor_si128(c2[3964],_mm_xor_si128(c2[2026],_mm_xor_si128(c2[6380],_mm_xor_si128(c2[2053],_mm_xor_si128(c2[2537],_mm_xor_si128(c2[1078],_mm_xor_si128(c2[2531],_mm_xor_si128(c2[4976],_mm_xor_si128(c2[5460],_mm_xor_si128(c2[7402],_mm_xor_si128(c2[619],_mm_xor_si128(c2[3059],_mm_xor_si128(c2[3060],_mm_xor_si128(c2[5966],_mm_xor_si128(c2[2114],_mm_xor_si128(c2[1637],_mm_xor_si128(c2[660],_mm_xor_si128(c2[4079],_mm_xor_si128(c2[4563],_mm_xor_si128(c2[4078],_mm_xor_si128(c2[6007],_mm_xor_si128(c2[7489],_mm_xor_si128(c2[230],_mm_xor_si128(c2[2642],_mm_xor_si128(c2[221],_mm_xor_si128(c2[3151],_mm_xor_si128(c2[3635],_mm_xor_si128(c2[6538],_mm_xor_si128(c2[6059],_mm_xor_si128(c2[4140],_mm_xor_si128(c2[4624],_mm_xor_si128(c2[1726],_mm_xor_si128(c2[7528],_mm_xor_si128(c2[7068],_mm_xor_si128(c2[7552],_mm_xor_si128(c2[2223],_mm_xor_si128(c2[1738],_mm_xor_si128(c2[6097],_mm_xor_si128(c2[2248],_mm_xor_si128(c2[2245],_mm_xor_si128(c2[7573],_mm_xor_si128(c2[5178],_mm_xor_si128(c2[5662],_mm_xor_si128(c2[4211],_mm_xor_si128(c2[1784],_mm_xor_si128(c2[357],_mm_xor_si128(c2[841],_mm_xor_si128(c2[3750],_mm_xor_si128(c2[7621],_mm_xor_si128(c2[5700],_mm_xor_si128(c2[4732],_mm_xor_si128(c2[6673],_mm_xor_si128(c2[2818],_mm_xor_si128(c2[3302],_mm_xor_si128(c2[4756],_mm_xor_si128(c2[2819],_mm_xor_si128(c2[6230],_mm_xor_si128(c2[6714],_mm_xor_si128(c2[7683],_mm_xor_si128(c2[4297],_mm_xor_si128(c2[2384],_mm_xor_si128(c2[2868],_mm_xor_si128(c2[6737],_mm_xor_si128(c2[7701],_mm_xor_si128(c2[952],_mm_xor_si128(c2[1436],_mm_xor_si128(c2[2400],c2[7723])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+     d2[407]=simde_mm_xor_si128(c2[5325],simde_mm_xor_si128(c2[5809],simde_mm_xor_si128(c2[1462],simde_mm_xor_si128(c2[3880],simde_mm_xor_si128(c2[3388],simde_mm_xor_si128(c2[5347],simde_mm_xor_si128(c2[5831],simde_mm_xor_si128(c2[4385],simde_mm_xor_si128(c2[1958],simde_mm_xor_si128(c2[7290],simde_mm_xor_si128(c2[5856],simde_mm_xor_si128(c2[6340],simde_mm_xor_si128(c2[3440],simde_mm_xor_si128(c2[5375],simde_mm_xor_si128(c2[7329],simde_mm_xor_si128(c2[70],simde_mm_xor_si128(c2[2970],simde_mm_xor_si128(c2[3462],simde_mm_xor_si128(c2[3964],simde_mm_xor_si128(c2[2026],simde_mm_xor_si128(c2[6380],simde_mm_xor_si128(c2[2053],simde_mm_xor_si128(c2[2537],simde_mm_xor_si128(c2[1078],simde_mm_xor_si128(c2[2531],simde_mm_xor_si128(c2[4976],simde_mm_xor_si128(c2[5460],simde_mm_xor_si128(c2[7402],simde_mm_xor_si128(c2[619],simde_mm_xor_si128(c2[3059],simde_mm_xor_si128(c2[3060],simde_mm_xor_si128(c2[5966],simde_mm_xor_si128(c2[2114],simde_mm_xor_si128(c2[1637],simde_mm_xor_si128(c2[660],simde_mm_xor_si128(c2[4079],simde_mm_xor_si128(c2[4563],simde_mm_xor_si128(c2[4078],simde_mm_xor_si128(c2[6007],simde_mm_xor_si128(c2[7489],simde_mm_xor_si128(c2[230],simde_mm_xor_si128(c2[2642],simde_mm_xor_si128(c2[221],simde_mm_xor_si128(c2[3151],simde_mm_xor_si128(c2[3635],simde_mm_xor_si128(c2[6538],simde_mm_xor_si128(c2[6059],simde_mm_xor_si128(c2[4140],simde_mm_xor_si128(c2[4624],simde_mm_xor_si128(c2[1726],simde_mm_xor_si128(c2[7528],simde_mm_xor_si128(c2[7068],simde_mm_xor_si128(c2[7552],simde_mm_xor_si128(c2[2223],simde_mm_xor_si128(c2[1738],simde_mm_xor_si128(c2[6097],simde_mm_xor_si128(c2[2248],simde_mm_xor_si128(c2[2245],simde_mm_xor_si128(c2[7573],simde_mm_xor_si128(c2[5178],simde_mm_xor_si128(c2[5662],simde_mm_xor_si128(c2[4211],simde_mm_xor_si128(c2[1784],simde_mm_xor_si128(c2[357],simde_mm_xor_si128(c2[841],simde_mm_xor_si128(c2[3750],simde_mm_xor_si128(c2[7621],simde_mm_xor_si128(c2[5700],simde_mm_xor_si128(c2[4732],simde_mm_xor_si128(c2[6673],simde_mm_xor_si128(c2[2818],simde_mm_xor_si128(c2[3302],simde_mm_xor_si128(c2[4756],simde_mm_xor_si128(c2[2819],simde_mm_xor_si128(c2[6230],simde_mm_xor_si128(c2[6714],simde_mm_xor_si128(c2[7683],simde_mm_xor_si128(c2[4297],simde_mm_xor_si128(c2[2384],simde_mm_xor_si128(c2[2868],simde_mm_xor_si128(c2[6737],simde_mm_xor_si128(c2[7701],simde_mm_xor_si128(c2[952],simde_mm_xor_si128(c2[1436],simde_mm_xor_si128(c2[2400],c2[7723])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
 
 //row: 38
-     d2[418]=_mm_xor_si128(c2[1946],_mm_xor_si128(c2[2135],_mm_xor_si128(c2[706],c2[3172])));
+     d2[418]=simde_mm_xor_si128(c2[1946],simde_mm_xor_si128(c2[2135],simde_mm_xor_si128(c2[706],c2[3172])));
 
 //row: 39
-     d2[429]=_mm_xor_si128(c2[5833],_mm_xor_si128(c2[3943],_mm_xor_si128(c2[2576],c2[424])));
+     d2[429]=simde_mm_xor_si128(c2[5833],simde_mm_xor_si128(c2[3943],simde_mm_xor_si128(c2[2576],c2[424])));
 
 //row: 40
-     d2[440]=_mm_xor_si128(c2[8],_mm_xor_si128(c2[6471],c2[3770]));
+     d2[440]=simde_mm_xor_si128(c2[8],simde_mm_xor_si128(c2[6471],c2[3770]));
 
 //row: 41
-     d2[451]=_mm_xor_si128(c2[5353],_mm_xor_si128(c2[6362],_mm_xor_si128(c2[4555],c2[402])));
+     d2[451]=simde_mm_xor_si128(c2[5353],simde_mm_xor_si128(c2[6362],simde_mm_xor_si128(c2[4555],c2[402])));
 
 //row: 42
-     d2[462]=_mm_xor_si128(c2[3880],_mm_xor_si128(c2[7265],_mm_xor_si128(c2[1456],_mm_xor_si128(c2[1940],_mm_xor_si128(c2[975],_mm_xor_si128(c2[1459],_mm_xor_si128(c2[7],_mm_xor_si128(c2[3902],_mm_xor_si128(c2[1961],_mm_xor_si128(c2[2445],_mm_xor_si128(c2[7288],_mm_xor_si128(c2[29],_mm_xor_si128(c2[4400],_mm_xor_si128(c2[1500],_mm_xor_si128(c2[2951],_mm_xor_si128(c2[3435],_mm_xor_si128(c2[5884],_mm_xor_si128(c2[1041],_mm_xor_si128(c2[1038],_mm_xor_si128(c2[1522],_mm_xor_si128(c2[2024],_mm_xor_si128(c2[7356],_mm_xor_si128(c2[97],_mm_xor_si128(c2[3967],_mm_xor_si128(c2[4451],_mm_xor_si128(c2[1545],_mm_xor_si128(c2[597],_mm_xor_si128(c2[6892],_mm_xor_si128(c2[118],_mm_xor_si128(c2[602],_mm_xor_si128(c2[3520],_mm_xor_si128(c2[4978],_mm_xor_si128(c2[5462],_mm_xor_si128(c2[5949],_mm_xor_si128(c2[6433],_mm_xor_si128(c2[1130],_mm_xor_si128(c2[647],_mm_xor_si128(c2[1131],_mm_xor_si128(c2[3542],_mm_xor_si128(c2[4026],_mm_xor_si128(c2[185],_mm_xor_si128(c2[6956],_mm_xor_si128(c2[7440],_mm_xor_si128(c2[5990],_mm_xor_si128(c2[6474],_mm_xor_si128(c2[2623],_mm_xor_si128(c2[2138],_mm_xor_si128(c2[3594],_mm_xor_si128(c2[4078],_mm_xor_si128(c2[6033],_mm_xor_si128(c2[229],_mm_xor_si128(c2[713],_mm_xor_si128(c2[5551],_mm_xor_si128(c2[6035],_mm_xor_si128(c2[1695],_mm_xor_si128(c2[4598],_mm_xor_si128(c2[3635],_mm_xor_si128(c2[4119],_mm_xor_si128(c2[2684],_mm_xor_si128(c2[7529],_mm_xor_si128(c2[5104],_mm_xor_si128(c2[5588],_mm_xor_si128(c2[5612],_mm_xor_si128(c2[7553],_mm_xor_si128(c2[294],_mm_xor_si128(c2[7068],_mm_xor_si128(c2[7552],_mm_xor_si128(c2[308],_mm_xor_si128(c2[7575],_mm_xor_si128(c2[316],_mm_xor_si128(c2[5149],_mm_xor_si128(c2[5633],_mm_xor_si128(c2[3722],_mm_xor_si128(c2[2271],_mm_xor_si128(c2[7114],_mm_xor_si128(c2[7598],_mm_xor_si128(c2[6644],_mm_xor_si128(c2[1810],_mm_xor_si128(c2[5197],_mm_xor_si128(c2[5681],_mm_xor_si128(c2[3771],_mm_xor_si128(c2[2319],_mm_xor_si128(c2[2803],_mm_xor_si128(c2[4249],_mm_xor_si128(c2[4733],_mm_xor_si128(c2[1373],_mm_xor_si128(c2[2332],_mm_xor_si128(c2[2816],_mm_xor_si128(c2[406],_mm_xor_si128(c2[890],_mm_xor_si128(c2[4774],_mm_xor_si128(c2[5743],_mm_xor_si128(c2[1873],_mm_xor_si128(c2[2357],_mm_xor_si128(c2[928],_mm_xor_si128(c2[4313],_mm_xor_si128(c2[4797],_mm_xor_si128(c2[5288],_mm_xor_si128(c2[5772],_mm_xor_si128(c2[7239],_mm_xor_si128(c2[471],_mm_xor_si128(c2[5310],c2[5794]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+     d2[462]=simde_mm_xor_si128(c2[3880],simde_mm_xor_si128(c2[7265],simde_mm_xor_si128(c2[1456],simde_mm_xor_si128(c2[1940],simde_mm_xor_si128(c2[975],simde_mm_xor_si128(c2[1459],simde_mm_xor_si128(c2[7],simde_mm_xor_si128(c2[3902],simde_mm_xor_si128(c2[1961],simde_mm_xor_si128(c2[2445],simde_mm_xor_si128(c2[7288],simde_mm_xor_si128(c2[29],simde_mm_xor_si128(c2[4400],simde_mm_xor_si128(c2[1500],simde_mm_xor_si128(c2[2951],simde_mm_xor_si128(c2[3435],simde_mm_xor_si128(c2[5884],simde_mm_xor_si128(c2[1041],simde_mm_xor_si128(c2[1038],simde_mm_xor_si128(c2[1522],simde_mm_xor_si128(c2[2024],simde_mm_xor_si128(c2[7356],simde_mm_xor_si128(c2[97],simde_mm_xor_si128(c2[3967],simde_mm_xor_si128(c2[4451],simde_mm_xor_si128(c2[1545],simde_mm_xor_si128(c2[597],simde_mm_xor_si128(c2[6892],simde_mm_xor_si128(c2[118],simde_mm_xor_si128(c2[602],simde_mm_xor_si128(c2[3520],simde_mm_xor_si128(c2[4978],simde_mm_xor_si128(c2[5462],simde_mm_xor_si128(c2[5949],simde_mm_xor_si128(c2[6433],simde_mm_xor_si128(c2[1130],simde_mm_xor_si128(c2[647],simde_mm_xor_si128(c2[1131],simde_mm_xor_si128(c2[3542],simde_mm_xor_si128(c2[4026],simde_mm_xor_si128(c2[185],simde_mm_xor_si128(c2[6956],simde_mm_xor_si128(c2[7440],simde_mm_xor_si128(c2[5990],simde_mm_xor_si128(c2[6474],simde_mm_xor_si128(c2[2623],simde_mm_xor_si128(c2[2138],simde_mm_xor_si128(c2[3594],simde_mm_xor_si128(c2[4078],simde_mm_xor_si128(c2[6033],simde_mm_xor_si128(c2[229],simde_mm_xor_si128(c2[713],simde_mm_xor_si128(c2[5551],simde_mm_xor_si128(c2[6035],simde_mm_xor_si128(c2[1695],simde_mm_xor_si128(c2[4598],simde_mm_xor_si128(c2[3635],simde_mm_xor_si128(c2[4119],simde_mm_xor_si128(c2[2684],simde_mm_xor_si128(c2[7529],simde_mm_xor_si128(c2[5104],simde_mm_xor_si128(c2[5588],simde_mm_xor_si128(c2[5612],simde_mm_xor_si128(c2[7553],simde_mm_xor_si128(c2[294],simde_mm_xor_si128(c2[7068],simde_mm_xor_si128(c2[7552],simde_mm_xor_si128(c2[308],simde_mm_xor_si128(c2[7575],simde_mm_xor_si128(c2[316],simde_mm_xor_si128(c2[5149],simde_mm_xor_si128(c2[5633],simde_mm_xor_si128(c2[3722],simde_mm_xor_si128(c2[2271],simde_mm_xor_si128(c2[7114],simde_mm_xor_si128(c2[7598],simde_mm_xor_si128(c2[6644],simde_mm_xor_si128(c2[1810],simde_mm_xor_si128(c2[5197],simde_mm_xor_si128(c2[5681],simde_mm_xor_si128(c2[3771],simde_mm_xor_si128(c2[2319],simde_mm_xor_si128(c2[2803],simde_mm_xor_si128(c2[4249],simde_mm_xor_si128(c2[4733],simde_mm_xor_si128(c2[1373],simde_mm_xor_si128(c2[2332],simde_mm_xor_si128(c2[2816],simde_mm_xor_si128(c2[406],simde_mm_xor_si128(c2[890],simde_mm_xor_si128(c2[4774],simde_mm_xor_si128(c2[5743],simde_mm_xor_si128(c2[1873],simde_mm_xor_si128(c2[2357],simde_mm_xor_si128(c2[928],simde_mm_xor_si128(c2[4313],simde_mm_xor_si128(c2[4797],simde_mm_xor_si128(c2[5288],simde_mm_xor_si128(c2[5772],simde_mm_xor_si128(c2[7239],simde_mm_xor_si128(c2[471],simde_mm_xor_si128(c2[5310],c2[5794]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
 
 //row: 43
-     d2[473]=_mm_xor_si128(c2[6782],_mm_xor_si128(c2[2424],_mm_xor_si128(c2[4842],_mm_xor_si128(c2[3877],_mm_xor_si128(c2[4361],_mm_xor_si128(c2[6804],_mm_xor_si128(c2[5347],_mm_xor_si128(c2[2447],_mm_xor_si128(c2[2931],_mm_xor_si128(c2[992],_mm_xor_si128(c2[7313],_mm_xor_si128(c2[4402],_mm_xor_si128(c2[6337],_mm_xor_si128(c2[1043],_mm_xor_si128(c2[3943],_mm_xor_si128(c2[3940],_mm_xor_si128(c2[4424],_mm_xor_si128(c2[4937],_mm_xor_si128(c2[2999],_mm_xor_si128(c2[6869],_mm_xor_si128(c2[7353],_mm_xor_si128(c2[3499],_mm_xor_si128(c2[2051],_mm_xor_si128(c2[3504],_mm_xor_si128(c2[6433],_mm_xor_si128(c2[621],_mm_xor_si128(c2[1108],_mm_xor_si128(c2[1592],_mm_xor_si128(c2[4032],_mm_xor_si128(c2[4033],_mm_xor_si128(c2[6455],_mm_xor_si128(c2[6939],_mm_xor_si128(c2[3087],_mm_xor_si128(c2[2599],_mm_xor_si128(c2[1149],_mm_xor_si128(c2[1633],_mm_xor_si128(c2[5525],_mm_xor_si128(c2[5040],_mm_xor_si128(c2[6980],_mm_xor_si128(c2[1192],_mm_xor_si128(c2[3615],_mm_xor_si128(c2[710],_mm_xor_si128(c2[1194],_mm_xor_si128(c2[4608],_mm_xor_si128(c2[7511],_mm_xor_si128(c2[6537],_mm_xor_si128(c2[7021],_mm_xor_si128(c2[5597],_mm_xor_si128(c2[2688],_mm_xor_si128(c2[274],_mm_xor_si128(c2[758],_mm_xor_si128(c2[771],_mm_xor_si128(c2[3196],_mm_xor_si128(c2[2227],_mm_xor_si128(c2[2711],_mm_xor_si128(c2[3221],_mm_xor_si128(c2[3218],_mm_xor_si128(c2[308],_mm_xor_si128(c2[792],_mm_xor_si128(c2[6624],_mm_xor_si128(c2[5173],_mm_xor_si128(c2[2757],_mm_xor_si128(c2[1814],_mm_xor_si128(c2[4712],_mm_xor_si128(c2[356],_mm_xor_si128(c2[840],_mm_xor_si128(c2[1804],_mm_xor_si128(c2[6673],_mm_xor_si128(c2[5705],_mm_xor_si128(c2[7151],_mm_xor_si128(c2[7635],_mm_xor_si128(c2[4275],_mm_xor_si128(c2[5729],_mm_xor_si128(c2[3308],_mm_xor_si128(c2[3792],_mm_xor_si128(c2[2819],_mm_xor_si128(c2[7687],_mm_xor_si128(c2[902],_mm_xor_si128(c2[5259],_mm_xor_si128(c2[3830],_mm_xor_si128(c2[7710],_mm_xor_si128(c2[447],_mm_xor_si128(c2[931],_mm_xor_si128(c2[2398],_mm_xor_si128(c2[3373],_mm_xor_si128(c2[469],c2[953]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+     d2[473]=simde_mm_xor_si128(c2[6782],simde_mm_xor_si128(c2[2424],simde_mm_xor_si128(c2[4842],simde_mm_xor_si128(c2[3877],simde_mm_xor_si128(c2[4361],simde_mm_xor_si128(c2[6804],simde_mm_xor_si128(c2[5347],simde_mm_xor_si128(c2[2447],simde_mm_xor_si128(c2[2931],simde_mm_xor_si128(c2[992],simde_mm_xor_si128(c2[7313],simde_mm_xor_si128(c2[4402],simde_mm_xor_si128(c2[6337],simde_mm_xor_si128(c2[1043],simde_mm_xor_si128(c2[3943],simde_mm_xor_si128(c2[3940],simde_mm_xor_si128(c2[4424],simde_mm_xor_si128(c2[4937],simde_mm_xor_si128(c2[2999],simde_mm_xor_si128(c2[6869],simde_mm_xor_si128(c2[7353],simde_mm_xor_si128(c2[3499],simde_mm_xor_si128(c2[2051],simde_mm_xor_si128(c2[3504],simde_mm_xor_si128(c2[6433],simde_mm_xor_si128(c2[621],simde_mm_xor_si128(c2[1108],simde_mm_xor_si128(c2[1592],simde_mm_xor_si128(c2[4032],simde_mm_xor_si128(c2[4033],simde_mm_xor_si128(c2[6455],simde_mm_xor_si128(c2[6939],simde_mm_xor_si128(c2[3087],simde_mm_xor_si128(c2[2599],simde_mm_xor_si128(c2[1149],simde_mm_xor_si128(c2[1633],simde_mm_xor_si128(c2[5525],simde_mm_xor_si128(c2[5040],simde_mm_xor_si128(c2[6980],simde_mm_xor_si128(c2[1192],simde_mm_xor_si128(c2[3615],simde_mm_xor_si128(c2[710],simde_mm_xor_si128(c2[1194],simde_mm_xor_si128(c2[4608],simde_mm_xor_si128(c2[7511],simde_mm_xor_si128(c2[6537],simde_mm_xor_si128(c2[7021],simde_mm_xor_si128(c2[5597],simde_mm_xor_si128(c2[2688],simde_mm_xor_si128(c2[274],simde_mm_xor_si128(c2[758],simde_mm_xor_si128(c2[771],simde_mm_xor_si128(c2[3196],simde_mm_xor_si128(c2[2227],simde_mm_xor_si128(c2[2711],simde_mm_xor_si128(c2[3221],simde_mm_xor_si128(c2[3218],simde_mm_xor_si128(c2[308],simde_mm_xor_si128(c2[792],simde_mm_xor_si128(c2[6624],simde_mm_xor_si128(c2[5173],simde_mm_xor_si128(c2[2757],simde_mm_xor_si128(c2[1814],simde_mm_xor_si128(c2[4712],simde_mm_xor_si128(c2[356],simde_mm_xor_si128(c2[840],simde_mm_xor_si128(c2[1804],simde_mm_xor_si128(c2[6673],simde_mm_xor_si128(c2[5705],simde_mm_xor_si128(c2[7151],simde_mm_xor_si128(c2[7635],simde_mm_xor_si128(c2[4275],simde_mm_xor_si128(c2[5729],simde_mm_xor_si128(c2[3308],simde_mm_xor_si128(c2[3792],simde_mm_xor_si128(c2[2819],simde_mm_xor_si128(c2[7687],simde_mm_xor_si128(c2[902],simde_mm_xor_si128(c2[5259],simde_mm_xor_si128(c2[3830],simde_mm_xor_si128(c2[7710],simde_mm_xor_si128(c2[447],simde_mm_xor_si128(c2[931],simde_mm_xor_si128(c2[2398],simde_mm_xor_si128(c2[3373],simde_mm_xor_si128(c2[469],c2[953]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
 
 //row: 44
-     d2[484]=_mm_xor_si128(c2[2906],_mm_xor_si128(c2[6302],_mm_xor_si128(c2[977],_mm_xor_si128(c2[485],_mm_xor_si128(c2[7264],_mm_xor_si128(c2[2928],_mm_xor_si128(c2[1482],_mm_xor_si128(c2[6798],_mm_xor_si128(c2[3437],_mm_xor_si128(c2[537],_mm_xor_si128(c2[2472],_mm_xor_si128(c2[4910],_mm_xor_si128(c2[67],_mm_xor_si128(c2[559],_mm_xor_si128(c2[1061],_mm_xor_si128(c2[6866],_mm_xor_si128(c2[3477],_mm_xor_si128(c2[7377],_mm_xor_si128(c2[5918],_mm_xor_si128(c2[7371],_mm_xor_si128(c2[2557],_mm_xor_si128(c2[4488],_mm_xor_si128(c2[5459],_mm_xor_si128(c2[156],_mm_xor_si128(c2[157],_mm_xor_si128(c2[3063],_mm_xor_si128(c2[2094],_mm_xor_si128(c2[6954],_mm_xor_si128(c2[6477],_mm_xor_si128(c2[5500],_mm_xor_si128(c2[1660],_mm_xor_si128(c2[1175],_mm_xor_si128(c2[3104],_mm_xor_si128(c2[2625],_mm_xor_si128(c2[5070],_mm_xor_si128(c2[7482],_mm_xor_si128(c2[5061],_mm_xor_si128(c2[732],_mm_xor_si128(c2[3635],_mm_xor_si128(c2[3156],_mm_xor_si128(c2[1721],_mm_xor_si128(c2[6566],_mm_xor_si128(c2[4625],_mm_xor_si128(c2[4649],_mm_xor_si128(c2[7063],_mm_xor_si128(c2[6578],_mm_xor_si128(c2[7088],_mm_xor_si128(c2[7085],_mm_xor_si128(c2[4670],_mm_xor_si128(c2[2759],_mm_xor_si128(c2[1308],_mm_xor_si128(c2[6624],_mm_xor_si128(c2[5681],_mm_xor_si128(c2[836],_mm_xor_si128(c2[4718],_mm_xor_si128(c2[2797],_mm_xor_si128(c2[1829],_mm_xor_si128(c2[3770],_mm_xor_si128(c2[399],_mm_xor_si128(c2[1853],_mm_xor_si128(c2[7659],_mm_xor_si128(c2[3811],_mm_xor_si128(c2[4780],_mm_xor_si128(c2[1394],_mm_xor_si128(c2[7708],_mm_xor_si128(c2[3834],_mm_xor_si128(c2[4798],_mm_xor_si128(c2[6276],_mm_xor_si128(c2[7240],c2[4820])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+     d2[484]=simde_mm_xor_si128(c2[2906],simde_mm_xor_si128(c2[6302],simde_mm_xor_si128(c2[977],simde_mm_xor_si128(c2[485],simde_mm_xor_si128(c2[7264],simde_mm_xor_si128(c2[2928],simde_mm_xor_si128(c2[1482],simde_mm_xor_si128(c2[6798],simde_mm_xor_si128(c2[3437],simde_mm_xor_si128(c2[537],simde_mm_xor_si128(c2[2472],simde_mm_xor_si128(c2[4910],simde_mm_xor_si128(c2[67],simde_mm_xor_si128(c2[559],simde_mm_xor_si128(c2[1061],simde_mm_xor_si128(c2[6866],simde_mm_xor_si128(c2[3477],simde_mm_xor_si128(c2[7377],simde_mm_xor_si128(c2[5918],simde_mm_xor_si128(c2[7371],simde_mm_xor_si128(c2[2557],simde_mm_xor_si128(c2[4488],simde_mm_xor_si128(c2[5459],simde_mm_xor_si128(c2[156],simde_mm_xor_si128(c2[157],simde_mm_xor_si128(c2[3063],simde_mm_xor_si128(c2[2094],simde_mm_xor_si128(c2[6954],simde_mm_xor_si128(c2[6477],simde_mm_xor_si128(c2[5500],simde_mm_xor_si128(c2[1660],simde_mm_xor_si128(c2[1175],simde_mm_xor_si128(c2[3104],simde_mm_xor_si128(c2[2625],simde_mm_xor_si128(c2[5070],simde_mm_xor_si128(c2[7482],simde_mm_xor_si128(c2[5061],simde_mm_xor_si128(c2[732],simde_mm_xor_si128(c2[3635],simde_mm_xor_si128(c2[3156],simde_mm_xor_si128(c2[1721],simde_mm_xor_si128(c2[6566],simde_mm_xor_si128(c2[4625],simde_mm_xor_si128(c2[4649],simde_mm_xor_si128(c2[7063],simde_mm_xor_si128(c2[6578],simde_mm_xor_si128(c2[7088],simde_mm_xor_si128(c2[7085],simde_mm_xor_si128(c2[4670],simde_mm_xor_si128(c2[2759],simde_mm_xor_si128(c2[1308],simde_mm_xor_si128(c2[6624],simde_mm_xor_si128(c2[5681],simde_mm_xor_si128(c2[836],simde_mm_xor_si128(c2[4718],simde_mm_xor_si128(c2[2797],simde_mm_xor_si128(c2[1829],simde_mm_xor_si128(c2[3770],simde_mm_xor_si128(c2[399],simde_mm_xor_si128(c2[1853],simde_mm_xor_si128(c2[7659],simde_mm_xor_si128(c2[3811],simde_mm_xor_si128(c2[4780],simde_mm_xor_si128(c2[1394],simde_mm_xor_si128(c2[7708],simde_mm_xor_si128(c2[3834],simde_mm_xor_si128(c2[4798],simde_mm_xor_si128(c2[6276],simde_mm_xor_si128(c2[7240],c2[4820])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
 
 //row: 45
-     d2[495]=_mm_xor_si128(c2[995],_mm_xor_si128(c2[1588],c2[5547]));
+     d2[495]=simde_mm_xor_si128(c2[995],simde_mm_xor_si128(c2[1588],c2[5547]));
   }
 }
diff --git a/openair1/PHY/CODING/nrLDPC_encoder/ldpc192_byte.c b/openair1/PHY/CODING/nrLDPC_encoder/ldpc192_byte.c
index 1dfdfa3d9bb3b0ba0f433681d9df65fabdb797a8..32b7f6c19dfedc8c3f82847876b1655a98d25f68 100644
--- a/openair1/PHY/CODING/nrLDPC_encoder/ldpc192_byte.c
+++ b/openair1/PHY/CODING/nrLDPC_encoder/ldpc192_byte.c
@@ -1,9 +1,10 @@
+#ifdef __AVX2__
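+// 256-bit AVX2 build of this encoder; on hosts without __AVX2__, the
+// 128-bit variant in ldpc192_byte_128.c (added below) is compiled instead.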
 #include "PHY/sse_intrin.h"
 // generated code for Zc=192, byte encoding
 static inline void ldpc192_byte(uint8_t *c,uint8_t *d) {
-  __m256i *csimd=(__m256i *)c,*dsimd=(__m256i *)d;
+  simde__m256i *csimd=(simde__m256i *)c,*dsimd=(simde__m256i *)d;
 
-  __m256i *c2,*d2;
+  simde__m256i *c2,*d2;
 
   int i2;
   for (i2=0; i2<6; i2++) {
@@ -149,3 +150,4 @@ static inline void ldpc192_byte(uint8_t *c,uint8_t *d) {
      d2[270]=simde_mm256_xor_si256(c2[1864],simde_mm256_xor_si256(c2[5620],c2[4080]));
   }
 }
+#endif
diff --git a/openair1/PHY/CODING/nrLDPC_encoder/ldpc192_byte_128.c b/openair1/PHY/CODING/nrLDPC_encoder/ldpc192_byte_128.c
new file mode 100644
index 0000000000000000000000000000000000000000..72268702680a703df03562772d5d99c0beb4c8f2
--- /dev/null
+++ b/openair1/PHY/CODING/nrLDPC_encoder/ldpc192_byte_128.c
@@ -0,0 +1,153 @@
+#ifndef __AVX2__
+#include "PHY/sse_intrin.h"
+// generated code for Zc=192, byte encoding
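+// Note: simde translates these simde_mm_* 128-bit intrinsics to SSE2 on x86
+// and to NEON on Arm, so this fallback also builds on non-x86 hosts.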
+static inline void ldpc192_byte(uint8_t *c,uint8_t *d) {
+  simde__m128i *csimd=(simde__m128i *)c,*dsimd=(simde__m128i *)d;
+
+  simde__m128i *c2,*d2;
+
+  int i2;
+  for (i2=0; i2<12; i2++) {
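+     // 12 iterations over 128-bit lanes cover the same payload as the
+     // 6 iterations over 256-bit lanes in the AVX2 version above.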
+     c2=&csimd[i2];
+     d2=&dsimd[i2];
+
+//row: 0
+     d2[0]=simde_mm_xor_si128(c2[1591],simde_mm_xor_si128(c2[6340],simde_mm_xor_si128(c2[6864],simde_mm_xor_si128(c2[2117],simde_mm_xor_si128(c2[1609],simde_mm_xor_si128(c2[5307],simde_mm_xor_si128(c2[3725],simde_mm_xor_si128(c2[1107],simde_mm_xor_si128(c2[6388],simde_mm_xor_si128(c2[4280],simde_mm_xor_si128(c2[611],simde_mm_xor_si128(c2[4828],simde_mm_xor_si128(c2[72],simde_mm_xor_si128(c2[102],simde_mm_xor_si128(c2[6440],simde_mm_xor_si128(c2[1685],simde_mm_xor_si128(c2[2771],simde_mm_xor_si128(c2[129],simde_mm_xor_si128(c2[124],simde_mm_xor_si128(c2[4369],simde_mm_xor_si128(c2[682],simde_mm_xor_si128(c2[3840],simde_mm_xor_si128(c2[5984],simde_mm_xor_si128(c2[5980],simde_mm_xor_si128(c2[4929],simde_mm_xor_si128(c2[6008],simde_mm_xor_si128(c2[202],simde_mm_xor_si128(c2[4419],simde_mm_xor_si128(c2[7087],simde_mm_xor_si128(c2[1283],simde_mm_xor_si128(c2[8139],simde_mm_xor_si128(c2[246],simde_mm_xor_si128(c2[776],simde_mm_xor_si128(c2[2360],simde_mm_xor_si128(c2[7134],simde_mm_xor_si128(c2[3966],simde_mm_xor_si128(c2[799],simde_mm_xor_si128(c2[817],simde_mm_xor_si128(c2[3465],simde_mm_xor_si128(c2[3986],simde_mm_xor_si128(c2[2962],simde_mm_xor_si128(c2[4536],simde_mm_xor_si128(c2[2961],simde_mm_xor_si128(c2[5089],simde_mm_xor_si128(c2[4565],simde_mm_xor_si128(c2[2449],simde_mm_xor_si128(c2[4057],simde_mm_xor_si128(c2[1950],simde_mm_xor_si128(c2[3003],simde_mm_xor_si128(c2[5670],simde_mm_xor_si128(c2[1450],simde_mm_xor_si128(c2[391],simde_mm_xor_si128(c2[1471],simde_mm_xor_si128(c2[2000],simde_mm_xor_si128(c2[6750],simde_mm_xor_si128(c2[1491],simde_mm_xor_si128(c2[435],simde_mm_xor_si128(c2[8356],simde_mm_xor_si128(c2[2579],simde_mm_xor_si128(c2[6272],simde_mm_xor_si128(c2[7320],simde_mm_xor_si128(c2[5768],simde_mm_xor_si128(c2[7344],simde_mm_xor_si128(c2[4178],simde_mm_xor_si128(c2[5793],simde_mm_xor_si128(c2[511],c2[3154]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 1
+     d2[12]=simde_mm_xor_si128(c2[1591],simde_mm_xor_si128(c2[2119],simde_mm_xor_si128(c2[6868],simde_mm_xor_si128(c2[7392],simde_mm_xor_si128(c2[2645],simde_mm_xor_si128(c2[1609],simde_mm_xor_si128(c2[2137],simde_mm_xor_si128(c2[5835],simde_mm_xor_si128(c2[4253],simde_mm_xor_si128(c2[1107],simde_mm_xor_si128(c2[1635],simde_mm_xor_si128(c2[6916],simde_mm_xor_si128(c2[4808],simde_mm_xor_si128(c2[611],simde_mm_xor_si128(c2[1139],simde_mm_xor_si128(c2[5356],simde_mm_xor_si128(c2[600],simde_mm_xor_si128(c2[630],simde_mm_xor_si128(c2[6968],simde_mm_xor_si128(c2[2213],simde_mm_xor_si128(c2[2771],simde_mm_xor_si128(c2[3299],simde_mm_xor_si128(c2[657],simde_mm_xor_si128(c2[652],simde_mm_xor_si128(c2[4369],simde_mm_xor_si128(c2[4897],simde_mm_xor_si128(c2[1210],simde_mm_xor_si128(c2[4368],simde_mm_xor_si128(c2[6512],simde_mm_xor_si128(c2[6508],simde_mm_xor_si128(c2[5457],simde_mm_xor_si128(c2[6536],simde_mm_xor_si128(c2[730],simde_mm_xor_si128(c2[4947],simde_mm_xor_si128(c2[7087],simde_mm_xor_si128(c2[7615],simde_mm_xor_si128(c2[1811],simde_mm_xor_si128(c2[220],simde_mm_xor_si128(c2[246],simde_mm_xor_si128(c2[774],simde_mm_xor_si128(c2[1304],simde_mm_xor_si128(c2[2888],simde_mm_xor_si128(c2[7134],simde_mm_xor_si128(c2[7662],simde_mm_xor_si128(c2[4494],simde_mm_xor_si128(c2[1327],simde_mm_xor_si128(c2[817],simde_mm_xor_si128(c2[1345],simde_mm_xor_si128(c2[3993],simde_mm_xor_si128(c2[4514],simde_mm_xor_si128(c2[2962],simde_mm_xor_si128(c2[3490],simde_mm_xor_si128(c2[5064],simde_mm_xor_si128(c2[3489],simde_mm_xor_si128(c2[5617],simde_mm_xor_si128(c2[5093],simde_mm_xor_si128(c2[2977],simde_mm_xor_si128(c2[4057],simde_mm_xor_si128(c2[4585],simde_mm_xor_si128(c2[2478],simde_mm_xor_si128(c2[3531],simde_mm_xor_si128(c2[5670],simde_mm_xor_si128(c2[6198],simde_mm_xor_si128(c2[1978],simde_mm_xor_si128(c2[919],simde_mm_xor_si128(c2[1999],simde_mm_xor_si128(c2[2528],simde_mm_xor_si128(c2[7278],simde_mm_xor_si128(c2[1491],simde_mm_xor_si128(c2[2019],simde_mm_xor_si128(c2[963],simde_mm_xor_si128(c2[437],simde_mm_xor_si128(c2[2579],simde_mm_xor_si128(c2[3107],simde_mm_xor_si128(c2[6800],simde_mm_xor_si128(c2[7848],simde_mm_xor_si128(c2[5768],simde_mm_xor_si128(c2[6296],simde_mm_xor_si128(c2[7872],simde_mm_xor_si128(c2[4706],simde_mm_xor_si128(c2[5793],simde_mm_xor_si128(c2[6321],simde_mm_xor_si128(c2[1039],c2[3682])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 2
+     d2[24]=simde_mm_xor_si128(c2[2119],simde_mm_xor_si128(c2[6868],simde_mm_xor_si128(c2[6864],simde_mm_xor_si128(c2[7392],simde_mm_xor_si128(c2[2117],simde_mm_xor_si128(c2[2645],simde_mm_xor_si128(c2[2137],simde_mm_xor_si128(c2[5307],simde_mm_xor_si128(c2[5835],simde_mm_xor_si128(c2[3725],simde_mm_xor_si128(c2[4253],simde_mm_xor_si128(c2[1635],simde_mm_xor_si128(c2[6916],simde_mm_xor_si128(c2[4280],simde_mm_xor_si128(c2[4808],simde_mm_xor_si128(c2[1139],simde_mm_xor_si128(c2[5356],simde_mm_xor_si128(c2[72],simde_mm_xor_si128(c2[600],simde_mm_xor_si128(c2[630],simde_mm_xor_si128(c2[6440],simde_mm_xor_si128(c2[6968],simde_mm_xor_si128(c2[1685],simde_mm_xor_si128(c2[2213],simde_mm_xor_si128(c2[3299],simde_mm_xor_si128(c2[657],simde_mm_xor_si128(c2[124],simde_mm_xor_si128(c2[652],simde_mm_xor_si128(c2[4897],simde_mm_xor_si128(c2[682],simde_mm_xor_si128(c2[1210],simde_mm_xor_si128(c2[3840],simde_mm_xor_si128(c2[4368],simde_mm_xor_si128(c2[6512],simde_mm_xor_si128(c2[5980],simde_mm_xor_si128(c2[6508],simde_mm_xor_si128(c2[4929],simde_mm_xor_si128(c2[5457],simde_mm_xor_si128(c2[6536],simde_mm_xor_si128(c2[202],simde_mm_xor_si128(c2[730],simde_mm_xor_si128(c2[4419],simde_mm_xor_si128(c2[4947],simde_mm_xor_si128(c2[7615],simde_mm_xor_si128(c2[1811],simde_mm_xor_si128(c2[8139],simde_mm_xor_si128(c2[220],simde_mm_xor_si128(c2[774],simde_mm_xor_si128(c2[776],simde_mm_xor_si128(c2[1304],simde_mm_xor_si128(c2[2360],simde_mm_xor_si128(c2[2888],simde_mm_xor_si128(c2[7662],simde_mm_xor_si128(c2[4494],simde_mm_xor_si128(c2[799],simde_mm_xor_si128(c2[1327],simde_mm_xor_si128(c2[1345],simde_mm_xor_si128(c2[3993],simde_mm_xor_si128(c2[3986],simde_mm_xor_si128(c2[4514],simde_mm_xor_si128(c2[3490],simde_mm_xor_si128(c2[4536],simde_mm_xor_si128(c2[5064],simde_mm_xor_si128(c2[2961],simde_mm_xor_si128(c2[3489],simde_mm_xor_si128(c2[5617],simde_mm_xor_si128(c2[4565],simde_mm_xor_si128(c2[5093],simde_mm_xor_si128(c2[2449],simde_mm_xor_si128(c2[2977],simde_mm_xor_si128(c2[4585],simde_mm_xor_si128(c2[2478],simde_mm_xor_si128(c2[3003],simde_mm_xor_si128(c2[3531],simde_mm_xor_si128(c2[6198],simde_mm_xor_si128(c2[1978],simde_mm_xor_si128(c2[391],simde_mm_xor_si128(c2[919],simde_mm_xor_si128(c2[1999],simde_mm_xor_si128(c2[2000],simde_mm_xor_si128(c2[2528],simde_mm_xor_si128(c2[6750],simde_mm_xor_si128(c2[7278],simde_mm_xor_si128(c2[2019],simde_mm_xor_si128(c2[435],simde_mm_xor_si128(c2[963],simde_mm_xor_si128(c2[8356],simde_mm_xor_si128(c2[437],simde_mm_xor_si128(c2[3107],simde_mm_xor_si128(c2[6800],simde_mm_xor_si128(c2[7320],simde_mm_xor_si128(c2[7848],simde_mm_xor_si128(c2[6296],simde_mm_xor_si128(c2[7344],simde_mm_xor_si128(c2[7872],simde_mm_xor_si128(c2[4178],simde_mm_xor_si128(c2[4706],simde_mm_xor_si128(c2[6321],simde_mm_xor_si128(c2[1039],simde_mm_xor_si128(c2[3154],c2[3682]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 3
+     d2[36]=simde_mm_xor_si128(c2[2119],simde_mm_xor_si128(c2[6868],simde_mm_xor_si128(c2[7392],simde_mm_xor_si128(c2[2117],simde_mm_xor_si128(c2[2645],simde_mm_xor_si128(c2[2137],simde_mm_xor_si128(c2[5835],simde_mm_xor_si128(c2[3725],simde_mm_xor_si128(c2[4253],simde_mm_xor_si128(c2[1635],simde_mm_xor_si128(c2[6916],simde_mm_xor_si128(c2[4808],simde_mm_xor_si128(c2[1139],simde_mm_xor_si128(c2[5356],simde_mm_xor_si128(c2[72],simde_mm_xor_si128(c2[600],simde_mm_xor_si128(c2[630],simde_mm_xor_si128(c2[6968],simde_mm_xor_si128(c2[1685],simde_mm_xor_si128(c2[2213],simde_mm_xor_si128(c2[3299],simde_mm_xor_si128(c2[657],simde_mm_xor_si128(c2[652],simde_mm_xor_si128(c2[4897],simde_mm_xor_si128(c2[1210],simde_mm_xor_si128(c2[3840],simde_mm_xor_si128(c2[4368],simde_mm_xor_si128(c2[6512],simde_mm_xor_si128(c2[6508],simde_mm_xor_si128(c2[4929],simde_mm_xor_si128(c2[5457],simde_mm_xor_si128(c2[6536],simde_mm_xor_si128(c2[730],simde_mm_xor_si128(c2[4419],simde_mm_xor_si128(c2[4947],simde_mm_xor_si128(c2[7615],simde_mm_xor_si128(c2[1811],simde_mm_xor_si128(c2[220],simde_mm_xor_si128(c2[774],simde_mm_xor_si128(c2[1304],simde_mm_xor_si128(c2[2360],simde_mm_xor_si128(c2[2888],simde_mm_xor_si128(c2[7662],simde_mm_xor_si128(c2[4494],simde_mm_xor_si128(c2[799],simde_mm_xor_si128(c2[1327],simde_mm_xor_si128(c2[1345],simde_mm_xor_si128(c2[3993],simde_mm_xor_si128(c2[3986],simde_mm_xor_si128(c2[4514],simde_mm_xor_si128(c2[3490],simde_mm_xor_si128(c2[5064],simde_mm_xor_si128(c2[2961],simde_mm_xor_si128(c2[3489],simde_mm_xor_si128(c2[5617],simde_mm_xor_si128(c2[5093],simde_mm_xor_si128(c2[2449],simde_mm_xor_si128(c2[2977],simde_mm_xor_si128(c2[4585],simde_mm_xor_si128(c2[2478],simde_mm_xor_si128(c2[3531],simde_mm_xor_si128(c2[6198],simde_mm_xor_si128(c2[1978],simde_mm_xor_si128(c2[391],simde_mm_xor_si128(c2[919],simde_mm_xor_si128(c2[1999],simde_mm_xor_si128(c2[2528],simde_mm_xor_si128(c2[6750],simde_mm_xor_si128(c2[7278],simde_mm_xor_si128(c2[2019],simde_mm_xor_si128(c2[963],simde_mm_xor_si128(c2[8356],simde_mm_xor_si128(c2[437],simde_mm_xor_si128(c2[3107],simde_mm_xor_si128(c2[6800],simde_mm_xor_si128(c2[7848],simde_mm_xor_si128(c2[6296],simde_mm_xor_si128(c2[7872],simde_mm_xor_si128(c2[4178],simde_mm_xor_si128(c2[4706],simde_mm_xor_si128(c2[6321],simde_mm_xor_si128(c2[1039],simde_mm_xor_si128(c2[3154],c2[3682])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 4
+     d2[48]=simde_mm_xor_si128(c2[6344],c2[2675]);
+
+//row: 5
+     d2[60]=simde_mm_xor_si128(c2[5],simde_mm_xor_si128(c2[4754],simde_mm_xor_si128(c2[5290],simde_mm_xor_si128(c2[531],simde_mm_xor_si128(c2[1584],simde_mm_xor_si128(c2[35],simde_mm_xor_si128(c2[3721],simde_mm_xor_si128(c2[2139],simde_mm_xor_si128(c2[7416],simde_mm_xor_si128(c2[7968],simde_mm_xor_si128(c2[4802],simde_mm_xor_si128(c2[2694],simde_mm_xor_si128(c2[7472],simde_mm_xor_si128(c2[3242],simde_mm_xor_si128(c2[6945],simde_mm_xor_si128(c2[1663],simde_mm_xor_si128(c2[6963],simde_mm_xor_si128(c2[4854],simde_mm_xor_si128(c2[99],simde_mm_xor_si128(c2[1185],simde_mm_xor_si128(c2[6990],simde_mm_xor_si128(c2[6985],simde_mm_xor_si128(c2[2795],simde_mm_xor_si128(c2[7543],simde_mm_xor_si128(c2[2266],simde_mm_xor_si128(c2[4398],simde_mm_xor_si128(c2[4394],simde_mm_xor_si128(c2[3343],simde_mm_xor_si128(c2[4422],simde_mm_xor_si128(c2[7063],simde_mm_xor_si128(c2[2833],simde_mm_xor_si128(c2[5501],simde_mm_xor_si128(c2[8144],simde_mm_xor_si128(c2[6553],simde_mm_xor_si128(c2[7107],simde_mm_xor_si128(c2[7637],simde_mm_xor_si128(c2[774],simde_mm_xor_si128(c2[5548],simde_mm_xor_si128(c2[2380],simde_mm_xor_si128(c2[7660],simde_mm_xor_si128(c2[7690],simde_mm_xor_si128(c2[1879],simde_mm_xor_si128(c2[2400],simde_mm_xor_si128(c2[3466],simde_mm_xor_si128(c2[1376],simde_mm_xor_si128(c2[2962],simde_mm_xor_si128(c2[1375],simde_mm_xor_si128(c2[3515],simde_mm_xor_si128(c2[2979],simde_mm_xor_si128(c2[875],simde_mm_xor_si128(c2[2483],simde_mm_xor_si128(c2[364],simde_mm_xor_si128(c2[1417],simde_mm_xor_si128(c2[4084],simde_mm_xor_si128(c2[8311],simde_mm_xor_si128(c2[7252],simde_mm_xor_si128(c2[915],simde_mm_xor_si128(c2[8332],simde_mm_xor_si128(c2[414],simde_mm_xor_si128(c2[5164],simde_mm_xor_si128(c2[8352],simde_mm_xor_si128(c2[7296],simde_mm_xor_si128(c2[6770],simde_mm_xor_si128(c2[993],simde_mm_xor_si128(c2[4686],simde_mm_xor_si128(c2[5746],simde_mm_xor_si128(c2[4182],simde_mm_xor_si128(c2[5770],simde_mm_xor_si128(c2[2592],simde_mm_xor_si128(c2[4207],simde_mm_xor_si128(c2[7372],simde_mm_xor_si128(c2[1568],c2[2091]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 6
+     d2[72]=simde_mm_xor_si128(c2[3173],simde_mm_xor_si128(c2[676],simde_mm_xor_si128(c2[768],simde_mm_xor_si128(c2[8193],simde_mm_xor_si128(c2[6653],simde_mm_xor_si128(c2[7275],simde_mm_xor_si128(c2[1489],c2[1010])))))));
+
+//row: 7
+     d2[84]=simde_mm_xor_si128(c2[4752],simde_mm_xor_si128(c2[7419],simde_mm_xor_si128(c2[6439],simde_mm_xor_si128(c2[7040],simde_mm_xor_si128(c2[1254],c2[1399])))));
+
+//row: 8
+     d2[96]=simde_mm_xor_si128(c2[3179],simde_mm_xor_si128(c2[7397],simde_mm_xor_si128(c2[7928],simde_mm_xor_si128(c2[3699],simde_mm_xor_si128(c2[5],simde_mm_xor_si128(c2[3707],simde_mm_xor_si128(c2[4235],simde_mm_xor_si128(c2[3705],simde_mm_xor_si128(c2[7395],simde_mm_xor_si128(c2[7923],simde_mm_xor_si128(c2[1591],simde_mm_xor_si128(c2[3197],simde_mm_xor_si128(c2[7427],simde_mm_xor_si128(c2[6895],simde_mm_xor_si128(c2[2138],simde_mm_xor_si128(c2[2666],simde_mm_xor_si128(c2[5313],simde_mm_xor_si128(c2[556],simde_mm_xor_si128(c2[1084],simde_mm_xor_si128(c2[1619],simde_mm_xor_si128(c2[2695],simde_mm_xor_si128(c2[6913],simde_mm_xor_si128(c2[7976],simde_mm_xor_si128(c2[3747],simde_mm_xor_si128(c2[5856],simde_mm_xor_si128(c2[1111],simde_mm_xor_si128(c2[1639],simde_mm_xor_si128(c2[2187],simde_mm_xor_si128(c2[6417],simde_mm_xor_si128(c2[6416],simde_mm_xor_si128(c2[2187],simde_mm_xor_si128(c2[1660],simde_mm_xor_si128(c2[5362],simde_mm_xor_si128(c2[5890],simde_mm_xor_si128(c2[2722],simde_mm_xor_si128(c2[1690],simde_mm_xor_si128(c2[5908],simde_mm_xor_si128(c2[8016],simde_mm_xor_si128(c2[3271],simde_mm_xor_si128(c2[3799],simde_mm_xor_si128(c2[3273],simde_mm_xor_si128(c2[6963],simde_mm_xor_si128(c2[7491],simde_mm_xor_si128(c2[4347],simde_mm_xor_si128(c2[130],simde_mm_xor_si128(c2[1705],simde_mm_xor_si128(c2[5935],simde_mm_xor_si128(c2[1712],simde_mm_xor_si128(c2[5402],simde_mm_xor_si128(c2[5930],simde_mm_xor_si128(c2[5957],simde_mm_xor_si128(c2[1728],simde_mm_xor_si128(c2[2258],simde_mm_xor_si128(c2[5960],simde_mm_xor_si128(c2[6488],simde_mm_xor_si128(c2[5428],simde_mm_xor_si128(c2[683],simde_mm_xor_si128(c2[1211],simde_mm_xor_si128(c2[7560],simde_mm_xor_si128(c2[3343],simde_mm_xor_si128(c2[7568],simde_mm_xor_si128(c2[2811],simde_mm_xor_si128(c2[3339],simde_mm_xor_si128(c2[6505],simde_mm_xor_si128(c2[1760],simde_mm_xor_si128(c2[2288],simde_mm_xor_si128(c2[7584],simde_mm_xor_si128(c2[3367],simde_mm_xor_si128(c2[1778],simde_mm_xor_si128(c2[5480],simde_mm_xor_si128(c2[6008],simde_mm_xor_si128(c2[6007],simde_mm_xor_si128(c2[1250],simde_mm_xor_si128(c2[1778],simde_mm_xor_si128(c2[216],simde_mm_xor_si128(c2[4446],simde_mm_xor_si128(c2[2859],simde_mm_xor_si128(c2[7089],simde_mm_xor_si128(c2[1280],simde_mm_xor_si128(c2[4970],simde_mm_xor_si128(c2[5498],simde_mm_xor_si128(c2[1834],simde_mm_xor_si128(c2[6052],simde_mm_xor_si128(c2[2352],simde_mm_xor_si128(c2[6054],simde_mm_xor_si128(c2[6582],simde_mm_xor_si128(c2[3936],simde_mm_xor_si128(c2[7638],simde_mm_xor_si128(c2[8166],simde_mm_xor_si128(c2[275],simde_mm_xor_si128(c2[4493],simde_mm_xor_si128(c2[5554],simde_mm_xor_si128(c2[1325],simde_mm_xor_si128(c2[2387],simde_mm_xor_si128(c2[6077],simde_mm_xor_si128(c2[6605],simde_mm_xor_si128(c2[2405],simde_mm_xor_si128(c2[6635],simde_mm_xor_si128(c2[5041],simde_mm_xor_si128(c2[824],simde_mm_xor_si128(c2[5574],simde_mm_xor_si128(c2[817],simde_mm_xor_si128(c2[1345],simde_mm_xor_si128(c2[1345],simde_mm_xor_si128(c2[4538],simde_mm_xor_si128(c2[321],simde_mm_xor_si128(c2[6124],simde_mm_xor_si128(c2[1379],simde_mm_xor_si128(c2[1907],simde_mm_xor_si128(c2[4537],simde_mm_xor_si128(c2[8239],simde_mm_xor_si128(c2[320],simde_mm_xor_si128(c2[6677],simde_mm_xor_si128(c2[2448],simde_mm_xor_si128(c2[6153],simde_mm_xor_si128(c2[1396],simde_mm_xor_si128(c2[1924],simde_mm_xor_si128(c2[4037],simde_mm_xor_si128(c2[7739],simde_mm_xor_si128(c2[8267],simde_mm_xor_si128(c2[5645],simde_mm_xor_si128(c2[1416],simde_mm_xor_si128(c2[3538],simde_mm_xor_si128(c2[7756],simde_mm_xor_si128(c2[4591],simde_mm_xor_si128(c2[8281],simde_mm_xor_si128(c2[362],simde_mm_xor_si128(c2[7258],simde_mm_xor_si128(c2[3029],simde_mm_xor_si128(c2[3026],simde_mm_xor_si128(c2[7256],simde_mm_xor_si128(c2[1979],simde_mm_xor_si128(c2[5669],simde_mm_xor_si128(c2[6197],simde_mm_xor_si128(c2[4082],simde_mm_xor_si128(c2[3059],simde_mm_xor_si128(c2[7277],simde_mm_xor_si128(c2[3576],simde_mm_xor_si128(c2[7278],simde_mm_xor_si128(c2[7806],simde_mm_xor_si128(c2[8338],simde_mm_xor_si128(c2[3581],simde_mm_xor_si128(c2[4109],simde_mm_xor_si128(c2[3079],simde_mm_xor_si128(c2[7297],simde_mm_xor_si128(c2[2023],simde_mm_xor_si128(c2[5713],simde_mm_xor_si128(c2[6241],simde_mm_xor_si128(c2[1497],simde_mm_xor_si128(c2[5187],simde_mm_xor_si128(c2[5715],simde_mm_xor_si128(c2[4155],simde_mm_xor_si128(c2[8385],simde_mm_xor_si128(c2[7848],simde_mm_xor_si128(c2[3631],simde_mm_xor_si128(c2[461],simde_mm_xor_si128(c2[4163],simde_mm_xor_si128(c2[4691],simde_mm_xor_si128(c2[458],simde_mm_xor_si128(c2[7344],simde_mm_xor_si128(c2[3127],simde_mm_xor_si128(c2[485],simde_mm_xor_si128(c2[4187],simde_mm_xor_si128(c2[4715],simde_mm_xor_si128(c2[5766],simde_mm_xor_si128(c2[1009],simde_mm_xor_si128(c2[1537],simde_mm_xor_si128(c2[7369],simde_mm_xor_si128(c2[3152],simde_mm_xor_si128(c2[2099],simde_mm_xor_si128(c2[6317],simde_mm_xor_si128(c2[4730],simde_mm_xor_si128(c2[8432],simde_mm_xor_si128(c2[513],c2[515]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 9
+     d2[108]=simde_mm_xor_si128(c2[7402],simde_mm_xor_si128(c2[4250],simde_mm_xor_si128(c2[776],simde_mm_xor_si128(c2[2912],simde_mm_xor_si128(c2[5067],simde_mm_xor_si128(c2[8334],simde_mm_xor_si128(c2[8355],c2[1541])))))));
+
+//row: 10
+     d2[120]=simde_mm_xor_si128(c2[2670],simde_mm_xor_si128(c2[1641],simde_mm_xor_si128(c2[1157],simde_mm_xor_si128(c2[8094],simde_mm_xor_si128(c2[8123],c2[1402])))));
+
+//row: 11
+     d2[132]=simde_mm_xor_si128(c2[532],simde_mm_xor_si128(c2[3170],simde_mm_xor_si128(c2[3698],simde_mm_xor_si128(c2[5281],simde_mm_xor_si128(c2[0],simde_mm_xor_si128(c2[5817],simde_mm_xor_si128(c2[536],simde_mm_xor_si128(c2[1058],simde_mm_xor_si128(c2[4224],simde_mm_xor_si128(c2[3],simde_mm_xor_si128(c2[562],simde_mm_xor_si128(c2[3200],simde_mm_xor_si128(c2[3728],simde_mm_xor_si128(c2[4248],simde_mm_xor_si128(c2[7426],simde_mm_xor_si128(c2[2666],simde_mm_xor_si128(c2[5832],simde_mm_xor_si128(c2[3198],simde_mm_xor_si128(c2[48],simde_mm_xor_si128(c2[2698],simde_mm_xor_si128(c2[3226],simde_mm_xor_si128(c2[5329],simde_mm_xor_si128(c2[48],simde_mm_xor_si128(c2[3221],simde_mm_xor_si128(c2[6387],simde_mm_xor_si128(c2[7999],simde_mm_xor_si128(c2[2190],simde_mm_xor_si128(c2[2718],simde_mm_xor_si128(c2[3769],simde_mm_xor_si128(c2[6947],simde_mm_xor_si128(c2[7472],simde_mm_xor_si128(c2[2191],simde_mm_xor_si128(c2[7490],simde_mm_xor_si128(c2[2209],simde_mm_xor_si128(c2[5381],simde_mm_xor_si128(c2[100],simde_mm_xor_si128(c2[626],simde_mm_xor_si128(c2[3792],simde_mm_xor_si128(c2[1712],simde_mm_xor_si128(c2[4350],simde_mm_xor_si128(c2[4878],simde_mm_xor_si128(c2[7517],simde_mm_xor_si128(c2[2236],simde_mm_xor_si128(c2[7512],simde_mm_xor_si128(c2[2243],simde_mm_xor_si128(c2[3322],simde_mm_xor_si128(c2[5960],simde_mm_xor_si128(c2[6488],simde_mm_xor_si128(c2[8070],simde_mm_xor_si128(c2[2789],simde_mm_xor_si128(c2[2793],simde_mm_xor_si128(c2[5959],simde_mm_xor_si128(c2[4925],simde_mm_xor_si128(c2[8091],simde_mm_xor_si128(c2[4921],simde_mm_xor_si128(c2[8099],simde_mm_xor_si128(c2[3870],simde_mm_xor_si128(c2[7036],simde_mm_xor_si128(c2[4949],simde_mm_xor_si128(c2[8115],simde_mm_xor_si128(c2[7590],simde_mm_xor_si128(c2[2309],simde_mm_xor_si128(c2[3360],simde_mm_xor_si128(c2[6538],simde_mm_xor_si128(c2[6028],simde_mm_xor_si128(c2[219],simde_mm_xor_si128(c2[747],simde_mm_xor_si128(c2[224],simde_mm_xor_si128(c2[3390],simde_mm_xor_si128(c2[7080],simde_mm_xor_si128(c2[1811],simde_mm_xor_si128(c2[7634],simde_mm_xor_si128(c2[1825],simde_mm_xor_si128(c2[2353],simde_mm_xor_si128(c2[8164],simde_mm_xor_si128(c2[2883],simde_mm_xor_si128(c2[1301],simde_mm_xor_si128(c2[4467],simde_mm_xor_si128(c2[6075],simde_mm_xor_si128(c2[266],simde_mm_xor_si128(c2[794],simde_mm_xor_si128(c2[2907],simde_mm_xor_si128(c2[6073],simde_mm_xor_si128(c2[8187],simde_mm_xor_si128(c2[2906],simde_mm_xor_si128(c2[8217],simde_mm_xor_si128(c2[2408],simde_mm_xor_si128(c2[2936],simde_mm_xor_si128(c2[2406],simde_mm_xor_si128(c2[5572],simde_mm_xor_si128(c2[2939],simde_mm_xor_si128(c2[6105],simde_mm_xor_si128(c2[4512],simde_mm_xor_si128(c2[1903],simde_mm_xor_si128(c2[4541],simde_mm_xor_si128(c2[5069],simde_mm_xor_si128(c2[3489],simde_mm_xor_si128(c2[6655],simde_mm_xor_si128(c2[1902],simde_mm_xor_si128(c2[5068],simde_mm_xor_si128(c2[4042],simde_mm_xor_si128(c2[7208],simde_mm_xor_si128(c2[3506],simde_mm_xor_si128(c2[6672],simde_mm_xor_si128(c2[1402],simde_mm_xor_si128(c2[4568],simde_mm_xor_si128(c2[3010],simde_mm_xor_si128(c2[5648],simde_mm_xor_si128(c2[6176],simde_mm_xor_si128(c2[891],simde_mm_xor_si128(c2[4057],simde_mm_xor_si128(c2[1944],simde_mm_xor_si128(c2[5122],simde_mm_xor_si128(c2[4611],simde_mm_xor_si128(c2[7249],simde_mm_xor_si128(c2[7777],simde_mm_xor_si128(c2[391],simde_mm_xor_si128(c2[3557],simde_mm_xor_si128(c2[7779],simde_mm_xor_si128(c2[2498],simde_mm_xor_si128(c2[8306],simde_mm_xor_si128(c2[412],simde_mm_xor_si128(c2[3578],simde_mm_xor_si128(c2[941],simde_mm_xor_si128(c2[4107],simde_mm_xor_si128(c2[5691],simde_mm_xor_si128(c2[410],simde_mm_xor_si128(c2[432],simde_mm_xor_si128(c2[3082],simde_mm_xor_si128(c2[3610],simde_mm_xor_si128(c2[7835],simde_mm_xor_si128(c2[2554],simde_mm_xor_si128(c2[7297],simde_mm_xor_si128(c2[2016],simde_mm_xor_si128(c2[1520],simde_mm_xor_si128(c2[4158],simde_mm_xor_si128(c2[4686],simde_mm_xor_si128(c2[5213],simde_mm_xor_si128(c2[8379],simde_mm_xor_si128(c2[6273],simde_mm_xor_si128(c2[992],simde_mm_xor_si128(c2[4709],simde_mm_xor_si128(c2[7347],simde_mm_xor_si128(c2[7875],simde_mm_xor_si128(c2[6297],simde_mm_xor_si128(c2[1016],simde_mm_xor_si128(c2[3131],simde_mm_xor_si128(c2[6297],simde_mm_xor_si128(c2[4734],simde_mm_xor_si128(c2[7372],simde_mm_xor_si128(c2[7900],simde_mm_xor_si128(c2[7899],simde_mm_xor_si128(c2[2618],simde_mm_xor_si128(c2[2095],simde_mm_xor_si128(c2[5261],c2[6851])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 12
+     d2[144]=simde_mm_xor_si128(c2[6868],simde_mm_xor_si128(c2[5315],simde_mm_xor_si128(c2[7642],simde_mm_xor_si128(c2[4490],simde_mm_xor_si128(c2[1371],c2[5716])))));
+
+//row: 13
+     d2[156]=simde_mm_xor_si128(c2[2646],simde_mm_xor_si128(c2[3174],simde_mm_xor_si128(c2[7923],simde_mm_xor_si128(c2[0],simde_mm_xor_si128(c2[3700],simde_mm_xor_si128(c2[4759],simde_mm_xor_si128(c2[2664],simde_mm_xor_si128(c2[3192],simde_mm_xor_si128(c2[6890],simde_mm_xor_si128(c2[5308],simde_mm_xor_si128(c2[2162],simde_mm_xor_si128(c2[2690],simde_mm_xor_si128(c2[7971],simde_mm_xor_si128(c2[5863],simde_mm_xor_si128(c2[1666],simde_mm_xor_si128(c2[2194],simde_mm_xor_si128(c2[6411],simde_mm_xor_si128(c2[1667],simde_mm_xor_si128(c2[611],simde_mm_xor_si128(c2[1685],simde_mm_xor_si128(c2[8023],simde_mm_xor_si128(c2[3268],simde_mm_xor_si128(c2[3826],simde_mm_xor_si128(c2[4354],simde_mm_xor_si128(c2[1712],simde_mm_xor_si128(c2[1707],simde_mm_xor_si128(c2[5424],simde_mm_xor_si128(c2[5952],simde_mm_xor_si128(c2[2265],simde_mm_xor_si128(c2[5435],simde_mm_xor_si128(c2[7567],simde_mm_xor_si128(c2[7563],simde_mm_xor_si128(c2[6512],simde_mm_xor_si128(c2[5452],simde_mm_xor_si128(c2[7591],simde_mm_xor_si128(c2[1785],simde_mm_xor_si128(c2[6002],simde_mm_xor_si128(c2[8142],simde_mm_xor_si128(c2[223],simde_mm_xor_si128(c2[2866],simde_mm_xor_si128(c2[1275],simde_mm_xor_si128(c2[1301],simde_mm_xor_si128(c2[1829],simde_mm_xor_si128(c2[2359],simde_mm_xor_si128(c2[3943],simde_mm_xor_si128(c2[8189],simde_mm_xor_si128(c2[270],simde_mm_xor_si128(c2[5549],simde_mm_xor_si128(c2[2382],simde_mm_xor_si128(c2[1872],simde_mm_xor_si128(c2[2400],simde_mm_xor_si128(c2[5048],simde_mm_xor_si128(c2[5569],simde_mm_xor_si128(c2[4017],simde_mm_xor_si128(c2[4545],simde_mm_xor_si128(c2[6131],simde_mm_xor_si128(c2[4544],simde_mm_xor_si128(c2[6672],simde_mm_xor_si128(c2[6148],simde_mm_xor_si128(c2[4032],simde_mm_xor_si128(c2[5112],simde_mm_xor_si128(c2[5640],simde_mm_xor_si128(c2[3533],simde_mm_xor_si128(c2[4586],simde_mm_xor_si128(c2[6725],simde_mm_xor_si128(c2[7253],simde_mm_xor_si128(c2[3033],simde_mm_xor_si128(c2[1974],simde_mm_xor_si128(c2[3054],simde_mm_xor_si128(c2[3583],simde_mm_xor_si128(c2[8333],simde_mm_xor_si128(c2[2546],simde_mm_xor_si128(c2[3074],simde_mm_xor_si128(c2[2018],simde_mm_xor_si128(c2[1492],simde_mm_xor_si128(c2[3634],simde_mm_xor_si128(c2[4162],simde_mm_xor_si128(c2[7855],simde_mm_xor_si128(c2[456],simde_mm_xor_si128(c2[6823],simde_mm_xor_si128(c2[7351],simde_mm_xor_si128(c2[480],simde_mm_xor_si128(c2[5761],simde_mm_xor_si128(c2[2071],simde_mm_xor_si128(c2[6848],simde_mm_xor_si128(c2[7376],simde_mm_xor_si128(c2[2094],c2[4737])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 14
+     d2[168]=simde_mm_xor_si128(c2[7400],simde_mm_xor_si128(c2[4515],simde_mm_xor_si128(c2[5120],simde_mm_xor_si128(c2[5141],simde_mm_xor_si128(c2[6225],c2[6840])))));
+
+//row: 15
+     d2[180]=simde_mm_xor_si128(c2[528],simde_mm_xor_si128(c2[5289],simde_mm_xor_si128(c2[5813],simde_mm_xor_si128(c2[538],simde_mm_xor_si128(c2[1066],simde_mm_xor_si128(c2[531],simde_mm_xor_si128(c2[558],simde_mm_xor_si128(c2[4256],simde_mm_xor_si128(c2[2146],simde_mm_xor_si128(c2[2674],simde_mm_xor_si128(c2[1080],simde_mm_xor_si128(c2[56],simde_mm_xor_si128(c2[5337],simde_mm_xor_si128(c2[3217],simde_mm_xor_si128(c2[7995],simde_mm_xor_si128(c2[3777],simde_mm_xor_si128(c2[6940],simde_mm_xor_si128(c2[7468],simde_mm_xor_si128(c2[7498],simde_mm_xor_si128(c2[5377],simde_mm_xor_si128(c2[106],simde_mm_xor_si128(c2[634],simde_mm_xor_si128(c2[1708],simde_mm_xor_si128(c2[7513],simde_mm_xor_si128(c2[7520],simde_mm_xor_si128(c2[3318],simde_mm_xor_si128(c2[8066],simde_mm_xor_si128(c2[2261],simde_mm_xor_si128(c2[2789],simde_mm_xor_si128(c2[4921],simde_mm_xor_si128(c2[4929],simde_mm_xor_si128(c2[3338],simde_mm_xor_si128(c2[3866],simde_mm_xor_si128(c2[4945],simde_mm_xor_si128(c2[7586],simde_mm_xor_si128(c2[2840],simde_mm_xor_si128(c2[3368],simde_mm_xor_si128(c2[6024],simde_mm_xor_si128(c2[220],simde_mm_xor_si128(c2[7088],simde_mm_xor_si128(c2[7642],simde_mm_xor_si128(c2[8160],simde_mm_xor_si128(c2[769],simde_mm_xor_si128(c2[1297],simde_mm_xor_si128(c2[1297],simde_mm_xor_si128(c2[6083],simde_mm_xor_si128(c2[2915],simde_mm_xor_si128(c2[7667],simde_mm_xor_si128(c2[8195],simde_mm_xor_si128(c2[8213],simde_mm_xor_si128(c2[2402],simde_mm_xor_si128(c2[2407],simde_mm_xor_si128(c2[2935],simde_mm_xor_si128(c2[1899],simde_mm_xor_si128(c2[3485],simde_mm_xor_si128(c2[1370],simde_mm_xor_si128(c2[1898],simde_mm_xor_si128(c2[7711],simde_mm_xor_si128(c2[4038],simde_mm_xor_si128(c2[3514],simde_mm_xor_si128(c2[870],simde_mm_xor_si128(c2[1398],simde_mm_xor_si128(c2[3006],simde_mm_xor_si128(c2[899],simde_mm_xor_si128(c2[1952],simde_mm_xor_si128(c2[4619],simde_mm_xor_si128(c2[387],simde_mm_xor_si128(c2[7259],simde_mm_xor_si128(c2[7787],simde_mm_xor_si128(c2[408],simde_mm_xor_si128(c2[937],simde_mm_xor_si128(c2[5171],simde_mm_xor_si128(c2[5699],simde_mm_xor_si128(c2[440],simde_mm_xor_si128(c2[7831],simde_mm_xor_si128(c2[6777],simde_mm_xor_si128(c2[7305],simde_mm_xor_si128(c2[4131],simde_mm_xor_si128(c2[1516],simde_mm_xor_si128(c2[5209],simde_mm_xor_si128(c2[6269],simde_mm_xor_si128(c2[4705],simde_mm_xor_si128(c2[6293],simde_mm_xor_si128(c2[2599],simde_mm_xor_si128(c2[3127],simde_mm_xor_si128(c2[4730],simde_mm_xor_si128(c2[7907],simde_mm_xor_si128(c2[1563],c2[2091]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 16
+     d2[192]=simde_mm_xor_si128(c2[6346],simde_mm_xor_si128(c2[2648],simde_mm_xor_si128(c2[3172],simde_mm_xor_si128(c2[6872],simde_mm_xor_si128(c2[6364],simde_mm_xor_si128(c2[1615],simde_mm_xor_si128(c2[33],simde_mm_xor_si128(c2[6888],simde_mm_xor_si128(c2[5862],simde_mm_xor_si128(c2[2696],simde_mm_xor_si128(c2[576],simde_mm_xor_si128(c2[5354],simde_mm_xor_si128(c2[1136],simde_mm_xor_si128(c2[4827],simde_mm_xor_si128(c2[1137],simde_mm_xor_si128(c2[4857],simde_mm_xor_si128(c2[2736],simde_mm_xor_si128(c2[6440],simde_mm_xor_si128(c2[7514],simde_mm_xor_si128(c2[4872],simde_mm_xor_si128(c2[4879],simde_mm_xor_si128(c2[677],simde_mm_xor_si128(c2[5425],simde_mm_xor_si128(c2[148],simde_mm_xor_si128(c2[2280],simde_mm_xor_si128(c2[2288],simde_mm_xor_si128(c2[1225],simde_mm_xor_si128(c2[2304],simde_mm_xor_si128(c2[4945],simde_mm_xor_si128(c2[727],simde_mm_xor_si128(c2[3395],simde_mm_xor_si128(c2[6026],simde_mm_xor_si128(c2[4447],simde_mm_xor_si128(c2[5001],simde_mm_xor_si128(c2[5531],simde_mm_xor_si128(c2[7115],simde_mm_xor_si128(c2[3442],simde_mm_xor_si128(c2[274],simde_mm_xor_si128(c2[5554],simde_mm_xor_si128(c2[5019],simde_mm_xor_si128(c2[5572],simde_mm_xor_si128(c2[8208],simde_mm_xor_si128(c2[294],simde_mm_xor_si128(c2[7705],simde_mm_xor_si128(c2[844],simde_mm_xor_si128(c2[7704],simde_mm_xor_si128(c2[1397],simde_mm_xor_si128(c2[873],simde_mm_xor_si128(c2[7204],simde_mm_xor_si128(c2[365],simde_mm_xor_si128(c2[6705],simde_mm_xor_si128(c2[7758],simde_mm_xor_si128(c2[1978],simde_mm_xor_si128(c2[6193],simde_mm_xor_si128(c2[5146],simde_mm_xor_si128(c2[6226],simde_mm_xor_si128(c2[6755],simde_mm_xor_si128(c2[3058],simde_mm_xor_si128(c2[6246],simde_mm_xor_si128(c2[5190],simde_mm_xor_si128(c2[4664],simde_mm_xor_si128(c2[7322],simde_mm_xor_si128(c2[2568],simde_mm_xor_si128(c2[3628],simde_mm_xor_si128(c2[2064],simde_mm_xor_si128(c2[3652],simde_mm_xor_si128(c2[486],simde_mm_xor_si128(c2[1014],simde_mm_xor_si128(c2[2089],simde_mm_xor_si128(c2[5266],c2[7897]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 17
+     d2[204]=simde_mm_xor_si128(c2[2116],simde_mm_xor_si128(c2[8262],simde_mm_xor_si128(c2[917],simde_mm_xor_si128(c2[3586],c2[4211]))));
+
+//row: 18
+     d2[216]=simde_mm_xor_si128(c2[1088],simde_mm_xor_si128(c2[1882],simde_mm_xor_si128(c2[4541],simde_mm_xor_si128(c2[2552],c2[2568]))));
+
+//row: 19
+     d2[228]=simde_mm_xor_si128(c2[537],simde_mm_xor_si128(c2[2665],simde_mm_xor_si128(c2[4401],simde_mm_xor_si128(c2[1251],c2[2880]))));
+
+//row: 20
+     d2[240]=simde_mm_xor_si128(c2[7402],simde_mm_xor_si128(c2[3704],simde_mm_xor_si128(c2[4228],simde_mm_xor_si128(c2[7928],simde_mm_xor_si128(c2[5819],simde_mm_xor_si128(c2[7420],simde_mm_xor_si128(c2[2671],simde_mm_xor_si128(c2[1089],simde_mm_xor_si128(c2[6918],simde_mm_xor_si128(c2[3752],simde_mm_xor_si128(c2[1632],simde_mm_xor_si128(c2[6410],simde_mm_xor_si128(c2[2192],simde_mm_xor_si128(c2[5883],simde_mm_xor_si128(c2[7464],simde_mm_xor_si128(c2[5913],simde_mm_xor_si128(c2[3792],simde_mm_xor_si128(c2[7496],simde_mm_xor_si128(c2[123],simde_mm_xor_si128(c2[5928],simde_mm_xor_si128(c2[5935],simde_mm_xor_si128(c2[1733],simde_mm_xor_si128(c2[6481],simde_mm_xor_si128(c2[1204],simde_mm_xor_si128(c2[3336],simde_mm_xor_si128(c2[3344],simde_mm_xor_si128(c2[2281],simde_mm_xor_si128(c2[3360],simde_mm_xor_si128(c2[6001],simde_mm_xor_si128(c2[1783],simde_mm_xor_si128(c2[4451],simde_mm_xor_si128(c2[7082],simde_mm_xor_si128(c2[5503],simde_mm_xor_si128(c2[4444],simde_mm_xor_si128(c2[6057],simde_mm_xor_si128(c2[6587],simde_mm_xor_si128(c2[8171],simde_mm_xor_si128(c2[4498],simde_mm_xor_si128(c2[1330],simde_mm_xor_si128(c2[6610],simde_mm_xor_si128(c2[2913],simde_mm_xor_si128(c2[6628],simde_mm_xor_si128(c2[817],simde_mm_xor_si128(c2[1350],simde_mm_xor_si128(c2[314],simde_mm_xor_si128(c2[1900],simde_mm_xor_si128(c2[313],simde_mm_xor_si128(c2[2453],simde_mm_xor_si128(c2[1929],simde_mm_xor_si128(c2[8260],simde_mm_xor_si128(c2[1421],simde_mm_xor_si128(c2[7761],simde_mm_xor_si128(c2[367],simde_mm_xor_si128(c2[3034],simde_mm_xor_si128(c2[7249],simde_mm_xor_si128(c2[6202],simde_mm_xor_si128(c2[7282],simde_mm_xor_si128(c2[7811],simde_mm_xor_si128(c2[4114],simde_mm_xor_si128(c2[7302],simde_mm_xor_si128(c2[6246],simde_mm_xor_si128(c2[5720],simde_mm_xor_si128(c2[8378],simde_mm_xor_si128(c2[3624],simde_mm_xor_si128(c2[4684],simde_mm_xor_si128(c2[3120],simde_mm_xor_si128(c2[4708],simde_mm_xor_si128(c2[1542],simde_mm_xor_si128(c2[3145],simde_mm_xor_si128(c2[6322],c2[506]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 21
+     d2[252]=simde_mm_xor_si128(c2[6888],simde_mm_xor_si128(c2[3294],simde_mm_xor_si128(c2[4616],simde_mm_xor_si128(c2[3121],c2[1038]))));
+
+//row: 22
+     d2[264]=simde_mm_xor_si128(c2[7393],simde_mm_xor_si128(c2[6096],simde_mm_xor_si128(c2[5066],c2[3577])));
+
+//row: 23
+     d2[276]=simde_mm_xor_si128(c2[4249],simde_mm_xor_si128(c2[4805],simde_mm_xor_si128(c2[7107],c2[6241])));
+
+//row: 24
+     d2[288]=simde_mm_xor_si128(c2[6873],simde_mm_xor_si128(c2[3175],simde_mm_xor_si128(c2[3699],simde_mm_xor_si128(c2[7399],simde_mm_xor_si128(c2[5286],simde_mm_xor_si128(c2[6891],simde_mm_xor_si128(c2[2142],simde_mm_xor_si128(c2[560],simde_mm_xor_si128(c2[6389],simde_mm_xor_si128(c2[3223],simde_mm_xor_si128(c2[1115],simde_mm_xor_si128(c2[5881],simde_mm_xor_si128(c2[1663],simde_mm_xor_si128(c2[5354],simde_mm_xor_si128(c2[7473],simde_mm_xor_si128(c2[5384],simde_mm_xor_si128(c2[3275],simde_mm_xor_si128(c2[6967],simde_mm_xor_si128(c2[5906],simde_mm_xor_si128(c2[8041],simde_mm_xor_si128(c2[5411],simde_mm_xor_si128(c2[5406],simde_mm_xor_si128(c2[1204],simde_mm_xor_si128(c2[5952],simde_mm_xor_si128(c2[675],simde_mm_xor_si128(c2[2819],simde_mm_xor_si128(c2[2815],simde_mm_xor_si128(c2[1752],simde_mm_xor_si128(c2[2843],simde_mm_xor_si128(c2[5472],simde_mm_xor_si128(c2[1254],simde_mm_xor_si128(c2[3922],simde_mm_xor_si128(c2[6553],simde_mm_xor_si128(c2[4974],simde_mm_xor_si128(c2[5528],simde_mm_xor_si128(c2[6058],simde_mm_xor_si128(c2[7642],simde_mm_xor_si128(c2[3969],simde_mm_xor_si128(c2[801],simde_mm_xor_si128(c2[6081],simde_mm_xor_si128(c2[1857],simde_mm_xor_si128(c2[6099],simde_mm_xor_si128(c2[288],simde_mm_xor_si128(c2[821],simde_mm_xor_si128(c2[8232],simde_mm_xor_si128(c2[1371],simde_mm_xor_si128(c2[8243],simde_mm_xor_si128(c2[1924],simde_mm_xor_si128(c2[1400],simde_mm_xor_si128(c2[7731],simde_mm_xor_si128(c2[892],simde_mm_xor_si128(c2[7232],simde_mm_xor_si128(c2[8285],simde_mm_xor_si128(c2[2505],simde_mm_xor_si128(c2[6720],simde_mm_xor_si128(c2[5673],simde_mm_xor_si128(c2[6753],simde_mm_xor_si128(c2[7282],simde_mm_xor_si128(c2[3585],simde_mm_xor_si128(c2[6773],simde_mm_xor_si128(c2[5717],simde_mm_xor_si128(c2[5191],simde_mm_xor_si128(c2[7849],simde_mm_xor_si128(c2[3107],simde_mm_xor_si128(c2[4155],simde_mm_xor_si128(c2[2603],simde_mm_xor_si128(c2[4179],simde_mm_xor_si128(c2[1013],simde_mm_xor_si128(c2[2616],simde_mm_xor_si128(c2[5793],c2[8424]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 25
+     d2[300]=simde_mm_xor_si128(c2[4252],simde_mm_xor_si128(c2[673],simde_mm_xor_si128(c2[8099],c2[4567])));
+
+//row: 26
+     d2[312]=simde_mm_xor_si128(c2[3700],simde_mm_xor_si128(c2[581],simde_mm_xor_si128(c2[6436],c2[4592])));
+
+//row: 27
+     d2[324]=simde_mm_xor_si128(c2[1080],simde_mm_xor_si128(c2[1200],c2[2838]));
+
+//row: 28
+     d2[336]=simde_mm_xor_si128(c2[7393],simde_mm_xor_si128(c2[1681],simde_mm_xor_si128(c2[2571],c2[1565])));
+
+//row: 29
+     d2[348]=simde_mm_xor_si128(c2[1056],simde_mm_xor_si128(c2[5817],simde_mm_xor_si128(c2[6341],simde_mm_xor_si128(c2[1066],simde_mm_xor_si128(c2[1594],simde_mm_xor_si128(c2[1086],simde_mm_xor_si128(c2[4784],simde_mm_xor_si128(c2[2674],simde_mm_xor_si128(c2[3202],simde_mm_xor_si128(c2[6363],simde_mm_xor_si128(c2[584],simde_mm_xor_si128(c2[5865],simde_mm_xor_si128(c2[3745],simde_mm_xor_si128(c2[76],simde_mm_xor_si128(c2[4305],simde_mm_xor_si128(c2[7468],simde_mm_xor_si128(c2[7996],simde_mm_xor_si128(c2[8026],simde_mm_xor_si128(c2[5905],simde_mm_xor_si128(c2[634],simde_mm_xor_si128(c2[1162],simde_mm_xor_si128(c2[2236],simde_mm_xor_si128(c2[8041],simde_mm_xor_si128(c2[8048],simde_mm_xor_si128(c2[3846],simde_mm_xor_si128(c2[147],simde_mm_xor_si128(c2[2789],simde_mm_xor_si128(c2[3317],simde_mm_xor_si128(c2[5449],simde_mm_xor_si128(c2[5457],simde_mm_xor_si128(c2[3866],simde_mm_xor_si128(c2[4394],simde_mm_xor_si128(c2[5473],simde_mm_xor_si128(c2[8114],simde_mm_xor_si128(c2[3368],simde_mm_xor_si128(c2[3896],simde_mm_xor_si128(c2[6552],simde_mm_xor_si128(c2[748],simde_mm_xor_si128(c2[7616],simde_mm_xor_si128(c2[8170],simde_mm_xor_si128(c2[241],simde_mm_xor_si128(c2[1297],simde_mm_xor_si128(c2[1825],simde_mm_xor_si128(c2[6611],simde_mm_xor_si128(c2[3443],simde_mm_xor_si128(c2[8195],simde_mm_xor_si128(c2[264],simde_mm_xor_si128(c2[294],simde_mm_xor_si128(c2[2930],simde_mm_xor_si128(c2[2935],simde_mm_xor_si128(c2[3463],simde_mm_xor_si128(c2[2427],simde_mm_xor_si128(c2[4013],simde_mm_xor_si128(c2[1898],simde_mm_xor_si128(c2[2426],simde_mm_xor_si128(c2[4566],simde_mm_xor_si128(c2[4042],simde_mm_xor_si128(c2[1398],simde_mm_xor_si128(c2[1926],simde_mm_xor_si128(c2[2976],simde_mm_xor_si128(c2[3534],simde_mm_xor_si128(c2[1427],simde_mm_xor_si128(c2[2480],simde_mm_xor_si128(c2[5147],simde_mm_xor_si128(c2[915],simde_mm_xor_si128(c2[7787],simde_mm_xor_si128(c2[8315],simde_mm_xor_si128(c2[936],simde_mm_xor_si128(c2[1465],simde_mm_xor_si128(c2[5699],simde_mm_xor_si128(c2[6227],simde_mm_xor_si128(c2[968],simde_mm_xor_si128(c2[8359],simde_mm_xor_si128(c2[7305],simde_mm_xor_si128(c2[7833],simde_mm_xor_si128(c2[2025],simde_mm_xor_si128(c2[2044],simde_mm_xor_si128(c2[5737],simde_mm_xor_si128(c2[6797],simde_mm_xor_si128(c2[5233],simde_mm_xor_si128(c2[6821],simde_mm_xor_si128(c2[3127],simde_mm_xor_si128(c2[3655],simde_mm_xor_si128(c2[5258],simde_mm_xor_si128(c2[8435],simde_mm_xor_si128(c2[2091],c2[2619]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 30
+     d2[360]=simde_mm_xor_si128(c2[7392],simde_mm_xor_si128(c2[3706],simde_mm_xor_si128(c2[3702],simde_mm_xor_si128(c2[4230],simde_mm_xor_si128(c2[7402],simde_mm_xor_si128(c2[7930],simde_mm_xor_si128(c2[7929],simde_mm_xor_si128(c2[7422],simde_mm_xor_si128(c2[2145],simde_mm_xor_si128(c2[2673],simde_mm_xor_si128(c2[563],simde_mm_xor_si128(c2[1091],simde_mm_xor_si128(c2[6920],simde_mm_xor_si128(c2[3754],simde_mm_xor_si128(c2[1106],simde_mm_xor_si128(c2[1634],simde_mm_xor_si128(c2[6412],simde_mm_xor_si128(c2[2194],simde_mm_xor_si128(c2[5357],simde_mm_xor_si128(c2[5885],simde_mm_xor_si128(c2[5915],simde_mm_xor_si128(c2[3266],simde_mm_xor_si128(c2[3794],simde_mm_xor_si128(c2[6970],simde_mm_xor_si128(c2[7498],simde_mm_xor_si128(c2[125],simde_mm_xor_si128(c2[5930],simde_mm_xor_si128(c2[5409],simde_mm_xor_si128(c2[5937],simde_mm_xor_si128(c2[1735],simde_mm_xor_si128(c2[5955],simde_mm_xor_si128(c2[6483],simde_mm_xor_si128(c2[678],simde_mm_xor_si128(c2[1206],simde_mm_xor_si128(c2[3338],simde_mm_xor_si128(c2[2818],simde_mm_xor_si128(c2[3346],simde_mm_xor_si128(c2[1755],simde_mm_xor_si128(c2[2283],simde_mm_xor_si128(c2[3362],simde_mm_xor_si128(c2[5475],simde_mm_xor_si128(c2[6003],simde_mm_xor_si128(c2[1257],simde_mm_xor_si128(c2[1785],simde_mm_xor_si128(c2[4441],simde_mm_xor_si128(c2[7084],simde_mm_xor_si128(c2[4977],simde_mm_xor_si128(c2[5505],simde_mm_xor_si128(c2[6059],simde_mm_xor_si128(c2[6049],simde_mm_xor_si128(c2[6577],simde_mm_xor_si128(c2[7633],simde_mm_xor_si128(c2[8161],simde_mm_xor_si128(c2[2882],simde_mm_xor_si128(c2[4488],simde_mm_xor_si128(c2[1320],simde_mm_xor_si128(c2[6072],simde_mm_xor_si128(c2[6600],simde_mm_xor_si128(c2[6630],simde_mm_xor_si128(c2[819],simde_mm_xor_si128(c2[824],simde_mm_xor_si128(c2[1352],simde_mm_xor_si128(c2[316],simde_mm_xor_si128(c2[1374],simde_mm_xor_si128(c2[1902],simde_mm_xor_si128(c2[8234],simde_mm_xor_si128(c2[315],simde_mm_xor_si128(c2[2428],simde_mm_xor_si128(c2[2455],simde_mm_xor_si128(c2[1403],simde_mm_xor_si128(c2[1931],simde_mm_xor_si128(c2[7734],simde_mm_xor_si128(c2[8262],simde_mm_xor_si128(c2[1423],simde_mm_xor_si128(c2[7763],simde_mm_xor_si128(c2[8288],simde_mm_xor_si128(c2[369],simde_mm_xor_si128(c2[3024],simde_mm_xor_si128(c2[7251],simde_mm_xor_si128(c2[5664],simde_mm_xor_si128(c2[6192],simde_mm_xor_si128(c2[7272],simde_mm_xor_si128(c2[7273],simde_mm_xor_si128(c2[7801],simde_mm_xor_si128(c2[3576],simde_mm_xor_si128(c2[4104],simde_mm_xor_si128(c2[7304],simde_mm_xor_si128(c2[5720],simde_mm_xor_si128(c2[6248],simde_mm_xor_si128(c2[5194],simde_mm_xor_si128(c2[5722],simde_mm_xor_si128(c2[8380],simde_mm_xor_si128(c2[3626],simde_mm_xor_si128(c2[4158],simde_mm_xor_si128(c2[4686],simde_mm_xor_si128(c2[3122],simde_mm_xor_si128(c2[4182],simde_mm_xor_si128(c2[4710],simde_mm_xor_si128(c2[1016],simde_mm_xor_si128(c2[1544],simde_mm_xor_si128(c2[3147],simde_mm_xor_si128(c2[6312],simde_mm_xor_si128(c2[8427],c2[508])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 31
+     d2[372]=simde_mm_xor_si128(c2[2651],simde_mm_xor_si128(c2[2123],simde_mm_xor_si128(c2[7400],simde_mm_xor_si128(c2[6872],simde_mm_xor_si128(c2[7924],simde_mm_xor_si128(c2[7396],simde_mm_xor_si128(c2[3177],simde_mm_xor_si128(c2[2121],simde_mm_xor_si128(c2[2649],simde_mm_xor_si128(c2[2669],simde_mm_xor_si128(c2[2141],simde_mm_xor_si128(c2[6367],simde_mm_xor_si128(c2[5839],simde_mm_xor_si128(c2[4785],simde_mm_xor_si128(c2[3729],simde_mm_xor_si128(c2[4257],simde_mm_xor_si128(c2[2142],simde_mm_xor_si128(c2[2167],simde_mm_xor_si128(c2[1639],simde_mm_xor_si128(c2[7448],simde_mm_xor_si128(c2[6920],simde_mm_xor_si128(c2[5328],simde_mm_xor_si128(c2[4800],simde_mm_xor_si128(c2[1659],simde_mm_xor_si128(c2[1131],simde_mm_xor_si128(c2[5888],simde_mm_xor_si128(c2[5360],simde_mm_xor_si128(c2[1132],simde_mm_xor_si128(c2[76],simde_mm_xor_si128(c2[604],simde_mm_xor_si128(c2[1162],simde_mm_xor_si128(c2[634],simde_mm_xor_si128(c2[7488],simde_mm_xor_si128(c2[6960],simde_mm_xor_si128(c2[2745],simde_mm_xor_si128(c2[1689],simde_mm_xor_si128(c2[2217],simde_mm_xor_si128(c2[3819],simde_mm_xor_si128(c2[3291],simde_mm_xor_si128(c2[1177],simde_mm_xor_si128(c2[649],simde_mm_xor_si128(c2[1184],simde_mm_xor_si128(c2[656],simde_mm_xor_si128(c2[5429],simde_mm_xor_si128(c2[4901],simde_mm_xor_si128(c2[1730],simde_mm_xor_si128(c2[1202],simde_mm_xor_si128(c2[4900],simde_mm_xor_si128(c2[3844],simde_mm_xor_si128(c2[4372],simde_mm_xor_si128(c2[7032],simde_mm_xor_si128(c2[6504],simde_mm_xor_si128(c2[7040],simde_mm_xor_si128(c2[6512],simde_mm_xor_si128(c2[5977],simde_mm_xor_si128(c2[4921],simde_mm_xor_si128(c2[5449],simde_mm_xor_si128(c2[3865],simde_mm_xor_si128(c2[7056],simde_mm_xor_si128(c2[6528],simde_mm_xor_si128(c2[1250],simde_mm_xor_si128(c2[722],simde_mm_xor_si128(c2[5479],simde_mm_xor_si128(c2[4423],simde_mm_xor_si128(c2[4951],simde_mm_xor_si128(c2[8147],simde_mm_xor_si128(c2[7619],simde_mm_xor_si128(c2[2331],simde_mm_xor_si128(c2[1803],simde_mm_xor_si128(c2[752],simde_mm_xor_si128(c2[224],simde_mm_xor_si128(c2[1306],simde_mm_xor_si128(c2[778],simde_mm_xor_si128(c2[1824],simde_mm_xor_si128(c2[1296],simde_mm_xor_si128(c2[3408],simde_mm_xor_si128(c2[2352],simde_mm_xor_si128(c2[2880],simde_mm_xor_si128(c2[8194],simde_mm_xor_si128(c2[7666],simde_mm_xor_si128(c2[5026],simde_mm_xor_si128(c2[4498],simde_mm_xor_si128(c2[1859],simde_mm_xor_si128(c2[803],simde_mm_xor_si128(c2[1331],simde_mm_xor_si128(c2[1877],simde_mm_xor_si128(c2[1349],simde_mm_xor_si128(c2[4513],simde_mm_xor_si128(c2[3985],simde_mm_xor_si128(c2[5046],simde_mm_xor_si128(c2[3990],simde_mm_xor_si128(c2[4518],simde_mm_xor_si128(c2[4010],simde_mm_xor_si128(c2[3482],simde_mm_xor_si128(c2[5596],simde_mm_xor_si128(c2[5068],simde_mm_xor_si128(c2[4009],simde_mm_xor_si128(c2[2953],simde_mm_xor_si128(c2[3481],simde_mm_xor_si128(c2[6149],simde_mm_xor_si128(c2[5621],simde_mm_xor_si128(c2[5625],simde_mm_xor_si128(c2[5097],simde_mm_xor_si128(c2[3509],simde_mm_xor_si128(c2[2453],simde_mm_xor_si128(c2[2981],simde_mm_xor_si128(c2[5117],simde_mm_xor_si128(c2[4589],simde_mm_xor_si128(c2[3010],simde_mm_xor_si128(c2[2482],simde_mm_xor_si128(c2[4063],simde_mm_xor_si128(c2[3535],simde_mm_xor_si128(c2[6730],simde_mm_xor_si128(c2[6202],simde_mm_xor_si128(c2[2498],simde_mm_xor_si128(c2[1970],simde_mm_xor_si128(c2[1451],simde_mm_xor_si128(c2[395],simde_mm_xor_si128(c2[923],simde_mm_xor_si128(c2[2531],simde_mm_xor_si128(c2[2003],simde_mm_xor_si128(c2[3048],simde_mm_xor_si128(c2[2520],simde_mm_xor_si128(c2[7810],simde_mm_xor_si128(c2[6754],simde_mm_xor_si128(c2[7282],simde_mm_xor_si128(c2[2551],simde_mm_xor_si128(c2[2023],simde_mm_xor_si128(c2[1495],simde_mm_xor_si128(c2[967],simde_mm_xor_si128(c2[969],simde_mm_xor_si128(c2[8360],simde_mm_xor_si128(c2[441],simde_mm_xor_si128(c2[3627],simde_mm_xor_si128(c2[3099],simde_mm_xor_si128(c2[7320],simde_mm_xor_si128(c2[6792],simde_mm_xor_si128(c2[8380],simde_mm_xor_si128(c2[7852],simde_mm_xor_si128(c2[6816],simde_mm_xor_si128(c2[6288],simde_mm_xor_si128(c2[8404],simde_mm_xor_si128(c2[7876],simde_mm_xor_si128(c2[5238],simde_mm_xor_si128(c2[4182],simde_mm_xor_si128(c2[4710],simde_mm_xor_si128(c2[6841],simde_mm_xor_si128(c2[6313],simde_mm_xor_si128(c2[1571],simde_mm_xor_si128(c2[1043],simde_mm_xor_si128(c2[4202],simde_mm_xor_si128(c2[3146],c2[3674]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 32
+     d2[384]=simde_mm_xor_si128(c2[1585],simde_mm_xor_si128(c2[6346],simde_mm_xor_si128(c2[6342],simde_mm_xor_si128(c2[6870],simde_mm_xor_si128(c2[1595],simde_mm_xor_si128(c2[2123],simde_mm_xor_si128(c2[3174],simde_mm_xor_si128(c2[1615],simde_mm_xor_si128(c2[4785],simde_mm_xor_si128(c2[5313],simde_mm_xor_si128(c2[3203],simde_mm_xor_si128(c2[3731],simde_mm_xor_si128(c2[1113],simde_mm_xor_si128(c2[6394],simde_mm_xor_si128(c2[3746],simde_mm_xor_si128(c2[4274],simde_mm_xor_si128(c2[605],simde_mm_xor_si128(c2[4834],simde_mm_xor_si128(c2[7997],simde_mm_xor_si128(c2[78],simde_mm_xor_si128(c2[96],simde_mm_xor_si128(c2[5906],simde_mm_xor_si128(c2[6434],simde_mm_xor_si128(c2[1163],simde_mm_xor_si128(c2[1691],simde_mm_xor_si128(c2[2765],simde_mm_xor_si128(c2[123],simde_mm_xor_si128(c2[8049],simde_mm_xor_si128(c2[130],simde_mm_xor_si128(c2[4375],simde_mm_xor_si128(c2[148],simde_mm_xor_si128(c2[676],simde_mm_xor_si128(c2[3318],simde_mm_xor_si128(c2[3846],simde_mm_xor_si128(c2[5978],simde_mm_xor_si128(c2[5458],simde_mm_xor_si128(c2[5986],simde_mm_xor_si128(c2[4395],simde_mm_xor_si128(c2[4923],simde_mm_xor_si128(c2[6002],simde_mm_xor_si128(c2[8115],simde_mm_xor_si128(c2[196],simde_mm_xor_si128(c2[3897],simde_mm_xor_si128(c2[4425],simde_mm_xor_si128(c2[7081],simde_mm_xor_si128(c2[1277],simde_mm_xor_si128(c2[7617],simde_mm_xor_si128(c2[8145],simde_mm_xor_si128(c2[240],simde_mm_xor_si128(c2[242],simde_mm_xor_si128(c2[770],simde_mm_xor_si128(c2[1826],simde_mm_xor_si128(c2[2354],simde_mm_xor_si128(c2[7128],simde_mm_xor_si128(c2[3960],simde_mm_xor_si128(c2[265],simde_mm_xor_si128(c2[793],simde_mm_xor_si128(c2[823],simde_mm_xor_si128(c2[3459],simde_mm_xor_si128(c2[3464],simde_mm_xor_si128(c2[3992],simde_mm_xor_si128(c2[5040],simde_mm_xor_si128(c2[2956],simde_mm_xor_si128(c2[4014],simde_mm_xor_si128(c2[4542],simde_mm_xor_si128(c2[2427],simde_mm_xor_si128(c2[2955],simde_mm_xor_si128(c2[5095],simde_mm_xor_si128(c2[4043],simde_mm_xor_si128(c2[4571],simde_mm_xor_si128(c2[1927],simde_mm_xor_si128(c2[2455],simde_mm_xor_si128(c2[8266],simde_mm_xor_si128(c2[4063],simde_mm_xor_si128(c2[1944],simde_mm_xor_si128(c2[2481],simde_mm_xor_si128(c2[3009],simde_mm_xor_si128(c2[5664],simde_mm_xor_si128(c2[1444],simde_mm_xor_si128(c2[8304],simde_mm_xor_si128(c2[385],simde_mm_xor_si128(c2[1465],simde_mm_xor_si128(c2[1466],simde_mm_xor_si128(c2[1994],simde_mm_xor_si128(c2[6216],simde_mm_xor_si128(c2[6744],simde_mm_xor_si128(c2[1497],simde_mm_xor_si128(c2[8360],simde_mm_xor_si128(c2[441],simde_mm_xor_si128(c2[7834],simde_mm_xor_si128(c2[8362],simde_mm_xor_si128(c2[2573],simde_mm_xor_si128(c2[6266],simde_mm_xor_si128(c2[6798],simde_mm_xor_si128(c2[7326],simde_mm_xor_si128(c2[5762],simde_mm_xor_si128(c2[6822],simde_mm_xor_si128(c2[7350],simde_mm_xor_si128(c2[3656],simde_mm_xor_si128(c2[4184],simde_mm_xor_si128(c2[5787],simde_mm_xor_si128(c2[505],simde_mm_xor_si128(c2[2620],c2[3148])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 33
+     d2[396]=simde_mm_xor_si128(c2[1616],simde_mm_xor_si128(c2[4272],simde_mm_xor_si128(c2[5026],c2[5262])));
+
+//row: 34
+     d2[408]=simde_mm_xor_si128(c2[3170],simde_mm_xor_si128(c2[2289],simde_mm_xor_si128(c2[5640],c2[4639])));
+
+//row: 35
+     d2[420]=simde_mm_xor_si128(c2[6875],simde_mm_xor_si128(c2[3177],simde_mm_xor_si128(c2[3701],simde_mm_xor_si128(c2[7401],simde_mm_xor_si128(c2[6893],simde_mm_xor_si128(c2[2144],simde_mm_xor_si128(c2[562],simde_mm_xor_si128(c2[32],simde_mm_xor_si128(c2[6391],simde_mm_xor_si128(c2[3225],simde_mm_xor_si128(c2[1105],simde_mm_xor_si128(c2[5883],simde_mm_xor_si128(c2[1665],simde_mm_xor_si128(c2[5356],simde_mm_xor_si128(c2[5386],simde_mm_xor_si128(c2[3265],simde_mm_xor_si128(c2[6969],simde_mm_xor_si128(c2[8043],simde_mm_xor_si128(c2[5401],simde_mm_xor_si128(c2[5408],simde_mm_xor_si128(c2[1206],simde_mm_xor_si128(c2[5954],simde_mm_xor_si128(c2[677],simde_mm_xor_si128(c2[8072],simde_mm_xor_si128(c2[2809],simde_mm_xor_si128(c2[2817],simde_mm_xor_si128(c2[1754],simde_mm_xor_si128(c2[2833],simde_mm_xor_si128(c2[5474],simde_mm_xor_si128(c2[1256],simde_mm_xor_si128(c2[3912],simde_mm_xor_si128(c2[6555],simde_mm_xor_si128(c2[4976],simde_mm_xor_si128(c2[5530],simde_mm_xor_si128(c2[6048],simde_mm_xor_si128(c2[7632],simde_mm_xor_si128(c2[3971],simde_mm_xor_si128(c2[803],simde_mm_xor_si128(c2[6083],simde_mm_xor_si128(c2[6101],simde_mm_xor_si128(c2[290],simde_mm_xor_si128(c2[823],simde_mm_xor_si128(c2[1344],simde_mm_xor_si128(c2[8234],simde_mm_xor_si128(c2[1373],simde_mm_xor_si128(c2[8233],simde_mm_xor_si128(c2[1926],simde_mm_xor_si128(c2[1402],simde_mm_xor_si128(c2[7733],simde_mm_xor_si128(c2[894],simde_mm_xor_si128(c2[7234],simde_mm_xor_si128(c2[8287],simde_mm_xor_si128(c2[2507],simde_mm_xor_si128(c2[6722],simde_mm_xor_si128(c2[5675],simde_mm_xor_si128(c2[6755],simde_mm_xor_si128(c2[7272],simde_mm_xor_si128(c2[3587],simde_mm_xor_si128(c2[6775],simde_mm_xor_si128(c2[5719],simde_mm_xor_si128(c2[5193],simde_mm_xor_si128(c2[7851],simde_mm_xor_si128(c2[3097],simde_mm_xor_si128(c2[4157],simde_mm_xor_si128(c2[2593],simde_mm_xor_si128(c2[4181],simde_mm_xor_si128(c2[1015],simde_mm_xor_si128(c2[2618],simde_mm_xor_si128(c2[5795],c2[8426])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 36
+     d2[432]=simde_mm_xor_si128(c2[1057],simde_mm_xor_si128(c2[5095],simde_mm_xor_si128(c2[5118],c2[3073])));
+
+//row: 37
+     d2[444]=simde_mm_xor_si128(c2[3170],simde_mm_xor_si128(c2[3698],simde_mm_xor_si128(c2[0],simde_mm_xor_si128(c2[536],simde_mm_xor_si128(c2[4224],simde_mm_xor_si128(c2[3200],simde_mm_xor_si128(c2[3728],simde_mm_xor_si128(c2[7426],simde_mm_xor_si128(c2[5832],simde_mm_xor_si128(c2[6892],simde_mm_xor_si128(c2[2698],simde_mm_xor_si128(c2[3226],simde_mm_xor_si128(c2[48],simde_mm_xor_si128(c2[6387],simde_mm_xor_si128(c2[2190],simde_mm_xor_si128(c2[2718],simde_mm_xor_si128(c2[6947],simde_mm_xor_si128(c2[2191],simde_mm_xor_si128(c2[2209],simde_mm_xor_si128(c2[100],simde_mm_xor_si128(c2[3792],simde_mm_xor_si128(c2[4350],simde_mm_xor_si128(c2[4878],simde_mm_xor_si128(c2[2236],simde_mm_xor_si128(c2[2243],simde_mm_xor_si128(c2[5960],simde_mm_xor_si128(c2[6488],simde_mm_xor_si128(c2[2789],simde_mm_xor_si128(c2[5959],simde_mm_xor_si128(c2[8091],simde_mm_xor_si128(c2[8099],simde_mm_xor_si128(c2[7036],simde_mm_xor_si128(c2[8115],simde_mm_xor_si128(c2[2309],simde_mm_xor_si128(c2[6538],simde_mm_xor_si128(c2[219],simde_mm_xor_si128(c2[747],simde_mm_xor_si128(c2[3390],simde_mm_xor_si128(c2[1811],simde_mm_xor_si128(c2[1825],simde_mm_xor_si128(c2[2353],simde_mm_xor_si128(c2[2883],simde_mm_xor_si128(c2[4467],simde_mm_xor_si128(c2[266],simde_mm_xor_si128(c2[794],simde_mm_xor_si128(c2[6073],simde_mm_xor_si128(c2[2906],simde_mm_xor_si128(c2[2408],simde_mm_xor_si128(c2[2936],simde_mm_xor_si128(c2[5572],simde_mm_xor_si128(c2[6105],simde_mm_xor_si128(c2[4541],simde_mm_xor_si128(c2[5069],simde_mm_xor_si128(c2[6655],simde_mm_xor_si128(c2[5068],simde_mm_xor_si128(c2[1373],simde_mm_xor_si128(c2[7208],simde_mm_xor_si128(c2[6672],simde_mm_xor_si128(c2[4568],simde_mm_xor_si128(c2[5648],simde_mm_xor_si128(c2[6176],simde_mm_xor_si128(c2[4057],simde_mm_xor_si128(c2[5122],simde_mm_xor_si128(c2[7249],simde_mm_xor_si128(c2[7777],simde_mm_xor_si128(c2[3557],simde_mm_xor_si128(c2[2498],simde_mm_xor_si128(c2[3578],simde_mm_xor_si128(c2[4107],simde_mm_xor_si128(c2[410],simde_mm_xor_si128(c2[3082],simde_mm_xor_si128(c2[3610],simde_mm_xor_si128(c2[2554],simde_mm_xor_si128(c2[2016],simde_mm_xor_si128(c2[4158],simde_mm_xor_si128(c2[4686],simde_mm_xor_si128(c2[8379],simde_mm_xor_si128(c2[992],simde_mm_xor_si128(c2[7347],simde_mm_xor_si128(c2[7875],simde_mm_xor_si128(c2[1016],simde_mm_xor_si128(c2[6297],simde_mm_xor_si128(c2[7372],simde_mm_xor_si128(c2[7900],simde_mm_xor_si128(c2[2618],c2[5261])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 38
+     d2[456]=simde_mm_xor_si128(c2[4763],simde_mm_xor_si128(c2[755],simde_mm_xor_si128(c2[774],c2[3457])));
+
+//row: 39
+     d2[468]=simde_mm_xor_si128(c2[1084],simde_mm_xor_si128(c2[6941],simde_mm_xor_si128(c2[5457],c2[5214])));
+
+//row: 40
+     d2[480]=simde_mm_xor_si128(c2[7930],simde_mm_xor_si128(c2[2834],c2[4639]));
+
+//row: 41
+     d2[492]=simde_mm_xor_si128(c2[2139],simde_mm_xor_si128(c2[5359],simde_mm_xor_si128(c2[6032],c2[438])));
+
+//row: 42
+     d2[504]=simde_mm_xor_si128(c2[7400],simde_mm_xor_si128(c2[3702],simde_mm_xor_si128(c2[3698],simde_mm_xor_si128(c2[4226],simde_mm_xor_si128(c2[7398],simde_mm_xor_si128(c2[7926],simde_mm_xor_si128(c2[535],simde_mm_xor_si128(c2[7418],simde_mm_xor_si128(c2[2141],simde_mm_xor_si128(c2[2669],simde_mm_xor_si128(c2[559],simde_mm_xor_si128(c2[1087],simde_mm_xor_si128(c2[6916],simde_mm_xor_si128(c2[3750],simde_mm_xor_si128(c2[1114],simde_mm_xor_si128(c2[1642],simde_mm_xor_si128(c2[6408],simde_mm_xor_si128(c2[2190],simde_mm_xor_si128(c2[5353],simde_mm_xor_si128(c2[5881],simde_mm_xor_si128(c2[5911],simde_mm_xor_si128(c2[3274],simde_mm_xor_si128(c2[3802],simde_mm_xor_si128(c2[6966],simde_mm_xor_si128(c2[7494],simde_mm_xor_si128(c2[7488],simde_mm_xor_si128(c2[121],simde_mm_xor_si128(c2[5938],simde_mm_xor_si128(c2[5405],simde_mm_xor_si128(c2[5933],simde_mm_xor_si128(c2[1731],simde_mm_xor_si128(c2[5963],simde_mm_xor_si128(c2[6491],simde_mm_xor_si128(c2[674],simde_mm_xor_si128(c2[1202],simde_mm_xor_si128(c2[3346],simde_mm_xor_si128(c2[2814],simde_mm_xor_si128(c2[3342],simde_mm_xor_si128(c2[1763],simde_mm_xor_si128(c2[2291],simde_mm_xor_si128(c2[3370],simde_mm_xor_si128(c2[5483],simde_mm_xor_si128(c2[6011],simde_mm_xor_si128(c2[1253],simde_mm_xor_si128(c2[1781],simde_mm_xor_si128(c2[4449],simde_mm_xor_si128(c2[7080],simde_mm_xor_si128(c2[4973],simde_mm_xor_si128(c2[5501],simde_mm_xor_si128(c2[6055],simde_mm_xor_si128(c2[6057],simde_mm_xor_si128(c2[6585],simde_mm_xor_si128(c2[7641],simde_mm_xor_si128(c2[8169],simde_mm_xor_si128(c2[4496],simde_mm_xor_si128(c2[1328],simde_mm_xor_si128(c2[6080],simde_mm_xor_si128(c2[6608],simde_mm_xor_si128(c2[6626],simde_mm_xor_si128(c2[827],simde_mm_xor_si128(c2[820],simde_mm_xor_si128(c2[1348],simde_mm_xor_si128(c2[312],simde_mm_xor_si128(c2[1370],simde_mm_xor_si128(c2[1898],simde_mm_xor_si128(c2[8242],simde_mm_xor_si128(c2[323],simde_mm_xor_si128(c2[2451],simde_mm_xor_si128(c2[1399],simde_mm_xor_si128(c2[1927],simde_mm_xor_si128(c2[7730],simde_mm_xor_si128(c2[8258],simde_mm_xor_si128(c2[1419],simde_mm_xor_si128(c2[7759],simde_mm_xor_si128(c2[8284],simde_mm_xor_si128(c2[365],simde_mm_xor_si128(c2[3032],simde_mm_xor_si128(c2[7259],simde_mm_xor_si128(c2[5672],simde_mm_xor_si128(c2[6200],simde_mm_xor_si128(c2[7280],simde_mm_xor_si128(c2[7281],simde_mm_xor_si128(c2[7809],simde_mm_xor_si128(c2[3584],simde_mm_xor_si128(c2[4112],simde_mm_xor_si128(c2[7300],simde_mm_xor_si128(c2[5716],simde_mm_xor_si128(c2[6244],simde_mm_xor_si128(c2[5190],simde_mm_xor_si128(c2[5718],simde_mm_xor_si128(c2[8376],simde_mm_xor_si128(c2[3634],simde_mm_xor_si128(c2[4154],simde_mm_xor_si128(c2[4682],simde_mm_xor_si128(c2[3130],simde_mm_xor_si128(c2[4178],simde_mm_xor_si128(c2[4706],simde_mm_xor_si128(c2[1012],simde_mm_xor_si128(c2[1540],simde_mm_xor_si128(c2[3155],simde_mm_xor_si128(c2[6320],simde_mm_xor_si128(c2[8435],c2[504]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 43
+     d2[516]=simde_mm_xor_si128(c2[6341],simde_mm_xor_si128(c2[2643],simde_mm_xor_si128(c2[3179],simde_mm_xor_si128(c2[6339],simde_mm_xor_si128(c2[6867],simde_mm_xor_si128(c2[6371],simde_mm_xor_si128(c2[1610],simde_mm_xor_si128(c2[7947],simde_mm_xor_si128(c2[28],simde_mm_xor_si128(c2[559],simde_mm_xor_si128(c2[5857],simde_mm_xor_si128(c2[2691],simde_mm_xor_si128(c2[583],simde_mm_xor_si128(c2[5361],simde_mm_xor_si128(c2[1131],simde_mm_xor_si128(c2[4306],simde_mm_xor_si128(c2[4834],simde_mm_xor_si128(c2[4852],simde_mm_xor_si128(c2[2743],simde_mm_xor_si128(c2[5907],simde_mm_xor_si128(c2[6435],simde_mm_xor_si128(c2[7521],simde_mm_xor_si128(c2[4879],simde_mm_xor_si128(c2[4874],simde_mm_xor_si128(c2[672],simde_mm_xor_si128(c2[5432],simde_mm_xor_si128(c2[8074],simde_mm_xor_si128(c2[155],simde_mm_xor_si128(c2[2287],simde_mm_xor_si128(c2[2283],simde_mm_xor_si128(c2[704],simde_mm_xor_si128(c2[1232],simde_mm_xor_si128(c2[2311],simde_mm_xor_si128(c2[4952],simde_mm_xor_si128(c2[194],simde_mm_xor_si128(c2[722],simde_mm_xor_si128(c2[3390],simde_mm_xor_si128(c2[6033],simde_mm_xor_si128(c2[4442],simde_mm_xor_si128(c2[4996],simde_mm_xor_si128(c2[5526],simde_mm_xor_si128(c2[6582],simde_mm_xor_si128(c2[7110],simde_mm_xor_si128(c2[3437],simde_mm_xor_si128(c2[269],simde_mm_xor_si128(c2[5021],simde_mm_xor_si128(c2[5549],simde_mm_xor_si128(c2[5579],simde_mm_xor_si128(c2[8215],simde_mm_xor_si128(c2[8208],simde_mm_xor_si128(c2[289],simde_mm_xor_si128(c2[7712],simde_mm_xor_si128(c2[851],simde_mm_xor_si128(c2[7183],simde_mm_xor_si128(c2[7711],simde_mm_xor_si128(c2[1392],simde_mm_xor_si128(c2[868],simde_mm_xor_si128(c2[6683],simde_mm_xor_si128(c2[7211],simde_mm_xor_si128(c2[360],simde_mm_xor_si128(c2[6700],simde_mm_xor_si128(c2[7753],simde_mm_xor_si128(c2[1973],simde_mm_xor_si128(c2[6200],simde_mm_xor_si128(c2[4613],simde_mm_xor_si128(c2[5141],simde_mm_xor_si128(c2[2504],simde_mm_xor_si128(c2[6221],simde_mm_xor_si128(c2[6750],simde_mm_xor_si128(c2[2525],simde_mm_xor_si128(c2[3053],simde_mm_xor_si128(c2[6241],simde_mm_xor_si128(c2[5185],simde_mm_xor_si128(c2[4131],simde_mm_xor_si128(c2[4659],simde_mm_xor_si128(c2[1495],simde_mm_xor_si128(c2[7329],simde_mm_xor_si128(c2[2575],simde_mm_xor_si128(c2[3635],simde_mm_xor_si128(c2[2071],simde_mm_xor_si128(c2[3659],simde_mm_xor_si128(c2[8400],simde_mm_xor_si128(c2[481],simde_mm_xor_si128(c2[2096],simde_mm_xor_si128(c2[5261],simde_mm_xor_si128(c2[7376],c2[7904]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 44
+     d2[528]=simde_mm_xor_si128(c2[2640],simde_mm_xor_si128(c2[7401],simde_mm_xor_si128(c2[7925],simde_mm_xor_si128(c2[3178],simde_mm_xor_si128(c2[5],simde_mm_xor_si128(c2[2670],simde_mm_xor_si128(c2[6368],simde_mm_xor_si128(c2[4786],simde_mm_xor_si128(c2[2168],simde_mm_xor_si128(c2[7449],simde_mm_xor_si128(c2[5329],simde_mm_xor_si128(c2[1660],simde_mm_xor_si128(c2[5889],simde_mm_xor_si128(c2[1133],simde_mm_xor_si128(c2[1163],simde_mm_xor_si128(c2[7489],simde_mm_xor_si128(c2[2746],simde_mm_xor_si128(c2[3820],simde_mm_xor_si128(c2[1178],simde_mm_xor_si128(c2[1185],simde_mm_xor_si128(c2[5430],simde_mm_xor_si128(c2[1731],simde_mm_xor_si128(c2[4901],simde_mm_xor_si128(c2[7033],simde_mm_xor_si128(c2[7041],simde_mm_xor_si128(c2[5978],simde_mm_xor_si128(c2[7564],simde_mm_xor_si128(c2[7057],simde_mm_xor_si128(c2[1251],simde_mm_xor_si128(c2[5480],simde_mm_xor_si128(c2[8136],simde_mm_xor_si128(c2[2332],simde_mm_xor_si128(c2[753],simde_mm_xor_si128(c2[1810],simde_mm_xor_si128(c2[1307],simde_mm_xor_si128(c2[1825],simde_mm_xor_si128(c2[3409],simde_mm_xor_si128(c2[8195],simde_mm_xor_si128(c2[5027],simde_mm_xor_si128(c2[1848],simde_mm_xor_si128(c2[1878],simde_mm_xor_si128(c2[4514],simde_mm_xor_si128(c2[5047],simde_mm_xor_si128(c2[4011],simde_mm_xor_si128(c2[5597],simde_mm_xor_si128(c2[4010],simde_mm_xor_si128(c2[6150],simde_mm_xor_si128(c2[5626],simde_mm_xor_si128(c2[3510],simde_mm_xor_si128(c2[5118],simde_mm_xor_si128(c2[3011],simde_mm_xor_si128(c2[4064],simde_mm_xor_si128(c2[6731],simde_mm_xor_si128(c2[2499],simde_mm_xor_si128(c2[1440],simde_mm_xor_si128(c2[2520],simde_mm_xor_si128(c2[3049],simde_mm_xor_si128(c2[7811],simde_mm_xor_si128(c2[2552],simde_mm_xor_si128(c2[1496],simde_mm_xor_si128(c2[970],simde_mm_xor_si128(c2[3628],simde_mm_xor_si128(c2[7321],simde_mm_xor_si128(c2[8381],simde_mm_xor_si128(c2[6817],simde_mm_xor_si128(c2[8405],simde_mm_xor_si128(c2[5239],simde_mm_xor_si128(c2[6842],simde_mm_xor_si128(c2[1560],c2[4203])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 45
+     d2[540]=simde_mm_xor_si128(c2[3728],simde_mm_xor_si128(c2[2793],c2[8160]));
+  }
+}
+#endif
diff --git a/openair1/PHY/CODING/nrLDPC_encoder/ldpc208_byte.c b/openair1/PHY/CODING/nrLDPC_encoder/ldpc208_byte.c
index eb2452a0f1b5ef147f6158999af7167e991c6543..77feb30c5ec425b26560ab288237d19d0ce7e76d 100644
--- a/openair1/PHY/CODING/nrLDPC_encoder/ldpc208_byte.c
+++ b/openair1/PHY/CODING/nrLDPC_encoder/ldpc208_byte.c
@@ -1,9 +1,9 @@
 #include "PHY/sse_intrin.h"
 // generated code for Zc=208, byte encoding
 static inline void ldpc208_byte(uint8_t *c,uint8_t *d) {
-  __m128i *csimd=(__m128i *)c,*dsimd=(__m128i *)d;
+  simde__m128i *csimd=(simde__m128i *)c,*dsimd=(simde__m128i *)d;
 
-  __m128i *c2,*d2;
+  simde__m128i *c2,*d2;
 
   int i2;
   for (i2=0; i2<13; i2++) {
@@ -11,141 +11,141 @@ static inline void ldpc208_byte(uint8_t *c,uint8_t *d) {
      d2=&dsimd[i2];
 
 //row: 0
-     d2[0]=_mm_xor_si128(c2[4010],_mm_xor_si128(c2[7443],_mm_xor_si128(c2[6293],_mm_xor_si128(c2[6298],_mm_xor_si128(c2[4036],_mm_xor_si128(c2[6898],_mm_xor_si128(c2[7468],_mm_xor_si128(c2[4062],_mm_xor_si128(c2[1203],_mm_xor_si128(c2[6923],_mm_xor_si128(c2[4088],_mm_xor_si128(c2[1795],_mm_xor_si128(c2[4662],_mm_xor_si128(c2[4114],_mm_xor_si128(c2[6400],_mm_xor_si128(c2[4680],_mm_xor_si128(c2[4140],_mm_xor_si128(c2[709],_mm_xor_si128(c2[6428],_mm_xor_si128(c2[4166],_mm_xor_si128(c2[7022],_mm_xor_si128(c2[4741],_mm_xor_si128(c2[4192],_mm_xor_si128(c2[4195],_mm_xor_si128(c2[7053],_mm_xor_si128(c2[4218],_mm_xor_si128(c2[7652],_mm_xor_si128(c2[7650],_mm_xor_si128(c2[4244],_mm_xor_si128(c2[5388],_mm_xor_si128(c2[807],_mm_xor_si128(c2[4270],_mm_xor_si128(c2[5985],_mm_xor_si128(c2[8270],_mm_xor_si128(c2[4296],_mm_xor_si128(c2[4297],_mm_xor_si128(c2[6017],_mm_xor_si128(c2[4322],_mm_xor_si128(c2[2038],_mm_xor_si128(c2[887],_mm_xor_si128(c2[4348],_mm_xor_si128(c2[1485],_mm_xor_si128(c2[8357],_mm_xor_si128(c2[4374],_mm_xor_si128(c2[2088],_mm_xor_si128(c2[7234],_mm_xor_si128(c2[4400],_mm_xor_si128(c2[7832],_mm_xor_si128(c2[7265],_mm_xor_si128(c2[4426],_mm_xor_si128(c2[3284],_mm_xor_si128(c2[2144],_mm_xor_si128(c2[4452],_mm_xor_si128(c2[3883],_mm_xor_si128(c2[2734],_mm_xor_si128(c2[4478],_mm_xor_si128(c2[6192],_mm_xor_si128(c2[9048],_mm_xor_si128(c2[4504],_mm_xor_si128(c2[9077],_mm_xor_si128(c2[1075],_mm_xor_si128(c2[4530],_mm_xor_si128(c2[5678],_mm_xor_si128(c2[5674],_mm_xor_si128(c2[4556],_mm_xor_si128(c2[3986],c2[3408]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+     d2[0]=simde_mm_xor_si128(c2[4010],simde_mm_xor_si128(c2[7443],simde_mm_xor_si128(c2[6293],simde_mm_xor_si128(c2[6298],simde_mm_xor_si128(c2[4036],simde_mm_xor_si128(c2[6898],simde_mm_xor_si128(c2[7468],simde_mm_xor_si128(c2[4062],simde_mm_xor_si128(c2[1203],simde_mm_xor_si128(c2[6923],simde_mm_xor_si128(c2[4088],simde_mm_xor_si128(c2[1795],simde_mm_xor_si128(c2[4662],simde_mm_xor_si128(c2[4114],simde_mm_xor_si128(c2[6400],simde_mm_xor_si128(c2[4680],simde_mm_xor_si128(c2[4140],simde_mm_xor_si128(c2[709],simde_mm_xor_si128(c2[6428],simde_mm_xor_si128(c2[4166],simde_mm_xor_si128(c2[7022],simde_mm_xor_si128(c2[4741],simde_mm_xor_si128(c2[4192],simde_mm_xor_si128(c2[4195],simde_mm_xor_si128(c2[7053],simde_mm_xor_si128(c2[4218],simde_mm_xor_si128(c2[7652],simde_mm_xor_si128(c2[7650],simde_mm_xor_si128(c2[4244],simde_mm_xor_si128(c2[5388],simde_mm_xor_si128(c2[807],simde_mm_xor_si128(c2[4270],simde_mm_xor_si128(c2[5985],simde_mm_xor_si128(c2[8270],simde_mm_xor_si128(c2[4296],simde_mm_xor_si128(c2[4297],simde_mm_xor_si128(c2[6017],simde_mm_xor_si128(c2[4322],simde_mm_xor_si128(c2[2038],simde_mm_xor_si128(c2[887],simde_mm_xor_si128(c2[4348],simde_mm_xor_si128(c2[1485],simde_mm_xor_si128(c2[8357],simde_mm_xor_si128(c2[4374],simde_mm_xor_si128(c2[2088],simde_mm_xor_si128(c2[7234],simde_mm_xor_si128(c2[4400],simde_mm_xor_si128(c2[7832],simde_mm_xor_si128(c2[7265],simde_mm_xor_si128(c2[4426],simde_mm_xor_si128(c2[3284],simde_mm_xor_si128(c2[2144],simde_mm_xor_si128(c2[4452],simde_mm_xor_si128(c2[3883],simde_mm_xor_si128(c2[2734],simde_mm_xor_si128(c2[4478],simde_mm_xor_si128(c2[6192],simde_mm_xor_si128(c2[9048],simde_mm_xor_si128(c2[4504],simde_mm_xor_si128(c2[9077],simde_mm_xor_si128(c2[1075],simde_mm_xor_si128(c2[4530],simde_mm_xor_si128(c2[5678],simde_mm_xor_si128(c2[5674],simde_mm_xor_si128(c2[4556],simde_mm_xor_si128(c2[3986],c2[3408]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
 
 //row: 1
-     d2[13]=_mm_xor_si128(c2[0],_mm_xor_si128(c2[4010],_mm_xor_si128(c2[7443],_mm_xor_si128(c2[6293],_mm_xor_si128(c2[6298],_mm_xor_si128(c2[26],_mm_xor_si128(c2[4036],_mm_xor_si128(c2[6898],_mm_xor_si128(c2[7468],_mm_xor_si128(c2[52],_mm_xor_si128(c2[4062],_mm_xor_si128(c2[1203],_mm_xor_si128(c2[6923],_mm_xor_si128(c2[78],_mm_xor_si128(c2[4088],_mm_xor_si128(c2[1795],_mm_xor_si128(c2[4662],_mm_xor_si128(c2[4114],_mm_xor_si128(c2[6400],_mm_xor_si128(c2[4680],_mm_xor_si128(c2[130],_mm_xor_si128(c2[4140],_mm_xor_si128(c2[709],_mm_xor_si128(c2[6428],_mm_xor_si128(c2[156],_mm_xor_si128(c2[4166],_mm_xor_si128(c2[7022],_mm_xor_si128(c2[4741],_mm_xor_si128(c2[4192],_mm_xor_si128(c2[4195],_mm_xor_si128(c2[7053],_mm_xor_si128(c2[4218],_mm_xor_si128(c2[7652],_mm_xor_si128(c2[7650],_mm_xor_si128(c2[234],_mm_xor_si128(c2[4244],_mm_xor_si128(c2[5388],_mm_xor_si128(c2[807],_mm_xor_si128(c2[260],_mm_xor_si128(c2[4270],_mm_xor_si128(c2[5985],_mm_xor_si128(c2[8270],_mm_xor_si128(c2[286],_mm_xor_si128(c2[4296],_mm_xor_si128(c2[4297],_mm_xor_si128(c2[6017],_mm_xor_si128(c2[312],_mm_xor_si128(c2[4322],_mm_xor_si128(c2[2038],_mm_xor_si128(c2[887],_mm_xor_si128(c2[338],_mm_xor_si128(c2[4348],_mm_xor_si128(c2[1485],_mm_xor_si128(c2[8357],_mm_xor_si128(c2[4374],_mm_xor_si128(c2[2088],_mm_xor_si128(c2[7234],_mm_xor_si128(c2[390],_mm_xor_si128(c2[4400],_mm_xor_si128(c2[7832],_mm_xor_si128(c2[7265],_mm_xor_si128(c2[416],_mm_xor_si128(c2[4426],_mm_xor_si128(c2[3284],_mm_xor_si128(c2[2144],_mm_xor_si128(c2[4452],_mm_xor_si128(c2[3883],_mm_xor_si128(c2[2734],_mm_xor_si128(c2[468],_mm_xor_si128(c2[4478],_mm_xor_si128(c2[6192],_mm_xor_si128(c2[9048],_mm_xor_si128(c2[494],_mm_xor_si128(c2[4504],_mm_xor_si128(c2[9077],_mm_xor_si128(c2[1075],_mm_xor_si128(c2[520],_mm_xor_si128(c2[4530],_mm_xor_si128(c2[5678],_mm_xor_si128(c2[5674],_mm_xor_si128(c2[546],_mm_xor_si128(c2[4556],_mm_xor_si128(c2[3986],c2[3408])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+     d2[13]=simde_mm_xor_si128(c2[0],simde_mm_xor_si128(c2[4010],simde_mm_xor_si128(c2[7443],simde_mm_xor_si128(c2[6293],simde_mm_xor_si128(c2[6298],simde_mm_xor_si128(c2[26],simde_mm_xor_si128(c2[4036],simde_mm_xor_si128(c2[6898],simde_mm_xor_si128(c2[7468],simde_mm_xor_si128(c2[52],simde_mm_xor_si128(c2[4062],simde_mm_xor_si128(c2[1203],simde_mm_xor_si128(c2[6923],simde_mm_xor_si128(c2[78],simde_mm_xor_si128(c2[4088],simde_mm_xor_si128(c2[1795],simde_mm_xor_si128(c2[4662],simde_mm_xor_si128(c2[4114],simde_mm_xor_si128(c2[6400],simde_mm_xor_si128(c2[4680],simde_mm_xor_si128(c2[130],simde_mm_xor_si128(c2[4140],simde_mm_xor_si128(c2[709],simde_mm_xor_si128(c2[6428],simde_mm_xor_si128(c2[156],simde_mm_xor_si128(c2[4166],simde_mm_xor_si128(c2[7022],simde_mm_xor_si128(c2[4741],simde_mm_xor_si128(c2[4192],simde_mm_xor_si128(c2[4195],simde_mm_xor_si128(c2[7053],simde_mm_xor_si128(c2[4218],simde_mm_xor_si128(c2[7652],simde_mm_xor_si128(c2[7650],simde_mm_xor_si128(c2[234],simde_mm_xor_si128(c2[4244],simde_mm_xor_si128(c2[5388],simde_mm_xor_si128(c2[807],simde_mm_xor_si128(c2[260],simde_mm_xor_si128(c2[4270],simde_mm_xor_si128(c2[5985],simde_mm_xor_si128(c2[8270],simde_mm_xor_si128(c2[286],simde_mm_xor_si128(c2[4296],simde_mm_xor_si128(c2[4297],simde_mm_xor_si128(c2[6017],simde_mm_xor_si128(c2[312],simde_mm_xor_si128(c2[4322],simde_mm_xor_si128(c2[2038],simde_mm_xor_si128(c2[887],simde_mm_xor_si128(c2[338],simde_mm_xor_si128(c2[4348],simde_mm_xor_si128(c2[1485],simde_mm_xor_si128(c2[8357],simde_mm_xor_si128(c2[4374],simde_mm_xor_si128(c2[2088],simde_mm_xor_si128(c2[7234],simde_mm_xor_si128(c2[390],simde_mm_xor_si128(c2[4400],simde_mm_xor_si128(c2[7832],simde_mm_xor_si128(c2[7265],simde_mm_xor_si128(c2[416],simde_mm_xor_si128(c2[4426],simde_mm_xor_si128(c2[3284],simde_mm_xor_si128(c2[2144],simde_mm_xor_si128(c2[4452],simde_mm_xor_si128(c2[3883],simde_mm_xor_si128(c2[2734],simde_mm_xor_si128(c2[468],simde_mm_xor_si128(c2[4478],simde_mm_xor_si128(c2[6192],simde_mm_xor_si128(c2[9048],simde_mm_xor_si128(c2[494],simde_mm_xor_si128(c2[4504],simde_mm_xor_si128(c2[9077],simde_mm_xor_si128(c2[1075],simde_mm_xor_si128(c2[520],simde_mm_xor_si128(c2[4530],simde_mm_xor_si128(c2[5678],simde_mm_xor_si128(c2[5674],simde_mm_xor_si128(c2[546],simde_mm_xor_si128(c2[4556],simde_mm_xor_si128(c2[3986],c2[3408])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
 
 //row: 2
-     d2[26]=_mm_xor_si128(c2[4010],_mm_xor_si128(c2[7443],_mm_xor_si128(c2[2296],_mm_xor_si128(c2[6293],_mm_xor_si128(c2[2288],_mm_xor_si128(c2[6298],_mm_xor_si128(c2[4036],_mm_xor_si128(c2[2888],_mm_xor_si128(c2[6898],_mm_xor_si128(c2[3458],_mm_xor_si128(c2[7468],_mm_xor_si128(c2[4062],_mm_xor_si128(c2[1203],_mm_xor_si128(c2[2913],_mm_xor_si128(c2[6923],_mm_xor_si128(c2[4088],_mm_xor_si128(c2[1795],_mm_xor_si128(c2[652],_mm_xor_si128(c2[4662],_mm_xor_si128(c2[4114],_mm_xor_si128(c2[2403],_mm_xor_si128(c2[6400],_mm_xor_si128(c2[683],_mm_xor_si128(c2[4680],_mm_xor_si128(c2[4140],_mm_xor_si128(c2[709],_mm_xor_si128(c2[2418],_mm_xor_si128(c2[6428],_mm_xor_si128(c2[4166],_mm_xor_si128(c2[3025],_mm_xor_si128(c2[7022],_mm_xor_si128(c2[731],_mm_xor_si128(c2[4741],_mm_xor_si128(c2[4192],_mm_xor_si128(c2[185],_mm_xor_si128(c2[4195],_mm_xor_si128(c2[3043],_mm_xor_si128(c2[7053],_mm_xor_si128(c2[4218],_mm_xor_si128(c2[3642],_mm_xor_si128(c2[7652],_mm_xor_si128(c2[3640],_mm_xor_si128(c2[7650],_mm_xor_si128(c2[4244],_mm_xor_si128(c2[5388],_mm_xor_si128(c2[5961],_mm_xor_si128(c2[807],_mm_xor_si128(c2[4270],_mm_xor_si128(c2[1988],_mm_xor_si128(c2[5985],_mm_xor_si128(c2[4273],_mm_xor_si128(c2[8270],_mm_xor_si128(c2[4296],_mm_xor_si128(c2[4297],_mm_xor_si128(c2[2007],_mm_xor_si128(c2[6017],_mm_xor_si128(c2[4322],_mm_xor_si128(c2[2038],_mm_xor_si128(c2[6041],_mm_xor_si128(c2[887],_mm_xor_si128(c2[4348],_mm_xor_si128(c2[6639],_mm_xor_si128(c2[1485],_mm_xor_si128(c2[4347],_mm_xor_si128(c2[8357],_mm_xor_si128(c2[4374],_mm_xor_si128(c2[7229],_mm_xor_si128(c2[2088],_mm_xor_si128(c2[3224],_mm_xor_si128(c2[7234],_mm_xor_si128(c2[4400],_mm_xor_si128(c2[7832],_mm_xor_si128(c2[3255],_mm_xor_si128(c2[7265],_mm_xor_si128(c2[4426],_mm_xor_si128(c2[3284],_mm_xor_si128(c2[7285],_mm_xor_si128(c2[2144],_mm_xor_si128(c2[4452],_mm_xor_si128(c2[9024],_mm_xor_si128(c2[3883],_mm_xor_si128(c2[7888],_mm_xor_si128(c2[2734],_mm_xor_si128(c2[4478],_mm_xor_si128(c2[2195],_mm_xor_si128(c2[6192],_mm_xor_si128(c2[5051],_mm_xor_si128(c2[9048],_mm_xor_si128(c2[4504],_mm_xor_si128(c2[9077],_mm_xor_si128(c2[6216],_mm_xor_si128(c2[1075],_mm_xor_si128(c2[4530],_mm_xor_si128(c2[1668],_mm_xor_si128(c2[5678],_mm_xor_si128(c2[1664],_mm_xor_si128(c2[5674],_mm_xor_si128(c2[4556],_mm_xor_si128(c2[3986],_mm_xor_si128(c2[8562],c2[3408]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+     d2[26]=simde_mm_xor_si128(c2[4010],simde_mm_xor_si128(c2[7443],simde_mm_xor_si128(c2[2296],simde_mm_xor_si128(c2[6293],simde_mm_xor_si128(c2[2288],simde_mm_xor_si128(c2[6298],simde_mm_xor_si128(c2[4036],simde_mm_xor_si128(c2[2888],simde_mm_xor_si128(c2[6898],simde_mm_xor_si128(c2[3458],simde_mm_xor_si128(c2[7468],simde_mm_xor_si128(c2[4062],simde_mm_xor_si128(c2[1203],simde_mm_xor_si128(c2[2913],simde_mm_xor_si128(c2[6923],simde_mm_xor_si128(c2[4088],simde_mm_xor_si128(c2[1795],simde_mm_xor_si128(c2[652],simde_mm_xor_si128(c2[4662],simde_mm_xor_si128(c2[4114],simde_mm_xor_si128(c2[2403],simde_mm_xor_si128(c2[6400],simde_mm_xor_si128(c2[683],simde_mm_xor_si128(c2[4680],simde_mm_xor_si128(c2[4140],simde_mm_xor_si128(c2[709],simde_mm_xor_si128(c2[2418],simde_mm_xor_si128(c2[6428],simde_mm_xor_si128(c2[4166],simde_mm_xor_si128(c2[3025],simde_mm_xor_si128(c2[7022],simde_mm_xor_si128(c2[731],simde_mm_xor_si128(c2[4741],simde_mm_xor_si128(c2[4192],simde_mm_xor_si128(c2[185],simde_mm_xor_si128(c2[4195],simde_mm_xor_si128(c2[3043],simde_mm_xor_si128(c2[7053],simde_mm_xor_si128(c2[4218],simde_mm_xor_si128(c2[3642],simde_mm_xor_si128(c2[7652],simde_mm_xor_si128(c2[3640],simde_mm_xor_si128(c2[7650],simde_mm_xor_si128(c2[4244],simde_mm_xor_si128(c2[5388],simde_mm_xor_si128(c2[5961],simde_mm_xor_si128(c2[807],simde_mm_xor_si128(c2[4270],simde_mm_xor_si128(c2[1988],simde_mm_xor_si128(c2[5985],simde_mm_xor_si128(c2[4273],simde_mm_xor_si128(c2[8270],simde_mm_xor_si128(c2[4296],simde_mm_xor_si128(c2[4297],simde_mm_xor_si128(c2[2007],simde_mm_xor_si128(c2[6017],simde_mm_xor_si128(c2[4322],simde_mm_xor_si128(c2[2038],simde_mm_xor_si128(c2[6041],simde_mm_xor_si128(c2[887],simde_mm_xor_si128(c2[4348],simde_mm_xor_si128(c2[6639],simde_mm_xor_si128(c2[1485],simde_mm_xor_si128(c2[4347],simde_mm_xor_si128(c2[8357],simde_mm_xor_si128(c2[4374],simde_mm_xor_si128(c2[7229],simde_mm_xor_si128(c2[2088],simde_mm_xor_si128(c2[3224],simde_mm_xor_si128(c2[7234],simde_mm_xor_si128(c2[4400],simde_mm_xor_si128(c2[7832],simde_mm_xor_si128(c2[3255],simde_mm_xor_si128(c2[7265],simde_mm_xor_si128(c2[4426],simde_mm_xor_si128(c2[3284],simde_mm_xor_si128(c2[7285],simde_mm_xor_si128(c2[2144],simde_mm_xor_si128(c2[4452],simde_mm_xor_si128(c2[9024],simde_mm_xor_si128(c2[3883],simde_mm_xor_si128(c2[7888],simde_mm_xor_si128(c2[2734],simde_mm_xor_si128(c2[4478],simde_mm_xor_si128(c2[2195],simde_mm_xor_si128(c2[6192],simde_mm_xor_si128(c2[5051],simde_mm_xor_si128(c2[9048],simde_mm_xor_si128(c2[4504],simde_mm_xor_si128(c2[9077],simde_mm_xor_si128(c2[6216],simde_mm_xor_si128(c2[1075],simde_mm_xor_si128(c2[4530],simde_mm_xor_si128(c2[1668],simde_mm_xor_si128(c2[5678],simde_mm_xor_si128(c2[1664],simde_mm_xor_si128(c2[5674],simde_mm_xor_si128(c2[4556],simde_mm_xor_si128(c2[3986],simde_mm_xor_si128(c2[8562],c2[3408]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
 
 //row: 3
-     d2[39]=_mm_xor_si128(c2[4010],_mm_xor_si128(c2[7443],_mm_xor_si128(c2[6293],_mm_xor_si128(c2[2288],_mm_xor_si128(c2[6298],_mm_xor_si128(c2[4036],_mm_xor_si128(c2[6898],_mm_xor_si128(c2[3458],_mm_xor_si128(c2[7468],_mm_xor_si128(c2[4062],_mm_xor_si128(c2[1203],_mm_xor_si128(c2[6923],_mm_xor_si128(c2[4088],_mm_xor_si128(c2[1795],_mm_xor_si128(c2[652],_mm_xor_si128(c2[4662],_mm_xor_si128(c2[4114],_mm_xor_si128(c2[6400],_mm_xor_si128(c2[683],_mm_xor_si128(c2[4680],_mm_xor_si128(c2[4140],_mm_xor_si128(c2[709],_mm_xor_si128(c2[6428],_mm_xor_si128(c2[4166],_mm_xor_si128(c2[7022],_mm_xor_si128(c2[731],_mm_xor_si128(c2[4741],_mm_xor_si128(c2[4192],_mm_xor_si128(c2[4195],_mm_xor_si128(c2[3043],_mm_xor_si128(c2[7053],_mm_xor_si128(c2[4218],_mm_xor_si128(c2[7652],_mm_xor_si128(c2[3640],_mm_xor_si128(c2[7650],_mm_xor_si128(c2[4244],_mm_xor_si128(c2[5388],_mm_xor_si128(c2[807],_mm_xor_si128(c2[4270],_mm_xor_si128(c2[5985],_mm_xor_si128(c2[4273],_mm_xor_si128(c2[8270],_mm_xor_si128(c2[4296],_mm_xor_si128(c2[4297],_mm_xor_si128(c2[2007],_mm_xor_si128(c2[6017],_mm_xor_si128(c2[4322],_mm_xor_si128(c2[2038],_mm_xor_si128(c2[6041],_mm_xor_si128(c2[887],_mm_xor_si128(c2[4348],_mm_xor_si128(c2[1485],_mm_xor_si128(c2[4347],_mm_xor_si128(c2[8357],_mm_xor_si128(c2[4374],_mm_xor_si128(c2[2088],_mm_xor_si128(c2[3224],_mm_xor_si128(c2[7234],_mm_xor_si128(c2[4400],_mm_xor_si128(c2[7832],_mm_xor_si128(c2[7265],_mm_xor_si128(c2[4426],_mm_xor_si128(c2[3284],_mm_xor_si128(c2[7285],_mm_xor_si128(c2[2144],_mm_xor_si128(c2[4452],_mm_xor_si128(c2[3883],_mm_xor_si128(c2[7888],_mm_xor_si128(c2[2734],_mm_xor_si128(c2[4478],_mm_xor_si128(c2[6192],_mm_xor_si128(c2[5051],_mm_xor_si128(c2[9048],_mm_xor_si128(c2[4504],_mm_xor_si128(c2[9077],_mm_xor_si128(c2[1075],_mm_xor_si128(c2[4530],_mm_xor_si128(c2[5678],_mm_xor_si128(c2[1664],_mm_xor_si128(c2[5674],_mm_xor_si128(c2[4556],_mm_xor_si128(c2[3986],_mm_xor_si128(c2[8562],c2[3408])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+     d2[39]=simde_mm_xor_si128(c2[4010],simde_mm_xor_si128(c2[7443],simde_mm_xor_si128(c2[6293],simde_mm_xor_si128(c2[2288],simde_mm_xor_si128(c2[6298],simde_mm_xor_si128(c2[4036],simde_mm_xor_si128(c2[6898],simde_mm_xor_si128(c2[3458],simde_mm_xor_si128(c2[7468],simde_mm_xor_si128(c2[4062],simde_mm_xor_si128(c2[1203],simde_mm_xor_si128(c2[6923],simde_mm_xor_si128(c2[4088],simde_mm_xor_si128(c2[1795],simde_mm_xor_si128(c2[652],simde_mm_xor_si128(c2[4662],simde_mm_xor_si128(c2[4114],simde_mm_xor_si128(c2[6400],simde_mm_xor_si128(c2[683],simde_mm_xor_si128(c2[4680],simde_mm_xor_si128(c2[4140],simde_mm_xor_si128(c2[709],simde_mm_xor_si128(c2[6428],simde_mm_xor_si128(c2[4166],simde_mm_xor_si128(c2[7022],simde_mm_xor_si128(c2[731],simde_mm_xor_si128(c2[4741],simde_mm_xor_si128(c2[4192],simde_mm_xor_si128(c2[4195],simde_mm_xor_si128(c2[3043],simde_mm_xor_si128(c2[7053],simde_mm_xor_si128(c2[4218],simde_mm_xor_si128(c2[7652],simde_mm_xor_si128(c2[3640],simde_mm_xor_si128(c2[7650],simde_mm_xor_si128(c2[4244],simde_mm_xor_si128(c2[5388],simde_mm_xor_si128(c2[807],simde_mm_xor_si128(c2[4270],simde_mm_xor_si128(c2[5985],simde_mm_xor_si128(c2[4273],simde_mm_xor_si128(c2[8270],simde_mm_xor_si128(c2[4296],simde_mm_xor_si128(c2[4297],simde_mm_xor_si128(c2[2007],simde_mm_xor_si128(c2[6017],simde_mm_xor_si128(c2[4322],simde_mm_xor_si128(c2[2038],simde_mm_xor_si128(c2[6041],simde_mm_xor_si128(c2[887],simde_mm_xor_si128(c2[4348],simde_mm_xor_si128(c2[1485],simde_mm_xor_si128(c2[4347],simde_mm_xor_si128(c2[8357],simde_mm_xor_si128(c2[4374],simde_mm_xor_si128(c2[2088],simde_mm_xor_si128(c2[3224],simde_mm_xor_si128(c2[7234],simde_mm_xor_si128(c2[4400],simde_mm_xor_si128(c2[7832],simde_mm_xor_si128(c2[7265],simde_mm_xor_si128(c2[4426],simde_mm_xor_si128(c2[3284],simde_mm_xor_si128(c2[7285],simde_mm_xor_si128(c2[2144],simde_mm_xor_si128(c2[4452],simde_mm_xor_si128(c2[3883],simde_mm_xor_si128(c2[7888],simde_mm_xor_si128(c2[2734],simde_mm_xor_si128(c2[4478],simde_mm_xor_si128(c2[6192],simde_mm_xor_si128(c2[5051],simde_mm_xor_si128(c2[9048],simde_mm_xor_si128(c2[4504],simde_mm_xor_si128(c2[9077],simde_mm_xor_si128(c2[1075],simde_mm_xor_si128(c2[4530],simde_mm_xor_si128(c2[5678],simde_mm_xor_si128(c2[1664],simde_mm_xor_si128(c2[5674],simde_mm_xor_si128(c2[4556],simde_mm_xor_si128(c2[3986],simde_mm_xor_si128(c2[8562],c2[3408])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
 
 //row: 4
-     d2[52]=_mm_xor_si128(c2[4577],c2[6902]);
+     d2[52]=simde_mm_xor_si128(c2[4577],c2[6902]);
 
 //row: 5
-     d2[65]=_mm_xor_si128(c2[8590],_mm_xor_si128(c2[2872],_mm_xor_si128(c2[1722],_mm_xor_si128(c2[1727],_mm_xor_si128(c2[5159],_mm_xor_si128(c2[8616],_mm_xor_si128(c2[2314],_mm_xor_si128(c2[2897],_mm_xor_si128(c2[2320],_mm_xor_si128(c2[8642],_mm_xor_si128(c2[5783],_mm_xor_si128(c2[2352],_mm_xor_si128(c2[8668],_mm_xor_si128(c2[6375],_mm_xor_si128(c2[78],_mm_xor_si128(c2[4655],_mm_xor_si128(c2[8694],_mm_xor_si128(c2[1829],_mm_xor_si128(c2[109],_mm_xor_si128(c2[8720],_mm_xor_si128(c2[5289],_mm_xor_si128(c2[1857],_mm_xor_si128(c2[8746],_mm_xor_si128(c2[2451],_mm_xor_si128(c2[157],_mm_xor_si128(c2[8772],_mm_xor_si128(c2[8762],_mm_xor_si128(c2[2482],_mm_xor_si128(c2[8798],_mm_xor_si128(c2[3068],_mm_xor_si128(c2[3079],_mm_xor_si128(c2[8824],_mm_xor_si128(c2[817],_mm_xor_si128(c2[5387],_mm_xor_si128(c2[8850],_mm_xor_si128(c2[1414],_mm_xor_si128(c2[3699],_mm_xor_si128(c2[8876],_mm_xor_si128(c2[8877],_mm_xor_si128(c2[1433],_mm_xor_si128(c2[8902],_mm_xor_si128(c2[6605],_mm_xor_si128(c2[5467],_mm_xor_si128(c2[888],_mm_xor_si128(c2[8928],_mm_xor_si128(c2[6065],_mm_xor_si128(c2[3773],_mm_xor_si128(c2[8954],_mm_xor_si128(c2[6668],_mm_xor_si128(c2[2663],_mm_xor_si128(c2[8980],_mm_xor_si128(c2[3261],_mm_xor_si128(c2[2681],_mm_xor_si128(c2[9006],_mm_xor_si128(c2[7864],_mm_xor_si128(c2[6711],_mm_xor_si128(c2[9008],_mm_xor_si128(c2[9032],_mm_xor_si128(c2[8450],_mm_xor_si128(c2[7314],_mm_xor_si128(c2[9058],_mm_xor_si128(c2[1621],_mm_xor_si128(c2[4477],_mm_xor_si128(c2[9084],_mm_xor_si128(c2[4506],_mm_xor_si128(c2[5642],_mm_xor_si128(c2[9110],_mm_xor_si128(c2[1094],_mm_xor_si128(c2[1103],_mm_xor_si128(c2[9136],_mm_xor_si128(c2[8566],_mm_xor_si128(c2[7988],c2[1128]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+     d2[65]=simde_mm_xor_si128(c2[8590],simde_mm_xor_si128(c2[2872],simde_mm_xor_si128(c2[1722],simde_mm_xor_si128(c2[1727],simde_mm_xor_si128(c2[5159],simde_mm_xor_si128(c2[8616],simde_mm_xor_si128(c2[2314],simde_mm_xor_si128(c2[2897],simde_mm_xor_si128(c2[2320],simde_mm_xor_si128(c2[8642],simde_mm_xor_si128(c2[5783],simde_mm_xor_si128(c2[2352],simde_mm_xor_si128(c2[8668],simde_mm_xor_si128(c2[6375],simde_mm_xor_si128(c2[78],simde_mm_xor_si128(c2[4655],simde_mm_xor_si128(c2[8694],simde_mm_xor_si128(c2[1829],simde_mm_xor_si128(c2[109],simde_mm_xor_si128(c2[8720],simde_mm_xor_si128(c2[5289],simde_mm_xor_si128(c2[1857],simde_mm_xor_si128(c2[8746],simde_mm_xor_si128(c2[2451],simde_mm_xor_si128(c2[157],simde_mm_xor_si128(c2[8772],simde_mm_xor_si128(c2[8762],simde_mm_xor_si128(c2[2482],simde_mm_xor_si128(c2[8798],simde_mm_xor_si128(c2[3068],simde_mm_xor_si128(c2[3079],simde_mm_xor_si128(c2[8824],simde_mm_xor_si128(c2[817],simde_mm_xor_si128(c2[5387],simde_mm_xor_si128(c2[8850],simde_mm_xor_si128(c2[1414],simde_mm_xor_si128(c2[3699],simde_mm_xor_si128(c2[8876],simde_mm_xor_si128(c2[8877],simde_mm_xor_si128(c2[1433],simde_mm_xor_si128(c2[8902],simde_mm_xor_si128(c2[6605],simde_mm_xor_si128(c2[5467],simde_mm_xor_si128(c2[888],simde_mm_xor_si128(c2[8928],simde_mm_xor_si128(c2[6065],simde_mm_xor_si128(c2[3773],simde_mm_xor_si128(c2[8954],simde_mm_xor_si128(c2[6668],simde_mm_xor_si128(c2[2663],simde_mm_xor_si128(c2[8980],simde_mm_xor_si128(c2[3261],simde_mm_xor_si128(c2[2681],simde_mm_xor_si128(c2[9006],simde_mm_xor_si128(c2[7864],simde_mm_xor_si128(c2[6711],simde_mm_xor_si128(c2[9008],simde_mm_xor_si128(c2[9032],simde_mm_xor_si128(c2[8450],simde_mm_xor_si128(c2[7314],simde_mm_xor_si128(c2[9058],simde_mm_xor_si128(c2[1621],simde_mm_xor_si128(c2[4477],simde_mm_xor_si128(c2[9084],simde_mm_xor_si128(c2[4506],simde_mm_xor_si128(c2[5642],simde_mm_xor_si128(c2[9110],simde_mm_xor_si128(c2[1094],simde_mm_xor_si128(c2[1103],simde_mm_xor_si128(c2[9136],simde_mm_xor_si128(c2[8566],simde_mm_xor_si128(c2[7988],c2[1128]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
 
 //row: 6
-     d2[78]=_mm_xor_si128(c2[3432],_mm_xor_si128(c2[6449],_mm_xor_si128(c2[1986],_mm_xor_si128(c2[1433],_mm_xor_si128(c2[341],_mm_xor_si128(c2[5019],_mm_xor_si128(c2[3902],c2[6817])))))));
+     d2[78]=simde_mm_xor_si128(c2[3432],simde_mm_xor_si128(c2[6449],simde_mm_xor_si128(c2[1986],simde_mm_xor_si128(c2[1433],simde_mm_xor_si128(c2[341],simde_mm_xor_si128(c2[5019],simde_mm_xor_si128(c2[3902],c2[6817])))))));
 
 //row: 7
-     d2[91]=_mm_xor_si128(c2[581],_mm_xor_si128(c2[4607],_mm_xor_si128(c2[111],_mm_xor_si128(c2[5339],_mm_xor_si128(c2[8797],c2[7232])))));
+     d2[91]=simde_mm_xor_si128(c2[581],simde_mm_xor_si128(c2[4607],simde_mm_xor_si128(c2[111],simde_mm_xor_si128(c2[5339],simde_mm_xor_si128(c2[8797],c2[7232])))));
 
 //row: 8
-     d2[104]=_mm_xor_si128(c2[5728],_mm_xor_si128(c2[6292],_mm_xor_si128(c2[10],_mm_xor_si128(c2[574],_mm_xor_si128(c2[8011],_mm_xor_si128(c2[4578],_mm_xor_si128(c2[8588],_mm_xor_si128(c2[8016],_mm_xor_si128(c2[4583],_mm_xor_si128(c2[8580],_mm_xor_si128(c2[6874],_mm_xor_si128(c2[5754],_mm_xor_si128(c2[6318],_mm_xor_si128(c2[8616],_mm_xor_si128(c2[5183],_mm_xor_si128(c2[29],_mm_xor_si128(c2[35],_mm_xor_si128(c2[5753],_mm_xor_si128(c2[599],_mm_xor_si128(c2[1170],_mm_xor_si128(c2[5780],_mm_xor_si128(c2[6344],_mm_xor_si128(c2[2921],_mm_xor_si128(c2[3485],_mm_xor_si128(c2[8641],_mm_xor_si128(c2[5208],_mm_xor_si128(c2[54],_mm_xor_si128(c2[5806],_mm_xor_si128(c2[6370],_mm_xor_si128(c2[3513],_mm_xor_si128(c2[4090],_mm_xor_si128(c2[6380],_mm_xor_si128(c2[2947],_mm_xor_si128(c2[6944],_mm_xor_si128(c2[1802],_mm_xor_si128(c2[5832],_mm_xor_si128(c2[6396],_mm_xor_si128(c2[8118],_mm_xor_si128(c2[4685],_mm_xor_si128(c2[8695],_mm_xor_si128(c2[6398],_mm_xor_si128(c2[2965],_mm_xor_si128(c2[6975],_mm_xor_si128(c2[5858],_mm_xor_si128(c2[6422],_mm_xor_si128(c2[2427],_mm_xor_si128(c2[2991],_mm_xor_si128(c2[8146],_mm_xor_si128(c2[4713],_mm_xor_si128(c2[8710],_mm_xor_si128(c2[5884],_mm_xor_si128(c2[6448],_mm_xor_si128(c2[8740],_mm_xor_si128(c2[5307],_mm_xor_si128(c2[166],_mm_xor_si128(c2[6459],_mm_xor_si128(c2[3026],_mm_xor_si128(c2[7023],_mm_xor_si128(c2[5910],_mm_xor_si128(c2[6474],_mm_xor_si128(c2[5913],_mm_xor_si128(c2[2480],_mm_xor_si128(c2[6477],_mm_xor_si128(c2[8771],_mm_xor_si128(c2[5338],_mm_xor_si128(c2[184],_mm_xor_si128(c2[5936],_mm_xor_si128(c2[6500],_mm_xor_si128(c2[219],_mm_xor_si128(c2[5937],_mm_xor_si128(c2[783],_mm_xor_si128(c2[217],_mm_xor_si128(c2[5935],_mm_xor_si128(c2[781],_mm_xor_si128(c2[5962],_mm_xor_si128(c2[6526],_mm_xor_si128(c2[7106],_mm_xor_si128(c2[7670],_mm_xor_si128(c2[2525],_mm_xor_si128(c2[8243],_mm_xor_si128(c2[3102],_mm_xor_si128(c2[5988],_mm_xor_si128(c2[6552],_mm_xor_si128(c2[7703],_mm_xor_si128(c2[4270],_mm_xor_si128(c2[8280],_mm_xor_si128(c2[837],_mm_xor_si128(c2[6555],_mm_xor_si128(c2[1414],_mm_xor_si128(c2[6014],_mm_xor_si128(c2[6578],_mm_xor_si128(c2[6015],_mm_xor_si128(c2[6579],_mm_xor_si128(c2[7722],_mm_xor_si128(c2[4302],_mm_xor_si128(c2[8299],_mm_xor_si128(c2[6040],_mm_xor_si128(c2[6604],_mm_xor_si128(c2[3756],_mm_xor_si128(c2[4320],_mm_xor_si128(c2[2605],_mm_xor_si128(c2[8323],_mm_xor_si128(c2[3182],_mm_xor_si128(c2[7756],_mm_xor_si128(c2[6066],_mm_xor_si128(c2[6630],_mm_xor_si128(c2[3203],_mm_xor_si128(c2[8921],_mm_xor_si128(c2[3780],_mm_xor_si128(c2[911],_mm_xor_si128(c2[6642],_mm_xor_si128(c2[1488],_mm_xor_si128(c2[6092],_mm_xor_si128(c2[6656],_mm_xor_si128(c2[3806],_mm_xor_si128(c2[373],_mm_xor_si128(c2[4370],_mm_xor_si128(c2[8952],_mm_xor_si128(c2[5519],_mm_xor_si128(c2[365],_mm_xor_si128(c2[6118],_mm_xor_si128(c2[6682],_mm_xor_si128(c2[399],_mm_xor_si128(c2[963],_mm_xor_si128(c2[8970],_mm_xor_si128(c2[5550],_mm_xor_si128(c2[396],_mm_xor_si128(c2[6144],_mm_xor_si128(c2[6708],_mm_xor_si128(c2[5002],_mm_xor_si128(c2[5566],_mm_xor_si128(c2[3849],_mm_xor_si128(c2[416],_mm_xor_si128(c2[4426],_mm_xor_si128(c2[422],_mm_xor_si128(c2[6170],_mm_xor_si128(c2[6734],_mm_xor_si128(c2[5601],_mm_xor_si128(c2[2168],_mm_xor_si128(c2[6165],_mm_xor_si128(c2[4452],_mm_xor_si128(c2[1019],_mm_xor_si128(c2[5029],_mm_xor_si128(c2[6196],_mm_xor_si128(c2[6760],_mm_xor_si128(c2[7910],_mm_xor_si128(c2[4477],_mm_xor_si128(c2[8487],_mm_xor_si128(c2[1615],_mm_xor_si128(c2[7333],_mm_xor_si128(c2[2192],_mm_xor_si128(c2[6222],_mm_xor_si128(c2[6786],_mm_xor_si128(c2[1644],_mm_xor_si128(c2[2221],_mm_xor_si128(c2[2793],_mm_xor_si128(c2[8511],_mm_xor_si128(c2[3357],_mm_xor_si128(c2[2216],_mm_xor_si128(c2[6248],_mm_xor_si128(c2[6812],_mm_xor_si128(c2[7396],_mm_xor_si128(c2[3963],_mm_xor_si128(c2[7960],_mm_xor_si128(c2[7392],_mm_xor_si128(c2[3959],_mm_xor_si128(c2[7956],_mm_xor_si128(c2[6274],_mm_xor_si128(c2[6838],_mm_xor_si128(c2[5704],_mm_xor_si128(c2[6268],_mm_xor_si128(c2[5126],_mm_xor_si128(c2[1693],_mm_xor_si128(c2[5703],c2[3412]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+     d2[104]=simde_mm_xor_si128(c2[5728],simde_mm_xor_si128(c2[6292],simde_mm_xor_si128(c2[10],simde_mm_xor_si128(c2[574],simde_mm_xor_si128(c2[8011],simde_mm_xor_si128(c2[4578],simde_mm_xor_si128(c2[8588],simde_mm_xor_si128(c2[8016],simde_mm_xor_si128(c2[4583],simde_mm_xor_si128(c2[8580],simde_mm_xor_si128(c2[6874],simde_mm_xor_si128(c2[5754],simde_mm_xor_si128(c2[6318],simde_mm_xor_si128(c2[8616],simde_mm_xor_si128(c2[5183],simde_mm_xor_si128(c2[29],simde_mm_xor_si128(c2[35],simde_mm_xor_si128(c2[5753],simde_mm_xor_si128(c2[599],simde_mm_xor_si128(c2[1170],simde_mm_xor_si128(c2[5780],simde_mm_xor_si128(c2[6344],simde_mm_xor_si128(c2[2921],simde_mm_xor_si128(c2[3485],simde_mm_xor_si128(c2[8641],simde_mm_xor_si128(c2[5208],simde_mm_xor_si128(c2[54],simde_mm_xor_si128(c2[5806],simde_mm_xor_si128(c2[6370],simde_mm_xor_si128(c2[3513],simde_mm_xor_si128(c2[4090],simde_mm_xor_si128(c2[6380],simde_mm_xor_si128(c2[2947],simde_mm_xor_si128(c2[6944],simde_mm_xor_si128(c2[1802],simde_mm_xor_si128(c2[5832],simde_mm_xor_si128(c2[6396],simde_mm_xor_si128(c2[8118],simde_mm_xor_si128(c2[4685],simde_mm_xor_si128(c2[8695],simde_mm_xor_si128(c2[6398],simde_mm_xor_si128(c2[2965],simde_mm_xor_si128(c2[6975],simde_mm_xor_si128(c2[5858],simde_mm_xor_si128(c2[6422],simde_mm_xor_si128(c2[2427],simde_mm_xor_si128(c2[2991],simde_mm_xor_si128(c2[8146],simde_mm_xor_si128(c2[4713],simde_mm_xor_si128(c2[8710],simde_mm_xor_si128(c2[5884],simde_mm_xor_si128(c2[6448],simde_mm_xor_si128(c2[8740],simde_mm_xor_si128(c2[5307],simde_mm_xor_si128(c2[166],simde_mm_xor_si128(c2[6459],simde_mm_xor_si128(c2[3026],simde_mm_xor_si128(c2[7023],simde_mm_xor_si128(c2[5910],simde_mm_xor_si128(c2[6474],simde_mm_xor_si128(c2[5913],simde_mm_xor_si128(c2[2480],simde_mm_xor_si128(c2[6477],simde_mm_xor_si128(c2[8771],simde_mm_xor_si128(c2[5338],simde_mm_xor_si128(c2[184],simde_mm_xor_si128(c2[5936],simde_mm_xor_si128(c2[6500],simde_mm_xor_si128(c2[219],simde_mm_xor_si128(c2[5937],simde_mm_xor_si128(c2[783],simde_mm_xor_si128(c2[217],simde_mm_xor_si128(c2[5935],simde_mm_xor_si128(c2[781],simde_mm_xor_si128(c2[5962],simde_mm_xor_si128(c2[6526],simde_mm_xor_si128(c2[7106],simde_mm_xor_si128(c2[7670],simde_mm_xor_si128(c2[2525],simde_mm_xor_si128(c2[8243],simde_mm_xor_si128(c2[3102],simde_mm_xor_si128(c2[5988],simde_mm_xor_si128(c2[6552],simde_mm_xor_si128(c2[7703],simde_mm_xor_si128(c2[4270],simde_mm_xor_si128(c2[8280],simde_mm_xor_si128(c2[837],simde_mm_xor_si128(c2[6555],simde_mm_xor_si128(c2[1414],simde_mm_xor_si128(c2[6014],simde_mm_xor_si128(c2[6578],simde_mm_xor_si128(c2[6015],simde_mm_xor_si128(c2[6579],simde_mm_xor_si128(c2[7722],simde_mm_xor_si128(c2[4302],simde_mm_xor_si128(c2[8299],simde_mm_xor_si128(c2[6040],simde_mm_xor_si128(c2[6604],simde_mm_xor_si128(c2[3756],simde_mm_xor_si128(c2[4320],simde_mm_xor_si128(c2[2605],simde_mm_xor_si128(c2[8323],simde_mm_xor_si128(c2[3182],simde_mm_xor_si128(c2[7756],simde_mm_xor_si128(c2[6066],simde_mm_xor_si128(c2[6630],simde_mm_xor_si128(c2[3203],simde_mm_xor_si128(c2[8921],simde_mm_xor_si128(c2[3780],simde_mm_xor_si128(c2[911],simde_mm_xor_si128(c2[6642],simde_mm_xor_si128(c2[1488],simde_mm_xor_si128(c2[6092],simde_mm_xor_si128(c2[6656],simde_mm_xor_si128(c2[3806],simde_mm_xor_si128(c2[373],simde_mm_xor_si128(c2[4370],simde_mm_xor_si128(c2[8952],simde_mm_xor_si128(c2[5519],simde_mm_xor_si128(c2[365],simde_mm_xor_si128(c2[6118],simde_mm_xor_si128(c2[6682],simde_mm_xor_si128(c2[399],simde_mm_xor_si128(c2[963],simde_mm_xor_si128(c2[8970],simde_mm_xor_si128(c2[5550],simde_mm_xor_si128(c2[396],simde_mm_xor_si128(c2[6144],simde_mm_xor_si128(c2[6708],simde_mm_xor_si128(c2[5002],simde_mm_xor_si128(c2[5566],simde_mm_xor_si128(c2[3849],simde_mm_xor_si128(c2[416],simde_mm_xor_si128(c2[4426],simde_mm_xor_si128(c2[422],simde_mm_xor_si128(c2[6170],simde_mm_xor_si128(c2[6734],simde_mm_xor_si128(c2[5601],simde_mm_xor_si128(c2[2168],simde_mm_xor_si128(c2[6165],simde_mm_xor_si128(c2[4452],simde_mm_xor_si128(c2[1019],simde_mm_xor_si128(c2[5029],simde_mm_xor_si128(c2[6196],simde_mm_xor_si128(c2[6760],simde_mm_xor_si128(c2[7910],simde_mm_xor_si128(c2[4477],simde_mm_xor_si128(c2[8487],simde_mm_xor_si128(c2[1615],simde_mm_xor_si128(c2[7333],simde_mm_xor_si128(c2[2192],simde_mm_xor_si128(c2[6222],simde_mm_xor_si128(c2[6786],simde_mm_xor_si128(c2[1644],simde_mm_xor_si128(c2[2221],simde_mm_xor_si128(c2[2793],simde_mm_xor_si128(c2[8511],simde_mm_xor_si128(c2[3357],simde_mm_xor_si128(c2[2216],simde_mm_xor_si128(c2[6248],simde_mm_xor_si128(c2[6812],simde_mm_xor_si128(c2[7396],simde_mm_xor_si128(c2[3963],simde_mm_xor_si128(c2[7960],simde_mm_xor_si128(c2[7392],simde_mm_xor_si128(c2[3959],simde_mm_xor_si128(c2[7956],simde_mm_xor_si128(c2[6274],simde_mm_xor_si128(c2[6838],simde_mm_xor_si128(c2[5704],simde_mm_xor_si128(c2[6268],simde_mm_xor_si128(c2[5126],simde_mm_xor_si128(c2[1693],simde_mm_xor_si128(c2[5703],c2[3412]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
 
 //row: 9
-     d2[117]=_mm_xor_si128(c2[3432],_mm_xor_si128(c2[5746],_mm_xor_si128(c2[841],_mm_xor_si128(c2[3149],_mm_xor_si128(c2[5498],_mm_xor_si128(c2[2730],_mm_xor_si128(c2[2766],c2[7966])))))));
+     d2[117]=simde_mm_xor_si128(c2[3432],simde_mm_xor_si128(c2[5746],simde_mm_xor_si128(c2[841],simde_mm_xor_si128(c2[3149],simde_mm_xor_si128(c2[5498],simde_mm_xor_si128(c2[2730],simde_mm_xor_si128(c2[2766],c2[7966])))))));
 
 //row: 10
-     d2[130]=_mm_xor_si128(c2[8041],_mm_xor_si128(c2[7492],_mm_xor_si128(c2[6977],_mm_xor_si128(c2[183],_mm_xor_si128(c2[7072],c2[3800])))));
+     d2[130]=simde_mm_xor_si128(c2[8041],simde_mm_xor_si128(c2[7492],simde_mm_xor_si128(c2[6977],simde_mm_xor_si128(c2[183],simde_mm_xor_si128(c2[7072],c2[3800])))));
 
 //row: 11
-     d2[143]=_mm_xor_si128(c2[2288],_mm_xor_si128(c2[6871],_mm_xor_si128(c2[1717],_mm_xor_si128(c2[5721],_mm_xor_si128(c2[5150],_mm_xor_si128(c2[4584],_mm_xor_si128(c2[4013],_mm_xor_si128(c2[4576],_mm_xor_si128(c2[4005],_mm_xor_si128(c2[4587],_mm_xor_si128(c2[2314],_mm_xor_si128(c2[6897],_mm_xor_si128(c2[1743],_mm_xor_si128(c2[5176],_mm_xor_si128(c2[4605],_mm_xor_si128(c2[5746],_mm_xor_si128(c2[5175],_mm_xor_si128(c2[1182],_mm_xor_si128(c2[2340],_mm_xor_si128(c2[6923],_mm_xor_si128(c2[1769],_mm_xor_si128(c2[8632],_mm_xor_si128(c2[8061],_mm_xor_si128(c2[5201],_mm_xor_si128(c2[4630],_mm_xor_si128(c2[2366],_mm_xor_si128(c2[6949],_mm_xor_si128(c2[1795],_mm_xor_si128(c2[86],_mm_xor_si128(c2[8666],_mm_xor_si128(c2[2940],_mm_xor_si128(c2[2369],_mm_xor_si128(c2[2392],_mm_xor_si128(c2[1821],_mm_xor_si128(c2[4691],_mm_xor_si128(c2[4120],_mm_xor_si128(c2[2971],_mm_xor_si128(c2[2400],_mm_xor_si128(c2[2418],_mm_xor_si128(c2[7001],_mm_xor_si128(c2[1847],_mm_xor_si128(c2[8138],_mm_xor_si128(c2[7567],_mm_xor_si128(c2[4706],_mm_xor_si128(c2[4135],_mm_xor_si128(c2[2444],_mm_xor_si128(c2[7027],_mm_xor_si128(c2[1873],_mm_xor_si128(c2[5313],_mm_xor_si128(c2[4742],_mm_xor_si128(c2[3019],_mm_xor_si128(c2[2448],_mm_xor_si128(c2[2470],_mm_xor_si128(c2[1899],_mm_xor_si128(c2[2473],_mm_xor_si128(c2[1902],_mm_xor_si128(c2[5331],_mm_xor_si128(c2[4760],_mm_xor_si128(c2[2496],_mm_xor_si128(c2[1925],_mm_xor_si128(c2[5930],_mm_xor_si128(c2[5359],_mm_xor_si128(c2[5928],_mm_xor_si128(c2[5357],_mm_xor_si128(c2[2522],_mm_xor_si128(c2[7105],_mm_xor_si128(c2[1951],_mm_xor_si128(c2[3666],_mm_xor_si128(c2[3095],_mm_xor_si128(c2[8249],_mm_xor_si128(c2[7678],_mm_xor_si128(c2[2548],_mm_xor_si128(c2[7131],_mm_xor_si128(c2[1977],_mm_xor_si128(c2[4276],_mm_xor_si128(c2[3692],_mm_xor_si128(c2[6561],_mm_xor_si128(c2[5990],_mm_xor_si128(c2[2574],_mm_xor_si128(c2[7157],_mm_xor_si128(c2[2003],_mm_xor_si128(c2[2575],_mm_xor_si128(c2[2004],_mm_xor_si128(c2[4295],_mm_xor_si128(c2[3724],_mm_xor_si128(c2[2600],_mm_xor_si128(c2[7183],_mm_xor_si128(c2[2029],_mm_xor_si128(c2[316],_mm_xor_si128(c2[8896],_mm_xor_si128(c2[8329],_mm_xor_si128(c2[7758],_mm_xor_si128(c2[6611],_mm_xor_si128(c2[2626],_mm_xor_si128(c2[7209],_mm_xor_si128(c2[2055],_mm_xor_si128(c2[8927],_mm_xor_si128(c2[8356],_mm_xor_si128(c2[6635],_mm_xor_si128(c2[6064],_mm_xor_si128(c2[2652],_mm_xor_si128(c2[2081],_mm_xor_si128(c2[366],_mm_xor_si128(c2[8946],_mm_xor_si128(c2[5512],_mm_xor_si128(c2[4941],_mm_xor_si128(c2[2678],_mm_xor_si128(c2[7261],_mm_xor_si128(c2[2107],_mm_xor_si128(c2[6110],_mm_xor_si128(c2[5539],_mm_xor_si128(c2[5543],_mm_xor_si128(c2[4972],_mm_xor_si128(c2[2704],_mm_xor_si128(c2[7287],_mm_xor_si128(c2[2133],_mm_xor_si128(c2[1562],_mm_xor_si128(c2[991],_mm_xor_si128(c2[422],_mm_xor_si128(c2[9002],_mm_xor_si128(c2[417],_mm_xor_si128(c2[2730],_mm_xor_si128(c2[2159],_mm_xor_si128(c2[2161],_mm_xor_si128(c2[1590],_mm_xor_si128(c2[1025],_mm_xor_si128(c2[454],_mm_xor_si128(c2[2756],_mm_xor_si128(c2[7339],_mm_xor_si128(c2[2185],_mm_xor_si128(c2[4483],_mm_xor_si128(c2[3912],_mm_xor_si128(c2[7339],_mm_xor_si128(c2[6768],_mm_xor_si128(c2[2782],_mm_xor_si128(c2[7365],_mm_xor_si128(c2[2211],_mm_xor_si128(c2[7368],_mm_xor_si128(c2[6797],_mm_xor_si128(c2[8504],_mm_xor_si128(c2[7933],_mm_xor_si128(c2[2808],_mm_xor_si128(c2[7391],_mm_xor_si128(c2[2237],_mm_xor_si128(c2[3956],_mm_xor_si128(c2[3385],_mm_xor_si128(c2[3952],_mm_xor_si128(c2[3381],_mm_xor_si128(c2[2834],_mm_xor_si128(c2[7417],_mm_xor_si128(c2[2263],_mm_xor_si128(c2[2264],_mm_xor_si128(c2[1693],_mm_xor_si128(c2[1699],_mm_xor_si128(c2[1128],c2[5128])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+     d2[143]=simde_mm_xor_si128(c2[2288],simde_mm_xor_si128(c2[6871],simde_mm_xor_si128(c2[1717],simde_mm_xor_si128(c2[5721],simde_mm_xor_si128(c2[5150],simde_mm_xor_si128(c2[4584],simde_mm_xor_si128(c2[4013],simde_mm_xor_si128(c2[4576],simde_mm_xor_si128(c2[4005],simde_mm_xor_si128(c2[4587],simde_mm_xor_si128(c2[2314],simde_mm_xor_si128(c2[6897],simde_mm_xor_si128(c2[1743],simde_mm_xor_si128(c2[5176],simde_mm_xor_si128(c2[4605],simde_mm_xor_si128(c2[5746],simde_mm_xor_si128(c2[5175],simde_mm_xor_si128(c2[1182],simde_mm_xor_si128(c2[2340],simde_mm_xor_si128(c2[6923],simde_mm_xor_si128(c2[1769],simde_mm_xor_si128(c2[8632],simde_mm_xor_si128(c2[8061],simde_mm_xor_si128(c2[5201],simde_mm_xor_si128(c2[4630],simde_mm_xor_si128(c2[2366],simde_mm_xor_si128(c2[6949],simde_mm_xor_si128(c2[1795],simde_mm_xor_si128(c2[86],simde_mm_xor_si128(c2[8666],simde_mm_xor_si128(c2[2940],simde_mm_xor_si128(c2[2369],simde_mm_xor_si128(c2[2392],simde_mm_xor_si128(c2[1821],simde_mm_xor_si128(c2[4691],simde_mm_xor_si128(c2[4120],simde_mm_xor_si128(c2[2971],simde_mm_xor_si128(c2[2400],simde_mm_xor_si128(c2[2418],simde_mm_xor_si128(c2[7001],simde_mm_xor_si128(c2[1847],simde_mm_xor_si128(c2[8138],simde_mm_xor_si128(c2[7567],simde_mm_xor_si128(c2[4706],simde_mm_xor_si128(c2[4135],simde_mm_xor_si128(c2[2444],simde_mm_xor_si128(c2[7027],simde_mm_xor_si128(c2[1873],simde_mm_xor_si128(c2[5313],simde_mm_xor_si128(c2[4742],simde_mm_xor_si128(c2[3019],simde_mm_xor_si128(c2[2448],simde_mm_xor_si128(c2[2470],simde_mm_xor_si128(c2[1899],simde_mm_xor_si128(c2[2473],simde_mm_xor_si128(c2[1902],simde_mm_xor_si128(c2[5331],simde_mm_xor_si128(c2[4760],simde_mm_xor_si128(c2[2496],simde_mm_xor_si128(c2[1925],simde_mm_xor_si128(c2[5930],simde_mm_xor_si128(c2[5359],simde_mm_xor_si128(c2[5928],simde_mm_xor_si128(c2[5357],simde_mm_xor_si128(c2[2522],simde_mm_xor_si128(c2[7105],simde_mm_xor_si128(c2[1951],simde_mm_xor_si128(c2[3666],simde_mm_xor_si128(c2[3095],simde_mm_xor_si128(c2[8249],simde_mm_xor_si128(c2[7678],simde_mm_xor_si128(c2[2548],simde_mm_xor_si128(c2[7131],simde_mm_xor_si128(c2[1977],simde_mm_xor_si128(c2[4276],simde_mm_xor_si128(c2[3692],simde_mm_xor_si128(c2[6561],simde_mm_xor_si128(c2[5990],simde_mm_xor_si128(c2[2574],simde_mm_xor_si128(c2[7157],simde_mm_xor_si128(c2[2003],simde_mm_xor_si128(c2[2575],simde_mm_xor_si128(c2[2004],simde_mm_xor_si128(c2[4295],simde_mm_xor_si128(c2[3724],simde_mm_xor_si128(c2[2600],simde_mm_xor_si128(c2[7183],simde_mm_xor_si128(c2[2029],simde_mm_xor_si128(c2[316],simde_mm_xor_si128(c2[8896],simde_mm_xor_si128(c2[8329],simde_mm_xor_si128(c2[7758],simde_mm_xor_si128(c2[6611],simde_mm_xor_si128(c2[2626],simde_mm_xor_si128(c2[7209],simde_mm_xor_si128(c2[2055],simde_mm_xor_si128(c2[8927],simde_mm_xor_si128(c2[8356],simde_mm_xor_si128(c2[6635],simde_mm_xor_si128(c2[6064],simde_mm_xor_si128(c2[2652],simde_mm_xor_si128(c2[2081],simde_mm_xor_si128(c2[366],simde_mm_xor_si128(c2[8946],simde_mm_xor_si128(c2[5512],simde_mm_xor_si128(c2[4941],simde_mm_xor_si128(c2[2678],simde_mm_xor_si128(c2[7261],simde_mm_xor_si128(c2[2107],simde_mm_xor_si128(c2[6110],simde_mm_xor_si128(c2[5539],simde_mm_xor_si128(c2[5543],simde_mm_xor_si128(c2[4972],simde_mm_xor_si128(c2[2704],simde_mm_xor_si128(c2[7287],simde_mm_xor_si128(c2[2133],simde_mm_xor_si128(c2[1562],simde_mm_xor_si128(c2[991],simde_mm_xor_si128(c2[422],simde_mm_xor_si128(c2[9002],simde_mm_xor_si128(c2[417],simde_mm_xor_si128(c2[2730],simde_mm_xor_si128(c2[2159],simde_mm_xor_si128(c2[2161],simde_mm_xor_si128(c2[1590],simde_mm_xor_si128(c2[1025],simde_mm_xor_si128(c2[454],simde_mm_xor_si128(c2[2756],simde_mm_xor_si128(c2[7339],simde_mm_xor_si128(c2[2185],simde_mm_xor_si128(c2[4483],simde_mm_xor_si128(c2[3912],simde_mm_xor_si128(c2[7339],simde_mm_xor_si128(c2[6768],simde_mm_xor_si128(c2[2782],simde_mm_xor_si128(c2[7365],simde_mm_xor_si128(c2[2211],simde_mm_xor_si128(c2[7368],simde_mm_xor_si128(c2[6797],simde_mm_xor_si128(c2[8504],simde_mm_xor_si128(c2[7933],simde_mm_xor_si128(c2[2808],simde_mm_xor_si128(c2[7391],simde_mm_xor_si128(c2[2237],simde_mm_xor_si128(c2[3956],simde_mm_xor_si128(c2[3385],simde_mm_xor_si128(c2[3952],simde_mm_xor_si128(c2[3381],simde_mm_xor_si128(c2[2834],simde_mm_xor_si128(c2[7417],simde_mm_xor_si128(c2[2263],simde_mm_xor_si128(c2[2264],simde_mm_xor_si128(c2[1693],simde_mm_xor_si128(c2[1699],simde_mm_xor_si128(c2[1128],c2[5128])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
 
 //row: 12
-     d2[156]=_mm_xor_si128(c2[3432],_mm_xor_si128(c2[2315],_mm_xor_si128(c2[6564],_mm_xor_si128(c2[5443],_mm_xor_si128(c2[4920],c2[9060])))));
+     d2[156]=simde_mm_xor_si128(c2[3432],simde_mm_xor_si128(c2[2315],simde_mm_xor_si128(c2[6564],simde_mm_xor_si128(c2[5443],simde_mm_xor_si128(c2[4920],c2[9060])))));
 
 //row: 13
-     d2[169]=_mm_xor_si128(c2[8010],_mm_xor_si128(c2[2869],_mm_xor_si128(c2[6302],_mm_xor_si128(c2[5152],_mm_xor_si128(c2[5157],_mm_xor_si128(c2[2291],_mm_xor_si128(c2[8036],_mm_xor_si128(c2[2895],_mm_xor_si128(c2[5757],_mm_xor_si128(c2[6327],_mm_xor_si128(c2[8062],_mm_xor_si128(c2[2921],_mm_xor_si128(c2[62],_mm_xor_si128(c2[5782],_mm_xor_si128(c2[8088],_mm_xor_si128(c2[2947],_mm_xor_si128(c2[654],_mm_xor_si128(c2[3521],_mm_xor_si128(c2[1803],_mm_xor_si128(c2[2973],_mm_xor_si128(c2[5259],_mm_xor_si128(c2[3539],_mm_xor_si128(c2[8140],_mm_xor_si128(c2[2999],_mm_xor_si128(c2[8719],_mm_xor_si128(c2[5287],_mm_xor_si128(c2[8166],_mm_xor_si128(c2[3025],_mm_xor_si128(c2[5881],_mm_xor_si128(c2[3600],_mm_xor_si128(c2[3051],_mm_xor_si128(c2[3054],_mm_xor_si128(c2[5912],_mm_xor_si128(c2[754],_mm_xor_si128(c2[3077],_mm_xor_si128(c2[6511],_mm_xor_si128(c2[6509],_mm_xor_si128(c2[8244],_mm_xor_si128(c2[3103],_mm_xor_si128(c2[4247],_mm_xor_si128(c2[8817],_mm_xor_si128(c2[8270],_mm_xor_si128(c2[3129],_mm_xor_si128(c2[4844],_mm_xor_si128(c2[7129],_mm_xor_si128(c2[8296],_mm_xor_si128(c2[3155],_mm_xor_si128(c2[3156],_mm_xor_si128(c2[4863],_mm_xor_si128(c2[8322],_mm_xor_si128(c2[3181],_mm_xor_si128(c2[884],_mm_xor_si128(c2[8897],_mm_xor_si128(c2[8348],_mm_xor_si128(c2[3207],_mm_xor_si128(c2[344],_mm_xor_si128(c2[7203],_mm_xor_si128(c2[3233],_mm_xor_si128(c2[947],_mm_xor_si128(c2[6093],_mm_xor_si128(c2[8400],_mm_xor_si128(c2[3259],_mm_xor_si128(c2[6691],_mm_xor_si128(c2[6111],_mm_xor_si128(c2[8426],_mm_xor_si128(c2[3285],_mm_xor_si128(c2[2143],_mm_xor_si128(c2[990],_mm_xor_si128(c2[3311],_mm_xor_si128(c2[2742],_mm_xor_si128(c2[1593],_mm_xor_si128(c2[8478],_mm_xor_si128(c2[3337],_mm_xor_si128(c2[5051],_mm_xor_si128(c2[7907],_mm_xor_si128(c2[8504],_mm_xor_si128(c2[3363],_mm_xor_si128(c2[7936],_mm_xor_si128(c2[9085],_mm_xor_si128(c2[8530],_mm_xor_si128(c2[3389],_mm_xor_si128(c2[4524],_mm_xor_si128(c2[4533],_mm_xor_si128(c2[521],_mm_xor_si128(c2[8556],_mm_xor_si128(c2[3415],_mm_xor_si128(c2[2845],c2[2267])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+     d2[169]=simde_mm_xor_si128(c2[8010],simde_mm_xor_si128(c2[2869],simde_mm_xor_si128(c2[6302],simde_mm_xor_si128(c2[5152],simde_mm_xor_si128(c2[5157],simde_mm_xor_si128(c2[2291],simde_mm_xor_si128(c2[8036],simde_mm_xor_si128(c2[2895],simde_mm_xor_si128(c2[5757],simde_mm_xor_si128(c2[6327],simde_mm_xor_si128(c2[8062],simde_mm_xor_si128(c2[2921],simde_mm_xor_si128(c2[62],simde_mm_xor_si128(c2[5782],simde_mm_xor_si128(c2[8088],simde_mm_xor_si128(c2[2947],simde_mm_xor_si128(c2[654],simde_mm_xor_si128(c2[3521],simde_mm_xor_si128(c2[1803],simde_mm_xor_si128(c2[2973],simde_mm_xor_si128(c2[5259],simde_mm_xor_si128(c2[3539],simde_mm_xor_si128(c2[8140],simde_mm_xor_si128(c2[2999],simde_mm_xor_si128(c2[8719],simde_mm_xor_si128(c2[5287],simde_mm_xor_si128(c2[8166],simde_mm_xor_si128(c2[3025],simde_mm_xor_si128(c2[5881],simde_mm_xor_si128(c2[3600],simde_mm_xor_si128(c2[3051],simde_mm_xor_si128(c2[3054],simde_mm_xor_si128(c2[5912],simde_mm_xor_si128(c2[754],simde_mm_xor_si128(c2[3077],simde_mm_xor_si128(c2[6511],simde_mm_xor_si128(c2[6509],simde_mm_xor_si128(c2[8244],simde_mm_xor_si128(c2[3103],simde_mm_xor_si128(c2[4247],simde_mm_xor_si128(c2[8817],simde_mm_xor_si128(c2[8270],simde_mm_xor_si128(c2[3129],simde_mm_xor_si128(c2[4844],simde_mm_xor_si128(c2[7129],simde_mm_xor_si128(c2[8296],simde_mm_xor_si128(c2[3155],simde_mm_xor_si128(c2[3156],simde_mm_xor_si128(c2[4863],simde_mm_xor_si128(c2[8322],simde_mm_xor_si128(c2[3181],simde_mm_xor_si128(c2[884],simde_mm_xor_si128(c2[8897],simde_mm_xor_si128(c2[8348],simde_mm_xor_si128(c2[3207],simde_mm_xor_si128(c2[344],simde_mm_xor_si128(c2[7203],simde_mm_xor_si128(c2[3233],simde_mm_xor_si128(c2[947],simde_mm_xor_si128(c2[6093],simde_mm_xor_si128(c2[8400],simde_mm_xor_si128(c2[3259],simde_mm_xor_si128(c2[6691],simde_mm_xor_si128(c2[6111],simde_mm_xor_si128(c2[8426],simde_mm_xor_si128(c2[3285],simde_mm_xor_si128(c2[2143],simde_mm_xor_si128(c2[990],simde_mm_xor_si128(c2[3311],simde_mm_xor_si128(c2[2742],simde_mm_xor_si128(c2[1593],simde_mm_xor_si128(c2[8478],simde_mm_xor_si128(c2[3337],simde_mm_xor_si128(c2[5051],simde_mm_xor_si128(c2[7907],simde_mm_xor_si128(c2[8504],simde_mm_xor_si128(c2[3363],simde_mm_xor_si128(c2[7936],simde_mm_xor_si128(c2[9085],simde_mm_xor_si128(c2[8530],simde_mm_xor_si128(c2[3389],simde_mm_xor_si128(c2[4524],simde_mm_xor_si128(c2[4533],simde_mm_xor_si128(c2[521],simde_mm_xor_si128(c2[8556],simde_mm_xor_si128(c2[3415],simde_mm_xor_si128(c2[2845],c2[2267])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
 
 //row: 14
-     d2[182]=_mm_xor_si128(c2[572],_mm_xor_si128(c2[6044],_mm_xor_si128(c2[3829],_mm_xor_si128(c2[1568],_mm_xor_si128(c2[1014],c2[1690])))));
+     d2[182]=simde_mm_xor_si128(c2[572],simde_mm_xor_si128(c2[6044],simde_mm_xor_si128(c2[3829],simde_mm_xor_si128(c2[1568],simde_mm_xor_si128(c2[1014],c2[1690])))));
 
 //row: 15
-     d2[195]=_mm_xor_si128(c2[2869],_mm_xor_si128(c2[6302],_mm_xor_si128(c2[5152],_mm_xor_si128(c2[1147],_mm_xor_si128(c2[5157],_mm_xor_si128(c2[7446],_mm_xor_si128(c2[2895],_mm_xor_si128(c2[5757],_mm_xor_si128(c2[2317],_mm_xor_si128(c2[6327],_mm_xor_si128(c2[3458],_mm_xor_si128(c2[2921],_mm_xor_si128(c2[62],_mm_xor_si128(c2[5782],_mm_xor_si128(c2[2947],_mm_xor_si128(c2[654],_mm_xor_si128(c2[8662],_mm_xor_si128(c2[3521],_mm_xor_si128(c2[2973],_mm_xor_si128(c2[5259],_mm_xor_si128(c2[8693],_mm_xor_si128(c2[3539],_mm_xor_si128(c2[2999],_mm_xor_si128(c2[8719],_mm_xor_si128(c2[5287],_mm_xor_si128(c2[3025],_mm_xor_si128(c2[5881],_mm_xor_si128(c2[8741],_mm_xor_si128(c2[3600],_mm_xor_si128(c2[3051],_mm_xor_si128(c2[3054],_mm_xor_si128(c2[1902],_mm_xor_si128(c2[5912],_mm_xor_si128(c2[3077],_mm_xor_si128(c2[6511],_mm_xor_si128(c2[2499],_mm_xor_si128(c2[6509],_mm_xor_si128(c2[3103],_mm_xor_si128(c2[4247],_mm_xor_si128(c2[8817],_mm_xor_si128(c2[3129],_mm_xor_si128(c2[4844],_mm_xor_si128(c2[3132],_mm_xor_si128(c2[7129],_mm_xor_si128(c2[837],_mm_xor_si128(c2[3155],_mm_xor_si128(c2[3156],_mm_xor_si128(c2[866],_mm_xor_si128(c2[4863],_mm_xor_si128(c2[3181],_mm_xor_si128(c2[884],_mm_xor_si128(c2[4900],_mm_xor_si128(c2[8897],_mm_xor_si128(c2[3207],_mm_xor_si128(c2[344],_mm_xor_si128(c2[3206],_mm_xor_si128(c2[7203],_mm_xor_si128(c2[3781],_mm_xor_si128(c2[3233],_mm_xor_si128(c2[947],_mm_xor_si128(c2[2083],_mm_xor_si128(c2[6093],_mm_xor_si128(c2[3259],_mm_xor_si128(c2[6691],_mm_xor_si128(c2[6111],_mm_xor_si128(c2[3285],_mm_xor_si128(c2[2143],_mm_xor_si128(c2[6144],_mm_xor_si128(c2[990],_mm_xor_si128(c2[3311],_mm_xor_si128(c2[2742],_mm_xor_si128(c2[6734],_mm_xor_si128(c2[1593],_mm_xor_si128(c2[3337],_mm_xor_si128(c2[5051],_mm_xor_si128(c2[3910],_mm_xor_si128(c2[7907],_mm_xor_si128(c2[3331],_mm_xor_si128(c2[3363],_mm_xor_si128(c2[7936],_mm_xor_si128(c2[9085],_mm_xor_si128(c2[3389],_mm_xor_si128(c2[4524],_mm_xor_si128(c2[523],_mm_xor_si128(c2[4533],_mm_xor_si128(c2[3415],_mm_xor_si128(c2[2845],_mm_xor_si128(c2[7421],c2[2267]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+     d2[195]=simde_mm_xor_si128(c2[2869],simde_mm_xor_si128(c2[6302],simde_mm_xor_si128(c2[5152],simde_mm_xor_si128(c2[1147],simde_mm_xor_si128(c2[5157],simde_mm_xor_si128(c2[7446],simde_mm_xor_si128(c2[2895],simde_mm_xor_si128(c2[5757],simde_mm_xor_si128(c2[2317],simde_mm_xor_si128(c2[6327],simde_mm_xor_si128(c2[3458],simde_mm_xor_si128(c2[2921],simde_mm_xor_si128(c2[62],simde_mm_xor_si128(c2[5782],simde_mm_xor_si128(c2[2947],simde_mm_xor_si128(c2[654],simde_mm_xor_si128(c2[8662],simde_mm_xor_si128(c2[3521],simde_mm_xor_si128(c2[2973],simde_mm_xor_si128(c2[5259],simde_mm_xor_si128(c2[8693],simde_mm_xor_si128(c2[3539],simde_mm_xor_si128(c2[2999],simde_mm_xor_si128(c2[8719],simde_mm_xor_si128(c2[5287],simde_mm_xor_si128(c2[3025],simde_mm_xor_si128(c2[5881],simde_mm_xor_si128(c2[8741],simde_mm_xor_si128(c2[3600],simde_mm_xor_si128(c2[3051],simde_mm_xor_si128(c2[3054],simde_mm_xor_si128(c2[1902],simde_mm_xor_si128(c2[5912],simde_mm_xor_si128(c2[3077],simde_mm_xor_si128(c2[6511],simde_mm_xor_si128(c2[2499],simde_mm_xor_si128(c2[6509],simde_mm_xor_si128(c2[3103],simde_mm_xor_si128(c2[4247],simde_mm_xor_si128(c2[8817],simde_mm_xor_si128(c2[3129],simde_mm_xor_si128(c2[4844],simde_mm_xor_si128(c2[3132],simde_mm_xor_si128(c2[7129],simde_mm_xor_si128(c2[837],simde_mm_xor_si128(c2[3155],simde_mm_xor_si128(c2[3156],simde_mm_xor_si128(c2[866],simde_mm_xor_si128(c2[4863],simde_mm_xor_si128(c2[3181],simde_mm_xor_si128(c2[884],simde_mm_xor_si128(c2[4900],simde_mm_xor_si128(c2[8897],simde_mm_xor_si128(c2[3207],simde_mm_xor_si128(c2[344],simde_mm_xor_si128(c2[3206],simde_mm_xor_si128(c2[7203],simde_mm_xor_si128(c2[3781],simde_mm_xor_si128(c2[3233],simde_mm_xor_si128(c2[947],simde_mm_xor_si128(c2[2083],simde_mm_xor_si128(c2[6093],simde_mm_xor_si128(c2[3259],simde_mm_xor_si128(c2[6691],simde_mm_xor_si128(c2[6111],simde_mm_xor_si128(c2[3285],simde_mm_xor_si128(c2[2143],simde_mm_xor_si128(c2[6144],simde_mm_xor_si128(c2[990],simde_mm_xor_si128(c2[3311],simde_mm_xor_si128(c2[2742],simde_mm_xor_si128(c2[6734],simde_mm_xor_si128(c2[1593],simde_mm_xor_si128(c2[3337],simde_mm_xor_si128(c2[5051],simde_mm_xor_si128(c2[3910],simde_mm_xor_si128(c2[7907],simde_mm_xor_si128(c2[3331],simde_mm_xor_si128(c2[3363],simde_mm_xor_si128(c2[7936],simde_mm_xor_si128(c2[9085],simde_mm_xor_si128(c2[3389],simde_mm_xor_si128(c2[4524],simde_mm_xor_si128(c2[523],simde_mm_xor_si128(c2[4533],simde_mm_xor_si128(c2[3415],simde_mm_xor_si128(c2[2845],simde_mm_xor_si128(c2[7421],c2[2267]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
 
 //row: 16
-     d2[208]=_mm_xor_si128(c2[7440],_mm_xor_si128(c2[1722],_mm_xor_si128(c2[572],_mm_xor_si128(c2[577],_mm_xor_si128(c2[7466],_mm_xor_si128(c2[1177],_mm_xor_si128(c2[1747],_mm_xor_si128(c2[4607],_mm_xor_si128(c2[7492],_mm_xor_si128(c2[4633],_mm_xor_si128(c2[1202],_mm_xor_si128(c2[7518],_mm_xor_si128(c2[5238],_mm_xor_si128(c2[8092],_mm_xor_si128(c2[3522],_mm_xor_si128(c2[7544],_mm_xor_si128(c2[679],_mm_xor_si128(c2[8123],_mm_xor_si128(c2[7570],_mm_xor_si128(c2[4139],_mm_xor_si128(c2[707],_mm_xor_si128(c2[7596],_mm_xor_si128(c2[1301],_mm_xor_si128(c2[8171],_mm_xor_si128(c2[7622],_mm_xor_si128(c2[7625],_mm_xor_si128(c2[1332],_mm_xor_si128(c2[7648],_mm_xor_si128(c2[1931],_mm_xor_si128(c2[1929],_mm_xor_si128(c2[7674],_mm_xor_si128(c2[8818],_mm_xor_si128(c2[4250],_mm_xor_si128(c2[7700],_mm_xor_si128(c2[264],_mm_xor_si128(c2[2549],_mm_xor_si128(c2[7726],_mm_xor_si128(c2[7727],_mm_xor_si128(c2[296],_mm_xor_si128(c2[296],_mm_xor_si128(c2[7752],_mm_xor_si128(c2[5468],_mm_xor_si128(c2[4317],_mm_xor_si128(c2[7778],_mm_xor_si128(c2[4915],_mm_xor_si128(c2[2636],_mm_xor_si128(c2[7804],_mm_xor_si128(c2[5518],_mm_xor_si128(c2[1513],_mm_xor_si128(c2[7830],_mm_xor_si128(c2[2111],_mm_xor_si128(c2[1544],_mm_xor_si128(c2[7856],_mm_xor_si128(c2[6714],_mm_xor_si128(c2[5574],_mm_xor_si128(c2[7882],_mm_xor_si128(c2[7313],_mm_xor_si128(c2[6164],_mm_xor_si128(c2[7908],_mm_xor_si128(c2[471],_mm_xor_si128(c2[3340],_mm_xor_si128(c2[7934],_mm_xor_si128(c2[3356],_mm_xor_si128(c2[4505],_mm_xor_si128(c2[7960],_mm_xor_si128(c2[9108],_mm_xor_si128(c2[9104],_mm_xor_si128(c2[6247],_mm_xor_si128(c2[7986],_mm_xor_si128(c2[7416],c2[6838]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+     d2[208]=simde_mm_xor_si128(c2[7440],simde_mm_xor_si128(c2[1722],simde_mm_xor_si128(c2[572],simde_mm_xor_si128(c2[577],simde_mm_xor_si128(c2[7466],simde_mm_xor_si128(c2[1177],simde_mm_xor_si128(c2[1747],simde_mm_xor_si128(c2[4607],simde_mm_xor_si128(c2[7492],simde_mm_xor_si128(c2[4633],simde_mm_xor_si128(c2[1202],simde_mm_xor_si128(c2[7518],simde_mm_xor_si128(c2[5238],simde_mm_xor_si128(c2[8092],simde_mm_xor_si128(c2[3522],simde_mm_xor_si128(c2[7544],simde_mm_xor_si128(c2[679],simde_mm_xor_si128(c2[8123],simde_mm_xor_si128(c2[7570],simde_mm_xor_si128(c2[4139],simde_mm_xor_si128(c2[707],simde_mm_xor_si128(c2[7596],simde_mm_xor_si128(c2[1301],simde_mm_xor_si128(c2[8171],simde_mm_xor_si128(c2[7622],simde_mm_xor_si128(c2[7625],simde_mm_xor_si128(c2[1332],simde_mm_xor_si128(c2[7648],simde_mm_xor_si128(c2[1931],simde_mm_xor_si128(c2[1929],simde_mm_xor_si128(c2[7674],simde_mm_xor_si128(c2[8818],simde_mm_xor_si128(c2[4250],simde_mm_xor_si128(c2[7700],simde_mm_xor_si128(c2[264],simde_mm_xor_si128(c2[2549],simde_mm_xor_si128(c2[7726],simde_mm_xor_si128(c2[7727],simde_mm_xor_si128(c2[296],simde_mm_xor_si128(c2[296],simde_mm_xor_si128(c2[7752],simde_mm_xor_si128(c2[5468],simde_mm_xor_si128(c2[4317],simde_mm_xor_si128(c2[7778],simde_mm_xor_si128(c2[4915],simde_mm_xor_si128(c2[2636],simde_mm_xor_si128(c2[7804],simde_mm_xor_si128(c2[5518],simde_mm_xor_si128(c2[1513],simde_mm_xor_si128(c2[7830],simde_mm_xor_si128(c2[2111],simde_mm_xor_si128(c2[1544],simde_mm_xor_si128(c2[7856],simde_mm_xor_si128(c2[6714],simde_mm_xor_si128(c2[5574],simde_mm_xor_si128(c2[7882],simde_mm_xor_si128(c2[7313],simde_mm_xor_si128(c2[6164],simde_mm_xor_si128(c2[7908],simde_mm_xor_si128(c2[471],simde_mm_xor_si128(c2[3340],simde_mm_xor_si128(c2[7934],simde_mm_xor_si128(c2[3356],simde_mm_xor_si128(c2[4505],simde_mm_xor_si128(c2[7960],simde_mm_xor_si128(c2[9108],simde_mm_xor_si128(c2[9104],simde_mm_xor_si128(c2[6247],simde_mm_xor_si128(c2[7986],simde_mm_xor_si128(c2[7416],c2[6838]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
 
 //row: 17
-     d2[221]=_mm_xor_si128(c2[6297],_mm_xor_si128(c2[4951],_mm_xor_si128(c2[8425],_mm_xor_si128(c2[2158],c2[6847]))));
+     d2[221]=simde_mm_xor_si128(c2[6297],simde_mm_xor_si128(c2[4951],simde_mm_xor_si128(c2[8425],simde_mm_xor_si128(c2[2158],c2[6847]))));
 
 //row: 18
-     d2[234]=_mm_xor_si128(c2[598],_mm_xor_si128(c2[5462],_mm_xor_si128(c2[4352],_mm_xor_si128(c2[2760],c2[2791]))));
+     d2[234]=simde_mm_xor_si128(c2[598],simde_mm_xor_si128(c2[5462],simde_mm_xor_si128(c2[4352],simde_mm_xor_si128(c2[2760],c2[2791]))));
 
 //row: 19
-     d2[247]=_mm_xor_si128(c2[6864],_mm_xor_si128(c2[3458],_mm_xor_si128(c2[3624],_mm_xor_si128(c2[4795],c2[8851]))));
+     d2[247]=simde_mm_xor_si128(c2[6864],simde_mm_xor_si128(c2[3458],simde_mm_xor_si128(c2[3624],simde_mm_xor_si128(c2[4795],c2[8851]))));
 
 //row: 20
-     d2[260]=_mm_xor_si128(c2[2868],_mm_xor_si128(c2[6301],_mm_xor_si128(c2[5151],_mm_xor_si128(c2[5156],_mm_xor_si128(c2[3432],_mm_xor_si128(c2[2894],_mm_xor_si128(c2[5756],_mm_xor_si128(c2[6326],_mm_xor_si128(c2[2920],_mm_xor_si128(c2[61],_mm_xor_si128(c2[5781],_mm_xor_si128(c2[2946],_mm_xor_si128(c2[653],_mm_xor_si128(c2[3520],_mm_xor_si128(c2[6942],_mm_xor_si128(c2[2972],_mm_xor_si128(c2[5258],_mm_xor_si128(c2[3538],_mm_xor_si128(c2[2998],_mm_xor_si128(c2[8718],_mm_xor_si128(c2[5286],_mm_xor_si128(c2[3024],_mm_xor_si128(c2[5880],_mm_xor_si128(c2[3599],_mm_xor_si128(c2[3050],_mm_xor_si128(c2[3053],_mm_xor_si128(c2[5911],_mm_xor_si128(c2[3076],_mm_xor_si128(c2[6510],_mm_xor_si128(c2[6508],_mm_xor_si128(c2[3102],_mm_xor_si128(c2[4246],_mm_xor_si128(c2[8816],_mm_xor_si128(c2[8814],_mm_xor_si128(c2[3128],_mm_xor_si128(c2[4843],_mm_xor_si128(c2[7128],_mm_xor_si128(c2[3154],_mm_xor_si128(c2[3155],_mm_xor_si128(c2[4862],_mm_xor_si128(c2[3146],_mm_xor_si128(c2[3180],_mm_xor_si128(c2[896],_mm_xor_si128(c2[8896],_mm_xor_si128(c2[3206],_mm_xor_si128(c2[343],_mm_xor_si128(c2[7202],_mm_xor_si128(c2[3232],_mm_xor_si128(c2[946],_mm_xor_si128(c2[6092],_mm_xor_si128(c2[3258],_mm_xor_si128(c2[6690],_mm_xor_si128(c2[6110],_mm_xor_si128(c2[3284],_mm_xor_si128(c2[2142],_mm_xor_si128(c2[989],_mm_xor_si128(c2[3310],_mm_xor_si128(c2[2741],_mm_xor_si128(c2[1592],_mm_xor_si128(c2[3336],_mm_xor_si128(c2[5050],_mm_xor_si128(c2[7906],_mm_xor_si128(c2[3362],_mm_xor_si128(c2[7935],_mm_xor_si128(c2[9084],_mm_xor_si128(c2[3388],_mm_xor_si128(c2[4536],_mm_xor_si128(c2[4532],_mm_xor_si128(c2[3414],_mm_xor_si128(c2[2844],c2[2266]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+     d2[260]=simde_mm_xor_si128(c2[2868],simde_mm_xor_si128(c2[6301],simde_mm_xor_si128(c2[5151],simde_mm_xor_si128(c2[5156],simde_mm_xor_si128(c2[3432],simde_mm_xor_si128(c2[2894],simde_mm_xor_si128(c2[5756],simde_mm_xor_si128(c2[6326],simde_mm_xor_si128(c2[2920],simde_mm_xor_si128(c2[61],simde_mm_xor_si128(c2[5781],simde_mm_xor_si128(c2[2946],simde_mm_xor_si128(c2[653],simde_mm_xor_si128(c2[3520],simde_mm_xor_si128(c2[6942],simde_mm_xor_si128(c2[2972],simde_mm_xor_si128(c2[5258],simde_mm_xor_si128(c2[3538],simde_mm_xor_si128(c2[2998],simde_mm_xor_si128(c2[8718],simde_mm_xor_si128(c2[5286],simde_mm_xor_si128(c2[3024],simde_mm_xor_si128(c2[5880],simde_mm_xor_si128(c2[3599],simde_mm_xor_si128(c2[3050],simde_mm_xor_si128(c2[3053],simde_mm_xor_si128(c2[5911],simde_mm_xor_si128(c2[3076],simde_mm_xor_si128(c2[6510],simde_mm_xor_si128(c2[6508],simde_mm_xor_si128(c2[3102],simde_mm_xor_si128(c2[4246],simde_mm_xor_si128(c2[8816],simde_mm_xor_si128(c2[8814],simde_mm_xor_si128(c2[3128],simde_mm_xor_si128(c2[4843],simde_mm_xor_si128(c2[7128],simde_mm_xor_si128(c2[3154],simde_mm_xor_si128(c2[3155],simde_mm_xor_si128(c2[4862],simde_mm_xor_si128(c2[3146],simde_mm_xor_si128(c2[3180],simde_mm_xor_si128(c2[896],simde_mm_xor_si128(c2[8896],simde_mm_xor_si128(c2[3206],simde_mm_xor_si128(c2[343],simde_mm_xor_si128(c2[7202],simde_mm_xor_si128(c2[3232],simde_mm_xor_si128(c2[946],simde_mm_xor_si128(c2[6092],simde_mm_xor_si128(c2[3258],simde_mm_xor_si128(c2[6690],simde_mm_xor_si128(c2[6110],simde_mm_xor_si128(c2[3284],simde_mm_xor_si128(c2[2142],simde_mm_xor_si128(c2[989],simde_mm_xor_si128(c2[3310],simde_mm_xor_si128(c2[2741],simde_mm_xor_si128(c2[1592],simde_mm_xor_si128(c2[3336],simde_mm_xor_si128(c2[5050],simde_mm_xor_si128(c2[7906],simde_mm_xor_si128(c2[3362],simde_mm_xor_si128(c2[7935],simde_mm_xor_si128(c2[9084],simde_mm_xor_si128(c2[3388],simde_mm_xor_si128(c2[4536],simde_mm_xor_si128(c2[4532],simde_mm_xor_si128(c2[3414],simde_mm_xor_si128(c2[2844],c2[2266]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
 
 //row: 21
-     d2[273]=_mm_xor_si128(c2[3458],_mm_xor_si128(c2[3567],_mm_xor_si128(c2[422],_mm_xor_si128(c2[6242],c2[4562]))));
+     d2[273]=simde_mm_xor_si128(c2[3458],simde_mm_xor_si128(c2[3567],simde_mm_xor_si128(c2[422],simde_mm_xor_si128(c2[6242],c2[4562]))));
 
 //row: 22
-     d2[286]=_mm_xor_si128(c2[6866],_mm_xor_si128(c2[6035],_mm_xor_si128(c2[1490],c2[2166])));
+     d2[286]=simde_mm_xor_si128(c2[6866],simde_mm_xor_si128(c2[6035],simde_mm_xor_si128(c2[1490],c2[2166])));
 
 //row: 23
-     d2[299]=_mm_xor_si128(c2[7464],_mm_xor_si128(c2[1197],_mm_xor_si128(c2[2556],c2[2762])));
+     d2[299]=simde_mm_xor_si128(c2[7464],simde_mm_xor_si128(c2[1197],simde_mm_xor_si128(c2[2556],c2[2762])));
 
 //row: 24
-     d2[312]=_mm_xor_si128(c2[7442],_mm_xor_si128(c2[1724],_mm_xor_si128(c2[574],_mm_xor_si128(c2[579],_mm_xor_si128(c2[5148],_mm_xor_si128(c2[7468],_mm_xor_si128(c2[1179],_mm_xor_si128(c2[1749],_mm_xor_si128(c2[7494],_mm_xor_si128(c2[4635],_mm_xor_si128(c2[1204],_mm_xor_si128(c2[7520],_mm_xor_si128(c2[5227],_mm_xor_si128(c2[8094],_mm_xor_si128(c2[7521],_mm_xor_si128(c2[7546],_mm_xor_si128(c2[681],_mm_xor_si128(c2[8112],_mm_xor_si128(c2[8695],_mm_xor_si128(c2[7572],_mm_xor_si128(c2[4141],_mm_xor_si128(c2[709],_mm_xor_si128(c2[7598],_mm_xor_si128(c2[1303],_mm_xor_si128(c2[8173],_mm_xor_si128(c2[7624],_mm_xor_si128(c2[7627],_mm_xor_si128(c2[1334],_mm_xor_si128(c2[7650],_mm_xor_si128(c2[1933],_mm_xor_si128(c2[1931],_mm_xor_si128(c2[7676],_mm_xor_si128(c2[8820],_mm_xor_si128(c2[4239],_mm_xor_si128(c2[7702],_mm_xor_si128(c2[266],_mm_xor_si128(c2[2551],_mm_xor_si128(c2[7728],_mm_xor_si128(c2[7729],_mm_xor_si128(c2[298],_mm_xor_si128(c2[7151],_mm_xor_si128(c2[7754],_mm_xor_si128(c2[5470],_mm_xor_si128(c2[4319],_mm_xor_si128(c2[7780],_mm_xor_si128(c2[4917],_mm_xor_si128(c2[2638],_mm_xor_si128(c2[7806],_mm_xor_si128(c2[5520],_mm_xor_si128(c2[1515],_mm_xor_si128(c2[7832],_mm_xor_si128(c2[2113],_mm_xor_si128(c2[1546],_mm_xor_si128(c2[7858],_mm_xor_si128(c2[6716],_mm_xor_si128(c2[5576],_mm_xor_si128(c2[7884],_mm_xor_si128(c2[7315],_mm_xor_si128(c2[6166],_mm_xor_si128(c2[7910],_mm_xor_si128(c2[473],_mm_xor_si128(c2[3329],_mm_xor_si128(c2[7936],_mm_xor_si128(c2[3358],_mm_xor_si128(c2[4507],_mm_xor_si128(c2[7962],_mm_xor_si128(c2[9110],_mm_xor_si128(c2[9106],_mm_xor_si128(c2[7988],_mm_xor_si128(c2[7418],c2[6840]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+     d2[312]=simde_mm_xor_si128(c2[7442],simde_mm_xor_si128(c2[1724],simde_mm_xor_si128(c2[574],simde_mm_xor_si128(c2[579],simde_mm_xor_si128(c2[5148],simde_mm_xor_si128(c2[7468],simde_mm_xor_si128(c2[1179],simde_mm_xor_si128(c2[1749],simde_mm_xor_si128(c2[7494],simde_mm_xor_si128(c2[4635],simde_mm_xor_si128(c2[1204],simde_mm_xor_si128(c2[7520],simde_mm_xor_si128(c2[5227],simde_mm_xor_si128(c2[8094],simde_mm_xor_si128(c2[7521],simde_mm_xor_si128(c2[7546],simde_mm_xor_si128(c2[681],simde_mm_xor_si128(c2[8112],simde_mm_xor_si128(c2[8695],simde_mm_xor_si128(c2[7572],simde_mm_xor_si128(c2[4141],simde_mm_xor_si128(c2[709],simde_mm_xor_si128(c2[7598],simde_mm_xor_si128(c2[1303],simde_mm_xor_si128(c2[8173],simde_mm_xor_si128(c2[7624],simde_mm_xor_si128(c2[7627],simde_mm_xor_si128(c2[1334],simde_mm_xor_si128(c2[7650],simde_mm_xor_si128(c2[1933],simde_mm_xor_si128(c2[1931],simde_mm_xor_si128(c2[7676],simde_mm_xor_si128(c2[8820],simde_mm_xor_si128(c2[4239],simde_mm_xor_si128(c2[7702],simde_mm_xor_si128(c2[266],simde_mm_xor_si128(c2[2551],simde_mm_xor_si128(c2[7728],simde_mm_xor_si128(c2[7729],simde_mm_xor_si128(c2[298],simde_mm_xor_si128(c2[7151],simde_mm_xor_si128(c2[7754],simde_mm_xor_si128(c2[5470],simde_mm_xor_si128(c2[4319],simde_mm_xor_si128(c2[7780],simde_mm_xor_si128(c2[4917],simde_mm_xor_si128(c2[2638],simde_mm_xor_si128(c2[7806],simde_mm_xor_si128(c2[5520],simde_mm_xor_si128(c2[1515],simde_mm_xor_si128(c2[7832],simde_mm_xor_si128(c2[2113],simde_mm_xor_si128(c2[1546],simde_mm_xor_si128(c2[7858],simde_mm_xor_si128(c2[6716],simde_mm_xor_si128(c2[5576],simde_mm_xor_si128(c2[7884],simde_mm_xor_si128(c2[7315],simde_mm_xor_si128(c2[6166],simde_mm_xor_si128(c2[7910],simde_mm_xor_si128(c2[473],simde_mm_xor_si128(c2[3329],simde_mm_xor_si128(c2[7936],simde_mm_xor_si128(c2[3358],simde_mm_xor_si128(c2[4507],simde_mm_xor_si128(c2[7962],simde_mm_xor_si128(c2[9110],simde_mm_xor_si128(c2[9106],simde_mm_xor_si128(c2[7988],simde_mm_xor_si128(c2[7418],c2[6840]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
 
 //row: 25
-     d2[325]=_mm_xor_si128(c2[2314],_mm_xor_si128(c2[5880],_mm_xor_si128(c2[183],c2[7229])));
+     d2[325]=simde_mm_xor_si128(c2[2314],simde_mm_xor_si128(c2[5880],simde_mm_xor_si128(c2[183],c2[7229])));
 
 //row: 26
-     d2[338]=_mm_xor_si128(c2[2861],_mm_xor_si128(c2[8068],_mm_xor_si128(c2[116],c2[3262])));
+     d2[338]=simde_mm_xor_si128(c2[2861],simde_mm_xor_si128(c2[8068],simde_mm_xor_si128(c2[116],c2[3262])));
 
 //row: 27
-     d2[351]=_mm_xor_si128(c2[1176],_mm_xor_si128(c2[7028],c2[3641]));
+     d2[351]=simde_mm_xor_si128(c2[1176],simde_mm_xor_si128(c2[7028],c2[3641]));
 
 //row: 28
-     d2[364]=_mm_xor_si128(c2[2288],_mm_xor_si128(c2[676],_mm_xor_si128(c2[5072],c2[7987])));
+     d2[364]=simde_mm_xor_si128(c2[2288],simde_mm_xor_si128(c2[676],simde_mm_xor_si128(c2[5072],c2[7987])));
 
 //row: 29
-     d2[377]=_mm_xor_si128(c2[7442],_mm_xor_si128(c2[1724],_mm_xor_si128(c2[574],_mm_xor_si128(c2[5720],_mm_xor_si128(c2[579],_mm_xor_si128(c2[7468],_mm_xor_si128(c2[1179],_mm_xor_si128(c2[6890],_mm_xor_si128(c2[1749],_mm_xor_si128(c2[6895],_mm_xor_si128(c2[7494],_mm_xor_si128(c2[4635],_mm_xor_si128(c2[1204],_mm_xor_si128(c2[7520],_mm_xor_si128(c2[5227],_mm_xor_si128(c2[4084],_mm_xor_si128(c2[8094],_mm_xor_si128(c2[7546],_mm_xor_si128(c2[681],_mm_xor_si128(c2[4115],_mm_xor_si128(c2[8112],_mm_xor_si128(c2[7572],_mm_xor_si128(c2[4141],_mm_xor_si128(c2[709],_mm_xor_si128(c2[7598],_mm_xor_si128(c2[1303],_mm_xor_si128(c2[4163],_mm_xor_si128(c2[8173],_mm_xor_si128(c2[7624],_mm_xor_si128(c2[7627],_mm_xor_si128(c2[6475],_mm_xor_si128(c2[1334],_mm_xor_si128(c2[7650],_mm_xor_si128(c2[1933],_mm_xor_si128(c2[7072],_mm_xor_si128(c2[1931],_mm_xor_si128(c2[7676],_mm_xor_si128(c2[8820],_mm_xor_si128(c2[4239],_mm_xor_si128(c2[7702],_mm_xor_si128(c2[266],_mm_xor_si128(c2[7705],_mm_xor_si128(c2[2551],_mm_xor_si128(c2[7728],_mm_xor_si128(c2[7729],_mm_xor_si128(c2[5439],_mm_xor_si128(c2[298],_mm_xor_si128(c2[7754],_mm_xor_si128(c2[5470],_mm_xor_si128(c2[322],_mm_xor_si128(c2[4319],_mm_xor_si128(c2[7780],_mm_xor_si128(c2[4917],_mm_xor_si128(c2[7779],_mm_xor_si128(c2[2638],_mm_xor_si128(c2[7806],_mm_xor_si128(c2[5520],_mm_xor_si128(c2[6656],_mm_xor_si128(c2[1515],_mm_xor_si128(c2[4948],_mm_xor_si128(c2[7832],_mm_xor_si128(c2[2113],_mm_xor_si128(c2[1546],_mm_xor_si128(c2[7858],_mm_xor_si128(c2[6716],_mm_xor_si128(c2[1566],_mm_xor_si128(c2[5576],_mm_xor_si128(c2[7884],_mm_xor_si128(c2[7315],_mm_xor_si128(c2[2169],_mm_xor_si128(c2[6166],_mm_xor_si128(c2[7910],_mm_xor_si128(c2[473],_mm_xor_si128(c2[8483],_mm_xor_si128(c2[3329],_mm_xor_si128(c2[6194],_mm_xor_si128(c2[7936],_mm_xor_si128(c2[3358],_mm_xor_si128(c2[4507],_mm_xor_si128(c2[7962],_mm_xor_si128(c2[9110],_mm_xor_si128(c2[5096],_mm_xor_si128(c2[9106],_mm_xor_si128(c2[7988],_mm_xor_si128(c2[7418],_mm_xor_si128(c2[2843],c2[6840]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+     d2[377]=simde_mm_xor_si128(c2[7442],simde_mm_xor_si128(c2[1724],simde_mm_xor_si128(c2[574],simde_mm_xor_si128(c2[5720],simde_mm_xor_si128(c2[579],simde_mm_xor_si128(c2[7468],simde_mm_xor_si128(c2[1179],simde_mm_xor_si128(c2[6890],simde_mm_xor_si128(c2[1749],simde_mm_xor_si128(c2[6895],simde_mm_xor_si128(c2[7494],simde_mm_xor_si128(c2[4635],simde_mm_xor_si128(c2[1204],simde_mm_xor_si128(c2[7520],simde_mm_xor_si128(c2[5227],simde_mm_xor_si128(c2[4084],simde_mm_xor_si128(c2[8094],simde_mm_xor_si128(c2[7546],simde_mm_xor_si128(c2[681],simde_mm_xor_si128(c2[4115],simde_mm_xor_si128(c2[8112],simde_mm_xor_si128(c2[7572],simde_mm_xor_si128(c2[4141],simde_mm_xor_si128(c2[709],simde_mm_xor_si128(c2[7598],simde_mm_xor_si128(c2[1303],simde_mm_xor_si128(c2[4163],simde_mm_xor_si128(c2[8173],simde_mm_xor_si128(c2[7624],simde_mm_xor_si128(c2[7627],simde_mm_xor_si128(c2[6475],simde_mm_xor_si128(c2[1334],simde_mm_xor_si128(c2[7650],simde_mm_xor_si128(c2[1933],simde_mm_xor_si128(c2[7072],simde_mm_xor_si128(c2[1931],simde_mm_xor_si128(c2[7676],simde_mm_xor_si128(c2[8820],simde_mm_xor_si128(c2[4239],simde_mm_xor_si128(c2[7702],simde_mm_xor_si128(c2[266],simde_mm_xor_si128(c2[7705],simde_mm_xor_si128(c2[2551],simde_mm_xor_si128(c2[7728],simde_mm_xor_si128(c2[7729],simde_mm_xor_si128(c2[5439],simde_mm_xor_si128(c2[298],simde_mm_xor_si128(c2[7754],simde_mm_xor_si128(c2[5470],simde_mm_xor_si128(c2[322],simde_mm_xor_si128(c2[4319],simde_mm_xor_si128(c2[7780],simde_mm_xor_si128(c2[4917],simde_mm_xor_si128(c2[7779],simde_mm_xor_si128(c2[2638],simde_mm_xor_si128(c2[7806],simde_mm_xor_si128(c2[5520],simde_mm_xor_si128(c2[6656],simde_mm_xor_si128(c2[1515],simde_mm_xor_si128(c2[4948],simde_mm_xor_si128(c2[7832],simde_mm_xor_si128(c2[2113],simde_mm_xor_si128(c2[1546],simde_mm_xor_si128(c2[7858],simde_mm_xor_si128(c2[6716],simde_mm_xor_si128(c2[1566],simde_mm_xor_si128(c2[5576],simde_mm_xor_si128(c2[7884],simde_mm_xor_si128(c2[7315],simde_mm_xor_si128(c2[2169],simde_mm_xor_si128(c2[6166],simde_mm_xor_si128(c2[7910],simde_mm_xor_si128(c2[473],simde_mm_xor_si128(c2[8483],simde_mm_xor_si128(c2[3329],simde_mm_xor_si128(c2[6194],simde_mm_xor_si128(c2[7936],simde_mm_xor_si128(c2[3358],simde_mm_xor_si128(c2[4507],simde_mm_xor_si128(c2[7962],simde_mm_xor_si128(c2[9110],simde_mm_xor_si128(c2[5096],simde_mm_xor_si128(c2[9106],simde_mm_xor_si128(c2[7988],simde_mm_xor_si128(c2[7418],simde_mm_xor_si128(c2[2843],c2[6840]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
 
 //row: 30
-     d2[390]=_mm_xor_si128(c2[6299],_mm_xor_si128(c2[581],_mm_xor_si128(c2[4585],_mm_xor_si128(c2[8582],_mm_xor_si128(c2[4577],_mm_xor_si128(c2[8587],_mm_xor_si128(c2[1144],_mm_xor_si128(c2[6325],_mm_xor_si128(c2[5177],_mm_xor_si128(c2[36],_mm_xor_si128(c2[5747],_mm_xor_si128(c2[606],_mm_xor_si128(c2[6351],_mm_xor_si128(c2[3492],_mm_xor_si128(c2[5202],_mm_xor_si128(c2[61],_mm_xor_si128(c2[6377],_mm_xor_si128(c2[4084],_mm_xor_si128(c2[2941],_mm_xor_si128(c2[6951],_mm_xor_si128(c2[6403],_mm_xor_si128(c2[4692],_mm_xor_si128(c2[8689],_mm_xor_si128(c2[2972],_mm_xor_si128(c2[6969],_mm_xor_si128(c2[6429],_mm_xor_si128(c2[2998],_mm_xor_si128(c2[4707],_mm_xor_si128(c2[8717],_mm_xor_si128(c2[6455],_mm_xor_si128(c2[5314],_mm_xor_si128(c2[160],_mm_xor_si128(c2[3020],_mm_xor_si128(c2[7030],_mm_xor_si128(c2[6481],_mm_xor_si128(c2[2474],_mm_xor_si128(c2[6484],_mm_xor_si128(c2[5332],_mm_xor_si128(c2[191],_mm_xor_si128(c2[6507],_mm_xor_si128(c2[5931],_mm_xor_si128(c2[790],_mm_xor_si128(c2[5929],_mm_xor_si128(c2[788],_mm_xor_si128(c2[6533],_mm_xor_si128(c2[7677],_mm_xor_si128(c2[8250],_mm_xor_si128(c2[3096],_mm_xor_si128(c2[6559],_mm_xor_si128(c2[4264],_mm_xor_si128(c2[8274],_mm_xor_si128(c2[6562],_mm_xor_si128(c2[1408],_mm_xor_si128(c2[4841],_mm_xor_si128(c2[6585],_mm_xor_si128(c2[6586],_mm_xor_si128(c2[4296],_mm_xor_si128(c2[8306],_mm_xor_si128(c2[6611],_mm_xor_si128(c2[4327],_mm_xor_si128(c2[8330],_mm_xor_si128(c2[3176],_mm_xor_si128(c2[6637],_mm_xor_si128(c2[8928],_mm_xor_si128(c2[3774],_mm_xor_si128(c2[6636],_mm_xor_si128(c2[1482],_mm_xor_si128(c2[345],_mm_xor_si128(c2[6663],_mm_xor_si128(c2[367],_mm_xor_si128(c2[4377],_mm_xor_si128(c2[5513],_mm_xor_si128(c2[372],_mm_xor_si128(c2[6689],_mm_xor_si128(c2[970],_mm_xor_si128(c2[5544],_mm_xor_si128(c2[390],_mm_xor_si128(c2[6715],_mm_xor_si128(c2[5573],_mm_xor_si128(c2[423],_mm_xor_si128(c2[4420],_mm_xor_si128(c2[6741],_mm_xor_si128(c2[2162],_mm_xor_si128(c2[6172],_mm_xor_si128(c2[1026],_mm_xor_si128(c2[5023],_mm_xor_si128(c2[6767],_mm_xor_si128(c2[4484],_mm_xor_si128(c2[8481],_mm_xor_si128(c2[7340],_mm_xor_si128(c2[2186],_mm_xor_si128(c2[6793],_mm_xor_si128(c2[2215],_mm_xor_si128(c2[8505],_mm_xor_si128(c2[3364],_mm_xor_si128(c2[6819],_mm_xor_si128(c2[3957],_mm_xor_si128(c2[7967],_mm_xor_si128(c2[3953],_mm_xor_si128(c2[7963],_mm_xor_si128(c2[6845],_mm_xor_si128(c2[6275],_mm_xor_si128(c2[1700],c2[5697])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+     d2[390]=simde_mm_xor_si128(c2[6299],simde_mm_xor_si128(c2[581],simde_mm_xor_si128(c2[4585],simde_mm_xor_si128(c2[8582],simde_mm_xor_si128(c2[4577],simde_mm_xor_si128(c2[8587],simde_mm_xor_si128(c2[1144],simde_mm_xor_si128(c2[6325],simde_mm_xor_si128(c2[5177],simde_mm_xor_si128(c2[36],simde_mm_xor_si128(c2[5747],simde_mm_xor_si128(c2[606],simde_mm_xor_si128(c2[6351],simde_mm_xor_si128(c2[3492],simde_mm_xor_si128(c2[5202],simde_mm_xor_si128(c2[61],simde_mm_xor_si128(c2[6377],simde_mm_xor_si128(c2[4084],simde_mm_xor_si128(c2[2941],simde_mm_xor_si128(c2[6951],simde_mm_xor_si128(c2[6403],simde_mm_xor_si128(c2[4692],simde_mm_xor_si128(c2[8689],simde_mm_xor_si128(c2[2972],simde_mm_xor_si128(c2[6969],simde_mm_xor_si128(c2[6429],simde_mm_xor_si128(c2[2998],simde_mm_xor_si128(c2[4707],simde_mm_xor_si128(c2[8717],simde_mm_xor_si128(c2[6455],simde_mm_xor_si128(c2[5314],simde_mm_xor_si128(c2[160],simde_mm_xor_si128(c2[3020],simde_mm_xor_si128(c2[7030],simde_mm_xor_si128(c2[6481],simde_mm_xor_si128(c2[2474],simde_mm_xor_si128(c2[6484],simde_mm_xor_si128(c2[5332],simde_mm_xor_si128(c2[191],simde_mm_xor_si128(c2[6507],simde_mm_xor_si128(c2[5931],simde_mm_xor_si128(c2[790],simde_mm_xor_si128(c2[5929],simde_mm_xor_si128(c2[788],simde_mm_xor_si128(c2[6533],simde_mm_xor_si128(c2[7677],simde_mm_xor_si128(c2[8250],simde_mm_xor_si128(c2[3096],simde_mm_xor_si128(c2[6559],simde_mm_xor_si128(c2[4264],simde_mm_xor_si128(c2[8274],simde_mm_xor_si128(c2[6562],simde_mm_xor_si128(c2[1408],simde_mm_xor_si128(c2[4841],simde_mm_xor_si128(c2[6585],simde_mm_xor_si128(c2[6586],simde_mm_xor_si128(c2[4296],simde_mm_xor_si128(c2[8306],simde_mm_xor_si128(c2[6611],simde_mm_xor_si128(c2[4327],simde_mm_xor_si128(c2[8330],simde_mm_xor_si128(c2[3176],simde_mm_xor_si128(c2[6637],simde_mm_xor_si128(c2[8928],simde_mm_xor_si128(c2[3774],simde_mm_xor_si128(c2[6636],simde_mm_xor_si128(c2[1482],simde_mm_xor_si128(c2[345],simde_mm_xor_si128(c2[6663],simde_mm_xor_si128(c2[367],simde_mm_xor_si128(c2[4377],simde_mm_xor_si128(c2[5513],simde_mm_xor_si128(c2[372],simde_mm_xor_si128(c2[6689],simde_mm_xor_si128(c2[970],simde_mm_xor_si128(c2[5544],simde_mm_xor_si128(c2[390],simde_mm_xor_si128(c2[6715],simde_mm_xor_si128(c2[5573],simde_mm_xor_si128(c2[423],simde_mm_xor_si128(c2[4420],simde_mm_xor_si128(c2[6741],simde_mm_xor_si128(c2[2162],simde_mm_xor_si128(c2[6172],simde_mm_xor_si128(c2[1026],simde_mm_xor_si128(c2[5023],simde_mm_xor_si128(c2[6767],simde_mm_xor_si128(c2[4484],simde_mm_xor_si128(c2[8481],simde_mm_xor_si128(c2[7340],simde_mm_xor_si128(c2[2186],simde_mm_xor_si128(c2[6793],simde_mm_xor_si128(c2[2215],simde_mm_xor_si128(c2[8505],simde_mm_xor_si128(c2[3364],simde_mm_xor_si128(c2[6819],simde_mm_xor_si128(c2[3957],simde_mm_xor_si128(c2[7967],simde_mm_xor_si128(c2[3953],simde_mm_xor_si128(c2[7963],simde_mm_xor_si128(c2[6845],simde_mm_xor_si128(c2[6275],simde_mm_xor_si128(c2[1700],c2[5697])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
 
 //row: 31
-     d2[403]=_mm_xor_si128(c2[2289],_mm_xor_si128(c2[5153],_mm_xor_si128(c2[5722],_mm_xor_si128(c2[8586],_mm_xor_si128(c2[4585],_mm_xor_si128(c2[7436],_mm_xor_si128(c2[4577],_mm_xor_si128(c2[3444],_mm_xor_si128(c2[7441],_mm_xor_si128(c2[2315],_mm_xor_si128(c2[5179],_mm_xor_si128(c2[5177],_mm_xor_si128(c2[8041],_mm_xor_si128(c2[5747],_mm_xor_si128(c2[4614],_mm_xor_si128(c2[8611],_mm_xor_si128(c2[2314],_mm_xor_si128(c2[2341],_mm_xor_si128(c2[5205],_mm_xor_si128(c2[8633],_mm_xor_si128(c2[2346],_mm_xor_si128(c2[5202],_mm_xor_si128(c2[8066],_mm_xor_si128(c2[2367],_mm_xor_si128(c2[5231],_mm_xor_si128(c2[87],_mm_xor_si128(c2[2938],_mm_xor_si128(c2[2941],_mm_xor_si128(c2[1795],_mm_xor_si128(c2[5805],_mm_xor_si128(c2[2393],_mm_xor_si128(c2[5257],_mm_xor_si128(c2[4692],_mm_xor_si128(c2[7543],_mm_xor_si128(c2[2972],_mm_xor_si128(c2[1826],_mm_xor_si128(c2[5836],_mm_xor_si128(c2[2419],_mm_xor_si128(c2[5283],_mm_xor_si128(c2[8139],_mm_xor_si128(c2[1852],_mm_xor_si128(c2[4707],_mm_xor_si128(c2[7571],_mm_xor_si128(c2[2445],_mm_xor_si128(c2[5309],_mm_xor_si128(c2[5314],_mm_xor_si128(c2[8165],_mm_xor_si128(c2[3020],_mm_xor_si128(c2[1874],_mm_xor_si128(c2[5884],_mm_xor_si128(c2[2471],_mm_xor_si128(c2[5335],_mm_xor_si128(c2[2474],_mm_xor_si128(c2[5338],_mm_xor_si128(c2[5332],_mm_xor_si128(c2[4186],_mm_xor_si128(c2[8196],_mm_xor_si128(c2[757],_mm_xor_si128(c2[2497],_mm_xor_si128(c2[5361],_mm_xor_si128(c2[5931],_mm_xor_si128(c2[8795],_mm_xor_si128(c2[5929],_mm_xor_si128(c2[4796],_mm_xor_si128(c2[8793],_mm_xor_si128(c2[2523],_mm_xor_si128(c2[5387],_mm_xor_si128(c2[3667],_mm_xor_si128(c2[6531],_mm_xor_si128(c2[8250],_mm_xor_si128(c2[1950],_mm_xor_si128(c2[2549],_mm_xor_si128(c2[5413],_mm_xor_si128(c2[4264],_mm_xor_si128(c2[7128],_mm_xor_si128(c2[6562],_mm_xor_si128(c2[5416],_mm_xor_si128(c2[262],_mm_xor_si128(c2[2575],_mm_xor_si128(c2[5439],_mm_xor_si128(c2[2576],_mm_xor_si128(c2[5440],_mm_xor_si128(c2[4296],_mm_xor_si128(c2[3150],_mm_xor_si128(c2[7160],_mm_xor_si128(c2[2601],_mm_xor_si128(c2[5465],_mm_xor_si128(c2[317],_mm_xor_si128(c2[3181],_mm_xor_si128(c2[8330],_mm_xor_si128(c2[7184],_mm_xor_si128(c2[2030],_mm_xor_si128(c2[2627],_mm_xor_si128(c2[5491],_mm_xor_si128(c2[8928],_mm_xor_si128(c2[2628],_mm_xor_si128(c2[6636],_mm_xor_si128(c2[5490],_mm_xor_si128(c2[349],_mm_xor_si128(c2[2653],_mm_xor_si128(c2[5517],_mm_xor_si128(c2[367],_mm_xor_si128(c2[3231],_mm_xor_si128(c2[5513],_mm_xor_si128(c2[4380],_mm_xor_si128(c2[8377],_mm_xor_si128(c2[2679],_mm_xor_si128(c2[5543],_mm_xor_si128(c2[6111],_mm_xor_si128(c2[8975],_mm_xor_si128(c2[5544],_mm_xor_si128(c2[8408],_mm_xor_si128(c2[2705],_mm_xor_si128(c2[5569],_mm_xor_si128(c2[1563],_mm_xor_si128(c2[4427],_mm_xor_si128(c2[423],_mm_xor_si128(c2[8428],_mm_xor_si128(c2[3287],_mm_xor_si128(c2[2731],_mm_xor_si128(c2[5595],_mm_xor_si128(c2[2162],_mm_xor_si128(c2[5026],_mm_xor_si128(c2[1026],_mm_xor_si128(c2[9031],_mm_xor_si128(c2[3877],_mm_xor_si128(c2[2757],_mm_xor_si128(c2[5621],_mm_xor_si128(c2[4484],_mm_xor_si128(c2[7335],_mm_xor_si128(c2[7340],_mm_xor_si128(c2[6194],_mm_xor_si128(c2[1040],_mm_xor_si128(c2[2783],_mm_xor_si128(c2[5647],_mm_xor_si128(c2[7369],_mm_xor_si128(c2[1069],_mm_xor_si128(c2[8505],_mm_xor_si128(c2[2218],_mm_xor_si128(c2[2809],_mm_xor_si128(c2[5673],_mm_xor_si128(c2[3957],_mm_xor_si128(c2[6821],_mm_xor_si128(c2[3953],_mm_xor_si128(c2[2820],_mm_xor_si128(c2[6817],_mm_xor_si128(c2[2835],_mm_xor_si128(c2[5699],_mm_xor_si128(c2[2265],_mm_xor_si128(c2[5129],_mm_xor_si128(c2[1700],_mm_xor_si128(c2[554],c2[4551])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+     d2[403]=simde_mm_xor_si128(c2[2289],simde_mm_xor_si128(c2[5153],simde_mm_xor_si128(c2[5722],simde_mm_xor_si128(c2[8586],simde_mm_xor_si128(c2[4585],simde_mm_xor_si128(c2[7436],simde_mm_xor_si128(c2[4577],simde_mm_xor_si128(c2[3444],simde_mm_xor_si128(c2[7441],simde_mm_xor_si128(c2[2315],simde_mm_xor_si128(c2[5179],simde_mm_xor_si128(c2[5177],simde_mm_xor_si128(c2[8041],simde_mm_xor_si128(c2[5747],simde_mm_xor_si128(c2[4614],simde_mm_xor_si128(c2[8611],simde_mm_xor_si128(c2[2314],simde_mm_xor_si128(c2[2341],simde_mm_xor_si128(c2[5205],simde_mm_xor_si128(c2[8633],simde_mm_xor_si128(c2[2346],simde_mm_xor_si128(c2[5202],simde_mm_xor_si128(c2[8066],simde_mm_xor_si128(c2[2367],simde_mm_xor_si128(c2[5231],simde_mm_xor_si128(c2[87],simde_mm_xor_si128(c2[2938],simde_mm_xor_si128(c2[2941],simde_mm_xor_si128(c2[1795],simde_mm_xor_si128(c2[5805],simde_mm_xor_si128(c2[2393],simde_mm_xor_si128(c2[5257],simde_mm_xor_si128(c2[4692],simde_mm_xor_si128(c2[7543],simde_mm_xor_si128(c2[2972],simde_mm_xor_si128(c2[1826],simde_mm_xor_si128(c2[5836],simde_mm_xor_si128(c2[2419],simde_mm_xor_si128(c2[5283],simde_mm_xor_si128(c2[8139],simde_mm_xor_si128(c2[1852],simde_mm_xor_si128(c2[4707],simde_mm_xor_si128(c2[7571],simde_mm_xor_si128(c2[2445],simde_mm_xor_si128(c2[5309],simde_mm_xor_si128(c2[5314],simde_mm_xor_si128(c2[8165],simde_mm_xor_si128(c2[3020],simde_mm_xor_si128(c2[1874],simde_mm_xor_si128(c2[5884],simde_mm_xor_si128(c2[2471],simde_mm_xor_si128(c2[5335],simde_mm_xor_si128(c2[2474],simde_mm_xor_si128(c2[5338],simde_mm_xor_si128(c2[5332],simde_mm_xor_si128(c2[4186],simde_mm_xor_si128(c2[8196],simde_mm_xor_si128(c2[757],simde_mm_xor_si128(c2[2497],simde_mm_xor_si128(c2[5361],simde_mm_xor_si128(c2[5931],simde_mm_xor_si128(c2[8795],simde_mm_xor_si128(c2[5929],simde_mm_xor_si128(c2[4796],simde_mm_xor_si128(c2[8793],simde_mm_xor_si128(c2[2523],simde_mm_xor_si128(c2[5387],simde_mm_xor_si128(c2[3667],simde_mm_xor_si128(c2[6531],simde_mm_xor_si128(c2[8250],simde_mm_xor_si128(c2[1950],simde_mm_xor_si128(c2[2549],simde_mm_xor_si128(c2[5413],simde_mm_xor_si128(c2[4264],simde_mm_xor_si128(c2[7128],simde_mm_xor_si128(c2[6562],simde_mm_xor_si128(c2[5416],simde_mm_xor_si128(c2[262],simde_mm_xor_si128(c2[2575],simde_mm_xor_si128(c2[5439],simde_mm_xor_si128(c2[2576],simde_mm_xor_si128(c2[5440],simde_mm_xor_si128(c2[4296],simde_mm_xor_si128(c2[3150],simde_mm_xor_si128(c2[7160],simde_mm_xor_si128(c2[2601],simde_mm_xor_si128(c2[5465],simde_mm_xor_si128(c2[317],simde_mm_xor_si128(c2[3181],simde_mm_xor_si128(c2[8330],simde_mm_xor_si128(c2[7184],simde_mm_xor_si128(c2[2030],simde_mm_xor_si128(c2[2627],simde_mm_xor_si128(c2[5491],simde_mm_xor_si128(c2[8928],simde_mm_xor_si128(c2[2628],simde_mm_xor_si128(c2[6636],simde_mm_xor_si128(c2[5490],simde_mm_xor_si128(c2[349],simde_mm_xor_si128(c2[2653],simde_mm_xor_si128(c2[5517],simde_mm_xor_si128(c2[367],simde_mm_xor_si128(c2[3231],simde_mm_xor_si128(c2[5513],simde_mm_xor_si128(c2[4380],simde_mm_xor_si128(c2[8377],simde_mm_xor_si128(c2[2679],simde_mm_xor_si128(c2[5543],simde_mm_xor_si128(c2[6111],simde_mm_xor_si128(c2[8975],simde_mm_xor_si128(c2[5544],simde_mm_xor_si128(c2[8408],simde_mm_xor_si128(c2[2705],simde_mm_xor_si128(c2[5569],simde_mm_xor_si128(c2[1563],simde_mm_xor_si128(c2[4427],simde_mm_xor_si128(c2[423],simde_mm_xor_si128(c2[8428],simde_mm_xor_si128(c2[3287],simde_mm_xor_si128(c2[2731],simde_mm_xor_si128(c2[5595],simde_mm_xor_si128(c2[2162],simde_mm_xor_si128(c2[5026],simde_mm_xor_si128(c2[1026],simde_mm_xor_si128(c2[9031],simde_mm_xor_si128(c2[3877],simde_mm_xor_si128(c2[2757],simde_mm_xor_si128(c2[5621],simde_mm_xor_si128(c2[4484],simde_mm_xor_si128(c2[7335],simde_mm_xor_si128(c2[7340],simde_mm_xor_si128(c2[6194],simde_mm_xor_si128(c2[1040],simde_mm_xor_si128(c2[2783],simde_mm_xor_si128(c2[5647],simde_mm_xor_si128(c2[7369],simde_mm_xor_si128(c2[1069],simde_mm_xor_si128(c2[8505],simde_mm_xor_si128(c2[2218],simde_mm_xor_si128(c2[2809],simde_mm_xor_si128(c2[5673],simde_mm_xor_si128(c2[3957],simde_mm_xor_si128(c2[6821],simde_mm_xor_si128(c2[3953],simde_mm_xor_si128(c2[2820],simde_mm_xor_si128(c2[6817],simde_mm_xor_si128(c2[2835],simde_mm_xor_si128(c2[5699],simde_mm_xor_si128(c2[2265],simde_mm_xor_si128(c2[5129],simde_mm_xor_si128(c2[1700],simde_mm_xor_si128(c2[554],c2[4551])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
 
 //row: 32
-     d2[416]=_mm_xor_si128(c2[6299],_mm_xor_si128(c2[581],_mm_xor_si128(c2[4585],_mm_xor_si128(c2[8582],_mm_xor_si128(c2[4577],_mm_xor_si128(c2[8587],_mm_xor_si128(c2[3432],_mm_xor_si128(c2[6325],_mm_xor_si128(c2[5177],_mm_xor_si128(c2[36],_mm_xor_si128(c2[5747],_mm_xor_si128(c2[606],_mm_xor_si128(c2[6351],_mm_xor_si128(c2[3492],_mm_xor_si128(c2[5202],_mm_xor_si128(c2[61],_mm_xor_si128(c2[6377],_mm_xor_si128(c2[4084],_mm_xor_si128(c2[2941],_mm_xor_si128(c2[6951],_mm_xor_si128(c2[6403],_mm_xor_si128(c2[4692],_mm_xor_si128(c2[8689],_mm_xor_si128(c2[2972],_mm_xor_si128(c2[6969],_mm_xor_si128(c2[6429],_mm_xor_si128(c2[2998],_mm_xor_si128(c2[4707],_mm_xor_si128(c2[8717],_mm_xor_si128(c2[6455],_mm_xor_si128(c2[5314],_mm_xor_si128(c2[160],_mm_xor_si128(c2[3020],_mm_xor_si128(c2[7030],_mm_xor_si128(c2[6481],_mm_xor_si128(c2[2474],_mm_xor_si128(c2[6484],_mm_xor_si128(c2[5332],_mm_xor_si128(c2[191],_mm_xor_si128(c2[6507],_mm_xor_si128(c2[5931],_mm_xor_si128(c2[790],_mm_xor_si128(c2[5929],_mm_xor_si128(c2[788],_mm_xor_si128(c2[6533],_mm_xor_si128(c2[7677],_mm_xor_si128(c2[8250],_mm_xor_si128(c2[3096],_mm_xor_si128(c2[6559],_mm_xor_si128(c2[4264],_mm_xor_si128(c2[8274],_mm_xor_si128(c2[6562],_mm_xor_si128(c2[1408],_mm_xor_si128(c2[6585],_mm_xor_si128(c2[6586],_mm_xor_si128(c2[4296],_mm_xor_si128(c2[8306],_mm_xor_si128(c2[6611],_mm_xor_si128(c2[4327],_mm_xor_si128(c2[8330],_mm_xor_si128(c2[3176],_mm_xor_si128(c2[8327],_mm_xor_si128(c2[6637],_mm_xor_si128(c2[8928],_mm_xor_si128(c2[3774],_mm_xor_si128(c2[6636],_mm_xor_si128(c2[1482],_mm_xor_si128(c2[6663],_mm_xor_si128(c2[367],_mm_xor_si128(c2[4377],_mm_xor_si128(c2[5513],_mm_xor_si128(c2[372],_mm_xor_si128(c2[8947],_mm_xor_si128(c2[6689],_mm_xor_si128(c2[970],_mm_xor_si128(c2[5544],_mm_xor_si128(c2[390],_mm_xor_si128(c2[6715],_mm_xor_si128(c2[5573],_mm_xor_si128(c2[423],_mm_xor_si128(c2[4420],_mm_xor_si128(c2[6741],_mm_xor_si128(c2[2162],_mm_xor_si128(c2[6172],_mm_xor_si128(c2[1026],_mm_xor_si128(c2[5023],_mm_xor_si128(c2[6767],_mm_xor_si128(c2[4484],_mm_xor_si128(c2[8481],_mm_xor_si128(c2[7340],_mm_xor_si128(c2[2186],_mm_xor_si128(c2[6793],_mm_xor_si128(c2[2215],_mm_xor_si128(c2[8505],_mm_xor_si128(c2[3364],_mm_xor_si128(c2[6819],_mm_xor_si128(c2[3957],_mm_xor_si128(c2[7967],_mm_xor_si128(c2[3953],_mm_xor_si128(c2[7963],_mm_xor_si128(c2[6845],_mm_xor_si128(c2[6275],_mm_xor_si128(c2[1700],c2[5697])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+     d2[416]=simde_mm_xor_si128(c2[6299],simde_mm_xor_si128(c2[581],simde_mm_xor_si128(c2[4585],simde_mm_xor_si128(c2[8582],simde_mm_xor_si128(c2[4577],simde_mm_xor_si128(c2[8587],simde_mm_xor_si128(c2[3432],simde_mm_xor_si128(c2[6325],simde_mm_xor_si128(c2[5177],simde_mm_xor_si128(c2[36],simde_mm_xor_si128(c2[5747],simde_mm_xor_si128(c2[606],simde_mm_xor_si128(c2[6351],simde_mm_xor_si128(c2[3492],simde_mm_xor_si128(c2[5202],simde_mm_xor_si128(c2[61],simde_mm_xor_si128(c2[6377],simde_mm_xor_si128(c2[4084],simde_mm_xor_si128(c2[2941],simde_mm_xor_si128(c2[6951],simde_mm_xor_si128(c2[6403],simde_mm_xor_si128(c2[4692],simde_mm_xor_si128(c2[8689],simde_mm_xor_si128(c2[2972],simde_mm_xor_si128(c2[6969],simde_mm_xor_si128(c2[6429],simde_mm_xor_si128(c2[2998],simde_mm_xor_si128(c2[4707],simde_mm_xor_si128(c2[8717],simde_mm_xor_si128(c2[6455],simde_mm_xor_si128(c2[5314],simde_mm_xor_si128(c2[160],simde_mm_xor_si128(c2[3020],simde_mm_xor_si128(c2[7030],simde_mm_xor_si128(c2[6481],simde_mm_xor_si128(c2[2474],simde_mm_xor_si128(c2[6484],simde_mm_xor_si128(c2[5332],simde_mm_xor_si128(c2[191],simde_mm_xor_si128(c2[6507],simde_mm_xor_si128(c2[5931],simde_mm_xor_si128(c2[790],simde_mm_xor_si128(c2[5929],simde_mm_xor_si128(c2[788],simde_mm_xor_si128(c2[6533],simde_mm_xor_si128(c2[7677],simde_mm_xor_si128(c2[8250],simde_mm_xor_si128(c2[3096],simde_mm_xor_si128(c2[6559],simde_mm_xor_si128(c2[4264],simde_mm_xor_si128(c2[8274],simde_mm_xor_si128(c2[6562],simde_mm_xor_si128(c2[1408],simde_mm_xor_si128(c2[6585],simde_mm_xor_si128(c2[6586],simde_mm_xor_si128(c2[4296],simde_mm_xor_si128(c2[8306],simde_mm_xor_si128(c2[6611],simde_mm_xor_si128(c2[4327],simde_mm_xor_si128(c2[8330],simde_mm_xor_si128(c2[3176],simde_mm_xor_si128(c2[8327],simde_mm_xor_si128(c2[6637],simde_mm_xor_si128(c2[8928],simde_mm_xor_si128(c2[3774],simde_mm_xor_si128(c2[6636],simde_mm_xor_si128(c2[1482],simde_mm_xor_si128(c2[6663],simde_mm_xor_si128(c2[367],simde_mm_xor_si128(c2[4377],simde_mm_xor_si128(c2[5513],simde_mm_xor_si128(c2[372],simde_mm_xor_si128(c2[8947],simde_mm_xor_si128(c2[6689],simde_mm_xor_si128(c2[970],simde_mm_xor_si128(c2[5544],simde_mm_xor_si128(c2[390],simde_mm_xor_si128(c2[6715],simde_mm_xor_si128(c2[5573],simde_mm_xor_si128(c2[423],simde_mm_xor_si128(c2[4420],simde_mm_xor_si128(c2[6741],simde_mm_xor_si128(c2[2162],simde_mm_xor_si128(c2[6172],simde_mm_xor_si128(c2[1026],simde_mm_xor_si128(c2[5023],simde_mm_xor_si128(c2[6767],simde_mm_xor_si128(c2[4484],simde_mm_xor_si128(c2[8481],simde_mm_xor_si128(c2[7340],simde_mm_xor_si128(c2[2186],simde_mm_xor_si128(c2[6793],simde_mm_xor_si128(c2[2215],simde_mm_xor_si128(c2[8505],simde_mm_xor_si128(c2[3364],simde_mm_xor_si128(c2[6819],simde_mm_xor_si128(c2[3957],simde_mm_xor_si128(c2[7967],simde_mm_xor_si128(c2[3953],simde_mm_xor_si128(c2[7963],simde_mm_xor_si128(c2[6845],simde_mm_xor_si128(c2[6275],simde_mm_xor_si128(c2[1700],c2[5697])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
 
 //row: 33
-     d2[429]=_mm_xor_si128(c2[5746],_mm_xor_si128(c2[8061],_mm_xor_si128(c2[3718],c2[7415])));
+     d2[429]=simde_mm_xor_si128(c2[5746],simde_mm_xor_si128(c2[8061],simde_mm_xor_si128(c2[3718],c2[7415])));
 
 //row: 34
-     d2[442]=_mm_xor_si128(c2[2288],_mm_xor_si128(c2[5339],_mm_xor_si128(c2[3262],c2[6743])));
+     d2[442]=simde_mm_xor_si128(c2[2288],simde_mm_xor_si128(c2[5339],simde_mm_xor_si128(c2[3262],c2[6743])));
 
 //row: 35
-     d2[455]=_mm_xor_si128(c2[5],_mm_xor_si128(c2[3438],_mm_xor_si128(c2[2288],_mm_xor_si128(c2[2293],_mm_xor_si128(c2[31],_mm_xor_si128(c2[2893],_mm_xor_si128(c2[3463],_mm_xor_si128(c2[2314],_mm_xor_si128(c2[57],_mm_xor_si128(c2[6349],_mm_xor_si128(c2[2918],_mm_xor_si128(c2[83],_mm_xor_si128(c2[6954],_mm_xor_si128(c2[657],_mm_xor_si128(c2[109],_mm_xor_si128(c2[2395],_mm_xor_si128(c2[688],_mm_xor_si128(c2[135],_mm_xor_si128(c2[5855],_mm_xor_si128(c2[2423],_mm_xor_si128(c2[161],_mm_xor_si128(c2[3017],_mm_xor_si128(c2[736],_mm_xor_si128(c2[7594],_mm_xor_si128(c2[187],_mm_xor_si128(c2[190],_mm_xor_si128(c2[3048],_mm_xor_si128(c2[213],_mm_xor_si128(c2[3647],_mm_xor_si128(c2[3645],_mm_xor_si128(c2[239],_mm_xor_si128(c2[1383],_mm_xor_si128(c2[5966],_mm_xor_si128(c2[265],_mm_xor_si128(c2[1980],_mm_xor_si128(c2[4265],_mm_xor_si128(c2[291],_mm_xor_si128(c2[292],_mm_xor_si128(c2[2012],_mm_xor_si128(c2[317],_mm_xor_si128(c2[7184],_mm_xor_si128(c2[6033],_mm_xor_si128(c2[4898],_mm_xor_si128(c2[343],_mm_xor_si128(c2[6631],_mm_xor_si128(c2[4352],_mm_xor_si128(c2[369],_mm_xor_si128(c2[7234],_mm_xor_si128(c2[3229],_mm_xor_si128(c2[395],_mm_xor_si128(c2[3827],_mm_xor_si128(c2[3260],_mm_xor_si128(c2[421],_mm_xor_si128(c2[8430],_mm_xor_si128(c2[7290],_mm_xor_si128(c2[447],_mm_xor_si128(c2[9029],_mm_xor_si128(c2[7880],_mm_xor_si128(c2[473],_mm_xor_si128(c2[2187],_mm_xor_si128(c2[5056],_mm_xor_si128(c2[499],_mm_xor_si128(c2[5072],_mm_xor_si128(c2[6221],_mm_xor_si128(c2[525],_mm_xor_si128(c2[1673],_mm_xor_si128(c2[1669],_mm_xor_si128(c2[551],_mm_xor_si128(c2[9132],c2[8554])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+     d2[455]=simde_mm_xor_si128(c2[5],simde_mm_xor_si128(c2[3438],simde_mm_xor_si128(c2[2288],simde_mm_xor_si128(c2[2293],simde_mm_xor_si128(c2[31],simde_mm_xor_si128(c2[2893],simde_mm_xor_si128(c2[3463],simde_mm_xor_si128(c2[2314],simde_mm_xor_si128(c2[57],simde_mm_xor_si128(c2[6349],simde_mm_xor_si128(c2[2918],simde_mm_xor_si128(c2[83],simde_mm_xor_si128(c2[6954],simde_mm_xor_si128(c2[657],simde_mm_xor_si128(c2[109],simde_mm_xor_si128(c2[2395],simde_mm_xor_si128(c2[688],simde_mm_xor_si128(c2[135],simde_mm_xor_si128(c2[5855],simde_mm_xor_si128(c2[2423],simde_mm_xor_si128(c2[161],simde_mm_xor_si128(c2[3017],simde_mm_xor_si128(c2[736],simde_mm_xor_si128(c2[7594],simde_mm_xor_si128(c2[187],simde_mm_xor_si128(c2[190],simde_mm_xor_si128(c2[3048],simde_mm_xor_si128(c2[213],simde_mm_xor_si128(c2[3647],simde_mm_xor_si128(c2[3645],simde_mm_xor_si128(c2[239],simde_mm_xor_si128(c2[1383],simde_mm_xor_si128(c2[5966],simde_mm_xor_si128(c2[265],simde_mm_xor_si128(c2[1980],simde_mm_xor_si128(c2[4265],simde_mm_xor_si128(c2[291],simde_mm_xor_si128(c2[292],simde_mm_xor_si128(c2[2012],simde_mm_xor_si128(c2[317],simde_mm_xor_si128(c2[7184],simde_mm_xor_si128(c2[6033],simde_mm_xor_si128(c2[4898],simde_mm_xor_si128(c2[343],simde_mm_xor_si128(c2[6631],simde_mm_xor_si128(c2[4352],simde_mm_xor_si128(c2[369],simde_mm_xor_si128(c2[7234],simde_mm_xor_si128(c2[3229],simde_mm_xor_si128(c2[395],simde_mm_xor_si128(c2[3827],simde_mm_xor_si128(c2[3260],simde_mm_xor_si128(c2[421],simde_mm_xor_si128(c2[8430],simde_mm_xor_si128(c2[7290],simde_mm_xor_si128(c2[447],simde_mm_xor_si128(c2[9029],simde_mm_xor_si128(c2[7880],simde_mm_xor_si128(c2[473],simde_mm_xor_si128(c2[2187],simde_mm_xor_si128(c2[5056],simde_mm_xor_si128(c2[499],simde_mm_xor_si128(c2[5072],simde_mm_xor_si128(c2[6221],simde_mm_xor_si128(c2[525],simde_mm_xor_si128(c2[1673],simde_mm_xor_si128(c2[1669],simde_mm_xor_si128(c2[551],simde_mm_xor_si128(c2[9132],c2[8554])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
 
 //row: 36
-     d2[468]=_mm_xor_si128(c2[3432],_mm_xor_si128(c2[4952],_mm_xor_si128(c2[973],c2[6762])));
+     d2[468]=simde_mm_xor_si128(c2[3432],simde_mm_xor_si128(c2[4952],simde_mm_xor_si128(c2[973],c2[6762])));
 
 //row: 37
-     d2[481]=_mm_xor_si128(c2[4012],_mm_xor_si128(c2[8009],_mm_xor_si128(c2[2291],_mm_xor_si128(c2[1154],_mm_xor_si128(c2[1146],_mm_xor_si128(c2[4038],_mm_xor_si128(c2[8035],_mm_xor_si128(c2[1746],_mm_xor_si128(c2[2316],_mm_xor_si128(c2[1175],_mm_xor_si128(c2[4064],_mm_xor_si128(c2[8061],_mm_xor_si128(c2[5202],_mm_xor_si128(c2[1771],_mm_xor_si128(c2[4090],_mm_xor_si128(c2[8087],_mm_xor_si128(c2[5807],_mm_xor_si128(c2[8661],_mm_xor_si128(c2[8113],_mm_xor_si128(c2[1248],_mm_xor_si128(c2[8692],_mm_xor_si128(c2[4142],_mm_xor_si128(c2[8139],_mm_xor_si128(c2[4708],_mm_xor_si128(c2[1276],_mm_xor_si128(c2[4168],_mm_xor_si128(c2[8165],_mm_xor_si128(c2[1883],_mm_xor_si128(c2[8740],_mm_xor_si128(c2[8191],_mm_xor_si128(c2[8194],_mm_xor_si128(c2[1901],_mm_xor_si128(c2[8217],_mm_xor_si128(c2[2500],_mm_xor_si128(c2[2498],_mm_xor_si128(c2[4246],_mm_xor_si128(c2[8243],_mm_xor_si128(c2[236],_mm_xor_si128(c2[4819],_mm_xor_si128(c2[4272],_mm_xor_si128(c2[8269],_mm_xor_si128(c2[833],_mm_xor_si128(c2[3131],_mm_xor_si128(c2[4298],_mm_xor_si128(c2[8295],_mm_xor_si128(c2[8296],_mm_xor_si128(c2[865],_mm_xor_si128(c2[4324],_mm_xor_si128(c2[8321],_mm_xor_si128(c2[6037],_mm_xor_si128(c2[4899],_mm_xor_si128(c2[4350],_mm_xor_si128(c2[8347],_mm_xor_si128(c2[5497],_mm_xor_si128(c2[3205],_mm_xor_si128(c2[1482],_mm_xor_si128(c2[8373],_mm_xor_si128(c2[6087],_mm_xor_si128(c2[2082],_mm_xor_si128(c2[4402],_mm_xor_si128(c2[8399],_mm_xor_si128(c2[2680],_mm_xor_si128(c2[2113],_mm_xor_si128(c2[4428],_mm_xor_si128(c2[8425],_mm_xor_si128(c2[7283],_mm_xor_si128(c2[6143],_mm_xor_si128(c2[8451],_mm_xor_si128(c2[7882],_mm_xor_si128(c2[6746],_mm_xor_si128(c2[4480],_mm_xor_si128(c2[8477],_mm_xor_si128(c2[1040],_mm_xor_si128(c2[3909],_mm_xor_si128(c2[4506],_mm_xor_si128(c2[8503],_mm_xor_si128(c2[3938],_mm_xor_si128(c2[5074],_mm_xor_si128(c2[4532],_mm_xor_si128(c2[8529],_mm_xor_si128(c2[526],_mm_xor_si128(c2[522],_mm_xor_si128(c2[4558],_mm_xor_si128(c2[8555],_mm_xor_si128(c2[7985],c2[7420])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+     d2[481]=simde_mm_xor_si128(c2[4012],simde_mm_xor_si128(c2[8009],simde_mm_xor_si128(c2[2291],simde_mm_xor_si128(c2[1154],simde_mm_xor_si128(c2[1146],simde_mm_xor_si128(c2[4038],simde_mm_xor_si128(c2[8035],simde_mm_xor_si128(c2[1746],simde_mm_xor_si128(c2[2316],simde_mm_xor_si128(c2[1175],simde_mm_xor_si128(c2[4064],simde_mm_xor_si128(c2[8061],simde_mm_xor_si128(c2[5202],simde_mm_xor_si128(c2[1771],simde_mm_xor_si128(c2[4090],simde_mm_xor_si128(c2[8087],simde_mm_xor_si128(c2[5807],simde_mm_xor_si128(c2[8661],simde_mm_xor_si128(c2[8113],simde_mm_xor_si128(c2[1248],simde_mm_xor_si128(c2[8692],simde_mm_xor_si128(c2[4142],simde_mm_xor_si128(c2[8139],simde_mm_xor_si128(c2[4708],simde_mm_xor_si128(c2[1276],simde_mm_xor_si128(c2[4168],simde_mm_xor_si128(c2[8165],simde_mm_xor_si128(c2[1883],simde_mm_xor_si128(c2[8740],simde_mm_xor_si128(c2[8191],simde_mm_xor_si128(c2[8194],simde_mm_xor_si128(c2[1901],simde_mm_xor_si128(c2[8217],simde_mm_xor_si128(c2[2500],simde_mm_xor_si128(c2[2498],simde_mm_xor_si128(c2[4246],simde_mm_xor_si128(c2[8243],simde_mm_xor_si128(c2[236],simde_mm_xor_si128(c2[4819],simde_mm_xor_si128(c2[4272],simde_mm_xor_si128(c2[8269],simde_mm_xor_si128(c2[833],simde_mm_xor_si128(c2[3131],simde_mm_xor_si128(c2[4298],simde_mm_xor_si128(c2[8295],simde_mm_xor_si128(c2[8296],simde_mm_xor_si128(c2[865],simde_mm_xor_si128(c2[4324],simde_mm_xor_si128(c2[8321],simde_mm_xor_si128(c2[6037],simde_mm_xor_si128(c2[4899],simde_mm_xor_si128(c2[4350],simde_mm_xor_si128(c2[8347],simde_mm_xor_si128(c2[5497],simde_mm_xor_si128(c2[3205],simde_mm_xor_si128(c2[1482],simde_mm_xor_si128(c2[8373],simde_mm_xor_si128(c2[6087],simde_mm_xor_si128(c2[2082],simde_mm_xor_si128(c2[4402],simde_mm_xor_si128(c2[8399],simde_mm_xor_si128(c2[2680],simde_mm_xor_si128(c2[2113],simde_mm_xor_si128(c2[4428],simde_mm_xor_si128(c2[8425],simde_mm_xor_si128(c2[7283],simde_mm_xor_si128(c2[6143],simde_mm_xor_si128(c2[8451],simde_mm_xor_si128(c2[7882],simde_mm_xor_si128(c2[6746],simde_mm_xor_si128(c2[4480],simde_mm_xor_si128(c2[8477],simde_mm_xor_si128(c2[1040],simde_mm_xor_si128(c2[3909],simde_mm_xor_si128(c2[4506],simde_mm_xor_si128(c2[8503],simde_mm_xor_si128(c2[3938],simde_mm_xor_si128(c2[5074],simde_mm_xor_si128(c2[4532],simde_mm_xor_si128(c2[8529],simde_mm_xor_si128(c2[526],simde_mm_xor_si128(c2[522],simde_mm_xor_si128(c2[4558],simde_mm_xor_si128(c2[8555],simde_mm_xor_si128(c2[7985],c2[7420])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
 
 //row: 38
-     d2[494]=_mm_xor_si128(c2[6297],_mm_xor_si128(c2[238],_mm_xor_si128(c2[3704],c2[2606])));
+     d2[494]=simde_mm_xor_si128(c2[6297],simde_mm_xor_si128(c2[238],simde_mm_xor_si128(c2[3704],c2[2606])));
 
 //row: 39
-     d2[507]=_mm_xor_si128(c2[2314],_mm_xor_si128(c2[6943],_mm_xor_si128(c2[7624],c2[7369])));
+     d2[507]=simde_mm_xor_si128(c2[2314],simde_mm_xor_si128(c2[6943],simde_mm_xor_si128(c2[7624],c2[7369])));
 
 //row: 40
-     d2[520]=_mm_xor_si128(c2[5720],_mm_xor_si128(c2[2501],c2[7306]));
+     d2[520]=simde_mm_xor_si128(c2[5720],simde_mm_xor_si128(c2[2501],c2[7306]));
 
 //row: 41
-     d2[533]=_mm_xor_si128(c2[1170],_mm_xor_si128(c2[6374],_mm_xor_si128(c2[8250],c2[476])));
+     d2[533]=simde_mm_xor_si128(c2[1170],simde_mm_xor_si128(c2[6374],simde_mm_xor_si128(c2[8250],c2[476])));
 
 //row: 42
-     d2[546]=_mm_xor_si128(c2[5151],_mm_xor_si128(c2[8584],_mm_xor_si128(c2[3437],_mm_xor_si128(c2[7447],_mm_xor_si128(c2[3442],_mm_xor_si128(c2[7439],_mm_xor_si128(c2[1726],_mm_xor_si128(c2[5177],_mm_xor_si128(c2[4042],_mm_xor_si128(c2[8039],_mm_xor_si128(c2[4612],_mm_xor_si128(c2[8609],_mm_xor_si128(c2[5203],_mm_xor_si128(c2[2344],_mm_xor_si128(c2[4067],_mm_xor_si128(c2[8064],_mm_xor_si128(c2[5229],_mm_xor_si128(c2[2949],_mm_xor_si128(c2[1806],_mm_xor_si128(c2[5803],_mm_xor_si128(c2[5255],_mm_xor_si128(c2[3544],_mm_xor_si128(c2[7541],_mm_xor_si128(c2[1824],_mm_xor_si128(c2[5834],_mm_xor_si128(c2[5824],_mm_xor_si128(c2[5281],_mm_xor_si128(c2[1850],_mm_xor_si128(c2[3572],_mm_xor_si128(c2[7569],_mm_xor_si128(c2[5307],_mm_xor_si128(c2[4166],_mm_xor_si128(c2[8176],_mm_xor_si128(c2[1872],_mm_xor_si128(c2[5882],_mm_xor_si128(c2[5333],_mm_xor_si128(c2[1326],_mm_xor_si128(c2[5336],_mm_xor_si128(c2[4197],_mm_xor_si128(c2[8194],_mm_xor_si128(c2[5359],_mm_xor_si128(c2[4796],_mm_xor_si128(c2[8793],_mm_xor_si128(c2[4794],_mm_xor_si128(c2[8791],_mm_xor_si128(c2[5385],_mm_xor_si128(c2[6529],_mm_xor_si128(c2[7102],_mm_xor_si128(c2[1961],_mm_xor_si128(c2[5411],_mm_xor_si128(c2[3129],_mm_xor_si128(c2[7126],_mm_xor_si128(c2[5414],_mm_xor_si128(c2[260],_mm_xor_si128(c2[5437],_mm_xor_si128(c2[5438],_mm_xor_si128(c2[3148],_mm_xor_si128(c2[7158],_mm_xor_si128(c2[5463],_mm_xor_si128(c2[3179],_mm_xor_si128(c2[7182],_mm_xor_si128(c2[2028],_mm_xor_si128(c2[5489],_mm_xor_si128(c2[7780],_mm_xor_si128(c2[2626],_mm_xor_si128(c2[5488],_mm_xor_si128(c2[347],_mm_xor_si128(c2[5515],_mm_xor_si128(c2[8383],_mm_xor_si128(c2[3229],_mm_xor_si128(c2[4378],_mm_xor_si128(c2[8375],_mm_xor_si128(c2[5541],_mm_xor_si128(c2[8973],_mm_xor_si128(c2[4396],_mm_xor_si128(c2[8406],_mm_xor_si128(c2[5567],_mm_xor_si128(c2[4425],_mm_xor_si128(c2[8426],_mm_xor_si128(c2[3285],_mm_xor_si128(c2[5593],_mm_xor_si128(c2[1014],_mm_xor_si128(c2[5024],_mm_xor_si128(c2[9029],_mm_xor_si128(c2[3875],_mm_xor_si128(c2[5619],_mm_xor_si128(c2[3336],_mm_xor_si128(c2[7333],_mm_xor_si128(c2[6192],_mm_xor_si128(c2[1051],_mm_xor_si128(c2[5645],_mm_xor_si128(c2[1067],_mm_xor_si128(c2[7370],_mm_xor_si128(c2[2216],_mm_xor_si128(c2[5671],_mm_xor_si128(c2[2809],_mm_xor_si128(c2[6819],_mm_xor_si128(c2[2818],_mm_xor_si128(c2[6815],_mm_xor_si128(c2[5697],_mm_xor_si128(c2[5127],_mm_xor_si128(c2[552],c2[4562]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+     d2[546]=simde_mm_xor_si128(c2[5151],simde_mm_xor_si128(c2[8584],simde_mm_xor_si128(c2[3437],simde_mm_xor_si128(c2[7447],simde_mm_xor_si128(c2[3442],simde_mm_xor_si128(c2[7439],simde_mm_xor_si128(c2[1726],simde_mm_xor_si128(c2[5177],simde_mm_xor_si128(c2[4042],simde_mm_xor_si128(c2[8039],simde_mm_xor_si128(c2[4612],simde_mm_xor_si128(c2[8609],simde_mm_xor_si128(c2[5203],simde_mm_xor_si128(c2[2344],simde_mm_xor_si128(c2[4067],simde_mm_xor_si128(c2[8064],simde_mm_xor_si128(c2[5229],simde_mm_xor_si128(c2[2949],simde_mm_xor_si128(c2[1806],simde_mm_xor_si128(c2[5803],simde_mm_xor_si128(c2[5255],simde_mm_xor_si128(c2[3544],simde_mm_xor_si128(c2[7541],simde_mm_xor_si128(c2[1824],simde_mm_xor_si128(c2[5834],simde_mm_xor_si128(c2[5824],simde_mm_xor_si128(c2[5281],simde_mm_xor_si128(c2[1850],simde_mm_xor_si128(c2[3572],simde_mm_xor_si128(c2[7569],simde_mm_xor_si128(c2[5307],simde_mm_xor_si128(c2[4166],simde_mm_xor_si128(c2[8176],simde_mm_xor_si128(c2[1872],simde_mm_xor_si128(c2[5882],simde_mm_xor_si128(c2[5333],simde_mm_xor_si128(c2[1326],simde_mm_xor_si128(c2[5336],simde_mm_xor_si128(c2[4197],simde_mm_xor_si128(c2[8194],simde_mm_xor_si128(c2[5359],simde_mm_xor_si128(c2[4796],simde_mm_xor_si128(c2[8793],simde_mm_xor_si128(c2[4794],simde_mm_xor_si128(c2[8791],simde_mm_xor_si128(c2[5385],simde_mm_xor_si128(c2[6529],simde_mm_xor_si128(c2[7102],simde_mm_xor_si128(c2[1961],simde_mm_xor_si128(c2[5411],simde_mm_xor_si128(c2[3129],simde_mm_xor_si128(c2[7126],simde_mm_xor_si128(c2[5414],simde_mm_xor_si128(c2[260],simde_mm_xor_si128(c2[5437],simde_mm_xor_si128(c2[5438],simde_mm_xor_si128(c2[3148],simde_mm_xor_si128(c2[7158],simde_mm_xor_si128(c2[5463],simde_mm_xor_si128(c2[3179],simde_mm_xor_si128(c2[7182],simde_mm_xor_si128(c2[2028],simde_mm_xor_si128(c2[5489],simde_mm_xor_si128(c2[7780],simde_mm_xor_si128(c2[2626],simde_mm_xor_si128(c2[5488],simde_mm_xor_si128(c2[347],simde_mm_xor_si128(c2[5515],simde_mm_xor_si128(c2[8383],simde_mm_xor_si128(c2[3229],simde_mm_xor_si128(c2[4378],simde_mm_xor_si128(c2[8375],simde_mm_xor_si128(c2[5541],simde_mm_xor_si128(c2[8973],simde_mm_xor_si128(c2[4396],simde_mm_xor_si128(c2[8406],simde_mm_xor_si128(c2[5567],simde_mm_xor_si128(c2[4425],simde_mm_xor_si128(c2[8426],simde_mm_xor_si128(c2[3285],simde_mm_xor_si128(c2[5593],simde_mm_xor_si128(c2[1014],simde_mm_xor_si128(c2[5024],simde_mm_xor_si128(c2[9029],simde_mm_xor_si128(c2[3875],simde_mm_xor_si128(c2[5619],simde_mm_xor_si128(c2[3336],simde_mm_xor_si128(c2[7333],simde_mm_xor_si128(c2[6192],simde_mm_xor_si128(c2[1051],simde_mm_xor_si128(c2[5645],simde_mm_xor_si128(c2[1067],simde_mm_xor_si128(c2[7370],simde_mm_xor_si128(c2[2216],simde_mm_xor_si128(c2[5671],simde_mm_xor_si128(c2[2809],simde_mm_xor_si128(c2[6819],simde_mm_xor_si128(c2[2818],simde_mm_xor_si128(c2[6815],simde_mm_xor_si128(c2[5697],simde_mm_xor_si128(c2[5127],simde_mm_xor_si128(c2[552],c2[4562]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
 
 //row: 43
-     d2[559]=_mm_xor_si128(c2[5160],_mm_xor_si128(c2[8580],_mm_xor_si128(c2[7443],_mm_xor_si128(c2[3438],_mm_xor_si128(c2[7448],_mm_xor_si128(c2[5186],_mm_xor_si128(c2[8035],_mm_xor_si128(c2[4608],_mm_xor_si128(c2[8618],_mm_xor_si128(c2[598],_mm_xor_si128(c2[5212],_mm_xor_si128(c2[2340],_mm_xor_si128(c2[8060],_mm_xor_si128(c2[5238],_mm_xor_si128(c2[2945],_mm_xor_si128(c2[1802],_mm_xor_si128(c2[5799],_mm_xor_si128(c2[5264],_mm_xor_si128(c2[7550],_mm_xor_si128(c2[1820],_mm_xor_si128(c2[5830],_mm_xor_si128(c2[5290],_mm_xor_si128(c2[1846],_mm_xor_si128(c2[7578],_mm_xor_si128(c2[5316],_mm_xor_si128(c2[8172],_mm_xor_si128(c2[1881],_mm_xor_si128(c2[5878],_mm_xor_si128(c2[5342],_mm_xor_si128(c2[5332],_mm_xor_si128(c2[4193],_mm_xor_si128(c2[8190],_mm_xor_si128(c2[5368],_mm_xor_si128(c2[8789],_mm_xor_si128(c2[4790],_mm_xor_si128(c2[8800],_mm_xor_si128(c2[5394],_mm_xor_si128(c2[6538],_mm_xor_si128(c2[1957],_mm_xor_si128(c2[5420],_mm_xor_si128(c2[7135],_mm_xor_si128(c2[5410],_mm_xor_si128(c2[269],_mm_xor_si128(c2[5446],_mm_xor_si128(c2[5434],_mm_xor_si128(c2[3157],_mm_xor_si128(c2[7154],_mm_xor_si128(c2[5472],_mm_xor_si128(c2[3175],_mm_xor_si128(c2[7178],_mm_xor_si128(c2[2037],_mm_xor_si128(c2[5498],_mm_xor_si128(c2[2635],_mm_xor_si128(c2[5497],_mm_xor_si128(c2[343],_mm_xor_si128(c2[5524],_mm_xor_si128(c2[3225],_mm_xor_si128(c2[4374],_mm_xor_si128(c2[8384],_mm_xor_si128(c2[5550],_mm_xor_si128(c2[8982],_mm_xor_si128(c2[8402],_mm_xor_si128(c2[5576],_mm_xor_si128(c2[4421],_mm_xor_si128(c2[8435],_mm_xor_si128(c2[3281],_mm_xor_si128(c2[2142],_mm_xor_si128(c2[5602],_mm_xor_si128(c2[5020],_mm_xor_si128(c2[9025],_mm_xor_si128(c2[3884],_mm_xor_si128(c2[5628],_mm_xor_si128(c2[7342],_mm_xor_si128(c2[6188],_mm_xor_si128(c2[1047],_mm_xor_si128(c2[2190],_mm_xor_si128(c2[5654],_mm_xor_si128(c2[1076],_mm_xor_si128(c2[2212],_mm_xor_si128(c2[5680],_mm_xor_si128(c2[6815],_mm_xor_si128(c2[2814],_mm_xor_si128(c2[6824],_mm_xor_si128(c2[5706],_mm_xor_si128(c2[5123],_mm_xor_si128(c2[548],c2[4558]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+     d2[559]=simde_mm_xor_si128(c2[5160],simde_mm_xor_si128(c2[8580],simde_mm_xor_si128(c2[7443],simde_mm_xor_si128(c2[3438],simde_mm_xor_si128(c2[7448],simde_mm_xor_si128(c2[5186],simde_mm_xor_si128(c2[8035],simde_mm_xor_si128(c2[4608],simde_mm_xor_si128(c2[8618],simde_mm_xor_si128(c2[598],simde_mm_xor_si128(c2[5212],simde_mm_xor_si128(c2[2340],simde_mm_xor_si128(c2[8060],simde_mm_xor_si128(c2[5238],simde_mm_xor_si128(c2[2945],simde_mm_xor_si128(c2[1802],simde_mm_xor_si128(c2[5799],simde_mm_xor_si128(c2[5264],simde_mm_xor_si128(c2[7550],simde_mm_xor_si128(c2[1820],simde_mm_xor_si128(c2[5830],simde_mm_xor_si128(c2[5290],simde_mm_xor_si128(c2[1846],simde_mm_xor_si128(c2[7578],simde_mm_xor_si128(c2[5316],simde_mm_xor_si128(c2[8172],simde_mm_xor_si128(c2[1881],simde_mm_xor_si128(c2[5878],simde_mm_xor_si128(c2[5342],simde_mm_xor_si128(c2[5332],simde_mm_xor_si128(c2[4193],simde_mm_xor_si128(c2[8190],simde_mm_xor_si128(c2[5368],simde_mm_xor_si128(c2[8789],simde_mm_xor_si128(c2[4790],simde_mm_xor_si128(c2[8800],simde_mm_xor_si128(c2[5394],simde_mm_xor_si128(c2[6538],simde_mm_xor_si128(c2[1957],simde_mm_xor_si128(c2[5420],simde_mm_xor_si128(c2[7135],simde_mm_xor_si128(c2[5410],simde_mm_xor_si128(c2[269],simde_mm_xor_si128(c2[5446],simde_mm_xor_si128(c2[5434],simde_mm_xor_si128(c2[3157],simde_mm_xor_si128(c2[7154],simde_mm_xor_si128(c2[5472],simde_mm_xor_si128(c2[3175],simde_mm_xor_si128(c2[7178],simde_mm_xor_si128(c2[2037],simde_mm_xor_si128(c2[5498],simde_mm_xor_si128(c2[2635],simde_mm_xor_si128(c2[5497],simde_mm_xor_si128(c2[343],simde_mm_xor_si128(c2[5524],simde_mm_xor_si128(c2[3225],simde_mm_xor_si128(c2[4374],simde_mm_xor_si128(c2[8384],simde_mm_xor_si128(c2[5550],simde_mm_xor_si128(c2[8982],simde_mm_xor_si128(c2[8402],simde_mm_xor_si128(c2[5576],simde_mm_xor_si128(c2[4421],simde_mm_xor_si128(c2[8435],simde_mm_xor_si128(c2[3281],simde_mm_xor_si128(c2[2142],simde_mm_xor_si128(c2[5602],simde_mm_xor_si128(c2[5020],simde_mm_xor_si128(c2[9025],simde_mm_xor_si128(c2[3884],simde_mm_xor_si128(c2[5628],simde_mm_xor_si128(c2[7342],simde_mm_xor_si128(c2[6188],simde_mm_xor_si128(c2[1047],simde_mm_xor_si128(c2[2190],simde_mm_xor_si128(c2[5654],simde_mm_xor_si128(c2[1076],simde_mm_xor_si128(c2[2212],simde_mm_xor_si128(c2[5680],simde_mm_xor_si128(c2[6815],simde_mm_xor_si128(c2[2814],simde_mm_xor_si128(c2[6824],simde_mm_xor_si128(c2[5706],simde_mm_xor_si128(c2[5123],simde_mm_xor_si128(c2[548],c2[4558]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
 
 //row: 44
-     d2[572]=_mm_xor_si128(c2[5726],_mm_xor_si128(c2[8],_mm_xor_si128(c2[8009],_mm_xor_si128(c2[8014],_mm_xor_si128(c2[2288],_mm_xor_si128(c2[5752],_mm_xor_si128(c2[8614],_mm_xor_si128(c2[33],_mm_xor_si128(c2[5778],_mm_xor_si128(c2[2919],_mm_xor_si128(c2[8639],_mm_xor_si128(c2[5804],_mm_xor_si128(c2[3511],_mm_xor_si128(c2[6378],_mm_xor_si128(c2[5830],_mm_xor_si128(c2[8116],_mm_xor_si128(c2[6396],_mm_xor_si128(c2[5856],_mm_xor_si128(c2[2425],_mm_xor_si128(c2[8144],_mm_xor_si128(c2[5882],_mm_xor_si128(c2[8738],_mm_xor_si128(c2[6457],_mm_xor_si128(c2[5908],_mm_xor_si128(c2[5911],_mm_xor_si128(c2[8769],_mm_xor_si128(c2[3614],_mm_xor_si128(c2[5934],_mm_xor_si128(c2[217],_mm_xor_si128(c2[215],_mm_xor_si128(c2[5960],_mm_xor_si128(c2[7104],_mm_xor_si128(c2[2523],_mm_xor_si128(c2[8250],_mm_xor_si128(c2[5986],_mm_xor_si128(c2[7701],_mm_xor_si128(c2[835],_mm_xor_si128(c2[6012],_mm_xor_si128(c2[6013],_mm_xor_si128(c2[7733],_mm_xor_si128(c2[6038],_mm_xor_si128(c2[3754],_mm_xor_si128(c2[2603],_mm_xor_si128(c2[6064],_mm_xor_si128(c2[3201],_mm_xor_si128(c2[922],_mm_xor_si128(c2[6090],_mm_xor_si128(c2[3804],_mm_xor_si128(c2[8950],_mm_xor_si128(c2[6116],_mm_xor_si128(c2[397],_mm_xor_si128(c2[8981],_mm_xor_si128(c2[6142],_mm_xor_si128(c2[5000],_mm_xor_si128(c2[3860],_mm_xor_si128(c2[6168],_mm_xor_si128(c2[5599],_mm_xor_si128(c2[4450],_mm_xor_si128(c2[6194],_mm_xor_si128(c2[7908],_mm_xor_si128(c2[1613],_mm_xor_si128(c2[6220],_mm_xor_si128(c2[1642],_mm_xor_si128(c2[2791],_mm_xor_si128(c2[6246],_mm_xor_si128(c2[7394],_mm_xor_si128(c2[7390],_mm_xor_si128(c2[6272],_mm_xor_si128(c2[5702],c2[5124])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+     d2[572]=simde_mm_xor_si128(c2[5726],simde_mm_xor_si128(c2[8],simde_mm_xor_si128(c2[8009],simde_mm_xor_si128(c2[8014],simde_mm_xor_si128(c2[2288],simde_mm_xor_si128(c2[5752],simde_mm_xor_si128(c2[8614],simde_mm_xor_si128(c2[33],simde_mm_xor_si128(c2[5778],simde_mm_xor_si128(c2[2919],simde_mm_xor_si128(c2[8639],simde_mm_xor_si128(c2[5804],simde_mm_xor_si128(c2[3511],simde_mm_xor_si128(c2[6378],simde_mm_xor_si128(c2[5830],simde_mm_xor_si128(c2[8116],simde_mm_xor_si128(c2[6396],simde_mm_xor_si128(c2[5856],simde_mm_xor_si128(c2[2425],simde_mm_xor_si128(c2[8144],simde_mm_xor_si128(c2[5882],simde_mm_xor_si128(c2[8738],simde_mm_xor_si128(c2[6457],simde_mm_xor_si128(c2[5908],simde_mm_xor_si128(c2[5911],simde_mm_xor_si128(c2[8769],simde_mm_xor_si128(c2[3614],simde_mm_xor_si128(c2[5934],simde_mm_xor_si128(c2[217],simde_mm_xor_si128(c2[215],simde_mm_xor_si128(c2[5960],simde_mm_xor_si128(c2[7104],simde_mm_xor_si128(c2[2523],simde_mm_xor_si128(c2[8250],simde_mm_xor_si128(c2[5986],simde_mm_xor_si128(c2[7701],simde_mm_xor_si128(c2[835],simde_mm_xor_si128(c2[6012],simde_mm_xor_si128(c2[6013],simde_mm_xor_si128(c2[7733],simde_mm_xor_si128(c2[6038],simde_mm_xor_si128(c2[3754],simde_mm_xor_si128(c2[2603],simde_mm_xor_si128(c2[6064],simde_mm_xor_si128(c2[3201],simde_mm_xor_si128(c2[922],simde_mm_xor_si128(c2[6090],simde_mm_xor_si128(c2[3804],simde_mm_xor_si128(c2[8950],simde_mm_xor_si128(c2[6116],simde_mm_xor_si128(c2[397],simde_mm_xor_si128(c2[8981],simde_mm_xor_si128(c2[6142],simde_mm_xor_si128(c2[5000],simde_mm_xor_si128(c2[3860],simde_mm_xor_si128(c2[6168],simde_mm_xor_si128(c2[5599],simde_mm_xor_si128(c2[4450],simde_mm_xor_si128(c2[6194],simde_mm_xor_si128(c2[7908],simde_mm_xor_si128(c2[1613],simde_mm_xor_si128(c2[6220],simde_mm_xor_si128(c2[1642],simde_mm_xor_si128(c2[2791],simde_mm_xor_si128(c2[6246],simde_mm_xor_si128(c2[7394],simde_mm_xor_si128(c2[7390],simde_mm_xor_si128(c2[6272],simde_mm_xor_si128(c2[5702],c2[5124])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
 
 //row: 45
-     d2[585]=_mm_xor_si128(c2[2897],_mm_xor_si128(c2[7594],c2[5417]));
+     d2[585]=simde_mm_xor_si128(c2[2897],simde_mm_xor_si128(c2[7594],c2[5417]));
   }
 }
diff --git a/openair1/PHY/CODING/nrLDPC_encoder/ldpc224_byte.c b/openair1/PHY/CODING/nrLDPC_encoder/ldpc224_byte.c
index 1a109e64f17609f7b2d506b085dc1228fc18705b..1a6f2264c2482fae658f5be59f0a7393d1bc7f98 100644
--- a/openair1/PHY/CODING/nrLDPC_encoder/ldpc224_byte.c
+++ b/openair1/PHY/CODING/nrLDPC_encoder/ldpc224_byte.c
@@ -1,9 +1,10 @@
+#ifdef __AVX2__
 #include "PHY/sse_intrin.h"
 // generated code for Zc=224, byte encoding
 static inline void ldpc224_byte(uint8_t *c,uint8_t *d) {
-  __m256i *csimd=(__m256i *)c,*dsimd=(__m256i *)d;
+  simde__m256i *csimd=(simde__m256i *)c,*dsimd=(simde__m256i *)d;
 
-  __m256i *c2,*d2;
+  simde__m256i *c2,*d2;
 
   int i2;
   for (i2=0; i2<7; i2++) {
@@ -149,3 +150,4 @@ static inline void ldpc224_byte(uint8_t *c,uint8_t *d) {
      d2[315]=simde_mm256_xor_si256(c2[7411],simde_mm256_xor_si256(c2[7787],c2[9072]));
   }
 }
+#endif
diff --git a/openair1/PHY/CODING/nrLDPC_encoder/ldpc224_byte_128.c b/openair1/PHY/CODING/nrLDPC_encoder/ldpc224_byte_128.c
new file mode 100644
index 0000000000000000000000000000000000000000..1972280d0dc0a8d03052feb082d70a6b0d09f292
--- /dev/null
+++ b/openair1/PHY/CODING/nrLDPC_encoder/ldpc224_byte_128.c
@@ -0,0 +1,153 @@
+#ifndef __AVX2__
+#include "PHY/sse_intrin.h"
+// generated code for Zc=224, byte encoding
+static inline void ldpc224_byte(uint8_t *c,uint8_t *d) {
+  simde__m128i *csimd=(simde__m128i *)c,*dsimd=(simde__m128i *)d;
+
+  simde__m128i *c2,*d2;
+
+  int i2;
+  for (i2=0; i2<14; i2++) {
+     c2=&csimd[i2];
+     d2=&dsimd[i2];
+
+//row: 0
+     d2[0]=simde_mm_xor_si128(c2[9253],simde_mm_xor_si128(c2[8016],simde_mm_xor_si128(c2[9252],simde_mm_xor_si128(c2[5556],simde_mm_xor_si128(c2[29],simde_mm_xor_si128(c2[6816],simde_mm_xor_si128(c2[1261],simde_mm_xor_si128(c2[8685],simde_mm_xor_si128(c2[8066],simde_mm_xor_si128(c2[9297],simde_mm_xor_si128(c2[6865],simde_mm_xor_si128(c2[4405],simde_mm_xor_si128(c2[3174],simde_mm_xor_si128(c2[8738],simde_mm_xor_si128(c2[123],simde_mm_xor_si128(c2[3192],simde_mm_xor_si128(c2[6304],simde_mm_xor_si128(c2[4459],simde_mm_xor_si128(c2[2615],simde_mm_xor_si128(c2[6328],simde_mm_xor_si128(c2[6339],simde_mm_xor_si128(c2[8178],simde_mm_xor_si128(c2[8213],simde_mm_xor_si128(c2[9441],simde_mm_xor_si128(c2[8828],simde_mm_xor_si128(c2[3312],simde_mm_xor_si128(c2[5777],simde_mm_xor_si128(c2[225],simde_mm_xor_si128(c2[252],simde_mm_xor_si128(c2[4569],simde_mm_xor_si128(c2[879],simde_mm_xor_si128(c2[8300],simde_mm_xor_si128(c2[3980],simde_mm_xor_si128(c2[1514],simde_mm_xor_si128(c2[5249],simde_mm_xor_si128(c2[8944],simde_mm_xor_si128(c2[7093],simde_mm_xor_si128(c2[3417],simde_mm_xor_si128(c2[8349],simde_mm_xor_si128(c2[3429],simde_mm_xor_si128(c2[4689],simde_mm_xor_si128(c2[8376],simde_mm_xor_si128(c2[2221],simde_mm_xor_si128(c2[9636],simde_mm_xor_si128(c2[4101],simde_mm_xor_si128(c2[3476],simde_mm_xor_si128(c2[9044],simde_mm_xor_si128(c2[5964],simde_mm_xor_si128(c2[8432],simde_mm_xor_si128(c2[4148],simde_mm_xor_si128(c2[4151],simde_mm_xor_si128(c2[454],simde_mm_xor_si128(c2[1720],simde_mm_xor_si128(c2[4184],simde_mm_xor_si128(c2[6640],simde_mm_xor_si128(c2[8520],simde_mm_xor_si128(c2[3591],simde_mm_xor_si128(c2[2358],simde_mm_xor_si128(c2[4240],simde_mm_xor_si128(c2[9773],simde_mm_xor_si128(c2[9785],simde_mm_xor_si128(c2[5494],simde_mm_xor_si128(c2[6725],simde_mm_xor_si128(c2[9185],simde_mm_xor_si128(c2[1209],simde_mm_xor_si128(c2[7375],c2[9221]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 1
+     d2[14]=simde_mm_xor_si128(c2[9253],simde_mm_xor_si128(c2[0],simde_mm_xor_si128(c2[8632],simde_mm_xor_si128(c2[13],simde_mm_xor_si128(c2[6172],simde_mm_xor_si128(c2[29],simde_mm_xor_si128(c2[645],simde_mm_xor_si128(c2[7432],simde_mm_xor_si128(c2[1877],simde_mm_xor_si128(c2[8685],simde_mm_xor_si128(c2[9301],simde_mm_xor_si128(c2[8682],simde_mm_xor_si128(c2[58],simde_mm_xor_si128(c2[6865],simde_mm_xor_si128(c2[7481],simde_mm_xor_si128(c2[5021],simde_mm_xor_si128(c2[3790],simde_mm_xor_si128(c2[9354],simde_mm_xor_si128(c2[739],simde_mm_xor_si128(c2[3808],simde_mm_xor_si128(c2[6304],simde_mm_xor_si128(c2[6920],simde_mm_xor_si128(c2[5075],simde_mm_xor_si128(c2[3231],simde_mm_xor_si128(c2[6328],simde_mm_xor_si128(c2[6944],simde_mm_xor_si128(c2[6955],simde_mm_xor_si128(c2[8794],simde_mm_xor_si128(c2[8829],simde_mm_xor_si128(c2[202],simde_mm_xor_si128(c2[9444],simde_mm_xor_si128(c2[3928],simde_mm_xor_si128(c2[6393],simde_mm_xor_si128(c2[841],simde_mm_xor_si128(c2[252],simde_mm_xor_si128(c2[868],simde_mm_xor_si128(c2[5185],simde_mm_xor_si128(c2[1495],simde_mm_xor_si128(c2[8300],simde_mm_xor_si128(c2[8916],simde_mm_xor_si128(c2[4596],simde_mm_xor_si128(c2[2130],simde_mm_xor_si128(c2[5249],simde_mm_xor_si128(c2[5865],simde_mm_xor_si128(c2[9560],simde_mm_xor_si128(c2[7709],simde_mm_xor_si128(c2[3417],simde_mm_xor_si128(c2[4033],simde_mm_xor_si128(c2[8965],simde_mm_xor_si128(c2[4045],simde_mm_xor_si128(c2[4689],simde_mm_xor_si128(c2[5305],simde_mm_xor_si128(c2[8992],simde_mm_xor_si128(c2[2837],simde_mm_xor_si128(c2[397],simde_mm_xor_si128(c2[4717],simde_mm_xor_si128(c2[4092],simde_mm_xor_si128(c2[9044],simde_mm_xor_si128(c2[9660],simde_mm_xor_si128(c2[6580],simde_mm_xor_si128(c2[9048],simde_mm_xor_si128(c2[4148],simde_mm_xor_si128(c2[4764],simde_mm_xor_si128(c2[4767],simde_mm_xor_si128(c2[1070],simde_mm_xor_si128(c2[2336],simde_mm_xor_si128(c2[4800],simde_mm_xor_si128(c2[7256],simde_mm_xor_si128(c2[8520],simde_mm_xor_si128(c2[9136],simde_mm_xor_si128(c2[4207],simde_mm_xor_si128(c2[2974],simde_mm_xor_si128(c2[4240],simde_mm_xor_si128(c2[4856],simde_mm_xor_si128(c2[534],simde_mm_xor_si128(c2[532],simde_mm_xor_si128(c2[5494],simde_mm_xor_si128(c2[6110],simde_mm_xor_si128(c2[7341],simde_mm_xor_si128(c2[9801],simde_mm_xor_si128(c2[1209],simde_mm_xor_si128(c2[1825],simde_mm_xor_si128(c2[7991],c2[9837])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 2
+     d2[28]=simde_mm_xor_si128(c2[0],simde_mm_xor_si128(c2[8632],simde_mm_xor_si128(c2[9252],simde_mm_xor_si128(c2[13],simde_mm_xor_si128(c2[5556],simde_mm_xor_si128(c2[6172],simde_mm_xor_si128(c2[645],simde_mm_xor_si128(c2[6816],simde_mm_xor_si128(c2[7432],simde_mm_xor_si128(c2[1261],simde_mm_xor_si128(c2[1877],simde_mm_xor_si128(c2[9301],simde_mm_xor_si128(c2[8682],simde_mm_xor_si128(c2[9297],simde_mm_xor_si128(c2[58],simde_mm_xor_si128(c2[7481],simde_mm_xor_si128(c2[5021],simde_mm_xor_si128(c2[3174],simde_mm_xor_si128(c2[3790],simde_mm_xor_si128(c2[9354],simde_mm_xor_si128(c2[123],simde_mm_xor_si128(c2[739],simde_mm_xor_si128(c2[3192],simde_mm_xor_si128(c2[3808],simde_mm_xor_si128(c2[6920],simde_mm_xor_si128(c2[5075],simde_mm_xor_si128(c2[2615],simde_mm_xor_si128(c2[3231],simde_mm_xor_si128(c2[6944],simde_mm_xor_si128(c2[6339],simde_mm_xor_si128(c2[6955],simde_mm_xor_si128(c2[8178],simde_mm_xor_si128(c2[8794],simde_mm_xor_si128(c2[8829],simde_mm_xor_si128(c2[9441],simde_mm_xor_si128(c2[202],simde_mm_xor_si128(c2[8828],simde_mm_xor_si128(c2[9444],simde_mm_xor_si128(c2[3928],simde_mm_xor_si128(c2[5777],simde_mm_xor_si128(c2[6393],simde_mm_xor_si128(c2[225],simde_mm_xor_si128(c2[841],simde_mm_xor_si128(c2[868],simde_mm_xor_si128(c2[5185],simde_mm_xor_si128(c2[879],simde_mm_xor_si128(c2[1495],simde_mm_xor_si128(c2[8916],simde_mm_xor_si128(c2[3980],simde_mm_xor_si128(c2[4596],simde_mm_xor_si128(c2[1514],simde_mm_xor_si128(c2[2130],simde_mm_xor_si128(c2[5865],simde_mm_xor_si128(c2[9560],simde_mm_xor_si128(c2[7093],simde_mm_xor_si128(c2[7709],simde_mm_xor_si128(c2[4033],simde_mm_xor_si128(c2[8965],simde_mm_xor_si128(c2[3429],simde_mm_xor_si128(c2[4045],simde_mm_xor_si128(c2[5305],simde_mm_xor_si128(c2[8376],simde_mm_xor_si128(c2[8992],simde_mm_xor_si128(c2[2221],simde_mm_xor_si128(c2[2837],simde_mm_xor_si128(c2[397],simde_mm_xor_si128(c2[4101],simde_mm_xor_si128(c2[4717],simde_mm_xor_si128(c2[3476],simde_mm_xor_si128(c2[4092],simde_mm_xor_si128(c2[9660],simde_mm_xor_si128(c2[6580],simde_mm_xor_si128(c2[8432],simde_mm_xor_si128(c2[9048],simde_mm_xor_si128(c2[4764],simde_mm_xor_si128(c2[4767],simde_mm_xor_si128(c2[454],simde_mm_xor_si128(c2[1070],simde_mm_xor_si128(c2[2336],simde_mm_xor_si128(c2[4184],simde_mm_xor_si128(c2[4800],simde_mm_xor_si128(c2[6640],simde_mm_xor_si128(c2[7256],simde_mm_xor_si128(c2[9136],simde_mm_xor_si128(c2[3591],simde_mm_xor_si128(c2[4207],simde_mm_xor_si128(c2[2358],simde_mm_xor_si128(c2[2974],simde_mm_xor_si128(c2[4856],simde_mm_xor_si128(c2[534],simde_mm_xor_si128(c2[9785],simde_mm_xor_si128(c2[532],simde_mm_xor_si128(c2[6110],simde_mm_xor_si128(c2[6725],simde_mm_xor_si128(c2[7341],simde_mm_xor_si128(c2[9185],simde_mm_xor_si128(c2[9801],simde_mm_xor_si128(c2[1825],simde_mm_xor_si128(c2[7991],simde_mm_xor_si128(c2[9221],c2[9837]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 3
+     d2[42]=simde_mm_xor_si128(c2[0],simde_mm_xor_si128(c2[8632],simde_mm_xor_si128(c2[13],simde_mm_xor_si128(c2[5556],simde_mm_xor_si128(c2[6172],simde_mm_xor_si128(c2[645],simde_mm_xor_si128(c2[7432],simde_mm_xor_si128(c2[1261],simde_mm_xor_si128(c2[1877],simde_mm_xor_si128(c2[9301],simde_mm_xor_si128(c2[8682],simde_mm_xor_si128(c2[58],simde_mm_xor_si128(c2[7481],simde_mm_xor_si128(c2[5021],simde_mm_xor_si128(c2[3174],simde_mm_xor_si128(c2[3790],simde_mm_xor_si128(c2[9354],simde_mm_xor_si128(c2[739],simde_mm_xor_si128(c2[3192],simde_mm_xor_si128(c2[3808],simde_mm_xor_si128(c2[6920],simde_mm_xor_si128(c2[5075],simde_mm_xor_si128(c2[3231],simde_mm_xor_si128(c2[6944],simde_mm_xor_si128(c2[6955],simde_mm_xor_si128(c2[8178],simde_mm_xor_si128(c2[8794],simde_mm_xor_si128(c2[8829],simde_mm_xor_si128(c2[202],simde_mm_xor_si128(c2[8828],simde_mm_xor_si128(c2[9444],simde_mm_xor_si128(c2[3928],simde_mm_xor_si128(c2[6393],simde_mm_xor_si128(c2[225],simde_mm_xor_si128(c2[841],simde_mm_xor_si128(c2[868],simde_mm_xor_si128(c2[5185],simde_mm_xor_si128(c2[1495],simde_mm_xor_si128(c2[8916],simde_mm_xor_si128(c2[4596],simde_mm_xor_si128(c2[1514],simde_mm_xor_si128(c2[2130],simde_mm_xor_si128(c2[5865],simde_mm_xor_si128(c2[9560],simde_mm_xor_si128(c2[7093],simde_mm_xor_si128(c2[7709],simde_mm_xor_si128(c2[4033],simde_mm_xor_si128(c2[8965],simde_mm_xor_si128(c2[3429],simde_mm_xor_si128(c2[4045],simde_mm_xor_si128(c2[5305],simde_mm_xor_si128(c2[8992],simde_mm_xor_si128(c2[2221],simde_mm_xor_si128(c2[2837],simde_mm_xor_si128(c2[397],simde_mm_xor_si128(c2[4717],simde_mm_xor_si128(c2[3476],simde_mm_xor_si128(c2[4092],simde_mm_xor_si128(c2[9660],simde_mm_xor_si128(c2[6580],simde_mm_xor_si128(c2[9048],simde_mm_xor_si128(c2[4764],simde_mm_xor_si128(c2[4767],simde_mm_xor_si128(c2[454],simde_mm_xor_si128(c2[1070],simde_mm_xor_si128(c2[2336],simde_mm_xor_si128(c2[4800],simde_mm_xor_si128(c2[6640],simde_mm_xor_si128(c2[7256],simde_mm_xor_si128(c2[9136],simde_mm_xor_si128(c2[4207],simde_mm_xor_si128(c2[2358],simde_mm_xor_si128(c2[2974],simde_mm_xor_si128(c2[4856],simde_mm_xor_si128(c2[534],simde_mm_xor_si128(c2[532],simde_mm_xor_si128(c2[6110],simde_mm_xor_si128(c2[7341],simde_mm_xor_si128(c2[9185],simde_mm_xor_si128(c2[9801],simde_mm_xor_si128(c2[1825],simde_mm_xor_si128(c2[7991],simde_mm_xor_si128(c2[9221],c2[9837])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 4
+     d2[56]=simde_mm_xor_si128(c2[6170],c2[6188]);
+
+//row: 5
+     d2[70]=simde_mm_xor_si128(c2[4937],simde_mm_xor_si128(c2[3700],simde_mm_xor_si128(c2[4936],simde_mm_xor_si128(c2[1240],simde_mm_xor_si128(c2[2474],simde_mm_xor_si128(c2[5582],simde_mm_xor_si128(c2[2500],simde_mm_xor_si128(c2[6814],simde_mm_xor_si128(c2[6807],simde_mm_xor_si128(c2[4369],simde_mm_xor_si128(c2[3764],simde_mm_xor_si128(c2[4995],simde_mm_xor_si128(c2[2549],simde_mm_xor_si128(c2[89],simde_mm_xor_si128(c2[8713],simde_mm_xor_si128(c2[3785],simde_mm_xor_si128(c2[4436],simde_mm_xor_si128(c2[5662],simde_mm_xor_si128(c2[8745],simde_mm_xor_si128(c2[1988],simde_mm_xor_si128(c2[143],simde_mm_xor_si128(c2[8154],simde_mm_xor_si128(c2[2026],simde_mm_xor_si128(c2[2023],simde_mm_xor_si128(c2[3876],simde_mm_xor_si128(c2[3897],simde_mm_xor_si128(c2[5125],simde_mm_xor_si128(c2[4512],simde_mm_xor_si128(c2[8851],simde_mm_xor_si128(c2[1461],simde_mm_xor_si128(c2[5778],simde_mm_xor_si128(c2[5805],simde_mm_xor_si128(c2[253],simde_mm_xor_si128(c2[6418],simde_mm_xor_si128(c2[3984],simde_mm_xor_si128(c2[9533],simde_mm_xor_si128(c2[7067],simde_mm_xor_si128(c2[933],simde_mm_xor_si128(c2[4628],simde_mm_xor_si128(c2[2777],simde_mm_xor_si128(c2[8970],simde_mm_xor_si128(c2[4033],simde_mm_xor_si128(c2[8968],simde_mm_xor_si128(c2[341],simde_mm_xor_si128(c2[373],simde_mm_xor_si128(c2[4060],simde_mm_xor_si128(c2[7760],simde_mm_xor_si128(c2[5320],simde_mm_xor_si128(c2[9640],simde_mm_xor_si128(c2[9029],simde_mm_xor_si128(c2[4742],simde_mm_xor_si128(c2[1662],simde_mm_xor_si128(c2[4116],simde_mm_xor_si128(c2[9701],simde_mm_xor_si128(c2[9690],simde_mm_xor_si128(c2[5993],simde_mm_xor_si128(c2[4155],simde_mm_xor_si128(c2[7259],simde_mm_xor_si128(c2[9723],simde_mm_xor_si128(c2[2324],simde_mm_xor_si128(c2[4204],simde_mm_xor_si128(c2[9130],simde_mm_xor_si128(c2[7897],simde_mm_xor_si128(c2[9779],simde_mm_xor_si128(c2[5471],simde_mm_xor_si128(c2[5469],simde_mm_xor_si128(c2[1178],simde_mm_xor_si128(c2[2409],simde_mm_xor_si128(c2[4883],simde_mm_xor_si128(c2[6748],simde_mm_xor_si128(c2[3059],simde_mm_xor_si128(c2[4905],c2[1828]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 6
+     d2[84]=simde_mm_xor_si128(c2[8633],simde_mm_xor_si128(c2[4487],simde_mm_xor_si128(c2[903],simde_mm_xor_si128(c2[3389],simde_mm_xor_si128(c2[9607],simde_mm_xor_si128(c2[2327],simde_mm_xor_si128(c2[5440],c2[3031])))))));
+
+//row: 7
+     d2[98]=simde_mm_xor_si128(c2[617],simde_mm_xor_si128(c2[7424],simde_mm_xor_si128(c2[5046],simde_mm_xor_si128(c2[2666],simde_mm_xor_si128(c2[3929],c2[9025])))));
+
+//row: 8
+     d2[112]=simde_mm_xor_si128(c2[6778],simde_mm_xor_si128(c2[5556],simde_mm_xor_si128(c2[5555],simde_mm_xor_si128(c2[4319],simde_mm_xor_si128(c2[6777],simde_mm_xor_si128(c2[4939],simde_mm_xor_si128(c2[5555],simde_mm_xor_si128(c2[3081],simde_mm_xor_si128(c2[1243],simde_mm_xor_si128(c2[1859],simde_mm_xor_si128(c2[618],simde_mm_xor_si128(c2[7423],simde_mm_xor_si128(c2[6201],simde_mm_xor_si128(c2[4341],simde_mm_xor_si128(c2[2503],simde_mm_xor_si128(c2[3119],simde_mm_xor_si128(c2[8655],simde_mm_xor_si128(c2[6817],simde_mm_xor_si128(c2[7433],simde_mm_xor_si128(c2[9273],simde_mm_xor_si128(c2[6224],simde_mm_xor_si128(c2[4988],simde_mm_xor_si128(c2[5605],simde_mm_xor_si128(c2[4369],simde_mm_xor_si128(c2[6836],simde_mm_xor_si128(c2[4984],simde_mm_xor_si128(c2[5600],simde_mm_xor_si128(c2[4404],simde_mm_xor_si128(c2[3168],simde_mm_xor_si128(c2[1944],simde_mm_xor_si128(c2[708],simde_mm_xor_si128(c2[713],simde_mm_xor_si128(c2[8716],simde_mm_xor_si128(c2[9332],simde_mm_xor_si128(c2[2548],simde_mm_xor_si128(c2[6277],simde_mm_xor_si128(c2[5041],simde_mm_xor_si128(c2[7517],simde_mm_xor_si128(c2[5665],simde_mm_xor_si128(c2[6281],simde_mm_xor_si128(c2[731],simde_mm_xor_si128(c2[8748],simde_mm_xor_si128(c2[9364],simde_mm_xor_si128(c2[3843],simde_mm_xor_si128(c2[2607],simde_mm_xor_si128(c2[1998],simde_mm_xor_si128(c2[762],simde_mm_xor_si128(c2[140],simde_mm_xor_si128(c2[8157],simde_mm_xor_si128(c2[8773],simde_mm_xor_si128(c2[3867],simde_mm_xor_si128(c2[2645],simde_mm_xor_si128(c2[3864],simde_mm_xor_si128(c2[2026],simde_mm_xor_si128(c2[2642],simde_mm_xor_si128(c2[5717],simde_mm_xor_si128(c2[3865],simde_mm_xor_si128(c2[4481],simde_mm_xor_si128(c2[5752],simde_mm_xor_si128(c2[4516],simde_mm_xor_si128(c2[6980],simde_mm_xor_si128(c2[5128],simde_mm_xor_si128(c2[5744],simde_mm_xor_si128(c2[6367],simde_mm_xor_si128(c2[4515],simde_mm_xor_si128(c2[5131],simde_mm_xor_si128(c2[851],simde_mm_xor_si128(c2[9470],simde_mm_xor_si128(c2[3316],simde_mm_xor_si128(c2[1464],simde_mm_xor_si128(c2[2080],simde_mm_xor_si128(c2[7619],simde_mm_xor_si128(c2[5781],simde_mm_xor_si128(c2[6397],simde_mm_xor_si128(c2[7646],simde_mm_xor_si128(c2[6424],simde_mm_xor_si128(c2[2108],simde_mm_xor_si128(c2[872],simde_mm_xor_si128(c2[8273],simde_mm_xor_si128(c2[6421],simde_mm_xor_si128(c2[7037],simde_mm_xor_si128(c2[5825],simde_mm_xor_si128(c2[4603],simde_mm_xor_si128(c2[1519],simde_mm_xor_si128(c2[9522],simde_mm_xor_si128(c2[283],simde_mm_xor_si128(c2[8908],simde_mm_xor_si128(c2[7056],simde_mm_xor_si128(c2[7672],simde_mm_xor_si128(c2[2774],simde_mm_xor_si128(c2[1552],simde_mm_xor_si128(c2[6469],simde_mm_xor_si128(c2[5247],simde_mm_xor_si128(c2[4632],simde_mm_xor_si128(c2[2780],simde_mm_xor_si128(c2[3396],simde_mm_xor_si128(c2[956],simde_mm_xor_si128(c2[9589],simde_mm_xor_si128(c2[5888],simde_mm_xor_si128(c2[4652],simde_mm_xor_si128(c2[954],simde_mm_xor_si128(c2[8971],simde_mm_xor_si128(c2[9587],simde_mm_xor_si128(c2[5893],simde_mm_xor_si128(c2[2214],simde_mm_xor_si128(c2[992],simde_mm_xor_si128(c2[5915],simde_mm_xor_si128(c2[4063],simde_mm_xor_si128(c2[4679],simde_mm_xor_si128(c2[9615],simde_mm_xor_si128(c2[7763],simde_mm_xor_si128(c2[8379],simde_mm_xor_si128(c2[7175],simde_mm_xor_si128(c2[5939],simde_mm_xor_si128(c2[1626],simde_mm_xor_si128(c2[9643],simde_mm_xor_si128(c2[404],simde_mm_xor_si128(c2[1015],simde_mm_xor_si128(c2[9018],simde_mm_xor_si128(c2[9634],simde_mm_xor_si128(c2[6583],simde_mm_xor_si128(c2[5361],simde_mm_xor_si128(c2[3503],simde_mm_xor_si128(c2[2281],simde_mm_xor_si128(c2[5971],simde_mm_xor_si128(c2[4119],simde_mm_xor_si128(c2[4735],simde_mm_xor_si128(c2[1687],simde_mm_xor_si128(c2[451],simde_mm_xor_si128(c2[1690],simde_mm_xor_si128(c2[454],simde_mm_xor_si128(c2[7848],simde_mm_xor_si128(c2[5996],simde_mm_xor_si128(c2[6612],simde_mm_xor_si128(c2[7852],simde_mm_xor_si128(c2[9100],simde_mm_xor_si128(c2[7878],simde_mm_xor_si128(c2[1709],simde_mm_xor_si128(c2[9726],simde_mm_xor_si128(c2[487],simde_mm_xor_si128(c2[4179],simde_mm_xor_si128(c2[2327],simde_mm_xor_si128(c2[2943],simde_mm_xor_si128(c2[6059],simde_mm_xor_si128(c2[4823],simde_mm_xor_si128(c2[1130],simde_mm_xor_si128(c2[9133],simde_mm_xor_si128(c2[9749],simde_mm_xor_si128(c2[9752],simde_mm_xor_si128(c2[7900],simde_mm_xor_si128(c2[8516],simde_mm_xor_si128(c2[1765],simde_mm_xor_si128(c2[543],simde_mm_xor_si128(c2[7312],simde_mm_xor_si128(c2[6076],simde_mm_xor_si128(c2[7310],simde_mm_xor_si128(c2[5472],simde_mm_xor_si128(c2[6088],simde_mm_xor_si128(c2[4846],simde_mm_xor_si128(c2[3033],simde_mm_xor_si128(c2[1797],simde_mm_xor_si128(c2[4264],simde_mm_xor_si128(c2[2412],simde_mm_xor_si128(c2[3028],simde_mm_xor_si128(c2[6724],simde_mm_xor_si128(c2[4872],simde_mm_xor_si128(c2[5488],simde_mm_xor_si128(c2[8603],simde_mm_xor_si128(c2[7367],simde_mm_xor_si128(c2[4900],simde_mm_xor_si128(c2[3678],simde_mm_xor_si128(c2[6760],simde_mm_xor_si128(c2[4908],simde_mm_xor_si128(c2[5524],c2[6751]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 9
+     d2[126]=simde_mm_xor_si128(c2[5544],simde_mm_xor_si128(c2[3110],simde_mm_xor_si128(c2[3373],simde_mm_xor_si128(c2[5858],simde_mm_xor_si128(c2[5913],simde_mm_xor_si128(c2[6031],simde_mm_xor_si128(c2[8518],c2[6733])))))));
+
+//row: 10
+     d2[140]=simde_mm_xor_si128(c2[1265],simde_mm_xor_si128(c2[3146],simde_mm_xor_si128(c2[8746],simde_mm_xor_si128(c2[2045],simde_mm_xor_si128(c2[1468],c2[4710])))));
+
+//row: 11
+     d2[154]=simde_mm_xor_si128(c2[1853],simde_mm_xor_si128(c2[4940],simde_mm_xor_si128(c2[5556],simde_mm_xor_si128(c2[616],simde_mm_xor_si128(c2[4319],simde_mm_xor_si128(c2[1852],simde_mm_xor_si128(c2[5555],simde_mm_xor_si128(c2[8011],simde_mm_xor_si128(c2[1859],simde_mm_xor_si128(c2[2467],simde_mm_xor_si128(c2[2498],simde_mm_xor_si128(c2[5585],simde_mm_xor_si128(c2[6201],simde_mm_xor_si128(c2[9271],simde_mm_xor_si128(c2[3119],simde_mm_xor_si128(c2[3730],simde_mm_xor_si128(c2[7433],simde_mm_xor_si128(c2[6804],simde_mm_xor_si128(c2[1299],simde_mm_xor_si128(c2[4372],simde_mm_xor_si128(c2[4988],simde_mm_xor_si128(c2[680],simde_mm_xor_si128(c2[4369],simde_mm_xor_si128(c2[1911],simde_mm_xor_si128(c2[5600],simde_mm_xor_si128(c2[9334],simde_mm_xor_si128(c2[2552],simde_mm_xor_si128(c2[3168],simde_mm_xor_si128(c2[6860],simde_mm_xor_si128(c2[708],simde_mm_xor_si128(c2[5629],simde_mm_xor_si128(c2[9332],simde_mm_xor_si128(c2[1352],simde_mm_xor_si128(c2[5041],simde_mm_xor_si128(c2[2578],simde_mm_xor_si128(c2[6281],simde_mm_xor_si128(c2[5661],simde_mm_xor_si128(c2[9364],simde_mm_xor_si128(c2[8773],simde_mm_xor_si128(c2[1991],simde_mm_xor_si128(c2[2607],simde_mm_xor_si128(c2[6928],simde_mm_xor_si128(c2[762],simde_mm_xor_si128(c2[5070],simde_mm_xor_si128(c2[8773],simde_mm_xor_si128(c2[8797],simde_mm_xor_si128(c2[2029],simde_mm_xor_si128(c2[2645],simde_mm_xor_si128(c2[8794],simde_mm_xor_si128(c2[2642],simde_mm_xor_si128(c2[792],simde_mm_xor_si128(c2[4481],simde_mm_xor_si128(c2[813],simde_mm_xor_si128(c2[4516],simde_mm_xor_si128(c2[2055],simde_mm_xor_si128(c2[5744],simde_mm_xor_si128(c2[1428],simde_mm_xor_si128(c2[5131],simde_mm_xor_si128(c2[5781],simde_mm_xor_si128(c2[9470],simde_mm_xor_si128(c2[8232],simde_mm_xor_si128(c2[2080],simde_mm_xor_si128(c2[2694],simde_mm_xor_si128(c2[6397],simde_mm_xor_si128(c2[2721],simde_mm_xor_si128(c2[5808],simde_mm_xor_si128(c2[6424],simde_mm_xor_si128(c2[7038],simde_mm_xor_si128(c2[872],simde_mm_xor_si128(c2[3334],simde_mm_xor_si128(c2[7037],simde_mm_xor_si128(c2[900],simde_mm_xor_si128(c2[3987],simde_mm_xor_si128(c2[4603],simde_mm_xor_si128(c2[6449],simde_mm_xor_si128(c2[283],simde_mm_xor_si128(c2[3983],simde_mm_xor_si128(c2[7672],simde_mm_xor_si128(c2[7704],simde_mm_xor_si128(c2[936],simde_mm_xor_si128(c2[1552],simde_mm_xor_si128(c2[1544],simde_mm_xor_si128(c2[5247],simde_mm_xor_si128(c2[9548],simde_mm_xor_si128(c2[3396],simde_mm_xor_si128(c2[5886],simde_mm_xor_si128(c2[8973],simde_mm_xor_si128(c2[9589],simde_mm_xor_si128(c2[963],simde_mm_xor_si128(c2[4652],simde_mm_xor_si128(c2[5884],simde_mm_xor_si128(c2[9587],simde_mm_xor_si128(c2[1568],simde_mm_xor_si128(c2[7144],simde_mm_xor_si128(c2[376],simde_mm_xor_si128(c2[992],simde_mm_xor_si128(c2[990],simde_mm_xor_si128(c2[4679],simde_mm_xor_si128(c2[4676],simde_mm_xor_si128(c2[8379],simde_mm_xor_si128(c2[2250],simde_mm_xor_si128(c2[5939],simde_mm_xor_si128(c2[6556],simde_mm_xor_si128(c2[404],simde_mm_xor_si128(c2[5945],simde_mm_xor_si128(c2[9634],simde_mm_xor_si128(c2[1658],simde_mm_xor_si128(c2[4745],simde_mm_xor_si128(c2[5361],simde_mm_xor_si128(c2[8433],simde_mm_xor_si128(c2[2281],simde_mm_xor_si128(c2[1046],simde_mm_xor_si128(c2[4735],simde_mm_xor_si128(c2[6617],simde_mm_xor_si128(c2[9690],simde_mm_xor_si128(c2[451],simde_mm_xor_si128(c2[6620],simde_mm_xor_si128(c2[454],simde_mm_xor_si128(c2[2923],simde_mm_xor_si128(c2[6612],simde_mm_xor_si128(c2[2298],simde_mm_xor_si128(c2[4175],simde_mm_xor_si128(c2[7878],simde_mm_xor_si128(c2[6639],simde_mm_xor_si128(c2[487],simde_mm_xor_si128(c2[9109],simde_mm_xor_si128(c2[2943],simde_mm_xor_si128(c2[1120],simde_mm_xor_si128(c2[4207],simde_mm_xor_si128(c2[4823],simde_mm_xor_si128(c2[6060],simde_mm_xor_si128(c2[9749],simde_mm_xor_si128(c2[4827],simde_mm_xor_si128(c2[8516],simde_mm_xor_si128(c2[6695],simde_mm_xor_si128(c2[9782],simde_mm_xor_si128(c2[543],simde_mm_xor_si128(c2[2387],simde_mm_xor_si128(c2[6076],simde_mm_xor_si128(c2[2385],simde_mm_xor_si128(c2[6088],simde_mm_xor_si128(c2[7963],simde_mm_xor_si128(c2[1181],simde_mm_xor_si128(c2[1797],simde_mm_xor_si128(c2[9194],simde_mm_xor_si128(c2[3028],simde_mm_xor_si128(c2[1799],simde_mm_xor_si128(c2[5488],simde_mm_xor_si128(c2[3678],simde_mm_xor_si128(c2[6751],simde_mm_xor_si128(c2[7367],simde_mm_xor_si128(c2[9830],simde_mm_xor_si128(c2[3678],simde_mm_xor_si128(c2[1821],simde_mm_xor_si128(c2[5524],c2[590])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 12
+     d2[168]=simde_mm_xor_si128(c2[8632],simde_mm_xor_si128(c2[9278],simde_mm_xor_si128(c2[5216],simde_mm_xor_si128(c2[2156],simde_mm_xor_si128(c2[7757],c2[4211])))));
+
+//row: 13
+     d2[182]=simde_mm_xor_si128(c2[8012],simde_mm_xor_si128(c2[8628],simde_mm_xor_si128(c2[7405],simde_mm_xor_si128(c2[8627],simde_mm_xor_si128(c2[4931],simde_mm_xor_si128(c2[621],simde_mm_xor_si128(c2[8657],simde_mm_xor_si128(c2[9273],simde_mm_xor_si128(c2[6191],simde_mm_xor_si128(c2[650],simde_mm_xor_si128(c2[7458],simde_mm_xor_si128(c2[8074],simde_mm_xor_si128(c2[7455],simde_mm_xor_si128(c2[8686],simde_mm_xor_si128(c2[5638],simde_mm_xor_si128(c2[6254],simde_mm_xor_si128(c2[3780],simde_mm_xor_si128(c2[2549],simde_mm_xor_si128(c2[5015],simde_mm_xor_si128(c2[8127],simde_mm_xor_si128(c2[9353],simde_mm_xor_si128(c2[2581],simde_mm_xor_si128(c2[5077],simde_mm_xor_si128(c2[5693],simde_mm_xor_si128(c2[3848],simde_mm_xor_si128(c2[1990],simde_mm_xor_si128(c2[5101],simde_mm_xor_si128(c2[5717],simde_mm_xor_si128(c2[5714],simde_mm_xor_si128(c2[7567],simde_mm_xor_si128(c2[7588],simde_mm_xor_si128(c2[8830],simde_mm_xor_si128(c2[8217],simde_mm_xor_si128(c2[5128],simde_mm_xor_si128(c2[2701],simde_mm_xor_si128(c2[5152],simde_mm_xor_si128(c2[9469],simde_mm_xor_si128(c2[8880],simde_mm_xor_si128(c2[9496],simde_mm_xor_si128(c2[3958],simde_mm_xor_si128(c2[254],simde_mm_xor_si128(c2[7059],simde_mm_xor_si128(c2[7675],simde_mm_xor_si128(c2[3369],simde_mm_xor_si128(c2[903],simde_mm_xor_si128(c2[4008],simde_mm_xor_si128(c2[4624],simde_mm_xor_si128(c2[8319],simde_mm_xor_si128(c2[6468],simde_mm_xor_si128(c2[2190],simde_mm_xor_si128(c2[2806],simde_mm_xor_si128(c2[7738],simde_mm_xor_si128(c2[2804],simde_mm_xor_si128(c2[3448],simde_mm_xor_si128(c2[4064],simde_mm_xor_si128(c2[7765],simde_mm_xor_si128(c2[1596],simde_mm_xor_si128(c2[9025],simde_mm_xor_si128(c2[3476],simde_mm_xor_si128(c2[2865],simde_mm_xor_si128(c2[7817],simde_mm_xor_si128(c2[8433],simde_mm_xor_si128(c2[5353],simde_mm_xor_si128(c2[7821],simde_mm_xor_si128(c2[2921],simde_mm_xor_si128(c2[3537],simde_mm_xor_si128(c2[3540],simde_mm_xor_si128(c2[9698],simde_mm_xor_si128(c2[1095],simde_mm_xor_si128(c2[3559],simde_mm_xor_si128(c2[6029],simde_mm_xor_si128(c2[7293],simde_mm_xor_si128(c2[7909],simde_mm_xor_si128(c2[2980],simde_mm_xor_si128(c2[1747],simde_mm_xor_si128(c2[2999],simde_mm_xor_si128(c2[3615],simde_mm_xor_si128(c2[9162],simde_mm_xor_si128(c2[9160],simde_mm_xor_si128(c2[4267],simde_mm_xor_si128(c2[4883],simde_mm_xor_si128(c2[6114],simde_mm_xor_si128(c2[8574],simde_mm_xor_si128(c2[6117],simde_mm_xor_si128(c2[9837],simde_mm_xor_si128(c2[598],simde_mm_xor_si128(c2[6750],c2[8596])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 14
+     d2[196]=simde_mm_xor_si128(c2[8624],simde_mm_xor_si128(c2[9586],simde_mm_xor_si128(c2[2281],simde_mm_xor_si128(c2[9699],simde_mm_xor_si128(c2[2327],c2[7366])))));
+
+//row: 15
+     d2[210]=simde_mm_xor_si128(c2[621],simde_mm_xor_si128(c2[9253],simde_mm_xor_si128(c2[620],simde_mm_xor_si128(c2[6163],simde_mm_xor_si128(c2[6779],simde_mm_xor_si128(c2[6165],simde_mm_xor_si128(c2[1266],simde_mm_xor_si128(c2[8039],simde_mm_xor_si128(c2[1882],simde_mm_xor_si128(c2[2498],simde_mm_xor_si128(c2[4963],simde_mm_xor_si128(c2[67],simde_mm_xor_si128(c2[9303],simde_mm_xor_si128(c2[679],simde_mm_xor_si128(c2[8102],simde_mm_xor_si128(c2[5628],simde_mm_xor_si128(c2[3781],simde_mm_xor_si128(c2[4397],simde_mm_xor_si128(c2[120],simde_mm_xor_si128(c2[1346],simde_mm_xor_si128(c2[3813],simde_mm_xor_si128(c2[4429],simde_mm_xor_si128(c2[7541],simde_mm_xor_si128(c2[5696],simde_mm_xor_si128(c2[3838],simde_mm_xor_si128(c2[7565],simde_mm_xor_si128(c2[7562],simde_mm_xor_si128(c2[8799],simde_mm_xor_si128(c2[9415],simde_mm_xor_si128(c2[9436],simde_mm_xor_si128(c2[823],simde_mm_xor_si128(c2[9449],simde_mm_xor_si128(c2[196],simde_mm_xor_si128(c2[4549],simde_mm_xor_si128(c2[7000],simde_mm_xor_si128(c2[846],simde_mm_xor_si128(c2[1462],simde_mm_xor_si128(c2[1489],simde_mm_xor_si128(c2[5806],simde_mm_xor_si128(c2[2102],simde_mm_xor_si128(c2[9523],simde_mm_xor_si128(c2[5217],simde_mm_xor_si128(c2[2135],simde_mm_xor_si128(c2[2751],simde_mm_xor_si128(c2[2136],simde_mm_xor_si128(c2[6472],simde_mm_xor_si128(c2[312],simde_mm_xor_si128(c2[7700],simde_mm_xor_si128(c2[8316],simde_mm_xor_si128(c2[4654],simde_mm_xor_si128(c2[9586],simde_mm_xor_si128(c2[4036],simde_mm_xor_si128(c2[4652],simde_mm_xor_si128(c2[5912],simde_mm_xor_si128(c2[9613],simde_mm_xor_si128(c2[2828],simde_mm_xor_si128(c2[3444],simde_mm_xor_si128(c2[993],simde_mm_xor_si128(c2[1018],simde_mm_xor_si128(c2[5324],simde_mm_xor_si128(c2[4097],simde_mm_xor_si128(c2[4713],simde_mm_xor_si128(c2[426],simde_mm_xor_si128(c2[7201],simde_mm_xor_si128(c2[9669],simde_mm_xor_si128(c2[5385],simde_mm_xor_si128(c2[5388],simde_mm_xor_si128(c2[1075],simde_mm_xor_si128(c2[1691],simde_mm_xor_si128(c2[2943],simde_mm_xor_si128(c2[5407],simde_mm_xor_si128(c2[7261],simde_mm_xor_si128(c2[7877],simde_mm_xor_si128(c2[9757],simde_mm_xor_si128(c2[4828],simde_mm_xor_si128(c2[2979],simde_mm_xor_si128(c2[3595],simde_mm_xor_si128(c2[1133],simde_mm_xor_si128(c2[5463],simde_mm_xor_si128(c2[1155],simde_mm_xor_si128(c2[1153],simde_mm_xor_si128(c2[6731],simde_mm_xor_si128(c2[7962],simde_mm_xor_si128(c2[9806],simde_mm_xor_si128(c2[567],simde_mm_xor_si128(c2[2446],simde_mm_xor_si128(c2[8598],simde_mm_xor_si128(c2[9828],c2[589]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 16
+     d2[224]=simde_mm_xor_si128(c2[2470],simde_mm_xor_si128(c2[1233],simde_mm_xor_si128(c2[2469],simde_mm_xor_si128(c2[8628],simde_mm_xor_si128(c2[3115],simde_mm_xor_si128(c2[33],simde_mm_xor_si128(c2[4347],simde_mm_xor_si128(c2[6197],simde_mm_xor_si128(c2[1916],simde_mm_xor_si128(c2[1297],simde_mm_xor_si128(c2[2528],simde_mm_xor_si128(c2[96],simde_mm_xor_si128(c2[7477],simde_mm_xor_si128(c2[6246],simde_mm_xor_si128(c2[2558],simde_mm_xor_si128(c2[1969],simde_mm_xor_si128(c2[3195],simde_mm_xor_si128(c2[6278],simde_mm_xor_si128(c2[9390],simde_mm_xor_si128(c2[7545],simde_mm_xor_si128(c2[5687],simde_mm_xor_si128(c2[9414],simde_mm_xor_si128(c2[9411],simde_mm_xor_si128(c2[1409],simde_mm_xor_si128(c2[1430],simde_mm_xor_si128(c2[2672],simde_mm_xor_si128(c2[2045],simde_mm_xor_si128(c2[6384],simde_mm_xor_si128(c2[8849],simde_mm_xor_si128(c2[3311],simde_mm_xor_si128(c2[3338],simde_mm_xor_si128(c2[7655],simde_mm_xor_si128(c2[3951],simde_mm_xor_si128(c2[1517],simde_mm_xor_si128(c2[7066],simde_mm_xor_si128(c2[4600],simde_mm_xor_si128(c2[8321],simde_mm_xor_si128(c2[2161],simde_mm_xor_si128(c2[310],simde_mm_xor_si128(c2[7086],simde_mm_xor_si128(c2[6503],simde_mm_xor_si128(c2[1580],simde_mm_xor_si128(c2[6501],simde_mm_xor_si128(c2[7761],simde_mm_xor_si128(c2[1607],simde_mm_xor_si128(c2[5293],simde_mm_xor_si128(c2[2867],simde_mm_xor_si128(c2[7173],simde_mm_xor_si128(c2[6562],simde_mm_xor_si128(c2[2275],simde_mm_xor_si128(c2[9050],simde_mm_xor_si128(c2[1663],simde_mm_xor_si128(c2[7234],simde_mm_xor_si128(c2[7237],simde_mm_xor_si128(c2[3540],simde_mm_xor_si128(c2[4792],simde_mm_xor_si128(c2[7256],simde_mm_xor_si128(c2[9726],simde_mm_xor_si128(c2[1737],simde_mm_xor_si128(c2[6677],simde_mm_xor_si128(c2[5444],simde_mm_xor_si128(c2[7312],simde_mm_xor_si128(c2[3004],simde_mm_xor_si128(c2[3002],simde_mm_xor_si128(c2[8580],simde_mm_xor_si128(c2[9811],simde_mm_xor_si128(c2[2416],simde_mm_xor_si128(c2[8579],simde_mm_xor_si128(c2[4295],simde_mm_xor_si128(c2[592],c2[2438]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 17
+     d2[238]=simde_mm_xor_si128(c2[4931],simde_mm_xor_si128(c2[9022],simde_mm_xor_si128(c2[5388],simde_mm_xor_si128(c2[9719],c2[3052]))));
+
+//row: 18
+     d2[252]=simde_mm_xor_si128(c2[4352],simde_mm_xor_si128(c2[8966],simde_mm_xor_si128(c2[5304],simde_mm_xor_si128(c2[9752],c2[6703]))));
+
+//row: 19
+     d2[266]=simde_mm_xor_si128(c2[4928],simde_mm_xor_si128(c2[3724],simde_mm_xor_si128(c2[4514],simde_mm_xor_si128(c2[3932],c2[5208]))));
+
+//row: 20
+     d2[280]=simde_mm_xor_si128(c2[3707],simde_mm_xor_si128(c2[2470],simde_mm_xor_si128(c2[3706],simde_mm_xor_si128(c2[10],simde_mm_xor_si128(c2[5550],simde_mm_xor_si128(c2[4352],simde_mm_xor_si128(c2[1270],simde_mm_xor_si128(c2[5584],simde_mm_xor_si128(c2[3139],simde_mm_xor_si128(c2[2520],simde_mm_xor_si128(c2[3765],simde_mm_xor_si128(c2[1319],simde_mm_xor_si128(c2[8714],simde_mm_xor_si128(c2[7483],simde_mm_xor_si128(c2[1329],simde_mm_xor_si128(c2[3192],simde_mm_xor_si128(c2[4432],simde_mm_xor_si128(c2[7515],simde_mm_xor_si128(c2[758],simde_mm_xor_si128(c2[8768],simde_mm_xor_si128(c2[6924],simde_mm_xor_si128(c2[796],simde_mm_xor_si128(c2[793],simde_mm_xor_si128(c2[2632],simde_mm_xor_si128(c2[2667],simde_mm_xor_si128(c2[3895],simde_mm_xor_si128(c2[3282],simde_mm_xor_si128(c2[7621],simde_mm_xor_si128(c2[231],simde_mm_xor_si128(c2[4548],simde_mm_xor_si128(c2[4575],simde_mm_xor_si128(c2[8878],simde_mm_xor_si128(c2[5188],simde_mm_xor_si128(c2[5803],simde_mm_xor_si128(c2[2754],simde_mm_xor_si128(c2[8289],simde_mm_xor_si128(c2[5837],simde_mm_xor_si128(c2[9558],simde_mm_xor_si128(c2[3398],simde_mm_xor_si128(c2[1547],simde_mm_xor_si128(c2[4017],simde_mm_xor_si128(c2[7740],simde_mm_xor_si128(c2[2803],simde_mm_xor_si128(c2[7738],simde_mm_xor_si128(c2[8998],simde_mm_xor_si128(c2[2830],simde_mm_xor_si128(c2[6530],simde_mm_xor_si128(c2[4090],simde_mm_xor_si128(c2[8410],simde_mm_xor_si128(c2[7785],simde_mm_xor_si128(c2[3512],simde_mm_xor_si128(c2[432],simde_mm_xor_si128(c2[2886],simde_mm_xor_si128(c2[8457],simde_mm_xor_si128(c2[8460],simde_mm_xor_si128(c2[4763],simde_mm_xor_si128(c2[6029],simde_mm_xor_si128(c2[8493],simde_mm_xor_si128(c2[1094],simde_mm_xor_si128(c2[2974],simde_mm_xor_si128(c2[7900],simde_mm_xor_si128(c2[6667],simde_mm_xor_si128(c2[8549],simde_mm_xor_si128(c2[4241],simde_mm_xor_si128(c2[4239],simde_mm_xor_si128(c2[9803],simde_mm_xor_si128(c2[1179],simde_mm_xor_si128(c2[3653],simde_mm_xor_si128(c2[5518],simde_mm_xor_si128(c2[1829],c2[3675]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 21
+     d2[294]=simde_mm_xor_si128(c2[40],simde_mm_xor_si128(c2[1996],simde_mm_xor_si128(c2[7853],simde_mm_xor_si128(c2[1795],c2[6754]))));
+
+//row: 22
+     d2[308]=simde_mm_xor_si128(c2[3083],simde_mm_xor_si128(c2[336],simde_mm_xor_si128(c2[2212],c2[2949])));
+
+//row: 23
+     d2[322]=simde_mm_xor_si128(c2[4961],simde_mm_xor_si128(c2[6844],simde_mm_xor_si128(c2[5218],c2[6671])));
+
+//row: 24
+     d2[336]=simde_mm_xor_si128(c2[6783],simde_mm_xor_si128(c2[5546],simde_mm_xor_si128(c2[6782],simde_mm_xor_si128(c2[3086],simde_mm_xor_si128(c2[619],simde_mm_xor_si128(c2[7428],simde_mm_xor_si128(c2[4346],simde_mm_xor_si128(c2[8660],simde_mm_xor_si128(c2[6229],simde_mm_xor_si128(c2[5610],simde_mm_xor_si128(c2[6841],simde_mm_xor_si128(c2[4409],simde_mm_xor_si128(c2[1935],simde_mm_xor_si128(c2[704],simde_mm_xor_si128(c2[8101],simde_mm_xor_si128(c2[6282],simde_mm_xor_si128(c2[7508],simde_mm_xor_si128(c2[736],simde_mm_xor_si128(c2[116],simde_mm_xor_si128(c2[3848],simde_mm_xor_si128(c2[1989],simde_mm_xor_si128(c2[145],simde_mm_xor_si128(c2[3872],simde_mm_xor_si128(c2[3869],simde_mm_xor_si128(c2[5722],simde_mm_xor_si128(c2[5743],simde_mm_xor_si128(c2[6985],simde_mm_xor_si128(c2[6358],simde_mm_xor_si128(c2[842],simde_mm_xor_si128(c2[3307],simde_mm_xor_si128(c2[7624],simde_mm_xor_si128(c2[7651],simde_mm_xor_si128(c2[2113],simde_mm_xor_si128(c2[8264],simde_mm_xor_si128(c2[5830],simde_mm_xor_si128(c2[1524],simde_mm_xor_si128(c2[8913],simde_mm_xor_si128(c2[2779],simde_mm_xor_si128(c2[6474],simde_mm_xor_si128(c2[4623],simde_mm_xor_si128(c2[936],simde_mm_xor_si128(c2[961],simde_mm_xor_si128(c2[5893],simde_mm_xor_si128(c2[959],simde_mm_xor_si128(c2[2219],simde_mm_xor_si128(c2[5920],simde_mm_xor_si128(c2[9606],simde_mm_xor_si128(c2[7180],simde_mm_xor_si128(c2[1631],simde_mm_xor_si128(c2[1020],simde_mm_xor_si128(c2[6588],simde_mm_xor_si128(c2[3508],simde_mm_xor_si128(c2[5976],simde_mm_xor_si128(c2[1692],simde_mm_xor_si128(c2[1681],simde_mm_xor_si128(c2[7853],simde_mm_xor_si128(c2[9105],simde_mm_xor_si128(c2[1714],simde_mm_xor_si128(c2[4184],simde_mm_xor_si128(c2[6050],simde_mm_xor_si128(c2[1121],simde_mm_xor_si128(c2[9757],simde_mm_xor_si128(c2[1770],simde_mm_xor_si128(c2[7317],simde_mm_xor_si128(c2[7315],simde_mm_xor_si128(c2[3024],simde_mm_xor_si128(c2[4269],simde_mm_xor_si128(c2[6729],simde_mm_xor_si128(c2[8608],simde_mm_xor_si128(c2[4905],c2[6751]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 25
+     d2[350]=simde_mm_xor_si128(c2[644],simde_mm_xor_si128(c2[3874],simde_mm_xor_si128(c2[816],c2[1013])));
+
+//row: 26
+     d2[364]=simde_mm_xor_si128(c2[6782],simde_mm_xor_si128(c2[67],simde_mm_xor_si128(c2[2589],c2[9667])));
+
+//row: 27
+     d2[378]=simde_mm_xor_si128(c2[41],simde_mm_xor_si128(c2[8184],c2[8858]));
+
+//row: 28
+     d2[392]=simde_mm_xor_si128(c2[1241],simde_mm_xor_si128(c2[5665],simde_mm_xor_si128(c2[6089],c2[1827])));
+
+//row: 29
+     d2[406]=simde_mm_xor_si128(c2[2468],simde_mm_xor_si128(c2[1245],simde_mm_xor_si128(c2[2467],simde_mm_xor_si128(c2[8010],simde_mm_xor_si128(c2[8626],simde_mm_xor_si128(c2[3113],simde_mm_xor_si128(c2[31],simde_mm_xor_si128(c2[3729],simde_mm_xor_si128(c2[4345],simde_mm_xor_si128(c2[3733],simde_mm_xor_si128(c2[1914],simde_mm_xor_si128(c2[1295],simde_mm_xor_si128(c2[2526],simde_mm_xor_si128(c2[94],simde_mm_xor_si128(c2[7489],simde_mm_xor_si128(c2[5628],simde_mm_xor_si128(c2[6244],simde_mm_xor_si128(c2[1967],simde_mm_xor_si128(c2[3193],simde_mm_xor_si128(c2[5660],simde_mm_xor_si128(c2[6276],simde_mm_xor_si128(c2[9388],simde_mm_xor_si128(c2[7543],simde_mm_xor_si128(c2[5685],simde_mm_xor_si128(c2[9412],simde_mm_xor_si128(c2[9409],simde_mm_xor_si128(c2[791],simde_mm_xor_si128(c2[1407],simde_mm_xor_si128(c2[1428],simde_mm_xor_si128(c2[2670],simde_mm_xor_si128(c2[1441],simde_mm_xor_si128(c2[2057],simde_mm_xor_si128(c2[6396],simde_mm_xor_si128(c2[8861],simde_mm_xor_si128(c2[2693],simde_mm_xor_si128(c2[3309],simde_mm_xor_si128(c2[3336],simde_mm_xor_si128(c2[7653],simde_mm_xor_si128(c2[3949],simde_mm_xor_si128(c2[1515],simde_mm_xor_si128(c2[7064],simde_mm_xor_si128(c2[3982],simde_mm_xor_si128(c2[4598],simde_mm_xor_si128(c2[8319],simde_mm_xor_si128(c2[2159],simde_mm_xor_si128(c2[9561],simde_mm_xor_si128(c2[308],simde_mm_xor_si128(c2[6501],simde_mm_xor_si128(c2[1578],simde_mm_xor_si128(c2[5883],simde_mm_xor_si128(c2[6499],simde_mm_xor_si128(c2[7759],simde_mm_xor_si128(c2[1605],simde_mm_xor_si128(c2[4689],simde_mm_xor_si128(c2[5305],simde_mm_xor_si128(c2[2865],simde_mm_xor_si128(c2[7171],simde_mm_xor_si128(c2[5944],simde_mm_xor_si128(c2[6560],simde_mm_xor_si128(c2[7168],simde_mm_xor_si128(c2[2273],simde_mm_xor_si128(c2[9048],simde_mm_xor_si128(c2[1661],simde_mm_xor_si128(c2[7232],simde_mm_xor_si128(c2[7235],simde_mm_xor_si128(c2[2922],simde_mm_xor_si128(c2[3538],simde_mm_xor_si128(c2[4790],simde_mm_xor_si128(c2[7254],simde_mm_xor_si128(c2[9108],simde_mm_xor_si128(c2[9724],simde_mm_xor_si128(c2[1749],simde_mm_xor_si128(c2[6675],simde_mm_xor_si128(c2[4826],simde_mm_xor_si128(c2[5442],simde_mm_xor_si128(c2[3587],simde_mm_xor_si128(c2[7310],simde_mm_xor_si128(c2[3002],simde_mm_xor_si128(c2[3000],simde_mm_xor_si128(c2[8578],simde_mm_xor_si128(c2[9809],simde_mm_xor_si128(c2[1798],simde_mm_xor_si128(c2[2414],simde_mm_xor_si128(c2[4293],simde_mm_xor_si128(c2[590],simde_mm_xor_si128(c2[1820],c2[2436]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 30
+     d2[420]=simde_mm_xor_si128(c2[6783],simde_mm_xor_si128(c2[5546],simde_mm_xor_si128(c2[6166],simde_mm_xor_si128(c2[6782],simde_mm_xor_si128(c2[2470],simde_mm_xor_si128(c2[3086],simde_mm_xor_si128(c2[1234],simde_mm_xor_si128(c2[7428],simde_mm_xor_si128(c2[3730],simde_mm_xor_si128(c2[4346],simde_mm_xor_si128(c2[8044],simde_mm_xor_si128(c2[8660],simde_mm_xor_si128(c2[6229],simde_mm_xor_si128(c2[5610],simde_mm_xor_si128(c2[6225],simde_mm_xor_si128(c2[6841],simde_mm_xor_si128(c2[4409],simde_mm_xor_si128(c2[1935],simde_mm_xor_si128(c2[88],simde_mm_xor_si128(c2[704],simde_mm_xor_si128(c2[6282],simde_mm_xor_si128(c2[6892],simde_mm_xor_si128(c2[7508],simde_mm_xor_si128(c2[120],simde_mm_xor_si128(c2[736],simde_mm_xor_si128(c2[3848],simde_mm_xor_si128(c2[1989],simde_mm_xor_si128(c2[9384],simde_mm_xor_si128(c2[145],simde_mm_xor_si128(c2[3872],simde_mm_xor_si128(c2[3253],simde_mm_xor_si128(c2[3869],simde_mm_xor_si128(c2[5106],simde_mm_xor_si128(c2[5722],simde_mm_xor_si128(c2[5743],simde_mm_xor_si128(c2[6369],simde_mm_xor_si128(c2[6985],simde_mm_xor_si128(c2[5742],simde_mm_xor_si128(c2[6358],simde_mm_xor_si128(c2[842],simde_mm_xor_si128(c2[2691],simde_mm_xor_si128(c2[3307],simde_mm_xor_si128(c2[7008],simde_mm_xor_si128(c2[7624],simde_mm_xor_si128(c2[7651],simde_mm_xor_si128(c2[2113],simde_mm_xor_si128(c2[7648],simde_mm_xor_si128(c2[8264],simde_mm_xor_si128(c2[5830],simde_mm_xor_si128(c2[908],simde_mm_xor_si128(c2[1524],simde_mm_xor_si128(c2[8297],simde_mm_xor_si128(c2[8913],simde_mm_xor_si128(c2[1520],simde_mm_xor_si128(c2[2779],simde_mm_xor_si128(c2[6474],simde_mm_xor_si128(c2[4007],simde_mm_xor_si128(c2[4623],simde_mm_xor_si128(c2[961],simde_mm_xor_si128(c2[5893],simde_mm_xor_si128(c2[343],simde_mm_xor_si128(c2[959],simde_mm_xor_si128(c2[2219],simde_mm_xor_si128(c2[5304],simde_mm_xor_si128(c2[5920],simde_mm_xor_si128(c2[8990],simde_mm_xor_si128(c2[9606],simde_mm_xor_si128(c2[1609],simde_mm_xor_si128(c2[7180],simde_mm_xor_si128(c2[1015],simde_mm_xor_si128(c2[1631],simde_mm_xor_si128(c2[404],simde_mm_xor_si128(c2[1020],simde_mm_xor_si128(c2[6588],simde_mm_xor_si128(c2[3508],simde_mm_xor_si128(c2[5360],simde_mm_xor_si128(c2[5976],simde_mm_xor_si128(c2[1692],simde_mm_xor_si128(c2[1681],simde_mm_xor_si128(c2[7237],simde_mm_xor_si128(c2[7853],simde_mm_xor_si128(c2[9105],simde_mm_xor_si128(c2[1098],simde_mm_xor_si128(c2[1714],simde_mm_xor_si128(c2[3568],simde_mm_xor_si128(c2[4184],simde_mm_xor_si128(c2[6050],simde_mm_xor_si128(c2[505],simde_mm_xor_si128(c2[1121],simde_mm_xor_si128(c2[9141],simde_mm_xor_si128(c2[9757],simde_mm_xor_si128(c2[1770],simde_mm_xor_si128(c2[7317],simde_mm_xor_si128(c2[6699],simde_mm_xor_si128(c2[7315],simde_mm_xor_si128(c2[3024],simde_mm_xor_si128(c2[3653],simde_mm_xor_si128(c2[4269],simde_mm_xor_si128(c2[6113],simde_mm_xor_si128(c2[6729],simde_mm_xor_si128(c2[8608],simde_mm_xor_si128(c2[4905],simde_mm_xor_si128(c2[6135],c2[6751])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 31
+     d2[434]=simde_mm_xor_si128(c2[11],simde_mm_xor_si128(c2[8],simde_mm_xor_si128(c2[8629],simde_mm_xor_si128(c2[8626],simde_mm_xor_si128(c2[10],simde_mm_xor_si128(c2[7],simde_mm_xor_si128(c2[6169],simde_mm_xor_si128(c2[5550],simde_mm_xor_si128(c2[6166],simde_mm_xor_si128(c2[656],simde_mm_xor_si128(c2[653],simde_mm_xor_si128(c2[7429],simde_mm_xor_si128(c2[7426],simde_mm_xor_si128(c2[1888],simde_mm_xor_si128(c2[1269],simde_mm_xor_si128(c2[1885],simde_mm_xor_si128(c2[9278],simde_mm_xor_si128(c2[9298],simde_mm_xor_si128(c2[9309],simde_mm_xor_si128(c2[8693],simde_mm_xor_si128(c2[8690],simde_mm_xor_si128(c2[69],simde_mm_xor_si128(c2[66],simde_mm_xor_si128(c2[7478],simde_mm_xor_si128(c2[7489],simde_mm_xor_si128(c2[5018],simde_mm_xor_si128(c2[5015],simde_mm_xor_si128(c2[3787],simde_mm_xor_si128(c2[3168],simde_mm_xor_si128(c2[3784],simde_mm_xor_si128(c2[9365],simde_mm_xor_si128(c2[9362],simde_mm_xor_si128(c2[736],simde_mm_xor_si128(c2[733],simde_mm_xor_si128(c2[3819],simde_mm_xor_si128(c2[3200],simde_mm_xor_si128(c2[3816],simde_mm_xor_si128(c2[6917],simde_mm_xor_si128(c2[6928],simde_mm_xor_si128(c2[5072],simde_mm_xor_si128(c2[5069],simde_mm_xor_si128(c2[3228],simde_mm_xor_si128(c2[3225],simde_mm_xor_si128(c2[6955],simde_mm_xor_si128(c2[6952],simde_mm_xor_si128(c2[6952],simde_mm_xor_si128(c2[6949],simde_mm_xor_si128(c2[8805],simde_mm_xor_si128(c2[8186],simde_mm_xor_si128(c2[8802],simde_mm_xor_si128(c2[8826],simde_mm_xor_si128(c2[8823],simde_mm_xor_si128(c2[199],simde_mm_xor_si128(c2[196],simde_mm_xor_si128(c2[9441],simde_mm_xor_si128(c2[8822],simde_mm_xor_si128(c2[9438],simde_mm_xor_si128(c2[815],simde_mm_xor_si128(c2[3925],simde_mm_xor_si128(c2[3922],simde_mm_xor_si128(c2[6390],simde_mm_xor_si128(c2[6387],simde_mm_xor_si128(c2[852],simde_mm_xor_si128(c2[233],simde_mm_xor_si128(c2[849],simde_mm_xor_si128(c2[879],simde_mm_xor_si128(c2[876],simde_mm_xor_si128(c2[5182],simde_mm_xor_si128(c2[5193],simde_mm_xor_si128(c2[1492],simde_mm_xor_si128(c2[1489],simde_mm_xor_si128(c2[8913],simde_mm_xor_si128(c2[8910],simde_mm_xor_si128(c2[4593],simde_mm_xor_si128(c2[4604],simde_mm_xor_si128(c2[2141],simde_mm_xor_si128(c2[1522],simde_mm_xor_si128(c2[2138],simde_mm_xor_si128(c2[5862],simde_mm_xor_si128(c2[5859],simde_mm_xor_si128(c2[9557],simde_mm_xor_si128(c2[9554],simde_mm_xor_si128(c2[7706],simde_mm_xor_si128(c2[7087],simde_mm_xor_si128(c2[7703],simde_mm_xor_si128(c2[4044],simde_mm_xor_si128(c2[4041],simde_mm_xor_si128(c2[8962],simde_mm_xor_si128(c2[8973],simde_mm_xor_si128(c2[4042],simde_mm_xor_si128(c2[3423],simde_mm_xor_si128(c2[4039],simde_mm_xor_si128(c2[5302],simde_mm_xor_si128(c2[5299],simde_mm_xor_si128(c2[8989],simde_mm_xor_si128(c2[9000],simde_mm_xor_si128(c2[2834],simde_mm_xor_si128(c2[2215],simde_mm_xor_si128(c2[2831],simde_mm_xor_si128(c2[394],simde_mm_xor_si128(c2[405],simde_mm_xor_si128(c2[4714],simde_mm_xor_si128(c2[4711],simde_mm_xor_si128(c2[4089],simde_mm_xor_si128(c2[3484],simde_mm_xor_si128(c2[4100],simde_mm_xor_si128(c2[9671],simde_mm_xor_si128(c2[9668],simde_mm_xor_si128(c2[6591],simde_mm_xor_si128(c2[6588],simde_mm_xor_si128(c2[9045],simde_mm_xor_si128(c2[9056],simde_mm_xor_si128(c2[4761],simde_mm_xor_si128(c2[4772],simde_mm_xor_si128(c2[4764],simde_mm_xor_si128(c2[4761],simde_mm_xor_si128(c2[1067],simde_mm_xor_si128(c2[448],simde_mm_xor_si128(c2[1064],simde_mm_xor_si128(c2[2333],simde_mm_xor_si128(c2[2330],simde_mm_xor_si128(c2[4797],simde_mm_xor_si128(c2[4794],simde_mm_xor_si128(c2[7253],simde_mm_xor_si128(c2[6648],simde_mm_xor_si128(c2[7264],simde_mm_xor_si128(c2[9133],simde_mm_xor_si128(c2[9130],simde_mm_xor_si128(c2[4204],simde_mm_xor_si128(c2[4201],simde_mm_xor_si128(c2[2971],simde_mm_xor_si128(c2[2352],simde_mm_xor_si128(c2[2968],simde_mm_xor_si128(c2[4853],simde_mm_xor_si128(c2[4850],simde_mm_xor_si128(c2[545],simde_mm_xor_si128(c2[542],simde_mm_xor_si128(c2[543],simde_mm_xor_si128(c2[540],simde_mm_xor_si128(c2[6107],simde_mm_xor_si128(c2[6104],simde_mm_xor_si128(c2[7338],simde_mm_xor_si128(c2[7349],simde_mm_xor_si128(c2[9812],simde_mm_xor_si128(c2[9193],simde_mm_xor_si128(c2[9809],simde_mm_xor_si128(c2[1822],simde_mm_xor_si128(c2[1833],simde_mm_xor_si128(c2[7988],simde_mm_xor_si128(c2[7985],simde_mm_xor_si128(c2[9834],simde_mm_xor_si128(c2[9215],c2[9831]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 32
+     d2[448]=simde_mm_xor_si128(c2[8625],simde_mm_xor_si128(c2[7402],simde_mm_xor_si128(c2[8008],simde_mm_xor_si128(c2[8624],simde_mm_xor_si128(c2[4312],simde_mm_xor_si128(c2[4928],simde_mm_xor_si128(c2[12],simde_mm_xor_si128(c2[9270],simde_mm_xor_si128(c2[5572],simde_mm_xor_si128(c2[6188],simde_mm_xor_si128(c2[31],simde_mm_xor_si128(c2[647],simde_mm_xor_si128(c2[8071],simde_mm_xor_si128(c2[7452],simde_mm_xor_si128(c2[8067],simde_mm_xor_si128(c2[8683],simde_mm_xor_si128(c2[6251],simde_mm_xor_si128(c2[3791],simde_mm_xor_si128(c2[1944],simde_mm_xor_si128(c2[2560],simde_mm_xor_si128(c2[8124],simde_mm_xor_si128(c2[8748],simde_mm_xor_si128(c2[9364],simde_mm_xor_si128(c2[1962],simde_mm_xor_si128(c2[2578],simde_mm_xor_si128(c2[5690],simde_mm_xor_si128(c2[3845],simde_mm_xor_si128(c2[1385],simde_mm_xor_si128(c2[2001],simde_mm_xor_si128(c2[5714],simde_mm_xor_si128(c2[5109],simde_mm_xor_si128(c2[5725],simde_mm_xor_si128(c2[6948],simde_mm_xor_si128(c2[7564],simde_mm_xor_si128(c2[7599],simde_mm_xor_si128(c2[8211],simde_mm_xor_si128(c2[8827],simde_mm_xor_si128(c2[7598],simde_mm_xor_si128(c2[8214],simde_mm_xor_si128(c2[2698],simde_mm_xor_si128(c2[4547],simde_mm_xor_si128(c2[5163],simde_mm_xor_si128(c2[8850],simde_mm_xor_si128(c2[9466],simde_mm_xor_si128(c2[9493],simde_mm_xor_si128(c2[3955],simde_mm_xor_si128(c2[9504],simde_mm_xor_si128(c2[265],simde_mm_xor_si128(c2[7672],simde_mm_xor_si128(c2[2750],simde_mm_xor_si128(c2[3366],simde_mm_xor_si128(c2[284],simde_mm_xor_si128(c2[900],simde_mm_xor_si128(c2[4621],simde_mm_xor_si128(c2[8316],simde_mm_xor_si128(c2[5863],simde_mm_xor_si128(c2[6479],simde_mm_xor_si128(c2[2803],simde_mm_xor_si128(c2[7735],simde_mm_xor_si128(c2[2185],simde_mm_xor_si128(c2[2801],simde_mm_xor_si128(c2[965],simde_mm_xor_si128(c2[4061],simde_mm_xor_si128(c2[7146],simde_mm_xor_si128(c2[7762],simde_mm_xor_si128(c2[991],simde_mm_xor_si128(c2[1607],simde_mm_xor_si128(c2[9022],simde_mm_xor_si128(c2[2857],simde_mm_xor_si128(c2[3473],simde_mm_xor_si128(c2[2246],simde_mm_xor_si128(c2[2862],simde_mm_xor_si128(c2[6555],simde_mm_xor_si128(c2[8430],simde_mm_xor_si128(c2[5350],simde_mm_xor_si128(c2[7202],simde_mm_xor_si128(c2[7818],simde_mm_xor_si128(c2[3534],simde_mm_xor_si128(c2[3537],simde_mm_xor_si128(c2[9079],simde_mm_xor_si128(c2[9695],simde_mm_xor_si128(c2[1092],simde_mm_xor_si128(c2[2940],simde_mm_xor_si128(c2[3556],simde_mm_xor_si128(c2[5410],simde_mm_xor_si128(c2[6026],simde_mm_xor_si128(c2[7906],simde_mm_xor_si128(c2[2361],simde_mm_xor_si128(c2[2977],simde_mm_xor_si128(c2[1128],simde_mm_xor_si128(c2[1744],simde_mm_xor_si128(c2[3612],simde_mm_xor_si128(c2[9159],simde_mm_xor_si128(c2[8541],simde_mm_xor_si128(c2[9157],simde_mm_xor_si128(c2[4880],simde_mm_xor_si128(c2[5495],simde_mm_xor_si128(c2[6111],simde_mm_xor_si128(c2[7955],simde_mm_xor_si128(c2[8571],simde_mm_xor_si128(c2[595],simde_mm_xor_si128(c2[6761],simde_mm_xor_si128(c2[7991],c2[8607])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 33
+     d2[462]=simde_mm_xor_si128(c2[1267],simde_mm_xor_si128(c2[675],simde_mm_xor_si128(c2[934],c2[6140])));
+
+//row: 34
+     d2[476]=simde_mm_xor_si128(c2[1237],simde_mm_xor_si128(c2[6367],simde_mm_xor_si128(c2[2888],c2[4181])));
+
+//row: 35
+     d2[490]=simde_mm_xor_si128(c2[6171],simde_mm_xor_si128(c2[4934],simde_mm_xor_si128(c2[6170],simde_mm_xor_si128(c2[2474],simde_mm_xor_si128(c2[6816],simde_mm_xor_si128(c2[3734],simde_mm_xor_si128(c2[8048],simde_mm_xor_si128(c2[40],simde_mm_xor_si128(c2[5603],simde_mm_xor_si128(c2[4984],simde_mm_xor_si128(c2[6229],simde_mm_xor_si128(c2[3783],simde_mm_xor_si128(c2[1323],simde_mm_xor_si128(c2[92],simde_mm_xor_si128(c2[5656],simde_mm_xor_si128(c2[6896],simde_mm_xor_si128(c2[124],simde_mm_xor_si128(c2[3222],simde_mm_xor_si128(c2[1377],simde_mm_xor_si128(c2[9388],simde_mm_xor_si128(c2[3260],simde_mm_xor_si128(c2[3257],simde_mm_xor_si128(c2[5096],simde_mm_xor_si128(c2[8186],simde_mm_xor_si128(c2[5131],simde_mm_xor_si128(c2[6359],simde_mm_xor_si128(c2[5746],simde_mm_xor_si128(c2[230],simde_mm_xor_si128(c2[2695],simde_mm_xor_si128(c2[7012],simde_mm_xor_si128(c2[7039],simde_mm_xor_si128(c2[1487],simde_mm_xor_si128(c2[7652],simde_mm_xor_si128(c2[5218],simde_mm_xor_si128(c2[898],simde_mm_xor_si128(c2[8301],simde_mm_xor_si128(c2[2167],simde_mm_xor_si128(c2[5862],simde_mm_xor_si128(c2[4011],simde_mm_xor_si128(c2[349],simde_mm_xor_si128(c2[5267],simde_mm_xor_si128(c2[347],simde_mm_xor_si128(c2[6497],simde_mm_xor_si128(c2[1607],simde_mm_xor_si128(c2[5294],simde_mm_xor_si128(c2[8994],simde_mm_xor_si128(c2[6554],simde_mm_xor_si128(c2[1019],simde_mm_xor_si128(c2[394],simde_mm_xor_si128(c2[5976],simde_mm_xor_si128(c2[2896],simde_mm_xor_si128(c2[5350],simde_mm_xor_si128(c2[1066],simde_mm_xor_si128(c2[1069],simde_mm_xor_si128(c2[7227],simde_mm_xor_si128(c2[8493],simde_mm_xor_si128(c2[1102],simde_mm_xor_si128(c2[3558],simde_mm_xor_si128(c2[5438],simde_mm_xor_si128(c2[509],simde_mm_xor_si128(c2[9131],simde_mm_xor_si128(c2[1158],simde_mm_xor_si128(c2[6705],simde_mm_xor_si128(c2[6703],simde_mm_xor_si128(c2[2412],simde_mm_xor_si128(c2[3643],simde_mm_xor_si128(c2[6117],simde_mm_xor_si128(c2[7982],simde_mm_xor_si128(c2[4293],c2[6139])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 36
+     d2[504]=simde_mm_xor_si128(c2[8637],simde_mm_xor_si128(c2[8409],simde_mm_xor_si128(c2[420],c2[4200])));
+
+//row: 37
+     d2[518]=simde_mm_xor_si128(c2[5552],simde_mm_xor_si128(c2[6168],simde_mm_xor_si128(c2[4931],simde_mm_xor_si128(c2[6167],simde_mm_xor_si128(c2[2471],simde_mm_xor_si128(c2[6197],simde_mm_xor_si128(c2[6813],simde_mm_xor_si128(c2[3731],simde_mm_xor_si128(c2[8045],simde_mm_xor_si128(c2[649],simde_mm_xor_si128(c2[4984],simde_mm_xor_si128(c2[5600],simde_mm_xor_si128(c2[4995],simde_mm_xor_si128(c2[6226],simde_mm_xor_si128(c2[3164],simde_mm_xor_si128(c2[3780],simde_mm_xor_si128(c2[1320],simde_mm_xor_si128(c2[89],simde_mm_xor_si128(c2[5667],simde_mm_xor_si128(c2[6893],simde_mm_xor_si128(c2[121],simde_mm_xor_si128(c2[2617],simde_mm_xor_si128(c2[3233],simde_mm_xor_si128(c2[1374],simde_mm_xor_si128(c2[9385],simde_mm_xor_si128(c2[2641],simde_mm_xor_si128(c2[3257],simde_mm_xor_si128(c2[3254],simde_mm_xor_si128(c2[5107],simde_mm_xor_si128(c2[5128],simde_mm_xor_si128(c2[6356],simde_mm_xor_si128(c2[5743],simde_mm_xor_si128(c2[227],simde_mm_xor_si128(c2[2692],simde_mm_xor_si128(c2[7009],simde_mm_xor_si128(c2[6420],simde_mm_xor_si128(c2[7036],simde_mm_xor_si128(c2[1484],simde_mm_xor_si128(c2[7649],simde_mm_xor_si128(c2[4599],simde_mm_xor_si128(c2[5215],simde_mm_xor_si128(c2[909],simde_mm_xor_si128(c2[8298],simde_mm_xor_si128(c2[1548],simde_mm_xor_si128(c2[2164],simde_mm_xor_si128(c2[5859],simde_mm_xor_si128(c2[4008],simde_mm_xor_si128(c2[9585],simde_mm_xor_si128(c2[346],simde_mm_xor_si128(c2[5264],simde_mm_xor_si128(c2[344],simde_mm_xor_si128(c2[988],simde_mm_xor_si128(c2[1604],simde_mm_xor_si128(c2[5305],simde_mm_xor_si128(c2[8991],simde_mm_xor_si128(c2[2224],simde_mm_xor_si128(c2[6565],simde_mm_xor_si128(c2[1016],simde_mm_xor_si128(c2[405],simde_mm_xor_si128(c2[5357],simde_mm_xor_si128(c2[5973],simde_mm_xor_si128(c2[2893],simde_mm_xor_si128(c2[5361],simde_mm_xor_si128(c2[461],simde_mm_xor_si128(c2[1077],simde_mm_xor_si128(c2[1066],simde_mm_xor_si128(c2[7224],simde_mm_xor_si128(c2[8490],simde_mm_xor_si128(c2[1099],simde_mm_xor_si128(c2[3569],simde_mm_xor_si128(c2[4819],simde_mm_xor_si128(c2[5435],simde_mm_xor_si128(c2[506],simde_mm_xor_si128(c2[9128],simde_mm_xor_si128(c2[539],simde_mm_xor_si128(c2[1155],simde_mm_xor_si128(c2[6702],simde_mm_xor_si128(c2[6700],simde_mm_xor_si128(c2[1793],simde_mm_xor_si128(c2[2409],simde_mm_xor_si128(c2[3640],simde_mm_xor_si128(c2[6114],simde_mm_xor_si128(c2[7377],simde_mm_xor_si128(c2[7993],simde_mm_xor_si128(c2[4290],c2[6136])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 38
+     d2[532]=simde_mm_xor_si128(c2[6783],simde_mm_xor_si128(c2[6417],simde_mm_xor_si128(c2[5828],c2[6496])));
+
+//row: 39
+     d2[546]=simde_mm_xor_si128(c2[7420],simde_mm_xor_si128(c2[8096],simde_mm_xor_si128(c2[815],c2[1771])));
+
+//row: 40
+     d2[560]=simde_mm_xor_si128(c2[1852],simde_mm_xor_si128(c2[8234],c2[482]));
+
+//row: 41
+     d2[574]=simde_mm_xor_si128(c2[4341],simde_mm_xor_si128(c2[4409],simde_mm_xor_si128(c2[7647],c2[4826])));
+
+//row: 42
+     d2[588]=simde_mm_xor_si128(c2[8628],simde_mm_xor_si128(c2[7405],simde_mm_xor_si128(c2[8011],simde_mm_xor_si128(c2[8627],simde_mm_xor_si128(c2[4315],simde_mm_xor_si128(c2[4931],simde_mm_xor_si128(c2[1239],simde_mm_xor_si128(c2[9273],simde_mm_xor_si128(c2[5575],simde_mm_xor_si128(c2[6191],simde_mm_xor_si128(c2[34],simde_mm_xor_si128(c2[650],simde_mm_xor_si128(c2[8074],simde_mm_xor_si128(c2[7455],simde_mm_xor_si128(c2[8070],simde_mm_xor_si128(c2[8686],simde_mm_xor_si128(c2[6254],simde_mm_xor_si128(c2[3780],simde_mm_xor_si128(c2[1933],simde_mm_xor_si128(c2[2549],simde_mm_xor_si128(c2[8127],simde_mm_xor_si128(c2[8737],simde_mm_xor_si128(c2[9353],simde_mm_xor_si128(c2[1965],simde_mm_xor_si128(c2[2581],simde_mm_xor_si128(c2[6893],simde_mm_xor_si128(c2[5693],simde_mm_xor_si128(c2[3848],simde_mm_xor_si128(c2[1374],simde_mm_xor_si128(c2[1990],simde_mm_xor_si128(c2[5717],simde_mm_xor_si128(c2[5098],simde_mm_xor_si128(c2[5714],simde_mm_xor_si128(c2[6951],simde_mm_xor_si128(c2[7567],simde_mm_xor_si128(c2[7588],simde_mm_xor_si128(c2[8214],simde_mm_xor_si128(c2[8830],simde_mm_xor_si128(c2[7601],simde_mm_xor_si128(c2[8217],simde_mm_xor_si128(c2[2701],simde_mm_xor_si128(c2[4536],simde_mm_xor_si128(c2[5152],simde_mm_xor_si128(c2[8853],simde_mm_xor_si128(c2[9469],simde_mm_xor_si128(c2[9496],simde_mm_xor_si128(c2[3958],simde_mm_xor_si128(c2[9493],simde_mm_xor_si128(c2[254],simde_mm_xor_si128(c2[7675],simde_mm_xor_si128(c2[2753],simde_mm_xor_si128(c2[3369],simde_mm_xor_si128(c2[287],simde_mm_xor_si128(c2[903],simde_mm_xor_si128(c2[4624],simde_mm_xor_si128(c2[8319],simde_mm_xor_si128(c2[5852],simde_mm_xor_si128(c2[6468],simde_mm_xor_si128(c2[2806],simde_mm_xor_si128(c2[7738],simde_mm_xor_si128(c2[2188],simde_mm_xor_si128(c2[2804],simde_mm_xor_si128(c2[4064],simde_mm_xor_si128(c2[7149],simde_mm_xor_si128(c2[7765],simde_mm_xor_si128(c2[980],simde_mm_xor_si128(c2[1596],simde_mm_xor_si128(c2[9025],simde_mm_xor_si128(c2[2860],simde_mm_xor_si128(c2[3476],simde_mm_xor_si128(c2[2249],simde_mm_xor_si128(c2[2865],simde_mm_xor_si128(c2[8433],simde_mm_xor_si128(c2[5353],simde_mm_xor_si128(c2[7205],simde_mm_xor_si128(c2[7821],simde_mm_xor_si128(c2[3537],simde_mm_xor_si128(c2[3540],simde_mm_xor_si128(c2[9082],simde_mm_xor_si128(c2[9698],simde_mm_xor_si128(c2[1095],simde_mm_xor_si128(c2[2943],simde_mm_xor_si128(c2[3559],simde_mm_xor_si128(c2[5413],simde_mm_xor_si128(c2[6029],simde_mm_xor_si128(c2[7909],simde_mm_xor_si128(c2[2364],simde_mm_xor_si128(c2[2980],simde_mm_xor_si128(c2[1131],simde_mm_xor_si128(c2[1747],simde_mm_xor_si128(c2[3615],simde_mm_xor_si128(c2[9162],simde_mm_xor_si128(c2[8544],simde_mm_xor_si128(c2[9160],simde_mm_xor_si128(c2[4883],simde_mm_xor_si128(c2[5498],simde_mm_xor_si128(c2[6114],simde_mm_xor_si128(c2[7958],simde_mm_xor_si128(c2[8574],simde_mm_xor_si128(c2[598],simde_mm_xor_si128(c2[6750],simde_mm_xor_si128(c2[7980],c2[8596]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 43
+     d2[602]=simde_mm_xor_si128(c2[626],simde_mm_xor_si128(c2[9244],simde_mm_xor_si128(c2[625],simde_mm_xor_si128(c2[6168],simde_mm_xor_si128(c2[6784],simde_mm_xor_si128(c2[1271],simde_mm_xor_si128(c2[8044],simde_mm_xor_si128(c2[1887],simde_mm_xor_si128(c2[2503],simde_mm_xor_si128(c2[8664],simde_mm_xor_si128(c2[58],simde_mm_xor_si128(c2[9308],simde_mm_xor_si128(c2[684],simde_mm_xor_si128(c2[8093],simde_mm_xor_si128(c2[5633],simde_mm_xor_si128(c2[3786],simde_mm_xor_si128(c2[4402],simde_mm_xor_si128(c2[125],simde_mm_xor_si128(c2[1351],simde_mm_xor_si128(c2[3818],simde_mm_xor_si128(c2[4434],simde_mm_xor_si128(c2[7532],simde_mm_xor_si128(c2[5687],simde_mm_xor_si128(c2[3843],simde_mm_xor_si128(c2[7570],simde_mm_xor_si128(c2[7567],simde_mm_xor_si128(c2[8804],simde_mm_xor_si128(c2[9420],simde_mm_xor_si128(c2[9441],simde_mm_xor_si128(c2[814],simde_mm_xor_si128(c2[9440],simde_mm_xor_si128(c2[201],simde_mm_xor_si128(c2[4540],simde_mm_xor_si128(c2[7005],simde_mm_xor_si128(c2[851],simde_mm_xor_si128(c2[1467],simde_mm_xor_si128(c2[1494],simde_mm_xor_si128(c2[5797],simde_mm_xor_si128(c2[2107],simde_mm_xor_si128(c2[9528],simde_mm_xor_si128(c2[5208],simde_mm_xor_si128(c2[2140],simde_mm_xor_si128(c2[2756],simde_mm_xor_si128(c2[6477],simde_mm_xor_si128(c2[317],simde_mm_xor_si128(c2[7705],simde_mm_xor_si128(c2[8321],simde_mm_xor_si128(c2[4659],simde_mm_xor_si128(c2[9577],simde_mm_xor_si128(c2[4041],simde_mm_xor_si128(c2[4657],simde_mm_xor_si128(c2[5917],simde_mm_xor_si128(c2[9604],simde_mm_xor_si128(c2[2833],simde_mm_xor_si128(c2[3449],simde_mm_xor_si128(c2[1009],simde_mm_xor_si128(c2[5329],simde_mm_xor_si128(c2[4088],simde_mm_xor_si128(c2[4704],simde_mm_xor_si128(c2[431],simde_mm_xor_si128(c2[7206],simde_mm_xor_si128(c2[9660],simde_mm_xor_si128(c2[5376],simde_mm_xor_si128(c2[5379],simde_mm_xor_si128(c2[1066],simde_mm_xor_si128(c2[1682],simde_mm_xor_si128(c2[4145],simde_mm_xor_si128(c2[2948],simde_mm_xor_si128(c2[5412],simde_mm_xor_si128(c2[7252],simde_mm_xor_si128(c2[7868],simde_mm_xor_si128(c2[9748],simde_mm_xor_si128(c2[4819],simde_mm_xor_si128(c2[2970],simde_mm_xor_si128(c2[3586],simde_mm_xor_si128(c2[4208],simde_mm_xor_si128(c2[5468],simde_mm_xor_si128(c2[1160],simde_mm_xor_si128(c2[1158],simde_mm_xor_si128(c2[6722],simde_mm_xor_si128(c2[7953],simde_mm_xor_si128(c2[9811],simde_mm_xor_si128(c2[572],simde_mm_xor_si128(c2[2437],simde_mm_xor_si128(c2[8603],simde_mm_xor_si128(c2[9833],c2[594]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 44
+     d2[616]=simde_mm_xor_si128(c2[6776],simde_mm_xor_si128(c2[5553],simde_mm_xor_si128(c2[6789],simde_mm_xor_si128(c2[3093],simde_mm_xor_si128(c2[2469],simde_mm_xor_si128(c2[7421],simde_mm_xor_si128(c2[4353],simde_mm_xor_si128(c2[8653],simde_mm_xor_si128(c2[6222],simde_mm_xor_si128(c2[5603],simde_mm_xor_si128(c2[6834],simde_mm_xor_si128(c2[4402],simde_mm_xor_si128(c2[1942],simde_mm_xor_si128(c2[711],simde_mm_xor_si128(c2[6275],simde_mm_xor_si128(c2[7515],simde_mm_xor_si128(c2[729],simde_mm_xor_si128(c2[3841],simde_mm_xor_si128(c2[1996],simde_mm_xor_si128(c2[152],simde_mm_xor_si128(c2[3865],simde_mm_xor_si128(c2[3876],simde_mm_xor_si128(c2[5715],simde_mm_xor_si128(c2[5750],simde_mm_xor_si128(c2[6978],simde_mm_xor_si128(c2[6365],simde_mm_xor_si128(c2[2660],simde_mm_xor_si128(c2[849],simde_mm_xor_si128(c2[3314],simde_mm_xor_si128(c2[7617],simde_mm_xor_si128(c2[7644],simde_mm_xor_si128(c2[2106],simde_mm_xor_si128(c2[8271],simde_mm_xor_si128(c2[5796],simde_mm_xor_si128(c2[5837],simde_mm_xor_si128(c2[1517],simde_mm_xor_si128(c2[8906],simde_mm_xor_si128(c2[2772],simde_mm_xor_si128(c2[6481],simde_mm_xor_si128(c2[4630],simde_mm_xor_si128(c2[954],simde_mm_xor_si128(c2[5886],simde_mm_xor_si128(c2[952],simde_mm_xor_si128(c2[2212],simde_mm_xor_si128(c2[5913],simde_mm_xor_si128(c2[9613],simde_mm_xor_si128(c2[7173],simde_mm_xor_si128(c2[1624],simde_mm_xor_si128(c2[1013],simde_mm_xor_si128(c2[6581],simde_mm_xor_si128(c2[3501],simde_mm_xor_si128(c2[5969],simde_mm_xor_si128(c2[1685],simde_mm_xor_si128(c2[1688],simde_mm_xor_si128(c2[7846],simde_mm_xor_si128(c2[9112],simde_mm_xor_si128(c2[1721],simde_mm_xor_si128(c2[4177],simde_mm_xor_si128(c2[6057],simde_mm_xor_si128(c2[1128],simde_mm_xor_si128(c2[9750],simde_mm_xor_si128(c2[1777],simde_mm_xor_si128(c2[7310],simde_mm_xor_si128(c2[7308],simde_mm_xor_si128(c2[3031],simde_mm_xor_si128(c2[4262],simde_mm_xor_si128(c2[6722],simde_mm_xor_si128(c2[8601],simde_mm_xor_si128(c2[4912],c2[6758])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 45
+     d2[630]=simde_mm_xor_si128(c2[4967],simde_mm_xor_si128(c2[5719],c2[8289]));
+  }
+}
+#endif
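
The hunks above and below apply one mechanical substitution across the generated LDPC encoders: `__m128i` becomes `simde__m128i` and `_mm_xor_si128` becomes `simde_mm_xor_si128`, so the 128-bit XOR chains compile through SIMDE on non-x86 hosts instead of requiring the SSE2 headers. A minimal sketch of the pattern follows (illustrative only, not part of the patch: it includes SIMDE's SSE2 compatibility header directly rather than going through PHY/sse_intrin.h, and the xor_block_128 helper is hypothetical):

#include <stdint.h>
#include <simde/x86/sse2.h>

/* XOR two 16-byte blocks, the same operation the generated
 * d2[...] = simde_mm_xor_si128(...) chains perform element-wise.
 * On x86, SIMDE lowers these calls to the native SSE2 instructions;
 * on armv7l/aarch64 they are emulated, typically via NEON. */
static inline void xor_block_128(const uint8_t *a, const uint8_t *b, uint8_t *out)
{
  simde__m128i va = simde_mm_loadu_si128((const simde__m128i *)a);
  simde__m128i vb = simde_mm_loadu_si128((const simde__m128i *)b);
  simde_mm_storeu_si128((simde__m128i *)out, simde_mm_xor_si128(va, vb));
}

For an integer operation like XOR the portable SIMDE implementation is exact, so the rename is behavior-preserving and the XOR schedule of the generated code is untouched, as the diffs below confirm.
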
diff --git a/openair1/PHY/CODING/nrLDPC_encoder/ldpc240_byte.c b/openair1/PHY/CODING/nrLDPC_encoder/ldpc240_byte.c
index 522a80611765a21285fd7ef62bba56c34db6f056..b65d1f0ad567a853ca78fba607ef4e24acc5edb4 100644
--- a/openair1/PHY/CODING/nrLDPC_encoder/ldpc240_byte.c
+++ b/openair1/PHY/CODING/nrLDPC_encoder/ldpc240_byte.c
@@ -1,9 +1,9 @@
 #include "PHY/sse_intrin.h"
 // generated code for Zc=240, byte encoding
 static inline void ldpc240_byte(uint8_t *c,uint8_t *d) {
-  __m128i *csimd=(__m128i *)c,*dsimd=(__m128i *)d;
+  simde__m128i *csimd=(simde__m128i *)c,*dsimd=(simde__m128i *)d;
 
-  __m128i *c2,*d2;
+  simde__m128i *c2,*d2;
 
   int i2;
   for (i2=0; i2<15; i2++) {
@@ -11,141 +11,141 @@ static inline void ldpc240_byte(uint8_t *c,uint8_t *d) {
      d2=&dsimd[i2];
 
 //row: 0
-     d2[0]=_mm_xor_si128(c2[4628],_mm_xor_si128(c2[6],_mm_xor_si128(c2[8591],_mm_xor_si128(c2[8],_mm_xor_si128(c2[2024],_mm_xor_si128(c2[2670],_mm_xor_si128(c2[4651],_mm_xor_si128(c2[9307],_mm_xor_si128(c2[7994],_mm_xor_si128(c2[734],_mm_xor_si128(c2[4058],_mm_xor_si128(c2[5378],_mm_xor_si128(c2[1420],_mm_xor_si128(c2[8713],_mm_xor_si128(c2[4749],_mm_xor_si128(c2[8053],_mm_xor_si128(c2[2795],_mm_xor_si128(c2[158],_mm_xor_si128(c2[8084],_mm_xor_si128(c2[2165],_mm_xor_si128(c2[3487],_mm_xor_si128(c2[7442],_mm_xor_si128(c2[8135],_mm_xor_si128(c2[2201],_mm_xor_si128(c2[6821],_mm_xor_si128(c2[8170],_mm_xor_si128(c2[8165],_mm_xor_si128(c2[246],_mm_xor_si128(c2[3573],_mm_xor_si128(c2[5553],_mm_xor_si128(c2[5551],_mm_xor_si128(c2[974],_mm_xor_si128(c2[2944],_mm_xor_si128(c2[960],_mm_xor_si128(c2[8922],_mm_xor_si128(c2[7590],_mm_xor_si128(c2[5623],_mm_xor_si128(c2[368],_mm_xor_si128(c2[8951],_mm_xor_si128(c2[4321],_mm_xor_si128(c2[7654],_mm_xor_si128(c2[4350],_mm_xor_si128(c2[5671],_mm_xor_si128(c2[10325],_mm_xor_si128(c2[3726],_mm_xor_si128(c2[5050],_mm_xor_si128(c2[5078],_mm_xor_si128(c2[3755],_mm_xor_si128(c2[1112],_mm_xor_si128(c2[6433],_mm_xor_si128(c2[6429],_mm_xor_si128(c2[5772],_mm_xor_si128(c2[5135],_mm_xor_si128(c2[516],_mm_xor_si128(c2[512],_mm_xor_si128(c2[8473],_mm_xor_si128(c2[9127],_mm_xor_si128(c2[7814],_mm_xor_si128(c2[7175],_mm_xor_si128(c2[2560],_mm_xor_si128(c2[2554],_mm_xor_si128(c2[6546],_mm_xor_si128(c2[4574],_mm_xor_si128(c2[8530],_mm_xor_si128(c2[6578],_mm_xor_si128(c2[5923],c2[7903]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+     d2[0]=simde_mm_xor_si128(c2[4628],simde_mm_xor_si128(c2[6],simde_mm_xor_si128(c2[8591],simde_mm_xor_si128(c2[8],simde_mm_xor_si128(c2[2024],simde_mm_xor_si128(c2[2670],simde_mm_xor_si128(c2[4651],simde_mm_xor_si128(c2[9307],simde_mm_xor_si128(c2[7994],simde_mm_xor_si128(c2[734],simde_mm_xor_si128(c2[4058],simde_mm_xor_si128(c2[5378],simde_mm_xor_si128(c2[1420],simde_mm_xor_si128(c2[8713],simde_mm_xor_si128(c2[4749],simde_mm_xor_si128(c2[8053],simde_mm_xor_si128(c2[2795],simde_mm_xor_si128(c2[158],simde_mm_xor_si128(c2[8084],simde_mm_xor_si128(c2[2165],simde_mm_xor_si128(c2[3487],simde_mm_xor_si128(c2[7442],simde_mm_xor_si128(c2[8135],simde_mm_xor_si128(c2[2201],simde_mm_xor_si128(c2[6821],simde_mm_xor_si128(c2[8170],simde_mm_xor_si128(c2[8165],simde_mm_xor_si128(c2[246],simde_mm_xor_si128(c2[3573],simde_mm_xor_si128(c2[5553],simde_mm_xor_si128(c2[5551],simde_mm_xor_si128(c2[974],simde_mm_xor_si128(c2[2944],simde_mm_xor_si128(c2[960],simde_mm_xor_si128(c2[8922],simde_mm_xor_si128(c2[7590],simde_mm_xor_si128(c2[5623],simde_mm_xor_si128(c2[368],simde_mm_xor_si128(c2[8951],simde_mm_xor_si128(c2[4321],simde_mm_xor_si128(c2[7654],simde_mm_xor_si128(c2[4350],simde_mm_xor_si128(c2[5671],simde_mm_xor_si128(c2[10325],simde_mm_xor_si128(c2[3726],simde_mm_xor_si128(c2[5050],simde_mm_xor_si128(c2[5078],simde_mm_xor_si128(c2[3755],simde_mm_xor_si128(c2[1112],simde_mm_xor_si128(c2[6433],simde_mm_xor_si128(c2[6429],simde_mm_xor_si128(c2[5772],simde_mm_xor_si128(c2[5135],simde_mm_xor_si128(c2[516],simde_mm_xor_si128(c2[512],simde_mm_xor_si128(c2[8473],simde_mm_xor_si128(c2[9127],simde_mm_xor_si128(c2[7814],simde_mm_xor_si128(c2[7175],simde_mm_xor_si128(c2[2560],simde_mm_xor_si128(c2[2554],simde_mm_xor_si128(c2[6546],simde_mm_xor_si128(c2[4574],simde_mm_xor_si128(c2[8530],simde_mm_xor_si128(c2[6578],simde_mm_xor_si128(c2[5923],c2[7903]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
 
 //row: 1
-     d2[15]=_mm_xor_si128(c2[4628],_mm_xor_si128(c2[5288],_mm_xor_si128(c2[666],_mm_xor_si128(c2[9251],_mm_xor_si128(c2[668],_mm_xor_si128(c2[2024],_mm_xor_si128(c2[2684],_mm_xor_si128(c2[3330],_mm_xor_si128(c2[5311],_mm_xor_si128(c2[9307],_mm_xor_si128(c2[9967],_mm_xor_si128(c2[8654],_mm_xor_si128(c2[1394],_mm_xor_si128(c2[4058],_mm_xor_si128(c2[4718],_mm_xor_si128(c2[6038],_mm_xor_si128(c2[2080],_mm_xor_si128(c2[9373],_mm_xor_si128(c2[5409],_mm_xor_si128(c2[8713],_mm_xor_si128(c2[2795],_mm_xor_si128(c2[3455],_mm_xor_si128(c2[818],_mm_xor_si128(c2[8744],_mm_xor_si128(c2[2165],_mm_xor_si128(c2[2825],_mm_xor_si128(c2[4147],_mm_xor_si128(c2[8102],_mm_xor_si128(c2[8795],_mm_xor_si128(c2[2861],_mm_xor_si128(c2[7481],_mm_xor_si128(c2[8830],_mm_xor_si128(c2[8825],_mm_xor_si128(c2[906],_mm_xor_si128(c2[3573],_mm_xor_si128(c2[4233],_mm_xor_si128(c2[6213],_mm_xor_si128(c2[6211],_mm_xor_si128(c2[974],_mm_xor_si128(c2[1634],_mm_xor_si128(c2[3604],_mm_xor_si128(c2[1620],_mm_xor_si128(c2[8922],_mm_xor_si128(c2[9582],_mm_xor_si128(c2[8250],_mm_xor_si128(c2[6283],_mm_xor_si128(c2[368],_mm_xor_si128(c2[1028],_mm_xor_si128(c2[9611],_mm_xor_si128(c2[4981],_mm_xor_si128(c2[7654],_mm_xor_si128(c2[8314],_mm_xor_si128(c2[5010],_mm_xor_si128(c2[6331],_mm_xor_si128(c2[426],_mm_xor_si128(c2[4386],_mm_xor_si128(c2[5710],_mm_xor_si128(c2[5078],_mm_xor_si128(c2[5738],_mm_xor_si128(c2[4415],_mm_xor_si128(c2[1772],_mm_xor_si128(c2[6433],_mm_xor_si128(c2[7093],_mm_xor_si128(c2[7089],_mm_xor_si128(c2[6432],_mm_xor_si128(c2[5795],_mm_xor_si128(c2[1176],_mm_xor_si128(c2[1172],_mm_xor_si128(c2[8473],_mm_xor_si128(c2[9133],_mm_xor_si128(c2[9787],_mm_xor_si128(c2[8474],_mm_xor_si128(c2[7175],_mm_xor_si128(c2[7835],_mm_xor_si128(c2[3220],_mm_xor_si128(c2[3214],_mm_xor_si128(c2[6546],_mm_xor_si128(c2[7206],_mm_xor_si128(c2[5234],_mm_xor_si128(c2[9190],_mm_xor_si128(c2[6578],_mm_xor_si128(c2[7238],_mm_xor_si128(c2[6583],c2[8563])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+     d2[15]=simde_mm_xor_si128(c2[4628],simde_mm_xor_si128(c2[5288],simde_mm_xor_si128(c2[666],simde_mm_xor_si128(c2[9251],simde_mm_xor_si128(c2[668],simde_mm_xor_si128(c2[2024],simde_mm_xor_si128(c2[2684],simde_mm_xor_si128(c2[3330],simde_mm_xor_si128(c2[5311],simde_mm_xor_si128(c2[9307],simde_mm_xor_si128(c2[9967],simde_mm_xor_si128(c2[8654],simde_mm_xor_si128(c2[1394],simde_mm_xor_si128(c2[4058],simde_mm_xor_si128(c2[4718],simde_mm_xor_si128(c2[6038],simde_mm_xor_si128(c2[2080],simde_mm_xor_si128(c2[9373],simde_mm_xor_si128(c2[5409],simde_mm_xor_si128(c2[8713],simde_mm_xor_si128(c2[2795],simde_mm_xor_si128(c2[3455],simde_mm_xor_si128(c2[818],simde_mm_xor_si128(c2[8744],simde_mm_xor_si128(c2[2165],simde_mm_xor_si128(c2[2825],simde_mm_xor_si128(c2[4147],simde_mm_xor_si128(c2[8102],simde_mm_xor_si128(c2[8795],simde_mm_xor_si128(c2[2861],simde_mm_xor_si128(c2[7481],simde_mm_xor_si128(c2[8830],simde_mm_xor_si128(c2[8825],simde_mm_xor_si128(c2[906],simde_mm_xor_si128(c2[3573],simde_mm_xor_si128(c2[4233],simde_mm_xor_si128(c2[6213],simde_mm_xor_si128(c2[6211],simde_mm_xor_si128(c2[974],simde_mm_xor_si128(c2[1634],simde_mm_xor_si128(c2[3604],simde_mm_xor_si128(c2[1620],simde_mm_xor_si128(c2[8922],simde_mm_xor_si128(c2[9582],simde_mm_xor_si128(c2[8250],simde_mm_xor_si128(c2[6283],simde_mm_xor_si128(c2[368],simde_mm_xor_si128(c2[1028],simde_mm_xor_si128(c2[9611],simde_mm_xor_si128(c2[4981],simde_mm_xor_si128(c2[7654],simde_mm_xor_si128(c2[8314],simde_mm_xor_si128(c2[5010],simde_mm_xor_si128(c2[6331],simde_mm_xor_si128(c2[426],simde_mm_xor_si128(c2[4386],simde_mm_xor_si128(c2[5710],simde_mm_xor_si128(c2[5078],simde_mm_xor_si128(c2[5738],simde_mm_xor_si128(c2[4415],simde_mm_xor_si128(c2[1772],simde_mm_xor_si128(c2[6433],simde_mm_xor_si128(c2[7093],simde_mm_xor_si128(c2[7089],simde_mm_xor_si128(c2[6432],simde_mm_xor_si128(c2[5795],simde_mm_xor_si128(c2[1176],simde_mm_xor_si128(c2[1172],simde_mm_xor_si128(c2[8473],simde_mm_xor_si128(c2[9133],simde_mm_xor_si128(c2[9787],simde_mm_xor_si128(c2[8474],simde_mm_xor_si128(c2[7175],simde_mm_xor_si128(c2[7835],simde_mm_xor_si128(c2[3220],simde_mm_xor_si128(c2[3214],simde_mm_xor_si128(c2[6546],simde_mm_xor_si128(c2[7206],simde_mm_xor_si128(c2[5234],simde_mm_xor_si128(c2[9190],simde_mm_xor_si128(c2[6578],simde_mm_xor_si128(c2[7238],simde_mm_xor_si128(c2[6583],c2[8563])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
 
 //row: 2
-     d2[30]=_mm_xor_si128(c2[5288],_mm_xor_si128(c2[666],_mm_xor_si128(c2[8591],_mm_xor_si128(c2[9251],_mm_xor_si128(c2[8],_mm_xor_si128(c2[668],_mm_xor_si128(c2[2684],_mm_xor_si128(c2[2670],_mm_xor_si128(c2[3330],_mm_xor_si128(c2[4651],_mm_xor_si128(c2[5311],_mm_xor_si128(c2[9967],_mm_xor_si128(c2[8654],_mm_xor_si128(c2[734],_mm_xor_si128(c2[1394],_mm_xor_si128(c2[4718],_mm_xor_si128(c2[6038],_mm_xor_si128(c2[1420],_mm_xor_si128(c2[2080],_mm_xor_si128(c2[9373],_mm_xor_si128(c2[4749],_mm_xor_si128(c2[5409],_mm_xor_si128(c2[8053],_mm_xor_si128(c2[8713],_mm_xor_si128(c2[3455],_mm_xor_si128(c2[818],_mm_xor_si128(c2[8084],_mm_xor_si128(c2[8744],_mm_xor_si128(c2[2825],_mm_xor_si128(c2[3487],_mm_xor_si128(c2[4147],_mm_xor_si128(c2[7442],_mm_xor_si128(c2[8102],_mm_xor_si128(c2[8795],_mm_xor_si128(c2[2201],_mm_xor_si128(c2[2861],_mm_xor_si128(c2[6821],_mm_xor_si128(c2[7481],_mm_xor_si128(c2[8830],_mm_xor_si128(c2[8165],_mm_xor_si128(c2[8825],_mm_xor_si128(c2[246],_mm_xor_si128(c2[906],_mm_xor_si128(c2[4233],_mm_xor_si128(c2[6213],_mm_xor_si128(c2[5551],_mm_xor_si128(c2[6211],_mm_xor_si128(c2[1634],_mm_xor_si128(c2[2944],_mm_xor_si128(c2[3604],_mm_xor_si128(c2[960],_mm_xor_si128(c2[1620],_mm_xor_si128(c2[9582],_mm_xor_si128(c2[8250],_mm_xor_si128(c2[5623],_mm_xor_si128(c2[6283],_mm_xor_si128(c2[1028],_mm_xor_si128(c2[9611],_mm_xor_si128(c2[4321],_mm_xor_si128(c2[4981],_mm_xor_si128(c2[8314],_mm_xor_si128(c2[4350],_mm_xor_si128(c2[5010],_mm_xor_si128(c2[5671],_mm_xor_si128(c2[6331],_mm_xor_si128(c2[426],_mm_xor_si128(c2[3726],_mm_xor_si128(c2[4386],_mm_xor_si128(c2[5050],_mm_xor_si128(c2[5710],_mm_xor_si128(c2[5738],_mm_xor_si128(c2[4415],_mm_xor_si128(c2[1112],_mm_xor_si128(c2[1772],_mm_xor_si128(c2[7093],_mm_xor_si128(c2[7089],_mm_xor_si128(c2[5772],_mm_xor_si128(c2[6432],_mm_xor_si128(c2[5795],_mm_xor_si128(c2[516],_mm_xor_si128(c2[1176],_mm_xor_si128(c2[512],_mm_xor_si128(c2[1172],_mm_xor_si128(c2[9133],_mm_xor_si128(c2[9127],_mm_xor_si128(c2[9787],_mm_xor_si128(c2[7814],_mm_xor_si128(c2[8474],_mm_xor_si128(c2[7835],_mm_xor_si128(c2[3220],_mm_xor_si128(c2[2554],_mm_xor_si128(c2[3214],_mm_xor_si128(c2[7206],_mm_xor_si128(c2[4574],_mm_xor_si128(c2[5234],_mm_xor_si128(c2[8530],_mm_xor_si128(c2[9190],_mm_xor_si128(c2[7238],_mm_xor_si128(c2[6583],_mm_xor_si128(c2[7903],c2[8563]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+     d2[30]=simde_mm_xor_si128(c2[5288],simde_mm_xor_si128(c2[666],simde_mm_xor_si128(c2[8591],simde_mm_xor_si128(c2[9251],simde_mm_xor_si128(c2[8],simde_mm_xor_si128(c2[668],simde_mm_xor_si128(c2[2684],simde_mm_xor_si128(c2[2670],simde_mm_xor_si128(c2[3330],simde_mm_xor_si128(c2[4651],simde_mm_xor_si128(c2[5311],simde_mm_xor_si128(c2[9967],simde_mm_xor_si128(c2[8654],simde_mm_xor_si128(c2[734],simde_mm_xor_si128(c2[1394],simde_mm_xor_si128(c2[4718],simde_mm_xor_si128(c2[6038],simde_mm_xor_si128(c2[1420],simde_mm_xor_si128(c2[2080],simde_mm_xor_si128(c2[9373],simde_mm_xor_si128(c2[4749],simde_mm_xor_si128(c2[5409],simde_mm_xor_si128(c2[8053],simde_mm_xor_si128(c2[8713],simde_mm_xor_si128(c2[3455],simde_mm_xor_si128(c2[818],simde_mm_xor_si128(c2[8084],simde_mm_xor_si128(c2[8744],simde_mm_xor_si128(c2[2825],simde_mm_xor_si128(c2[3487],simde_mm_xor_si128(c2[4147],simde_mm_xor_si128(c2[7442],simde_mm_xor_si128(c2[8102],simde_mm_xor_si128(c2[8795],simde_mm_xor_si128(c2[2201],simde_mm_xor_si128(c2[2861],simde_mm_xor_si128(c2[6821],simde_mm_xor_si128(c2[7481],simde_mm_xor_si128(c2[8830],simde_mm_xor_si128(c2[8165],simde_mm_xor_si128(c2[8825],simde_mm_xor_si128(c2[246],simde_mm_xor_si128(c2[906],simde_mm_xor_si128(c2[4233],simde_mm_xor_si128(c2[6213],simde_mm_xor_si128(c2[5551],simde_mm_xor_si128(c2[6211],simde_mm_xor_si128(c2[1634],simde_mm_xor_si128(c2[2944],simde_mm_xor_si128(c2[3604],simde_mm_xor_si128(c2[960],simde_mm_xor_si128(c2[1620],simde_mm_xor_si128(c2[9582],simde_mm_xor_si128(c2[8250],simde_mm_xor_si128(c2[5623],simde_mm_xor_si128(c2[6283],simde_mm_xor_si128(c2[1028],simde_mm_xor_si128(c2[9611],simde_mm_xor_si128(c2[4321],simde_mm_xor_si128(c2[4981],simde_mm_xor_si128(c2[8314],simde_mm_xor_si128(c2[4350],simde_mm_xor_si128(c2[5010],simde_mm_xor_si128(c2[5671],simde_mm_xor_si128(c2[6331],simde_mm_xor_si128(c2[426],simde_mm_xor_si128(c2[3726],simde_mm_xor_si128(c2[4386],simde_mm_xor_si128(c2[5050],simde_mm_xor_si128(c2[5710],simde_mm_xor_si128(c2[5738],simde_mm_xor_si128(c2[4415],simde_mm_xor_si128(c2[1112],simde_mm_xor_si128(c2[1772],simde_mm_xor_si128(c2[7093],simde_mm_xor_si128(c2[7089],simde_mm_xor_si128(c2[5772],simde_mm_xor_si128(c2[6432],simde_mm_xor_si128(c2[5795],simde_mm_xor_si128(c2[516],simde_mm_xor_si128(c2[1176],simde_mm_xor_si128(c2[512],simde_mm_xor_si128(c2[1172],simde_mm_xor_si128(c2[9133],simde_mm_xor_si128(c2[9127],simde_mm_xor_si128(c2[9787],simde_mm_xor_si128(c2[7814],simde_mm_xor_si128(c2[8474],simde_mm_xor_si128(c2[7835],simde_mm_xor_si128(c2[3220],simde_mm_xor_si128(c2[2554],simde_mm_xor_si128(c2[3214],simde_mm_xor_si128(c2[7206],simde_mm_xor_si128(c2[4574],simde_mm_xor_si128(c2[5234],simde_mm_xor_si128(c2[8530],simde_mm_xor_si128(c2[9190],simde_mm_xor_si128(c2[7238],simde_mm_xor_si128(c2[6583],simde_mm_xor_si128(c2[7903],c2[8563]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
 
 //row: 3
-     d2[45]=_mm_xor_si128(c2[5288],_mm_xor_si128(c2[666],_mm_xor_si128(c2[9251],_mm_xor_si128(c2[8],_mm_xor_si128(c2[668],_mm_xor_si128(c2[2684],_mm_xor_si128(c2[3330],_mm_xor_si128(c2[4651],_mm_xor_si128(c2[5311],_mm_xor_si128(c2[9967],_mm_xor_si128(c2[8654],_mm_xor_si128(c2[1394],_mm_xor_si128(c2[4718],_mm_xor_si128(c2[6038],_mm_xor_si128(c2[1420],_mm_xor_si128(c2[2080],_mm_xor_si128(c2[9373],_mm_xor_si128(c2[5409],_mm_xor_si128(c2[8053],_mm_xor_si128(c2[8713],_mm_xor_si128(c2[3455],_mm_xor_si128(c2[818],_mm_xor_si128(c2[8744],_mm_xor_si128(c2[2825],_mm_xor_si128(c2[4147],_mm_xor_si128(c2[7442],_mm_xor_si128(c2[8102],_mm_xor_si128(c2[8795],_mm_xor_si128(c2[2861],_mm_xor_si128(c2[6821],_mm_xor_si128(c2[7481],_mm_xor_si128(c2[8830],_mm_xor_si128(c2[8825],_mm_xor_si128(c2[246],_mm_xor_si128(c2[906],_mm_xor_si128(c2[4233],_mm_xor_si128(c2[6213],_mm_xor_si128(c2[6211],_mm_xor_si128(c2[1634],_mm_xor_si128(c2[3604],_mm_xor_si128(c2[960],_mm_xor_si128(c2[1620],_mm_xor_si128(c2[9582],_mm_xor_si128(c2[8250],_mm_xor_si128(c2[5623],_mm_xor_si128(c2[6283],_mm_xor_si128(c2[1028],_mm_xor_si128(c2[9611],_mm_xor_si128(c2[4321],_mm_xor_si128(c2[4981],_mm_xor_si128(c2[8314],_mm_xor_si128(c2[5010],_mm_xor_si128(c2[5671],_mm_xor_si128(c2[6331],_mm_xor_si128(c2[426],_mm_xor_si128(c2[4386],_mm_xor_si128(c2[5050],_mm_xor_si128(c2[5710],_mm_xor_si128(c2[5738],_mm_xor_si128(c2[4415],_mm_xor_si128(c2[1772],_mm_xor_si128(c2[7093],_mm_xor_si128(c2[7089],_mm_xor_si128(c2[5772],_mm_xor_si128(c2[6432],_mm_xor_si128(c2[5795],_mm_xor_si128(c2[1176],_mm_xor_si128(c2[512],_mm_xor_si128(c2[1172],_mm_xor_si128(c2[9133],_mm_xor_si128(c2[9787],_mm_xor_si128(c2[7814],_mm_xor_si128(c2[8474],_mm_xor_si128(c2[7835],_mm_xor_si128(c2[3220],_mm_xor_si128(c2[3214],_mm_xor_si128(c2[7206],_mm_xor_si128(c2[5234],_mm_xor_si128(c2[8530],_mm_xor_si128(c2[9190],_mm_xor_si128(c2[7238],_mm_xor_si128(c2[6583],_mm_xor_si128(c2[7903],c2[8563])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+     d2[45]=simde_mm_xor_si128(c2[5288],simde_mm_xor_si128(c2[666],simde_mm_xor_si128(c2[9251],simde_mm_xor_si128(c2[8],simde_mm_xor_si128(c2[668],simde_mm_xor_si128(c2[2684],simde_mm_xor_si128(c2[3330],simde_mm_xor_si128(c2[4651],simde_mm_xor_si128(c2[5311],simde_mm_xor_si128(c2[9967],simde_mm_xor_si128(c2[8654],simde_mm_xor_si128(c2[1394],simde_mm_xor_si128(c2[4718],simde_mm_xor_si128(c2[6038],simde_mm_xor_si128(c2[1420],simde_mm_xor_si128(c2[2080],simde_mm_xor_si128(c2[9373],simde_mm_xor_si128(c2[5409],simde_mm_xor_si128(c2[8053],simde_mm_xor_si128(c2[8713],simde_mm_xor_si128(c2[3455],simde_mm_xor_si128(c2[818],simde_mm_xor_si128(c2[8744],simde_mm_xor_si128(c2[2825],simde_mm_xor_si128(c2[4147],simde_mm_xor_si128(c2[7442],simde_mm_xor_si128(c2[8102],simde_mm_xor_si128(c2[8795],simde_mm_xor_si128(c2[2861],simde_mm_xor_si128(c2[6821],simde_mm_xor_si128(c2[7481],simde_mm_xor_si128(c2[8830],simde_mm_xor_si128(c2[8825],simde_mm_xor_si128(c2[246],simde_mm_xor_si128(c2[906],simde_mm_xor_si128(c2[4233],simde_mm_xor_si128(c2[6213],simde_mm_xor_si128(c2[6211],simde_mm_xor_si128(c2[1634],simde_mm_xor_si128(c2[3604],simde_mm_xor_si128(c2[960],simde_mm_xor_si128(c2[1620],simde_mm_xor_si128(c2[9582],simde_mm_xor_si128(c2[8250],simde_mm_xor_si128(c2[5623],simde_mm_xor_si128(c2[6283],simde_mm_xor_si128(c2[1028],simde_mm_xor_si128(c2[9611],simde_mm_xor_si128(c2[4321],simde_mm_xor_si128(c2[4981],simde_mm_xor_si128(c2[8314],simde_mm_xor_si128(c2[5010],simde_mm_xor_si128(c2[5671],simde_mm_xor_si128(c2[6331],simde_mm_xor_si128(c2[426],simde_mm_xor_si128(c2[4386],simde_mm_xor_si128(c2[5050],simde_mm_xor_si128(c2[5710],simde_mm_xor_si128(c2[5738],simde_mm_xor_si128(c2[4415],simde_mm_xor_si128(c2[1772],simde_mm_xor_si128(c2[7093],simde_mm_xor_si128(c2[7089],simde_mm_xor_si128(c2[5772],simde_mm_xor_si128(c2[6432],simde_mm_xor_si128(c2[5795],simde_mm_xor_si128(c2[1176],simde_mm_xor_si128(c2[512],simde_mm_xor_si128(c2[1172],simde_mm_xor_si128(c2[9133],simde_mm_xor_si128(c2[9787],simde_mm_xor_si128(c2[7814],simde_mm_xor_si128(c2[8474],simde_mm_xor_si128(c2[7835],simde_mm_xor_si128(c2[3220],simde_mm_xor_si128(c2[3214],simde_mm_xor_si128(c2[7206],simde_mm_xor_si128(c2[5234],simde_mm_xor_si128(c2[8530],simde_mm_xor_si128(c2[9190],simde_mm_xor_si128(c2[7238],simde_mm_xor_si128(c2[6583],simde_mm_xor_si128(c2[7903],c2[8563])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
 
 //row: 4
-     d2[60]=_mm_xor_si128(c2[4],c2[2023]);
+     d2[60]=simde_mm_xor_si128(c2[4],c2[2023]);
 
 //row: 5
-     d2[75]=_mm_xor_si128(c2[7264],_mm_xor_si128(c2[2642],_mm_xor_si128(c2[668],_mm_xor_si128(c2[2644],_mm_xor_si128(c2[1320],_mm_xor_si128(c2[4660],_mm_xor_si128(c2[5321],_mm_xor_si128(c2[7302],_mm_xor_si128(c2[7300],_mm_xor_si128(c2[1384],_mm_xor_si128(c2[71],_mm_xor_si128(c2[3370],_mm_xor_si128(c2[6694],_mm_xor_si128(c2[8014],_mm_xor_si128(c2[4056],_mm_xor_si128(c2[9992],_mm_xor_si128(c2[790],_mm_xor_si128(c2[7385],_mm_xor_si128(c2[130],_mm_xor_si128(c2[5431],_mm_xor_si128(c2[2794],_mm_xor_si128(c2[161],_mm_xor_si128(c2[4801],_mm_xor_si128(c2[6123],_mm_xor_si128(c2[10093],_mm_xor_si128(c2[212],_mm_xor_si128(c2[4837],_mm_xor_si128(c2[9457],_mm_xor_si128(c2[247],_mm_xor_si128(c2[242],_mm_xor_si128(c2[2882],_mm_xor_si128(c2[6224],_mm_xor_si128(c2[8204],_mm_xor_si128(c2[8202],_mm_xor_si128(c2[3610],_mm_xor_si128(c2[5580],_mm_xor_si128(c2[3611],_mm_xor_si128(c2[999],_mm_xor_si128(c2[10241],_mm_xor_si128(c2[8259],_mm_xor_si128(c2[3004],_mm_xor_si128(c2[1028],_mm_xor_si128(c2[6972],_mm_xor_si128(c2[10268],_mm_xor_si128(c2[10290],_mm_xor_si128(c2[7001],_mm_xor_si128(c2[8322],_mm_xor_si128(c2[2402],_mm_xor_si128(c2[6362],_mm_xor_si128(c2[7686],_mm_xor_si128(c2[7714],_mm_xor_si128(c2[6391],_mm_xor_si128(c2[3763],_mm_xor_si128(c2[9069],_mm_xor_si128(c2[9065],_mm_xor_si128(c2[8408],_mm_xor_si128(c2[1813],_mm_xor_si128(c2[7771],_mm_xor_si128(c2[3152],_mm_xor_si128(c2[3163],_mm_xor_si128(c2[550],_mm_xor_si128(c2[1204],_mm_xor_si128(c2[10450],_mm_xor_si128(c2[9811],_mm_xor_si128(c2[5196],_mm_xor_si128(c2[5190],_mm_xor_si128(c2[9182],_mm_xor_si128(c2[7210],_mm_xor_si128(c2[607],_mm_xor_si128(c2[9214],_mm_xor_si128(c2[8559],_mm_xor_si128(c2[10539],c2[3281]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+     d2[75]=simde_mm_xor_si128(c2[7264],simde_mm_xor_si128(c2[2642],simde_mm_xor_si128(c2[668],simde_mm_xor_si128(c2[2644],simde_mm_xor_si128(c2[1320],simde_mm_xor_si128(c2[4660],simde_mm_xor_si128(c2[5321],simde_mm_xor_si128(c2[7302],simde_mm_xor_si128(c2[7300],simde_mm_xor_si128(c2[1384],simde_mm_xor_si128(c2[71],simde_mm_xor_si128(c2[3370],simde_mm_xor_si128(c2[6694],simde_mm_xor_si128(c2[8014],simde_mm_xor_si128(c2[4056],simde_mm_xor_si128(c2[9992],simde_mm_xor_si128(c2[790],simde_mm_xor_si128(c2[7385],simde_mm_xor_si128(c2[130],simde_mm_xor_si128(c2[5431],simde_mm_xor_si128(c2[2794],simde_mm_xor_si128(c2[161],simde_mm_xor_si128(c2[4801],simde_mm_xor_si128(c2[6123],simde_mm_xor_si128(c2[10093],simde_mm_xor_si128(c2[212],simde_mm_xor_si128(c2[4837],simde_mm_xor_si128(c2[9457],simde_mm_xor_si128(c2[247],simde_mm_xor_si128(c2[242],simde_mm_xor_si128(c2[2882],simde_mm_xor_si128(c2[6224],simde_mm_xor_si128(c2[8204],simde_mm_xor_si128(c2[8202],simde_mm_xor_si128(c2[3610],simde_mm_xor_si128(c2[5580],simde_mm_xor_si128(c2[3611],simde_mm_xor_si128(c2[999],simde_mm_xor_si128(c2[10241],simde_mm_xor_si128(c2[8259],simde_mm_xor_si128(c2[3004],simde_mm_xor_si128(c2[1028],simde_mm_xor_si128(c2[6972],simde_mm_xor_si128(c2[10268],simde_mm_xor_si128(c2[10290],simde_mm_xor_si128(c2[7001],simde_mm_xor_si128(c2[8322],simde_mm_xor_si128(c2[2402],simde_mm_xor_si128(c2[6362],simde_mm_xor_si128(c2[7686],simde_mm_xor_si128(c2[7714],simde_mm_xor_si128(c2[6391],simde_mm_xor_si128(c2[3763],simde_mm_xor_si128(c2[9069],simde_mm_xor_si128(c2[9065],simde_mm_xor_si128(c2[8408],simde_mm_xor_si128(c2[1813],simde_mm_xor_si128(c2[7771],simde_mm_xor_si128(c2[3152],simde_mm_xor_si128(c2[3163],simde_mm_xor_si128(c2[550],simde_mm_xor_si128(c2[1204],simde_mm_xor_si128(c2[10450],simde_mm_xor_si128(c2[9811],simde_mm_xor_si128(c2[5196],simde_mm_xor_si128(c2[5190],simde_mm_xor_si128(c2[9182],simde_mm_xor_si128(c2[7210],simde_mm_xor_si128(c2[607],simde_mm_xor_si128(c2[9214],simde_mm_xor_si128(c2[8559],simde_mm_xor_si128(c2[10539],c2[3281]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
 
 //row: 6
-     d2[90]=_mm_xor_si128(c2[4632],_mm_xor_si128(c2[4141],_mm_xor_si128(c2[4921],_mm_xor_si128(c2[2976],_mm_xor_si128(c2[8315],_mm_xor_si128(c2[10422],_mm_xor_si128(c2[3183],c2[9180])))))));
+     d2[90]=simde_mm_xor_si128(c2[4632],simde_mm_xor_si128(c2[4141],simde_mm_xor_si128(c2[4921],simde_mm_xor_si128(c2[2976],simde_mm_xor_si128(c2[8315],simde_mm_xor_si128(c2[10422],simde_mm_xor_si128(c2[3183],c2[9180])))))));
 
 //row: 7
-     d2[105]=_mm_xor_si128(c2[8584],_mm_xor_si128(c2[1359],_mm_xor_si128(c2[793],_mm_xor_si128(c2[212],_mm_xor_si128(c2[4210],c2[1741])))));
+     d2[105]=simde_mm_xor_si128(c2[8584],simde_mm_xor_si128(c2[1359],simde_mm_xor_si128(c2[793],simde_mm_xor_si128(c2[212],simde_mm_xor_si128(c2[4210],c2[1741])))));
 
 //row: 8
-     d2[120]=_mm_xor_si128(c2[3],_mm_xor_si128(c2[9250],_mm_xor_si128(c2[5940],_mm_xor_si128(c2[4628],_mm_xor_si128(c2[3966],_mm_xor_si128(c2[1994],_mm_xor_si128(c2[2654],_mm_xor_si128(c2[5942],_mm_xor_si128(c2[3970],_mm_xor_si128(c2[4630],_mm_xor_si128(c2[3311],_mm_xor_si128(c2[7958],_mm_xor_si128(c2[6631],_mm_xor_si128(c2[8619],_mm_xor_si128(c2[6632],_mm_xor_si128(c2[7292],_mm_xor_si128(c2[41],_mm_xor_si128(c2[8613],_mm_xor_si128(c2[9273],_mm_xor_si128(c2[5976],_mm_xor_si128(c2[4682],_mm_xor_si128(c2[3370],_mm_xor_si128(c2[3369],_mm_xor_si128(c2[2042],_mm_xor_si128(c2[6668],_mm_xor_si128(c2[4681],_mm_xor_si128(c2[5341],_mm_xor_si128(c2[9992],_mm_xor_si128(c2[8680],_mm_xor_si128(c2[753],_mm_xor_si128(c2[10000],_mm_xor_si128(c2[7354],_mm_xor_si128(c2[5382],_mm_xor_si128(c2[6042],_mm_xor_si128(c2[8678],_mm_xor_si128(c2[4088],_mm_xor_si128(c2[2761],_mm_xor_si128(c2[124],_mm_xor_si128(c2[8711],_mm_xor_si128(c2[9371],_mm_xor_si128(c2[3428],_mm_xor_si128(c2[1441],_mm_xor_si128(c2[2101],_mm_xor_si128(c2[8744],_mm_xor_si128(c2[7417],_mm_xor_si128(c2[6092],_mm_xor_si128(c2[4780],_mm_xor_si128(c2[3459],_mm_xor_si128(c2[1472],_mm_xor_si128(c2[2132],_mm_xor_si128(c2[8114],_mm_xor_si128(c2[6787],_mm_xor_si128(c2[9421],_mm_xor_si128(c2[7449],_mm_xor_si128(c2[8109],_mm_xor_si128(c2[2832],_mm_xor_si128(c2[845],_mm_xor_si128(c2[1505],_mm_xor_si128(c2[3510],_mm_xor_si128(c2[2198],_mm_xor_si128(c2[8135],_mm_xor_si128(c2[6163],_mm_xor_si128(c2[6823],_mm_xor_si128(c2[2196],_mm_xor_si128(c2[224],_mm_xor_si128(c2[884],_mm_xor_si128(c2[3545],_mm_xor_si128(c2[2233],_mm_xor_si128(c2[3540],_mm_xor_si128(c2[1568],_mm_xor_si128(c2[2228],_mm_xor_si128(c2[6180],_mm_xor_si128(c2[4208],_mm_xor_si128(c2[4868],_mm_xor_si128(c2[9522],_mm_xor_si128(c2[8195],_mm_xor_si128(c2[943],_mm_xor_si128(c2[10175],_mm_xor_si128(c2[941],_mm_xor_si128(c2[9513],_mm_xor_si128(c2[10173],_mm_xor_si128(c2[6908],_mm_xor_si128(c2[5581],_mm_xor_si128(c2[8893],_mm_xor_si128(c2[6906],_mm_xor_si128(c2[7566],_mm_xor_si128(c2[6909],_mm_xor_si128(c2[4922],_mm_xor_si128(c2[5582],_mm_xor_si128(c2[4297],_mm_xor_si128(c2[2970],_mm_xor_si128(c2[2980],_mm_xor_si128(c2[1653],_mm_xor_si128(c2[998],_mm_xor_si128(c2[9570],_mm_xor_si128(c2[10230],_mm_xor_si128(c2[6302],_mm_xor_si128(c2[4990],_mm_xor_si128(c2[4326],_mm_xor_si128(c2[3014],_mm_xor_si128(c2[10270],_mm_xor_si128(c2[8283],_mm_xor_si128(c2[8943],_mm_xor_si128(c2[10273],_mm_xor_si128(c2[3044],_mm_xor_si128(c2[1717],_mm_xor_si128(c2[10299],_mm_xor_si128(c2[8312],_mm_xor_si128(c2[8972],_mm_xor_si128(c2[1061],_mm_xor_si128(c2[9633],_mm_xor_si128(c2[10293],_mm_xor_si128(c2[5700],_mm_xor_si128(c2[4388],_mm_xor_si128(c2[9660],_mm_xor_si128(c2[7688],_mm_xor_si128(c2[8348],_mm_xor_si128(c2[425],_mm_xor_si128(c2[9012],_mm_xor_si128(c2[9672],_mm_xor_si128(c2[453],_mm_xor_si128(c2[9700],_mm_xor_si128(c2[9704],_mm_xor_si128(c2[8377],_mm_xor_si128(c2[7061],_mm_xor_si128(c2[5074],_mm_xor_si128(c2[5734],_mm_xor_si128(c2[1808],_mm_xor_si128(c2[481],_mm_xor_si128(c2[1804],_mm_xor_si128(c2[492],_mm_xor_si128(c2[1147],_mm_xor_si128(c2[9734],_mm_xor_si128(c2[10394],_mm_xor_si128(c2[1151],_mm_xor_si128(c2[510],_mm_xor_si128(c2[9757],_mm_xor_si128(c2[6450],_mm_xor_si128(c2[4478],_mm_xor_si128(c2[5138],_mm_xor_si128(c2[6461],_mm_xor_si128(c2[4474],_mm_xor_si128(c2[5134],_mm_xor_si128(c2[3848],_mm_xor_si128(c2[2521],_mm_xor_si128(c2[4502],_mm_xor_si128(c2[2530],_mm_xor_si128(c2[3190],_mm_xor_si128(c2[3189],_mm_xor_si128(c2[1202],_mm_xor_si128(c2[1862],_mm_xor_si128(c2[2550],_mm_xor_si128(c2[1238],_mm_xor_si128(c2[8494],_mm_xor_si128(c2[7182],_mm_xor_si128(c2[8503],_mm_xor_si128(c2[6516],_mm_xor_si128(c2[7176],_mm_xor_si128(c2[1239],_mm_xor_si128(c2[1921],_mm_xor_si128(c2[609],_mm_xor_si128(c2[10508],_mm_xor_si128(c2[8521],_mm_xor_si128(c2[9181],_mm_xor_si128(c2[3905],_mm_xor_si128(c2[1933],_mm_xor_si128(c2[2593],_mm_xor_si128(c2[1953],_mm_xor_si128(c2[641],_mm_xor_si128(c2[1298],_mm_xor_si128(c2[10530],_mm_xor_si128(c2[3278],_mm_xor_si128(c2[1291],_mm_xor_si128(c2[1951],c2[5262]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+     d2[120]=simde_mm_xor_si128(c2[3],simde_mm_xor_si128(c2[9250],simde_mm_xor_si128(c2[5940],simde_mm_xor_si128(c2[4628],simde_mm_xor_si128(c2[3966],simde_mm_xor_si128(c2[1994],simde_mm_xor_si128(c2[2654],simde_mm_xor_si128(c2[5942],simde_mm_xor_si128(c2[3970],simde_mm_xor_si128(c2[4630],simde_mm_xor_si128(c2[3311],simde_mm_xor_si128(c2[7958],simde_mm_xor_si128(c2[6631],simde_mm_xor_si128(c2[8619],simde_mm_xor_si128(c2[6632],simde_mm_xor_si128(c2[7292],simde_mm_xor_si128(c2[41],simde_mm_xor_si128(c2[8613],simde_mm_xor_si128(c2[9273],simde_mm_xor_si128(c2[5976],simde_mm_xor_si128(c2[4682],simde_mm_xor_si128(c2[3370],simde_mm_xor_si128(c2[3369],simde_mm_xor_si128(c2[2042],simde_mm_xor_si128(c2[6668],simde_mm_xor_si128(c2[4681],simde_mm_xor_si128(c2[5341],simde_mm_xor_si128(c2[9992],simde_mm_xor_si128(c2[8680],simde_mm_xor_si128(c2[753],simde_mm_xor_si128(c2[10000],simde_mm_xor_si128(c2[7354],simde_mm_xor_si128(c2[5382],simde_mm_xor_si128(c2[6042],simde_mm_xor_si128(c2[8678],simde_mm_xor_si128(c2[4088],simde_mm_xor_si128(c2[2761],simde_mm_xor_si128(c2[124],simde_mm_xor_si128(c2[8711],simde_mm_xor_si128(c2[9371],simde_mm_xor_si128(c2[3428],simde_mm_xor_si128(c2[1441],simde_mm_xor_si128(c2[2101],simde_mm_xor_si128(c2[8744],simde_mm_xor_si128(c2[7417],simde_mm_xor_si128(c2[6092],simde_mm_xor_si128(c2[4780],simde_mm_xor_si128(c2[3459],simde_mm_xor_si128(c2[1472],simde_mm_xor_si128(c2[2132],simde_mm_xor_si128(c2[8114],simde_mm_xor_si128(c2[6787],simde_mm_xor_si128(c2[9421],simde_mm_xor_si128(c2[7449],simde_mm_xor_si128(c2[8109],simde_mm_xor_si128(c2[2832],simde_mm_xor_si128(c2[845],simde_mm_xor_si128(c2[1505],simde_mm_xor_si128(c2[3510],simde_mm_xor_si128(c2[2198],simde_mm_xor_si128(c2[8135],simde_mm_xor_si128(c2[6163],simde_mm_xor_si128(c2[6823],simde_mm_xor_si128(c2[2196],simde_mm_xor_si128(c2[224],simde_mm_xor_si128(c2[884],simde_mm_xor_si128(c2[3545],simde_mm_xor_si128(c2[2233],simde_mm_xor_si128(c2[3540],simde_mm_xor_si128(c2[1568],simde_mm_xor_si128(c2[2228],simde_mm_xor_si128(c2[6180],simde_mm_xor_si128(c2[4208],simde_mm_xor_si128(c2[4868],simde_mm_xor_si128(c2[9522],simde_mm_xor_si128(c2[8195],simde_mm_xor_si128(c2[943],simde_mm_xor_si128(c2[10175],simde_mm_xor_si128(c2[941],simde_mm_xor_si128(c2[9513],simde_mm_xor_si128(c2[10173],simde_mm_xor_si128(c2[6908],simde_mm_xor_si128(c2[5581],simde_mm_xor_si128(c2[8893],simde_mm_xor_si128(c2[6906],simde_mm_xor_si128(c2[7566],simde_mm_xor_si128(c2[6909],simde_mm_xor_si128(c2[4922],simde_mm_xor_si128(c2[5582],simde_mm_xor_si128(c2[4297],simde_mm_xor_si128(c2[2970],simde_mm_xor_si128(c2[2980],simde_mm_xor_si128(c2[1653],simde_mm_xor_si128(c2[998],simde_mm_xor_si128(c2[9570],simde_mm_xor_si128(c2[10230],simde_mm_xor_si128(c2[6302],simde_mm_xor_si128(c2[4990],simde_mm_xor_si128(c2[4326],simde_mm_xor_si128(c2[3014],simde_mm_xor_si128(c2[10270],simde_mm_xor_si128(c2[8283],simde_mm_xor_si128(c2[8943],simde_mm_xor_si128(c2[10273],simde_mm_xor_si128(c2[3044],simde_mm_xor_si128(c2[1717],simde_mm_xor_si128(c2[10299],simde_mm_xor_si128(c2[8312],simde_mm_xor_si128(c2[8972],simde_mm_xor_si128(c2[1061],simde_mm_xor_si128(c2[9633],simde_mm_xor_si128(c2[10293],simde_mm_xor_si128(c2[5700],simde_mm_xor_si128(c2[4388],simde_mm_xor_si128(c2[9660],simde_mm_xor_si128(c2[7688],simde_mm_xor_si128(c2[8348],simde_mm_xor_si128(c2[425],simde_mm_xor_si128(c2[9012],simde_mm_xor_si128(c2[9672],simde_mm_xor_si128(c2[453],simde_mm_xor_si128(c2[9700],simde_mm_xor_si128(c2[9704],simde_mm_xor_si128(c2[8377],simde_mm_xor_si128(c2[7061],simde_mm_xor_si128(c2[5074],simde_mm_xor_si128(c2[5734],simde_mm_xor_si128(c2[1808],simde_mm_xor_si128(c2[481],simde_mm_xor_si128(c2[1804],simde_mm_xor_si128(c2[492],simde_mm_xor_si128(c2[1147],simde_mm_xor_si128(c2[9734],simde_mm_xor_si128(c2[10394],simde_mm_xor_si128(c2[1151],simde_mm_xor_si128(c2[510],simde_mm_xor_si128(c2[9757],simde_mm_xor_si128(c2[6450],simde_mm_xor_si128(c2[4478],simde_mm_xor_si128(c2[5138],simde_mm_xor_si128(c2[6461],simde_mm_xor_si128(c2[4474],simde_mm_xor_si128(c2[5134],simde_mm_xor_si128(c2[3848],simde_mm_xor_si128(c2[2521],simde_mm_xor_si128(c2[4502],simde_mm_xor_si128(c2[2530],simde_mm_xor_si128(c2[3190],simde_mm_xor_si128(c2[3189],simde_mm_xor_si128(c2[1202],simde_mm_xor_si128(c2[1862],simde_mm_xor_si128(c2[2550],simde_mm_xor_si128(c2[1238],simde_mm_xor_si128(c2[8494],simde_mm_xor_si128(c2[7182],simde_mm_xor_si128(c2[8503],simde_mm_xor_si128(c2[6516],simde_mm_xor_si128(c2[7176],simde_mm_xor_si128(c2[1239],simde_mm_xor_si128(c2[1921],simde_mm_xor_si128(c2[609],simde_mm_xor_si128(c2[10508],simde_mm_xor_si128(c2[8521],simde_mm_xor_si128(c2[9181],simde_mm_xor_si128(c2[3905],simde_mm_xor_si128(c2[1933],simde_mm_xor_si128(c2[2593],simde_mm_xor_si128(c2[1953],simde_mm_xor_si128(c2[641],simde_mm_xor_si128(c2[1298],simde_mm_xor_si128(c2[10530],simde_mm_xor_si128(c2[3278],simde_mm_xor_si128(c2[1291],simde_mm_xor_si128(c2[1951],c2[5262]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
 
 //row: 9
-     d2[135]=_mm_xor_si128(c2[5950],_mm_xor_si128(c2[7950],_mm_xor_si128(c2[9552],_mm_xor_si128(c2[8923],_mm_xor_si128(c2[1051],_mm_xor_si128(c2[3163],_mm_xor_si128(c2[8465],c2[9192])))))));
+     d2[135]=simde_mm_xor_si128(c2[5950],simde_mm_xor_si128(c2[7950],simde_mm_xor_si128(c2[9552],simde_mm_xor_si128(c2[8923],simde_mm_xor_si128(c2[1051],simde_mm_xor_si128(c2[3163],simde_mm_xor_si128(c2[8465],c2[9192])))))));
 
 //row: 10
-     d2[150]=_mm_xor_si128(c2[2677],_mm_xor_si128(c2[4689],_mm_xor_si128(c2[4084],_mm_xor_si128(c2[4184],_mm_xor_si128(c2[2227],c2[3065])))));
+     d2[150]=simde_mm_xor_si128(c2[2677],simde_mm_xor_si128(c2[4689],simde_mm_xor_si128(c2[4084],simde_mm_xor_si128(c2[4184],simde_mm_xor_si128(c2[2227],c2[3065])))));
 
 //row: 11
-     d2[165]=_mm_xor_si128(c2[7923],_mm_xor_si128(c2[1320],_mm_xor_si128(c2[1980],_mm_xor_si128(c2[3301],_mm_xor_si128(c2[7932],_mm_xor_si128(c2[1327],_mm_xor_si128(c2[5943],_mm_xor_si128(c2[3303],_mm_xor_si128(c2[7934],_mm_xor_si128(c2[8582],_mm_xor_si128(c2[5319],_mm_xor_si128(c2[9275],_mm_xor_si128(c2[9935],_mm_xor_si128(c2[5980],_mm_xor_si128(c2[37],_mm_xor_si128(c2[7961],_mm_xor_si128(c2[2018],_mm_xor_si128(c2[2017],_mm_xor_si128(c2[2043],_mm_xor_si128(c2[6014],_mm_xor_si128(c2[6674],_mm_xor_si128(c2[730],_mm_xor_si128(c2[5346],_mm_xor_si128(c2[4029],_mm_xor_si128(c2[8645],_mm_xor_si128(c2[7353],_mm_xor_si128(c2[750],_mm_xor_si128(c2[1410],_mm_xor_si128(c2[8673],_mm_xor_si128(c2[2730],_mm_xor_si128(c2[4715],_mm_xor_si128(c2[9331],_mm_xor_si128(c2[1449],_mm_xor_si128(c2[6065],_mm_xor_si128(c2[8044],_mm_xor_si128(c2[2101],_mm_xor_si128(c2[789],_mm_xor_si128(c2[5405],_mm_xor_si128(c2[6090],_mm_xor_si128(c2[10061],_mm_xor_si128(c2[162],_mm_xor_si128(c2[3453],_mm_xor_si128(c2[8084],_mm_xor_si128(c2[820],_mm_xor_si128(c2[5436],_mm_xor_si128(c2[5460],_mm_xor_si128(c2[9431],_mm_xor_si128(c2[10091],_mm_xor_si128(c2[6782],_mm_xor_si128(c2[854],_mm_xor_si128(c2[193],_mm_xor_si128(c2[4809],_mm_xor_si128(c2[871],_mm_xor_si128(c2[5502],_mm_xor_si128(c2[5496],_mm_xor_si128(c2[10112],_mm_xor_si128(c2[10116],_mm_xor_si128(c2[4173],_mm_xor_si128(c2[906],_mm_xor_si128(c2[5522],_mm_xor_si128(c2[901],_mm_xor_si128(c2[5532],_mm_xor_si128(c2[3541],_mm_xor_si128(c2[8172],_mm_xor_si128(c2[6883],_mm_xor_si128(c2[280],_mm_xor_si128(c2[940],_mm_xor_si128(c2[8863],_mm_xor_si128(c2[2920],_mm_xor_si128(c2[8861],_mm_xor_si128(c2[2918],_mm_xor_si128(c2[4269],_mm_xor_si128(c2[8225],_mm_xor_si128(c2[8885],_mm_xor_si128(c2[6254],_mm_xor_si128(c2[311],_mm_xor_si128(c2[4270],_mm_xor_si128(c2[8886],_mm_xor_si128(c2[1658],_mm_xor_si128(c2[5614],_mm_xor_si128(c2[6274],_mm_xor_si128(c2[341],_mm_xor_si128(c2[4957],_mm_xor_si128(c2[8918],_mm_xor_si128(c2[2975],_mm_xor_si128(c2[3663],_mm_xor_si128(c2[7634],_mm_xor_si128(c2[8294],_mm_xor_si128(c2[1687],_mm_xor_si128(c2[6303],_mm_xor_si128(c2[7631],_mm_xor_si128(c2[1688],_mm_xor_si128(c2[4328],_mm_xor_si128(c2[390],_mm_xor_si128(c2[4361],_mm_xor_si128(c2[5021],_mm_xor_si128(c2[7660],_mm_xor_si128(c2[1717],_mm_xor_si128(c2[8981],_mm_xor_si128(c2[3038],_mm_xor_si128(c2[3061],_mm_xor_si128(c2[7692],_mm_xor_si128(c2[7021],_mm_xor_si128(c2[1093],_mm_xor_si128(c2[8345],_mm_xor_si128(c2[2402],_mm_xor_si128(c2[8373],_mm_xor_si128(c2[1770],_mm_xor_si128(c2[2430],_mm_xor_si128(c2[7050],_mm_xor_si128(c2[1122],_mm_xor_si128(c2[4422],_mm_xor_si128(c2[9038],_mm_xor_si128(c2[9728],_mm_xor_si128(c2[3125],_mm_xor_si128(c2[3785],_mm_xor_si128(c2[9724],_mm_xor_si128(c2[3781],_mm_xor_si128(c2[9067],_mm_xor_si128(c2[3124],_mm_xor_si128(c2[1140],_mm_xor_si128(c2[8430],_mm_xor_si128(c2[2502],_mm_xor_si128(c2[3811],_mm_xor_si128(c2[8442],_mm_xor_si128(c2[3822],_mm_xor_si128(c2[8438],_mm_xor_si128(c2[1209],_mm_xor_si128(c2[5165],_mm_xor_si128(c2[5825],_mm_xor_si128(c2[1863],_mm_xor_si128(c2[6494],_mm_xor_si128(c2[550],_mm_xor_si128(c2[5166],_mm_xor_si128(c2[10470],_mm_xor_si128(c2[3882],_mm_xor_si128(c2[4542],_mm_xor_si128(c2[5855],_mm_xor_si128(c2[10471],_mm_xor_si128(c2[5864],_mm_xor_si128(c2[10480],_mm_xor_si128(c2[9841],_mm_xor_si128(c2[3253],_mm_xor_si128(c2[3913],_mm_xor_si128(c2[7869],_mm_xor_si128(c2[1926],_mm_xor_si128(c2[1266],_mm_xor_si128(c2[5882],_mm_xor_si128(c2[9873],_mm_xor_si128(c2[3270],_mm_xor_si128(c2[3930],_mm_xor_si128(c2[9218],_mm_xor_si128(c2[3275],_mm_xor_si128(c2[639],_mm_xor_si128(c2[5255],c2[5919])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+     d2[165]=simde_mm_xor_si128(c2[7923],simde_mm_xor_si128(c2[1320],simde_mm_xor_si128(c2[1980],simde_mm_xor_si128(c2[3301],simde_mm_xor_si128(c2[7932],simde_mm_xor_si128(c2[1327],simde_mm_xor_si128(c2[5943],simde_mm_xor_si128(c2[3303],simde_mm_xor_si128(c2[7934],simde_mm_xor_si128(c2[8582],simde_mm_xor_si128(c2[5319],simde_mm_xor_si128(c2[9275],simde_mm_xor_si128(c2[9935],simde_mm_xor_si128(c2[5980],simde_mm_xor_si128(c2[37],simde_mm_xor_si128(c2[7961],simde_mm_xor_si128(c2[2018],simde_mm_xor_si128(c2[2017],simde_mm_xor_si128(c2[2043],simde_mm_xor_si128(c2[6014],simde_mm_xor_si128(c2[6674],simde_mm_xor_si128(c2[730],simde_mm_xor_si128(c2[5346],simde_mm_xor_si128(c2[4029],simde_mm_xor_si128(c2[8645],simde_mm_xor_si128(c2[7353],simde_mm_xor_si128(c2[750],simde_mm_xor_si128(c2[1410],simde_mm_xor_si128(c2[8673],simde_mm_xor_si128(c2[2730],simde_mm_xor_si128(c2[4715],simde_mm_xor_si128(c2[9331],simde_mm_xor_si128(c2[1449],simde_mm_xor_si128(c2[6065],simde_mm_xor_si128(c2[8044],simde_mm_xor_si128(c2[2101],simde_mm_xor_si128(c2[789],simde_mm_xor_si128(c2[5405],simde_mm_xor_si128(c2[6090],simde_mm_xor_si128(c2[10061],simde_mm_xor_si128(c2[162],simde_mm_xor_si128(c2[3453],simde_mm_xor_si128(c2[8084],simde_mm_xor_si128(c2[820],simde_mm_xor_si128(c2[5436],simde_mm_xor_si128(c2[5460],simde_mm_xor_si128(c2[9431],simde_mm_xor_si128(c2[10091],simde_mm_xor_si128(c2[6782],simde_mm_xor_si128(c2[854],simde_mm_xor_si128(c2[193],simde_mm_xor_si128(c2[4809],simde_mm_xor_si128(c2[871],simde_mm_xor_si128(c2[5502],simde_mm_xor_si128(c2[5496],simde_mm_xor_si128(c2[10112],simde_mm_xor_si128(c2[10116],simde_mm_xor_si128(c2[4173],simde_mm_xor_si128(c2[906],simde_mm_xor_si128(c2[5522],simde_mm_xor_si128(c2[901],simde_mm_xor_si128(c2[5532],simde_mm_xor_si128(c2[3541],simde_mm_xor_si128(c2[8172],simde_mm_xor_si128(c2[6883],simde_mm_xor_si128(c2[280],simde_mm_xor_si128(c2[940],simde_mm_xor_si128(c2[8863],simde_mm_xor_si128(c2[2920],simde_mm_xor_si128(c2[8861],simde_mm_xor_si128(c2[2918],simde_mm_xor_si128(c2[4269],simde_mm_xor_si128(c2[8225],simde_mm_xor_si128(c2[8885],simde_mm_xor_si128(c2[6254],simde_mm_xor_si128(c2[311],simde_mm_xor_si128(c2[4270],simde_mm_xor_si128(c2[8886],simde_mm_xor_si128(c2[1658],simde_mm_xor_si128(c2[5614],simde_mm_xor_si128(c2[6274],simde_mm_xor_si128(c2[341],simde_mm_xor_si128(c2[4957],simde_mm_xor_si128(c2[8918],simde_mm_xor_si128(c2[2975],simde_mm_xor_si128(c2[3663],simde_mm_xor_si128(c2[7634],simde_mm_xor_si128(c2[8294],simde_mm_xor_si128(c2[1687],simde_mm_xor_si128(c2[6303],simde_mm_xor_si128(c2[7631],simde_mm_xor_si128(c2[1688],simde_mm_xor_si128(c2[4328],simde_mm_xor_si128(c2[390],simde_mm_xor_si128(c2[4361],simde_mm_xor_si128(c2[5021],simde_mm_xor_si128(c2[7660],simde_mm_xor_si128(c2[1717],simde_mm_xor_si128(c2[8981],simde_mm_xor_si128(c2[3038],simde_mm_xor_si128(c2[3061],simde_mm_xor_si128(c2[7692],simde_mm_xor_si128(c2[7021],simde_mm_xor_si128(c2[1093],simde_mm_xor_si128(c2[8345],simde_mm_xor_si128(c2[2402],simde_mm_xor_si128(c2[8373],simde_mm_xor_si128(c2[1770],simde_mm_xor_si128(c2[2430],simde_mm_xor_si128(c2[7050],simde_mm_xor_si128(c2[1122],simde_mm_xor_si128(c2[4422],simde_mm_xor_si128(c2[9038],simde_mm_xor_si128(c2[9728],simde_mm_xor_si128(c2[3125],simde_mm_xor_si128(c2[3785],simde_mm_xor_si128(c2[9724],simde_mm_xor_si128(c2[3781],simde_mm_xor_si128(c2[9067],simde_mm_xor_si128(c2[3124],simde_mm_xor_si128(c2[1140],simde_mm_xor_si128(c2[8430],simde_mm_xor_si128(c2[2502],simde_mm_xor_si128(c2[3811],simde_mm_xor_si128(c2[8442],simde_mm_xor_si128(c2[3822],simde_mm_xor_si128(c2[8438],simde_mm_xor_si128(c2[1209],simde_mm_xor_si128(c2[5165],simde_mm_xor_si128(c2[5825],simde_mm_xor_si128(c2[1863],simde_mm_xor_si128(c2[6494],simde_mm_xor_si128(c2[550],simde_mm_xor_si128(c2[5166],simde_mm_xor_si128(c2[10470],simde_mm_xor_si128(c2[3882],simde_mm_xor_si128(c2[4542],simde_mm_xor_si128(c2[5855],simde_mm_xor_si128(c2[10471],simde_mm_xor_si128(c2[5864],simde_mm_xor_si128(c2[10480],simde_mm_xor_si128(c2[9841],simde_mm_xor_si128(c2[3253],simde_mm_xor_si128(c2[3913],simde_mm_xor_si128(c2[7869],simde_mm_xor_si128(c2[1926],simde_mm_xor_si128(c2[1266],simde_mm_xor_si128(c2[5882],simde_mm_xor_si128(c2[9873],simde_mm_xor_si128(c2[3270],simde_mm_xor_si128(c2[3930],simde_mm_xor_si128(c2[9218],simde_mm_xor_si128(c2[3275],simde_mm_xor_si128(c2[639],simde_mm_xor_si128(c2[5255],c2[5919])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
 
 //row: 12
-     d2[180]=_mm_xor_si128(c2[6611],_mm_xor_si128(c2[4663],_mm_xor_si128(c2[8227],_mm_xor_si128(c2[2981],_mm_xor_si128(c2[1716],c2[545])))));
+     d2[180]=simde_mm_xor_si128(c2[6611],simde_mm_xor_si128(c2[4663],simde_mm_xor_si128(c2[8227],simde_mm_xor_si128(c2[2981],simde_mm_xor_si128(c2[1716],c2[545])))));
 
 //row: 13
-     d2[195]=_mm_xor_si128(c2[8582],_mm_xor_si128(c2[9242],_mm_xor_si128(c2[4620],_mm_xor_si128(c2[2646],_mm_xor_si128(c2[4622],_mm_xor_si128(c2[7933],_mm_xor_si128(c2[5978],_mm_xor_si128(c2[6638],_mm_xor_si128(c2[7299],_mm_xor_si128(c2[9280],_mm_xor_si128(c2[2702],_mm_xor_si128(c2[3362],_mm_xor_si128(c2[2049],_mm_xor_si128(c2[5348],_mm_xor_si128(c2[8012],_mm_xor_si128(c2[8672],_mm_xor_si128(c2[9992],_mm_xor_si128(c2[6034],_mm_xor_si128(c2[6041],_mm_xor_si128(c2[2768],_mm_xor_si128(c2[9363],_mm_xor_si128(c2[2108],_mm_xor_si128(c2[6764],_mm_xor_si128(c2[7424],_mm_xor_si128(c2[4772],_mm_xor_si128(c2[2139],_mm_xor_si128(c2[6134],_mm_xor_si128(c2[6794],_mm_xor_si128(c2[8101],_mm_xor_si128(c2[1512],_mm_xor_si128(c2[2190],_mm_xor_si128(c2[6815],_mm_xor_si128(c2[876],_mm_xor_si128(c2[6819],_mm_xor_si128(c2[2225],_mm_xor_si128(c2[2220],_mm_xor_si128(c2[4860],_mm_xor_si128(c2[7542],_mm_xor_si128(c2[8202],_mm_xor_si128(c2[10182],_mm_xor_si128(c2[10180],_mm_xor_si128(c2[4928],_mm_xor_si128(c2[5588],_mm_xor_si128(c2[7573],_mm_xor_si128(c2[5589],_mm_xor_si128(c2[2317],_mm_xor_si128(c2[2977],_mm_xor_si128(c2[1660],_mm_xor_si128(c2[10237],_mm_xor_si128(c2[4322],_mm_xor_si128(c2[4982],_mm_xor_si128(c2[3006],_mm_xor_si128(c2[8950],_mm_xor_si128(c2[1064],_mm_xor_si128(c2[1724],_mm_xor_si128(c2[8979],_mm_xor_si128(c2[10300],_mm_xor_si128(c2[4380],_mm_xor_si128(c2[8340],_mm_xor_si128(c2[9664],_mm_xor_si128(c2[9032],_mm_xor_si128(c2[9692],_mm_xor_si128(c2[8384],_mm_xor_si128(c2[5741],_mm_xor_si128(c2[10387],_mm_xor_si128(c2[488],_mm_xor_si128(c2[484],_mm_xor_si128(c2[10386],_mm_xor_si128(c2[9764],_mm_xor_si128(c2[5130],_mm_xor_si128(c2[5141],_mm_xor_si128(c2[1868],_mm_xor_si128(c2[2528],_mm_xor_si128(c2[3182],_mm_xor_si128(c2[1869],_mm_xor_si128(c2[570],_mm_xor_si128(c2[1230],_mm_xor_si128(c2[7174],_mm_xor_si128(c2[7183],_mm_xor_si128(c2[10500],_mm_xor_si128(c2[601],_mm_xor_si128(c2[9188],_mm_xor_si128(c2[2585],_mm_xor_si128(c2[1931],_mm_xor_si128(c2[10532],_mm_xor_si128(c2[633],_mm_xor_si128(c2[10537],c2[1958])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+     d2[195]=simde_mm_xor_si128(c2[8582],simde_mm_xor_si128(c2[9242],simde_mm_xor_si128(c2[4620],simde_mm_xor_si128(c2[2646],simde_mm_xor_si128(c2[4622],simde_mm_xor_si128(c2[7933],simde_mm_xor_si128(c2[5978],simde_mm_xor_si128(c2[6638],simde_mm_xor_si128(c2[7299],simde_mm_xor_si128(c2[9280],simde_mm_xor_si128(c2[2702],simde_mm_xor_si128(c2[3362],simde_mm_xor_si128(c2[2049],simde_mm_xor_si128(c2[5348],simde_mm_xor_si128(c2[8012],simde_mm_xor_si128(c2[8672],simde_mm_xor_si128(c2[9992],simde_mm_xor_si128(c2[6034],simde_mm_xor_si128(c2[6041],simde_mm_xor_si128(c2[2768],simde_mm_xor_si128(c2[9363],simde_mm_xor_si128(c2[2108],simde_mm_xor_si128(c2[6764],simde_mm_xor_si128(c2[7424],simde_mm_xor_si128(c2[4772],simde_mm_xor_si128(c2[2139],simde_mm_xor_si128(c2[6134],simde_mm_xor_si128(c2[6794],simde_mm_xor_si128(c2[8101],simde_mm_xor_si128(c2[1512],simde_mm_xor_si128(c2[2190],simde_mm_xor_si128(c2[6815],simde_mm_xor_si128(c2[876],simde_mm_xor_si128(c2[6819],simde_mm_xor_si128(c2[2225],simde_mm_xor_si128(c2[2220],simde_mm_xor_si128(c2[4860],simde_mm_xor_si128(c2[7542],simde_mm_xor_si128(c2[8202],simde_mm_xor_si128(c2[10182],simde_mm_xor_si128(c2[10180],simde_mm_xor_si128(c2[4928],simde_mm_xor_si128(c2[5588],simde_mm_xor_si128(c2[7573],simde_mm_xor_si128(c2[5589],simde_mm_xor_si128(c2[2317],simde_mm_xor_si128(c2[2977],simde_mm_xor_si128(c2[1660],simde_mm_xor_si128(c2[10237],simde_mm_xor_si128(c2[4322],simde_mm_xor_si128(c2[4982],simde_mm_xor_si128(c2[3006],simde_mm_xor_si128(c2[8950],simde_mm_xor_si128(c2[1064],simde_mm_xor_si128(c2[1724],simde_mm_xor_si128(c2[8979],simde_mm_xor_si128(c2[10300],simde_mm_xor_si128(c2[4380],simde_mm_xor_si128(c2[8340],simde_mm_xor_si128(c2[9664],simde_mm_xor_si128(c2[9032],simde_mm_xor_si128(c2[9692],simde_mm_xor_si128(c2[8384],simde_mm_xor_si128(c2[5741],simde_mm_xor_si128(c2[10387],simde_mm_xor_si128(c2[488],simde_mm_xor_si128(c2[484],simde_mm_xor_si128(c2[10386],simde_mm_xor_si128(c2[9764],simde_mm_xor_si128(c2[5130],simde_mm_xor_si128(c2[5141],simde_mm_xor_si128(c2[1868],simde_mm_xor_si128(c2[2528],simde_mm_xor_si128(c2[3182],simde_mm_xor_si128(c2[1869],simde_mm_xor_si128(c2[570],simde_mm_xor_si128(c2[1230],simde_mm_xor_si128(c2[7174],simde_mm_xor_si128(c2[7183],simde_mm_xor_si128(c2[10500],simde_mm_xor_si128(c2[601],simde_mm_xor_si128(c2[9188],simde_mm_xor_si128(c2[2585],simde_mm_xor_si128(c2[1931],simde_mm_xor_si128(c2[10532],simde_mm_xor_si128(c2[633],simde_mm_xor_si128(c2[10537],c2[1958])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
 
 //row: 14
-     d2[210]=_mm_xor_si128(c2[7927],_mm_xor_si128(c2[369],_mm_xor_si128(c2[4421],_mm_xor_si128(c2[10385],_mm_xor_si128(c2[5794],c2[8554])))));
+     d2[210]=simde_mm_xor_si128(c2[7927],simde_mm_xor_si128(c2[369],simde_mm_xor_si128(c2[4421],simde_mm_xor_si128(c2[10385],simde_mm_xor_si128(c2[5794],c2[8554])))));
 
 //row: 15
-     d2[225]=_mm_xor_si128(c2[5951],_mm_xor_si128(c2[1329],_mm_xor_si128(c2[9914],_mm_xor_si128(c2[671],_mm_xor_si128(c2[1331],_mm_xor_si128(c2[4622],_mm_xor_si128(c2[3332],_mm_xor_si128(c2[3993],_mm_xor_si128(c2[5314],_mm_xor_si128(c2[5974],_mm_xor_si128(c2[6638],_mm_xor_si128(c2[71],_mm_xor_si128(c2[9302],_mm_xor_si128(c2[2042],_mm_xor_si128(c2[5381],_mm_xor_si128(c2[6701],_mm_xor_si128(c2[2083],_mm_xor_si128(c2[2743],_mm_xor_si128(c2[10021],_mm_xor_si128(c2[6072],_mm_xor_si128(c2[8701],_mm_xor_si128(c2[9361],_mm_xor_si128(c2[4118],_mm_xor_si128(c2[1481],_mm_xor_si128(c2[9392],_mm_xor_si128(c2[3488],_mm_xor_si128(c2[4810],_mm_xor_si128(c2[8105],_mm_xor_si128(c2[8765],_mm_xor_si128(c2[9458],_mm_xor_si128(c2[3524],_mm_xor_si128(c2[7484],_mm_xor_si128(c2[8144],_mm_xor_si128(c2[9493],_mm_xor_si128(c2[9488],_mm_xor_si128(c2[909],_mm_xor_si128(c2[1569],_mm_xor_si128(c2[4896],_mm_xor_si128(c2[6876],_mm_xor_si128(c2[6874],_mm_xor_si128(c2[2282],_mm_xor_si128(c2[4267],_mm_xor_si128(c2[1623],_mm_xor_si128(c2[2283],_mm_xor_si128(c2[8233],_mm_xor_si128(c2[10230],_mm_xor_si128(c2[8913],_mm_xor_si128(c2[6271],_mm_xor_si128(c2[6931],_mm_xor_si128(c2[1691],_mm_xor_si128(c2[10274],_mm_xor_si128(c2[4984],_mm_xor_si128(c2[5644],_mm_xor_si128(c2[8977],_mm_xor_si128(c2[5673],_mm_xor_si128(c2[6334],_mm_xor_si128(c2[6994],_mm_xor_si128(c2[8980],_mm_xor_si128(c2[1089],_mm_xor_si128(c2[5049],_mm_xor_si128(c2[5713],_mm_xor_si128(c2[6373],_mm_xor_si128(c2[6401],_mm_xor_si128(c2[5078],_mm_xor_si128(c2[2435],_mm_xor_si128(c2[7741],_mm_xor_si128(c2[7752],_mm_xor_si128(c2[6420],_mm_xor_si128(c2[7080],_mm_xor_si128(c2[6458],_mm_xor_si128(c2[1839],_mm_xor_si128(c2[1175],_mm_xor_si128(c2[1835],_mm_xor_si128(c2[9781],_mm_xor_si128(c2[10450],_mm_xor_si128(c2[8462],_mm_xor_si128(c2[9122],_mm_xor_si128(c2[9788],_mm_xor_si128(c2[8498],_mm_xor_si128(c2[3883],_mm_xor_si128(c2[3877],_mm_xor_si128(c2[7869],_mm_xor_si128(c2[5882],_mm_xor_si128(c2[9193],_mm_xor_si128(c2[9853],_mm_xor_si128(c2[7901],_mm_xor_si128(c2[7231],_mm_xor_si128(c2[8551],c2[9211]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+     d2[225]=simde_mm_xor_si128(c2[5951],simde_mm_xor_si128(c2[1329],simde_mm_xor_si128(c2[9914],simde_mm_xor_si128(c2[671],simde_mm_xor_si128(c2[1331],simde_mm_xor_si128(c2[4622],simde_mm_xor_si128(c2[3332],simde_mm_xor_si128(c2[3993],simde_mm_xor_si128(c2[5314],simde_mm_xor_si128(c2[5974],simde_mm_xor_si128(c2[6638],simde_mm_xor_si128(c2[71],simde_mm_xor_si128(c2[9302],simde_mm_xor_si128(c2[2042],simde_mm_xor_si128(c2[5381],simde_mm_xor_si128(c2[6701],simde_mm_xor_si128(c2[2083],simde_mm_xor_si128(c2[2743],simde_mm_xor_si128(c2[10021],simde_mm_xor_si128(c2[6072],simde_mm_xor_si128(c2[8701],simde_mm_xor_si128(c2[9361],simde_mm_xor_si128(c2[4118],simde_mm_xor_si128(c2[1481],simde_mm_xor_si128(c2[9392],simde_mm_xor_si128(c2[3488],simde_mm_xor_si128(c2[4810],simde_mm_xor_si128(c2[8105],simde_mm_xor_si128(c2[8765],simde_mm_xor_si128(c2[9458],simde_mm_xor_si128(c2[3524],simde_mm_xor_si128(c2[7484],simde_mm_xor_si128(c2[8144],simde_mm_xor_si128(c2[9493],simde_mm_xor_si128(c2[9488],simde_mm_xor_si128(c2[909],simde_mm_xor_si128(c2[1569],simde_mm_xor_si128(c2[4896],simde_mm_xor_si128(c2[6876],simde_mm_xor_si128(c2[6874],simde_mm_xor_si128(c2[2282],simde_mm_xor_si128(c2[4267],simde_mm_xor_si128(c2[1623],simde_mm_xor_si128(c2[2283],simde_mm_xor_si128(c2[8233],simde_mm_xor_si128(c2[10230],simde_mm_xor_si128(c2[8913],simde_mm_xor_si128(c2[6271],simde_mm_xor_si128(c2[6931],simde_mm_xor_si128(c2[1691],simde_mm_xor_si128(c2[10274],simde_mm_xor_si128(c2[4984],simde_mm_xor_si128(c2[5644],simde_mm_xor_si128(c2[8977],simde_mm_xor_si128(c2[5673],simde_mm_xor_si128(c2[6334],simde_mm_xor_si128(c2[6994],simde_mm_xor_si128(c2[8980],simde_mm_xor_si128(c2[1089],simde_mm_xor_si128(c2[5049],simde_mm_xor_si128(c2[5713],simde_mm_xor_si128(c2[6373],simde_mm_xor_si128(c2[6401],simde_mm_xor_si128(c2[5078],simde_mm_xor_si128(c2[2435],simde_mm_xor_si128(c2[7741],simde_mm_xor_si128(c2[7752],simde_mm_xor_si128(c2[6420],simde_mm_xor_si128(c2[7080],simde_mm_xor_si128(c2[6458],simde_mm_xor_si128(c2[1839],simde_mm_xor_si128(c2[1175],simde_mm_xor_si128(c2[1835],simde_mm_xor_si128(c2[9781],simde_mm_xor_si128(c2[10450],simde_mm_xor_si128(c2[8462],simde_mm_xor_si128(c2[9122],simde_mm_xor_si128(c2[9788],simde_mm_xor_si128(c2[8498],simde_mm_xor_si128(c2[3883],simde_mm_xor_si128(c2[3877],simde_mm_xor_si128(c2[7869],simde_mm_xor_si128(c2[5882],simde_mm_xor_si128(c2[9193],simde_mm_xor_si128(c2[9853],simde_mm_xor_si128(c2[7901],simde_mm_xor_si128(c2[7231],simde_mm_xor_si128(c2[8551],c2[9211]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
 
 //row: 16
-     d2[240]=_mm_xor_si128(c2[2646],_mm_xor_si128(c2[8583],_mm_xor_si128(c2[6609],_mm_xor_si128(c2[8585],_mm_xor_si128(c2[42],_mm_xor_si128(c2[703],_mm_xor_si128(c2[2684],_mm_xor_si128(c2[9274],_mm_xor_si128(c2[7325],_mm_xor_si128(c2[6012],_mm_xor_si128(c2[9311],_mm_xor_si128(c2[2076],_mm_xor_si128(c2[3396],_mm_xor_si128(c2[9997],_mm_xor_si128(c2[5379],_mm_xor_si128(c2[6731],_mm_xor_si128(c2[2767],_mm_xor_si128(c2[6071],_mm_xor_si128(c2[813],_mm_xor_si128(c2[8735],_mm_xor_si128(c2[6102],_mm_xor_si128(c2[183],_mm_xor_si128(c2[1505],_mm_xor_si128(c2[5460],_mm_xor_si128(c2[6153],_mm_xor_si128(c2[219],_mm_xor_si128(c2[4839],_mm_xor_si128(c2[6188],_mm_xor_si128(c2[6183],_mm_xor_si128(c2[8823],_mm_xor_si128(c2[1591],_mm_xor_si128(c2[3571],_mm_xor_si128(c2[3584],_mm_xor_si128(c2[9551],_mm_xor_si128(c2[962],_mm_xor_si128(c2[9552],_mm_xor_si128(c2[6940],_mm_xor_si128(c2[5623],_mm_xor_si128(c2[3641],_mm_xor_si128(c2[2975],_mm_xor_si128(c2[8945],_mm_xor_si128(c2[6969],_mm_xor_si128(c2[2354],_mm_xor_si128(c2[5672],_mm_xor_si128(c2[2383],_mm_xor_si128(c2[3704],_mm_xor_si128(c2[8343],_mm_xor_si128(c2[1744],_mm_xor_si128(c2[3068],_mm_xor_si128(c2[3096],_mm_xor_si128(c2[1773],_mm_xor_si128(c2[9704],_mm_xor_si128(c2[4451],_mm_xor_si128(c2[4447],_mm_xor_si128(c2[3790],_mm_xor_si128(c2[3153],_mm_xor_si128(c2[9093],_mm_xor_si128(c2[9104],_mm_xor_si128(c2[6491],_mm_xor_si128(c2[7145],_mm_xor_si128(c2[5832],_mm_xor_si128(c2[5193],_mm_xor_si128(c2[578],_mm_xor_si128(c2[572],_mm_xor_si128(c2[4564],_mm_xor_si128(c2[2592],_mm_xor_si128(c2[6548],_mm_xor_si128(c2[3900],_mm_xor_si128(c2[4596],_mm_xor_si128(c2[3941],c2[5921]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+     d2[240]=simde_mm_xor_si128(c2[2646],simde_mm_xor_si128(c2[8583],simde_mm_xor_si128(c2[6609],simde_mm_xor_si128(c2[8585],simde_mm_xor_si128(c2[42],simde_mm_xor_si128(c2[703],simde_mm_xor_si128(c2[2684],simde_mm_xor_si128(c2[9274],simde_mm_xor_si128(c2[7325],simde_mm_xor_si128(c2[6012],simde_mm_xor_si128(c2[9311],simde_mm_xor_si128(c2[2076],simde_mm_xor_si128(c2[3396],simde_mm_xor_si128(c2[9997],simde_mm_xor_si128(c2[5379],simde_mm_xor_si128(c2[6731],simde_mm_xor_si128(c2[2767],simde_mm_xor_si128(c2[6071],simde_mm_xor_si128(c2[813],simde_mm_xor_si128(c2[8735],simde_mm_xor_si128(c2[6102],simde_mm_xor_si128(c2[183],simde_mm_xor_si128(c2[1505],simde_mm_xor_si128(c2[5460],simde_mm_xor_si128(c2[6153],simde_mm_xor_si128(c2[219],simde_mm_xor_si128(c2[4839],simde_mm_xor_si128(c2[6188],simde_mm_xor_si128(c2[6183],simde_mm_xor_si128(c2[8823],simde_mm_xor_si128(c2[1591],simde_mm_xor_si128(c2[3571],simde_mm_xor_si128(c2[3584],simde_mm_xor_si128(c2[9551],simde_mm_xor_si128(c2[962],simde_mm_xor_si128(c2[9552],simde_mm_xor_si128(c2[6940],simde_mm_xor_si128(c2[5623],simde_mm_xor_si128(c2[3641],simde_mm_xor_si128(c2[2975],simde_mm_xor_si128(c2[8945],simde_mm_xor_si128(c2[6969],simde_mm_xor_si128(c2[2354],simde_mm_xor_si128(c2[5672],simde_mm_xor_si128(c2[2383],simde_mm_xor_si128(c2[3704],simde_mm_xor_si128(c2[8343],simde_mm_xor_si128(c2[1744],simde_mm_xor_si128(c2[3068],simde_mm_xor_si128(c2[3096],simde_mm_xor_si128(c2[1773],simde_mm_xor_si128(c2[9704],simde_mm_xor_si128(c2[4451],simde_mm_xor_si128(c2[4447],simde_mm_xor_si128(c2[3790],simde_mm_xor_si128(c2[3153],simde_mm_xor_si128(c2[9093],simde_mm_xor_si128(c2[9104],simde_mm_xor_si128(c2[6491],simde_mm_xor_si128(c2[7145],simde_mm_xor_si128(c2[5832],simde_mm_xor_si128(c2[5193],simde_mm_xor_si128(c2[578],simde_mm_xor_si128(c2[572],simde_mm_xor_si128(c2[4564],simde_mm_xor_si128(c2[2592],simde_mm_xor_si128(c2[6548],simde_mm_xor_si128(c2[3900],simde_mm_xor_si128(c2[4596],simde_mm_xor_si128(c2[3941],c2[5921]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
 
 //row: 17
-     d2[255]=_mm_xor_si128(c2[4631],_mm_xor_si128(c2[427],_mm_xor_si128(c2[7086],_mm_xor_si128(c2[7783],c2[1298]))));
+     d2[255]=simde_mm_xor_si128(c2[4631],simde_mm_xor_si128(c2[427],simde_mm_xor_si128(c2[7086],simde_mm_xor_si128(c2[7783],c2[1298]))));
 
 //row: 18
-     d2[270]=_mm_xor_si128(c2[4661],_mm_xor_si128(c2[4993],_mm_xor_si128(c2[3041],_mm_xor_si128(c2[10448],c2[9810]))));
+     d2[270]=simde_mm_xor_si128(c2[4661],simde_mm_xor_si128(c2[4993],simde_mm_xor_si128(c2[3041],simde_mm_xor_si128(c2[10448],c2[9810]))));
 
 //row: 19
-     d2[285]=_mm_xor_si128(c2[1991],_mm_xor_si128(c2[7956],_mm_xor_si128(c2[10119],_mm_xor_si128(c2[6848],c2[2952]))));
+     d2[285]=simde_mm_xor_si128(c2[1991],simde_mm_xor_si128(c2[7956],simde_mm_xor_si128(c2[10119],simde_mm_xor_si128(c2[6848],c2[2952]))));
 
 //row: 20
-     d2[300]=_mm_xor_si128(c2[5941],_mm_xor_si128(c2[1334],_mm_xor_si128(c2[9904],_mm_xor_si128(c2[1321],_mm_xor_si128(c2[8584],_mm_xor_si128(c2[3337],_mm_xor_si128(c2[3998],_mm_xor_si128(c2[5979],_mm_xor_si128(c2[61],_mm_xor_si128(c2[9307],_mm_xor_si128(c2[2047],_mm_xor_si128(c2[5371],_mm_xor_si128(c2[6691],_mm_xor_si128(c2[2733],_mm_xor_si128(c2[7361],_mm_xor_si128(c2[10026],_mm_xor_si128(c2[6062],_mm_xor_si128(c2[9366],_mm_xor_si128(c2[4123],_mm_xor_si128(c2[1471],_mm_xor_si128(c2[9397],_mm_xor_si128(c2[3493],_mm_xor_si128(c2[4800],_mm_xor_si128(c2[8770],_mm_xor_si128(c2[9463],_mm_xor_si128(c2[3514],_mm_xor_si128(c2[8134],_mm_xor_si128(c2[9483],_mm_xor_si128(c2[9493],_mm_xor_si128(c2[1574],_mm_xor_si128(c2[4901],_mm_xor_si128(c2[6881],_mm_xor_si128(c2[6879],_mm_xor_si128(c2[7542],_mm_xor_si128(c2[2287],_mm_xor_si128(c2[4272],_mm_xor_si128(c2[2288],_mm_xor_si128(c2[10235],_mm_xor_si128(c2[8918],_mm_xor_si128(c2[6936],_mm_xor_si128(c2[4960],_mm_xor_si128(c2[1681],_mm_xor_si128(c2[10264],_mm_xor_si128(c2[5649],_mm_xor_si128(c2[8982],_mm_xor_si128(c2[5678],_mm_xor_si128(c2[6999],_mm_xor_si128(c2[1094],_mm_xor_si128(c2[5054],_mm_xor_si128(c2[6363],_mm_xor_si128(c2[6391],_mm_xor_si128(c2[5083],_mm_xor_si128(c2[2440],_mm_xor_si128(c2[7746],_mm_xor_si128(c2[7742],_mm_xor_si128(c2[7085],_mm_xor_si128(c2[6463],_mm_xor_si128(c2[1844],_mm_xor_si128(c2[1840],_mm_xor_si128(c2[9786],_mm_xor_si128(c2[10440],_mm_xor_si128(c2[9127],_mm_xor_si128(c2[8503],_mm_xor_si128(c2[3873],_mm_xor_si128(c2[3882],_mm_xor_si128(c2[7874],_mm_xor_si128(c2[5887],_mm_xor_si128(c2[9843],_mm_xor_si128(c2[7891],_mm_xor_si128(c2[7236],c2[9216]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+     d2[300]=simde_mm_xor_si128(c2[5941],simde_mm_xor_si128(c2[1334],simde_mm_xor_si128(c2[9904],simde_mm_xor_si128(c2[1321],simde_mm_xor_si128(c2[8584],simde_mm_xor_si128(c2[3337],simde_mm_xor_si128(c2[3998],simde_mm_xor_si128(c2[5979],simde_mm_xor_si128(c2[61],simde_mm_xor_si128(c2[9307],simde_mm_xor_si128(c2[2047],simde_mm_xor_si128(c2[5371],simde_mm_xor_si128(c2[6691],simde_mm_xor_si128(c2[2733],simde_mm_xor_si128(c2[7361],simde_mm_xor_si128(c2[10026],simde_mm_xor_si128(c2[6062],simde_mm_xor_si128(c2[9366],simde_mm_xor_si128(c2[4123],simde_mm_xor_si128(c2[1471],simde_mm_xor_si128(c2[9397],simde_mm_xor_si128(c2[3493],simde_mm_xor_si128(c2[4800],simde_mm_xor_si128(c2[8770],simde_mm_xor_si128(c2[9463],simde_mm_xor_si128(c2[3514],simde_mm_xor_si128(c2[8134],simde_mm_xor_si128(c2[9483],simde_mm_xor_si128(c2[9493],simde_mm_xor_si128(c2[1574],simde_mm_xor_si128(c2[4901],simde_mm_xor_si128(c2[6881],simde_mm_xor_si128(c2[6879],simde_mm_xor_si128(c2[7542],simde_mm_xor_si128(c2[2287],simde_mm_xor_si128(c2[4272],simde_mm_xor_si128(c2[2288],simde_mm_xor_si128(c2[10235],simde_mm_xor_si128(c2[8918],simde_mm_xor_si128(c2[6936],simde_mm_xor_si128(c2[4960],simde_mm_xor_si128(c2[1681],simde_mm_xor_si128(c2[10264],simde_mm_xor_si128(c2[5649],simde_mm_xor_si128(c2[8982],simde_mm_xor_si128(c2[5678],simde_mm_xor_si128(c2[6999],simde_mm_xor_si128(c2[1094],simde_mm_xor_si128(c2[5054],simde_mm_xor_si128(c2[6363],simde_mm_xor_si128(c2[6391],simde_mm_xor_si128(c2[5083],simde_mm_xor_si128(c2[2440],simde_mm_xor_si128(c2[7746],simde_mm_xor_si128(c2[7742],simde_mm_xor_si128(c2[7085],simde_mm_xor_si128(c2[6463],simde_mm_xor_si128(c2[1844],simde_mm_xor_si128(c2[1840],simde_mm_xor_si128(c2[9786],simde_mm_xor_si128(c2[10440],simde_mm_xor_si128(c2[9127],simde_mm_xor_si128(c2[8503],simde_mm_xor_si128(c2[3873],simde_mm_xor_si128(c2[3882],simde_mm_xor_si128(c2[7874],simde_mm_xor_si128(c2[5887],simde_mm_xor_si128(c2[9843],simde_mm_xor_si128(c2[7891],simde_mm_xor_si128(c2[7236],c2[9216]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
 
 //row: 21
-     d2[315]=_mm_xor_si128(c2[3342],_mm_xor_si128(c2[6757],_mm_xor_si128(c2[5113],_mm_xor_si128(c2[1264],c2[5923]))));
+     d2[315]=simde_mm_xor_si128(c2[3342],simde_mm_xor_si128(c2[6757],simde_mm_xor_si128(c2[5113],simde_mm_xor_si128(c2[1264],c2[5923]))));
 
 //row: 22
-     d2[330]=_mm_xor_si128(c2[5941],_mm_xor_si128(c2[10262],_mm_xor_si128(c2[9637],c2[1841])));
+     d2[330]=simde_mm_xor_si128(c2[5941],simde_mm_xor_si128(c2[10262],simde_mm_xor_si128(c2[9637],c2[1841])));
 
 //row: 23
-     d2[345]=_mm_xor_si128(c2[5981],_mm_xor_si128(c2[9967],_mm_xor_si128(c2[3607],c2[5172])));
+     d2[345]=simde_mm_xor_si128(c2[5981],simde_mm_xor_si128(c2[9967],simde_mm_xor_si128(c2[3607],c2[5172])));
 
 //row: 24
-     d2[360]=_mm_xor_si128(c2[672],_mm_xor_si128(c2[6609],_mm_xor_si128(c2[4620],_mm_xor_si128(c2[6611],_mm_xor_si128(c2[2],_mm_xor_si128(c2[8612],_mm_xor_si128(c2[9273],_mm_xor_si128(c2[695],_mm_xor_si128(c2[5351],_mm_xor_si128(c2[4023],_mm_xor_si128(c2[7322],_mm_xor_si128(c2[102],_mm_xor_si128(c2[1422],_mm_xor_si128(c2[8023],_mm_xor_si128(c2[1421],_mm_xor_si128(c2[4742],_mm_xor_si128(c2[793],_mm_xor_si128(c2[4082],_mm_xor_si128(c2[1440],_mm_xor_si128(c2[9398],_mm_xor_si128(c2[6761],_mm_xor_si128(c2[4113],_mm_xor_si128(c2[8768],_mm_xor_si128(c2[10090],_mm_xor_si128(c2[3486],_mm_xor_si128(c2[4179],_mm_xor_si128(c2[8804],_mm_xor_si128(c2[2850],_mm_xor_si128(c2[4214],_mm_xor_si128(c2[4209],_mm_xor_si128(c2[6849],_mm_xor_si128(c2[10176],_mm_xor_si128(c2[1597],_mm_xor_si128(c2[1595],_mm_xor_si128(c2[7562],_mm_xor_si128(c2[9547],_mm_xor_si128(c2[7563],_mm_xor_si128(c2[4951],_mm_xor_si128(c2[3634],_mm_xor_si128(c2[1652],_mm_xor_si128(c2[8259],_mm_xor_si128(c2[6971],_mm_xor_si128(c2[4980],_mm_xor_si128(c2[365],_mm_xor_si128(c2[3698],_mm_xor_si128(c2[394],_mm_xor_si128(c2[1715],_mm_xor_si128(c2[6369],_mm_xor_si128(c2[10329],_mm_xor_si128(c2[1094],_mm_xor_si128(c2[1122],_mm_xor_si128(c2[10358],_mm_xor_si128(c2[7715],_mm_xor_si128(c2[2462],_mm_xor_si128(c2[2473],_mm_xor_si128(c2[1801],_mm_xor_si128(c2[1179],_mm_xor_si128(c2[7119],_mm_xor_si128(c2[7115],_mm_xor_si128(c2[4502],_mm_xor_si128(c2[5171],_mm_xor_si128(c2[3843],_mm_xor_si128(c2[3219],_mm_xor_si128(c2[9163],_mm_xor_si128(c2[9157],_mm_xor_si128(c2[2590],_mm_xor_si128(c2[603],_mm_xor_si128(c2[4574],_mm_xor_si128(c2[2622],_mm_xor_si128(c2[1952],c2[3932]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+     d2[360]=simde_mm_xor_si128(c2[672],simde_mm_xor_si128(c2[6609],simde_mm_xor_si128(c2[4620],simde_mm_xor_si128(c2[6611],simde_mm_xor_si128(c2[2],simde_mm_xor_si128(c2[8612],simde_mm_xor_si128(c2[9273],simde_mm_xor_si128(c2[695],simde_mm_xor_si128(c2[5351],simde_mm_xor_si128(c2[4023],simde_mm_xor_si128(c2[7322],simde_mm_xor_si128(c2[102],simde_mm_xor_si128(c2[1422],simde_mm_xor_si128(c2[8023],simde_mm_xor_si128(c2[1421],simde_mm_xor_si128(c2[4742],simde_mm_xor_si128(c2[793],simde_mm_xor_si128(c2[4082],simde_mm_xor_si128(c2[1440],simde_mm_xor_si128(c2[9398],simde_mm_xor_si128(c2[6761],simde_mm_xor_si128(c2[4113],simde_mm_xor_si128(c2[8768],simde_mm_xor_si128(c2[10090],simde_mm_xor_si128(c2[3486],simde_mm_xor_si128(c2[4179],simde_mm_xor_si128(c2[8804],simde_mm_xor_si128(c2[2850],simde_mm_xor_si128(c2[4214],simde_mm_xor_si128(c2[4209],simde_mm_xor_si128(c2[6849],simde_mm_xor_si128(c2[10176],simde_mm_xor_si128(c2[1597],simde_mm_xor_si128(c2[1595],simde_mm_xor_si128(c2[7562],simde_mm_xor_si128(c2[9547],simde_mm_xor_si128(c2[7563],simde_mm_xor_si128(c2[4951],simde_mm_xor_si128(c2[3634],simde_mm_xor_si128(c2[1652],simde_mm_xor_si128(c2[8259],simde_mm_xor_si128(c2[6971],simde_mm_xor_si128(c2[4980],simde_mm_xor_si128(c2[365],simde_mm_xor_si128(c2[3698],simde_mm_xor_si128(c2[394],simde_mm_xor_si128(c2[1715],simde_mm_xor_si128(c2[6369],simde_mm_xor_si128(c2[10329],simde_mm_xor_si128(c2[1094],simde_mm_xor_si128(c2[1122],simde_mm_xor_si128(c2[10358],simde_mm_xor_si128(c2[7715],simde_mm_xor_si128(c2[2462],simde_mm_xor_si128(c2[2473],simde_mm_xor_si128(c2[1801],simde_mm_xor_si128(c2[1179],simde_mm_xor_si128(c2[7119],simde_mm_xor_si128(c2[7115],simde_mm_xor_si128(c2[4502],simde_mm_xor_si128(c2[5171],simde_mm_xor_si128(c2[3843],simde_mm_xor_si128(c2[3219],simde_mm_xor_si128(c2[9163],simde_mm_xor_si128(c2[9157],simde_mm_xor_si128(c2[2590],simde_mm_xor_si128(c2[603],simde_mm_xor_si128(c2[4574],simde_mm_xor_si128(c2[2622],simde_mm_xor_si128(c2[1952],c2[3932]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
 
 //row: 25
-     d2[375]=_mm_xor_si128(c2[7291],_mm_xor_si128(c2[8768],_mm_xor_si128(c2[7470],c2[3731])));
+     d2[375]=simde_mm_xor_si128(c2[7291],simde_mm_xor_si128(c2[8768],simde_mm_xor_si128(c2[7470],c2[3731])));
 
 //row: 26
-     d2[390]=_mm_xor_si128(c2[1990],_mm_xor_si128(c2[2048],_mm_xor_si128(c2[6070],c2[1776])));
+     d2[390]=simde_mm_xor_si128(c2[1990],simde_mm_xor_si128(c2[2048],simde_mm_xor_si128(c2[6070],c2[1776])));
 
 //row: 27
-     d2[405]=_mm_xor_si128(c2[3340],_mm_xor_si128(c2[5474],c2[6180]));
+     d2[405]=simde_mm_xor_si128(c2[3340],simde_mm_xor_si128(c2[5474],c2[6180]));
 
 //row: 28
-     d2[420]=_mm_xor_si128(c2[2],_mm_xor_si128(c2[7382],_mm_xor_si128(c2[5862],c2[9222])));
+     d2[420]=simde_mm_xor_si128(c2[2],simde_mm_xor_si128(c2[7382],simde_mm_xor_si128(c2[5862],c2[9222])));
 
 //row: 29
-     d2[435]=_mm_xor_si128(c2[9914],_mm_xor_si128(c2[5292],_mm_xor_si128(c2[3303],_mm_xor_si128(c2[4634],_mm_xor_si128(c2[5294],_mm_xor_si128(c2[7295],_mm_xor_si128(c2[7956],_mm_xor_si128(c2[9277],_mm_xor_si128(c2[9937],_mm_xor_si128(c2[5324],_mm_xor_si128(c2[4034],_mm_xor_si128(c2[2706],_mm_xor_si128(c2[6005],_mm_xor_si128(c2[9344],_mm_xor_si128(c2[90],_mm_xor_si128(c2[6031],_mm_xor_si128(c2[6691],_mm_xor_si128(c2[3425],_mm_xor_si128(c2[10020],_mm_xor_si128(c2[2105],_mm_xor_si128(c2[2765],_mm_xor_si128(c2[8081],_mm_xor_si128(c2[5444],_mm_xor_si128(c2[2796],_mm_xor_si128(c2[7451],_mm_xor_si128(c2[8773],_mm_xor_si128(c2[1509],_mm_xor_si128(c2[2169],_mm_xor_si128(c2[2862],_mm_xor_si128(c2[7472],_mm_xor_si128(c2[873],_mm_xor_si128(c2[1533],_mm_xor_si128(c2[2882],_mm_xor_si128(c2[2892],_mm_xor_si128(c2[4872],_mm_xor_si128(c2[5532],_mm_xor_si128(c2[8859],_mm_xor_si128(c2[280],_mm_xor_si128(c2[278],_mm_xor_si128(c2[6245],_mm_xor_si128(c2[8230],_mm_xor_si128(c2[5586],_mm_xor_si128(c2[6246],_mm_xor_si128(c2[3634],_mm_xor_si128(c2[2317],_mm_xor_si128(c2[10234],_mm_xor_si128(c2[335],_mm_xor_si128(c2[5654],_mm_xor_si128(c2[3663],_mm_xor_si128(c2[8947],_mm_xor_si128(c2[9607],_mm_xor_si128(c2[2381],_mm_xor_si128(c2[9636],_mm_xor_si128(c2[10297],_mm_xor_si128(c2[398],_mm_xor_si128(c2[5052],_mm_xor_si128(c2[9012],_mm_xor_si128(c2[9661],_mm_xor_si128(c2[10321],_mm_xor_si128(c2[422],_mm_xor_si128(c2[10364],_mm_xor_si128(c2[9041],_mm_xor_si128(c2[6398],_mm_xor_si128(c2[1145],_mm_xor_si128(c2[1141],_mm_xor_si128(c2[10383],_mm_xor_si128(c2[484],_mm_xor_si128(c2[10421],_mm_xor_si128(c2[5802],_mm_xor_si128(c2[5138],_mm_xor_si128(c2[5798],_mm_xor_si128(c2[3185],_mm_xor_si128(c2[3854],_mm_xor_si128(c2[1866],_mm_xor_si128(c2[2526],_mm_xor_si128(c2[4507],_mm_xor_si128(c2[1902],_mm_xor_si128(c2[7831],_mm_xor_si128(c2[7840],_mm_xor_si128(c2[1273],_mm_xor_si128(c2[9845],_mm_xor_si128(c2[2582],_mm_xor_si128(c2[3242],_mm_xor_si128(c2[1290],_mm_xor_si128(c2[635],_mm_xor_si128(c2[1955],c2[2615]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+     d2[435]=simde_mm_xor_si128(c2[9914],simde_mm_xor_si128(c2[5292],simde_mm_xor_si128(c2[3303],simde_mm_xor_si128(c2[4634],simde_mm_xor_si128(c2[5294],simde_mm_xor_si128(c2[7295],simde_mm_xor_si128(c2[7956],simde_mm_xor_si128(c2[9277],simde_mm_xor_si128(c2[9937],simde_mm_xor_si128(c2[5324],simde_mm_xor_si128(c2[4034],simde_mm_xor_si128(c2[2706],simde_mm_xor_si128(c2[6005],simde_mm_xor_si128(c2[9344],simde_mm_xor_si128(c2[90],simde_mm_xor_si128(c2[6031],simde_mm_xor_si128(c2[6691],simde_mm_xor_si128(c2[3425],simde_mm_xor_si128(c2[10020],simde_mm_xor_si128(c2[2105],simde_mm_xor_si128(c2[2765],simde_mm_xor_si128(c2[8081],simde_mm_xor_si128(c2[5444],simde_mm_xor_si128(c2[2796],simde_mm_xor_si128(c2[7451],simde_mm_xor_si128(c2[8773],simde_mm_xor_si128(c2[1509],simde_mm_xor_si128(c2[2169],simde_mm_xor_si128(c2[2862],simde_mm_xor_si128(c2[7472],simde_mm_xor_si128(c2[873],simde_mm_xor_si128(c2[1533],simde_mm_xor_si128(c2[2882],simde_mm_xor_si128(c2[2892],simde_mm_xor_si128(c2[4872],simde_mm_xor_si128(c2[5532],simde_mm_xor_si128(c2[8859],simde_mm_xor_si128(c2[280],simde_mm_xor_si128(c2[278],simde_mm_xor_si128(c2[6245],simde_mm_xor_si128(c2[8230],simde_mm_xor_si128(c2[5586],simde_mm_xor_si128(c2[6246],simde_mm_xor_si128(c2[3634],simde_mm_xor_si128(c2[2317],simde_mm_xor_si128(c2[10234],simde_mm_xor_si128(c2[335],simde_mm_xor_si128(c2[5654],simde_mm_xor_si128(c2[3663],simde_mm_xor_si128(c2[8947],simde_mm_xor_si128(c2[9607],simde_mm_xor_si128(c2[2381],simde_mm_xor_si128(c2[9636],simde_mm_xor_si128(c2[10297],simde_mm_xor_si128(c2[398],simde_mm_xor_si128(c2[5052],simde_mm_xor_si128(c2[9012],simde_mm_xor_si128(c2[9661],simde_mm_xor_si128(c2[10321],simde_mm_xor_si128(c2[422],simde_mm_xor_si128(c2[10364],simde_mm_xor_si128(c2[9041],simde_mm_xor_si128(c2[6398],simde_mm_xor_si128(c2[1145],simde_mm_xor_si128(c2[1141],simde_mm_xor_si128(c2[10383],simde_mm_xor_si128(c2[484],simde_mm_xor_si128(c2[10421],simde_mm_xor_si128(c2[5802],simde_mm_xor_si128(c2[5138],simde_mm_xor_si128(c2[5798],simde_mm_xor_si128(c2[3185],simde_mm_xor_si128(c2[3854],simde_mm_xor_si128(c2[1866],simde_mm_xor_si128(c2[2526],simde_mm_xor_si128(c2[4507],simde_mm_xor_si128(c2[1902],simde_mm_xor_si128(c2[7831],simde_mm_xor_si128(c2[7840],simde_mm_xor_si128(c2[1273],simde_mm_xor_si128(c2[9845],simde_mm_xor_si128(c2[2582],simde_mm_xor_si128(c2[3242],simde_mm_xor_si128(c2[1290],simde_mm_xor_si128(c2[635],simde_mm_xor_si128(c2[1955],c2[2615]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
 
 //row: 30
-     d2[450]=_mm_xor_si128(c2[660],_mm_xor_si128(c2[6612],_mm_xor_si128(c2[3963],_mm_xor_si128(c2[4623],_mm_xor_si128(c2[5954],_mm_xor_si128(c2[6614],_mm_xor_si128(c2[6610],_mm_xor_si128(c2[8615],_mm_xor_si128(c2[8616],_mm_xor_si128(c2[9276],_mm_xor_si128(c2[38],_mm_xor_si128(c2[698],_mm_xor_si128(c2[5354],_mm_xor_si128(c2[4026],_mm_xor_si128(c2[6665],_mm_xor_si128(c2[7325],_mm_xor_si128(c2[90],_mm_xor_si128(c2[1410],_mm_xor_si128(c2[7351],_mm_xor_si128(c2[8011],_mm_xor_si128(c2[4745],_mm_xor_si128(c2[121],_mm_xor_si128(c2[781],_mm_xor_si128(c2[3425],_mm_xor_si128(c2[4085],_mm_xor_si128(c2[9401],_mm_xor_si128(c2[6764],_mm_xor_si128(c2[3456],_mm_xor_si128(c2[4116],_mm_xor_si128(c2[8771],_mm_xor_si128(c2[9433],_mm_xor_si128(c2[10093],_mm_xor_si128(c2[2829],_mm_xor_si128(c2[3489],_mm_xor_si128(c2[4182],_mm_xor_si128(c2[8132],_mm_xor_si128(c2[8792],_mm_xor_si128(c2[2193],_mm_xor_si128(c2[2853],_mm_xor_si128(c2[4202],_mm_xor_si128(c2[3552],_mm_xor_si128(c2[4212],_mm_xor_si128(c2[6192],_mm_xor_si128(c2[6852],_mm_xor_si128(c2[10179],_mm_xor_si128(c2[1600],_mm_xor_si128(c2[938],_mm_xor_si128(c2[1598],_mm_xor_si128(c2[7565],_mm_xor_si128(c2[8890],_mm_xor_si128(c2[9550],_mm_xor_si128(c2[6906],_mm_xor_si128(c2[7566],_mm_xor_si128(c2[4932],_mm_xor_si128(c2[4954],_mm_xor_si128(c2[3637],_mm_xor_si128(c2[995],_mm_xor_si128(c2[1655],_mm_xor_si128(c2[6974],_mm_xor_si128(c2[4983],_mm_xor_si128(c2[10267],_mm_xor_si128(c2[368],_mm_xor_si128(c2[3701],_mm_xor_si128(c2[10296],_mm_xor_si128(c2[397],_mm_xor_si128(c2[1058],_mm_xor_si128(c2[1718],_mm_xor_si128(c2[6991],_mm_xor_si128(c2[6372],_mm_xor_si128(c2[9672],_mm_xor_si128(c2[10332],_mm_xor_si128(c2[422],_mm_xor_si128(c2[1082],_mm_xor_si128(c2[1110],_mm_xor_si128(c2[10361],_mm_xor_si128(c2[7058],_mm_xor_si128(c2[7718],_mm_xor_si128(c2[2465],_mm_xor_si128(c2[2461],_mm_xor_si128(c2[1144],_mm_xor_si128(c2[1804],_mm_xor_si128(c2[1182],_mm_xor_si128(c2[6462],_mm_xor_si128(c2[7122],_mm_xor_si128(c2[6458],_mm_xor_si128(c2[7118],_mm_xor_si128(c2[4505],_mm_xor_si128(c2[4514],_mm_xor_si128(c2[5174],_mm_xor_si128(c2[3186],_mm_xor_si128(c2[3846],_mm_xor_si128(c2[3222],_mm_xor_si128(c2[9151],_mm_xor_si128(c2[8500],_mm_xor_si128(c2[9160],_mm_xor_si128(c2[2593],_mm_xor_si128(c2[10505],_mm_xor_si128(c2[606],_mm_xor_si128(c2[3902],_mm_xor_si128(c2[4562],_mm_xor_si128(c2[2610],_mm_xor_si128(c2[1955],_mm_xor_si128(c2[3275],c2[3935])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+     d2[450]=simde_mm_xor_si128(c2[660],simde_mm_xor_si128(c2[6612],simde_mm_xor_si128(c2[3963],simde_mm_xor_si128(c2[4623],simde_mm_xor_si128(c2[5954],simde_mm_xor_si128(c2[6614],simde_mm_xor_si128(c2[6610],simde_mm_xor_si128(c2[8615],simde_mm_xor_si128(c2[8616],simde_mm_xor_si128(c2[9276],simde_mm_xor_si128(c2[38],simde_mm_xor_si128(c2[698],simde_mm_xor_si128(c2[5354],simde_mm_xor_si128(c2[4026],simde_mm_xor_si128(c2[6665],simde_mm_xor_si128(c2[7325],simde_mm_xor_si128(c2[90],simde_mm_xor_si128(c2[1410],simde_mm_xor_si128(c2[7351],simde_mm_xor_si128(c2[8011],simde_mm_xor_si128(c2[4745],simde_mm_xor_si128(c2[121],simde_mm_xor_si128(c2[781],simde_mm_xor_si128(c2[3425],simde_mm_xor_si128(c2[4085],simde_mm_xor_si128(c2[9401],simde_mm_xor_si128(c2[6764],simde_mm_xor_si128(c2[3456],simde_mm_xor_si128(c2[4116],simde_mm_xor_si128(c2[8771],simde_mm_xor_si128(c2[9433],simde_mm_xor_si128(c2[10093],simde_mm_xor_si128(c2[2829],simde_mm_xor_si128(c2[3489],simde_mm_xor_si128(c2[4182],simde_mm_xor_si128(c2[8132],simde_mm_xor_si128(c2[8792],simde_mm_xor_si128(c2[2193],simde_mm_xor_si128(c2[2853],simde_mm_xor_si128(c2[4202],simde_mm_xor_si128(c2[3552],simde_mm_xor_si128(c2[4212],simde_mm_xor_si128(c2[6192],simde_mm_xor_si128(c2[6852],simde_mm_xor_si128(c2[10179],simde_mm_xor_si128(c2[1600],simde_mm_xor_si128(c2[938],simde_mm_xor_si128(c2[1598],simde_mm_xor_si128(c2[7565],simde_mm_xor_si128(c2[8890],simde_mm_xor_si128(c2[9550],simde_mm_xor_si128(c2[6906],simde_mm_xor_si128(c2[7566],simde_mm_xor_si128(c2[4932],simde_mm_xor_si128(c2[4954],simde_mm_xor_si128(c2[3637],simde_mm_xor_si128(c2[995],simde_mm_xor_si128(c2[1655],simde_mm_xor_si128(c2[6974],simde_mm_xor_si128(c2[4983],simde_mm_xor_si128(c2[10267],simde_mm_xor_si128(c2[368],simde_mm_xor_si128(c2[3701],simde_mm_xor_si128(c2[10296],simde_mm_xor_si128(c2[397],simde_mm_xor_si128(c2[1058],simde_mm_xor_si128(c2[1718],simde_mm_xor_si128(c2[6991],simde_mm_xor_si128(c2[6372],simde_mm_xor_si128(c2[9672],simde_mm_xor_si128(c2[10332],simde_mm_xor_si128(c2[422],simde_mm_xor_si128(c2[1082],simde_mm_xor_si128(c2[1110],simde_mm_xor_si128(c2[10361],simde_mm_xor_si128(c2[7058],simde_mm_xor_si128(c2[7718],simde_mm_xor_si128(c2[2465],simde_mm_xor_si128(c2[2461],simde_mm_xor_si128(c2[1144],simde_mm_xor_si128(c2[1804],simde_mm_xor_si128(c2[1182],simde_mm_xor_si128(c2[6462],simde_mm_xor_si128(c2[7122],simde_mm_xor_si128(c2[6458],simde_mm_xor_si128(c2[7118],simde_mm_xor_si128(c2[4505],simde_mm_xor_si128(c2[4514],simde_mm_xor_si128(c2[5174],simde_mm_xor_si128(c2[3186],simde_mm_xor_si128(c2[3846],simde_mm_xor_si128(c2[3222],simde_mm_xor_si128(c2[9151],simde_mm_xor_si128(c2[8500],simde_mm_xor_si128(c2[9160],simde_mm_xor_si128(c2[2593],simde_mm_xor_si128(c2[10505],simde_mm_xor_si128(c2[606],simde_mm_xor_si128(c2[3902],simde_mm_xor_si128(c2[4562],simde_mm_xor_si128(c2[2610],simde_mm_xor_si128(c2[1955],simde_mm_xor_si128(c2[3275],c2[3935])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
 
 //row: 31
-     d2[465]=_mm_xor_si128(c2[3964],_mm_xor_si128(c2[2640],_mm_xor_si128(c2[9901],_mm_xor_si128(c2[8592],_mm_xor_si128(c2[7927],_mm_xor_si128(c2[6603],_mm_xor_si128(c2[9903],_mm_xor_si128(c2[7934],_mm_xor_si128(c2[8594],_mm_xor_si128(c2[1360],_mm_xor_si128(c2[36],_mm_xor_si128(c2[2021],_mm_xor_si128(c2[697],_mm_xor_si128(c2[4002],_mm_xor_si128(c2[2018],_mm_xor_si128(c2[2678],_mm_xor_si128(c2[5974],_mm_xor_si128(c2[8643],_mm_xor_si128(c2[7334],_mm_xor_si128(c2[7330],_mm_xor_si128(c2[6006],_mm_xor_si128(c2[70],_mm_xor_si128(c2[9305],_mm_xor_si128(c2[3394],_mm_xor_si128(c2[2070],_mm_xor_si128(c2[4714],_mm_xor_si128(c2[3390],_mm_xor_si128(c2[756],_mm_xor_si128(c2[9331],_mm_xor_si128(c2[9991],_mm_xor_si128(c2[8049],_mm_xor_si128(c2[6725],_mm_xor_si128(c2[4085],_mm_xor_si128(c2[2761],_mm_xor_si128(c2[7389],_mm_xor_si128(c2[5405],_mm_xor_si128(c2[6065],_mm_xor_si128(c2[2131],_mm_xor_si128(c2[822],_mm_xor_si128(c2[10053],_mm_xor_si128(c2[8744],_mm_xor_si128(c2[7420],_mm_xor_si128(c2[6096],_mm_xor_si128(c2[1501],_mm_xor_si128(c2[192],_mm_xor_si128(c2[2823],_mm_xor_si128(c2[1514],_mm_xor_si128(c2[6793],_mm_xor_si128(c2[4809],_mm_xor_si128(c2[5469],_mm_xor_si128(c2[7471],_mm_xor_si128(c2[6162],_mm_xor_si128(c2[1537],_mm_xor_si128(c2[213],_mm_xor_si128(c2[6157],_mm_xor_si128(c2[4173],_mm_xor_si128(c2[4833],_mm_xor_si128(c2[3519],_mm_xor_si128(c2[7506],_mm_xor_si128(c2[6182],_mm_xor_si128(c2[7501],_mm_xor_si128(c2[6192],_mm_xor_si128(c2[10141],_mm_xor_si128(c2[8172],_mm_xor_si128(c2[8832],_mm_xor_si128(c2[2924],_mm_xor_si128(c2[1600],_mm_xor_si128(c2[4904],_mm_xor_si128(c2[3580],_mm_xor_si128(c2[4902],_mm_xor_si128(c2[3578],_mm_xor_si128(c2[310],_mm_xor_si128(c2[9545],_mm_xor_si128(c2[2280],_mm_xor_si128(c2[971],_mm_xor_si128(c2[311],_mm_xor_si128(c2[8886],_mm_xor_si128(c2[9546],_mm_xor_si128(c2[8258],_mm_xor_si128(c2[6934],_mm_xor_si128(c2[6941],_mm_xor_si128(c2[5617],_mm_xor_si128(c2[4959],_mm_xor_si128(c2[2975],_mm_xor_si128(c2[3635],_mm_xor_si128(c2[10263],_mm_xor_si128(c2[8954],_mm_xor_si128(c2[8287],_mm_xor_si128(c2[6963],_mm_xor_si128(c2[3672],_mm_xor_si128(c2[1688],_mm_xor_si128(c2[2348],_mm_xor_si128(c2[6990],_mm_xor_si128(c2[5681],_mm_xor_si128(c2[3701],_mm_xor_si128(c2[2377],_mm_xor_si128(c2[5022],_mm_xor_si128(c2[3038],_mm_xor_si128(c2[3698],_mm_xor_si128(c2[9661],_mm_xor_si128(c2[8352],_mm_xor_si128(c2[3062],_mm_xor_si128(c2[1753],_mm_xor_si128(c2[4386],_mm_xor_si128(c2[2402],_mm_xor_si128(c2[3062],_mm_xor_si128(c2[4414],_mm_xor_si128(c2[3090],_mm_xor_si128(c2[3091],_mm_xor_si128(c2[1782],_mm_xor_si128(c2[463],_mm_xor_si128(c2[9698],_mm_xor_si128(c2[5769],_mm_xor_si128(c2[4445],_mm_xor_si128(c2[5765],_mm_xor_si128(c2[4441],_mm_xor_si128(c2[5108],_mm_xor_si128(c2[3124],_mm_xor_si128(c2[3784],_mm_xor_si128(c2[4471],_mm_xor_si128(c2[3162],_mm_xor_si128(c2[10411],_mm_xor_si128(c2[9102],_mm_xor_si128(c2[10422],_mm_xor_si128(c2[8438],_mm_xor_si128(c2[9098],_mm_xor_si128(c2[7809],_mm_xor_si128(c2[6485],_mm_xor_si128(c2[8463],_mm_xor_si128(c2[7154],_mm_xor_si128(c2[7150],_mm_xor_si128(c2[5166],_mm_xor_si128(c2[5826],_mm_xor_si128(c2[6511],_mm_xor_si128(c2[5202],_mm_xor_si128(c2[1896],_mm_xor_si128(c2[572],_mm_xor_si128(c2[1890],_mm_xor_si128(c2[581],_mm_xor_si128(c2[5882],_mm_xor_si128(c2[4573],_mm_xor_si128(c2[3910],_mm_xor_si128(c2[2586],_mm_xor_si128(c2[7866],_mm_xor_si128(c2[5882],_mm_xor_si128(c2[6542],_mm_xor_si128(c2[5914],_mm_xor_si128(c2[4590],_mm_xor_si128(c2[5259],_mm_xor_si128(c2[3935],_mm_xor_si128(c2[7239],_mm_xor_si128(c2[5255],c2[5915])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+     d2[465]=simde_mm_xor_si128(c2[3964],simde_mm_xor_si128(c2[2640],simde_mm_xor_si128(c2[9901],simde_mm_xor_si128(c2[8592],simde_mm_xor_si128(c2[7927],simde_mm_xor_si128(c2[6603],simde_mm_xor_si128(c2[9903],simde_mm_xor_si128(c2[7934],simde_mm_xor_si128(c2[8594],simde_mm_xor_si128(c2[1360],simde_mm_xor_si128(c2[36],simde_mm_xor_si128(c2[2021],simde_mm_xor_si128(c2[697],simde_mm_xor_si128(c2[4002],simde_mm_xor_si128(c2[2018],simde_mm_xor_si128(c2[2678],simde_mm_xor_si128(c2[5974],simde_mm_xor_si128(c2[8643],simde_mm_xor_si128(c2[7334],simde_mm_xor_si128(c2[7330],simde_mm_xor_si128(c2[6006],simde_mm_xor_si128(c2[70],simde_mm_xor_si128(c2[9305],simde_mm_xor_si128(c2[3394],simde_mm_xor_si128(c2[2070],simde_mm_xor_si128(c2[4714],simde_mm_xor_si128(c2[3390],simde_mm_xor_si128(c2[756],simde_mm_xor_si128(c2[9331],simde_mm_xor_si128(c2[9991],simde_mm_xor_si128(c2[8049],simde_mm_xor_si128(c2[6725],simde_mm_xor_si128(c2[4085],simde_mm_xor_si128(c2[2761],simde_mm_xor_si128(c2[7389],simde_mm_xor_si128(c2[5405],simde_mm_xor_si128(c2[6065],simde_mm_xor_si128(c2[2131],simde_mm_xor_si128(c2[822],simde_mm_xor_si128(c2[10053],simde_mm_xor_si128(c2[8744],simde_mm_xor_si128(c2[7420],simde_mm_xor_si128(c2[6096],simde_mm_xor_si128(c2[1501],simde_mm_xor_si128(c2[192],simde_mm_xor_si128(c2[2823],simde_mm_xor_si128(c2[1514],simde_mm_xor_si128(c2[6793],simde_mm_xor_si128(c2[4809],simde_mm_xor_si128(c2[5469],simde_mm_xor_si128(c2[7471],simde_mm_xor_si128(c2[6162],simde_mm_xor_si128(c2[1537],simde_mm_xor_si128(c2[213],simde_mm_xor_si128(c2[6157],simde_mm_xor_si128(c2[4173],simde_mm_xor_si128(c2[4833],simde_mm_xor_si128(c2[3519],simde_mm_xor_si128(c2[7506],simde_mm_xor_si128(c2[6182],simde_mm_xor_si128(c2[7501],simde_mm_xor_si128(c2[6192],simde_mm_xor_si128(c2[10141],simde_mm_xor_si128(c2[8172],simde_mm_xor_si128(c2[8832],simde_mm_xor_si128(c2[2924],simde_mm_xor_si128(c2[1600],simde_mm_xor_si128(c2[4904],simde_mm_xor_si128(c2[3580],simde_mm_xor_si128(c2[4902],simde_mm_xor_si128(c2[3578],simde_mm_xor_si128(c2[310],simde_mm_xor_si128(c2[9545],simde_mm_xor_si128(c2[2280],simde_mm_xor_si128(c2[971],simde_mm_xor_si128(c2[311],simde_mm_xor_si128(c2[8886],simde_mm_xor_si128(c2[9546],simde_mm_xor_si128(c2[8258],simde_mm_xor_si128(c2[6934],simde_mm_xor_si128(c2[6941],simde_mm_xor_si128(c2[5617],simde_mm_xor_si128(c2[4959],simde_mm_xor_si128(c2[2975],simde_mm_xor_si128(c2[3635],simde_mm_xor_si128(c2[10263],simde_mm_xor_si128(c2[8954],simde_mm_xor_si128(c2[8287],simde_mm_xor_si128(c2[6963],simde_mm_xor_si128(c2[3672],simde_mm_xor_si128(c2[1688],simde_mm_xor_si128(c2[2348],simde_mm_xor_si128(c2[6990],simde_mm_xor_si128(c2[5681],simde_mm_xor_si128(c2[3701],simde_mm_xor_si128(c2[2377],simde_mm_xor_si128(c2[5022],simde_mm_xor_si128(c2[3038],simde_mm_xor_si128(c2[3698],simde_mm_xor_si128(c2[9661],simde_mm_xor_si128(c2[8352],simde_mm_xor_si128(c2[3062],simde_mm_xor_si128(c2[1753],simde_mm_xor_si128(c2[4386],simde_mm_xor_si128(c2[2402],simde_mm_xor_si128(c2[3062],simde_mm_xor_si128(c2[4414],simde_mm_xor_si128(c2[3090],simde_mm_xor_si128(c2[3091],simde_mm_xor_si128(c2[1782],simde_mm_xor_si128(c2[463],simde_mm_xor_si128(c2[9698],simde_mm_xor_si128(c2[5769],simde_mm_xor_si128(c2[4445],simde_mm_xor_si128(c2[5765],simde_mm_xor_si128(c2[4441],simde_mm_xor_si128(c2[5108],simde_mm_xor_si128(c2[3124],simde_mm_xor_si128(c2[3784],simde_mm_xor_si128(c2[4471],simde_mm_xor_si128(c2[3162],simde_mm_xor_si128(c2[10411],simde_mm_xor_si128(c2[9102],simde_mm_xor_si128(c2[10422],simde_mm_xor_si128(c2[8438],simde_mm_xor_si128(c2[9098],simde_mm_xor_si128(c2[7809],simde_mm_xor_si128(c2[6485],simde_mm_xor_si128(c2[8463],simde_mm_xor_si128(c2[7154],simde_mm_xor_si128(c2[7150],simde_mm_xor_si128(c2[5166],simde_mm_xor_si128(c2[5826],simde_mm_xor_si128(c2[6511],simde_mm_xor_si128(c2[5202],simde_mm_xor_si128(c2[1896],simde_mm_xor_si128(c2[572],simde_mm_xor_si128(c2[1890],simde_mm_xor_si128(c2[581],simde_mm_xor_si128(c2[5882],simde_mm_xor_si128(c2[4573],simde_mm_xor_si128(c2[3910],simde_mm_xor_si128(c2[2586],simde_mm_xor_si128(c2[7866],simde_mm_xor_si128(c2[5882],simde_mm_xor_si128(c2[6542],simde_mm_xor_si128(c2[5914],simde_mm_xor_si128(c2[4590],simde_mm_xor_si128(c2[5259],simde_mm_xor_si128(c2[3935],simde_mm_xor_si128(c2[7239],simde_mm_xor_si128(c2[5255],c2[5915])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
 
 //row: 32
-     d2[480]=_mm_xor_si128(c2[7266],_mm_xor_si128(c2[2644],_mm_xor_si128(c2[10],_mm_xor_si128(c2[670],_mm_xor_si128(c2[1986],_mm_xor_si128(c2[2646],_mm_xor_si128(c2[4626],_mm_xor_si128(c2[4662],_mm_xor_si128(c2[4663],_mm_xor_si128(c2[5323],_mm_xor_si128(c2[6644],_mm_xor_si128(c2[7304],_mm_xor_si128(c2[1386],_mm_xor_si128(c2[73],_mm_xor_si128(c2[2712],_mm_xor_si128(c2[3372],_mm_xor_si128(c2[6696],_mm_xor_si128(c2[8016],_mm_xor_si128(c2[3398],_mm_xor_si128(c2[4058],_mm_xor_si128(c2[792],_mm_xor_si128(c2[6727],_mm_xor_si128(c2[7387],_mm_xor_si128(c2[10031],_mm_xor_si128(c2[132],_mm_xor_si128(c2[5433],_mm_xor_si128(c2[2796],_mm_xor_si128(c2[10062],_mm_xor_si128(c2[163],_mm_xor_si128(c2[4803],_mm_xor_si128(c2[5465],_mm_xor_si128(c2[6125],_mm_xor_si128(c2[9420],_mm_xor_si128(c2[10080],_mm_xor_si128(c2[214],_mm_xor_si128(c2[4179],_mm_xor_si128(c2[4839],_mm_xor_si128(c2[8799],_mm_xor_si128(c2[9459],_mm_xor_si128(c2[249],_mm_xor_si128(c2[10143],_mm_xor_si128(c2[244],_mm_xor_si128(c2[2224],_mm_xor_si128(c2[2884],_mm_xor_si128(c2[6211],_mm_xor_si128(c2[8191],_mm_xor_si128(c2[7544],_mm_xor_si128(c2[8204],_mm_xor_si128(c2[3612],_mm_xor_si128(c2[4922],_mm_xor_si128(c2[5582],_mm_xor_si128(c2[2953],_mm_xor_si128(c2[3613],_mm_xor_si128(c2[1001],_mm_xor_si128(c2[10243],_mm_xor_si128(c2[7601],_mm_xor_si128(c2[8261],_mm_xor_si128(c2[3006],_mm_xor_si128(c2[1030],_mm_xor_si128(c2[6314],_mm_xor_si128(c2[6974],_mm_xor_si128(c2[9606],_mm_xor_si128(c2[10292],_mm_xor_si128(c2[6343],_mm_xor_si128(c2[7003],_mm_xor_si128(c2[7664],_mm_xor_si128(c2[8324],_mm_xor_si128(c2[2404],_mm_xor_si128(c2[5704],_mm_xor_si128(c2[6364],_mm_xor_si128(c2[7028],_mm_xor_si128(c2[7688],_mm_xor_si128(c2[5049],_mm_xor_si128(c2[7716],_mm_xor_si128(c2[6393],_mm_xor_si128(c2[3090],_mm_xor_si128(c2[3750],_mm_xor_si128(c2[9071],_mm_xor_si128(c2[9067],_mm_xor_si128(c2[7750],_mm_xor_si128(c2[8410],_mm_xor_si128(c2[7773],_mm_xor_si128(c2[2494],_mm_xor_si128(c2[3154],_mm_xor_si128(c2[2490],_mm_xor_si128(c2[3150],_mm_xor_si128(c2[552],_mm_xor_si128(c2[546],_mm_xor_si128(c2[1206],_mm_xor_si128(c2[9792],_mm_xor_si128(c2[10452],_mm_xor_si128(c2[9813],_mm_xor_si128(c2[5198],_mm_xor_si128(c2[4532],_mm_xor_si128(c2[5192],_mm_xor_si128(c2[9184],_mm_xor_si128(c2[6552],_mm_xor_si128(c2[7212],_mm_xor_si128(c2[10508],_mm_xor_si128(c2[609],_mm_xor_si128(c2[9216],_mm_xor_si128(c2[8561],_mm_xor_si128(c2[9881],c2[10541])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+     d2[480]=simde_mm_xor_si128(c2[7266],simde_mm_xor_si128(c2[2644],simde_mm_xor_si128(c2[10],simde_mm_xor_si128(c2[670],simde_mm_xor_si128(c2[1986],simde_mm_xor_si128(c2[2646],simde_mm_xor_si128(c2[4626],simde_mm_xor_si128(c2[4662],simde_mm_xor_si128(c2[4663],simde_mm_xor_si128(c2[5323],simde_mm_xor_si128(c2[6644],simde_mm_xor_si128(c2[7304],simde_mm_xor_si128(c2[1386],simde_mm_xor_si128(c2[73],simde_mm_xor_si128(c2[2712],simde_mm_xor_si128(c2[3372],simde_mm_xor_si128(c2[6696],simde_mm_xor_si128(c2[8016],simde_mm_xor_si128(c2[3398],simde_mm_xor_si128(c2[4058],simde_mm_xor_si128(c2[792],simde_mm_xor_si128(c2[6727],simde_mm_xor_si128(c2[7387],simde_mm_xor_si128(c2[10031],simde_mm_xor_si128(c2[132],simde_mm_xor_si128(c2[5433],simde_mm_xor_si128(c2[2796],simde_mm_xor_si128(c2[10062],simde_mm_xor_si128(c2[163],simde_mm_xor_si128(c2[4803],simde_mm_xor_si128(c2[5465],simde_mm_xor_si128(c2[6125],simde_mm_xor_si128(c2[9420],simde_mm_xor_si128(c2[10080],simde_mm_xor_si128(c2[214],simde_mm_xor_si128(c2[4179],simde_mm_xor_si128(c2[4839],simde_mm_xor_si128(c2[8799],simde_mm_xor_si128(c2[9459],simde_mm_xor_si128(c2[249],simde_mm_xor_si128(c2[10143],simde_mm_xor_si128(c2[244],simde_mm_xor_si128(c2[2224],simde_mm_xor_si128(c2[2884],simde_mm_xor_si128(c2[6211],simde_mm_xor_si128(c2[8191],simde_mm_xor_si128(c2[7544],simde_mm_xor_si128(c2[8204],simde_mm_xor_si128(c2[3612],simde_mm_xor_si128(c2[4922],simde_mm_xor_si128(c2[5582],simde_mm_xor_si128(c2[2953],simde_mm_xor_si128(c2[3613],simde_mm_xor_si128(c2[1001],simde_mm_xor_si128(c2[10243],simde_mm_xor_si128(c2[7601],simde_mm_xor_si128(c2[8261],simde_mm_xor_si128(c2[3006],simde_mm_xor_si128(c2[1030],simde_mm_xor_si128(c2[6314],simde_mm_xor_si128(c2[6974],simde_mm_xor_si128(c2[9606],simde_mm_xor_si128(c2[10292],simde_mm_xor_si128(c2[6343],simde_mm_xor_si128(c2[7003],simde_mm_xor_si128(c2[7664],simde_mm_xor_si128(c2[8324],simde_mm_xor_si128(c2[2404],simde_mm_xor_si128(c2[5704],simde_mm_xor_si128(c2[6364],simde_mm_xor_si128(c2[7028],simde_mm_xor_si128(c2[7688],simde_mm_xor_si128(c2[5049],simde_mm_xor_si128(c2[7716],simde_mm_xor_si128(c2[6393],simde_mm_xor_si128(c2[3090],simde_mm_xor_si128(c2[3750],simde_mm_xor_si128(c2[9071],simde_mm_xor_si128(c2[9067],simde_mm_xor_si128(c2[7750],simde_mm_xor_si128(c2[8410],simde_mm_xor_si128(c2[7773],simde_mm_xor_si128(c2[2494],simde_mm_xor_si128(c2[3154],simde_mm_xor_si128(c2[2490],simde_mm_xor_si128(c2[3150],simde_mm_xor_si128(c2[552],simde_mm_xor_si128(c2[546],simde_mm_xor_si128(c2[1206],simde_mm_xor_si128(c2[9792],simde_mm_xor_si128(c2[10452],simde_mm_xor_si128(c2[9813],simde_mm_xor_si128(c2[5198],simde_mm_xor_si128(c2[4532],simde_mm_xor_si128(c2[5192],simde_mm_xor_si128(c2[9184],simde_mm_xor_si128(c2[6552],simde_mm_xor_si128(c2[7212],simde_mm_xor_si128(c2[10508],simde_mm_xor_si128(c2[609],simde_mm_xor_si128(c2[9216],simde_mm_xor_si128(c2[8561],simde_mm_xor_si128(c2[9881],c2[10541])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
 
 //row: 33
-     d2[495]=_mm_xor_si128(c2[4662],_mm_xor_si128(c2[2708],_mm_xor_si128(c2[8260],c2[1294])));
+     d2[495]=simde_mm_xor_si128(c2[4662],simde_mm_xor_si128(c2[2708],simde_mm_xor_si128(c2[8260],c2[1294])));
 
 //row: 34
-     d2[510]=_mm_xor_si128(c2[670],_mm_xor_si128(c2[8804],_mm_xor_si128(c2[9698],c2[3161])));
+     d2[510]=simde_mm_xor_si128(c2[670],simde_mm_xor_si128(c2[8804],simde_mm_xor_si128(c2[9698],c2[3161])));
 
 //row: 35
-     d2[525]=_mm_xor_si128(c2[7274],_mm_xor_si128(c2[2652],_mm_xor_si128(c2[663],_mm_xor_si128(c2[2654],_mm_xor_si128(c2[4655],_mm_xor_si128(c2[5316],_mm_xor_si128(c2[7297],_mm_xor_si128(c2[4664],_mm_xor_si128(c2[1394],_mm_xor_si128(c2[66],_mm_xor_si128(c2[3365],_mm_xor_si128(c2[6704],_mm_xor_si128(c2[8024],_mm_xor_si128(c2[4051],_mm_xor_si128(c2[785],_mm_xor_si128(c2[7380],_mm_xor_si128(c2[125],_mm_xor_si128(c2[5441],_mm_xor_si128(c2[2804],_mm_xor_si128(c2[156],_mm_xor_si128(c2[4811],_mm_xor_si128(c2[6133],_mm_xor_si128(c2[10088],_mm_xor_si128(c2[9430],_mm_xor_si128(c2[222],_mm_xor_si128(c2[4832],_mm_xor_si128(c2[9452],_mm_xor_si128(c2[242],_mm_xor_si128(c2[252],_mm_xor_si128(c2[2892],_mm_xor_si128(c2[6219],_mm_xor_si128(c2[8199],_mm_xor_si128(c2[8197],_mm_xor_si128(c2[3605],_mm_xor_si128(c2[5590],_mm_xor_si128(c2[3606],_mm_xor_si128(c2[994],_mm_xor_si128(c2[10236],_mm_xor_si128(c2[8254],_mm_xor_si128(c2[3014],_mm_xor_si128(c2[1023],_mm_xor_si128(c2[6967],_mm_xor_si128(c2[1029],_mm_xor_si128(c2[10300],_mm_xor_si128(c2[6996],_mm_xor_si128(c2[8317],_mm_xor_si128(c2[2412],_mm_xor_si128(c2[6372],_mm_xor_si128(c2[7681],_mm_xor_si128(c2[7724],_mm_xor_si128(c2[6401],_mm_xor_si128(c2[3758],_mm_xor_si128(c2[9064],_mm_xor_si128(c2[9060],_mm_xor_si128(c2[8403],_mm_xor_si128(c2[7781],_mm_xor_si128(c2[3162],_mm_xor_si128(c2[3158],_mm_xor_si128(c2[545],_mm_xor_si128(c2[1214],_mm_xor_si128(c2[10445],_mm_xor_si128(c2[9821],_mm_xor_si128(c2[5191],_mm_xor_si128(c2[5200],_mm_xor_si128(c2[9192],_mm_xor_si128(c2[7205],_mm_xor_si128(c2[602],_mm_xor_si128(c2[9224],_mm_xor_si128(c2[8554],c2[10534])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+     d2[525]=simde_mm_xor_si128(c2[7274],simde_mm_xor_si128(c2[2652],simde_mm_xor_si128(c2[663],simde_mm_xor_si128(c2[2654],simde_mm_xor_si128(c2[4655],simde_mm_xor_si128(c2[5316],simde_mm_xor_si128(c2[7297],simde_mm_xor_si128(c2[4664],simde_mm_xor_si128(c2[1394],simde_mm_xor_si128(c2[66],simde_mm_xor_si128(c2[3365],simde_mm_xor_si128(c2[6704],simde_mm_xor_si128(c2[8024],simde_mm_xor_si128(c2[4051],simde_mm_xor_si128(c2[785],simde_mm_xor_si128(c2[7380],simde_mm_xor_si128(c2[125],simde_mm_xor_si128(c2[5441],simde_mm_xor_si128(c2[2804],simde_mm_xor_si128(c2[156],simde_mm_xor_si128(c2[4811],simde_mm_xor_si128(c2[6133],simde_mm_xor_si128(c2[10088],simde_mm_xor_si128(c2[9430],simde_mm_xor_si128(c2[222],simde_mm_xor_si128(c2[4832],simde_mm_xor_si128(c2[9452],simde_mm_xor_si128(c2[242],simde_mm_xor_si128(c2[252],simde_mm_xor_si128(c2[2892],simde_mm_xor_si128(c2[6219],simde_mm_xor_si128(c2[8199],simde_mm_xor_si128(c2[8197],simde_mm_xor_si128(c2[3605],simde_mm_xor_si128(c2[5590],simde_mm_xor_si128(c2[3606],simde_mm_xor_si128(c2[994],simde_mm_xor_si128(c2[10236],simde_mm_xor_si128(c2[8254],simde_mm_xor_si128(c2[3014],simde_mm_xor_si128(c2[1023],simde_mm_xor_si128(c2[6967],simde_mm_xor_si128(c2[1029],simde_mm_xor_si128(c2[10300],simde_mm_xor_si128(c2[6996],simde_mm_xor_si128(c2[8317],simde_mm_xor_si128(c2[2412],simde_mm_xor_si128(c2[6372],simde_mm_xor_si128(c2[7681],simde_mm_xor_si128(c2[7724],simde_mm_xor_si128(c2[6401],simde_mm_xor_si128(c2[3758],simde_mm_xor_si128(c2[9064],simde_mm_xor_si128(c2[9060],simde_mm_xor_si128(c2[8403],simde_mm_xor_si128(c2[7781],simde_mm_xor_si128(c2[3162],simde_mm_xor_si128(c2[3158],simde_mm_xor_si128(c2[545],simde_mm_xor_si128(c2[1214],simde_mm_xor_si128(c2[10445],simde_mm_xor_si128(c2[9821],simde_mm_xor_si128(c2[5191],simde_mm_xor_si128(c2[5200],simde_mm_xor_si128(c2[9192],simde_mm_xor_si128(c2[7205],simde_mm_xor_si128(c2[602],simde_mm_xor_si128(c2[9224],simde_mm_xor_si128(c2[8554],c2[10534])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
 
 //row: 36
-     d2[540]=_mm_xor_si128(c2[7260],_mm_xor_si128(c2[10332],_mm_xor_si128(c2[7052],c2[3186])));
+     d2[540]=simde_mm_xor_si128(c2[7260],simde_mm_xor_si128(c2[10332],simde_mm_xor_si128(c2[7052],c2[3186])));
 
 //row: 37
-     d2[555]=_mm_xor_si128(c2[5283],_mm_xor_si128(c2[5943],_mm_xor_si128(c2[1321],_mm_xor_si128(c2[9906],_mm_xor_si128(c2[1323],_mm_xor_si128(c2[2679],_mm_xor_si128(c2[3339],_mm_xor_si128(c2[4000],_mm_xor_si128(c2[5981],_mm_xor_si128(c2[7293],_mm_xor_si128(c2[9962],_mm_xor_si128(c2[63],_mm_xor_si128(c2[9309],_mm_xor_si128(c2[2049],_mm_xor_si128(c2[4713],_mm_xor_si128(c2[5373],_mm_xor_si128(c2[6693],_mm_xor_si128(c2[2735],_mm_xor_si128(c2[10028],_mm_xor_si128(c2[6064],_mm_xor_si128(c2[9368],_mm_xor_si128(c2[3450],_mm_xor_si128(c2[4110],_mm_xor_si128(c2[1473],_mm_xor_si128(c2[9399],_mm_xor_si128(c2[2820],_mm_xor_si128(c2[3480],_mm_xor_si128(c2[4802],_mm_xor_si128(c2[8772],_mm_xor_si128(c2[9450],_mm_xor_si128(c2[3516],_mm_xor_si128(c2[8136],_mm_xor_si128(c2[9485],_mm_xor_si128(c2[9480],_mm_xor_si128(c2[1561],_mm_xor_si128(c2[4243],_mm_xor_si128(c2[4903],_mm_xor_si128(c2[6883],_mm_xor_si128(c2[6881],_mm_xor_si128(c2[1629],_mm_xor_si128(c2[2289],_mm_xor_si128(c2[4274],_mm_xor_si128(c2[2290],_mm_xor_si128(c2[9577],_mm_xor_si128(c2[10237],_mm_xor_si128(c2[8920],_mm_xor_si128(c2[6938],_mm_xor_si128(c2[1023],_mm_xor_si128(c2[1683],_mm_xor_si128(c2[10266],_mm_xor_si128(c2[5651],_mm_xor_si128(c2[8324],_mm_xor_si128(c2[8984],_mm_xor_si128(c2[5680],_mm_xor_si128(c2[7001],_mm_xor_si128(c2[8322],_mm_xor_si128(c2[1081],_mm_xor_si128(c2[5041],_mm_xor_si128(c2[6365],_mm_xor_si128(c2[5733],_mm_xor_si128(c2[6393],_mm_xor_si128(c2[5070],_mm_xor_si128(c2[2442],_mm_xor_si128(c2[7088],_mm_xor_si128(c2[7748],_mm_xor_si128(c2[7744],_mm_xor_si128(c2[7087],_mm_xor_si128(c2[6450],_mm_xor_si128(c2[1831],_mm_xor_si128(c2[1842],_mm_xor_si128(c2[9128],_mm_xor_si128(c2[9788],_mm_xor_si128(c2[10442],_mm_xor_si128(c2[9129],_mm_xor_si128(c2[7830],_mm_xor_si128(c2[8490],_mm_xor_si128(c2[3875],_mm_xor_si128(c2[3884],_mm_xor_si128(c2[7201],_mm_xor_si128(c2[7861],_mm_xor_si128(c2[5889],_mm_xor_si128(c2[9845],_mm_xor_si128(c2[7233],_mm_xor_si128(c2[7893],_mm_xor_si128(c2[7238],c2[9218])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+     d2[555]=simde_mm_xor_si128(c2[5283],simde_mm_xor_si128(c2[5943],simde_mm_xor_si128(c2[1321],simde_mm_xor_si128(c2[9906],simde_mm_xor_si128(c2[1323],simde_mm_xor_si128(c2[2679],simde_mm_xor_si128(c2[3339],simde_mm_xor_si128(c2[4000],simde_mm_xor_si128(c2[5981],simde_mm_xor_si128(c2[7293],simde_mm_xor_si128(c2[9962],simde_mm_xor_si128(c2[63],simde_mm_xor_si128(c2[9309],simde_mm_xor_si128(c2[2049],simde_mm_xor_si128(c2[4713],simde_mm_xor_si128(c2[5373],simde_mm_xor_si128(c2[6693],simde_mm_xor_si128(c2[2735],simde_mm_xor_si128(c2[10028],simde_mm_xor_si128(c2[6064],simde_mm_xor_si128(c2[9368],simde_mm_xor_si128(c2[3450],simde_mm_xor_si128(c2[4110],simde_mm_xor_si128(c2[1473],simde_mm_xor_si128(c2[9399],simde_mm_xor_si128(c2[2820],simde_mm_xor_si128(c2[3480],simde_mm_xor_si128(c2[4802],simde_mm_xor_si128(c2[8772],simde_mm_xor_si128(c2[9450],simde_mm_xor_si128(c2[3516],simde_mm_xor_si128(c2[8136],simde_mm_xor_si128(c2[9485],simde_mm_xor_si128(c2[9480],simde_mm_xor_si128(c2[1561],simde_mm_xor_si128(c2[4243],simde_mm_xor_si128(c2[4903],simde_mm_xor_si128(c2[6883],simde_mm_xor_si128(c2[6881],simde_mm_xor_si128(c2[1629],simde_mm_xor_si128(c2[2289],simde_mm_xor_si128(c2[4274],simde_mm_xor_si128(c2[2290],simde_mm_xor_si128(c2[9577],simde_mm_xor_si128(c2[10237],simde_mm_xor_si128(c2[8920],simde_mm_xor_si128(c2[6938],simde_mm_xor_si128(c2[1023],simde_mm_xor_si128(c2[1683],simde_mm_xor_si128(c2[10266],simde_mm_xor_si128(c2[5651],simde_mm_xor_si128(c2[8324],simde_mm_xor_si128(c2[8984],simde_mm_xor_si128(c2[5680],simde_mm_xor_si128(c2[7001],simde_mm_xor_si128(c2[8322],simde_mm_xor_si128(c2[1081],simde_mm_xor_si128(c2[5041],simde_mm_xor_si128(c2[6365],simde_mm_xor_si128(c2[5733],simde_mm_xor_si128(c2[6393],simde_mm_xor_si128(c2[5070],simde_mm_xor_si128(c2[2442],simde_mm_xor_si128(c2[7088],simde_mm_xor_si128(c2[7748],simde_mm_xor_si128(c2[7744],simde_mm_xor_si128(c2[7087],simde_mm_xor_si128(c2[6450],simde_mm_xor_si128(c2[1831],simde_mm_xor_si128(c2[1842],simde_mm_xor_si128(c2[9128],simde_mm_xor_si128(c2[9788],simde_mm_xor_si128(c2[10442],simde_mm_xor_si128(c2[9129],simde_mm_xor_si128(c2[7830],simde_mm_xor_si128(c2[8490],simde_mm_xor_si128(c2[3875],simde_mm_xor_si128(c2[3884],simde_mm_xor_si128(c2[7201],simde_mm_xor_si128(c2[7861],simde_mm_xor_si128(c2[5889],simde_mm_xor_si128(c2[9845],simde_mm_xor_si128(c2[7233],simde_mm_xor_si128(c2[7893],simde_mm_xor_si128(c2[7238],c2[9218])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
 
 //row: 38
-     d2[570]=_mm_xor_si128(c2[5947],_mm_xor_si128(c2[6875],_mm_xor_si128(c2[6901],c2[8288])));
+     d2[570]=simde_mm_xor_si128(c2[5947],simde_mm_xor_si128(c2[6875],simde_mm_xor_si128(c2[6901],c2[8288])));
 
 //row: 39
-     d2[585]=_mm_xor_si128(c2[2017],_mm_xor_si128(c2[8021],_mm_xor_si128(c2[5500],c2[3213])));
+     d2[585]=simde_mm_xor_si128(c2[2017],simde_mm_xor_si128(c2[8021],simde_mm_xor_si128(c2[5500],c2[3213])));
 
 //row: 40
-     d2[600]=_mm_xor_si128(c2[2640],_mm_xor_si128(c2[4866],c2[9751]));
+     d2[600]=simde_mm_xor_si128(c2[2640],simde_mm_xor_si128(c2[4866],c2[9751]));
 
 //row: 41
-     d2[615]=_mm_xor_si128(c2[3333],_mm_xor_si128(c2[8681],_mm_xor_si128(c2[4903],c2[5821])));
+     d2[615]=simde_mm_xor_si128(c2[3333],simde_mm_xor_si128(c2[8681],simde_mm_xor_si128(c2[4903],c2[5821])));
 
 //row: 42
-     d2[630]=_mm_xor_si128(c2[9912],_mm_xor_si128(c2[5290],_mm_xor_si128(c2[2641],_mm_xor_si128(c2[3301],_mm_xor_si128(c2[4632],_mm_xor_si128(c2[5292],_mm_xor_si128(c2[9253],_mm_xor_si128(c2[7293],_mm_xor_si128(c2[7294],_mm_xor_si128(c2[7954],_mm_xor_si128(c2[9275],_mm_xor_si128(c2[9935],_mm_xor_si128(c2[4032],_mm_xor_si128(c2[2704],_mm_xor_si128(c2[5343],_mm_xor_si128(c2[6003],_mm_xor_si128(c2[9342],_mm_xor_si128(c2[103],_mm_xor_si128(c2[6044],_mm_xor_si128(c2[6704],_mm_xor_si128(c2[3423],_mm_xor_si128(c2[9373],_mm_xor_si128(c2[10033],_mm_xor_si128(c2[2103],_mm_xor_si128(c2[2763],_mm_xor_si128(c2[6730],_mm_xor_si128(c2[8079],_mm_xor_si128(c2[5442],_mm_xor_si128(c2[2134],_mm_xor_si128(c2[2794],_mm_xor_si128(c2[7449],_mm_xor_si128(c2[8111],_mm_xor_si128(c2[8771],_mm_xor_si128(c2[1507],_mm_xor_si128(c2[2167],_mm_xor_si128(c2[2860],_mm_xor_si128(c2[6810],_mm_xor_si128(c2[7470],_mm_xor_si128(c2[871],_mm_xor_si128(c2[1531],_mm_xor_si128(c2[2880],_mm_xor_si128(c2[2230],_mm_xor_si128(c2[2890],_mm_xor_si128(c2[4870],_mm_xor_si128(c2[5530],_mm_xor_si128(c2[8857],_mm_xor_si128(c2[278],_mm_xor_si128(c2[10175],_mm_xor_si128(c2[276],_mm_xor_si128(c2[6243],_mm_xor_si128(c2[7568],_mm_xor_si128(c2[8228],_mm_xor_si128(c2[5584],_mm_xor_si128(c2[6244],_mm_xor_si128(c2[3632],_mm_xor_si128(c2[2315],_mm_xor_si128(c2[10232],_mm_xor_si128(c2[333],_mm_xor_si128(c2[5652],_mm_xor_si128(c2[3661],_mm_xor_si128(c2[8945],_mm_xor_si128(c2[9605],_mm_xor_si128(c2[2379],_mm_xor_si128(c2[8974],_mm_xor_si128(c2[9634],_mm_xor_si128(c2[10295],_mm_xor_si128(c2[396],_mm_xor_si128(c2[5050],_mm_xor_si128(c2[8350],_mm_xor_si128(c2[9010],_mm_xor_si128(c2[9674],_mm_xor_si128(c2[10334],_mm_xor_si128(c2[10362],_mm_xor_si128(c2[9039],_mm_xor_si128(c2[5736],_mm_xor_si128(c2[6396],_mm_xor_si128(c2[1143],_mm_xor_si128(c2[1154],_mm_xor_si128(c2[10381],_mm_xor_si128(c2[482],_mm_xor_si128(c2[10419],_mm_xor_si128(c2[5140],_mm_xor_si128(c2[5800],_mm_xor_si128(c2[5136],_mm_xor_si128(c2[5796],_mm_xor_si128(c2[3183],_mm_xor_si128(c2[3192],_mm_xor_si128(c2[3852],_mm_xor_si128(c2[1864],_mm_xor_si128(c2[2524],_mm_xor_si128(c2[1900],_mm_xor_si128(c2[7844],_mm_xor_si128(c2[7178],_mm_xor_si128(c2[7838],_mm_xor_si128(c2[1271],_mm_xor_si128(c2[9183],_mm_xor_si128(c2[9843],_mm_xor_si128(c2[2580],_mm_xor_si128(c2[3240],_mm_xor_si128(c2[1303],_mm_xor_si128(c2[633],_mm_xor_si128(c2[1953],c2[2613]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+     d2[630]=simde_mm_xor_si128(c2[9912],simde_mm_xor_si128(c2[5290],simde_mm_xor_si128(c2[2641],simde_mm_xor_si128(c2[3301],simde_mm_xor_si128(c2[4632],simde_mm_xor_si128(c2[5292],simde_mm_xor_si128(c2[9253],simde_mm_xor_si128(c2[7293],simde_mm_xor_si128(c2[7294],simde_mm_xor_si128(c2[7954],simde_mm_xor_si128(c2[9275],simde_mm_xor_si128(c2[9935],simde_mm_xor_si128(c2[4032],simde_mm_xor_si128(c2[2704],simde_mm_xor_si128(c2[5343],simde_mm_xor_si128(c2[6003],simde_mm_xor_si128(c2[9342],simde_mm_xor_si128(c2[103],simde_mm_xor_si128(c2[6044],simde_mm_xor_si128(c2[6704],simde_mm_xor_si128(c2[3423],simde_mm_xor_si128(c2[9373],simde_mm_xor_si128(c2[10033],simde_mm_xor_si128(c2[2103],simde_mm_xor_si128(c2[2763],simde_mm_xor_si128(c2[6730],simde_mm_xor_si128(c2[8079],simde_mm_xor_si128(c2[5442],simde_mm_xor_si128(c2[2134],simde_mm_xor_si128(c2[2794],simde_mm_xor_si128(c2[7449],simde_mm_xor_si128(c2[8111],simde_mm_xor_si128(c2[8771],simde_mm_xor_si128(c2[1507],simde_mm_xor_si128(c2[2167],simde_mm_xor_si128(c2[2860],simde_mm_xor_si128(c2[6810],simde_mm_xor_si128(c2[7470],simde_mm_xor_si128(c2[871],simde_mm_xor_si128(c2[1531],simde_mm_xor_si128(c2[2880],simde_mm_xor_si128(c2[2230],simde_mm_xor_si128(c2[2890],simde_mm_xor_si128(c2[4870],simde_mm_xor_si128(c2[5530],simde_mm_xor_si128(c2[8857],simde_mm_xor_si128(c2[278],simde_mm_xor_si128(c2[10175],simde_mm_xor_si128(c2[276],simde_mm_xor_si128(c2[6243],simde_mm_xor_si128(c2[7568],simde_mm_xor_si128(c2[8228],simde_mm_xor_si128(c2[5584],simde_mm_xor_si128(c2[6244],simde_mm_xor_si128(c2[3632],simde_mm_xor_si128(c2[2315],simde_mm_xor_si128(c2[10232],simde_mm_xor_si128(c2[333],simde_mm_xor_si128(c2[5652],simde_mm_xor_si128(c2[3661],simde_mm_xor_si128(c2[8945],simde_mm_xor_si128(c2[9605],simde_mm_xor_si128(c2[2379],simde_mm_xor_si128(c2[8974],simde_mm_xor_si128(c2[9634],simde_mm_xor_si128(c2[10295],simde_mm_xor_si128(c2[396],simde_mm_xor_si128(c2[5050],simde_mm_xor_si128(c2[8350],simde_mm_xor_si128(c2[9010],simde_mm_xor_si128(c2[9674],simde_mm_xor_si128(c2[10334],simde_mm_xor_si128(c2[10362],simde_mm_xor_si128(c2[9039],simde_mm_xor_si128(c2[5736],simde_mm_xor_si128(c2[6396],simde_mm_xor_si128(c2[1143],simde_mm_xor_si128(c2[1154],simde_mm_xor_si128(c2[10381],simde_mm_xor_si128(c2[482],simde_mm_xor_si128(c2[10419],simde_mm_xor_si128(c2[5140],simde_mm_xor_si128(c2[5800],simde_mm_xor_si128(c2[5136],simde_mm_xor_si128(c2[5796],simde_mm_xor_si128(c2[3183],simde_mm_xor_si128(c2[3192],simde_mm_xor_si128(c2[3852],simde_mm_xor_si128(c2[1864],simde_mm_xor_si128(c2[2524],simde_mm_xor_si128(c2[1900],simde_mm_xor_si128(c2[7844],simde_mm_xor_si128(c2[7178],simde_mm_xor_si128(c2[7838],simde_mm_xor_si128(c2[1271],simde_mm_xor_si128(c2[9183],simde_mm_xor_si128(c2[9843],simde_mm_xor_si128(c2[2580],simde_mm_xor_si128(c2[3240],simde_mm_xor_si128(c2[1303],simde_mm_xor_si128(c2[633],simde_mm_xor_si128(c2[1953],c2[2613]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
 
 //row: 43
-     d2[645]=_mm_xor_si128(c2[3301],_mm_xor_si128(c2[9253],_mm_xor_si128(c2[7264],_mm_xor_si128(c2[8580],_mm_xor_si128(c2[9240],_mm_xor_si128(c2[697],_mm_xor_si128(c2[1358],_mm_xor_si128(c2[2679],_mm_xor_si128(c2[3339],_mm_xor_si128(c2[3991],_mm_xor_si128(c2[7980],_mm_xor_si128(c2[6667],_mm_xor_si128(c2[9966],_mm_xor_si128(c2[2731],_mm_xor_si128(c2[4051],_mm_xor_si128(c2[9992],_mm_xor_si128(c2[93],_mm_xor_si128(c2[7386],_mm_xor_si128(c2[3422],_mm_xor_si128(c2[6066],_mm_xor_si128(c2[6726],_mm_xor_si128(c2[1483],_mm_xor_si128(c2[9390],_mm_xor_si128(c2[6757],_mm_xor_si128(c2[853],_mm_xor_si128(c2[2160],_mm_xor_si128(c2[5470],_mm_xor_si128(c2[6130],_mm_xor_si128(c2[6823],_mm_xor_si128(c2[874],_mm_xor_si128(c2[4834],_mm_xor_si128(c2[5494],_mm_xor_si128(c2[6843],_mm_xor_si128(c2[6853],_mm_xor_si128(c2[8833],_mm_xor_si128(c2[9493],_mm_xor_si128(c2[2261],_mm_xor_si128(c2[4241],_mm_xor_si128(c2[4239],_mm_xor_si128(c2[10206],_mm_xor_si128(c2[1632],_mm_xor_si128(c2[9547],_mm_xor_si128(c2[10207],_mm_xor_si128(c2[7595],_mm_xor_si128(c2[6278],_mm_xor_si128(c2[3636],_mm_xor_si128(c2[4296],_mm_xor_si128(c2[9600],_mm_xor_si128(c2[7624],_mm_xor_si128(c2[2349],_mm_xor_si128(c2[3009],_mm_xor_si128(c2[6342],_mm_xor_si128(c2[3038],_mm_xor_si128(c2[3699],_mm_xor_si128(c2[4359],_mm_xor_si128(c2[9013],_mm_xor_si128(c2[2414],_mm_xor_si128(c2[3063],_mm_xor_si128(c2[3723],_mm_xor_si128(c2[3751],_mm_xor_si128(c2[2443],_mm_xor_si128(c2[10359],_mm_xor_si128(c2[5106],_mm_xor_si128(c2[5102],_mm_xor_si128(c2[3785],_mm_xor_si128(c2[4445],_mm_xor_si128(c2[10387],_mm_xor_si128(c2[3823],_mm_xor_si128(c2[9763],_mm_xor_si128(c2[9099],_mm_xor_si128(c2[9759],_mm_xor_si128(c2[7146],_mm_xor_si128(c2[7800],_mm_xor_si128(c2[5827],_mm_xor_si128(c2[6487],_mm_xor_si128(c2[1203],_mm_xor_si128(c2[5863],_mm_xor_si128(c2[1233],_mm_xor_si128(c2[1242],_mm_xor_si128(c2[5234],_mm_xor_si128(c2[3247],_mm_xor_si128(c2[6543],_mm_xor_si128(c2[7203],_mm_xor_si128(c2[5251],_mm_xor_si128(c2[4596],_mm_xor_si128(c2[5916],c2[6576]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+     d2[645]=simde_mm_xor_si128(c2[3301],simde_mm_xor_si128(c2[9253],simde_mm_xor_si128(c2[7264],simde_mm_xor_si128(c2[8580],simde_mm_xor_si128(c2[9240],simde_mm_xor_si128(c2[697],simde_mm_xor_si128(c2[1358],simde_mm_xor_si128(c2[2679],simde_mm_xor_si128(c2[3339],simde_mm_xor_si128(c2[3991],simde_mm_xor_si128(c2[7980],simde_mm_xor_si128(c2[6667],simde_mm_xor_si128(c2[9966],simde_mm_xor_si128(c2[2731],simde_mm_xor_si128(c2[4051],simde_mm_xor_si128(c2[9992],simde_mm_xor_si128(c2[93],simde_mm_xor_si128(c2[7386],simde_mm_xor_si128(c2[3422],simde_mm_xor_si128(c2[6066],simde_mm_xor_si128(c2[6726],simde_mm_xor_si128(c2[1483],simde_mm_xor_si128(c2[9390],simde_mm_xor_si128(c2[6757],simde_mm_xor_si128(c2[853],simde_mm_xor_si128(c2[2160],simde_mm_xor_si128(c2[5470],simde_mm_xor_si128(c2[6130],simde_mm_xor_si128(c2[6823],simde_mm_xor_si128(c2[874],simde_mm_xor_si128(c2[4834],simde_mm_xor_si128(c2[5494],simde_mm_xor_si128(c2[6843],simde_mm_xor_si128(c2[6853],simde_mm_xor_si128(c2[8833],simde_mm_xor_si128(c2[9493],simde_mm_xor_si128(c2[2261],simde_mm_xor_si128(c2[4241],simde_mm_xor_si128(c2[4239],simde_mm_xor_si128(c2[10206],simde_mm_xor_si128(c2[1632],simde_mm_xor_si128(c2[9547],simde_mm_xor_si128(c2[10207],simde_mm_xor_si128(c2[7595],simde_mm_xor_si128(c2[6278],simde_mm_xor_si128(c2[3636],simde_mm_xor_si128(c2[4296],simde_mm_xor_si128(c2[9600],simde_mm_xor_si128(c2[7624],simde_mm_xor_si128(c2[2349],simde_mm_xor_si128(c2[3009],simde_mm_xor_si128(c2[6342],simde_mm_xor_si128(c2[3038],simde_mm_xor_si128(c2[3699],simde_mm_xor_si128(c2[4359],simde_mm_xor_si128(c2[9013],simde_mm_xor_si128(c2[2414],simde_mm_xor_si128(c2[3063],simde_mm_xor_si128(c2[3723],simde_mm_xor_si128(c2[3751],simde_mm_xor_si128(c2[2443],simde_mm_xor_si128(c2[10359],simde_mm_xor_si128(c2[5106],simde_mm_xor_si128(c2[5102],simde_mm_xor_si128(c2[3785],simde_mm_xor_si128(c2[4445],simde_mm_xor_si128(c2[10387],simde_mm_xor_si128(c2[3823],simde_mm_xor_si128(c2[9763],simde_mm_xor_si128(c2[9099],simde_mm_xor_si128(c2[9759],simde_mm_xor_si128(c2[7146],simde_mm_xor_si128(c2[7800],simde_mm_xor_si128(c2[5827],simde_mm_xor_si128(c2[6487],simde_mm_xor_si128(c2[1203],simde_mm_xor_si128(c2[5863],simde_mm_xor_si128(c2[1233],simde_mm_xor_si128(c2[1242],simde_mm_xor_si128(c2[5234],simde_mm_xor_si128(c2[3247],simde_mm_xor_si128(c2[6543],simde_mm_xor_si128(c2[7203],simde_mm_xor_si128(c2[5251],simde_mm_xor_si128(c2[4596],simde_mm_xor_si128(c2[5916],c2[6576]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
 
 //row: 44
-     d2[660]=_mm_xor_si128(c2[7262],_mm_xor_si128(c2[2640],_mm_xor_si128(c2[666],_mm_xor_si128(c2[2642],_mm_xor_si128(c2[9911],_mm_xor_si128(c2[4658],_mm_xor_si128(c2[5319],_mm_xor_si128(c2[7300],_mm_xor_si128(c2[1382],_mm_xor_si128(c2[69],_mm_xor_si128(c2[3368],_mm_xor_si128(c2[6692],_mm_xor_si128(c2[8012],_mm_xor_si128(c2[4054],_mm_xor_si128(c2[788],_mm_xor_si128(c2[7383],_mm_xor_si128(c2[128],_mm_xor_si128(c2[5444],_mm_xor_si128(c2[2792],_mm_xor_si128(c2[159],_mm_xor_si128(c2[4814],_mm_xor_si128(c2[6121],_mm_xor_si128(c2[10091],_mm_xor_si128(c2[210],_mm_xor_si128(c2[4835],_mm_xor_si128(c2[9455],_mm_xor_si128(c2[2203],_mm_xor_si128(c2[245],_mm_xor_si128(c2[240],_mm_xor_si128(c2[2880],_mm_xor_si128(c2[6222],_mm_xor_si128(c2[8202],_mm_xor_si128(c2[8200],_mm_xor_si128(c2[7541],_mm_xor_si128(c2[3608],_mm_xor_si128(c2[5593],_mm_xor_si128(c2[3609],_mm_xor_si128(c2[997],_mm_xor_si128(c2[10239],_mm_xor_si128(c2[8257],_mm_xor_si128(c2[3002],_mm_xor_si128(c2[1026],_mm_xor_si128(c2[6970],_mm_xor_si128(c2[10303],_mm_xor_si128(c2[6999],_mm_xor_si128(c2[8320],_mm_xor_si128(c2[2400],_mm_xor_si128(c2[6360],_mm_xor_si128(c2[7684],_mm_xor_si128(c2[7712],_mm_xor_si128(c2[6404],_mm_xor_si128(c2[3761],_mm_xor_si128(c2[9067],_mm_xor_si128(c2[9063],_mm_xor_si128(c2[8406],_mm_xor_si128(c2[7784],_mm_xor_si128(c2[3150],_mm_xor_si128(c2[3161],_mm_xor_si128(c2[548],_mm_xor_si128(c2[1202],_mm_xor_si128(c2[10448],_mm_xor_si128(c2[9824],_mm_xor_si128(c2[5194],_mm_xor_si128(c2[5203],_mm_xor_si128(c2[9180],_mm_xor_si128(c2[7208],_mm_xor_si128(c2[605],_mm_xor_si128(c2[9212],_mm_xor_si128(c2[8557],c2[10537])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+     d2[660]=simde_mm_xor_si128(c2[7262],simde_mm_xor_si128(c2[2640],simde_mm_xor_si128(c2[666],simde_mm_xor_si128(c2[2642],simde_mm_xor_si128(c2[9911],simde_mm_xor_si128(c2[4658],simde_mm_xor_si128(c2[5319],simde_mm_xor_si128(c2[7300],simde_mm_xor_si128(c2[1382],simde_mm_xor_si128(c2[69],simde_mm_xor_si128(c2[3368],simde_mm_xor_si128(c2[6692],simde_mm_xor_si128(c2[8012],simde_mm_xor_si128(c2[4054],simde_mm_xor_si128(c2[788],simde_mm_xor_si128(c2[7383],simde_mm_xor_si128(c2[128],simde_mm_xor_si128(c2[5444],simde_mm_xor_si128(c2[2792],simde_mm_xor_si128(c2[159],simde_mm_xor_si128(c2[4814],simde_mm_xor_si128(c2[6121],simde_mm_xor_si128(c2[10091],simde_mm_xor_si128(c2[210],simde_mm_xor_si128(c2[4835],simde_mm_xor_si128(c2[9455],simde_mm_xor_si128(c2[2203],simde_mm_xor_si128(c2[245],simde_mm_xor_si128(c2[240],simde_mm_xor_si128(c2[2880],simde_mm_xor_si128(c2[6222],simde_mm_xor_si128(c2[8202],simde_mm_xor_si128(c2[8200],simde_mm_xor_si128(c2[7541],simde_mm_xor_si128(c2[3608],simde_mm_xor_si128(c2[5593],simde_mm_xor_si128(c2[3609],simde_mm_xor_si128(c2[997],simde_mm_xor_si128(c2[10239],simde_mm_xor_si128(c2[8257],simde_mm_xor_si128(c2[3002],simde_mm_xor_si128(c2[1026],simde_mm_xor_si128(c2[6970],simde_mm_xor_si128(c2[10303],simde_mm_xor_si128(c2[6999],simde_mm_xor_si128(c2[8320],simde_mm_xor_si128(c2[2400],simde_mm_xor_si128(c2[6360],simde_mm_xor_si128(c2[7684],simde_mm_xor_si128(c2[7712],simde_mm_xor_si128(c2[6404],simde_mm_xor_si128(c2[3761],simde_mm_xor_si128(c2[9067],simde_mm_xor_si128(c2[9063],simde_mm_xor_si128(c2[8406],simde_mm_xor_si128(c2[7784],simde_mm_xor_si128(c2[3150],simde_mm_xor_si128(c2[3161],simde_mm_xor_si128(c2[548],simde_mm_xor_si128(c2[1202],simde_mm_xor_si128(c2[10448],simde_mm_xor_si128(c2[9824],simde_mm_xor_si128(c2[5194],simde_mm_xor_si128(c2[5203],simde_mm_xor_si128(c2[9180],simde_mm_xor_si128(c2[7208],simde_mm_xor_si128(c2[605],simde_mm_xor_si128(c2[9212],simde_mm_xor_si128(c2[8557],c2[10537])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
 
 //row: 45
-     d2[675]=_mm_xor_si128(c2[701],_mm_xor_si128(c2[1507],c2[8885]));
+     d2[675]=simde_mm_xor_si128(c2[701],simde_mm_xor_si128(c2[1507],c2[8885]));
   }
 }
diff --git a/openair1/PHY/CODING/nrLDPC_encoder/ldpc240_byte_128.c b/openair1/PHY/CODING/nrLDPC_encoder/ldpc240_byte_128.c
new file mode 100644
index 0000000000000000000000000000000000000000..c230ac480562a618a382b3f59cac03a85337c0f0
--- /dev/null
+++ b/openair1/PHY/CODING/nrLDPC_encoder/ldpc240_byte_128.c
@@ -0,0 +1,153 @@
+#ifndef __AVX2__
+#include "PHY/sse_intrin.h"
+// generated code for Zc=240, byte encoding
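+//
+// Editorial note (not produced by the generator): with this byte-oriented
+// encoding each Zc=240 circulant block spans 240 bytes, i.e. 15 16-byte
+// SIMD words, which is why the i2 loop below walks 15 slices and why the
+// output index d2[] advances in steps of 15 per parity row. Each
+// "//row: r" statement accumulates one 128-bit slice of the r-th parity
+// block as a GF(2) sum -- a chain of XORs over input slices c2[k], the
+// constant offsets k encoding the circulant rotations of the LDPC base
+// graph. In scalar form one row is equivalent to the following sketch
+// (row_off and k0..kn are hypothetical names, not used in this file):
+//
+//   for (int b = 0; b < 16; b++)   // 16 bytes per 128-bit slice
+//     d[row_off + b] = c[k0 + b] ^ c[k1 + b] ^ ... ^ c[kn + b];
+//
+// simde_mm_xor_si128 is SIMDE's portable equivalent of the SSE2
+// intrinsic _mm_xor_si128, so the same generated file also builds on
+// non-x86 targets (e.g. ARM NEON); the #ifndef __AVX2__ guard above
+// keeps this 128-bit variant out of AVX2 builds.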
+static inline void ldpc240_byte(uint8_t *c,uint8_t *d) {
+  simde__m128i *csimd=(simde__m128i *)c,*dsimd=(simde__m128i *)d;
+
+  simde__m128i *c2,*d2;
+
+  int i2;
+  for (i2=0; i2<15; i2++) {
+     c2=&csimd[i2];
+     d2=&dsimd[i2];
+
+//row: 0
+     d2[0]=simde_mm_xor_si128(c2[4628],simde_mm_xor_si128(c2[6],simde_mm_xor_si128(c2[8591],simde_mm_xor_si128(c2[8],simde_mm_xor_si128(c2[2024],simde_mm_xor_si128(c2[2670],simde_mm_xor_si128(c2[4651],simde_mm_xor_si128(c2[9307],simde_mm_xor_si128(c2[7994],simde_mm_xor_si128(c2[734],simde_mm_xor_si128(c2[4058],simde_mm_xor_si128(c2[5378],simde_mm_xor_si128(c2[1420],simde_mm_xor_si128(c2[8713],simde_mm_xor_si128(c2[4749],simde_mm_xor_si128(c2[8053],simde_mm_xor_si128(c2[2795],simde_mm_xor_si128(c2[158],simde_mm_xor_si128(c2[8084],simde_mm_xor_si128(c2[2165],simde_mm_xor_si128(c2[3487],simde_mm_xor_si128(c2[7442],simde_mm_xor_si128(c2[8135],simde_mm_xor_si128(c2[2201],simde_mm_xor_si128(c2[6821],simde_mm_xor_si128(c2[8170],simde_mm_xor_si128(c2[8165],simde_mm_xor_si128(c2[246],simde_mm_xor_si128(c2[3573],simde_mm_xor_si128(c2[5553],simde_mm_xor_si128(c2[5551],simde_mm_xor_si128(c2[974],simde_mm_xor_si128(c2[2944],simde_mm_xor_si128(c2[960],simde_mm_xor_si128(c2[8922],simde_mm_xor_si128(c2[7590],simde_mm_xor_si128(c2[5623],simde_mm_xor_si128(c2[368],simde_mm_xor_si128(c2[8951],simde_mm_xor_si128(c2[4321],simde_mm_xor_si128(c2[7654],simde_mm_xor_si128(c2[4350],simde_mm_xor_si128(c2[5671],simde_mm_xor_si128(c2[10325],simde_mm_xor_si128(c2[3726],simde_mm_xor_si128(c2[5050],simde_mm_xor_si128(c2[5078],simde_mm_xor_si128(c2[3755],simde_mm_xor_si128(c2[1112],simde_mm_xor_si128(c2[6433],simde_mm_xor_si128(c2[6429],simde_mm_xor_si128(c2[5772],simde_mm_xor_si128(c2[5135],simde_mm_xor_si128(c2[516],simde_mm_xor_si128(c2[512],simde_mm_xor_si128(c2[8473],simde_mm_xor_si128(c2[9127],simde_mm_xor_si128(c2[7814],simde_mm_xor_si128(c2[7175],simde_mm_xor_si128(c2[2560],simde_mm_xor_si128(c2[2554],simde_mm_xor_si128(c2[6546],simde_mm_xor_si128(c2[4574],simde_mm_xor_si128(c2[8530],simde_mm_xor_si128(c2[6578],simde_mm_xor_si128(c2[5923],c2[7903]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 1
+     d2[15]=simde_mm_xor_si128(c2[4628],simde_mm_xor_si128(c2[5288],simde_mm_xor_si128(c2[666],simde_mm_xor_si128(c2[9251],simde_mm_xor_si128(c2[668],simde_mm_xor_si128(c2[2024],simde_mm_xor_si128(c2[2684],simde_mm_xor_si128(c2[3330],simde_mm_xor_si128(c2[5311],simde_mm_xor_si128(c2[9307],simde_mm_xor_si128(c2[9967],simde_mm_xor_si128(c2[8654],simde_mm_xor_si128(c2[1394],simde_mm_xor_si128(c2[4058],simde_mm_xor_si128(c2[4718],simde_mm_xor_si128(c2[6038],simde_mm_xor_si128(c2[2080],simde_mm_xor_si128(c2[9373],simde_mm_xor_si128(c2[5409],simde_mm_xor_si128(c2[8713],simde_mm_xor_si128(c2[2795],simde_mm_xor_si128(c2[3455],simde_mm_xor_si128(c2[818],simde_mm_xor_si128(c2[8744],simde_mm_xor_si128(c2[2165],simde_mm_xor_si128(c2[2825],simde_mm_xor_si128(c2[4147],simde_mm_xor_si128(c2[8102],simde_mm_xor_si128(c2[8795],simde_mm_xor_si128(c2[2861],simde_mm_xor_si128(c2[7481],simde_mm_xor_si128(c2[8830],simde_mm_xor_si128(c2[8825],simde_mm_xor_si128(c2[906],simde_mm_xor_si128(c2[3573],simde_mm_xor_si128(c2[4233],simde_mm_xor_si128(c2[6213],simde_mm_xor_si128(c2[6211],simde_mm_xor_si128(c2[974],simde_mm_xor_si128(c2[1634],simde_mm_xor_si128(c2[3604],simde_mm_xor_si128(c2[1620],simde_mm_xor_si128(c2[8922],simde_mm_xor_si128(c2[9582],simde_mm_xor_si128(c2[8250],simde_mm_xor_si128(c2[6283],simde_mm_xor_si128(c2[368],simde_mm_xor_si128(c2[1028],simde_mm_xor_si128(c2[9611],simde_mm_xor_si128(c2[4981],simde_mm_xor_si128(c2[7654],simde_mm_xor_si128(c2[8314],simde_mm_xor_si128(c2[5010],simde_mm_xor_si128(c2[6331],simde_mm_xor_si128(c2[426],simde_mm_xor_si128(c2[4386],simde_mm_xor_si128(c2[5710],simde_mm_xor_si128(c2[5078],simde_mm_xor_si128(c2[5738],simde_mm_xor_si128(c2[4415],simde_mm_xor_si128(c2[1772],simde_mm_xor_si128(c2[6433],simde_mm_xor_si128(c2[7093],simde_mm_xor_si128(c2[7089],simde_mm_xor_si128(c2[6432],simde_mm_xor_si128(c2[5795],simde_mm_xor_si128(c2[1176],simde_mm_xor_si128(c2[1172],simde_mm_xor_si128(c2[8473],simde_mm_xor_si128(c2[9133],simde_mm_xor_si128(c2[9787],simde_mm_xor_si128(c2[8474],simde_mm_xor_si128(c2[7175],simde_mm_xor_si128(c2[7835],simde_mm_xor_si128(c2[3220],simde_mm_xor_si128(c2[3214],simde_mm_xor_si128(c2[6546],simde_mm_xor_si128(c2[7206],simde_mm_xor_si128(c2[5234],simde_mm_xor_si128(c2[9190],simde_mm_xor_si128(c2[6578],simde_mm_xor_si128(c2[7238],simde_mm_xor_si128(c2[6583],c2[8563])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 2
+     d2[30]=simde_mm_xor_si128(c2[5288],simde_mm_xor_si128(c2[666],simde_mm_xor_si128(c2[8591],simde_mm_xor_si128(c2[9251],simde_mm_xor_si128(c2[8],simde_mm_xor_si128(c2[668],simde_mm_xor_si128(c2[2684],simde_mm_xor_si128(c2[2670],simde_mm_xor_si128(c2[3330],simde_mm_xor_si128(c2[4651],simde_mm_xor_si128(c2[5311],simde_mm_xor_si128(c2[9967],simde_mm_xor_si128(c2[8654],simde_mm_xor_si128(c2[734],simde_mm_xor_si128(c2[1394],simde_mm_xor_si128(c2[4718],simde_mm_xor_si128(c2[6038],simde_mm_xor_si128(c2[1420],simde_mm_xor_si128(c2[2080],simde_mm_xor_si128(c2[9373],simde_mm_xor_si128(c2[4749],simde_mm_xor_si128(c2[5409],simde_mm_xor_si128(c2[8053],simde_mm_xor_si128(c2[8713],simde_mm_xor_si128(c2[3455],simde_mm_xor_si128(c2[818],simde_mm_xor_si128(c2[8084],simde_mm_xor_si128(c2[8744],simde_mm_xor_si128(c2[2825],simde_mm_xor_si128(c2[3487],simde_mm_xor_si128(c2[4147],simde_mm_xor_si128(c2[7442],simde_mm_xor_si128(c2[8102],simde_mm_xor_si128(c2[8795],simde_mm_xor_si128(c2[2201],simde_mm_xor_si128(c2[2861],simde_mm_xor_si128(c2[6821],simde_mm_xor_si128(c2[7481],simde_mm_xor_si128(c2[8830],simde_mm_xor_si128(c2[8165],simde_mm_xor_si128(c2[8825],simde_mm_xor_si128(c2[246],simde_mm_xor_si128(c2[906],simde_mm_xor_si128(c2[4233],simde_mm_xor_si128(c2[6213],simde_mm_xor_si128(c2[5551],simde_mm_xor_si128(c2[6211],simde_mm_xor_si128(c2[1634],simde_mm_xor_si128(c2[2944],simde_mm_xor_si128(c2[3604],simde_mm_xor_si128(c2[960],simde_mm_xor_si128(c2[1620],simde_mm_xor_si128(c2[9582],simde_mm_xor_si128(c2[8250],simde_mm_xor_si128(c2[5623],simde_mm_xor_si128(c2[6283],simde_mm_xor_si128(c2[1028],simde_mm_xor_si128(c2[9611],simde_mm_xor_si128(c2[4321],simde_mm_xor_si128(c2[4981],simde_mm_xor_si128(c2[8314],simde_mm_xor_si128(c2[4350],simde_mm_xor_si128(c2[5010],simde_mm_xor_si128(c2[5671],simde_mm_xor_si128(c2[6331],simde_mm_xor_si128(c2[426],simde_mm_xor_si128(c2[3726],simde_mm_xor_si128(c2[4386],simde_mm_xor_si128(c2[5050],simde_mm_xor_si128(c2[5710],simde_mm_xor_si128(c2[5738],simde_mm_xor_si128(c2[4415],simde_mm_xor_si128(c2[1112],simde_mm_xor_si128(c2[1772],simde_mm_xor_si128(c2[7093],simde_mm_xor_si128(c2[7089],simde_mm_xor_si128(c2[5772],simde_mm_xor_si128(c2[6432],simde_mm_xor_si128(c2[5795],simde_mm_xor_si128(c2[516],simde_mm_xor_si128(c2[1176],simde_mm_xor_si128(c2[512],simde_mm_xor_si128(c2[1172],simde_mm_xor_si128(c2[9133],simde_mm_xor_si128(c2[9127],simde_mm_xor_si128(c2[9787],simde_mm_xor_si128(c2[7814],simde_mm_xor_si128(c2[8474],simde_mm_xor_si128(c2[7835],simde_mm_xor_si128(c2[3220],simde_mm_xor_si128(c2[2554],simde_mm_xor_si128(c2[3214],simde_mm_xor_si128(c2[7206],simde_mm_xor_si128(c2[4574],simde_mm_xor_si128(c2[5234],simde_mm_xor_si128(c2[8530],simde_mm_xor_si128(c2[9190],simde_mm_xor_si128(c2[7238],simde_mm_xor_si128(c2[6583],simde_mm_xor_si128(c2[7903],c2[8563]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 3
+     d2[45]=simde_mm_xor_si128(c2[5288],simde_mm_xor_si128(c2[666],simde_mm_xor_si128(c2[9251],simde_mm_xor_si128(c2[8],simde_mm_xor_si128(c2[668],simde_mm_xor_si128(c2[2684],simde_mm_xor_si128(c2[3330],simde_mm_xor_si128(c2[4651],simde_mm_xor_si128(c2[5311],simde_mm_xor_si128(c2[9967],simde_mm_xor_si128(c2[8654],simde_mm_xor_si128(c2[1394],simde_mm_xor_si128(c2[4718],simde_mm_xor_si128(c2[6038],simde_mm_xor_si128(c2[1420],simde_mm_xor_si128(c2[2080],simde_mm_xor_si128(c2[9373],simde_mm_xor_si128(c2[5409],simde_mm_xor_si128(c2[8053],simde_mm_xor_si128(c2[8713],simde_mm_xor_si128(c2[3455],simde_mm_xor_si128(c2[818],simde_mm_xor_si128(c2[8744],simde_mm_xor_si128(c2[2825],simde_mm_xor_si128(c2[4147],simde_mm_xor_si128(c2[7442],simde_mm_xor_si128(c2[8102],simde_mm_xor_si128(c2[8795],simde_mm_xor_si128(c2[2861],simde_mm_xor_si128(c2[6821],simde_mm_xor_si128(c2[7481],simde_mm_xor_si128(c2[8830],simde_mm_xor_si128(c2[8825],simde_mm_xor_si128(c2[246],simde_mm_xor_si128(c2[906],simde_mm_xor_si128(c2[4233],simde_mm_xor_si128(c2[6213],simde_mm_xor_si128(c2[6211],simde_mm_xor_si128(c2[1634],simde_mm_xor_si128(c2[3604],simde_mm_xor_si128(c2[960],simde_mm_xor_si128(c2[1620],simde_mm_xor_si128(c2[9582],simde_mm_xor_si128(c2[8250],simde_mm_xor_si128(c2[5623],simde_mm_xor_si128(c2[6283],simde_mm_xor_si128(c2[1028],simde_mm_xor_si128(c2[9611],simde_mm_xor_si128(c2[4321],simde_mm_xor_si128(c2[4981],simde_mm_xor_si128(c2[8314],simde_mm_xor_si128(c2[5010],simde_mm_xor_si128(c2[5671],simde_mm_xor_si128(c2[6331],simde_mm_xor_si128(c2[426],simde_mm_xor_si128(c2[4386],simde_mm_xor_si128(c2[5050],simde_mm_xor_si128(c2[5710],simde_mm_xor_si128(c2[5738],simde_mm_xor_si128(c2[4415],simde_mm_xor_si128(c2[1772],simde_mm_xor_si128(c2[7093],simde_mm_xor_si128(c2[7089],simde_mm_xor_si128(c2[5772],simde_mm_xor_si128(c2[6432],simde_mm_xor_si128(c2[5795],simde_mm_xor_si128(c2[1176],simde_mm_xor_si128(c2[512],simde_mm_xor_si128(c2[1172],simde_mm_xor_si128(c2[9133],simde_mm_xor_si128(c2[9787],simde_mm_xor_si128(c2[7814],simde_mm_xor_si128(c2[8474],simde_mm_xor_si128(c2[7835],simde_mm_xor_si128(c2[3220],simde_mm_xor_si128(c2[3214],simde_mm_xor_si128(c2[7206],simde_mm_xor_si128(c2[5234],simde_mm_xor_si128(c2[8530],simde_mm_xor_si128(c2[9190],simde_mm_xor_si128(c2[7238],simde_mm_xor_si128(c2[6583],simde_mm_xor_si128(c2[7903],c2[8563])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 4
+     d2[60]=simde_mm_xor_si128(c2[4],c2[2023]);
+
+//row: 5
+     d2[75]=simde_mm_xor_si128(c2[7264],simde_mm_xor_si128(c2[2642],simde_mm_xor_si128(c2[668],simde_mm_xor_si128(c2[2644],simde_mm_xor_si128(c2[1320],simde_mm_xor_si128(c2[4660],simde_mm_xor_si128(c2[5321],simde_mm_xor_si128(c2[7302],simde_mm_xor_si128(c2[7300],simde_mm_xor_si128(c2[1384],simde_mm_xor_si128(c2[71],simde_mm_xor_si128(c2[3370],simde_mm_xor_si128(c2[6694],simde_mm_xor_si128(c2[8014],simde_mm_xor_si128(c2[4056],simde_mm_xor_si128(c2[9992],simde_mm_xor_si128(c2[790],simde_mm_xor_si128(c2[7385],simde_mm_xor_si128(c2[130],simde_mm_xor_si128(c2[5431],simde_mm_xor_si128(c2[2794],simde_mm_xor_si128(c2[161],simde_mm_xor_si128(c2[4801],simde_mm_xor_si128(c2[6123],simde_mm_xor_si128(c2[10093],simde_mm_xor_si128(c2[212],simde_mm_xor_si128(c2[4837],simde_mm_xor_si128(c2[9457],simde_mm_xor_si128(c2[247],simde_mm_xor_si128(c2[242],simde_mm_xor_si128(c2[2882],simde_mm_xor_si128(c2[6224],simde_mm_xor_si128(c2[8204],simde_mm_xor_si128(c2[8202],simde_mm_xor_si128(c2[3610],simde_mm_xor_si128(c2[5580],simde_mm_xor_si128(c2[3611],simde_mm_xor_si128(c2[999],simde_mm_xor_si128(c2[10241],simde_mm_xor_si128(c2[8259],simde_mm_xor_si128(c2[3004],simde_mm_xor_si128(c2[1028],simde_mm_xor_si128(c2[6972],simde_mm_xor_si128(c2[10268],simde_mm_xor_si128(c2[10290],simde_mm_xor_si128(c2[7001],simde_mm_xor_si128(c2[8322],simde_mm_xor_si128(c2[2402],simde_mm_xor_si128(c2[6362],simde_mm_xor_si128(c2[7686],simde_mm_xor_si128(c2[7714],simde_mm_xor_si128(c2[6391],simde_mm_xor_si128(c2[3763],simde_mm_xor_si128(c2[9069],simde_mm_xor_si128(c2[9065],simde_mm_xor_si128(c2[8408],simde_mm_xor_si128(c2[1813],simde_mm_xor_si128(c2[7771],simde_mm_xor_si128(c2[3152],simde_mm_xor_si128(c2[3163],simde_mm_xor_si128(c2[550],simde_mm_xor_si128(c2[1204],simde_mm_xor_si128(c2[10450],simde_mm_xor_si128(c2[9811],simde_mm_xor_si128(c2[5196],simde_mm_xor_si128(c2[5190],simde_mm_xor_si128(c2[9182],simde_mm_xor_si128(c2[7210],simde_mm_xor_si128(c2[607],simde_mm_xor_si128(c2[9214],simde_mm_xor_si128(c2[8559],simde_mm_xor_si128(c2[10539],c2[3281]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 6
+     d2[90]=simde_mm_xor_si128(c2[4632],simde_mm_xor_si128(c2[4141],simde_mm_xor_si128(c2[4921],simde_mm_xor_si128(c2[2976],simde_mm_xor_si128(c2[8315],simde_mm_xor_si128(c2[10422],simde_mm_xor_si128(c2[3183],c2[9180])))))));
+
+//row: 7
+     d2[105]=simde_mm_xor_si128(c2[8584],simde_mm_xor_si128(c2[1359],simde_mm_xor_si128(c2[793],simde_mm_xor_si128(c2[212],simde_mm_xor_si128(c2[4210],c2[1741])))));
+
+//row: 8
+     d2[120]=simde_mm_xor_si128(c2[3],simde_mm_xor_si128(c2[9250],simde_mm_xor_si128(c2[5940],simde_mm_xor_si128(c2[4628],simde_mm_xor_si128(c2[3966],simde_mm_xor_si128(c2[1994],simde_mm_xor_si128(c2[2654],simde_mm_xor_si128(c2[5942],simde_mm_xor_si128(c2[3970],simde_mm_xor_si128(c2[4630],simde_mm_xor_si128(c2[3311],simde_mm_xor_si128(c2[7958],simde_mm_xor_si128(c2[6631],simde_mm_xor_si128(c2[8619],simde_mm_xor_si128(c2[6632],simde_mm_xor_si128(c2[7292],simde_mm_xor_si128(c2[41],simde_mm_xor_si128(c2[8613],simde_mm_xor_si128(c2[9273],simde_mm_xor_si128(c2[5976],simde_mm_xor_si128(c2[4682],simde_mm_xor_si128(c2[3370],simde_mm_xor_si128(c2[3369],simde_mm_xor_si128(c2[2042],simde_mm_xor_si128(c2[6668],simde_mm_xor_si128(c2[4681],simde_mm_xor_si128(c2[5341],simde_mm_xor_si128(c2[9992],simde_mm_xor_si128(c2[8680],simde_mm_xor_si128(c2[753],simde_mm_xor_si128(c2[10000],simde_mm_xor_si128(c2[7354],simde_mm_xor_si128(c2[5382],simde_mm_xor_si128(c2[6042],simde_mm_xor_si128(c2[8678],simde_mm_xor_si128(c2[4088],simde_mm_xor_si128(c2[2761],simde_mm_xor_si128(c2[124],simde_mm_xor_si128(c2[8711],simde_mm_xor_si128(c2[9371],simde_mm_xor_si128(c2[3428],simde_mm_xor_si128(c2[1441],simde_mm_xor_si128(c2[2101],simde_mm_xor_si128(c2[8744],simde_mm_xor_si128(c2[7417],simde_mm_xor_si128(c2[6092],simde_mm_xor_si128(c2[4780],simde_mm_xor_si128(c2[3459],simde_mm_xor_si128(c2[1472],simde_mm_xor_si128(c2[2132],simde_mm_xor_si128(c2[8114],simde_mm_xor_si128(c2[6787],simde_mm_xor_si128(c2[9421],simde_mm_xor_si128(c2[7449],simde_mm_xor_si128(c2[8109],simde_mm_xor_si128(c2[2832],simde_mm_xor_si128(c2[845],simde_mm_xor_si128(c2[1505],simde_mm_xor_si128(c2[3510],simde_mm_xor_si128(c2[2198],simde_mm_xor_si128(c2[8135],simde_mm_xor_si128(c2[6163],simde_mm_xor_si128(c2[6823],simde_mm_xor_si128(c2[2196],simde_mm_xor_si128(c2[224],simde_mm_xor_si128(c2[884],simde_mm_xor_si128(c2[3545],simde_mm_xor_si128(c2[2233],simde_mm_xor_si128(c2[3540],simde_mm_xor_si128(c2[1568],simde_mm_xor_si128(c2[2228],simde_mm_xor_si128(c2[6180],simde_mm_xor_si128(c2[4208],simde_mm_xor_si128(c2[4868],simde_mm_xor_si128(c2[9522],simde_mm_xor_si128(c2[8195],simde_mm_xor_si128(c2[943],simde_mm_xor_si128(c2[10175],simde_mm_xor_si128(c2[941],simde_mm_xor_si128(c2[9513],simde_mm_xor_si128(c2[10173],simde_mm_xor_si128(c2[6908],simde_mm_xor_si128(c2[5581],simde_mm_xor_si128(c2[8893],simde_mm_xor_si128(c2[6906],simde_mm_xor_si128(c2[7566],simde_mm_xor_si128(c2[6909],simde_mm_xor_si128(c2[4922],simde_mm_xor_si128(c2[5582],simde_mm_xor_si128(c2[4297],simde_mm_xor_si128(c2[2970],simde_mm_xor_si128(c2[2980],simde_mm_xor_si128(c2[1653],simde_mm_xor_si128(c2[998],simde_mm_xor_si128(c2[9570],simde_mm_xor_si128(c2[10230],simde_mm_xor_si128(c2[6302],simde_mm_xor_si128(c2[4990],simde_mm_xor_si128(c2[4326],simde_mm_xor_si128(c2[3014],simde_mm_xor_si128(c2[10270],simde_mm_xor_si128(c2[8283],simde_mm_xor_si128(c2[8943],simde_mm_xor_si128(c2[10273],simde_mm_xor_si128(c2[3044],simde_mm_xor_si128(c2[1717],simde_mm_xor_si128(c2[10299],simde_mm_xor_si128(c2[8312],simde_mm_xor_si128(c2[8972],simde_mm_xor_si128(c2[1061],simde_mm_xor_si128(c2[9633],simde_mm_xor_si128(c2[10293],simde_mm_xor_si128(c2[5700],simde_mm_xor_si128(c2[4388],simde_mm_xor_si128(c2[9660],simde_mm_xor_si128(c2[7688],simde_mm_xor_si128(c2[8348],simde_mm_xor_si128(c2[425],simde_mm_xor_si128(c2[9012],simde_mm_xor_si128(c2[9672],simde_mm_xor_si128(c2[453],simde_mm_xor_si128(c2[9700],simde_mm_xor_si128(c2[9704],simde_mm_xor_si128(c2[8377],simde_mm_xor_si128(c2[7061],simde_mm_xor_si128(c2[5074],simde_mm_xor_si128(c2[5734],simde_mm_xor_si128(c2[1808],simde_mm_xor_si128(c2[481],simde_mm_xor_si128(c2[1804],simde_mm_xor_si128(c2[492],simde_mm_xor_si128(c2[1147],simde_mm_xor_si128(c2[9734],simde_mm_xor_si128(c2[10394],simde_mm_xor_si128(c2[1151],simde_mm_xor_si128(c2[510],simde_mm_xor_si128(c2[9757],simde_mm_xor_si128(c2[6450],simde_mm_xor_si128(c2[4478],simde_mm_xor_si128(c2[5138],simde_mm_xor_si128(c2[6461],simde_mm_xor_si128(c2[4474],simde_mm_xor_si128(c2[5134],simde_mm_xor_si128(c2[3848],simde_mm_xor_si128(c2[2521],simde_mm_xor_si128(c2[4502],simde_mm_xor_si128(c2[2530],simde_mm_xor_si128(c2[3190],simde_mm_xor_si128(c2[3189],simde_mm_xor_si128(c2[1202],simde_mm_xor_si128(c2[1862],simde_mm_xor_si128(c2[2550],simde_mm_xor_si128(c2[1238],simde_mm_xor_si128(c2[8494],simde_mm_xor_si128(c2[7182],simde_mm_xor_si128(c2[8503],simde_mm_xor_si128(c2[6516],simde_mm_xor_si128(c2[7176],simde_mm_xor_si128(c2[1239],simde_mm_xor_si128(c2[1921],simde_mm_xor_si128(c2[609],simde_mm_xor_si128(c2[10508],simde_mm_xor_si128(c2[8521],simde_mm_xor_si128(c2[9181],simde_mm_xor_si128(c2[3905],simde_mm_xor_si128(c2[1933],simde_mm_xor_si128(c2[2593],simde_mm_xor_si128(c2[1953],simde_mm_xor_si128(c2[641],simde_mm_xor_si128(c2[1298],simde_mm_xor_si128(c2[10530],simde_mm_xor_si128(c2[3278],simde_mm_xor_si128(c2[1291],simde_mm_xor_si128(c2[1951],c2[5262]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 9
+     d2[135]=simde_mm_xor_si128(c2[5950],simde_mm_xor_si128(c2[7950],simde_mm_xor_si128(c2[9552],simde_mm_xor_si128(c2[8923],simde_mm_xor_si128(c2[1051],simde_mm_xor_si128(c2[3163],simde_mm_xor_si128(c2[8465],c2[9192])))))));
+
+//row: 10
+     d2[150]=simde_mm_xor_si128(c2[2677],simde_mm_xor_si128(c2[4689],simde_mm_xor_si128(c2[4084],simde_mm_xor_si128(c2[4184],simde_mm_xor_si128(c2[2227],c2[3065])))));
+
+//row: 11
+     d2[165]=simde_mm_xor_si128(c2[7923],simde_mm_xor_si128(c2[1320],simde_mm_xor_si128(c2[1980],simde_mm_xor_si128(c2[3301],simde_mm_xor_si128(c2[7932],simde_mm_xor_si128(c2[1327],simde_mm_xor_si128(c2[5943],simde_mm_xor_si128(c2[3303],simde_mm_xor_si128(c2[7934],simde_mm_xor_si128(c2[8582],simde_mm_xor_si128(c2[5319],simde_mm_xor_si128(c2[9275],simde_mm_xor_si128(c2[9935],simde_mm_xor_si128(c2[5980],simde_mm_xor_si128(c2[37],simde_mm_xor_si128(c2[7961],simde_mm_xor_si128(c2[2018],simde_mm_xor_si128(c2[2017],simde_mm_xor_si128(c2[2043],simde_mm_xor_si128(c2[6014],simde_mm_xor_si128(c2[6674],simde_mm_xor_si128(c2[730],simde_mm_xor_si128(c2[5346],simde_mm_xor_si128(c2[4029],simde_mm_xor_si128(c2[8645],simde_mm_xor_si128(c2[7353],simde_mm_xor_si128(c2[750],simde_mm_xor_si128(c2[1410],simde_mm_xor_si128(c2[8673],simde_mm_xor_si128(c2[2730],simde_mm_xor_si128(c2[4715],simde_mm_xor_si128(c2[9331],simde_mm_xor_si128(c2[1449],simde_mm_xor_si128(c2[6065],simde_mm_xor_si128(c2[8044],simde_mm_xor_si128(c2[2101],simde_mm_xor_si128(c2[789],simde_mm_xor_si128(c2[5405],simde_mm_xor_si128(c2[6090],simde_mm_xor_si128(c2[10061],simde_mm_xor_si128(c2[162],simde_mm_xor_si128(c2[3453],simde_mm_xor_si128(c2[8084],simde_mm_xor_si128(c2[820],simde_mm_xor_si128(c2[5436],simde_mm_xor_si128(c2[5460],simde_mm_xor_si128(c2[9431],simde_mm_xor_si128(c2[10091],simde_mm_xor_si128(c2[6782],simde_mm_xor_si128(c2[854],simde_mm_xor_si128(c2[193],simde_mm_xor_si128(c2[4809],simde_mm_xor_si128(c2[871],simde_mm_xor_si128(c2[5502],simde_mm_xor_si128(c2[5496],simde_mm_xor_si128(c2[10112],simde_mm_xor_si128(c2[10116],simde_mm_xor_si128(c2[4173],simde_mm_xor_si128(c2[906],simde_mm_xor_si128(c2[5522],simde_mm_xor_si128(c2[901],simde_mm_xor_si128(c2[5532],simde_mm_xor_si128(c2[3541],simde_mm_xor_si128(c2[8172],simde_mm_xor_si128(c2[6883],simde_mm_xor_si128(c2[280],simde_mm_xor_si128(c2[940],simde_mm_xor_si128(c2[8863],simde_mm_xor_si128(c2[2920],simde_mm_xor_si128(c2[8861],simde_mm_xor_si128(c2[2918],simde_mm_xor_si128(c2[4269],simde_mm_xor_si128(c2[8225],simde_mm_xor_si128(c2[8885],simde_mm_xor_si128(c2[6254],simde_mm_xor_si128(c2[311],simde_mm_xor_si128(c2[4270],simde_mm_xor_si128(c2[8886],simde_mm_xor_si128(c2[1658],simde_mm_xor_si128(c2[5614],simde_mm_xor_si128(c2[6274],simde_mm_xor_si128(c2[341],simde_mm_xor_si128(c2[4957],simde_mm_xor_si128(c2[8918],simde_mm_xor_si128(c2[2975],simde_mm_xor_si128(c2[3663],simde_mm_xor_si128(c2[7634],simde_mm_xor_si128(c2[8294],simde_mm_xor_si128(c2[1687],simde_mm_xor_si128(c2[6303],simde_mm_xor_si128(c2[7631],simde_mm_xor_si128(c2[1688],simde_mm_xor_si128(c2[4328],simde_mm_xor_si128(c2[390],simde_mm_xor_si128(c2[4361],simde_mm_xor_si128(c2[5021],simde_mm_xor_si128(c2[7660],simde_mm_xor_si128(c2[1717],simde_mm_xor_si128(c2[8981],simde_mm_xor_si128(c2[3038],simde_mm_xor_si128(c2[3061],simde_mm_xor_si128(c2[7692],simde_mm_xor_si128(c2[7021],simde_mm_xor_si128(c2[1093],simde_mm_xor_si128(c2[8345],simde_mm_xor_si128(c2[2402],simde_mm_xor_si128(c2[8373],simde_mm_xor_si128(c2[1770],simde_mm_xor_si128(c2[2430],simde_mm_xor_si128(c2[7050],simde_mm_xor_si128(c2[1122],simde_mm_xor_si128(c2[4422],simde_mm_xor_si128(c2[9038],simde_mm_xor_si128(c2[9728],simde_mm_xor_si128(c2[3125],simde_mm_xor_si128(c2[3785],simde_mm_xor_si128(c2[9724],simde_mm_xor_si128(c2[3781],simde_mm_xor_si128(c2[9067],simde_mm_xor_si128(c2[3124],simde_mm_xor_si128(c2[1140],simde_mm_xor_si128(c2[8430],simde_mm_xor_si128(c2[2502],simde_mm_xor_si128(c2[3811],simde_mm_xor_si128(c2[8442],simde_mm_xor_si128(c2[3822],simde_mm_xor_si128(c2[8438],simde_mm_xor_si128(c2[1209],simde_mm_xor_si128(c2[5165],simde_mm_xor_si128(c2[5825],simde_mm_xor_si128(c2[1863],simde_mm_xor_si128(c2[6494],simde_mm_xor_si128(c2[550],simde_mm_xor_si128(c2[5166],simde_mm_xor_si128(c2[10470],simde_mm_xor_si128(c2[3882],simde_mm_xor_si128(c2[4542],simde_mm_xor_si128(c2[5855],simde_mm_xor_si128(c2[10471],simde_mm_xor_si128(c2[5864],simde_mm_xor_si128(c2[10480],simde_mm_xor_si128(c2[9841],simde_mm_xor_si128(c2[3253],simde_mm_xor_si128(c2[3913],simde_mm_xor_si128(c2[7869],simde_mm_xor_si128(c2[1926],simde_mm_xor_si128(c2[1266],simde_mm_xor_si128(c2[5882],simde_mm_xor_si128(c2[9873],simde_mm_xor_si128(c2[3270],simde_mm_xor_si128(c2[3930],simde_mm_xor_si128(c2[9218],simde_mm_xor_si128(c2[3275],simde_mm_xor_si128(c2[639],simde_mm_xor_si128(c2[5255],c2[5919])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 12
+     d2[180]=simde_mm_xor_si128(c2[6611],simde_mm_xor_si128(c2[4663],simde_mm_xor_si128(c2[8227],simde_mm_xor_si128(c2[2981],simde_mm_xor_si128(c2[1716],c2[545])))));
+
+//row: 13
+     d2[195]=simde_mm_xor_si128(c2[8582],simde_mm_xor_si128(c2[9242],simde_mm_xor_si128(c2[4620],simde_mm_xor_si128(c2[2646],simde_mm_xor_si128(c2[4622],simde_mm_xor_si128(c2[7933],simde_mm_xor_si128(c2[5978],simde_mm_xor_si128(c2[6638],simde_mm_xor_si128(c2[7299],simde_mm_xor_si128(c2[9280],simde_mm_xor_si128(c2[2702],simde_mm_xor_si128(c2[3362],simde_mm_xor_si128(c2[2049],simde_mm_xor_si128(c2[5348],simde_mm_xor_si128(c2[8012],simde_mm_xor_si128(c2[8672],simde_mm_xor_si128(c2[9992],simde_mm_xor_si128(c2[6034],simde_mm_xor_si128(c2[6041],simde_mm_xor_si128(c2[2768],simde_mm_xor_si128(c2[9363],simde_mm_xor_si128(c2[2108],simde_mm_xor_si128(c2[6764],simde_mm_xor_si128(c2[7424],simde_mm_xor_si128(c2[4772],simde_mm_xor_si128(c2[2139],simde_mm_xor_si128(c2[6134],simde_mm_xor_si128(c2[6794],simde_mm_xor_si128(c2[8101],simde_mm_xor_si128(c2[1512],simde_mm_xor_si128(c2[2190],simde_mm_xor_si128(c2[6815],simde_mm_xor_si128(c2[876],simde_mm_xor_si128(c2[6819],simde_mm_xor_si128(c2[2225],simde_mm_xor_si128(c2[2220],simde_mm_xor_si128(c2[4860],simde_mm_xor_si128(c2[7542],simde_mm_xor_si128(c2[8202],simde_mm_xor_si128(c2[10182],simde_mm_xor_si128(c2[10180],simde_mm_xor_si128(c2[4928],simde_mm_xor_si128(c2[5588],simde_mm_xor_si128(c2[7573],simde_mm_xor_si128(c2[5589],simde_mm_xor_si128(c2[2317],simde_mm_xor_si128(c2[2977],simde_mm_xor_si128(c2[1660],simde_mm_xor_si128(c2[10237],simde_mm_xor_si128(c2[4322],simde_mm_xor_si128(c2[4982],simde_mm_xor_si128(c2[3006],simde_mm_xor_si128(c2[8950],simde_mm_xor_si128(c2[1064],simde_mm_xor_si128(c2[1724],simde_mm_xor_si128(c2[8979],simde_mm_xor_si128(c2[10300],simde_mm_xor_si128(c2[4380],simde_mm_xor_si128(c2[8340],simde_mm_xor_si128(c2[9664],simde_mm_xor_si128(c2[9032],simde_mm_xor_si128(c2[9692],simde_mm_xor_si128(c2[8384],simde_mm_xor_si128(c2[5741],simde_mm_xor_si128(c2[10387],simde_mm_xor_si128(c2[488],simde_mm_xor_si128(c2[484],simde_mm_xor_si128(c2[10386],simde_mm_xor_si128(c2[9764],simde_mm_xor_si128(c2[5130],simde_mm_xor_si128(c2[5141],simde_mm_xor_si128(c2[1868],simde_mm_xor_si128(c2[2528],simde_mm_xor_si128(c2[3182],simde_mm_xor_si128(c2[1869],simde_mm_xor_si128(c2[570],simde_mm_xor_si128(c2[1230],simde_mm_xor_si128(c2[7174],simde_mm_xor_si128(c2[7183],simde_mm_xor_si128(c2[10500],simde_mm_xor_si128(c2[601],simde_mm_xor_si128(c2[9188],simde_mm_xor_si128(c2[2585],simde_mm_xor_si128(c2[1931],simde_mm_xor_si128(c2[10532],simde_mm_xor_si128(c2[633],simde_mm_xor_si128(c2[10537],c2[1958])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 14
+     d2[210]=simde_mm_xor_si128(c2[7927],simde_mm_xor_si128(c2[369],simde_mm_xor_si128(c2[4421],simde_mm_xor_si128(c2[10385],simde_mm_xor_si128(c2[5794],c2[8554])))));
+
+//row: 15
+     d2[225]=simde_mm_xor_si128(c2[5951],simde_mm_xor_si128(c2[1329],simde_mm_xor_si128(c2[9914],simde_mm_xor_si128(c2[671],simde_mm_xor_si128(c2[1331],simde_mm_xor_si128(c2[4622],simde_mm_xor_si128(c2[3332],simde_mm_xor_si128(c2[3993],simde_mm_xor_si128(c2[5314],simde_mm_xor_si128(c2[5974],simde_mm_xor_si128(c2[6638],simde_mm_xor_si128(c2[71],simde_mm_xor_si128(c2[9302],simde_mm_xor_si128(c2[2042],simde_mm_xor_si128(c2[5381],simde_mm_xor_si128(c2[6701],simde_mm_xor_si128(c2[2083],simde_mm_xor_si128(c2[2743],simde_mm_xor_si128(c2[10021],simde_mm_xor_si128(c2[6072],simde_mm_xor_si128(c2[8701],simde_mm_xor_si128(c2[9361],simde_mm_xor_si128(c2[4118],simde_mm_xor_si128(c2[1481],simde_mm_xor_si128(c2[9392],simde_mm_xor_si128(c2[3488],simde_mm_xor_si128(c2[4810],simde_mm_xor_si128(c2[8105],simde_mm_xor_si128(c2[8765],simde_mm_xor_si128(c2[9458],simde_mm_xor_si128(c2[3524],simde_mm_xor_si128(c2[7484],simde_mm_xor_si128(c2[8144],simde_mm_xor_si128(c2[9493],simde_mm_xor_si128(c2[9488],simde_mm_xor_si128(c2[909],simde_mm_xor_si128(c2[1569],simde_mm_xor_si128(c2[4896],simde_mm_xor_si128(c2[6876],simde_mm_xor_si128(c2[6874],simde_mm_xor_si128(c2[2282],simde_mm_xor_si128(c2[4267],simde_mm_xor_si128(c2[1623],simde_mm_xor_si128(c2[2283],simde_mm_xor_si128(c2[8233],simde_mm_xor_si128(c2[10230],simde_mm_xor_si128(c2[8913],simde_mm_xor_si128(c2[6271],simde_mm_xor_si128(c2[6931],simde_mm_xor_si128(c2[1691],simde_mm_xor_si128(c2[10274],simde_mm_xor_si128(c2[4984],simde_mm_xor_si128(c2[5644],simde_mm_xor_si128(c2[8977],simde_mm_xor_si128(c2[5673],simde_mm_xor_si128(c2[6334],simde_mm_xor_si128(c2[6994],simde_mm_xor_si128(c2[8980],simde_mm_xor_si128(c2[1089],simde_mm_xor_si128(c2[5049],simde_mm_xor_si128(c2[5713],simde_mm_xor_si128(c2[6373],simde_mm_xor_si128(c2[6401],simde_mm_xor_si128(c2[5078],simde_mm_xor_si128(c2[2435],simde_mm_xor_si128(c2[7741],simde_mm_xor_si128(c2[7752],simde_mm_xor_si128(c2[6420],simde_mm_xor_si128(c2[7080],simde_mm_xor_si128(c2[6458],simde_mm_xor_si128(c2[1839],simde_mm_xor_si128(c2[1175],simde_mm_xor_si128(c2[1835],simde_mm_xor_si128(c2[9781],simde_mm_xor_si128(c2[10450],simde_mm_xor_si128(c2[8462],simde_mm_xor_si128(c2[9122],simde_mm_xor_si128(c2[9788],simde_mm_xor_si128(c2[8498],simde_mm_xor_si128(c2[3883],simde_mm_xor_si128(c2[3877],simde_mm_xor_si128(c2[7869],simde_mm_xor_si128(c2[5882],simde_mm_xor_si128(c2[9193],simde_mm_xor_si128(c2[9853],simde_mm_xor_si128(c2[7901],simde_mm_xor_si128(c2[7231],simde_mm_xor_si128(c2[8551],c2[9211]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 16
+     d2[240]=simde_mm_xor_si128(c2[2646],simde_mm_xor_si128(c2[8583],simde_mm_xor_si128(c2[6609],simde_mm_xor_si128(c2[8585],simde_mm_xor_si128(c2[42],simde_mm_xor_si128(c2[703],simde_mm_xor_si128(c2[2684],simde_mm_xor_si128(c2[9274],simde_mm_xor_si128(c2[7325],simde_mm_xor_si128(c2[6012],simde_mm_xor_si128(c2[9311],simde_mm_xor_si128(c2[2076],simde_mm_xor_si128(c2[3396],simde_mm_xor_si128(c2[9997],simde_mm_xor_si128(c2[5379],simde_mm_xor_si128(c2[6731],simde_mm_xor_si128(c2[2767],simde_mm_xor_si128(c2[6071],simde_mm_xor_si128(c2[813],simde_mm_xor_si128(c2[8735],simde_mm_xor_si128(c2[6102],simde_mm_xor_si128(c2[183],simde_mm_xor_si128(c2[1505],simde_mm_xor_si128(c2[5460],simde_mm_xor_si128(c2[6153],simde_mm_xor_si128(c2[219],simde_mm_xor_si128(c2[4839],simde_mm_xor_si128(c2[6188],simde_mm_xor_si128(c2[6183],simde_mm_xor_si128(c2[8823],simde_mm_xor_si128(c2[1591],simde_mm_xor_si128(c2[3571],simde_mm_xor_si128(c2[3584],simde_mm_xor_si128(c2[9551],simde_mm_xor_si128(c2[962],simde_mm_xor_si128(c2[9552],simde_mm_xor_si128(c2[6940],simde_mm_xor_si128(c2[5623],simde_mm_xor_si128(c2[3641],simde_mm_xor_si128(c2[2975],simde_mm_xor_si128(c2[8945],simde_mm_xor_si128(c2[6969],simde_mm_xor_si128(c2[2354],simde_mm_xor_si128(c2[5672],simde_mm_xor_si128(c2[2383],simde_mm_xor_si128(c2[3704],simde_mm_xor_si128(c2[8343],simde_mm_xor_si128(c2[1744],simde_mm_xor_si128(c2[3068],simde_mm_xor_si128(c2[3096],simde_mm_xor_si128(c2[1773],simde_mm_xor_si128(c2[9704],simde_mm_xor_si128(c2[4451],simde_mm_xor_si128(c2[4447],simde_mm_xor_si128(c2[3790],simde_mm_xor_si128(c2[3153],simde_mm_xor_si128(c2[9093],simde_mm_xor_si128(c2[9104],simde_mm_xor_si128(c2[6491],simde_mm_xor_si128(c2[7145],simde_mm_xor_si128(c2[5832],simde_mm_xor_si128(c2[5193],simde_mm_xor_si128(c2[578],simde_mm_xor_si128(c2[572],simde_mm_xor_si128(c2[4564],simde_mm_xor_si128(c2[2592],simde_mm_xor_si128(c2[6548],simde_mm_xor_si128(c2[3900],simde_mm_xor_si128(c2[4596],simde_mm_xor_si128(c2[3941],c2[5921]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 17
+     d2[255]=simde_mm_xor_si128(c2[4631],simde_mm_xor_si128(c2[427],simde_mm_xor_si128(c2[7086],simde_mm_xor_si128(c2[7783],c2[1298]))));
+
+//row: 18
+     d2[270]=simde_mm_xor_si128(c2[4661],simde_mm_xor_si128(c2[4993],simde_mm_xor_si128(c2[3041],simde_mm_xor_si128(c2[10448],c2[9810]))));
+
+//row: 19
+     d2[285]=simde_mm_xor_si128(c2[1991],simde_mm_xor_si128(c2[7956],simde_mm_xor_si128(c2[10119],simde_mm_xor_si128(c2[6848],c2[2952]))));
+
+//row: 20
+     d2[300]=simde_mm_xor_si128(c2[5941],simde_mm_xor_si128(c2[1334],simde_mm_xor_si128(c2[9904],simde_mm_xor_si128(c2[1321],simde_mm_xor_si128(c2[8584],simde_mm_xor_si128(c2[3337],simde_mm_xor_si128(c2[3998],simde_mm_xor_si128(c2[5979],simde_mm_xor_si128(c2[61],simde_mm_xor_si128(c2[9307],simde_mm_xor_si128(c2[2047],simde_mm_xor_si128(c2[5371],simde_mm_xor_si128(c2[6691],simde_mm_xor_si128(c2[2733],simde_mm_xor_si128(c2[7361],simde_mm_xor_si128(c2[10026],simde_mm_xor_si128(c2[6062],simde_mm_xor_si128(c2[9366],simde_mm_xor_si128(c2[4123],simde_mm_xor_si128(c2[1471],simde_mm_xor_si128(c2[9397],simde_mm_xor_si128(c2[3493],simde_mm_xor_si128(c2[4800],simde_mm_xor_si128(c2[8770],simde_mm_xor_si128(c2[9463],simde_mm_xor_si128(c2[3514],simde_mm_xor_si128(c2[8134],simde_mm_xor_si128(c2[9483],simde_mm_xor_si128(c2[9493],simde_mm_xor_si128(c2[1574],simde_mm_xor_si128(c2[4901],simde_mm_xor_si128(c2[6881],simde_mm_xor_si128(c2[6879],simde_mm_xor_si128(c2[7542],simde_mm_xor_si128(c2[2287],simde_mm_xor_si128(c2[4272],simde_mm_xor_si128(c2[2288],simde_mm_xor_si128(c2[10235],simde_mm_xor_si128(c2[8918],simde_mm_xor_si128(c2[6936],simde_mm_xor_si128(c2[4960],simde_mm_xor_si128(c2[1681],simde_mm_xor_si128(c2[10264],simde_mm_xor_si128(c2[5649],simde_mm_xor_si128(c2[8982],simde_mm_xor_si128(c2[5678],simde_mm_xor_si128(c2[6999],simde_mm_xor_si128(c2[1094],simde_mm_xor_si128(c2[5054],simde_mm_xor_si128(c2[6363],simde_mm_xor_si128(c2[6391],simde_mm_xor_si128(c2[5083],simde_mm_xor_si128(c2[2440],simde_mm_xor_si128(c2[7746],simde_mm_xor_si128(c2[7742],simde_mm_xor_si128(c2[7085],simde_mm_xor_si128(c2[6463],simde_mm_xor_si128(c2[1844],simde_mm_xor_si128(c2[1840],simde_mm_xor_si128(c2[9786],simde_mm_xor_si128(c2[10440],simde_mm_xor_si128(c2[9127],simde_mm_xor_si128(c2[8503],simde_mm_xor_si128(c2[3873],simde_mm_xor_si128(c2[3882],simde_mm_xor_si128(c2[7874],simde_mm_xor_si128(c2[5887],simde_mm_xor_si128(c2[9843],simde_mm_xor_si128(c2[7891],simde_mm_xor_si128(c2[7236],c2[9216]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 21
+     d2[315]=simde_mm_xor_si128(c2[3342],simde_mm_xor_si128(c2[6757],simde_mm_xor_si128(c2[5113],simde_mm_xor_si128(c2[1264],c2[5923]))));
+
+//row: 22
+     d2[330]=simde_mm_xor_si128(c2[5941],simde_mm_xor_si128(c2[10262],simde_mm_xor_si128(c2[9637],c2[1841])));
+
+//row: 23
+     d2[345]=simde_mm_xor_si128(c2[5981],simde_mm_xor_si128(c2[9967],simde_mm_xor_si128(c2[3607],c2[5172])));
+
+//row: 24
+     d2[360]=simde_mm_xor_si128(c2[672],simde_mm_xor_si128(c2[6609],simde_mm_xor_si128(c2[4620],simde_mm_xor_si128(c2[6611],simde_mm_xor_si128(c2[2],simde_mm_xor_si128(c2[8612],simde_mm_xor_si128(c2[9273],simde_mm_xor_si128(c2[695],simde_mm_xor_si128(c2[5351],simde_mm_xor_si128(c2[4023],simde_mm_xor_si128(c2[7322],simde_mm_xor_si128(c2[102],simde_mm_xor_si128(c2[1422],simde_mm_xor_si128(c2[8023],simde_mm_xor_si128(c2[1421],simde_mm_xor_si128(c2[4742],simde_mm_xor_si128(c2[793],simde_mm_xor_si128(c2[4082],simde_mm_xor_si128(c2[1440],simde_mm_xor_si128(c2[9398],simde_mm_xor_si128(c2[6761],simde_mm_xor_si128(c2[4113],simde_mm_xor_si128(c2[8768],simde_mm_xor_si128(c2[10090],simde_mm_xor_si128(c2[3486],simde_mm_xor_si128(c2[4179],simde_mm_xor_si128(c2[8804],simde_mm_xor_si128(c2[2850],simde_mm_xor_si128(c2[4214],simde_mm_xor_si128(c2[4209],simde_mm_xor_si128(c2[6849],simde_mm_xor_si128(c2[10176],simde_mm_xor_si128(c2[1597],simde_mm_xor_si128(c2[1595],simde_mm_xor_si128(c2[7562],simde_mm_xor_si128(c2[9547],simde_mm_xor_si128(c2[7563],simde_mm_xor_si128(c2[4951],simde_mm_xor_si128(c2[3634],simde_mm_xor_si128(c2[1652],simde_mm_xor_si128(c2[8259],simde_mm_xor_si128(c2[6971],simde_mm_xor_si128(c2[4980],simde_mm_xor_si128(c2[365],simde_mm_xor_si128(c2[3698],simde_mm_xor_si128(c2[394],simde_mm_xor_si128(c2[1715],simde_mm_xor_si128(c2[6369],simde_mm_xor_si128(c2[10329],simde_mm_xor_si128(c2[1094],simde_mm_xor_si128(c2[1122],simde_mm_xor_si128(c2[10358],simde_mm_xor_si128(c2[7715],simde_mm_xor_si128(c2[2462],simde_mm_xor_si128(c2[2473],simde_mm_xor_si128(c2[1801],simde_mm_xor_si128(c2[1179],simde_mm_xor_si128(c2[7119],simde_mm_xor_si128(c2[7115],simde_mm_xor_si128(c2[4502],simde_mm_xor_si128(c2[5171],simde_mm_xor_si128(c2[3843],simde_mm_xor_si128(c2[3219],simde_mm_xor_si128(c2[9163],simde_mm_xor_si128(c2[9157],simde_mm_xor_si128(c2[2590],simde_mm_xor_si128(c2[603],simde_mm_xor_si128(c2[4574],simde_mm_xor_si128(c2[2622],simde_mm_xor_si128(c2[1952],c2[3932]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 25
+     d2[375]=simde_mm_xor_si128(c2[7291],simde_mm_xor_si128(c2[8768],simde_mm_xor_si128(c2[7470],c2[3731])));
+
+//row: 26
+     d2[390]=simde_mm_xor_si128(c2[1990],simde_mm_xor_si128(c2[2048],simde_mm_xor_si128(c2[6070],c2[1776])));
+
+//row: 27
+     d2[405]=simde_mm_xor_si128(c2[3340],simde_mm_xor_si128(c2[5474],c2[6180]));
+
+//row: 28
+     d2[420]=simde_mm_xor_si128(c2[2],simde_mm_xor_si128(c2[7382],simde_mm_xor_si128(c2[5862],c2[9222])));
+
+//row: 29
+     d2[435]=simde_mm_xor_si128(c2[9914],simde_mm_xor_si128(c2[5292],simde_mm_xor_si128(c2[3303],simde_mm_xor_si128(c2[4634],simde_mm_xor_si128(c2[5294],simde_mm_xor_si128(c2[7295],simde_mm_xor_si128(c2[7956],simde_mm_xor_si128(c2[9277],simde_mm_xor_si128(c2[9937],simde_mm_xor_si128(c2[5324],simde_mm_xor_si128(c2[4034],simde_mm_xor_si128(c2[2706],simde_mm_xor_si128(c2[6005],simde_mm_xor_si128(c2[9344],simde_mm_xor_si128(c2[90],simde_mm_xor_si128(c2[6031],simde_mm_xor_si128(c2[6691],simde_mm_xor_si128(c2[3425],simde_mm_xor_si128(c2[10020],simde_mm_xor_si128(c2[2105],simde_mm_xor_si128(c2[2765],simde_mm_xor_si128(c2[8081],simde_mm_xor_si128(c2[5444],simde_mm_xor_si128(c2[2796],simde_mm_xor_si128(c2[7451],simde_mm_xor_si128(c2[8773],simde_mm_xor_si128(c2[1509],simde_mm_xor_si128(c2[2169],simde_mm_xor_si128(c2[2862],simde_mm_xor_si128(c2[7472],simde_mm_xor_si128(c2[873],simde_mm_xor_si128(c2[1533],simde_mm_xor_si128(c2[2882],simde_mm_xor_si128(c2[2892],simde_mm_xor_si128(c2[4872],simde_mm_xor_si128(c2[5532],simde_mm_xor_si128(c2[8859],simde_mm_xor_si128(c2[280],simde_mm_xor_si128(c2[278],simde_mm_xor_si128(c2[6245],simde_mm_xor_si128(c2[8230],simde_mm_xor_si128(c2[5586],simde_mm_xor_si128(c2[6246],simde_mm_xor_si128(c2[3634],simde_mm_xor_si128(c2[2317],simde_mm_xor_si128(c2[10234],simde_mm_xor_si128(c2[335],simde_mm_xor_si128(c2[5654],simde_mm_xor_si128(c2[3663],simde_mm_xor_si128(c2[8947],simde_mm_xor_si128(c2[9607],simde_mm_xor_si128(c2[2381],simde_mm_xor_si128(c2[9636],simde_mm_xor_si128(c2[10297],simde_mm_xor_si128(c2[398],simde_mm_xor_si128(c2[5052],simde_mm_xor_si128(c2[9012],simde_mm_xor_si128(c2[9661],simde_mm_xor_si128(c2[10321],simde_mm_xor_si128(c2[422],simde_mm_xor_si128(c2[10364],simde_mm_xor_si128(c2[9041],simde_mm_xor_si128(c2[6398],simde_mm_xor_si128(c2[1145],simde_mm_xor_si128(c2[1141],simde_mm_xor_si128(c2[10383],simde_mm_xor_si128(c2[484],simde_mm_xor_si128(c2[10421],simde_mm_xor_si128(c2[5802],simde_mm_xor_si128(c2[5138],simde_mm_xor_si128(c2[5798],simde_mm_xor_si128(c2[3185],simde_mm_xor_si128(c2[3854],simde_mm_xor_si128(c2[1866],simde_mm_xor_si128(c2[2526],simde_mm_xor_si128(c2[4507],simde_mm_xor_si128(c2[1902],simde_mm_xor_si128(c2[7831],simde_mm_xor_si128(c2[7840],simde_mm_xor_si128(c2[1273],simde_mm_xor_si128(c2[9845],simde_mm_xor_si128(c2[2582],simde_mm_xor_si128(c2[3242],simde_mm_xor_si128(c2[1290],simde_mm_xor_si128(c2[635],simde_mm_xor_si128(c2[1955],c2[2615]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 30
+     d2[450]=simde_mm_xor_si128(c2[660],simde_mm_xor_si128(c2[6612],simde_mm_xor_si128(c2[3963],simde_mm_xor_si128(c2[4623],simde_mm_xor_si128(c2[5954],simde_mm_xor_si128(c2[6614],simde_mm_xor_si128(c2[6610],simde_mm_xor_si128(c2[8615],simde_mm_xor_si128(c2[8616],simde_mm_xor_si128(c2[9276],simde_mm_xor_si128(c2[38],simde_mm_xor_si128(c2[698],simde_mm_xor_si128(c2[5354],simde_mm_xor_si128(c2[4026],simde_mm_xor_si128(c2[6665],simde_mm_xor_si128(c2[7325],simde_mm_xor_si128(c2[90],simde_mm_xor_si128(c2[1410],simde_mm_xor_si128(c2[7351],simde_mm_xor_si128(c2[8011],simde_mm_xor_si128(c2[4745],simde_mm_xor_si128(c2[121],simde_mm_xor_si128(c2[781],simde_mm_xor_si128(c2[3425],simde_mm_xor_si128(c2[4085],simde_mm_xor_si128(c2[9401],simde_mm_xor_si128(c2[6764],simde_mm_xor_si128(c2[3456],simde_mm_xor_si128(c2[4116],simde_mm_xor_si128(c2[8771],simde_mm_xor_si128(c2[9433],simde_mm_xor_si128(c2[10093],simde_mm_xor_si128(c2[2829],simde_mm_xor_si128(c2[3489],simde_mm_xor_si128(c2[4182],simde_mm_xor_si128(c2[8132],simde_mm_xor_si128(c2[8792],simde_mm_xor_si128(c2[2193],simde_mm_xor_si128(c2[2853],simde_mm_xor_si128(c2[4202],simde_mm_xor_si128(c2[3552],simde_mm_xor_si128(c2[4212],simde_mm_xor_si128(c2[6192],simde_mm_xor_si128(c2[6852],simde_mm_xor_si128(c2[10179],simde_mm_xor_si128(c2[1600],simde_mm_xor_si128(c2[938],simde_mm_xor_si128(c2[1598],simde_mm_xor_si128(c2[7565],simde_mm_xor_si128(c2[8890],simde_mm_xor_si128(c2[9550],simde_mm_xor_si128(c2[6906],simde_mm_xor_si128(c2[7566],simde_mm_xor_si128(c2[4932],simde_mm_xor_si128(c2[4954],simde_mm_xor_si128(c2[3637],simde_mm_xor_si128(c2[995],simde_mm_xor_si128(c2[1655],simde_mm_xor_si128(c2[6974],simde_mm_xor_si128(c2[4983],simde_mm_xor_si128(c2[10267],simde_mm_xor_si128(c2[368],simde_mm_xor_si128(c2[3701],simde_mm_xor_si128(c2[10296],simde_mm_xor_si128(c2[397],simde_mm_xor_si128(c2[1058],simde_mm_xor_si128(c2[1718],simde_mm_xor_si128(c2[6991],simde_mm_xor_si128(c2[6372],simde_mm_xor_si128(c2[9672],simde_mm_xor_si128(c2[10332],simde_mm_xor_si128(c2[422],simde_mm_xor_si128(c2[1082],simde_mm_xor_si128(c2[1110],simde_mm_xor_si128(c2[10361],simde_mm_xor_si128(c2[7058],simde_mm_xor_si128(c2[7718],simde_mm_xor_si128(c2[2465],simde_mm_xor_si128(c2[2461],simde_mm_xor_si128(c2[1144],simde_mm_xor_si128(c2[1804],simde_mm_xor_si128(c2[1182],simde_mm_xor_si128(c2[6462],simde_mm_xor_si128(c2[7122],simde_mm_xor_si128(c2[6458],simde_mm_xor_si128(c2[7118],simde_mm_xor_si128(c2[4505],simde_mm_xor_si128(c2[4514],simde_mm_xor_si128(c2[5174],simde_mm_xor_si128(c2[3186],simde_mm_xor_si128(c2[3846],simde_mm_xor_si128(c2[3222],simde_mm_xor_si128(c2[9151],simde_mm_xor_si128(c2[8500],simde_mm_xor_si128(c2[9160],simde_mm_xor_si128(c2[2593],simde_mm_xor_si128(c2[10505],simde_mm_xor_si128(c2[606],simde_mm_xor_si128(c2[3902],simde_mm_xor_si128(c2[4562],simde_mm_xor_si128(c2[2610],simde_mm_xor_si128(c2[1955],simde_mm_xor_si128(c2[3275],c2[3935])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 31
+     d2[465]=simde_mm_xor_si128(c2[3964],simde_mm_xor_si128(c2[2640],simde_mm_xor_si128(c2[9901],simde_mm_xor_si128(c2[8592],simde_mm_xor_si128(c2[7927],simde_mm_xor_si128(c2[6603],simde_mm_xor_si128(c2[9903],simde_mm_xor_si128(c2[7934],simde_mm_xor_si128(c2[8594],simde_mm_xor_si128(c2[1360],simde_mm_xor_si128(c2[36],simde_mm_xor_si128(c2[2021],simde_mm_xor_si128(c2[697],simde_mm_xor_si128(c2[4002],simde_mm_xor_si128(c2[2018],simde_mm_xor_si128(c2[2678],simde_mm_xor_si128(c2[5974],simde_mm_xor_si128(c2[8643],simde_mm_xor_si128(c2[7334],simde_mm_xor_si128(c2[7330],simde_mm_xor_si128(c2[6006],simde_mm_xor_si128(c2[70],simde_mm_xor_si128(c2[9305],simde_mm_xor_si128(c2[3394],simde_mm_xor_si128(c2[2070],simde_mm_xor_si128(c2[4714],simde_mm_xor_si128(c2[3390],simde_mm_xor_si128(c2[756],simde_mm_xor_si128(c2[9331],simde_mm_xor_si128(c2[9991],simde_mm_xor_si128(c2[8049],simde_mm_xor_si128(c2[6725],simde_mm_xor_si128(c2[4085],simde_mm_xor_si128(c2[2761],simde_mm_xor_si128(c2[7389],simde_mm_xor_si128(c2[5405],simde_mm_xor_si128(c2[6065],simde_mm_xor_si128(c2[2131],simde_mm_xor_si128(c2[822],simde_mm_xor_si128(c2[10053],simde_mm_xor_si128(c2[8744],simde_mm_xor_si128(c2[7420],simde_mm_xor_si128(c2[6096],simde_mm_xor_si128(c2[1501],simde_mm_xor_si128(c2[192],simde_mm_xor_si128(c2[2823],simde_mm_xor_si128(c2[1514],simde_mm_xor_si128(c2[6793],simde_mm_xor_si128(c2[4809],simde_mm_xor_si128(c2[5469],simde_mm_xor_si128(c2[7471],simde_mm_xor_si128(c2[6162],simde_mm_xor_si128(c2[1537],simde_mm_xor_si128(c2[213],simde_mm_xor_si128(c2[6157],simde_mm_xor_si128(c2[4173],simde_mm_xor_si128(c2[4833],simde_mm_xor_si128(c2[3519],simde_mm_xor_si128(c2[7506],simde_mm_xor_si128(c2[6182],simde_mm_xor_si128(c2[7501],simde_mm_xor_si128(c2[6192],simde_mm_xor_si128(c2[10141],simde_mm_xor_si128(c2[8172],simde_mm_xor_si128(c2[8832],simde_mm_xor_si128(c2[2924],simde_mm_xor_si128(c2[1600],simde_mm_xor_si128(c2[4904],simde_mm_xor_si128(c2[3580],simde_mm_xor_si128(c2[4902],simde_mm_xor_si128(c2[3578],simde_mm_xor_si128(c2[310],simde_mm_xor_si128(c2[9545],simde_mm_xor_si128(c2[2280],simde_mm_xor_si128(c2[971],simde_mm_xor_si128(c2[311],simde_mm_xor_si128(c2[8886],simde_mm_xor_si128(c2[9546],simde_mm_xor_si128(c2[8258],simde_mm_xor_si128(c2[6934],simde_mm_xor_si128(c2[6941],simde_mm_xor_si128(c2[5617],simde_mm_xor_si128(c2[4959],simde_mm_xor_si128(c2[2975],simde_mm_xor_si128(c2[3635],simde_mm_xor_si128(c2[10263],simde_mm_xor_si128(c2[8954],simde_mm_xor_si128(c2[8287],simde_mm_xor_si128(c2[6963],simde_mm_xor_si128(c2[3672],simde_mm_xor_si128(c2[1688],simde_mm_xor_si128(c2[2348],simde_mm_xor_si128(c2[6990],simde_mm_xor_si128(c2[5681],simde_mm_xor_si128(c2[3701],simde_mm_xor_si128(c2[2377],simde_mm_xor_si128(c2[5022],simde_mm_xor_si128(c2[3038],simde_mm_xor_si128(c2[3698],simde_mm_xor_si128(c2[9661],simde_mm_xor_si128(c2[8352],simde_mm_xor_si128(c2[3062],simde_mm_xor_si128(c2[1753],simde_mm_xor_si128(c2[4386],simde_mm_xor_si128(c2[2402],simde_mm_xor_si128(c2[3062],simde_mm_xor_si128(c2[4414],simde_mm_xor_si128(c2[3090],simde_mm_xor_si128(c2[3091],simde_mm_xor_si128(c2[1782],simde_mm_xor_si128(c2[463],simde_mm_xor_si128(c2[9698],simde_mm_xor_si128(c2[5769],simde_mm_xor_si128(c2[4445],simde_mm_xor_si128(c2[5765],simde_mm_xor_si128(c2[4441],simde_mm_xor_si128(c2[5108],simde_mm_xor_si128(c2[3124],simde_mm_xor_si128(c2[3784],simde_mm_xor_si128(c2[4471],simde_mm_xor_si128(c2[3162],simde_mm_xor_si128(c2[10411],simde_mm_xor_si128(c2[9102],simde_mm_xor_si128(c2[10422],simde_mm_xor_si128(c2[8438],simde_mm_xor_si128(c2[9098],simde_mm_xor_si128(c2[7809],simde_mm_xor_si128(c2[6485],simde_mm_xor_si128(c2[8463],simde_mm_xor_si128(c2[7154],simde_mm_xor_si128(c2[7150],simde_mm_xor_si128(c2[5166],simde_mm_xor_si128(c2[5826],simde_mm_xor_si128(c2[6511],simde_mm_xor_si128(c2[5202],simde_mm_xor_si128(c2[1896],simde_mm_xor_si128(c2[572],simde_mm_xor_si128(c2[1890],simde_mm_xor_si128(c2[581],simde_mm_xor_si128(c2[5882],simde_mm_xor_si128(c2[4573],simde_mm_xor_si128(c2[3910],simde_mm_xor_si128(c2[2586],simde_mm_xor_si128(c2[7866],simde_mm_xor_si128(c2[5882],simde_mm_xor_si128(c2[6542],simde_mm_xor_si128(c2[5914],simde_mm_xor_si128(c2[4590],simde_mm_xor_si128(c2[5259],simde_mm_xor_si128(c2[3935],simde_mm_xor_si128(c2[7239],simde_mm_xor_si128(c2[5255],c2[5915]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 32
+     d2[480]=simde_mm_xor_si128(c2[7266],simde_mm_xor_si128(c2[2644],simde_mm_xor_si128(c2[10],simde_mm_xor_si128(c2[670],simde_mm_xor_si128(c2[1986],simde_mm_xor_si128(c2[2646],simde_mm_xor_si128(c2[4626],simde_mm_xor_si128(c2[4662],simde_mm_xor_si128(c2[4663],simde_mm_xor_si128(c2[5323],simde_mm_xor_si128(c2[6644],simde_mm_xor_si128(c2[7304],simde_mm_xor_si128(c2[1386],simde_mm_xor_si128(c2[73],simde_mm_xor_si128(c2[2712],simde_mm_xor_si128(c2[3372],simde_mm_xor_si128(c2[6696],simde_mm_xor_si128(c2[8016],simde_mm_xor_si128(c2[3398],simde_mm_xor_si128(c2[4058],simde_mm_xor_si128(c2[792],simde_mm_xor_si128(c2[6727],simde_mm_xor_si128(c2[7387],simde_mm_xor_si128(c2[10031],simde_mm_xor_si128(c2[132],simde_mm_xor_si128(c2[5433],simde_mm_xor_si128(c2[2796],simde_mm_xor_si128(c2[10062],simde_mm_xor_si128(c2[163],simde_mm_xor_si128(c2[4803],simde_mm_xor_si128(c2[5465],simde_mm_xor_si128(c2[6125],simde_mm_xor_si128(c2[9420],simde_mm_xor_si128(c2[10080],simde_mm_xor_si128(c2[214],simde_mm_xor_si128(c2[4179],simde_mm_xor_si128(c2[4839],simde_mm_xor_si128(c2[8799],simde_mm_xor_si128(c2[9459],simde_mm_xor_si128(c2[249],simde_mm_xor_si128(c2[10143],simde_mm_xor_si128(c2[244],simde_mm_xor_si128(c2[2224],simde_mm_xor_si128(c2[2884],simde_mm_xor_si128(c2[6211],simde_mm_xor_si128(c2[8191],simde_mm_xor_si128(c2[7544],simde_mm_xor_si128(c2[8204],simde_mm_xor_si128(c2[3612],simde_mm_xor_si128(c2[4922],simde_mm_xor_si128(c2[5582],simde_mm_xor_si128(c2[2953],simde_mm_xor_si128(c2[3613],simde_mm_xor_si128(c2[1001],simde_mm_xor_si128(c2[10243],simde_mm_xor_si128(c2[7601],simde_mm_xor_si128(c2[8261],simde_mm_xor_si128(c2[3006],simde_mm_xor_si128(c2[1030],simde_mm_xor_si128(c2[6314],simde_mm_xor_si128(c2[6974],simde_mm_xor_si128(c2[9606],simde_mm_xor_si128(c2[10292],simde_mm_xor_si128(c2[6343],simde_mm_xor_si128(c2[7003],simde_mm_xor_si128(c2[7664],simde_mm_xor_si128(c2[8324],simde_mm_xor_si128(c2[2404],simde_mm_xor_si128(c2[5704],simde_mm_xor_si128(c2[6364],simde_mm_xor_si128(c2[7028],simde_mm_xor_si128(c2[7688],simde_mm_xor_si128(c2[5049],simde_mm_xor_si128(c2[7716],simde_mm_xor_si128(c2[6393],simde_mm_xor_si128(c2[3090],simde_mm_xor_si128(c2[3750],simde_mm_xor_si128(c2[9071],simde_mm_xor_si128(c2[9067],simde_mm_xor_si128(c2[7750],simde_mm_xor_si128(c2[8410],simde_mm_xor_si128(c2[7773],simde_mm_xor_si128(c2[2494],simde_mm_xor_si128(c2[3154],simde_mm_xor_si128(c2[2490],simde_mm_xor_si128(c2[3150],simde_mm_xor_si128(c2[552],simde_mm_xor_si128(c2[546],simde_mm_xor_si128(c2[1206],simde_mm_xor_si128(c2[9792],simde_mm_xor_si128(c2[10452],simde_mm_xor_si128(c2[9813],simde_mm_xor_si128(c2[5198],simde_mm_xor_si128(c2[4532],simde_mm_xor_si128(c2[5192],simde_mm_xor_si128(c2[9184],simde_mm_xor_si128(c2[6552],simde_mm_xor_si128(c2[7212],simde_mm_xor_si128(c2[10508],simde_mm_xor_si128(c2[609],simde_mm_xor_si128(c2[9216],simde_mm_xor_si128(c2[8561],simde_mm_xor_si128(c2[9881],c2[10541])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 33
+     d2[495]=simde_mm_xor_si128(c2[4662],simde_mm_xor_si128(c2[2708],simde_mm_xor_si128(c2[8260],c2[1294])));
+
+//row: 34
+     d2[510]=simde_mm_xor_si128(c2[670],simde_mm_xor_si128(c2[8804],simde_mm_xor_si128(c2[9698],c2[3161])));
+
+//row: 35
+     d2[525]=simde_mm_xor_si128(c2[7274],simde_mm_xor_si128(c2[2652],simde_mm_xor_si128(c2[663],simde_mm_xor_si128(c2[2654],simde_mm_xor_si128(c2[4655],simde_mm_xor_si128(c2[5316],simde_mm_xor_si128(c2[7297],simde_mm_xor_si128(c2[4664],simde_mm_xor_si128(c2[1394],simde_mm_xor_si128(c2[66],simde_mm_xor_si128(c2[3365],simde_mm_xor_si128(c2[6704],simde_mm_xor_si128(c2[8024],simde_mm_xor_si128(c2[4051],simde_mm_xor_si128(c2[785],simde_mm_xor_si128(c2[7380],simde_mm_xor_si128(c2[125],simde_mm_xor_si128(c2[5441],simde_mm_xor_si128(c2[2804],simde_mm_xor_si128(c2[156],simde_mm_xor_si128(c2[4811],simde_mm_xor_si128(c2[6133],simde_mm_xor_si128(c2[10088],simde_mm_xor_si128(c2[9430],simde_mm_xor_si128(c2[222],simde_mm_xor_si128(c2[4832],simde_mm_xor_si128(c2[9452],simde_mm_xor_si128(c2[242],simde_mm_xor_si128(c2[252],simde_mm_xor_si128(c2[2892],simde_mm_xor_si128(c2[6219],simde_mm_xor_si128(c2[8199],simde_mm_xor_si128(c2[8197],simde_mm_xor_si128(c2[3605],simde_mm_xor_si128(c2[5590],simde_mm_xor_si128(c2[3606],simde_mm_xor_si128(c2[994],simde_mm_xor_si128(c2[10236],simde_mm_xor_si128(c2[8254],simde_mm_xor_si128(c2[3014],simde_mm_xor_si128(c2[1023],simde_mm_xor_si128(c2[6967],simde_mm_xor_si128(c2[1029],simde_mm_xor_si128(c2[10300],simde_mm_xor_si128(c2[6996],simde_mm_xor_si128(c2[8317],simde_mm_xor_si128(c2[2412],simde_mm_xor_si128(c2[6372],simde_mm_xor_si128(c2[7681],simde_mm_xor_si128(c2[7724],simde_mm_xor_si128(c2[6401],simde_mm_xor_si128(c2[3758],simde_mm_xor_si128(c2[9064],simde_mm_xor_si128(c2[9060],simde_mm_xor_si128(c2[8403],simde_mm_xor_si128(c2[7781],simde_mm_xor_si128(c2[3162],simde_mm_xor_si128(c2[3158],simde_mm_xor_si128(c2[545],simde_mm_xor_si128(c2[1214],simde_mm_xor_si128(c2[10445],simde_mm_xor_si128(c2[9821],simde_mm_xor_si128(c2[5191],simde_mm_xor_si128(c2[5200],simde_mm_xor_si128(c2[9192],simde_mm_xor_si128(c2[7205],simde_mm_xor_si128(c2[602],simde_mm_xor_si128(c2[9224],simde_mm_xor_si128(c2[8554],c2[10534])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 36
+     d2[540]=simde_mm_xor_si128(c2[7260],simde_mm_xor_si128(c2[10332],simde_mm_xor_si128(c2[7052],c2[3186])));
+
+//row: 37
+     d2[555]=simde_mm_xor_si128(c2[5283],simde_mm_xor_si128(c2[5943],simde_mm_xor_si128(c2[1321],simde_mm_xor_si128(c2[9906],simde_mm_xor_si128(c2[1323],simde_mm_xor_si128(c2[2679],simde_mm_xor_si128(c2[3339],simde_mm_xor_si128(c2[4000],simde_mm_xor_si128(c2[5981],simde_mm_xor_si128(c2[7293],simde_mm_xor_si128(c2[9962],simde_mm_xor_si128(c2[63],simde_mm_xor_si128(c2[9309],simde_mm_xor_si128(c2[2049],simde_mm_xor_si128(c2[4713],simde_mm_xor_si128(c2[5373],simde_mm_xor_si128(c2[6693],simde_mm_xor_si128(c2[2735],simde_mm_xor_si128(c2[10028],simde_mm_xor_si128(c2[6064],simde_mm_xor_si128(c2[9368],simde_mm_xor_si128(c2[3450],simde_mm_xor_si128(c2[4110],simde_mm_xor_si128(c2[1473],simde_mm_xor_si128(c2[9399],simde_mm_xor_si128(c2[2820],simde_mm_xor_si128(c2[3480],simde_mm_xor_si128(c2[4802],simde_mm_xor_si128(c2[8772],simde_mm_xor_si128(c2[9450],simde_mm_xor_si128(c2[3516],simde_mm_xor_si128(c2[8136],simde_mm_xor_si128(c2[9485],simde_mm_xor_si128(c2[9480],simde_mm_xor_si128(c2[1561],simde_mm_xor_si128(c2[4243],simde_mm_xor_si128(c2[4903],simde_mm_xor_si128(c2[6883],simde_mm_xor_si128(c2[6881],simde_mm_xor_si128(c2[1629],simde_mm_xor_si128(c2[2289],simde_mm_xor_si128(c2[4274],simde_mm_xor_si128(c2[2290],simde_mm_xor_si128(c2[9577],simde_mm_xor_si128(c2[10237],simde_mm_xor_si128(c2[8920],simde_mm_xor_si128(c2[6938],simde_mm_xor_si128(c2[1023],simde_mm_xor_si128(c2[1683],simde_mm_xor_si128(c2[10266],simde_mm_xor_si128(c2[5651],simde_mm_xor_si128(c2[8324],simde_mm_xor_si128(c2[8984],simde_mm_xor_si128(c2[5680],simde_mm_xor_si128(c2[7001],simde_mm_xor_si128(c2[8322],simde_mm_xor_si128(c2[1081],simde_mm_xor_si128(c2[5041],simde_mm_xor_si128(c2[6365],simde_mm_xor_si128(c2[5733],simde_mm_xor_si128(c2[6393],simde_mm_xor_si128(c2[5070],simde_mm_xor_si128(c2[2442],simde_mm_xor_si128(c2[7088],simde_mm_xor_si128(c2[7748],simde_mm_xor_si128(c2[7744],simde_mm_xor_si128(c2[7087],simde_mm_xor_si128(c2[6450],simde_mm_xor_si128(c2[1831],simde_mm_xor_si128(c2[1842],simde_mm_xor_si128(c2[9128],simde_mm_xor_si128(c2[9788],simde_mm_xor_si128(c2[10442],simde_mm_xor_si128(c2[9129],simde_mm_xor_si128(c2[7830],simde_mm_xor_si128(c2[8490],simde_mm_xor_si128(c2[3875],simde_mm_xor_si128(c2[3884],simde_mm_xor_si128(c2[7201],simde_mm_xor_si128(c2[7861],simde_mm_xor_si128(c2[5889],simde_mm_xor_si128(c2[9845],simde_mm_xor_si128(c2[7233],simde_mm_xor_si128(c2[7893],simde_mm_xor_si128(c2[7238],c2[9218])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 38
+     d2[570]=simde_mm_xor_si128(c2[5947],simde_mm_xor_si128(c2[6875],simde_mm_xor_si128(c2[6901],c2[8288])));
+
+//row: 39
+     d2[585]=simde_mm_xor_si128(c2[2017],simde_mm_xor_si128(c2[8021],simde_mm_xor_si128(c2[5500],c2[3213])));
+
+//row: 40
+     d2[600]=simde_mm_xor_si128(c2[2640],simde_mm_xor_si128(c2[4866],c2[9751]));
+
+//row: 41
+     d2[615]=simde_mm_xor_si128(c2[3333],simde_mm_xor_si128(c2[8681],simde_mm_xor_si128(c2[4903],c2[5821])));
+
+//row: 42
+     d2[630]=simde_mm_xor_si128(c2[9912],simde_mm_xor_si128(c2[5290],simde_mm_xor_si128(c2[2641],simde_mm_xor_si128(c2[3301],simde_mm_xor_si128(c2[4632],simde_mm_xor_si128(c2[5292],simde_mm_xor_si128(c2[9253],simde_mm_xor_si128(c2[7293],simde_mm_xor_si128(c2[7294],simde_mm_xor_si128(c2[7954],simde_mm_xor_si128(c2[9275],simde_mm_xor_si128(c2[9935],simde_mm_xor_si128(c2[4032],simde_mm_xor_si128(c2[2704],simde_mm_xor_si128(c2[5343],simde_mm_xor_si128(c2[6003],simde_mm_xor_si128(c2[9342],simde_mm_xor_si128(c2[103],simde_mm_xor_si128(c2[6044],simde_mm_xor_si128(c2[6704],simde_mm_xor_si128(c2[3423],simde_mm_xor_si128(c2[9373],simde_mm_xor_si128(c2[10033],simde_mm_xor_si128(c2[2103],simde_mm_xor_si128(c2[2763],simde_mm_xor_si128(c2[6730],simde_mm_xor_si128(c2[8079],simde_mm_xor_si128(c2[5442],simde_mm_xor_si128(c2[2134],simde_mm_xor_si128(c2[2794],simde_mm_xor_si128(c2[7449],simde_mm_xor_si128(c2[8111],simde_mm_xor_si128(c2[8771],simde_mm_xor_si128(c2[1507],simde_mm_xor_si128(c2[2167],simde_mm_xor_si128(c2[2860],simde_mm_xor_si128(c2[6810],simde_mm_xor_si128(c2[7470],simde_mm_xor_si128(c2[871],simde_mm_xor_si128(c2[1531],simde_mm_xor_si128(c2[2880],simde_mm_xor_si128(c2[2230],simde_mm_xor_si128(c2[2890],simde_mm_xor_si128(c2[4870],simde_mm_xor_si128(c2[5530],simde_mm_xor_si128(c2[8857],simde_mm_xor_si128(c2[278],simde_mm_xor_si128(c2[10175],simde_mm_xor_si128(c2[276],simde_mm_xor_si128(c2[6243],simde_mm_xor_si128(c2[7568],simde_mm_xor_si128(c2[8228],simde_mm_xor_si128(c2[5584],simde_mm_xor_si128(c2[6244],simde_mm_xor_si128(c2[3632],simde_mm_xor_si128(c2[2315],simde_mm_xor_si128(c2[10232],simde_mm_xor_si128(c2[333],simde_mm_xor_si128(c2[5652],simde_mm_xor_si128(c2[3661],simde_mm_xor_si128(c2[8945],simde_mm_xor_si128(c2[9605],simde_mm_xor_si128(c2[2379],simde_mm_xor_si128(c2[8974],simde_mm_xor_si128(c2[9634],simde_mm_xor_si128(c2[10295],simde_mm_xor_si128(c2[396],simde_mm_xor_si128(c2[5050],simde_mm_xor_si128(c2[8350],simde_mm_xor_si128(c2[9010],simde_mm_xor_si128(c2[9674],simde_mm_xor_si128(c2[10334],simde_mm_xor_si128(c2[10362],simde_mm_xor_si128(c2[9039],simde_mm_xor_si128(c2[5736],simde_mm_xor_si128(c2[6396],simde_mm_xor_si128(c2[1143],simde_mm_xor_si128(c2[1154],simde_mm_xor_si128(c2[10381],simde_mm_xor_si128(c2[482],simde_mm_xor_si128(c2[10419],simde_mm_xor_si128(c2[5140],simde_mm_xor_si128(c2[5800],simde_mm_xor_si128(c2[5136],simde_mm_xor_si128(c2[5796],simde_mm_xor_si128(c2[3183],simde_mm_xor_si128(c2[3192],simde_mm_xor_si128(c2[3852],simde_mm_xor_si128(c2[1864],simde_mm_xor_si128(c2[2524],simde_mm_xor_si128(c2[1900],simde_mm_xor_si128(c2[7844],simde_mm_xor_si128(c2[7178],simde_mm_xor_si128(c2[7838],simde_mm_xor_si128(c2[1271],simde_mm_xor_si128(c2[9183],simde_mm_xor_si128(c2[9843],simde_mm_xor_si128(c2[2580],simde_mm_xor_si128(c2[3240],simde_mm_xor_si128(c2[1303],simde_mm_xor_si128(c2[633],simde_mm_xor_si128(c2[1953],c2[2613]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 43
+     d2[645]=simde_mm_xor_si128(c2[3301],simde_mm_xor_si128(c2[9253],simde_mm_xor_si128(c2[7264],simde_mm_xor_si128(c2[8580],simde_mm_xor_si128(c2[9240],simde_mm_xor_si128(c2[697],simde_mm_xor_si128(c2[1358],simde_mm_xor_si128(c2[2679],simde_mm_xor_si128(c2[3339],simde_mm_xor_si128(c2[3991],simde_mm_xor_si128(c2[7980],simde_mm_xor_si128(c2[6667],simde_mm_xor_si128(c2[9966],simde_mm_xor_si128(c2[2731],simde_mm_xor_si128(c2[4051],simde_mm_xor_si128(c2[9992],simde_mm_xor_si128(c2[93],simde_mm_xor_si128(c2[7386],simde_mm_xor_si128(c2[3422],simde_mm_xor_si128(c2[6066],simde_mm_xor_si128(c2[6726],simde_mm_xor_si128(c2[1483],simde_mm_xor_si128(c2[9390],simde_mm_xor_si128(c2[6757],simde_mm_xor_si128(c2[853],simde_mm_xor_si128(c2[2160],simde_mm_xor_si128(c2[5470],simde_mm_xor_si128(c2[6130],simde_mm_xor_si128(c2[6823],simde_mm_xor_si128(c2[874],simde_mm_xor_si128(c2[4834],simde_mm_xor_si128(c2[5494],simde_mm_xor_si128(c2[6843],simde_mm_xor_si128(c2[6853],simde_mm_xor_si128(c2[8833],simde_mm_xor_si128(c2[9493],simde_mm_xor_si128(c2[2261],simde_mm_xor_si128(c2[4241],simde_mm_xor_si128(c2[4239],simde_mm_xor_si128(c2[10206],simde_mm_xor_si128(c2[1632],simde_mm_xor_si128(c2[9547],simde_mm_xor_si128(c2[10207],simde_mm_xor_si128(c2[7595],simde_mm_xor_si128(c2[6278],simde_mm_xor_si128(c2[3636],simde_mm_xor_si128(c2[4296],simde_mm_xor_si128(c2[9600],simde_mm_xor_si128(c2[7624],simde_mm_xor_si128(c2[2349],simde_mm_xor_si128(c2[3009],simde_mm_xor_si128(c2[6342],simde_mm_xor_si128(c2[3038],simde_mm_xor_si128(c2[3699],simde_mm_xor_si128(c2[4359],simde_mm_xor_si128(c2[9013],simde_mm_xor_si128(c2[2414],simde_mm_xor_si128(c2[3063],simde_mm_xor_si128(c2[3723],simde_mm_xor_si128(c2[3751],simde_mm_xor_si128(c2[2443],simde_mm_xor_si128(c2[10359],simde_mm_xor_si128(c2[5106],simde_mm_xor_si128(c2[5102],simde_mm_xor_si128(c2[3785],simde_mm_xor_si128(c2[4445],simde_mm_xor_si128(c2[10387],simde_mm_xor_si128(c2[3823],simde_mm_xor_si128(c2[9763],simde_mm_xor_si128(c2[9099],simde_mm_xor_si128(c2[9759],simde_mm_xor_si128(c2[7146],simde_mm_xor_si128(c2[7800],simde_mm_xor_si128(c2[5827],simde_mm_xor_si128(c2[6487],simde_mm_xor_si128(c2[1203],simde_mm_xor_si128(c2[5863],simde_mm_xor_si128(c2[1233],simde_mm_xor_si128(c2[1242],simde_mm_xor_si128(c2[5234],simde_mm_xor_si128(c2[3247],simde_mm_xor_si128(c2[6543],simde_mm_xor_si128(c2[7203],simde_mm_xor_si128(c2[5251],simde_mm_xor_si128(c2[4596],simde_mm_xor_si128(c2[5916],c2[6576]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 44
+     d2[660]=simde_mm_xor_si128(c2[7262],simde_mm_xor_si128(c2[2640],simde_mm_xor_si128(c2[666],simde_mm_xor_si128(c2[2642],simde_mm_xor_si128(c2[9911],simde_mm_xor_si128(c2[4658],simde_mm_xor_si128(c2[5319],simde_mm_xor_si128(c2[7300],simde_mm_xor_si128(c2[1382],simde_mm_xor_si128(c2[69],simde_mm_xor_si128(c2[3368],simde_mm_xor_si128(c2[6692],simde_mm_xor_si128(c2[8012],simde_mm_xor_si128(c2[4054],simde_mm_xor_si128(c2[788],simde_mm_xor_si128(c2[7383],simde_mm_xor_si128(c2[128],simde_mm_xor_si128(c2[5444],simde_mm_xor_si128(c2[2792],simde_mm_xor_si128(c2[159],simde_mm_xor_si128(c2[4814],simde_mm_xor_si128(c2[6121],simde_mm_xor_si128(c2[10091],simde_mm_xor_si128(c2[210],simde_mm_xor_si128(c2[4835],simde_mm_xor_si128(c2[9455],simde_mm_xor_si128(c2[2203],simde_mm_xor_si128(c2[245],simde_mm_xor_si128(c2[240],simde_mm_xor_si128(c2[2880],simde_mm_xor_si128(c2[6222],simde_mm_xor_si128(c2[8202],simde_mm_xor_si128(c2[8200],simde_mm_xor_si128(c2[7541],simde_mm_xor_si128(c2[3608],simde_mm_xor_si128(c2[5593],simde_mm_xor_si128(c2[3609],simde_mm_xor_si128(c2[997],simde_mm_xor_si128(c2[10239],simde_mm_xor_si128(c2[8257],simde_mm_xor_si128(c2[3002],simde_mm_xor_si128(c2[1026],simde_mm_xor_si128(c2[6970],simde_mm_xor_si128(c2[10303],simde_mm_xor_si128(c2[6999],simde_mm_xor_si128(c2[8320],simde_mm_xor_si128(c2[2400],simde_mm_xor_si128(c2[6360],simde_mm_xor_si128(c2[7684],simde_mm_xor_si128(c2[7712],simde_mm_xor_si128(c2[6404],simde_mm_xor_si128(c2[3761],simde_mm_xor_si128(c2[9067],simde_mm_xor_si128(c2[9063],simde_mm_xor_si128(c2[8406],simde_mm_xor_si128(c2[7784],simde_mm_xor_si128(c2[3150],simde_mm_xor_si128(c2[3161],simde_mm_xor_si128(c2[548],simde_mm_xor_si128(c2[1202],simde_mm_xor_si128(c2[10448],simde_mm_xor_si128(c2[9824],simde_mm_xor_si128(c2[5194],simde_mm_xor_si128(c2[5203],simde_mm_xor_si128(c2[9180],simde_mm_xor_si128(c2[7208],simde_mm_xor_si128(c2[605],simde_mm_xor_si128(c2[9212],simde_mm_xor_si128(c2[8557],c2[10537])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 45
+     d2[675]=simde_mm_xor_si128(c2[701],simde_mm_xor_si128(c2[1507],c2[8885]));
+  }
+}
+#endif
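Each generated row above is a fixed XOR accumulation over 128-bit words of the circulant-expanded codeword; for instance, row 45 computes d2[675] = c2[701] ^ c2[1507] ^ c2[8885]. A minimal scalar sketch of that operation (editor's illustration, not part of the patch; xor_row_128 and its index table are hypothetical, the real tables being emitted by the LDPC code generator):

#include <stdint.h>

/* Sketch: d[dst] = XOR of the listed source words, 16 bytes (one 128-bit
 * word) each -- the same thing a chain of simde_mm_xor_si128() calls does. */
static void xor_row_128(const uint8_t c[][16], uint8_t d[][16],
                        const int *src, int nsrc, int dst)
{
  for (int b = 0; b < 16; b++) {
    uint8_t acc = 0;
    for (int k = 0; k < nsrc; k++)
      acc ^= c[src[k]][b];   /* accumulate each source word byte-wise */
    d[dst][b] = acc;
  }
}

/* Usage matching row 45 above (hypothetical):
 *   int src[3] = {701, 1507, 8885};
 *   xor_row_128(c2, d2, src, 3, 675);
 */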
diff --git a/openair1/PHY/CODING/nrLDPC_encoder/ldpc256_byte.c b/openair1/PHY/CODING/nrLDPC_encoder/ldpc256_byte.c
index 90448aa121c0022cb57084fec2f3e7d72b4bf893..4be5d7bbfce7857938402a37b9d74707bf679990 100644
--- a/openair1/PHY/CODING/nrLDPC_encoder/ldpc256_byte.c
+++ b/openair1/PHY/CODING/nrLDPC_encoder/ldpc256_byte.c
@@ -1,9 +1,10 @@
+#ifdef __AVX2__
 #include "PHY/sse_intrin.h"
 // generated code for Zc=256, byte encoding
 static inline void ldpc256_byte(uint8_t *c,uint8_t *d) {
-  __m256i *csimd=(__m256i *)c,*dsimd=(__m256i *)d;
+  simde__m256i *csimd=(simde__m256i *)c,*dsimd=(simde__m256i *)d;
 
-  __m256i *c2,*d2;
+  simde__m256i *c2,*d2;
 
   int i2;
   for (i2=0; i2<8; i2++) {
@@ -149,3 +150,4 @@ static inline void ldpc256_byte(uint8_t *c,uint8_t *d) {
      d2[360]=simde_mm256_xor_si256(c2[7412],simde_mm256_xor_si256(c2[8196],c2[2629]));
   }
 }
+#endif
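ldpc256_byte.c (now guarded by #ifdef __AVX2__) and the new ldpc256_byte_128.c below (guarded by #ifndef __AVX2__) define the same static inline ldpc256_byte(), so the complementary guards leave exactly one definition visible per translation unit. A minimal sketch of that pattern (editor's illustration, not part of the patch; encode_zc256 is a hypothetical wrapper, and how OAI's encoder actually pulls in the variants is outside this hunk):

#include <stdint.h>

/* Exactly one of the two includes contributes code, selected by whether
 * the compiler defines __AVX2__; the caller never sees the difference. */
#include "ldpc256_byte.c"      /* kept when __AVX2__ is defined       */
#include "ldpc256_byte_128.c"  /* kept otherwise (SIMDE 128-bit path) */

void encode_zc256(uint8_t *c, uint8_t *d) { ldpc256_byte(c, d); }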
diff --git a/openair1/PHY/CODING/nrLDPC_encoder/ldpc256_byte_128.c b/openair1/PHY/CODING/nrLDPC_encoder/ldpc256_byte_128.c
new file mode 100644
index 0000000000000000000000000000000000000000..51c904e793005fbc8804c6eac8da441781254c4e
--- /dev/null
+++ b/openair1/PHY/CODING/nrLDPC_encoder/ldpc256_byte_128.c
@@ -0,0 +1,153 @@
+#ifndef __AVX2__
+#include "PHY/sse_intrin.h"
+// generated code for Zc=256, byte encoding
+static inline void ldpc256_byte(uint8_t *c,uint8_t *d) {
+  simde__m128i *csimd=(simde__m128i *)c,*dsimd=(simde__m128i *)d;
+
+  simde__m128i *c2,*d2;
+
+  int i2;
+  for (i2=0; i2<16; i2++) {
+     c2=&csimd[i2];
+     d2=&dsimd[i2];
+
+//row: 0
+     d2[0]=simde_mm_xor_si128(c2[7055],simde_mm_xor_si128(c2[1408],simde_mm_xor_si128(c2[7046],simde_mm_xor_si128(c2[6343],simde_mm_xor_si128(c2[3556],simde_mm_xor_si128(c2[10598],simde_mm_xor_si128(c2[6373],simde_mm_xor_si128(c2[1486],simde_mm_xor_si128(c2[10638],simde_mm_xor_si128(c2[6411],simde_mm_xor_si128(c2[10665],simde_mm_xor_si128(c2[3623],simde_mm_xor_si128(c2[2917],simde_mm_xor_si128(c2[8583],simde_mm_xor_si128(c2[10691],simde_mm_xor_si128(c2[2945],simde_mm_xor_si128(c2[2982],simde_mm_xor_si128(c2[5092],simde_mm_xor_si128(c2[3687],simde_mm_xor_si128(c2[7232],simde_mm_xor_si128(c2[9349],simde_mm_xor_si128(c2[4425],simde_mm_xor_si128(c2[10093],simde_mm_xor_si128(c2[3758],simde_mm_xor_si128(c2[2344],simde_mm_xor_si128(c2[5894],simde_mm_xor_si128(c2[971],simde_mm_xor_si128(c2[2383],simde_mm_xor_si128(c2[8035],simde_mm_xor_si128(c2[9450],simde_mm_xor_si128(c2[10853],simde_mm_xor_si128(c2[3854],simde_mm_xor_si128(c2[5250],simde_mm_xor_si128(c2[5960],simde_mm_xor_si128(c2[10214],simde_mm_xor_si128(c2[8813],simde_mm_xor_si128(c2[4581],simde_mm_xor_si128(c2[10955],simde_mm_xor_si128(c2[4614],simde_mm_xor_si128(c2[4623],simde_mm_xor_si128(c2[6752],simde_mm_xor_si128(c2[10280],simde_mm_xor_si128(c2[8173],simde_mm_xor_si128(c2[9606],simde_mm_xor_si128(c2[1166],simde_mm_xor_si128(c2[2573],simde_mm_xor_si128(c2[2604],simde_mm_xor_si128(c2[3304],simde_mm_xor_si128(c2[1198],simde_mm_xor_si128(c2[5441],simde_mm_xor_si128(c2[10376],simde_mm_xor_si128(c2[527],simde_mm_xor_si128(c2[8297],simde_mm_xor_si128(c2[4079],simde_mm_xor_si128(c2[8996],simde_mm_xor_si128(c2[10443],simde_mm_xor_si128(c2[9740],simde_mm_xor_si128(c2[3407],simde_mm_xor_si128(c2[2722],simde_mm_xor_si128(c2[11183],simde_mm_xor_si128(c2[8367],simde_mm_xor_si128(c2[11214],simde_mm_xor_si128(c2[4167],simde_mm_xor_si128(c2[649],simde_mm_xor_si128(c2[11233],simde_mm_xor_si128(c2[9121],c2[9120]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 1
+     d2[16]=simde_mm_xor_si128(c2[7055],simde_mm_xor_si128(c2[7759],simde_mm_xor_si128(c2[2112],simde_mm_xor_si128(c2[7750],simde_mm_xor_si128(c2[7047],simde_mm_xor_si128(c2[3556],simde_mm_xor_si128(c2[4260],simde_mm_xor_si128(c2[39],simde_mm_xor_si128(c2[7077],simde_mm_xor_si128(c2[1486],simde_mm_xor_si128(c2[2190],simde_mm_xor_si128(c2[79],simde_mm_xor_si128(c2[7115],simde_mm_xor_si128(c2[10665],simde_mm_xor_si128(c2[106],simde_mm_xor_si128(c2[4327],simde_mm_xor_si128(c2[3621],simde_mm_xor_si128(c2[9287],simde_mm_xor_si128(c2[132],simde_mm_xor_si128(c2[3649],simde_mm_xor_si128(c2[2982],simde_mm_xor_si128(c2[3686],simde_mm_xor_si128(c2[5796],simde_mm_xor_si128(c2[4391],simde_mm_xor_si128(c2[7232],simde_mm_xor_si128(c2[7936],simde_mm_xor_si128(c2[10053],simde_mm_xor_si128(c2[5129],simde_mm_xor_si128(c2[10797],simde_mm_xor_si128(c2[4462],simde_mm_xor_si128(c2[3048],simde_mm_xor_si128(c2[6598],simde_mm_xor_si128(c2[1675],simde_mm_xor_si128(c2[3087],simde_mm_xor_si128(c2[8035],simde_mm_xor_si128(c2[8739],simde_mm_xor_si128(c2[10154],simde_mm_xor_si128(c2[294],simde_mm_xor_si128(c2[3854],simde_mm_xor_si128(c2[4558],simde_mm_xor_si128(c2[5954],simde_mm_xor_si128(c2[6664],simde_mm_xor_si128(c2[10214],simde_mm_xor_si128(c2[10918],simde_mm_xor_si128(c2[9517],simde_mm_xor_si128(c2[5285],simde_mm_xor_si128(c2[10955],simde_mm_xor_si128(c2[396],simde_mm_xor_si128(c2[5318],simde_mm_xor_si128(c2[5327],simde_mm_xor_si128(c2[6752],simde_mm_xor_si128(c2[7456],simde_mm_xor_si128(c2[10984],simde_mm_xor_si128(c2[8877],simde_mm_xor_si128(c2[10310],simde_mm_xor_si128(c2[1870],simde_mm_xor_si128(c2[3277],simde_mm_xor_si128(c2[2604],simde_mm_xor_si128(c2[3308],simde_mm_xor_si128(c2[4008],simde_mm_xor_si128(c2[1902],simde_mm_xor_si128(c2[5441],simde_mm_xor_si128(c2[6145],simde_mm_xor_si128(c2[11080],simde_mm_xor_si128(c2[1231],simde_mm_xor_si128(c2[9001],simde_mm_xor_si128(c2[4783],simde_mm_xor_si128(c2[9700],simde_mm_xor_si128(c2[10443],simde_mm_xor_si128(c2[11147],simde_mm_xor_si128(c2[10444],simde_mm_xor_si128(c2[4111],simde_mm_xor_si128(c2[2722],simde_mm_xor_si128(c2[3426],simde_mm_xor_si128(c2[608],simde_mm_xor_si128(c2[9071],simde_mm_xor_si128(c2[11214],simde_mm_xor_si128(c2[655],simde_mm_xor_si128(c2[4871],simde_mm_xor_si128(c2[1353],simde_mm_xor_si128(c2[11233],simde_mm_xor_si128(c2[674],simde_mm_xor_si128(c2[9825],c2[9824])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 2
+     d2[32]=simde_mm_xor_si128(c2[7759],simde_mm_xor_si128(c2[2112],simde_mm_xor_si128(c2[7046],simde_mm_xor_si128(c2[7750],simde_mm_xor_si128(c2[6343],simde_mm_xor_si128(c2[7047],simde_mm_xor_si128(c2[4260],simde_mm_xor_si128(c2[10598],simde_mm_xor_si128(c2[39],simde_mm_xor_si128(c2[6373],simde_mm_xor_si128(c2[7077],simde_mm_xor_si128(c2[2190],simde_mm_xor_si128(c2[79],simde_mm_xor_si128(c2[6411],simde_mm_xor_si128(c2[7115],simde_mm_xor_si128(c2[106],simde_mm_xor_si128(c2[4327],simde_mm_xor_si128(c2[2917],simde_mm_xor_si128(c2[3621],simde_mm_xor_si128(c2[9287],simde_mm_xor_si128(c2[10691],simde_mm_xor_si128(c2[132],simde_mm_xor_si128(c2[2945],simde_mm_xor_si128(c2[3649],simde_mm_xor_si128(c2[3686],simde_mm_xor_si128(c2[5796],simde_mm_xor_si128(c2[3687],simde_mm_xor_si128(c2[4391],simde_mm_xor_si128(c2[7936],simde_mm_xor_si128(c2[9349],simde_mm_xor_si128(c2[10053],simde_mm_xor_si128(c2[4425],simde_mm_xor_si128(c2[5129],simde_mm_xor_si128(c2[10797],simde_mm_xor_si128(c2[3758],simde_mm_xor_si128(c2[4462],simde_mm_xor_si128(c2[2344],simde_mm_xor_si128(c2[3048],simde_mm_xor_si128(c2[6598],simde_mm_xor_si128(c2[971],simde_mm_xor_si128(c2[1675],simde_mm_xor_si128(c2[2383],simde_mm_xor_si128(c2[3087],simde_mm_xor_si128(c2[8739],simde_mm_xor_si128(c2[10154],simde_mm_xor_si128(c2[10853],simde_mm_xor_si128(c2[294],simde_mm_xor_si128(c2[4558],simde_mm_xor_si128(c2[5250],simde_mm_xor_si128(c2[5954],simde_mm_xor_si128(c2[5960],simde_mm_xor_si128(c2[6664],simde_mm_xor_si128(c2[10918],simde_mm_xor_si128(c2[9517],simde_mm_xor_si128(c2[4581],simde_mm_xor_si128(c2[5285],simde_mm_xor_si128(c2[396],simde_mm_xor_si128(c2[5318],simde_mm_xor_si128(c2[4623],simde_mm_xor_si128(c2[5327],simde_mm_xor_si128(c2[7456],simde_mm_xor_si128(c2[10280],simde_mm_xor_si128(c2[10984],simde_mm_xor_si128(c2[8173],simde_mm_xor_si128(c2[8877],simde_mm_xor_si128(c2[10310],simde_mm_xor_si128(c2[1166],simde_mm_xor_si128(c2[1870],simde_mm_xor_si128(c2[2573],simde_mm_xor_si128(c2[3277],simde_mm_xor_si128(c2[3308],simde_mm_xor_si128(c2[4008],simde_mm_xor_si128(c2[1198],simde_mm_xor_si128(c2[1902],simde_mm_xor_si128(c2[6145],simde_mm_xor_si128(c2[11080],simde_mm_xor_si128(c2[527],simde_mm_xor_si128(c2[1231],simde_mm_xor_si128(c2[9001],simde_mm_xor_si128(c2[4079],simde_mm_xor_si128(c2[4783],simde_mm_xor_si128(c2[8996],simde_mm_xor_si128(c2[9700],simde_mm_xor_si128(c2[11147],simde_mm_xor_si128(c2[9740],simde_mm_xor_si128(c2[10444],simde_mm_xor_si128(c2[3407],simde_mm_xor_si128(c2[4111],simde_mm_xor_si128(c2[3426],simde_mm_xor_si128(c2[608],simde_mm_xor_si128(c2[8367],simde_mm_xor_si128(c2[9071],simde_mm_xor_si128(c2[655],simde_mm_xor_si128(c2[4167],simde_mm_xor_si128(c2[4871],simde_mm_xor_si128(c2[649],simde_mm_xor_si128(c2[1353],simde_mm_xor_si128(c2[674],simde_mm_xor_si128(c2[9825],simde_mm_xor_si128(c2[9120],c2[9824]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 3
+     d2[48]=simde_mm_xor_si128(c2[7759],simde_mm_xor_si128(c2[2112],simde_mm_xor_si128(c2[7750],simde_mm_xor_si128(c2[6343],simde_mm_xor_si128(c2[7047],simde_mm_xor_si128(c2[4260],simde_mm_xor_si128(c2[39],simde_mm_xor_si128(c2[6373],simde_mm_xor_si128(c2[7077],simde_mm_xor_si128(c2[2190],simde_mm_xor_si128(c2[79],simde_mm_xor_si128(c2[7115],simde_mm_xor_si128(c2[106],simde_mm_xor_si128(c2[4327],simde_mm_xor_si128(c2[2917],simde_mm_xor_si128(c2[3621],simde_mm_xor_si128(c2[9287],simde_mm_xor_si128(c2[132],simde_mm_xor_si128(c2[2945],simde_mm_xor_si128(c2[3649],simde_mm_xor_si128(c2[3686],simde_mm_xor_si128(c2[5796],simde_mm_xor_si128(c2[4391],simde_mm_xor_si128(c2[7936],simde_mm_xor_si128(c2[10053],simde_mm_xor_si128(c2[4425],simde_mm_xor_si128(c2[5129],simde_mm_xor_si128(c2[10797],simde_mm_xor_si128(c2[4462],simde_mm_xor_si128(c2[2344],simde_mm_xor_si128(c2[3048],simde_mm_xor_si128(c2[6598],simde_mm_xor_si128(c2[1675],simde_mm_xor_si128(c2[2383],simde_mm_xor_si128(c2[3087],simde_mm_xor_si128(c2[8739],simde_mm_xor_si128(c2[10154],simde_mm_xor_si128(c2[294],simde_mm_xor_si128(c2[4558],simde_mm_xor_si128(c2[5954],simde_mm_xor_si128(c2[5960],simde_mm_xor_si128(c2[6664],simde_mm_xor_si128(c2[10918],simde_mm_xor_si128(c2[9517],simde_mm_xor_si128(c2[4581],simde_mm_xor_si128(c2[5285],simde_mm_xor_si128(c2[396],simde_mm_xor_si128(c2[5318],simde_mm_xor_si128(c2[4623],simde_mm_xor_si128(c2[5327],simde_mm_xor_si128(c2[7456],simde_mm_xor_si128(c2[10984],simde_mm_xor_si128(c2[8173],simde_mm_xor_si128(c2[8877],simde_mm_xor_si128(c2[10310],simde_mm_xor_si128(c2[1870],simde_mm_xor_si128(c2[2573],simde_mm_xor_si128(c2[3277],simde_mm_xor_si128(c2[3308],simde_mm_xor_si128(c2[4008],simde_mm_xor_si128(c2[1902],simde_mm_xor_si128(c2[6145],simde_mm_xor_si128(c2[11080],simde_mm_xor_si128(c2[527],simde_mm_xor_si128(c2[1231],simde_mm_xor_si128(c2[9001],simde_mm_xor_si128(c2[4783],simde_mm_xor_si128(c2[8996],simde_mm_xor_si128(c2[9700],simde_mm_xor_si128(c2[11147],simde_mm_xor_si128(c2[10444],simde_mm_xor_si128(c2[3407],simde_mm_xor_si128(c2[4111],simde_mm_xor_si128(c2[3426],simde_mm_xor_si128(c2[608],simde_mm_xor_si128(c2[9071],simde_mm_xor_si128(c2[655],simde_mm_xor_si128(c2[4871],simde_mm_xor_si128(c2[649],simde_mm_xor_si128(c2[1353],simde_mm_xor_si128(c2[674],simde_mm_xor_si128(c2[9825],simde_mm_xor_si128(c2[9120],c2[9824])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 4
+     d2[64]=simde_mm_xor_si128(c2[9161],c2[4262]);
+
+//row: 5
+     d2[80]=simde_mm_xor_si128(c2[9158],simde_mm_xor_si128(c2[3527],simde_mm_xor_si128(c2[9165],simde_mm_xor_si128(c2[8462],simde_mm_xor_si128(c2[9164],simde_mm_xor_si128(c2[5675],simde_mm_xor_si128(c2[1454],simde_mm_xor_si128(c2[8492],simde_mm_xor_si128(c2[8494],simde_mm_xor_si128(c2[3589],simde_mm_xor_si128(c2[1478],simde_mm_xor_si128(c2[8514],simde_mm_xor_si128(c2[1505],simde_mm_xor_si128(c2[5742],simde_mm_xor_si128(c2[5036],simde_mm_xor_si128(c2[1516],simde_mm_xor_si128(c2[10702],simde_mm_xor_si128(c2[1547],simde_mm_xor_si128(c2[5064],simde_mm_xor_si128(c2[5101],simde_mm_xor_si128(c2[7211],simde_mm_xor_si128(c2[5806],simde_mm_xor_si128(c2[9351],simde_mm_xor_si128(c2[205],simde_mm_xor_si128(c2[6528],simde_mm_xor_si128(c2[933],simde_mm_xor_si128(c2[5861],simde_mm_xor_si128(c2[4463],simde_mm_xor_si128(c2[8013],simde_mm_xor_si128(c2[3074],simde_mm_xor_si128(c2[4486],simde_mm_xor_si128(c2[10154],simde_mm_xor_si128(c2[290],simde_mm_xor_si128(c2[1709],simde_mm_xor_si128(c2[5957],simde_mm_xor_si128(c2[7369],simde_mm_xor_si128(c2[8079],simde_mm_xor_si128(c2[1070],simde_mm_xor_si128(c2[10916],simde_mm_xor_si128(c2[6700],simde_mm_xor_si128(c2[1795],simde_mm_xor_si128(c2[6733],simde_mm_xor_si128(c2[6726],simde_mm_xor_si128(c2[5326],simde_mm_xor_si128(c2[8871],simde_mm_xor_si128(c2[1120],simde_mm_xor_si128(c2[10276],simde_mm_xor_si128(c2[462],simde_mm_xor_si128(c2[3269],simde_mm_xor_si128(c2[4676],simde_mm_xor_si128(c2[4707],simde_mm_xor_si128(c2[5423],simde_mm_xor_si128(c2[3301],simde_mm_xor_si128(c2[7560],simde_mm_xor_si128(c2[1216],simde_mm_xor_si128(c2[2630],simde_mm_xor_si128(c2[8961],simde_mm_xor_si128(c2[10400],simde_mm_xor_si128(c2[6182],simde_mm_xor_si128(c2[11115],simde_mm_xor_si128(c2[1283],simde_mm_xor_si128(c2[580],simde_mm_xor_si128(c2[5510],simde_mm_xor_si128(c2[4841],simde_mm_xor_si128(c2[2023],simde_mm_xor_si128(c2[10470],simde_mm_xor_si128(c2[2054],simde_mm_xor_si128(c2[6286],simde_mm_xor_si128(c2[2752],simde_mm_xor_si128(c2[2089],simde_mm_xor_si128(c2[11240],simde_mm_xor_si128(c2[11239],c2[8423]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 6
+     d2[96]=simde_mm_xor_si128(c2[4939],simde_mm_xor_si128(c2[4417],simde_mm_xor_si128(c2[8769],simde_mm_xor_si128(c2[2468],simde_mm_xor_si128(c2[3247],simde_mm_xor_si128(c2[8288],simde_mm_xor_si128(c2[9737],c2[2765])))))));
+
+//row: 7
+     d2[112]=simde_mm_xor_si128(c2[8461],simde_mm_xor_si128(c2[8482],simde_mm_xor_si128(c2[10697],simde_mm_xor_si128(c2[10785],simde_mm_xor_si128(c2[5194],c2[6086])))));
+
+//row: 8
+     d2[128]=simde_mm_xor_si128(c2[7758],simde_mm_xor_si128(c2[3525],simde_mm_xor_si128(c2[2127],simde_mm_xor_si128(c2[9157],simde_mm_xor_si128(c2[7749],simde_mm_xor_si128(c2[2828],simde_mm_xor_si128(c2[3532],simde_mm_xor_si128(c2[7046],simde_mm_xor_si128(c2[2125],simde_mm_xor_si128(c2[2829],simde_mm_xor_si128(c2[7],simde_mm_xor_si128(c2[4259],simde_mm_xor_si128(c2[42],simde_mm_xor_si128(c2[38],simde_mm_xor_si128(c2[6380],simde_mm_xor_si128(c2[7084],simde_mm_xor_si128(c2[7076],simde_mm_xor_si128(c2[2155],simde_mm_xor_si128(c2[2859],simde_mm_xor_si128(c2[2848],simde_mm_xor_si128(c2[2189],simde_mm_xor_si128(c2[9219],simde_mm_xor_si128(c2[78],simde_mm_xor_si128(c2[7108],simde_mm_xor_si128(c2[7114],simde_mm_xor_si128(c2[2177],simde_mm_xor_si128(c2[2881],simde_mm_xor_si128(c2[105],simde_mm_xor_si128(c2[7151],simde_mm_xor_si128(c2[4326],simde_mm_xor_si128(c2[109],simde_mm_xor_si128(c2[3620],simde_mm_xor_si128(c2[9962],simde_mm_xor_si128(c2[10666],simde_mm_xor_si128(c2[5024],simde_mm_xor_si128(c2[9286],simde_mm_xor_si128(c2[5069],simde_mm_xor_si128(c2[131],simde_mm_xor_si128(c2[6473],simde_mm_xor_si128(c2[7177],simde_mm_xor_si128(c2[3648],simde_mm_xor_si128(c2[9990],simde_mm_xor_si128(c2[10694],simde_mm_xor_si128(c2[3685],simde_mm_xor_si128(c2[10731],simde_mm_xor_si128(c2[5795],simde_mm_xor_si128(c2[1578],simde_mm_xor_si128(c2[4390],simde_mm_xor_si128(c2[10732],simde_mm_xor_si128(c2[173],simde_mm_xor_si128(c2[7951],simde_mm_xor_si128(c2[3718],simde_mm_xor_si128(c2[10052],simde_mm_xor_si128(c2[5131],simde_mm_xor_si128(c2[5835],simde_mm_xor_si128(c2[5128],simde_mm_xor_si128(c2[207],simde_mm_xor_si128(c2[911],simde_mm_xor_si128(c2[10796],simde_mm_xor_si128(c2[6563],simde_mm_xor_si128(c2[4461],simde_mm_xor_si128(c2[10787],simde_mm_xor_si128(c2[228],simde_mm_xor_si128(c2[3047],simde_mm_xor_si128(c2[9389],simde_mm_xor_si128(c2[10093],simde_mm_xor_si128(c2[6597],simde_mm_xor_si128(c2[2380],simde_mm_xor_si128(c2[1674],simde_mm_xor_si128(c2[8000],simde_mm_xor_si128(c2[8704],simde_mm_xor_si128(c2[3086],simde_mm_xor_si128(c2[9412],simde_mm_xor_si128(c2[10116],simde_mm_xor_si128(c2[8738],simde_mm_xor_si128(c2[4521],simde_mm_xor_si128(c2[10153],simde_mm_xor_si128(c2[5920],simde_mm_xor_si128(c2[293],simde_mm_xor_si128(c2[6635],simde_mm_xor_si128(c2[7339],simde_mm_xor_si128(c2[4557],simde_mm_xor_si128(c2[324],simde_mm_xor_si128(c2[5953],simde_mm_xor_si128(c2[1032],simde_mm_xor_si128(c2[1736],simde_mm_xor_si128(c2[6663],simde_mm_xor_si128(c2[1742],simde_mm_xor_si128(c2[2446],simde_mm_xor_si128(c2[10917],simde_mm_xor_si128(c2[6700],simde_mm_xor_si128(c2[9516],simde_mm_xor_si128(c2[5283],simde_mm_xor_si128(c2[5284],simde_mm_xor_si128(c2[363],simde_mm_xor_si128(c2[1067],simde_mm_xor_si128(c2[395],simde_mm_xor_si128(c2[7425],simde_mm_xor_si128(c2[5317],simde_mm_xor_si128(c2[1100],simde_mm_xor_si128(c2[5326],simde_mm_xor_si128(c2[389],simde_mm_xor_si128(c2[1093],simde_mm_xor_si128(c2[2509],simde_mm_xor_si128(c2[7471],simde_mm_xor_si128(c2[3238],simde_mm_xor_si128(c2[10983],simde_mm_xor_si128(c2[6062],simde_mm_xor_si128(c2[6766],simde_mm_xor_si128(c2[8876],simde_mm_xor_si128(c2[3939],simde_mm_xor_si128(c2[4643],simde_mm_xor_si128(c2[10309],simde_mm_xor_si128(c2[6092],simde_mm_xor_si128(c2[1869],simde_mm_xor_si128(c2[8195],simde_mm_xor_si128(c2[8899],simde_mm_xor_si128(c2[3276],simde_mm_xor_si128(c2[9602],simde_mm_xor_si128(c2[10306],simde_mm_xor_si128(c2[3307],simde_mm_xor_si128(c2[10337],simde_mm_xor_si128(c2[4007],simde_mm_xor_si128(c2[11053],simde_mm_xor_si128(c2[1901],simde_mm_xor_si128(c2[8227],simde_mm_xor_si128(c2[8931],simde_mm_xor_si128(c2[6144],simde_mm_xor_si128(c2[1927],simde_mm_xor_si128(c2[11079],simde_mm_xor_si128(c2[6862],simde_mm_xor_si128(c2[1230],simde_mm_xor_si128(c2[7556],simde_mm_xor_si128(c2[8260],simde_mm_xor_si128(c2[4742],simde_mm_xor_si128(c2[9000],simde_mm_xor_si128(c2[4783],simde_mm_xor_si128(c2[4782],simde_mm_xor_si128(c2[11108],simde_mm_xor_si128(c2[549],simde_mm_xor_si128(c2[9699],simde_mm_xor_si128(c2[4778],simde_mm_xor_si128(c2[5482],simde_mm_xor_si128(c2[11146],simde_mm_xor_si128(c2[6913],simde_mm_xor_si128(c2[10443],simde_mm_xor_si128(c2[5506],simde_mm_xor_si128(c2[6210],simde_mm_xor_si128(c2[4110],simde_mm_xor_si128(c2[10436],simde_mm_xor_si128(c2[11140],simde_mm_xor_si128(c2[3425],simde_mm_xor_si128(c2[10471],simde_mm_xor_si128(c2[623],simde_mm_xor_si128(c2[7653],simde_mm_xor_si128(c2[9070],simde_mm_xor_si128(c2[4133],simde_mm_xor_si128(c2[4837],simde_mm_xor_si128(c2[3434],simde_mm_xor_si128(c2[654],simde_mm_xor_si128(c2[7684],simde_mm_xor_si128(c2[4870],simde_mm_xor_si128(c2[11212],simde_mm_xor_si128(c2[653],simde_mm_xor_si128(c2[1352],simde_mm_xor_si128(c2[7694],simde_mm_xor_si128(c2[8398],simde_mm_xor_si128(c2[673],simde_mm_xor_si128(c2[7719],simde_mm_xor_si128(c2[9824],simde_mm_xor_si128(c2[5607],simde_mm_xor_si128(c2[9839],simde_mm_xor_si128(c2[4902],simde_mm_xor_si128(c2[5606],c2[9830]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 9
+     d2[144]=simde_mm_xor_si128(c2[4934],simde_mm_xor_si128(c2[4267],simde_mm_xor_si128(c2[9478],simde_mm_xor_si128(c2[3873],simde_mm_xor_si128(c2[10280],simde_mm_xor_si128(c2[10400],simde_mm_xor_si128(c2[9731],c2[6285])))))));
+
+//row: 10
+     d2[160]=simde_mm_xor_si128(c2[1446],simde_mm_xor_si128(c2[3593],simde_mm_xor_si128(c2[5066],simde_mm_xor_si128(c2[234],simde_mm_xor_si128(c2[963],c2[7491])))));
+
+//row: 11
+     d2[176]=simde_mm_xor_si128(c2[4239],simde_mm_xor_si128(c2[1],simde_mm_xor_si128(c2[705],simde_mm_xor_si128(c2[9871],simde_mm_xor_si128(c2[6337],simde_mm_xor_si128(c2[4230],simde_mm_xor_si128(c2[712],simde_mm_xor_si128(c2[3527],simde_mm_xor_si128(c2[9],simde_mm_xor_si128(c2[9156],simde_mm_xor_si128(c2[740],simde_mm_xor_si128(c2[7781],simde_mm_xor_si128(c2[8485],simde_mm_xor_si128(c2[7782],simde_mm_xor_si128(c2[4264],simde_mm_xor_si128(c2[3557],simde_mm_xor_si128(c2[39],simde_mm_xor_si128(c2[6370],simde_mm_xor_si128(c2[9933],simde_mm_xor_si128(c2[5711],simde_mm_xor_si128(c2[6415],simde_mm_xor_si128(c2[7822],simde_mm_xor_si128(c2[4288],simde_mm_xor_si128(c2[3595],simde_mm_xor_si128(c2[77],simde_mm_xor_si128(c2[7849],simde_mm_xor_si128(c2[3627],simde_mm_xor_si128(c2[4331],simde_mm_xor_si128(c2[807],simde_mm_xor_si128(c2[8552],simde_mm_xor_si128(c2[101],simde_mm_xor_si128(c2[7846],simde_mm_xor_si128(c2[5767],simde_mm_xor_si128(c2[2249],simde_mm_xor_si128(c2[7875],simde_mm_xor_si128(c2[4357],simde_mm_xor_si128(c2[129],simde_mm_xor_si128(c2[7874],simde_mm_xor_si128(c2[166],simde_mm_xor_si128(c2[7207],simde_mm_xor_si128(c2[7911],simde_mm_xor_si128(c2[2276],simde_mm_xor_si128(c2[10021],simde_mm_xor_si128(c2[871],simde_mm_xor_si128(c2[8616],simde_mm_xor_si128(c2[4416],simde_mm_xor_si128(c2[194],simde_mm_xor_si128(c2[898],simde_mm_xor_si128(c2[6533],simde_mm_xor_si128(c2[3015],simde_mm_xor_si128(c2[1609],simde_mm_xor_si128(c2[9354],simde_mm_xor_si128(c2[7277],simde_mm_xor_si128(c2[3759],simde_mm_xor_si128(c2[942],simde_mm_xor_si128(c2[8687],simde_mm_xor_si128(c2[10791],simde_mm_xor_si128(c2[7273],simde_mm_xor_si128(c2[3078],simde_mm_xor_si128(c2[10823],simde_mm_xor_si128(c2[9418],simde_mm_xor_si128(c2[5900],simde_mm_xor_si128(c2[10830],simde_mm_xor_si128(c2[7296],simde_mm_xor_si128(c2[5219],simde_mm_xor_si128(c2[997],simde_mm_xor_si128(c2[1701],simde_mm_xor_si128(c2[6634],simde_mm_xor_si128(c2[3116],simde_mm_xor_si128(c2[8037],simde_mm_xor_si128(c2[4519],simde_mm_xor_si128(c2[1038],simde_mm_xor_si128(c2[8079],simde_mm_xor_si128(c2[8783],simde_mm_xor_si128(c2[2434],simde_mm_xor_si128(c2[10179],simde_mm_xor_si128(c2[3144],simde_mm_xor_si128(c2[10889],simde_mm_xor_si128(c2[7398],simde_mm_xor_si128(c2[3176],simde_mm_xor_si128(c2[3880],simde_mm_xor_si128(c2[5997],simde_mm_xor_si128(c2[2479],simde_mm_xor_si128(c2[1765],simde_mm_xor_si128(c2[9510],simde_mm_xor_si128(c2[8139],simde_mm_xor_si128(c2[3917],simde_mm_xor_si128(c2[4621],simde_mm_xor_si128(c2[1798],simde_mm_xor_si128(c2[9543],simde_mm_xor_si128(c2[1807],simde_mm_xor_si128(c2[9536],simde_mm_xor_si128(c2[2501],simde_mm_xor_si128(c2[3936],simde_mm_xor_si128(c2[10977],simde_mm_xor_si128(c2[418],simde_mm_xor_si128(c2[7464],simde_mm_xor_si128(c2[3946],simde_mm_xor_si128(c2[5357],simde_mm_xor_si128(c2[1839],simde_mm_xor_si128(c2[6790],simde_mm_xor_si128(c2[3272],simde_mm_xor_si128(c2[9613],simde_mm_xor_si128(c2[6095],simde_mm_xor_si128(c2[11020],simde_mm_xor_si128(c2[7502],simde_mm_xor_si128(c2[11051],simde_mm_xor_si128(c2[6829],simde_mm_xor_si128(c2[7533],simde_mm_xor_si128(c2[488],simde_mm_xor_si128(c2[8233],simde_mm_xor_si128(c2[9645],simde_mm_xor_si128(c2[6127],simde_mm_xor_si128(c2[2625],simde_mm_xor_si128(c2[9666],simde_mm_xor_si128(c2[10370],simde_mm_xor_si128(c2[7560],simde_mm_xor_si128(c2[4042],simde_mm_xor_si128(c2[8974],simde_mm_xor_si128(c2[5440],simde_mm_xor_si128(c2[4747],simde_mm_xor_si128(c2[5481],simde_mm_xor_si128(c2[1963],simde_mm_xor_si128(c2[1263],simde_mm_xor_si128(c2[8992],simde_mm_xor_si128(c2[6180],simde_mm_xor_si128(c2[2662],simde_mm_xor_si128(c2[7627],simde_mm_xor_si128(c2[3405],simde_mm_xor_si128(c2[4109],simde_mm_xor_si128(c2[6924],simde_mm_xor_si128(c2[3406],simde_mm_xor_si128(c2[591],simde_mm_xor_si128(c2[8320],simde_mm_xor_si128(c2[11169],simde_mm_xor_si128(c2[6947],simde_mm_xor_si128(c2[7651],simde_mm_xor_si128(c2[8367],simde_mm_xor_si128(c2[4833],simde_mm_xor_si128(c2[5551],simde_mm_xor_si128(c2[2017],simde_mm_xor_si128(c2[8398],simde_mm_xor_si128(c2[4160],simde_mm_xor_si128(c2[4864],simde_mm_xor_si128(c2[1351],simde_mm_xor_si128(c2[9096],simde_mm_xor_si128(c2[9096],simde_mm_xor_si128(c2[5578],simde_mm_xor_si128(c2[8417],simde_mm_xor_si128(c2[4195],simde_mm_xor_si128(c2[4899],simde_mm_xor_si128(c2[6305],simde_mm_xor_si128(c2[2787],simde_mm_xor_si128(c2[6304],simde_mm_xor_si128(c2[2786],c2[10532])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 12
+     d2[192]=simde_mm_xor_si128(c2[10],simde_mm_xor_si128(c2[7074],simde_mm_xor_si128(c2[3841],simde_mm_xor_si128(c2[354],simde_mm_xor_si128(c2[7470],c2[5504])))));
+
+//row: 13
+     d2[208]=simde_mm_xor_si128(c2[5635],simde_mm_xor_si128(c2[6339],simde_mm_xor_si128(c2[708],simde_mm_xor_si128(c2[6346],simde_mm_xor_si128(c2[5643],simde_mm_xor_si128(c2[715],simde_mm_xor_si128(c2[2152],simde_mm_xor_si128(c2[2856],simde_mm_xor_si128(c2[9898],simde_mm_xor_si128(c2[5673],simde_mm_xor_si128(c2[66],simde_mm_xor_si128(c2[770],simde_mm_xor_si128(c2[9922],simde_mm_xor_si128(c2[5711],simde_mm_xor_si128(c2[9261],simde_mm_xor_si128(c2[9965],simde_mm_xor_si128(c2[2923],simde_mm_xor_si128(c2[2217],simde_mm_xor_si128(c2[5743],simde_mm_xor_si128(c2[7883],simde_mm_xor_si128(c2[9991],simde_mm_xor_si128(c2[2245],simde_mm_xor_si128(c2[1578],simde_mm_xor_si128(c2[2282],simde_mm_xor_si128(c2[4392],simde_mm_xor_si128(c2[2987],simde_mm_xor_si128(c2[5828],simde_mm_xor_si128(c2[6532],simde_mm_xor_si128(c2[8649],simde_mm_xor_si128(c2[3725],simde_mm_xor_si128(c2[9377],simde_mm_xor_si128(c2[3042],simde_mm_xor_si128(c2[1644],simde_mm_xor_si128(c2[5161],simde_mm_xor_si128(c2[5194],simde_mm_xor_si128(c2[271],simde_mm_xor_si128(c2[1667],simde_mm_xor_si128(c2[6631],simde_mm_xor_si128(c2[7335],simde_mm_xor_si128(c2[8750],simde_mm_xor_si128(c2[10153],simde_mm_xor_si128(c2[2434],simde_mm_xor_si128(c2[3138],simde_mm_xor_si128(c2[4550],simde_mm_xor_si128(c2[5260],simde_mm_xor_si128(c2[8810],simde_mm_xor_si128(c2[9514],simde_mm_xor_si128(c2[8097],simde_mm_xor_si128(c2[3881],simde_mm_xor_si128(c2[9551],simde_mm_xor_si128(c2[10255],simde_mm_xor_si128(c2[3914],simde_mm_xor_si128(c2[3907],simde_mm_xor_si128(c2[5348],simde_mm_xor_si128(c2[6052],simde_mm_xor_si128(c2[9580],simde_mm_xor_si128(c2[7457],simde_mm_xor_si128(c2[8906],simde_mm_xor_si128(c2[450],simde_mm_xor_si128(c2[1857],simde_mm_xor_si128(c2[1184],simde_mm_xor_si128(c2[1888],simde_mm_xor_si128(c2[2604],simde_mm_xor_si128(c2[482],simde_mm_xor_si128(c2[4037],simde_mm_xor_si128(c2[4741],simde_mm_xor_si128(c2[9676],simde_mm_xor_si128(c2[11074],simde_mm_xor_si128(c2[7597],simde_mm_xor_si128(c2[3363],simde_mm_xor_si128(c2[8296],simde_mm_xor_si128(c2[9039],simde_mm_xor_si128(c2[9743],simde_mm_xor_si128(c2[9024],simde_mm_xor_si128(c2[2691],simde_mm_xor_si128(c2[1318],simde_mm_xor_si128(c2[2022],simde_mm_xor_si128(c2[10467],simde_mm_xor_si128(c2[7651],simde_mm_xor_si128(c2[9794],simde_mm_xor_si128(c2[10498],simde_mm_xor_si128(c2[3467],simde_mm_xor_si128(c2[11212],simde_mm_xor_si128(c2[6987],simde_mm_xor_si128(c2[9829],simde_mm_xor_si128(c2[10533],simde_mm_xor_si128(c2[8421],c2[8420])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 14
+     d2[224]=simde_mm_xor_si128(c2[9868],simde_mm_xor_si128(c2[5315],simde_mm_xor_si128(c2[10348],simde_mm_xor_si128(c2[11079],simde_mm_xor_si128(c2[545],c2[4206])))));
+
+//row: 15
+     d2[240]=simde_mm_xor_si128(c2[9866],simde_mm_xor_si128(c2[4235],simde_mm_xor_si128(c2[9857],simde_mm_xor_si128(c2[8450],simde_mm_xor_si128(c2[9154],simde_mm_xor_si128(c2[5634],simde_mm_xor_si128(c2[6383],simde_mm_xor_si128(c2[2146],simde_mm_xor_si128(c2[8480],simde_mm_xor_si128(c2[9184],simde_mm_xor_si128(c2[38],simde_mm_xor_si128(c2[4297],simde_mm_xor_si128(c2[2186],simde_mm_xor_si128(c2[9222],simde_mm_xor_si128(c2[2213],simde_mm_xor_si128(c2[6434],simde_mm_xor_si128(c2[5024],simde_mm_xor_si128(c2[5728],simde_mm_xor_si128(c2[131],simde_mm_xor_si128(c2[2255],simde_mm_xor_si128(c2[5068],simde_mm_xor_si128(c2[5772],simde_mm_xor_si128(c2[5793],simde_mm_xor_si128(c2[7919],simde_mm_xor_si128(c2[6498],simde_mm_xor_si128(c2[10059],simde_mm_xor_si128(c2[897],simde_mm_xor_si128(c2[6532],simde_mm_xor_si128(c2[7236],simde_mm_xor_si128(c2[1641],simde_mm_xor_si128(c2[6569],simde_mm_xor_si128(c2[4451],simde_mm_xor_si128(c2[5155],simde_mm_xor_si128(c2[8705],simde_mm_xor_si128(c2[3782],simde_mm_xor_si128(c2[4490],simde_mm_xor_si128(c2[5194],simde_mm_xor_si128(c2[10862],simde_mm_xor_si128(c2[998],simde_mm_xor_si128(c2[2401],simde_mm_xor_si128(c2[6665],simde_mm_xor_si128(c2[8077],simde_mm_xor_si128(c2[8067],simde_mm_xor_si128(c2[8771],simde_mm_xor_si128(c2[1028],simde_mm_xor_si128(c2[1762],simde_mm_xor_si128(c2[361],simde_mm_xor_si128(c2[6688],simde_mm_xor_si128(c2[7392],simde_mm_xor_si128(c2[2503],simde_mm_xor_si128(c2[7425],simde_mm_xor_si128(c2[6730],simde_mm_xor_si128(c2[7434],simde_mm_xor_si128(c2[9579],simde_mm_xor_si128(c2[1828],simde_mm_xor_si128(c2[10280],simde_mm_xor_si128(c2[10984],simde_mm_xor_si128(c2[10979],simde_mm_xor_si128(c2[1154],simde_mm_xor_si128(c2[3977],simde_mm_xor_si128(c2[4680],simde_mm_xor_si128(c2[5384],simde_mm_xor_si128(c2[5415],simde_mm_xor_si128(c2[6115],simde_mm_xor_si128(c2[4009],simde_mm_xor_si128(c2[8268],simde_mm_xor_si128(c2[1924],simde_mm_xor_si128(c2[2634],simde_mm_xor_si128(c2[3338],simde_mm_xor_si128(c2[11108],simde_mm_xor_si128(c2[6890],simde_mm_xor_si128(c2[11119],simde_mm_xor_si128(c2[544],simde_mm_xor_si128(c2[1991],simde_mm_xor_si128(c2[1288],simde_mm_xor_si128(c2[5514],simde_mm_xor_si128(c2[6218],simde_mm_xor_si128(c2[8324],simde_mm_xor_si128(c2[5549],simde_mm_xor_si128(c2[2731],simde_mm_xor_si128(c2[11178],simde_mm_xor_si128(c2[2762],simde_mm_xor_si128(c2[6978],simde_mm_xor_si128(c2[2756],simde_mm_xor_si128(c2[3460],simde_mm_xor_si128(c2[2797],simde_mm_xor_si128(c2[685],simde_mm_xor_si128(c2[11243],c2[684]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 16
+     d2[256]=simde_mm_xor_si128(c2[2825],simde_mm_xor_si128(c2[8457],simde_mm_xor_si128(c2[2816],simde_mm_xor_si128(c2[2113],simde_mm_xor_si128(c2[10605],simde_mm_xor_si128(c2[6368],simde_mm_xor_si128(c2[2159],simde_mm_xor_si128(c2[36],simde_mm_xor_si128(c2[8519],simde_mm_xor_si128(c2[6408],simde_mm_xor_si128(c2[2181],simde_mm_xor_si128(c2[6435],simde_mm_xor_si128(c2[10656],simde_mm_xor_si128(c2[9966],simde_mm_xor_si128(c2[803],simde_mm_xor_si128(c2[4353],simde_mm_xor_si128(c2[6477],simde_mm_xor_si128(c2[9994],simde_mm_xor_si128(c2[10031],simde_mm_xor_si128(c2[878],simde_mm_xor_si128(c2[10720],simde_mm_xor_si128(c2[3018],simde_mm_xor_si128(c2[5135],simde_mm_xor_si128(c2[195],simde_mm_xor_si128(c2[5863],simde_mm_xor_si128(c2[10791],simde_mm_xor_si128(c2[9377],simde_mm_xor_si128(c2[1664],simde_mm_xor_si128(c2[8004],simde_mm_xor_si128(c2[9416],simde_mm_xor_si128(c2[3821],simde_mm_xor_si128(c2[5220],simde_mm_xor_si128(c2[6639],simde_mm_xor_si128(c2[10887],simde_mm_xor_si128(c2[1036],simde_mm_xor_si128(c2[1730],simde_mm_xor_si128(c2[5984],simde_mm_xor_si128(c2[4583],simde_mm_xor_si128(c2[367],simde_mm_xor_si128(c2[1059],simde_mm_xor_si128(c2[6725],simde_mm_xor_si128(c2[384],simde_mm_xor_si128(c2[393],simde_mm_xor_si128(c2[2538],simde_mm_xor_si128(c2[6050],simde_mm_xor_si128(c2[3943],simde_mm_xor_si128(c2[5376],simde_mm_xor_si128(c2[8199],simde_mm_xor_si128(c2[9606],simde_mm_xor_si128(c2[9637],simde_mm_xor_si128(c2[10337],simde_mm_xor_si128(c2[8231],simde_mm_xor_si128(c2[1227],simde_mm_xor_si128(c2[6146],simde_mm_xor_si128(c2[7560],simde_mm_xor_si128(c2[4067],simde_mm_xor_si128(c2[11112],simde_mm_xor_si128(c2[4782],simde_mm_xor_si128(c2[6213],simde_mm_xor_si128(c2[5510],simde_mm_xor_si128(c2[10440],simde_mm_xor_si128(c2[9771],simde_mm_xor_si128(c2[6953],simde_mm_xor_si128(c2[4137],simde_mm_xor_si128(c2[6984],simde_mm_xor_si128(c2[11200],simde_mm_xor_si128(c2[7682],simde_mm_xor_si128(c2[2755],simde_mm_xor_si128(c2[7019],simde_mm_xor_si128(c2[4907],c2[4906]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 17
+     d2[272]=simde_mm_xor_si128(c2[4928],simde_mm_xor_si128(c2[3274],simde_mm_xor_si128(c2[8259],simde_mm_xor_si128(c2[1248],c2[681]))));
+
+//row: 18
+     d2[288]=simde_mm_xor_si128(c2[7074],simde_mm_xor_si128(c2[6734],simde_mm_xor_si128(c2[6048],simde_mm_xor_si128(c2[8329],c2[2729]))));
+
+//row: 19
+     d2[304]=simde_mm_xor_si128(c2[8451],simde_mm_xor_si128(c2[6372],simde_mm_xor_si128(c2[5860],simde_mm_xor_si128(c2[10823],c2[334]))));
+
+//row: 20
+     d2[320]=simde_mm_xor_si128(c2[7049],simde_mm_xor_si128(c2[1418],simde_mm_xor_si128(c2[7040],simde_mm_xor_si128(c2[6337],simde_mm_xor_si128(c2[4937],simde_mm_xor_si128(c2[3566],simde_mm_xor_si128(c2[10592],simde_mm_xor_si128(c2[6383],simde_mm_xor_si128(c2[1480],simde_mm_xor_si128(c2[10632],simde_mm_xor_si128(c2[6405],simde_mm_xor_si128(c2[10659],simde_mm_xor_si128(c2[3617],simde_mm_xor_si128(c2[2927],simde_mm_xor_si128(c2[7147],simde_mm_xor_si128(c2[8577],simde_mm_xor_si128(c2[10701],simde_mm_xor_si128(c2[2955],simde_mm_xor_si128(c2[2976],simde_mm_xor_si128(c2[5102],simde_mm_xor_si128(c2[3681],simde_mm_xor_si128(c2[7242],simde_mm_xor_si128(c2[9359],simde_mm_xor_si128(c2[4419],simde_mm_xor_si128(c2[10087],simde_mm_xor_si128(c2[3752],simde_mm_xor_si128(c2[2338],simde_mm_xor_si128(c2[5888],simde_mm_xor_si128(c2[965],simde_mm_xor_si128(c2[2377],simde_mm_xor_si128(c2[8045],simde_mm_xor_si128(c2[9444],simde_mm_xor_si128(c2[10863],simde_mm_xor_si128(c2[6637],simde_mm_xor_si128(c2[3848],simde_mm_xor_si128(c2[5260],simde_mm_xor_si128(c2[5954],simde_mm_xor_si128(c2[10208],simde_mm_xor_si128(c2[8807],simde_mm_xor_si128(c2[4591],simde_mm_xor_si128(c2[10914],simde_mm_xor_si128(c2[10949],simde_mm_xor_si128(c2[4608],simde_mm_xor_si128(c2[4617],simde_mm_xor_si128(c2[6762],simde_mm_xor_si128(c2[10274],simde_mm_xor_si128(c2[8167],simde_mm_xor_si128(c2[9600],simde_mm_xor_si128(c2[1160],simde_mm_xor_si128(c2[2567],simde_mm_xor_si128(c2[2598],simde_mm_xor_si128(c2[3298],simde_mm_xor_si128(c2[1192],simde_mm_xor_si128(c2[5451],simde_mm_xor_si128(c2[10370],simde_mm_xor_si128(c2[521],simde_mm_xor_si128(c2[8291],simde_mm_xor_si128(c2[4073],simde_mm_xor_si128(c2[9006],simde_mm_xor_si128(c2[10437],simde_mm_xor_si128(c2[9734],simde_mm_xor_si128(c2[3401],simde_mm_xor_si128(c2[2732],simde_mm_xor_si128(c2[11177],simde_mm_xor_si128(c2[8361],simde_mm_xor_si128(c2[11208],simde_mm_xor_si128(c2[4161],simde_mm_xor_si128(c2[643],simde_mm_xor_si128(c2[11243],simde_mm_xor_si128(c2[9131],c2[9130]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 21
+     d2[336]=simde_mm_xor_si128(c2[6383],simde_mm_xor_si128(c2[6503],simde_mm_xor_si128(c2[9670],simde_mm_xor_si128(c2[2760],c2[8426]))));
+
+//row: 22
+     d2[352]=simde_mm_xor_si128(c2[4],simde_mm_xor_si128(c2[10248],simde_mm_xor_si128(c2[8875],c2[10409])));
+
+//row: 23
+     d2[368]=simde_mm_xor_si128(c2[8489],simde_mm_xor_si128(c2[2185],simde_mm_xor_si128(c2[7370],c2[6217])));
+
+//row: 24
+     d2[384]=simde_mm_xor_si128(c2[5645],simde_mm_xor_si128(c2[14],simde_mm_xor_si128(c2[5636],simde_mm_xor_si128(c2[4933],simde_mm_xor_si128(c2[7],simde_mm_xor_si128(c2[2146],simde_mm_xor_si128(c2[9188],simde_mm_xor_si128(c2[4963],simde_mm_xor_si128(c2[76],simde_mm_xor_si128(c2[9228],simde_mm_xor_si128(c2[5001],simde_mm_xor_si128(c2[9255],simde_mm_xor_si128(c2[2213],simde_mm_xor_si128(c2[1507],simde_mm_xor_si128(c2[4325],simde_mm_xor_si128(c2[7173],simde_mm_xor_si128(c2[9281],simde_mm_xor_si128(c2[1551],simde_mm_xor_si128(c2[8590],simde_mm_xor_si128(c2[1572],simde_mm_xor_si128(c2[3682],simde_mm_xor_si128(c2[2277],simde_mm_xor_si128(c2[5838],simde_mm_xor_si128(c2[7939],simde_mm_xor_si128(c2[3015],simde_mm_xor_si128(c2[8683],simde_mm_xor_si128(c2[2348],simde_mm_xor_si128(c2[934],simde_mm_xor_si128(c2[4484],simde_mm_xor_si128(c2[10824],simde_mm_xor_si128(c2[973],simde_mm_xor_si128(c2[6625],simde_mm_xor_si128(c2[8040],simde_mm_xor_si128(c2[9443],simde_mm_xor_si128(c2[2444],simde_mm_xor_si128(c2[3840],simde_mm_xor_si128(c2[4550],simde_mm_xor_si128(c2[8804],simde_mm_xor_si128(c2[7403],simde_mm_xor_si128(c2[3171],simde_mm_xor_si128(c2[3175],simde_mm_xor_si128(c2[9545],simde_mm_xor_si128(c2[3204],simde_mm_xor_si128(c2[3213],simde_mm_xor_si128(c2[5358],simde_mm_xor_si128(c2[8870],simde_mm_xor_si128(c2[6763],simde_mm_xor_si128(c2[8196],simde_mm_xor_si128(c2[11019],simde_mm_xor_si128(c2[1163],simde_mm_xor_si128(c2[1194],simde_mm_xor_si128(c2[1894],simde_mm_xor_si128(c2[11051],simde_mm_xor_si128(c2[4047],simde_mm_xor_si128(c2[8966],simde_mm_xor_si128(c2[10380],simde_mm_xor_si128(c2[6887],simde_mm_xor_si128(c2[2669],simde_mm_xor_si128(c2[7586],simde_mm_xor_si128(c2[9033],simde_mm_xor_si128(c2[8330],simde_mm_xor_si128(c2[1997],simde_mm_xor_si128(c2[1312],simde_mm_xor_si128(c2[9773],simde_mm_xor_si128(c2[6957],simde_mm_xor_si128(c2[9804],simde_mm_xor_si128(c2[2757],simde_mm_xor_si128(c2[10502],simde_mm_xor_si128(c2[9839],simde_mm_xor_si128(c2[7727],c2[7726]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 25
+     d2[400]=simde_mm_xor_si128(c2[4961],simde_mm_xor_si128(c2[5832],simde_mm_xor_si128(c2[3047],c2[4683])));
+
+//row: 26
+     d2[416]=simde_mm_xor_si128(c2[2124],simde_mm_xor_si128(c2[2191],simde_mm_xor_si128(c2[5069],c2[9635])));
+
+//row: 27
+     d2[432]=simde_mm_xor_si128(c2[6369],simde_mm_xor_si128(c2[5830],c2[1676]));
+
+//row: 28
+     d2[448]=simde_mm_xor_si128(c2[8],simde_mm_xor_si128(c2[3658],simde_mm_xor_si128(c2[4139],c2[11235])));
+
+//row: 29
+     d2[464]=simde_mm_xor_si128(c2[704],simde_mm_xor_si128(c2[6336],simde_mm_xor_si128(c2[711],simde_mm_xor_si128(c2[10567],simde_mm_xor_si128(c2[8],simde_mm_xor_si128(c2[8484],simde_mm_xor_si128(c2[4263],simde_mm_xor_si128(c2[10597],simde_mm_xor_si128(c2[38],simde_mm_xor_si128(c2[4261],simde_mm_xor_si128(c2[6414],simde_mm_xor_si128(c2[4303],simde_mm_xor_si128(c2[76],simde_mm_xor_si128(c2[4330],simde_mm_xor_si128(c2[8551],simde_mm_xor_si128(c2[7141],simde_mm_xor_si128(c2[7845],simde_mm_xor_si128(c2[2248],simde_mm_xor_si128(c2[4356],simde_mm_xor_si128(c2[7169],simde_mm_xor_si128(c2[7873],simde_mm_xor_si128(c2[7910],simde_mm_xor_si128(c2[10020],simde_mm_xor_si128(c2[8615],simde_mm_xor_si128(c2[897],simde_mm_xor_si128(c2[3014],simde_mm_xor_si128(c2[8649],simde_mm_xor_si128(c2[9353],simde_mm_xor_si128(c2[3758],simde_mm_xor_si128(c2[8686],simde_mm_xor_si128(c2[6568],simde_mm_xor_si128(c2[7272],simde_mm_xor_si128(c2[10822],simde_mm_xor_si128(c2[5899],simde_mm_xor_si128(c2[6607],simde_mm_xor_si128(c2[7311],simde_mm_xor_si128(c2[1700],simde_mm_xor_si128(c2[3115],simde_mm_xor_si128(c2[4518],simde_mm_xor_si128(c2[8782],simde_mm_xor_si128(c2[10178],simde_mm_xor_si128(c2[10184],simde_mm_xor_si128(c2[10888],simde_mm_xor_si128(c2[3879],simde_mm_xor_si128(c2[2478],simde_mm_xor_si128(c2[8805],simde_mm_xor_si128(c2[9509],simde_mm_xor_si128(c2[4620],simde_mm_xor_si128(c2[9542],simde_mm_xor_si128(c2[8847],simde_mm_xor_si128(c2[9551],simde_mm_xor_si128(c2[417],simde_mm_xor_si128(c2[3945],simde_mm_xor_si128(c2[1134],simde_mm_xor_si128(c2[1838],simde_mm_xor_si128(c2[3271],simde_mm_xor_si128(c2[6094],simde_mm_xor_si128(c2[6797],simde_mm_xor_si128(c2[7501],simde_mm_xor_si128(c2[8910],simde_mm_xor_si128(c2[7532],simde_mm_xor_si128(c2[8232],simde_mm_xor_si128(c2[6126],simde_mm_xor_si128(c2[10369],simde_mm_xor_si128(c2[4041],simde_mm_xor_si128(c2[4751],simde_mm_xor_si128(c2[5455],simde_mm_xor_si128(c2[1962],simde_mm_xor_si128(c2[9007],simde_mm_xor_si128(c2[1957],simde_mm_xor_si128(c2[2661],simde_mm_xor_si128(c2[4108],simde_mm_xor_si128(c2[3405],simde_mm_xor_si128(c2[7631],simde_mm_xor_si128(c2[8335],simde_mm_xor_si128(c2[3397],simde_mm_xor_si128(c2[7650],simde_mm_xor_si128(c2[4832],simde_mm_xor_si128(c2[2016],simde_mm_xor_si128(c2[4879],simde_mm_xor_si128(c2[9095],simde_mm_xor_si128(c2[4873],simde_mm_xor_si128(c2[5577],simde_mm_xor_si128(c2[4898],simde_mm_xor_si128(c2[2786],simde_mm_xor_si128(c2[2081],c2[2785]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 30
+     d2[480]=simde_mm_xor_si128(c2[2816],simde_mm_xor_si128(c2[8448],simde_mm_xor_si128(c2[2119],simde_mm_xor_si128(c2[2823],simde_mm_xor_si128(c2[1416],simde_mm_xor_si128(c2[2120],simde_mm_xor_si128(c2[5645],simde_mm_xor_si128(c2[10596],simde_mm_xor_si128(c2[5671],simde_mm_xor_si128(c2[6375],simde_mm_xor_si128(c2[1446],simde_mm_xor_si128(c2[2150],simde_mm_xor_si128(c2[8526],simde_mm_xor_si128(c2[6415],simde_mm_xor_si128(c2[1484],simde_mm_xor_si128(c2[2188],simde_mm_xor_si128(c2[6442],simde_mm_xor_si128(c2[10663],simde_mm_xor_si128(c2[9253],simde_mm_xor_si128(c2[9957],simde_mm_xor_si128(c2[4360],simde_mm_xor_si128(c2[5764],simde_mm_xor_si128(c2[6468],simde_mm_xor_si128(c2[9281],simde_mm_xor_si128(c2[9985],simde_mm_xor_si128(c2[10022],simde_mm_xor_si128(c2[869],simde_mm_xor_si128(c2[10023],simde_mm_xor_si128(c2[10727],simde_mm_xor_si128(c2[3009],simde_mm_xor_si128(c2[4422],simde_mm_xor_si128(c2[5126],simde_mm_xor_si128(c2[10761],simde_mm_xor_si128(c2[202],simde_mm_xor_si128(c2[5870],simde_mm_xor_si128(c2[10094],simde_mm_xor_si128(c2[10798],simde_mm_xor_si128(c2[8680],simde_mm_xor_si128(c2[9384],simde_mm_xor_si128(c2[1671],simde_mm_xor_si128(c2[7307],simde_mm_xor_si128(c2[8011],simde_mm_xor_si128(c2[8719],simde_mm_xor_si128(c2[9423],simde_mm_xor_si128(c2[3812],simde_mm_xor_si128(c2[5227],simde_mm_xor_si128(c2[5926],simde_mm_xor_si128(c2[6630],simde_mm_xor_si128(c2[10894],simde_mm_xor_si128(c2[323],simde_mm_xor_si128(c2[1027],simde_mm_xor_si128(c2[1033],simde_mm_xor_si128(c2[1737],simde_mm_xor_si128(c2[6660],simde_mm_xor_si128(c2[5991],simde_mm_xor_si128(c2[4590],simde_mm_xor_si128(c2[10917],simde_mm_xor_si128(c2[358],simde_mm_xor_si128(c2[6732],simde_mm_xor_si128(c2[391],simde_mm_xor_si128(c2[10959],simde_mm_xor_si128(c2[384],simde_mm_xor_si128(c2[2529],simde_mm_xor_si128(c2[5353],simde_mm_xor_si128(c2[6057],simde_mm_xor_si128(c2[3246],simde_mm_xor_si128(c2[3950],simde_mm_xor_si128(c2[6055],simde_mm_xor_si128(c2[5383],simde_mm_xor_si128(c2[7502],simde_mm_xor_si128(c2[8206],simde_mm_xor_si128(c2[8909],simde_mm_xor_si128(c2[9613],simde_mm_xor_si128(c2[9644],simde_mm_xor_si128(c2[10344],simde_mm_xor_si128(c2[7534],simde_mm_xor_si128(c2[8238],simde_mm_xor_si128(c2[1218],simde_mm_xor_si128(c2[6153],simde_mm_xor_si128(c2[6863],simde_mm_xor_si128(c2[7567],simde_mm_xor_si128(c2[4074],simde_mm_xor_si128(c2[10415],simde_mm_xor_si128(c2[11119],simde_mm_xor_si128(c2[4069],simde_mm_xor_si128(c2[4773],simde_mm_xor_si128(c2[6220],simde_mm_xor_si128(c2[4813],simde_mm_xor_si128(c2[5517],simde_mm_xor_si128(c2[9743],simde_mm_xor_si128(c2[10447],simde_mm_xor_si128(c2[9762],simde_mm_xor_si128(c2[6944],simde_mm_xor_si128(c2[3424],simde_mm_xor_si128(c2[4128],simde_mm_xor_si128(c2[6991],simde_mm_xor_si128(c2[10503],simde_mm_xor_si128(c2[11207],simde_mm_xor_si128(c2[6985],simde_mm_xor_si128(c2[7689],simde_mm_xor_si128(c2[7010],simde_mm_xor_si128(c2[4898],simde_mm_xor_si128(c2[4193],c2[4897])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 31
+     d2[496]=simde_mm_xor_si128(c2[4234],simde_mm_xor_si128(c2[5635],simde_mm_xor_si128(c2[9866],simde_mm_xor_si128(c2[4],simde_mm_xor_si128(c2[4225],simde_mm_xor_si128(c2[5642],simde_mm_xor_si128(c2[3522],simde_mm_xor_si128(c2[4235],simde_mm_xor_si128(c2[4939],simde_mm_xor_si128(c2[751],simde_mm_xor_si128(c2[2152],simde_mm_xor_si128(c2[7777],simde_mm_xor_si128(c2[9194],simde_mm_xor_si128(c2[3552],simde_mm_xor_si128(c2[4265],simde_mm_xor_si128(c2[4969],simde_mm_xor_si128(c2[10597],simde_mm_xor_si128(c2[9928],simde_mm_xor_si128(c2[66],simde_mm_xor_si128(c2[7817],simde_mm_xor_si128(c2[9218],simde_mm_xor_si128(c2[3590],simde_mm_xor_si128(c2[5007],simde_mm_xor_si128(c2[7844],simde_mm_xor_si128(c2[9261],simde_mm_xor_si128(c2[802],simde_mm_xor_si128(c2[2219],simde_mm_xor_si128(c2[96],simde_mm_xor_si128(c2[809],simde_mm_xor_si128(c2[1513],simde_mm_xor_si128(c2[5762],simde_mm_xor_si128(c2[7179],simde_mm_xor_si128(c2[7886],simde_mm_xor_si128(c2[9287],simde_mm_xor_si128(c2[140],simde_mm_xor_si128(c2[837],simde_mm_xor_si128(c2[1541],simde_mm_xor_si128(c2[161],simde_mm_xor_si128(c2[1578],simde_mm_xor_si128(c2[2287],simde_mm_xor_si128(c2[3688],simde_mm_xor_si128(c2[866],simde_mm_xor_si128(c2[2283],simde_mm_xor_si128(c2[4427],simde_mm_xor_si128(c2[5828],simde_mm_xor_si128(c2[6528],simde_mm_xor_si128(c2[7945],simde_mm_xor_si128(c2[1604],simde_mm_xor_si128(c2[2317],simde_mm_xor_si128(c2[3021],simde_mm_xor_si128(c2[7272],simde_mm_xor_si128(c2[8673],simde_mm_xor_si128(c2[937],simde_mm_xor_si128(c2[2338],simde_mm_xor_si128(c2[10786],simde_mm_xor_si128(c2[236],simde_mm_xor_si128(c2[940],simde_mm_xor_si128(c2[939],simde_mm_xor_si128(c2[3073],simde_mm_xor_si128(c2[4490],simde_mm_xor_si128(c2[9413],simde_mm_xor_si128(c2[10830],simde_mm_xor_si128(c2[10825],simde_mm_xor_si128(c2[259],simde_mm_xor_si128(c2[963],simde_mm_xor_si128(c2[5230],simde_mm_xor_si128(c2[6631],simde_mm_xor_si128(c2[6629],simde_mm_xor_si128(c2[8046],simde_mm_xor_si128(c2[8032],simde_mm_xor_si128(c2[9449],simde_mm_xor_si128(c2[1033],simde_mm_xor_si128(c2[2434],simde_mm_xor_si128(c2[2445],simde_mm_xor_si128(c2[3846],simde_mm_xor_si128(c2[3139],simde_mm_xor_si128(c2[3852],simde_mm_xor_si128(c2[4556],simde_mm_xor_si128(c2[7393],simde_mm_xor_si128(c2[8810],simde_mm_xor_si128(c2[5992],simde_mm_xor_si128(c2[7393],simde_mm_xor_si128(c2[1760],simde_mm_xor_si128(c2[2473],simde_mm_xor_si128(c2[3177],simde_mm_xor_si128(c2[8134],simde_mm_xor_si128(c2[9551],simde_mm_xor_si128(c2[1793],simde_mm_xor_si128(c2[3210],simde_mm_xor_si128(c2[1802],simde_mm_xor_si128(c2[2499],simde_mm_xor_si128(c2[3203],simde_mm_xor_si128(c2[3947],simde_mm_xor_si128(c2[5348],simde_mm_xor_si128(c2[7459],simde_mm_xor_si128(c2[8876],simde_mm_xor_si128(c2[5352],simde_mm_xor_si128(c2[6049],simde_mm_xor_si128(c2[6753],simde_mm_xor_si128(c2[6785],simde_mm_xor_si128(c2[8202],simde_mm_xor_si128(c2[9608],simde_mm_xor_si128(c2[11009],simde_mm_xor_si128(c2[11015],simde_mm_xor_si128(c2[449],simde_mm_xor_si128(c2[1153],simde_mm_xor_si128(c2[11046],simde_mm_xor_si128(c2[1184],simde_mm_xor_si128(c2[483],simde_mm_xor_si128(c2[1900],simde_mm_xor_si128(c2[9640],simde_mm_xor_si128(c2[11041],simde_mm_xor_si128(c2[2636],simde_mm_xor_si128(c2[4037],simde_mm_xor_si128(c2[7555],simde_mm_xor_si128(c2[8972],simde_mm_xor_si128(c2[8969],simde_mm_xor_si128(c2[9666],simde_mm_xor_si128(c2[10370],simde_mm_xor_si128(c2[5476],simde_mm_xor_si128(c2[6893],simde_mm_xor_si128(c2[1258],simde_mm_xor_si128(c2[2659],simde_mm_xor_si128(c2[6191],simde_mm_xor_si128(c2[6888],simde_mm_xor_si128(c2[7592],simde_mm_xor_si128(c2[7622],simde_mm_xor_si128(c2[9039],simde_mm_xor_si128(c2[6919],simde_mm_xor_si128(c2[8320],simde_mm_xor_si128(c2[586],simde_mm_xor_si128(c2[1283],simde_mm_xor_si128(c2[1987],simde_mm_xor_si128(c2[11180],simde_mm_xor_si128(c2[1318],simde_mm_xor_si128(c2[8362],simde_mm_xor_si128(c2[9763],simde_mm_xor_si128(c2[5546],simde_mm_xor_si128(c2[6947],simde_mm_xor_si128(c2[8393],simde_mm_xor_si128(c2[9794],simde_mm_xor_si128(c2[1346],simde_mm_xor_si128(c2[2763],simde_mm_xor_si128(c2[9091],simde_mm_xor_si128(c2[9804],simde_mm_xor_si128(c2[10508],simde_mm_xor_si128(c2[8428],simde_mm_xor_si128(c2[9829],simde_mm_xor_si128(c2[6316],simde_mm_xor_si128(c2[7717],simde_mm_xor_si128(c2[6315],simde_mm_xor_si128(c2[7012],c2[7716]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 32
+     d2[512]=simde_mm_xor_si128(c2[2823],simde_mm_xor_si128(c2[8455],simde_mm_xor_si128(c2[2126],simde_mm_xor_si128(c2[2830],simde_mm_xor_si128(c2[1423],simde_mm_xor_si128(c2[2127],simde_mm_xor_si128(c2[9165],simde_mm_xor_si128(c2[10603],simde_mm_xor_si128(c2[5678],simde_mm_xor_si128(c2[6382],simde_mm_xor_si128(c2[1453],simde_mm_xor_si128(c2[2157],simde_mm_xor_si128(c2[8517],simde_mm_xor_si128(c2[6406],simde_mm_xor_si128(c2[1475],simde_mm_xor_si128(c2[2179],simde_mm_xor_si128(c2[6433],simde_mm_xor_si128(c2[10670],simde_mm_xor_si128(c2[9260],simde_mm_xor_si128(c2[9964],simde_mm_xor_si128(c2[4367],simde_mm_xor_si128(c2[5771],simde_mm_xor_si128(c2[6475],simde_mm_xor_si128(c2[9288],simde_mm_xor_si128(c2[9992],simde_mm_xor_si128(c2[10029],simde_mm_xor_si128(c2[876],simde_mm_xor_si128(c2[10030],simde_mm_xor_si128(c2[10734],simde_mm_xor_si128(c2[3016],simde_mm_xor_si128(c2[4429],simde_mm_xor_si128(c2[5133],simde_mm_xor_si128(c2[10752],simde_mm_xor_si128(c2[193],simde_mm_xor_si128(c2[5861],simde_mm_xor_si128(c2[10085],simde_mm_xor_si128(c2[10789],simde_mm_xor_si128(c2[8687],simde_mm_xor_si128(c2[9391],simde_mm_xor_si128(c2[1678],simde_mm_xor_si128(c2[7298],simde_mm_xor_si128(c2[8002],simde_mm_xor_si128(c2[8710],simde_mm_xor_si128(c2[9414],simde_mm_xor_si128(c2[3819],simde_mm_xor_si128(c2[5218],simde_mm_xor_si128(c2[5933],simde_mm_xor_si128(c2[6637],simde_mm_xor_si128(c2[10885],simde_mm_xor_si128(c2[330],simde_mm_xor_si128(c2[1034],simde_mm_xor_si128(c2[1024],simde_mm_xor_si128(c2[1728],simde_mm_xor_si128(c2[5998],simde_mm_xor_si128(c2[4581],simde_mm_xor_si128(c2[10924],simde_mm_xor_si128(c2[365],simde_mm_xor_si128(c2[6723],simde_mm_xor_si128(c2[398],simde_mm_xor_si128(c2[10950],simde_mm_xor_si128(c2[391],simde_mm_xor_si128(c2[391],simde_mm_xor_si128(c2[2536],simde_mm_xor_si128(c2[5344],simde_mm_xor_si128(c2[6048],simde_mm_xor_si128(c2[3237],simde_mm_xor_si128(c2[3941],simde_mm_xor_si128(c2[5390],simde_mm_xor_si128(c2[7493],simde_mm_xor_si128(c2[8197],simde_mm_xor_si128(c2[8900],simde_mm_xor_si128(c2[9604],simde_mm_xor_si128(c2[5388],simde_mm_xor_si128(c2[9635],simde_mm_xor_si128(c2[10351],simde_mm_xor_si128(c2[7525],simde_mm_xor_si128(c2[8229],simde_mm_xor_si128(c2[1225],simde_mm_xor_si128(c2[6144],simde_mm_xor_si128(c2[6854],simde_mm_xor_si128(c2[7558],simde_mm_xor_si128(c2[4065],simde_mm_xor_si128(c2[10406],simde_mm_xor_si128(c2[11110],simde_mm_xor_si128(c2[4076],simde_mm_xor_si128(c2[4780],simde_mm_xor_si128(c2[6211],simde_mm_xor_si128(c2[4804],simde_mm_xor_si128(c2[5508],simde_mm_xor_si128(c2[9734],simde_mm_xor_si128(c2[10438],simde_mm_xor_si128(c2[9769],simde_mm_xor_si128(c2[6951],simde_mm_xor_si128(c2[3431],simde_mm_xor_si128(c2[4135],simde_mm_xor_si128(c2[6982],simde_mm_xor_si128(c2[10510],simde_mm_xor_si128(c2[11214],simde_mm_xor_si128(c2[6976],simde_mm_xor_si128(c2[7680],simde_mm_xor_si128(c2[7017],simde_mm_xor_si128(c2[4905],simde_mm_xor_si128(c2[4200],c2[4904])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 33
+     d2[528]=simde_mm_xor_si128(c2[1440],simde_mm_xor_si128(c2[7819],simde_mm_xor_si128(c2[6690],c2[2797])));
+
+//row: 34
+     d2[544]=simde_mm_xor_si128(c2[10567],simde_mm_xor_si128(c2[5162],simde_mm_xor_si128(c2[3306],c2[11113])));
+
+//row: 35
+     d2[560]=simde_mm_xor_si128(c2[710],simde_mm_xor_si128(c2[6342],simde_mm_xor_si128(c2[717],simde_mm_xor_si128(c2[14],simde_mm_xor_si128(c2[8490],simde_mm_xor_si128(c2[4269],simde_mm_xor_si128(c2[44],simde_mm_xor_si128(c2[746],simde_mm_xor_si128(c2[6404],simde_mm_xor_si128(c2[4293],simde_mm_xor_si128(c2[66],simde_mm_xor_si128(c2[4320],simde_mm_xor_si128(c2[8557],simde_mm_xor_si128(c2[7851],simde_mm_xor_si128(c2[2254],simde_mm_xor_si128(c2[4362],simde_mm_xor_si128(c2[7879],simde_mm_xor_si128(c2[7916],simde_mm_xor_si128(c2[10026],simde_mm_xor_si128(c2[8621],simde_mm_xor_si128(c2[903],simde_mm_xor_si128(c2[3020],simde_mm_xor_si128(c2[9359],simde_mm_xor_si128(c2[3724],simde_mm_xor_si128(c2[3748],simde_mm_xor_si128(c2[8676],simde_mm_xor_si128(c2[7278],simde_mm_xor_si128(c2[10828],simde_mm_xor_si128(c2[5889],simde_mm_xor_si128(c2[7301],simde_mm_xor_si128(c2[1706],simde_mm_xor_si128(c2[3105],simde_mm_xor_si128(c2[4524],simde_mm_xor_si128(c2[8772],simde_mm_xor_si128(c2[10184],simde_mm_xor_si128(c2[10894],simde_mm_xor_si128(c2[3885],simde_mm_xor_si128(c2[2468],simde_mm_xor_si128(c2[9515],simde_mm_xor_si128(c2[4610],simde_mm_xor_si128(c2[9548],simde_mm_xor_si128(c2[9541],simde_mm_xor_si128(c2[10956],simde_mm_xor_si128(c2[423],simde_mm_xor_si128(c2[3951],simde_mm_xor_si128(c2[1828],simde_mm_xor_si128(c2[3277],simde_mm_xor_si128(c2[6084],simde_mm_xor_si128(c2[7491],simde_mm_xor_si128(c2[7522],simde_mm_xor_si128(c2[8238],simde_mm_xor_si128(c2[6116],simde_mm_xor_si128(c2[10375],simde_mm_xor_si128(c2[4047],simde_mm_xor_si128(c2[5445],simde_mm_xor_si128(c2[1952],simde_mm_xor_si128(c2[8997],simde_mm_xor_si128(c2[2667],simde_mm_xor_si128(c2[4098],simde_mm_xor_si128(c2[3395],simde_mm_xor_si128(c2[8325],simde_mm_xor_si128(c2[7656],simde_mm_xor_si128(c2[4838],simde_mm_xor_si128(c2[2022],simde_mm_xor_si128(c2[4869],simde_mm_xor_si128(c2[9101],simde_mm_xor_si128(c2[5583],simde_mm_xor_si128(c2[4904],simde_mm_xor_si128(c2[2792],c2[2791])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 36
+     d2[576]=simde_mm_xor_si128(c2[3522],simde_mm_xor_si128(c2[6790],simde_mm_xor_si128(c2[2595],c2[6215])));
+
+//row: 37
+     d2[592]=simde_mm_xor_si128(c2[2823],simde_mm_xor_si128(c2[3527],simde_mm_xor_si128(c2[9159],simde_mm_xor_si128(c2[3534],simde_mm_xor_si128(c2[2831],simde_mm_xor_si128(c2[10603],simde_mm_xor_si128(c2[44],simde_mm_xor_si128(c2[7086],simde_mm_xor_si128(c2[2861],simde_mm_xor_si128(c2[4268],simde_mm_xor_si128(c2[8517],simde_mm_xor_si128(c2[9221],simde_mm_xor_si128(c2[7110],simde_mm_xor_si128(c2[2883],simde_mm_xor_si128(c2[6433],simde_mm_xor_si128(c2[7137],simde_mm_xor_si128(c2[111],simde_mm_xor_si128(c2[10668],simde_mm_xor_si128(c2[5071],simde_mm_xor_si128(c2[7179],simde_mm_xor_si128(c2[10696],simde_mm_xor_si128(c2[10029],simde_mm_xor_si128(c2[10733],simde_mm_xor_si128(c2[1580],simde_mm_xor_si128(c2[175],simde_mm_xor_si128(c2[3016],simde_mm_xor_si128(c2[3720],simde_mm_xor_si128(c2[5837],simde_mm_xor_si128(c2[897],simde_mm_xor_si128(c2[6565],simde_mm_xor_si128(c2[230],simde_mm_xor_si128(c2[10095],simde_mm_xor_si128(c2[2382],simde_mm_xor_si128(c2[8706],simde_mm_xor_si128(c2[10118],simde_mm_xor_si128(c2[3819],simde_mm_xor_si128(c2[4523],simde_mm_xor_si128(c2[5922],simde_mm_xor_si128(c2[7341],simde_mm_xor_si128(c2[10885],simde_mm_xor_si128(c2[326],simde_mm_xor_si128(c2[1738],simde_mm_xor_si128(c2[2432],simde_mm_xor_si128(c2[5998],simde_mm_xor_si128(c2[6702],simde_mm_xor_si128(c2[5285],simde_mm_xor_si128(c2[1069],simde_mm_xor_si128(c2[6723],simde_mm_xor_si128(c2[7427],simde_mm_xor_si128(c2[1102],simde_mm_xor_si128(c2[1095],simde_mm_xor_si128(c2[2536],simde_mm_xor_si128(c2[3240],simde_mm_xor_si128(c2[6752],simde_mm_xor_si128(c2[4645],simde_mm_xor_si128(c2[8877],simde_mm_xor_si128(c2[6094],simde_mm_xor_si128(c2[8901],simde_mm_xor_si128(c2[10308],simde_mm_xor_si128(c2[9635],simde_mm_xor_si128(c2[10339],simde_mm_xor_si128(c2[11055],simde_mm_xor_si128(c2[8933],simde_mm_xor_si128(c2[1225],simde_mm_xor_si128(c2[1929],simde_mm_xor_si128(c2[6848],simde_mm_xor_si128(c2[8262],simde_mm_xor_si128(c2[4769],simde_mm_xor_si128(c2[551],simde_mm_xor_si128(c2[5484],simde_mm_xor_si128(c2[6211],simde_mm_xor_si128(c2[6915],simde_mm_xor_si128(c2[6212],simde_mm_xor_si128(c2[11142],simde_mm_xor_si128(c2[9769],simde_mm_xor_si128(c2[10473],simde_mm_xor_si128(c2[7655],simde_mm_xor_si128(c2[4839],simde_mm_xor_si128(c2[6982],simde_mm_xor_si128(c2[7686],simde_mm_xor_si128(c2[655],simde_mm_xor_si128(c2[8384],simde_mm_xor_si128(c2[7017],simde_mm_xor_si128(c2[7721],simde_mm_xor_si128(c2[5609],c2[5608])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 38
+     d2[608]=simde_mm_xor_si128(c2[4938],simde_mm_xor_si128(c2[5225],simde_mm_xor_si128(c2[9481],c2[2506])));
+
+//row: 39
+     d2[624]=simde_mm_xor_si128(c2[9194],simde_mm_xor_si128(c2[7848],simde_mm_xor_si128(c2[3753],c2[608])));
+
+//row: 40
+     d2[640]=simde_mm_xor_si128(c2[9161],simde_mm_xor_si128(c2[6600],c2[4073]));
+
+//row: 41
+     d2[656]=simde_mm_xor_si128(c2[4970],simde_mm_xor_si128(c2[9258],simde_mm_xor_si128(c2[8040],c2[5513])));
+
+//row: 42
+     d2[672]=simde_mm_xor_si128(c2[2824],simde_mm_xor_si128(c2[8456],simde_mm_xor_si128(c2[2127],simde_mm_xor_si128(c2[2831],simde_mm_xor_si128(c2[1408],simde_mm_xor_si128(c2[2112],simde_mm_xor_si128(c2[3529],simde_mm_xor_si128(c2[10604],simde_mm_xor_si128(c2[5679],simde_mm_xor_si128(c2[6383],simde_mm_xor_si128(c2[1454],simde_mm_xor_si128(c2[2158],simde_mm_xor_si128(c2[8518],simde_mm_xor_si128(c2[6407],simde_mm_xor_si128(c2[1476],simde_mm_xor_si128(c2[2180],simde_mm_xor_si128(c2[6434],simde_mm_xor_si128(c2[10671],simde_mm_xor_si128(c2[9261],simde_mm_xor_si128(c2[9965],simde_mm_xor_si128(c2[4352],simde_mm_xor_si128(c2[5772],simde_mm_xor_si128(c2[6476],simde_mm_xor_si128(c2[9289],simde_mm_xor_si128(c2[9993],simde_mm_xor_si128(c2[9289],simde_mm_xor_si128(c2[10030],simde_mm_xor_si128(c2[877],simde_mm_xor_si128(c2[10031],simde_mm_xor_si128(c2[10735],simde_mm_xor_si128(c2[3017],simde_mm_xor_si128(c2[4430],simde_mm_xor_si128(c2[5134],simde_mm_xor_si128(c2[10753],simde_mm_xor_si128(c2[194],simde_mm_xor_si128(c2[5862],simde_mm_xor_si128(c2[10086],simde_mm_xor_si128(c2[10790],simde_mm_xor_si128(c2[8672],simde_mm_xor_si128(c2[9376],simde_mm_xor_si128(c2[1679],simde_mm_xor_si128(c2[7299],simde_mm_xor_si128(c2[8003],simde_mm_xor_si128(c2[8711],simde_mm_xor_si128(c2[9415],simde_mm_xor_si128(c2[3820],simde_mm_xor_si128(c2[5219],simde_mm_xor_si128(c2[5934],simde_mm_xor_si128(c2[6638],simde_mm_xor_si128(c2[10886],simde_mm_xor_si128(c2[331],simde_mm_xor_si128(c2[1035],simde_mm_xor_si128(c2[1025],simde_mm_xor_si128(c2[1729],simde_mm_xor_si128(c2[5999],simde_mm_xor_si128(c2[4582],simde_mm_xor_si128(c2[10925],simde_mm_xor_si128(c2[366],simde_mm_xor_si128(c2[6724],simde_mm_xor_si128(c2[399],simde_mm_xor_si128(c2[10951],simde_mm_xor_si128(c2[392],simde_mm_xor_si128(c2[2537],simde_mm_xor_si128(c2[5345],simde_mm_xor_si128(c2[6049],simde_mm_xor_si128(c2[3238],simde_mm_xor_si128(c2[3942],simde_mm_xor_si128(c2[5391],simde_mm_xor_si128(c2[7494],simde_mm_xor_si128(c2[8198],simde_mm_xor_si128(c2[8901],simde_mm_xor_si128(c2[9605],simde_mm_xor_si128(c2[9636],simde_mm_xor_si128(c2[10336],simde_mm_xor_si128(c2[7526],simde_mm_xor_si128(c2[8230],simde_mm_xor_si128(c2[1226],simde_mm_xor_si128(c2[6145],simde_mm_xor_si128(c2[6855],simde_mm_xor_si128(c2[7559],simde_mm_xor_si128(c2[4066],simde_mm_xor_si128(c2[10407],simde_mm_xor_si128(c2[11111],simde_mm_xor_si128(c2[4077],simde_mm_xor_si128(c2[4781],simde_mm_xor_si128(c2[6212],simde_mm_xor_si128(c2[4805],simde_mm_xor_si128(c2[5509],simde_mm_xor_si128(c2[9735],simde_mm_xor_si128(c2[10439],simde_mm_xor_si128(c2[9770],simde_mm_xor_si128(c2[6952],simde_mm_xor_si128(c2[3432],simde_mm_xor_si128(c2[4136],simde_mm_xor_si128(c2[6983],simde_mm_xor_si128(c2[10511],simde_mm_xor_si128(c2[11215],simde_mm_xor_si128(c2[6977],simde_mm_xor_si128(c2[7681],simde_mm_xor_si128(c2[7018],simde_mm_xor_si128(c2[4906],simde_mm_xor_si128(c2[4201],c2[4905]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 43
+     d2[688]=simde_mm_xor_si128(c2[4232],simde_mm_xor_si128(c2[9864],simde_mm_xor_si128(c2[4239],simde_mm_xor_si128(c2[2816],simde_mm_xor_si128(c2[3520],simde_mm_xor_si128(c2[749],simde_mm_xor_si128(c2[7791],simde_mm_xor_si128(c2[2862],simde_mm_xor_si128(c2[3566],simde_mm_xor_si128(c2[4969],simde_mm_xor_si128(c2[9926],simde_mm_xor_si128(c2[7815],simde_mm_xor_si128(c2[3588],simde_mm_xor_si128(c2[7842],simde_mm_xor_si128(c2[800],simde_mm_xor_si128(c2[10669],simde_mm_xor_si128(c2[110],simde_mm_xor_si128(c2[5760],simde_mm_xor_si128(c2[7884],simde_mm_xor_si128(c2[10697],simde_mm_xor_si128(c2[138],simde_mm_xor_si128(c2[175],simde_mm_xor_si128(c2[2285],simde_mm_xor_si128(c2[864],simde_mm_xor_si128(c2[4425],simde_mm_xor_si128(c2[6542],simde_mm_xor_si128(c2[898],simde_mm_xor_si128(c2[1602],simde_mm_xor_si128(c2[7270],simde_mm_xor_si128(c2[935],simde_mm_xor_si128(c2[10080],simde_mm_xor_si128(c2[10784],simde_mm_xor_si128(c2[3087],simde_mm_xor_si128(c2[9411],simde_mm_xor_si128(c2[10119],simde_mm_xor_si128(c2[10823],simde_mm_xor_si128(c2[5228],simde_mm_xor_si128(c2[6627],simde_mm_xor_si128(c2[8046],simde_mm_xor_si128(c2[1031],simde_mm_xor_si128(c2[2443],simde_mm_xor_si128(c2[2433],simde_mm_xor_si128(c2[3137],simde_mm_xor_si128(c2[7407],simde_mm_xor_si128(c2[5990],simde_mm_xor_si128(c2[1070],simde_mm_xor_si128(c2[1774],simde_mm_xor_si128(c2[8132],simde_mm_xor_si128(c2[1807],simde_mm_xor_si128(c2[1096],simde_mm_xor_si128(c2[1800],simde_mm_xor_si128(c2[3945],simde_mm_xor_si128(c2[7457],simde_mm_xor_si128(c2[4646],simde_mm_xor_si128(c2[5350],simde_mm_xor_si128(c2[6799],simde_mm_xor_si128(c2[9606],simde_mm_xor_si128(c2[10309],simde_mm_xor_si128(c2[11013],simde_mm_xor_si128(c2[11044],simde_mm_xor_si128(c2[481],simde_mm_xor_si128(c2[9638],simde_mm_xor_si128(c2[2634],simde_mm_xor_si128(c2[7553],simde_mm_xor_si128(c2[8263],simde_mm_xor_si128(c2[8967],simde_mm_xor_si128(c2[2634],simde_mm_xor_si128(c2[5474],simde_mm_xor_si128(c2[1256],simde_mm_xor_si128(c2[5485],simde_mm_xor_si128(c2[6189],simde_mm_xor_si128(c2[7620],simde_mm_xor_si128(c2[6917],simde_mm_xor_si128(c2[11143],simde_mm_xor_si128(c2[584],simde_mm_xor_si128(c2[9738],simde_mm_xor_si128(c2[11178],simde_mm_xor_si128(c2[8360],simde_mm_xor_si128(c2[5544],simde_mm_xor_si128(c2[8391],simde_mm_xor_si128(c2[1344],simde_mm_xor_si128(c2[8385],simde_mm_xor_si128(c2[9089],simde_mm_xor_si128(c2[8426],simde_mm_xor_si128(c2[6314],simde_mm_xor_si128(c2[5609],c2[6313]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 44
+     d2[704]=simde_mm_xor_si128(c2[4938],simde_mm_xor_si128(c2[10570],simde_mm_xor_si128(c2[4929],simde_mm_xor_si128(c2[4226],simde_mm_xor_si128(c2[7752],simde_mm_xor_si128(c2[1455],simde_mm_xor_si128(c2[8481],simde_mm_xor_si128(c2[4256],simde_mm_xor_si128(c2[10632],simde_mm_xor_si128(c2[8521],simde_mm_xor_si128(c2[4294],simde_mm_xor_si128(c2[8548],simde_mm_xor_si128(c2[1506],simde_mm_xor_si128(c2[800],simde_mm_xor_si128(c2[6466],simde_mm_xor_si128(c2[8590],simde_mm_xor_si128(c2[844],simde_mm_xor_si128(c2[865],simde_mm_xor_si128(c2[2991],simde_mm_xor_si128(c2[1570],simde_mm_xor_si128(c2[5131],simde_mm_xor_si128(c2[7232],simde_mm_xor_si128(c2[2308],simde_mm_xor_si128(c2[7976],simde_mm_xor_si128(c2[1641],simde_mm_xor_si128(c2[227],simde_mm_xor_si128(c2[9385],simde_mm_xor_si128(c2[3777],simde_mm_xor_si128(c2[10117],simde_mm_xor_si128(c2[266],simde_mm_xor_si128(c2[5934],simde_mm_xor_si128(c2[7333],simde_mm_xor_si128(c2[8736],simde_mm_xor_si128(c2[2410],simde_mm_xor_si128(c2[1737],simde_mm_xor_si128(c2[3149],simde_mm_xor_si128(c2[3843],simde_mm_xor_si128(c2[8097],simde_mm_xor_si128(c2[6696],simde_mm_xor_si128(c2[2464],simde_mm_xor_si128(c2[8838],simde_mm_xor_si128(c2[2497],simde_mm_xor_si128(c2[2506],simde_mm_xor_si128(c2[4651],simde_mm_xor_si128(c2[8163],simde_mm_xor_si128(c2[6056],simde_mm_xor_si128(c2[7489],simde_mm_xor_si128(c2[10312],simde_mm_xor_si128(c2[456],simde_mm_xor_si128(c2[487],simde_mm_xor_si128(c2[1187],simde_mm_xor_si128(c2[10344],simde_mm_xor_si128(c2[3340],simde_mm_xor_si128(c2[8259],simde_mm_xor_si128(c2[9673],simde_mm_xor_si128(c2[6180],simde_mm_xor_si128(c2[1962],simde_mm_xor_si128(c2[6895],simde_mm_xor_si128(c2[8326],simde_mm_xor_si128(c2[7623],simde_mm_xor_si128(c2[1290],simde_mm_xor_si128(c2[621],simde_mm_xor_si128(c2[9066],simde_mm_xor_si128(c2[6250],simde_mm_xor_si128(c2[9097],simde_mm_xor_si128(c2[2050],simde_mm_xor_si128(c2[9795],simde_mm_xor_si128(c2[9132],simde_mm_xor_si128(c2[7020],c2[7019])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 45
+     d2[720]=simde_mm_xor_si128(c2[3561],simde_mm_xor_si128(c2[5129],c2[5258]));
+  }
+}
+#endif
diff --git a/openair1/PHY/CODING/nrLDPC_encoder/ldpc288_byte.c b/openair1/PHY/CODING/nrLDPC_encoder/ldpc288_byte.c
index 92ef1d48f4dbdd6f835f5171993ec9bfa48f526b..cdfbf509ef50966a4c1a985c64d7829aa62c8e00 100644
--- a/openair1/PHY/CODING/nrLDPC_encoder/ldpc288_byte.c
+++ b/openair1/PHY/CODING/nrLDPC_encoder/ldpc288_byte.c
@@ -1,9 +1,10 @@
+#ifdef __AVX2__
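+// AVX2 path only; non-AVX2 builds pick up the 128-bit variant in ldpc288_byte_128.c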
 #include "PHY/sse_intrin.h"
 // generated code for Zc=288, byte encoding
 static inline void ldpc288_byte(uint8_t *c,uint8_t *d) {
-  __m256i *csimd=(__m256i *)c,*dsimd=(__m256i *)d;
+  simde__m256i *csimd=(simde__m256i *)c,*dsimd=(simde__m256i *)d;
 
-  __m256i *c2,*d2;
+  simde__m256i *c2,*d2;
 
   int i2;
   for (i2=0; i2<9; i2++) {
@@ -149,3 +150,4 @@ static inline void ldpc288_byte(uint8_t *c,uint8_t *d) {
      d2[405]=simde_mm256_xor_si256(c2[3191],simde_mm256_xor_si256(c2[108],c2[6520]));
   }
 }
+#endif
diff --git a/openair1/PHY/CODING/nrLDPC_encoder/ldpc288_byte_128.c b/openair1/PHY/CODING/nrLDPC_encoder/ldpc288_byte_128.c
new file mode 100644
index 0000000000000000000000000000000000000000..1ac755aea184fec0358634c688e83d987f233099
--- /dev/null
+++ b/openair1/PHY/CODING/nrLDPC_encoder/ldpc288_byte_128.c
@@ -0,0 +1,153 @@
+#ifndef __AVX2__
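+// 128-bit fallback, built when AVX2 is unavailable -- e.g. on ARM targets,
+// where SIMDE lowers the simde_mm_* calls below to NEON or portable scalar code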
+#include "PHY/sse_intrin.h"
+// generated code for Zc=288, byte encoding
+static inline void ldpc288_byte(uint8_t *c,uint8_t *d) {
+  simde__m128i *csimd=(simde__m128i *)c,*dsimd=(simde__m128i *)d;
+
+  simde__m128i *c2,*d2;
+
+  int i2;
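+  // Zc=288 bytes per row, 16 bytes (one simde__m128i) per pass: 18 iterations
+  // here versus 9 passes of 32 bytes in the AVX2 build. Each d2[...] assignment
+  // below XOR-accumulates (a GF(2) sum) the circulant-shifted input chunks
+  // selected by one row of the LDPC encoding matrix.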
+  for (i2=0; i2<18; i2++) {
+     c2=&csimd[i2];
+     d2=&dsimd[i2];
+
+//row: 0
+     d2[0]=simde_mm_xor_si128(c2[2389],simde_mm_xor_si128(c2[2387],simde_mm_xor_si128(c2[1600],simde_mm_xor_si128(c2[8723],simde_mm_xor_si128(c2[4800],simde_mm_xor_si128(c2[5590],simde_mm_xor_si128(c2[837],simde_mm_xor_si128(c2[9587],simde_mm_xor_si128(c2[1666],simde_mm_xor_si128(c2[9589],simde_mm_xor_si128(c2[8039],simde_mm_xor_si128(c2[12001],simde_mm_xor_si128(c2[4870],simde_mm_xor_si128(c2[160],simde_mm_xor_si128(c2[4112],simde_mm_xor_si128(c2[9654],simde_mm_xor_si128(c2[8905],simde_mm_xor_si128(c2[190],simde_mm_xor_si128(c2[2571],simde_mm_xor_si128(c2[3384],simde_mm_xor_si128(c2[8148],simde_mm_xor_si128(c2[1805],simde_mm_xor_si128(c2[9760],simde_mm_xor_si128(c2[8185],simde_mm_xor_si128(c2[3428],simde_mm_xor_si128(c2[8220],simde_mm_xor_si128(c2[12171],simde_mm_xor_si128(c2[4260],simde_mm_xor_si128(c2[10621],simde_mm_xor_si128(c2[4291],simde_mm_xor_si128(c2[324],simde_mm_xor_si128(c2[369],simde_mm_xor_si128(c2[2736],simde_mm_xor_si128(c2[7490],simde_mm_xor_si128(c2[3571],simde_mm_xor_si128(c2[10698],simde_mm_xor_si128(c2[1990],simde_mm_xor_si128(c2[6781],simde_mm_xor_si128(c2[12312],simde_mm_xor_si128(c2[7563],simde_mm_xor_si128(c2[2851],simde_mm_xor_si128(c2[8392],simde_mm_xor_si128(c2[3638],simde_mm_xor_si128(c2[6844],simde_mm_xor_si128(c2[4478],simde_mm_xor_si128(c2[2887],simde_mm_xor_si128(c2[7682],simde_mm_xor_si128(c2[6885],simde_mm_xor_si128(c2[540],simde_mm_xor_si128(c2[585],simde_mm_xor_si128(c2[11673],simde_mm_xor_si128(c2[2175],simde_mm_xor_si128(c2[2997],simde_mm_xor_si128(c2[6961],simde_mm_xor_si128(c2[4582],simde_mm_xor_si128(c2[12533],simde_mm_xor_si128(c2[10960],simde_mm_xor_si128(c2[648],simde_mm_xor_si128(c2[7033],simde_mm_xor_si128(c2[10197],simde_mm_xor_si128(c2[7032],simde_mm_xor_si128(c2[7852],simde_mm_xor_si128(c2[8654],simde_mm_xor_si128(c2[1519],simde_mm_xor_si128(c2[4732],simde_mm_xor_si128(c2[6307],c2[10266]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 1
+     d2[18]=simde_mm_xor_si128(c2[2389],simde_mm_xor_si128(c2[3181],simde_mm_xor_si128(c2[3179],simde_mm_xor_si128(c2[2392],simde_mm_xor_si128(c2[9515],simde_mm_xor_si128(c2[4800],simde_mm_xor_si128(c2[5592],simde_mm_xor_si128(c2[6382],simde_mm_xor_si128(c2[1629],simde_mm_xor_si128(c2[9587],simde_mm_xor_si128(c2[10379],simde_mm_xor_si128(c2[2458],simde_mm_xor_si128(c2[10381],simde_mm_xor_si128(c2[8039],simde_mm_xor_si128(c2[8831],simde_mm_xor_si128(c2[122],simde_mm_xor_si128(c2[5662],simde_mm_xor_si128(c2[952],simde_mm_xor_si128(c2[4904],simde_mm_xor_si128(c2[10446],simde_mm_xor_si128(c2[8905],simde_mm_xor_si128(c2[9697],simde_mm_xor_si128(c2[982],simde_mm_xor_si128(c2[3363],simde_mm_xor_si128(c2[3384],simde_mm_xor_si128(c2[4176],simde_mm_xor_si128(c2[8940],simde_mm_xor_si128(c2[2597],simde_mm_xor_si128(c2[10552],simde_mm_xor_si128(c2[8977],simde_mm_xor_si128(c2[4220],simde_mm_xor_si128(c2[9012],simde_mm_xor_si128(c2[292],simde_mm_xor_si128(c2[5052],simde_mm_xor_si128(c2[10621],simde_mm_xor_si128(c2[11413],simde_mm_xor_si128(c2[5083],simde_mm_xor_si128(c2[1116],simde_mm_xor_si128(c2[369],simde_mm_xor_si128(c2[1161],simde_mm_xor_si128(c2[3528],simde_mm_xor_si128(c2[8282],simde_mm_xor_si128(c2[3571],simde_mm_xor_si128(c2[4363],simde_mm_xor_si128(c2[11490],simde_mm_xor_si128(c2[2782],simde_mm_xor_si128(c2[6781],simde_mm_xor_si128(c2[7573],simde_mm_xor_si128(c2[433],simde_mm_xor_si128(c2[8355],simde_mm_xor_si128(c2[2851],simde_mm_xor_si128(c2[3643],simde_mm_xor_si128(c2[9184],simde_mm_xor_si128(c2[4430],simde_mm_xor_si128(c2[7636],simde_mm_xor_si128(c2[5270],simde_mm_xor_si128(c2[3679],simde_mm_xor_si128(c2[7682],simde_mm_xor_si128(c2[8474],simde_mm_xor_si128(c2[7677],simde_mm_xor_si128(c2[1332],simde_mm_xor_si128(c2[585],simde_mm_xor_si128(c2[1377],simde_mm_xor_si128(c2[12465],simde_mm_xor_si128(c2[2967],simde_mm_xor_si128(c2[3789],simde_mm_xor_si128(c2[7753],simde_mm_xor_si128(c2[5374],simde_mm_xor_si128(c2[12533],simde_mm_xor_si128(c2[654],simde_mm_xor_si128(c2[11752],simde_mm_xor_si128(c2[1440],simde_mm_xor_si128(c2[7033],simde_mm_xor_si128(c2[7825],simde_mm_xor_si128(c2[10989],simde_mm_xor_si128(c2[7824],simde_mm_xor_si128(c2[7852],simde_mm_xor_si128(c2[8644],simde_mm_xor_si128(c2[9446],simde_mm_xor_si128(c2[2311],simde_mm_xor_si128(c2[4732],simde_mm_xor_si128(c2[5524],simde_mm_xor_si128(c2[7099],c2[11058])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 2
+     d2[36]=simde_mm_xor_si128(c2[3181],simde_mm_xor_si128(c2[3179],simde_mm_xor_si128(c2[1600],simde_mm_xor_si128(c2[2392],simde_mm_xor_si128(c2[8723],simde_mm_xor_si128(c2[9515],simde_mm_xor_si128(c2[5592],simde_mm_xor_si128(c2[5590],simde_mm_xor_si128(c2[6382],simde_mm_xor_si128(c2[837],simde_mm_xor_si128(c2[1629],simde_mm_xor_si128(c2[10379],simde_mm_xor_si128(c2[2458],simde_mm_xor_si128(c2[9589],simde_mm_xor_si128(c2[10381],simde_mm_xor_si128(c2[8831],simde_mm_xor_si128(c2[122],simde_mm_xor_si128(c2[4870],simde_mm_xor_si128(c2[5662],simde_mm_xor_si128(c2[952],simde_mm_xor_si128(c2[4112],simde_mm_xor_si128(c2[4904],simde_mm_xor_si128(c2[9654],simde_mm_xor_si128(c2[10446],simde_mm_xor_si128(c2[9697],simde_mm_xor_si128(c2[982],simde_mm_xor_si128(c2[2571],simde_mm_xor_si128(c2[3363],simde_mm_xor_si128(c2[4176],simde_mm_xor_si128(c2[8148],simde_mm_xor_si128(c2[8940],simde_mm_xor_si128(c2[1805],simde_mm_xor_si128(c2[2597],simde_mm_xor_si128(c2[10552],simde_mm_xor_si128(c2[8185],simde_mm_xor_si128(c2[8977],simde_mm_xor_si128(c2[3428],simde_mm_xor_si128(c2[4220],simde_mm_xor_si128(c2[9012],simde_mm_xor_si128(c2[12171],simde_mm_xor_si128(c2[292],simde_mm_xor_si128(c2[4260],simde_mm_xor_si128(c2[5052],simde_mm_xor_si128(c2[11413],simde_mm_xor_si128(c2[5083],simde_mm_xor_si128(c2[324],simde_mm_xor_si128(c2[1116],simde_mm_xor_si128(c2[1161],simde_mm_xor_si128(c2[2736],simde_mm_xor_si128(c2[3528],simde_mm_xor_si128(c2[7490],simde_mm_xor_si128(c2[8282],simde_mm_xor_si128(c2[4363],simde_mm_xor_si128(c2[11490],simde_mm_xor_si128(c2[1990],simde_mm_xor_si128(c2[2782],simde_mm_xor_si128(c2[7573],simde_mm_xor_si128(c2[433],simde_mm_xor_si128(c2[7563],simde_mm_xor_si128(c2[8355],simde_mm_xor_si128(c2[3643],simde_mm_xor_si128(c2[8392],simde_mm_xor_si128(c2[9184],simde_mm_xor_si128(c2[3638],simde_mm_xor_si128(c2[4430],simde_mm_xor_si128(c2[7636],simde_mm_xor_si128(c2[4478],simde_mm_xor_si128(c2[5270],simde_mm_xor_si128(c2[2887],simde_mm_xor_si128(c2[3679],simde_mm_xor_si128(c2[8474],simde_mm_xor_si128(c2[7677],simde_mm_xor_si128(c2[540],simde_mm_xor_si128(c2[1332],simde_mm_xor_si128(c2[1377],simde_mm_xor_si128(c2[12465],simde_mm_xor_si128(c2[2175],simde_mm_xor_si128(c2[2967],simde_mm_xor_si128(c2[3789],simde_mm_xor_si128(c2[6961],simde_mm_xor_si128(c2[7753],simde_mm_xor_si128(c2[4582],simde_mm_xor_si128(c2[5374],simde_mm_xor_si128(c2[654],simde_mm_xor_si128(c2[10960],simde_mm_xor_si128(c2[11752],simde_mm_xor_si128(c2[648],simde_mm_xor_si128(c2[1440],simde_mm_xor_si128(c2[7825],simde_mm_xor_si128(c2[10989],simde_mm_xor_si128(c2[7032],simde_mm_xor_si128(c2[7824],simde_mm_xor_si128(c2[8644],simde_mm_xor_si128(c2[8654],simde_mm_xor_si128(c2[9446],simde_mm_xor_si128(c2[1519],simde_mm_xor_si128(c2[2311],simde_mm_xor_si128(c2[5524],simde_mm_xor_si128(c2[7099],simde_mm_xor_si128(c2[10266],c2[11058]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 3
+     d2[54]=simde_mm_xor_si128(c2[3181],simde_mm_xor_si128(c2[3179],simde_mm_xor_si128(c2[2392],simde_mm_xor_si128(c2[8723],simde_mm_xor_si128(c2[9515],simde_mm_xor_si128(c2[5592],simde_mm_xor_si128(c2[6382],simde_mm_xor_si128(c2[837],simde_mm_xor_si128(c2[1629],simde_mm_xor_si128(c2[10379],simde_mm_xor_si128(c2[2458],simde_mm_xor_si128(c2[10381],simde_mm_xor_si128(c2[8831],simde_mm_xor_si128(c2[122],simde_mm_xor_si128(c2[4870],simde_mm_xor_si128(c2[5662],simde_mm_xor_si128(c2[952],simde_mm_xor_si128(c2[4904],simde_mm_xor_si128(c2[9654],simde_mm_xor_si128(c2[10446],simde_mm_xor_si128(c2[9697],simde_mm_xor_si128(c2[982],simde_mm_xor_si128(c2[3363],simde_mm_xor_si128(c2[4176],simde_mm_xor_si128(c2[8940],simde_mm_xor_si128(c2[1805],simde_mm_xor_si128(c2[2597],simde_mm_xor_si128(c2[10552],simde_mm_xor_si128(c2[8977],simde_mm_xor_si128(c2[3428],simde_mm_xor_si128(c2[4220],simde_mm_xor_si128(c2[9012],simde_mm_xor_si128(c2[292],simde_mm_xor_si128(c2[4260],simde_mm_xor_si128(c2[5052],simde_mm_xor_si128(c2[11413],simde_mm_xor_si128(c2[5083],simde_mm_xor_si128(c2[1116],simde_mm_xor_si128(c2[1161],simde_mm_xor_si128(c2[3528],simde_mm_xor_si128(c2[7490],simde_mm_xor_si128(c2[8282],simde_mm_xor_si128(c2[4363],simde_mm_xor_si128(c2[11490],simde_mm_xor_si128(c2[1990],simde_mm_xor_si128(c2[2782],simde_mm_xor_si128(c2[7573],simde_mm_xor_si128(c2[433],simde_mm_xor_si128(c2[7563],simde_mm_xor_si128(c2[8355],simde_mm_xor_si128(c2[3643],simde_mm_xor_si128(c2[9184],simde_mm_xor_si128(c2[3638],simde_mm_xor_si128(c2[4430],simde_mm_xor_si128(c2[7636],simde_mm_xor_si128(c2[5270],simde_mm_xor_si128(c2[2887],simde_mm_xor_si128(c2[3679],simde_mm_xor_si128(c2[8474],simde_mm_xor_si128(c2[7677],simde_mm_xor_si128(c2[1332],simde_mm_xor_si128(c2[1377],simde_mm_xor_si128(c2[12465],simde_mm_xor_si128(c2[2175],simde_mm_xor_si128(c2[2967],simde_mm_xor_si128(c2[3789],simde_mm_xor_si128(c2[7753],simde_mm_xor_si128(c2[4582],simde_mm_xor_si128(c2[5374],simde_mm_xor_si128(c2[654],simde_mm_xor_si128(c2[11752],simde_mm_xor_si128(c2[648],simde_mm_xor_si128(c2[1440],simde_mm_xor_si128(c2[7825],simde_mm_xor_si128(c2[10989],simde_mm_xor_si128(c2[7824],simde_mm_xor_si128(c2[8644],simde_mm_xor_si128(c2[9446],simde_mm_xor_si128(c2[1519],simde_mm_xor_si128(c2[2311],simde_mm_xor_si128(c2[5524],simde_mm_xor_si128(c2[7099],simde_mm_xor_si128(c2[10266],c2[11058])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 4
+     d2[72]=simde_mm_xor_si128(c2[4767],c2[8762]);
+
+//row: 5
+     d2[90]=simde_mm_xor_si128(c2[2380],simde_mm_xor_si128(c2[2378],simde_mm_xor_si128(c2[1591],simde_mm_xor_si128(c2[8714],simde_mm_xor_si128(c2[3976],simde_mm_xor_si128(c2[4791],simde_mm_xor_si128(c2[5581],simde_mm_xor_si128(c2[828],simde_mm_xor_si128(c2[4007],simde_mm_xor_si128(c2[9578],simde_mm_xor_si128(c2[1657],simde_mm_xor_si128(c2[9580],simde_mm_xor_si128(c2[8030],simde_mm_xor_si128(c2[11992],simde_mm_xor_si128(c2[4861],simde_mm_xor_si128(c2[6448],simde_mm_xor_si128(c2[151],simde_mm_xor_si128(c2[4121],simde_mm_xor_si128(c2[9663],simde_mm_xor_si128(c2[8896],simde_mm_xor_si128(c2[181],simde_mm_xor_si128(c2[2562],simde_mm_xor_si128(c2[3393],simde_mm_xor_si128(c2[8139],simde_mm_xor_si128(c2[1814],simde_mm_xor_si128(c2[9769],simde_mm_xor_si128(c2[8176],simde_mm_xor_si128(c2[3437],simde_mm_xor_si128(c2[8211],simde_mm_xor_si128(c2[12180],simde_mm_xor_si128(c2[4251],simde_mm_xor_si128(c2[10630],simde_mm_xor_si128(c2[4300],simde_mm_xor_si128(c2[333],simde_mm_xor_si128(c2[360],simde_mm_xor_si128(c2[2745],simde_mm_xor_si128(c2[7499],simde_mm_xor_si128(c2[3580],simde_mm_xor_si128(c2[10707],simde_mm_xor_si128(c2[1981],simde_mm_xor_si128(c2[6772],simde_mm_xor_si128(c2[12321],simde_mm_xor_si128(c2[7572],simde_mm_xor_si128(c2[9161],simde_mm_xor_si128(c2[2860],simde_mm_xor_si128(c2[8401],simde_mm_xor_si128(c2[3647],simde_mm_xor_si128(c2[6853],simde_mm_xor_si128(c2[4469],simde_mm_xor_si128(c2[2896],simde_mm_xor_si128(c2[7673],simde_mm_xor_si128(c2[6876],simde_mm_xor_si128(c2[549],simde_mm_xor_si128(c2[576],simde_mm_xor_si128(c2[11664],simde_mm_xor_si128(c2[2166],simde_mm_xor_si128(c2[11679],simde_mm_xor_si128(c2[2988],simde_mm_xor_si128(c2[6952],simde_mm_xor_si128(c2[4573],simde_mm_xor_si128(c2[12542],simde_mm_xor_si128(c2[10951],simde_mm_xor_si128(c2[657],simde_mm_xor_si128(c2[7024],simde_mm_xor_si128(c2[10188],simde_mm_xor_si128(c2[7023],simde_mm_xor_si128(c2[7861],simde_mm_xor_si128(c2[8645],simde_mm_xor_si128(c2[1528],simde_mm_xor_si128(c2[4723],simde_mm_xor_si128(c2[6316],simde_mm_xor_si128(c2[10275],c2[12640]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 6
+     d2[108]=simde_mm_xor_si128(c2[5],simde_mm_xor_si128(c2[225],simde_mm_xor_si128(c2[7498],simde_mm_xor_si128(c2[8321],simde_mm_xor_si128(c2[9183],simde_mm_xor_si128(c2[1415],simde_mm_xor_si128(c2[6201],c2[10230])))))));
+
+//row: 7
+     d2[126]=simde_mm_xor_si128(c2[7138],simde_mm_xor_si128(c2[10343],simde_mm_xor_si128(c2[8073],simde_mm_xor_si128(c2[6599],simde_mm_xor_si128(c2[6630],c2[3682])))));
+
+//row: 8
+     d2[144]=simde_mm_xor_si128(c2[7137],simde_mm_xor_si128(c2[7936],simde_mm_xor_si128(c2[7135],simde_mm_xor_si128(c2[7934],simde_mm_xor_si128(c2[6348],simde_mm_xor_si128(c2[6337],simde_mm_xor_si128(c2[7129],simde_mm_xor_si128(c2[800],simde_mm_xor_si128(c2[807],simde_mm_xor_si128(c2[1599],simde_mm_xor_si128(c2[4755],simde_mm_xor_si128(c2[9548],simde_mm_xor_si128(c2[10347],simde_mm_xor_si128(c2[10338],simde_mm_xor_si128(c2[10345],simde_mm_xor_si128(c2[11137],simde_mm_xor_si128(c2[5585],simde_mm_xor_si128(c2[5592],simde_mm_xor_si128(c2[6384],simde_mm_xor_si128(c2[36],simde_mm_xor_si128(c2[1664],simde_mm_xor_si128(c2[2463],simde_mm_xor_si128(c2[6414],simde_mm_xor_si128(c2[7213],simde_mm_xor_si128(c2[1666],simde_mm_xor_si128(c2[1673],simde_mm_xor_si128(c2[2465],simde_mm_xor_si128(c2[116],simde_mm_xor_si128(c2[915],simde_mm_xor_si128(c2[4078],simde_mm_xor_si128(c2[4877],simde_mm_xor_si128(c2[9618],simde_mm_xor_si128(c2[9625],simde_mm_xor_si128(c2[10417],simde_mm_xor_si128(c2[9627],simde_mm_xor_si128(c2[4908],simde_mm_xor_si128(c2[5689],simde_mm_xor_si128(c2[8860],simde_mm_xor_si128(c2[8867],simde_mm_xor_si128(c2[9659],simde_mm_xor_si128(c2[1731],simde_mm_xor_si128(c2[1738],simde_mm_xor_si128(c2[2530],simde_mm_xor_si128(c2[982],simde_mm_xor_si128(c2[1781],simde_mm_xor_si128(c2[4938],simde_mm_xor_si128(c2[5737],simde_mm_xor_si128(c2[7319],simde_mm_xor_si128(c2[7308],simde_mm_xor_si128(c2[8100],simde_mm_xor_si128(c2[8150],simde_mm_xor_si128(c2[8931],simde_mm_xor_si128(c2[225],simde_mm_xor_si128(c2[232],simde_mm_xor_si128(c2[1024],simde_mm_xor_si128(c2[6553],simde_mm_xor_si128(c2[6560],simde_mm_xor_si128(c2[7352],simde_mm_xor_si128(c2[1837],simde_mm_xor_si128(c2[2636],simde_mm_xor_si128(c2[262],simde_mm_xor_si128(c2[269],simde_mm_xor_si128(c2[1061],simde_mm_xor_si128(c2[8176],simde_mm_xor_si128(c2[8183],simde_mm_xor_si128(c2[8975],simde_mm_xor_si128(c2[297],simde_mm_xor_si128(c2[1096],simde_mm_xor_si128(c2[4248],simde_mm_xor_si128(c2[4255],simde_mm_xor_si128(c2[5047],simde_mm_xor_si128(c2[9008],simde_mm_xor_si128(c2[9015],simde_mm_xor_si128(c2[9807],simde_mm_xor_si128(c2[2716],simde_mm_xor_si128(c2[3497],simde_mm_xor_si128(c2[9039],simde_mm_xor_si128(c2[9838],simde_mm_xor_si128(c2[5090],simde_mm_xor_si128(c2[5079],simde_mm_xor_si128(c2[5871],simde_mm_xor_si128(c2[5117],simde_mm_xor_si128(c2[5916],simde_mm_xor_si128(c2[7502],simde_mm_xor_si128(c2[7491],simde_mm_xor_si128(c2[8283],simde_mm_xor_si128(c2[12256],simde_mm_xor_si128(c2[12245],simde_mm_xor_si128(c2[366],simde_mm_xor_si128(c2[8319],simde_mm_xor_si128(c2[9118],simde_mm_xor_si128(c2[2775],simde_mm_xor_si128(c2[3574],simde_mm_xor_si128(c2[6738],simde_mm_xor_si128(c2[6745],simde_mm_xor_si128(c2[7537],simde_mm_xor_si128(c2[11529],simde_mm_xor_si128(c2[12328],simde_mm_xor_si128(c2[4407],simde_mm_xor_si128(c2[5188],simde_mm_xor_si128(c2[12329],simde_mm_xor_si128(c2[12318],simde_mm_xor_si128(c2[439],simde_mm_xor_si128(c2[7562],simde_mm_xor_si128(c2[7599],simde_mm_xor_si128(c2[8398],simde_mm_xor_si128(c2[469],simde_mm_xor_si128(c2[476],simde_mm_xor_si128(c2[1268],simde_mm_xor_si128(c2[8404],simde_mm_xor_si128(c2[8393],simde_mm_xor_si128(c2[9185],simde_mm_xor_si128(c2[11592],simde_mm_xor_si128(c2[12391],simde_mm_xor_si128(c2[9226],simde_mm_xor_si128(c2[9233],simde_mm_xor_si128(c2[10025],simde_mm_xor_si128(c2[7635],simde_mm_xor_si128(c2[7642],simde_mm_xor_si128(c2[8434],simde_mm_xor_si128(c2[12430],simde_mm_xor_si128(c2[540],simde_mm_xor_si128(c2[11633],simde_mm_xor_si128(c2[12432],simde_mm_xor_si128(c2[5306],simde_mm_xor_si128(c2[5295],simde_mm_xor_si128(c2[6087],simde_mm_xor_si128(c2[5333],simde_mm_xor_si128(c2[6132],simde_mm_xor_si128(c2[3750],simde_mm_xor_si128(c2[4549],simde_mm_xor_si128(c2[6923],simde_mm_xor_si128(c2[6912],simde_mm_xor_si128(c2[7704],simde_mm_xor_si128(c2[2166],simde_mm_xor_si128(c2[7745],simde_mm_xor_si128(c2[8544],simde_mm_xor_si128(c2[11709],simde_mm_xor_si128(c2[11716],simde_mm_xor_si128(c2[12508],simde_mm_xor_si128(c2[9330],simde_mm_xor_si128(c2[9337],simde_mm_xor_si128(c2[10129],simde_mm_xor_si128(c2[4610],simde_mm_xor_si128(c2[5409],simde_mm_xor_si128(c2[3037],simde_mm_xor_si128(c2[3026],simde_mm_xor_si128(c2[3818],simde_mm_xor_si128(c2[5414],simde_mm_xor_si128(c2[5403],simde_mm_xor_si128(c2[6195],simde_mm_xor_si128(c2[11781],simde_mm_xor_si128(c2[12580],simde_mm_xor_si128(c2[2274],simde_mm_xor_si128(c2[3073],simde_mm_xor_si128(c2[11780],simde_mm_xor_si128(c2[11787],simde_mm_xor_si128(c2[12579],simde_mm_xor_si128(c2[11774],simde_mm_xor_si128(c2[12600],simde_mm_xor_si128(c2[728],simde_mm_xor_si128(c2[731],simde_mm_xor_si128(c2[720],simde_mm_xor_si128(c2[1512],simde_mm_xor_si128(c2[6267],simde_mm_xor_si128(c2[6274],simde_mm_xor_si128(c2[7066],simde_mm_xor_si128(c2[9480],simde_mm_xor_si128(c2[10261],simde_mm_xor_si128(c2[11055],simde_mm_xor_si128(c2[11854],simde_mm_xor_si128(c2[2343],simde_mm_xor_si128(c2[2350],simde_mm_xor_si128(c2[3142],c2[12636]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 9
+     d2[162]=simde_mm_xor_si128(c2[1594],simde_mm_xor_si128(c2[11925],simde_mm_xor_si128(c2[10661],simde_mm_xor_si128(c2[5156],simde_mm_xor_si128(c2[10766],simde_mm_xor_si128(c2[3788],simde_mm_xor_si128(c2[10156],c2[1525])))))));
+
+//row: 10
+     d2[180]=simde_mm_xor_si128(c2[1631],simde_mm_xor_si128(c2[864],simde_mm_xor_si128(c2[9649],simde_mm_xor_si128(c2[8980],simde_mm_xor_si128(c2[8222],c2[7644])))));
+
+//row: 11
+     d2[198]=simde_mm_xor_si128(c2[7935],simde_mm_xor_si128(c2[10310],simde_mm_xor_si128(c2[11102],simde_mm_xor_si128(c2[7933],simde_mm_xor_si128(c2[11100],simde_mm_xor_si128(c2[7128],simde_mm_xor_si128(c2[10313],simde_mm_xor_si128(c2[1598],simde_mm_xor_si128(c2[4765],simde_mm_xor_si128(c2[5547],simde_mm_xor_si128(c2[10346],simde_mm_xor_si128(c2[50],simde_mm_xor_si128(c2[842],simde_mm_xor_si128(c2[11136],simde_mm_xor_si128(c2[1632],simde_mm_xor_si128(c2[6383],simde_mm_xor_si128(c2[9550],simde_mm_xor_si128(c2[5581],simde_mm_xor_si128(c2[2462],simde_mm_xor_si128(c2[4837],simde_mm_xor_si128(c2[5629],simde_mm_xor_si128(c2[7212],simde_mm_xor_si128(c2[10379],simde_mm_xor_si128(c2[2464],simde_mm_xor_si128(c2[5631],simde_mm_xor_si128(c2[914],simde_mm_xor_si128(c2[3289],simde_mm_xor_si128(c2[4081],simde_mm_xor_si128(c2[4876],simde_mm_xor_si128(c2[8043],simde_mm_xor_si128(c2[10416],simde_mm_xor_si128(c2[912],simde_mm_xor_si128(c2[5688],simde_mm_xor_si128(c2[8873],simde_mm_xor_si128(c2[9658],simde_mm_xor_si128(c2[154],simde_mm_xor_si128(c2[2529],simde_mm_xor_si128(c2[5696],simde_mm_xor_si128(c2[1780],simde_mm_xor_si128(c2[4155],simde_mm_xor_si128(c2[4947],simde_mm_xor_si128(c2[5736],simde_mm_xor_si128(c2[8903],simde_mm_xor_si128(c2[8117],simde_mm_xor_si128(c2[11284],simde_mm_xor_si128(c2[8930],simde_mm_xor_si128(c2[11305],simde_mm_xor_si128(c2[12097],simde_mm_xor_si128(c2[1023],simde_mm_xor_si128(c2[4190],simde_mm_xor_si128(c2[7351],simde_mm_xor_si128(c2[10518],simde_mm_xor_si128(c2[2635],simde_mm_xor_si128(c2[5802],simde_mm_xor_si128(c2[1060],simde_mm_xor_si128(c2[4227],simde_mm_xor_si128(c2[8974],simde_mm_xor_si128(c2[12141],simde_mm_xor_si128(c2[1095],simde_mm_xor_si128(c2[4262],simde_mm_xor_si128(c2[5046],simde_mm_xor_si128(c2[8213],simde_mm_xor_si128(c2[9806],simde_mm_xor_si128(c2[302],simde_mm_xor_si128(c2[3496],simde_mm_xor_si128(c2[5871],simde_mm_xor_si128(c2[6663],simde_mm_xor_si128(c2[9837],simde_mm_xor_si128(c2[333],simde_mm_xor_si128(c2[5870],simde_mm_xor_si128(c2[9037],simde_mm_xor_si128(c2[5915],simde_mm_xor_si128(c2[8290],simde_mm_xor_si128(c2[9082],simde_mm_xor_si128(c2[8282],simde_mm_xor_si128(c2[11449],simde_mm_xor_si128(c2[365],simde_mm_xor_si128(c2[3532],simde_mm_xor_si128(c2[9117],simde_mm_xor_si128(c2[11492],simde_mm_xor_si128(c2[12284],simde_mm_xor_si128(c2[3573],simde_mm_xor_si128(c2[6740],simde_mm_xor_si128(c2[7536],simde_mm_xor_si128(c2[10703],simde_mm_xor_si128(c2[12327],simde_mm_xor_si128(c2[2031],simde_mm_xor_si128(c2[2823],simde_mm_xor_si128(c2[5187],simde_mm_xor_si128(c2[8354],simde_mm_xor_si128(c2[438],simde_mm_xor_si128(c2[3605],simde_mm_xor_si128(c2[2033],simde_mm_xor_si128(c2[8397],simde_mm_xor_si128(c2[10772],simde_mm_xor_si128(c2[11564],simde_mm_xor_si128(c2[1267],simde_mm_xor_si128(c2[4434],simde_mm_xor_si128(c2[9184],simde_mm_xor_si128(c2[12351],simde_mm_xor_si128(c2[12390],simde_mm_xor_si128(c2[2886],simde_mm_xor_si128(c2[10024],simde_mm_xor_si128(c2[520],simde_mm_xor_si128(c2[8433],simde_mm_xor_si128(c2[11600],simde_mm_xor_si128(c2[557],simde_mm_xor_si128(c2[2932],simde_mm_xor_si128(c2[3724],simde_mm_xor_si128(c2[12431],simde_mm_xor_si128(c2[2927],simde_mm_xor_si128(c2[6086],simde_mm_xor_si128(c2[9253],simde_mm_xor_si128(c2[6131],simde_mm_xor_si128(c2[8506],simde_mm_xor_si128(c2[9298],simde_mm_xor_si128(c2[4548],simde_mm_xor_si128(c2[7715],simde_mm_xor_si128(c2[7721],simde_mm_xor_si128(c2[10888],simde_mm_xor_si128(c2[4547],simde_mm_xor_si128(c2[8543],simde_mm_xor_si128(c2[11710],simde_mm_xor_si128(c2[12507],simde_mm_xor_si128(c2[3003],simde_mm_xor_si128(c2[10128],simde_mm_xor_si128(c2[624],simde_mm_xor_si128(c2[5408],simde_mm_xor_si128(c2[7783],simde_mm_xor_si128(c2[8575],simde_mm_xor_si128(c2[3817],simde_mm_xor_si128(c2[6984],simde_mm_xor_si128(c2[6194],simde_mm_xor_si128(c2[9361],simde_mm_xor_si128(c2[12579],simde_mm_xor_si128(c2[2283],simde_mm_xor_si128(c2[3075],simde_mm_xor_si128(c2[3072],simde_mm_xor_si128(c2[6239],simde_mm_xor_si128(c2[12578],simde_mm_xor_si128(c2[3074],simde_mm_xor_si128(c2[727],simde_mm_xor_si128(c2[3102],simde_mm_xor_si128(c2[3894],simde_mm_xor_si128(c2[1529],simde_mm_xor_si128(c2[4696],simde_mm_xor_si128(c2[7065],simde_mm_xor_si128(c2[10232],simde_mm_xor_si128(c2[10260],simde_mm_xor_si128(c2[12653],simde_mm_xor_si128(c2[756],simde_mm_xor_si128(c2[11853],simde_mm_xor_si128(c2[2349],simde_mm_xor_si128(c2[3141],simde_mm_xor_si128(c2[6308],c2[1565])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 12
+     d2[216]=simde_mm_xor_si128(c2[806],simde_mm_xor_si128(c2[1630],simde_mm_xor_si128(c2[3543],simde_mm_xor_si128(c2[5949],simde_mm_xor_si128(c2[11570],c2[3039])))));
+
+//row: 13
+     d2[234]=simde_mm_xor_si128(c2[5559],simde_mm_xor_si128(c2[6351],simde_mm_xor_si128(c2[6349],simde_mm_xor_si128(c2[5544],simde_mm_xor_si128(c2[14],simde_mm_xor_si128(c2[5558],simde_mm_xor_si128(c2[7970],simde_mm_xor_si128(c2[8762],simde_mm_xor_si128(c2[9552],simde_mm_xor_si128(c2[4799],simde_mm_xor_si128(c2[86],simde_mm_xor_si128(c2[878],simde_mm_xor_si128(c2[5628],simde_mm_xor_si128(c2[880],simde_mm_xor_si128(c2[11209],simde_mm_xor_si128(c2[12001],simde_mm_xor_si128(c2[3292],simde_mm_xor_si128(c2[8832],simde_mm_xor_si128(c2[108],simde_mm_xor_si128(c2[4104],simde_mm_xor_si128(c2[8074],simde_mm_xor_si128(c2[945],simde_mm_xor_si128(c2[12075],simde_mm_xor_si128(c2[196],simde_mm_xor_si128(c2[4152],simde_mm_xor_si128(c2[6533],simde_mm_xor_si128(c2[6554],simde_mm_xor_si128(c2[7346],simde_mm_xor_si128(c2[12110],simde_mm_xor_si128(c2[5767],simde_mm_xor_si128(c2[1051],simde_mm_xor_si128(c2[12147],simde_mm_xor_si128(c2[7390],simde_mm_xor_si128(c2[6601],simde_mm_xor_si128(c2[12182],simde_mm_xor_si128(c2[3462],simde_mm_xor_si128(c2[8222],simde_mm_xor_si128(c2[1120],simde_mm_xor_si128(c2[1912],simde_mm_xor_si128(c2[8253],simde_mm_xor_si128(c2[4286],simde_mm_xor_si128(c2[3539],simde_mm_xor_si128(c2[4331],simde_mm_xor_si128(c2[6698],simde_mm_xor_si128(c2[11452],simde_mm_xor_si128(c2[6741],simde_mm_xor_si128(c2[7533],simde_mm_xor_si128(c2[1989],simde_mm_xor_si128(c2[5952],simde_mm_xor_si128(c2[9951],simde_mm_xor_si128(c2[10743],simde_mm_xor_si128(c2[3603],simde_mm_xor_si128(c2[11525],simde_mm_xor_si128(c2[6021],simde_mm_xor_si128(c2[6813],simde_mm_xor_si128(c2[12354],simde_mm_xor_si128(c2[7600],simde_mm_xor_si128(c2[10806],simde_mm_xor_si128(c2[8440],simde_mm_xor_si128(c2[6849],simde_mm_xor_si128(c2[10852],simde_mm_xor_si128(c2[11644],simde_mm_xor_si128(c2[10847],simde_mm_xor_si128(c2[4502],simde_mm_xor_si128(c2[3755],simde_mm_xor_si128(c2[4547],simde_mm_xor_si128(c2[2964],simde_mm_xor_si128(c2[6137],simde_mm_xor_si128(c2[6959],simde_mm_xor_si128(c2[10923],simde_mm_xor_si128(c2[8544],simde_mm_xor_si128(c2[3032],simde_mm_xor_si128(c2[3824],simde_mm_xor_si128(c2[2233],simde_mm_xor_si128(c2[4610],simde_mm_xor_si128(c2[10203],simde_mm_xor_si128(c2[10995],simde_mm_xor_si128(c2[1488],simde_mm_xor_si128(c2[10994],simde_mm_xor_si128(c2[11022],simde_mm_xor_si128(c2[11814],simde_mm_xor_si128(c2[12616],simde_mm_xor_si128(c2[5481],simde_mm_xor_si128(c2[12602],simde_mm_xor_si128(c2[7884],simde_mm_xor_si128(c2[8676],simde_mm_xor_si128(c2[10269],c2[1557])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 14
+     d2[252]=simde_mm_xor_si128(c2[0],simde_mm_xor_si128(c2[8363],simde_mm_xor_si128(c2[10851],simde_mm_xor_si128(c2[577],simde_mm_xor_si128(c2[612],c2[12640])))));
+
+//row: 15
+     d2[270]=simde_mm_xor_si128(c2[3185],simde_mm_xor_si128(c2[3183],simde_mm_xor_si128(c2[2378],simde_mm_xor_si128(c2[8727],simde_mm_xor_si128(c2[9519],simde_mm_xor_si128(c2[7930],simde_mm_xor_si128(c2[5596],simde_mm_xor_si128(c2[6386],simde_mm_xor_si128(c2[841],simde_mm_xor_si128(c2[1633],simde_mm_xor_si128(c2[36],simde_mm_xor_si128(c2[10383],simde_mm_xor_si128(c2[2462],simde_mm_xor_si128(c2[10385],simde_mm_xor_si128(c2[8835],simde_mm_xor_si128(c2[108],simde_mm_xor_si128(c2[4874],simde_mm_xor_si128(c2[5666],simde_mm_xor_si128(c2[938],simde_mm_xor_si128(c2[4908],simde_mm_xor_si128(c2[9658],simde_mm_xor_si128(c2[10450],simde_mm_xor_si128(c2[9701],simde_mm_xor_si128(c2[986],simde_mm_xor_si128(c2[3349],simde_mm_xor_si128(c2[4180],simde_mm_xor_si128(c2[8944],simde_mm_xor_si128(c2[1809],simde_mm_xor_si128(c2[2601],simde_mm_xor_si128(c2[10556],simde_mm_xor_si128(c2[8981],simde_mm_xor_si128(c2[3432],simde_mm_xor_si128(c2[4224],simde_mm_xor_si128(c2[9016],simde_mm_xor_si128(c2[296],simde_mm_xor_si128(c2[4264],simde_mm_xor_si128(c2[5056],simde_mm_xor_si128(c2[11417],simde_mm_xor_si128(c2[5087],simde_mm_xor_si128(c2[1120],simde_mm_xor_si128(c2[1165],simde_mm_xor_si128(c2[3532],simde_mm_xor_si128(c2[7494],simde_mm_xor_si128(c2[8286],simde_mm_xor_si128(c2[5915],simde_mm_xor_si128(c2[4367],simde_mm_xor_si128(c2[11494],simde_mm_xor_si128(c2[1994],simde_mm_xor_si128(c2[2786],simde_mm_xor_si128(c2[7577],simde_mm_xor_si128(c2[437],simde_mm_xor_si128(c2[7567],simde_mm_xor_si128(c2[8359],simde_mm_xor_si128(c2[3647],simde_mm_xor_si128(c2[9188],simde_mm_xor_si128(c2[3642],simde_mm_xor_si128(c2[4434],simde_mm_xor_si128(c2[9978],simde_mm_xor_si128(c2[7640],simde_mm_xor_si128(c2[5256],simde_mm_xor_si128(c2[2891],simde_mm_xor_si128(c2[3683],simde_mm_xor_si128(c2[8460],simde_mm_xor_si128(c2[7681],simde_mm_xor_si128(c2[1336],simde_mm_xor_si128(c2[1381],simde_mm_xor_si128(c2[12469],simde_mm_xor_si128(c2[2161],simde_mm_xor_si128(c2[2953],simde_mm_xor_si128(c2[3793],simde_mm_xor_si128(c2[7757],simde_mm_xor_si128(c2[4586],simde_mm_xor_si128(c2[5378],simde_mm_xor_si128(c2[658],simde_mm_xor_si128(c2[11738],simde_mm_xor_si128(c2[652],simde_mm_xor_si128(c2[1444],simde_mm_xor_si128(c2[3820],simde_mm_xor_si128(c2[7829],simde_mm_xor_si128(c2[10993],simde_mm_xor_si128(c2[7828],simde_mm_xor_si128(c2[8648],simde_mm_xor_si128(c2[9432],simde_mm_xor_si128(c2[1523],simde_mm_xor_si128(c2[2315],simde_mm_xor_si128(c2[5510],simde_mm_xor_si128(c2[7103],simde_mm_xor_si128(c2[10270],c2[11062]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 16
+     d2[288]=simde_mm_xor_si128(c2[2389],simde_mm_xor_si128(c2[2387],simde_mm_xor_si128(c2[1600],simde_mm_xor_si128(c2[8723],simde_mm_xor_si128(c2[4800],simde_mm_xor_si128(c2[5590],simde_mm_xor_si128(c2[837],simde_mm_xor_si128(c2[11140],simde_mm_xor_si128(c2[9587],simde_mm_xor_si128(c2[1666],simde_mm_xor_si128(c2[9589],simde_mm_xor_si128(c2[8039],simde_mm_xor_si128(c2[12001],simde_mm_xor_si128(c2[4870],simde_mm_xor_si128(c2[10404],simde_mm_xor_si128(c2[160],simde_mm_xor_si128(c2[4112],simde_mm_xor_si128(c2[9654],simde_mm_xor_si128(c2[8905],simde_mm_xor_si128(c2[190],simde_mm_xor_si128(c2[2571],simde_mm_xor_si128(c2[3384],simde_mm_xor_si128(c2[8148],simde_mm_xor_si128(c2[1805],simde_mm_xor_si128(c2[9760],simde_mm_xor_si128(c2[8185],simde_mm_xor_si128(c2[3428],simde_mm_xor_si128(c2[8220],simde_mm_xor_si128(c2[12171],simde_mm_xor_si128(c2[4260],simde_mm_xor_si128(c2[10621],simde_mm_xor_si128(c2[4291],simde_mm_xor_si128(c2[324],simde_mm_xor_si128(c2[369],simde_mm_xor_si128(c2[2736],simde_mm_xor_si128(c2[7490],simde_mm_xor_si128(c2[3571],simde_mm_xor_si128(c2[10698],simde_mm_xor_si128(c2[1990],simde_mm_xor_si128(c2[2778],simde_mm_xor_si128(c2[6781],simde_mm_xor_si128(c2[12312],simde_mm_xor_si128(c2[7563],simde_mm_xor_si128(c2[2851],simde_mm_xor_si128(c2[8392],simde_mm_xor_si128(c2[3638],simde_mm_xor_si128(c2[6844],simde_mm_xor_si128(c2[4478],simde_mm_xor_si128(c2[2887],simde_mm_xor_si128(c2[7682],simde_mm_xor_si128(c2[6885],simde_mm_xor_si128(c2[540],simde_mm_xor_si128(c2[585],simde_mm_xor_si128(c2[11673],simde_mm_xor_si128(c2[2175],simde_mm_xor_si128(c2[2997],simde_mm_xor_si128(c2[6961],simde_mm_xor_si128(c2[4582],simde_mm_xor_si128(c2[12533],simde_mm_xor_si128(c2[10960],simde_mm_xor_si128(c2[648],simde_mm_xor_si128(c2[7033],simde_mm_xor_si128(c2[10197],simde_mm_xor_si128(c2[7032],simde_mm_xor_si128(c2[7852],simde_mm_xor_si128(c2[8654],simde_mm_xor_si128(c2[1519],simde_mm_xor_si128(c2[5475],simde_mm_xor_si128(c2[4732],simde_mm_xor_si128(c2[6307],c2[10266]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 17
+     d2[306]=simde_mm_xor_si128(c2[7137],simde_mm_xor_si128(c2[7640],simde_mm_xor_si128(c2[576],simde_mm_xor_si128(c2[612],c2[2350]))));
+
+//row: 18
+     d2[324]=simde_mm_xor_si128(c2[838],simde_mm_xor_si128(c2[5985],simde_mm_xor_si128(c2[468],simde_mm_xor_si128(c2[1455],c2[693]))));
+
+//row: 19
+     d2[342]=simde_mm_xor_si128(c2[0],simde_mm_xor_si128(c2[36],simde_mm_xor_si128(c2[5011],simde_mm_xor_si128(c2[297],c2[360]))));
+
+//row: 20
+     d2[360]=simde_mm_xor_si128(c2[5553],simde_mm_xor_si128(c2[5551],simde_mm_xor_si128(c2[4764],simde_mm_xor_si128(c2[11887],simde_mm_xor_si128(c2[7144],simde_mm_xor_si128(c2[7964],simde_mm_xor_si128(c2[8754],simde_mm_xor_si128(c2[4001],simde_mm_xor_si128(c2[80],simde_mm_xor_si128(c2[4830],simde_mm_xor_si128(c2[82],simde_mm_xor_si128(c2[11203],simde_mm_xor_si128(c2[2494],simde_mm_xor_si128(c2[8034],simde_mm_xor_si128(c2[905],simde_mm_xor_si128(c2[3324],simde_mm_xor_si128(c2[7276],simde_mm_xor_si128(c2[147],simde_mm_xor_si128(c2[12069],simde_mm_xor_si128(c2[3354],simde_mm_xor_si128(c2[5735],simde_mm_xor_si128(c2[6566],simde_mm_xor_si128(c2[11312],simde_mm_xor_si128(c2[4969],simde_mm_xor_si128(c2[253],simde_mm_xor_si128(c2[11349],simde_mm_xor_si128(c2[6592],simde_mm_xor_si128(c2[11384],simde_mm_xor_si128(c2[2664],simde_mm_xor_si128(c2[7424],simde_mm_xor_si128(c2[1132],simde_mm_xor_si128(c2[7455],simde_mm_xor_si128(c2[3506],simde_mm_xor_si128(c2[8249],simde_mm_xor_si128(c2[3533],simde_mm_xor_si128(c2[5918],simde_mm_xor_si128(c2[10672],simde_mm_xor_si128(c2[6735],simde_mm_xor_si128(c2[1191],simde_mm_xor_si128(c2[5154],simde_mm_xor_si128(c2[405],simde_mm_xor_si128(c2[9945],simde_mm_xor_si128(c2[2823],simde_mm_xor_si128(c2[10745],simde_mm_xor_si128(c2[6015],simde_mm_xor_si128(c2[11556],simde_mm_xor_si128(c2[6820],simde_mm_xor_si128(c2[10008],simde_mm_xor_si128(c2[7642],simde_mm_xor_si128(c2[6051],simde_mm_xor_si128(c2[10846],simde_mm_xor_si128(c2[10049],simde_mm_xor_si128(c2[3722],simde_mm_xor_si128(c2[3749],simde_mm_xor_si128(c2[2166],simde_mm_xor_si128(c2[5339],simde_mm_xor_si128(c2[6161],simde_mm_xor_si128(c2[10125],simde_mm_xor_si128(c2[7746],simde_mm_xor_si128(c2[3026],simde_mm_xor_si128(c2[1453],simde_mm_xor_si128(c2[3830],simde_mm_xor_si128(c2[10197],simde_mm_xor_si128(c2[690],simde_mm_xor_si128(c2[10196],simde_mm_xor_si128(c2[11016],simde_mm_xor_si128(c2[11818],simde_mm_xor_si128(c2[4683],simde_mm_xor_si128(c2[7896],simde_mm_xor_si128(c2[9471],c2[759]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 21
+     d2[378]=simde_mm_xor_si128(c2[40],simde_mm_xor_si128(c2[11270],simde_mm_xor_si128(c2[8512],simde_mm_xor_si128(c2[7848],c2[2341]))));
+
+//row: 22
+     d2[396]=simde_mm_xor_si128(c2[6340],simde_mm_xor_si128(c2[10739],simde_mm_xor_si128(c2[6808],c2[1420])));
+
+//row: 23
+     d2[414]=simde_mm_xor_si128(c2[3215],simde_mm_xor_si128(c2[72],simde_mm_xor_si128(c2[360],c2[4618])));
+
+//row: 24
+     d2[432]=simde_mm_xor_si128(c2[2389],simde_mm_xor_si128(c2[2387],simde_mm_xor_si128(c2[1600],simde_mm_xor_si128(c2[8723],simde_mm_xor_si128(c2[9518],simde_mm_xor_si128(c2[4800],simde_mm_xor_si128(c2[5590],simde_mm_xor_si128(c2[837],simde_mm_xor_si128(c2[9587],simde_mm_xor_si128(c2[1666],simde_mm_xor_si128(c2[9589],simde_mm_xor_si128(c2[8039],simde_mm_xor_si128(c2[12001],simde_mm_xor_si128(c2[4870],simde_mm_xor_si128(c2[5664],simde_mm_xor_si128(c2[160],simde_mm_xor_si128(c2[4112],simde_mm_xor_si128(c2[9654],simde_mm_xor_si128(c2[144],simde_mm_xor_si128(c2[8905],simde_mm_xor_si128(c2[190],simde_mm_xor_si128(c2[2571],simde_mm_xor_si128(c2[3384],simde_mm_xor_si128(c2[8148],simde_mm_xor_si128(c2[1805],simde_mm_xor_si128(c2[9760],simde_mm_xor_si128(c2[8185],simde_mm_xor_si128(c2[3428],simde_mm_xor_si128(c2[8220],simde_mm_xor_si128(c2[12171],simde_mm_xor_si128(c2[4260],simde_mm_xor_si128(c2[10621],simde_mm_xor_si128(c2[4291],simde_mm_xor_si128(c2[324],simde_mm_xor_si128(c2[369],simde_mm_xor_si128(c2[2736],simde_mm_xor_si128(c2[7490],simde_mm_xor_si128(c2[3571],simde_mm_xor_si128(c2[10698],simde_mm_xor_si128(c2[1990],simde_mm_xor_si128(c2[8332],simde_mm_xor_si128(c2[6781],simde_mm_xor_si128(c2[12312],simde_mm_xor_si128(c2[7563],simde_mm_xor_si128(c2[2851],simde_mm_xor_si128(c2[8392],simde_mm_xor_si128(c2[3638],simde_mm_xor_si128(c2[6844],simde_mm_xor_si128(c2[4478],simde_mm_xor_si128(c2[2887],simde_mm_xor_si128(c2[7682],simde_mm_xor_si128(c2[6885],simde_mm_xor_si128(c2[540],simde_mm_xor_si128(c2[585],simde_mm_xor_si128(c2[11673],simde_mm_xor_si128(c2[2175],simde_mm_xor_si128(c2[2997],simde_mm_xor_si128(c2[6961],simde_mm_xor_si128(c2[4582],simde_mm_xor_si128(c2[12533],simde_mm_xor_si128(c2[10960],simde_mm_xor_si128(c2[648],simde_mm_xor_si128(c2[7033],simde_mm_xor_si128(c2[10197],simde_mm_xor_si128(c2[7032],simde_mm_xor_si128(c2[7852],simde_mm_xor_si128(c2[8654],simde_mm_xor_si128(c2[1519],simde_mm_xor_si128(c2[4732],simde_mm_xor_si128(c2[6307],c2[10266]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 25
+     d2[450]=simde_mm_xor_si128(c2[10344],simde_mm_xor_si128(c2[216],simde_mm_xor_si128(c2[252],c2[6059])));
+
+//row: 26
+     d2[468]=simde_mm_xor_si128(c2[0],simde_mm_xor_si128(c2[72],simde_mm_xor_si128(c2[144],c2[4517])));
+
+//row: 27
+     d2[486]=simde_mm_xor_si128(c2[10334],simde_mm_xor_si128(c2[3386],c2[6628]));
+
+//row: 28
+     d2[504]=simde_mm_xor_si128(c2[2393],simde_mm_xor_si128(c2[144],simde_mm_xor_si128(c2[9405],c2[11847])));
+
+//row: 29
+     d2[522]=simde_mm_xor_si128(c2[11103],simde_mm_xor_si128(c2[11101],simde_mm_xor_si128(c2[10296],simde_mm_xor_si128(c2[3974],simde_mm_xor_si128(c2[4766],simde_mm_xor_si128(c2[843],simde_mm_xor_si128(c2[1633],simde_mm_xor_si128(c2[8759],simde_mm_xor_si128(c2[9551],simde_mm_xor_si128(c2[36],simde_mm_xor_si128(c2[5630],simde_mm_xor_si128(c2[10380],simde_mm_xor_si128(c2[5632],simde_mm_xor_si128(c2[4082],simde_mm_xor_si128(c2[8044],simde_mm_xor_si128(c2[121],simde_mm_xor_si128(c2[913],simde_mm_xor_si128(c2[8856],simde_mm_xor_si128(c2[155],simde_mm_xor_si128(c2[4905],simde_mm_xor_si128(c2[5697],simde_mm_xor_si128(c2[4948],simde_mm_xor_si128(c2[8904],simde_mm_xor_si128(c2[11285],simde_mm_xor_si128(c2[12098],simde_mm_xor_si128(c2[4191],simde_mm_xor_si128(c2[9727],simde_mm_xor_si128(c2[10519],simde_mm_xor_si128(c2[5803],simde_mm_xor_si128(c2[4228],simde_mm_xor_si128(c2[11350],simde_mm_xor_si128(c2[12142],simde_mm_xor_si128(c2[4263],simde_mm_xor_si128(c2[8214],simde_mm_xor_si128(c2[12182],simde_mm_xor_si128(c2[303],simde_mm_xor_si128(c2[6664],simde_mm_xor_si128(c2[334],simde_mm_xor_si128(c2[9038],simde_mm_xor_si128(c2[9083],simde_mm_xor_si128(c2[11450],simde_mm_xor_si128(c2[2741],simde_mm_xor_si128(c2[3533],simde_mm_xor_si128(c2[12285],simde_mm_xor_si128(c2[6741],simde_mm_xor_si128(c2[9912],simde_mm_xor_si128(c2[10704],simde_mm_xor_si128(c2[2824],simde_mm_xor_si128(c2[8355],simde_mm_xor_si128(c2[2814],simde_mm_xor_si128(c2[3606],simde_mm_xor_si128(c2[11565],simde_mm_xor_si128(c2[4435],simde_mm_xor_si128(c2[11560],simde_mm_xor_si128(c2[12352],simde_mm_xor_si128(c2[2887],simde_mm_xor_si128(c2[521],simde_mm_xor_si128(c2[10809],simde_mm_xor_si128(c2[11601],simde_mm_xor_si128(c2[3683],simde_mm_xor_si128(c2[3725],simde_mm_xor_si128(c2[2928],simde_mm_xor_si128(c2[9254],simde_mm_xor_si128(c2[9299],simde_mm_xor_si128(c2[7716],simde_mm_xor_si128(c2[10097],simde_mm_xor_si128(c2[10889],simde_mm_xor_si128(c2[11711],simde_mm_xor_si128(c2[3004],simde_mm_xor_si128(c2[12504],simde_mm_xor_si128(c2[625],simde_mm_xor_si128(c2[8576],simde_mm_xor_si128(c2[6985],simde_mm_xor_si128(c2[8570],simde_mm_xor_si128(c2[9362],simde_mm_xor_si128(c2[648],simde_mm_xor_si128(c2[3076],simde_mm_xor_si128(c2[6240],simde_mm_xor_si128(c2[3075],simde_mm_xor_si128(c2[3895],simde_mm_xor_si128(c2[4697],simde_mm_xor_si128(c2[9441],simde_mm_xor_si128(c2[10233],simde_mm_xor_si128(c2[757],simde_mm_xor_si128(c2[2350],simde_mm_xor_si128(c2[5517],c2[6309]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 30
+     d2[540]=simde_mm_xor_si128(c2[798],simde_mm_xor_si128(c2[796],simde_mm_xor_si128(c2[11888],simde_mm_xor_si128(c2[9],simde_mm_xor_si128(c2[6340],simde_mm_xor_si128(c2[7132],simde_mm_xor_si128(c2[0],simde_mm_xor_si128(c2[3209],simde_mm_xor_si128(c2[3207],simde_mm_xor_si128(c2[3999],simde_mm_xor_si128(c2[11125],simde_mm_xor_si128(c2[11917],simde_mm_xor_si128(c2[7996],simde_mm_xor_si128(c2[75],simde_mm_xor_si128(c2[7206],simde_mm_xor_si128(c2[7998],simde_mm_xor_si128(c2[6448],simde_mm_xor_si128(c2[10410],simde_mm_xor_si128(c2[2487],simde_mm_xor_si128(c2[3279],simde_mm_xor_si128(c2[11240],simde_mm_xor_si128(c2[1729],simde_mm_xor_si128(c2[2521],simde_mm_xor_si128(c2[7289],simde_mm_xor_si128(c2[8081],simde_mm_xor_si128(c2[7314],simde_mm_xor_si128(c2[11270],simde_mm_xor_si128(c2[188],simde_mm_xor_si128(c2[980],simde_mm_xor_si128(c2[1811],simde_mm_xor_si128(c2[5765],simde_mm_xor_si128(c2[6557],simde_mm_xor_si128(c2[12111],simde_mm_xor_si128(c2[232],simde_mm_xor_si128(c2[8187],simde_mm_xor_si128(c2[5802],simde_mm_xor_si128(c2[6594],simde_mm_xor_si128(c2[1045],simde_mm_xor_si128(c2[1837],simde_mm_xor_si128(c2[6629],simde_mm_xor_si128(c2[9806],simde_mm_xor_si128(c2[10598],simde_mm_xor_si128(c2[1877],simde_mm_xor_si128(c2[2669],simde_mm_xor_si128(c2[9048],simde_mm_xor_si128(c2[2700],simde_mm_xor_si128(c2[10630],simde_mm_xor_si128(c2[11422],simde_mm_xor_si128(c2[11449],simde_mm_xor_si128(c2[371],simde_mm_xor_si128(c2[1163],simde_mm_xor_si128(c2[5125],simde_mm_xor_si128(c2[5917],simde_mm_xor_si128(c2[8285],simde_mm_xor_si128(c2[1980],simde_mm_xor_si128(c2[9125],simde_mm_xor_si128(c2[12278],simde_mm_xor_si128(c2[399],simde_mm_xor_si128(c2[5190],simde_mm_xor_si128(c2[10739],simde_mm_xor_si128(c2[5198],simde_mm_xor_si128(c2[5990],simde_mm_xor_si128(c2[1260],simde_mm_xor_si128(c2[6027],simde_mm_xor_si128(c2[6819],simde_mm_xor_si128(c2[1273],simde_mm_xor_si128(c2[2065],simde_mm_xor_si128(c2[9987],simde_mm_xor_si128(c2[5271],simde_mm_xor_si128(c2[2095],simde_mm_xor_si128(c2[2887],simde_mm_xor_si128(c2[504],simde_mm_xor_si128(c2[1296],simde_mm_xor_si128(c2[6091],simde_mm_xor_si128(c2[5294],simde_mm_xor_si128(c2[10846],simde_mm_xor_si128(c2[11638],simde_mm_xor_si128(c2[11665],simde_mm_xor_si128(c2[10082],simde_mm_xor_si128(c2[12463],simde_mm_xor_si128(c2[584],simde_mm_xor_si128(c2[1406],simde_mm_xor_si128(c2[4578],simde_mm_xor_si128(c2[5370],simde_mm_xor_si128(c2[2199],simde_mm_xor_si128(c2[2991],simde_mm_xor_si128(c2[10960],simde_mm_xor_si128(c2[8577],simde_mm_xor_si128(c2[9369],simde_mm_xor_si128(c2[10954],simde_mm_xor_si128(c2[11746],simde_mm_xor_si128(c2[5442],simde_mm_xor_si128(c2[8606],simde_mm_xor_si128(c2[4649],simde_mm_xor_si128(c2[5441],simde_mm_xor_si128(c2[6279],simde_mm_xor_si128(c2[6271],simde_mm_xor_si128(c2[7063],simde_mm_xor_si128(c2[11825],simde_mm_xor_si128(c2[12617],simde_mm_xor_si128(c2[3141],simde_mm_xor_si128(c2[4716],simde_mm_xor_si128(c2[7901],c2[8693])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 31
+     d2[558]=simde_mm_xor_si128(c2[7133],simde_mm_xor_si128(c2[5558],simde_mm_xor_si128(c2[7131],simde_mm_xor_si128(c2[5556],simde_mm_xor_si128(c2[6344],simde_mm_xor_si128(c2[4769],simde_mm_xor_si128(c2[796],simde_mm_xor_si128(c2[11100],simde_mm_xor_si128(c2[11892],simde_mm_xor_si128(c2[9544],simde_mm_xor_si128(c2[7969],simde_mm_xor_si128(c2[10334],simde_mm_xor_si128(c2[8759],simde_mm_xor_si128(c2[5581],simde_mm_xor_si128(c2[3214],simde_mm_xor_si128(c2[4006],simde_mm_xor_si128(c2[45],simde_mm_xor_si128(c2[1660],simde_mm_xor_si128(c2[85],simde_mm_xor_si128(c2[6410],simde_mm_xor_si128(c2[4835],simde_mm_xor_si128(c2[1662],simde_mm_xor_si128(c2[87],simde_mm_xor_si128(c2[112],simde_mm_xor_si128(c2[11208],simde_mm_xor_si128(c2[4074],simde_mm_xor_si128(c2[2499],simde_mm_xor_si128(c2[9614],simde_mm_xor_si128(c2[7247],simde_mm_xor_si128(c2[8039],simde_mm_xor_si128(c2[4904],simde_mm_xor_si128(c2[3329],simde_mm_xor_si128(c2[8856],simde_mm_xor_si128(c2[7281],simde_mm_xor_si128(c2[1745],simde_mm_xor_si128(c2[12031],simde_mm_xor_si128(c2[152],simde_mm_xor_si128(c2[978],simde_mm_xor_si128(c2[12074],simde_mm_xor_si128(c2[4934],simde_mm_xor_si128(c2[3359],simde_mm_xor_si128(c2[7315],simde_mm_xor_si128(c2[5740],simde_mm_xor_si128(c2[8146],simde_mm_xor_si128(c2[6553],simde_mm_xor_si128(c2[221],simde_mm_xor_si128(c2[11317],simde_mm_xor_si128(c2[6567],simde_mm_xor_si128(c2[4182],simde_mm_xor_si128(c2[4974],simde_mm_xor_si128(c2[1851],simde_mm_xor_si128(c2[258],simde_mm_xor_si128(c2[258],simde_mm_xor_si128(c2[11354],simde_mm_xor_si128(c2[8172],simde_mm_xor_si128(c2[5805],simde_mm_xor_si128(c2[6597],simde_mm_xor_si128(c2[261],simde_mm_xor_si128(c2[293],simde_mm_xor_si128(c2[11389],simde_mm_xor_si128(c2[4262],simde_mm_xor_si128(c2[2669],simde_mm_xor_si128(c2[9004],simde_mm_xor_si128(c2[6637],simde_mm_xor_si128(c2[7429],simde_mm_xor_si128(c2[2712],simde_mm_xor_si128(c2[1119],simde_mm_xor_si128(c2[9053],simde_mm_xor_si128(c2[7460],simde_mm_xor_si128(c2[5086],simde_mm_xor_si128(c2[3493],simde_mm_xor_si128(c2[5113],simde_mm_xor_si128(c2[3538],simde_mm_xor_si128(c2[7498],simde_mm_xor_si128(c2[5905],simde_mm_xor_si128(c2[12252],simde_mm_xor_si128(c2[9867],simde_mm_xor_si128(c2[10659],simde_mm_xor_si128(c2[8333],simde_mm_xor_si128(c2[6740],simde_mm_xor_si128(c2[2789],simde_mm_xor_si128(c2[1196],simde_mm_xor_si128(c2[6734],simde_mm_xor_si128(c2[4367],simde_mm_xor_si128(c2[5159],simde_mm_xor_si128(c2[11525],simde_mm_xor_si128(c2[9950],simde_mm_xor_si128(c2[4403],simde_mm_xor_si128(c2[2810],simde_mm_xor_si128(c2[12325],simde_mm_xor_si128(c2[9940],simde_mm_xor_si128(c2[10732],simde_mm_xor_si128(c2[7613],simde_mm_xor_si128(c2[6020],simde_mm_xor_si128(c2[483],simde_mm_xor_si128(c2[11561],simde_mm_xor_si128(c2[8400],simde_mm_xor_si128(c2[6015],simde_mm_xor_si128(c2[6807],simde_mm_xor_si128(c2[11606],simde_mm_xor_si128(c2[10013],simde_mm_xor_si128(c2[9222],simde_mm_xor_si128(c2[7647],simde_mm_xor_si128(c2[7649],simde_mm_xor_si128(c2[5264],simde_mm_xor_si128(c2[6056],simde_mm_xor_si128(c2[12426],simde_mm_xor_si128(c2[10851],simde_mm_xor_si128(c2[11629],simde_mm_xor_si128(c2[10054],simde_mm_xor_si128(c2[5302],simde_mm_xor_si128(c2[3709],simde_mm_xor_si128(c2[5329],simde_mm_xor_si128(c2[3754],simde_mm_xor_si128(c2[3746],simde_mm_xor_si128(c2[2171],simde_mm_xor_si128(c2[6919],simde_mm_xor_si128(c2[4552],simde_mm_xor_si128(c2[5344],simde_mm_xor_si128(c2[7741],simde_mm_xor_si128(c2[6166],simde_mm_xor_si128(c2[11705],simde_mm_xor_si128(c2[10130],simde_mm_xor_si128(c2[9326],simde_mm_xor_si128(c2[6959],simde_mm_xor_si128(c2[7751],simde_mm_xor_si128(c2[4624],simde_mm_xor_si128(c2[3031],simde_mm_xor_si128(c2[3033],simde_mm_xor_si128(c2[1440],simde_mm_xor_si128(c2[5410],simde_mm_xor_si128(c2[3025],simde_mm_xor_si128(c2[3817],simde_mm_xor_si128(c2[11777],simde_mm_xor_si128(c2[10202],simde_mm_xor_si128(c2[2270],simde_mm_xor_si128(c2[695],simde_mm_xor_si128(c2[11776],simde_mm_xor_si128(c2[10201],simde_mm_xor_si128(c2[12614],simde_mm_xor_si128(c2[11021],simde_mm_xor_si128(c2[727],simde_mm_xor_si128(c2[11823],simde_mm_xor_si128(c2[6281],simde_mm_xor_si128(c2[3896],simde_mm_xor_si128(c2[4688],simde_mm_xor_si128(c2[9476],simde_mm_xor_si128(c2[7901],simde_mm_xor_si128(c2[11069],simde_mm_xor_si128(c2[9476],simde_mm_xor_si128(c2[2357],simde_mm_xor_si128(c2[12643],c2[764]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 32
+     d2[576]=simde_mm_xor_si128(c2[4757],simde_mm_xor_si128(c2[4755],simde_mm_xor_si128(c2[3176],simde_mm_xor_si128(c2[3968],simde_mm_xor_si128(c2[10299],simde_mm_xor_si128(c2[11091],simde_mm_xor_si128(c2[0],simde_mm_xor_si128(c2[7168],simde_mm_xor_si128(c2[7166],simde_mm_xor_si128(c2[7958],simde_mm_xor_si128(c2[2413],simde_mm_xor_si128(c2[3205],simde_mm_xor_si128(c2[11955],simde_mm_xor_si128(c2[4034],simde_mm_xor_si128(c2[11165],simde_mm_xor_si128(c2[11957],simde_mm_xor_si128(c2[10407],simde_mm_xor_si128(c2[1698],simde_mm_xor_si128(c2[6446],simde_mm_xor_si128(c2[7238],simde_mm_xor_si128(c2[2528],simde_mm_xor_si128(c2[5688],simde_mm_xor_si128(c2[6480],simde_mm_xor_si128(c2[11248],simde_mm_xor_si128(c2[12040],simde_mm_xor_si128(c2[11273],simde_mm_xor_si128(c2[2558],simde_mm_xor_si128(c2[4147],simde_mm_xor_si128(c2[4939],simde_mm_xor_si128(c2[5770],simde_mm_xor_si128(c2[9724],simde_mm_xor_si128(c2[10516],simde_mm_xor_si128(c2[3399],simde_mm_xor_si128(c2[4191],simde_mm_xor_si128(c2[12146],simde_mm_xor_si128(c2[9761],simde_mm_xor_si128(c2[10553],simde_mm_xor_si128(c2[5004],simde_mm_xor_si128(c2[5796],simde_mm_xor_si128(c2[10588],simde_mm_xor_si128(c2[1094],simde_mm_xor_si128(c2[1886],simde_mm_xor_si128(c2[5836],simde_mm_xor_si128(c2[6628],simde_mm_xor_si128(c2[336],simde_mm_xor_si128(c2[6677],simde_mm_xor_si128(c2[1918],simde_mm_xor_si128(c2[2710],simde_mm_xor_si128(c2[2737],simde_mm_xor_si128(c2[4330],simde_mm_xor_si128(c2[5122],simde_mm_xor_si128(c2[9084],simde_mm_xor_si128(c2[9876],simde_mm_xor_si128(c2[5957],simde_mm_xor_si128(c2[413],simde_mm_xor_si128(c2[3566],simde_mm_xor_si128(c2[4358],simde_mm_xor_si128(c2[9149],simde_mm_xor_si128(c2[2027],simde_mm_xor_si128(c2[9157],simde_mm_xor_si128(c2[9949],simde_mm_xor_si128(c2[2821],simde_mm_xor_si128(c2[5237],simde_mm_xor_si128(c2[9986],simde_mm_xor_si128(c2[10778],simde_mm_xor_si128(c2[5232],simde_mm_xor_si128(c2[6024],simde_mm_xor_si128(c2[9230],simde_mm_xor_si128(c2[6054],simde_mm_xor_si128(c2[6846],simde_mm_xor_si128(c2[4481],simde_mm_xor_si128(c2[5273],simde_mm_xor_si128(c2[3674],simde_mm_xor_si128(c2[10050],simde_mm_xor_si128(c2[9253],simde_mm_xor_si128(c2[2134],simde_mm_xor_si128(c2[2926],simde_mm_xor_si128(c2[2953],simde_mm_xor_si128(c2[1370],simde_mm_xor_si128(c2[3751],simde_mm_xor_si128(c2[4543],simde_mm_xor_si128(c2[5365],simde_mm_xor_si128(c2[8537],simde_mm_xor_si128(c2[9329],simde_mm_xor_si128(c2[6158],simde_mm_xor_si128(c2[6950],simde_mm_xor_si128(c2[2248],simde_mm_xor_si128(c2[12536],simde_mm_xor_si128(c2[657],simde_mm_xor_si128(c2[2242],simde_mm_xor_si128(c2[3034],simde_mm_xor_si128(c2[9401],simde_mm_xor_si128(c2[12565],simde_mm_xor_si128(c2[8608],simde_mm_xor_si128(c2[9400],simde_mm_xor_si128(c2[10238],simde_mm_xor_si128(c2[10230],simde_mm_xor_si128(c2[11022],simde_mm_xor_si128(c2[3113],simde_mm_xor_si128(c2[3905],simde_mm_xor_si128(c2[7100],simde_mm_xor_si128(c2[8693],simde_mm_xor_si128(c2[11860],c2[12652])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 33
+     d2[594]=simde_mm_xor_si128(c2[36],simde_mm_xor_si128(c2[72],simde_mm_xor_si128(c2[9904],c2[2341])));
+
+//row: 34
+     d2[612]=simde_mm_xor_si128(c2[3972],simde_mm_xor_si128(c2[252],simde_mm_xor_si128(c2[10050],c2[612])));
+
+//row: 35
+     d2[630]=simde_mm_xor_si128(c2[8],simde_mm_xor_si128(c2[6],simde_mm_xor_si128(c2[11890],simde_mm_xor_si128(c2[6342],simde_mm_xor_si128(c2[2419],simde_mm_xor_si128(c2[3209],simde_mm_xor_si128(c2[11127],simde_mm_xor_si128(c2[5592],simde_mm_xor_si128(c2[7206],simde_mm_xor_si128(c2[11956],simde_mm_xor_si128(c2[7208],simde_mm_xor_si128(c2[5658],simde_mm_xor_si128(c2[9620],simde_mm_xor_si128(c2[2489],simde_mm_xor_si128(c2[10450],simde_mm_xor_si128(c2[1731],simde_mm_xor_si128(c2[7273],simde_mm_xor_si128(c2[6524],simde_mm_xor_si128(c2[10480],simde_mm_xor_si128(c2[190],simde_mm_xor_si128(c2[1021],simde_mm_xor_si128(c2[5767],simde_mm_xor_si128(c2[12113],simde_mm_xor_si128(c2[4985],simde_mm_xor_si128(c2[7397],simde_mm_xor_si128(c2[5804],simde_mm_xor_si128(c2[1047],simde_mm_xor_si128(c2[5839],simde_mm_xor_si128(c2[9808],simde_mm_xor_si128(c2[1879],simde_mm_xor_si128(c2[8258],simde_mm_xor_si128(c2[1910],simde_mm_xor_si128(c2[10632],simde_mm_xor_si128(c2[10659],simde_mm_xor_si128(c2[373],simde_mm_xor_si128(c2[5127],simde_mm_xor_si128(c2[1190],simde_mm_xor_si128(c2[8317],simde_mm_xor_si128(c2[12280],simde_mm_xor_si128(c2[4400],simde_mm_xor_si128(c2[9949],simde_mm_xor_si128(c2[5200],simde_mm_xor_si128(c2[432],simde_mm_xor_si128(c2[470],simde_mm_xor_si128(c2[6029],simde_mm_xor_si128(c2[1275],simde_mm_xor_si128(c2[4481],simde_mm_xor_si128(c2[2097],simde_mm_xor_si128(c2[506],simde_mm_xor_si128(c2[5301],simde_mm_xor_si128(c2[4504],simde_mm_xor_si128(c2[10848],simde_mm_xor_si128(c2[10875],simde_mm_xor_si128(c2[9292],simde_mm_xor_si128(c2[12465],simde_mm_xor_si128(c2[616],simde_mm_xor_si128(c2[4580],simde_mm_xor_si128(c2[2201],simde_mm_xor_si128(c2[10152],simde_mm_xor_si128(c2[8579],simde_mm_xor_si128(c2[10956],simde_mm_xor_si128(c2[4652],simde_mm_xor_si128(c2[7816],simde_mm_xor_si128(c2[4651],simde_mm_xor_si128(c2[5489],simde_mm_xor_si128(c2[6273],simde_mm_xor_si128(c2[11809],simde_mm_xor_si128(c2[2351],simde_mm_xor_si128(c2[3926],c2[7885])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 36
+     d2[648]=simde_mm_xor_si128(c2[6349],simde_mm_xor_si128(c2[505],simde_mm_xor_si128(c2[540],c2[648])));
+
+//row: 37
+     d2[666]=simde_mm_xor_si128(c2[2389],simde_mm_xor_si128(c2[3181],simde_mm_xor_si128(c2[3179],simde_mm_xor_si128(c2[2392],simde_mm_xor_si128(c2[9515],simde_mm_xor_si128(c2[4800],simde_mm_xor_si128(c2[5592],simde_mm_xor_si128(c2[6382],simde_mm_xor_si128(c2[1629],simde_mm_xor_si128(c2[6376],simde_mm_xor_si128(c2[9587],simde_mm_xor_si128(c2[10379],simde_mm_xor_si128(c2[2458],simde_mm_xor_si128(c2[10381],simde_mm_xor_si128(c2[8039],simde_mm_xor_si128(c2[8831],simde_mm_xor_si128(c2[122],simde_mm_xor_si128(c2[5662],simde_mm_xor_si128(c2[952],simde_mm_xor_si128(c2[4904],simde_mm_xor_si128(c2[10446],simde_mm_xor_si128(c2[8905],simde_mm_xor_si128(c2[9697],simde_mm_xor_si128(c2[982],simde_mm_xor_si128(c2[3363],simde_mm_xor_si128(c2[3384],simde_mm_xor_si128(c2[4176],simde_mm_xor_si128(c2[8940],simde_mm_xor_si128(c2[2597],simde_mm_xor_si128(c2[10552],simde_mm_xor_si128(c2[8977],simde_mm_xor_si128(c2[4220],simde_mm_xor_si128(c2[9012],simde_mm_xor_si128(c2[292],simde_mm_xor_si128(c2[5052],simde_mm_xor_si128(c2[10621],simde_mm_xor_si128(c2[11413],simde_mm_xor_si128(c2[5083],simde_mm_xor_si128(c2[1116],simde_mm_xor_si128(c2[369],simde_mm_xor_si128(c2[1161],simde_mm_xor_si128(c2[3528],simde_mm_xor_si128(c2[8282],simde_mm_xor_si128(c2[3571],simde_mm_xor_si128(c2[4363],simde_mm_xor_si128(c2[11490],simde_mm_xor_si128(c2[2782],simde_mm_xor_si128(c2[6781],simde_mm_xor_si128(c2[7573],simde_mm_xor_si128(c2[433],simde_mm_xor_si128(c2[8355],simde_mm_xor_si128(c2[2851],simde_mm_xor_si128(c2[3643],simde_mm_xor_si128(c2[9184],simde_mm_xor_si128(c2[4430],simde_mm_xor_si128(c2[477],simde_mm_xor_si128(c2[7636],simde_mm_xor_si128(c2[5270],simde_mm_xor_si128(c2[3679],simde_mm_xor_si128(c2[7682],simde_mm_xor_si128(c2[8474],simde_mm_xor_si128(c2[7677],simde_mm_xor_si128(c2[1332],simde_mm_xor_si128(c2[585],simde_mm_xor_si128(c2[1377],simde_mm_xor_si128(c2[12465],simde_mm_xor_si128(c2[2967],simde_mm_xor_si128(c2[3789],simde_mm_xor_si128(c2[7753],simde_mm_xor_si128(c2[5374],simde_mm_xor_si128(c2[12533],simde_mm_xor_si128(c2[654],simde_mm_xor_si128(c2[11752],simde_mm_xor_si128(c2[1440],simde_mm_xor_si128(c2[7033],simde_mm_xor_si128(c2[7825],simde_mm_xor_si128(c2[10989],simde_mm_xor_si128(c2[7824],simde_mm_xor_si128(c2[7852],simde_mm_xor_si128(c2[8644],simde_mm_xor_si128(c2[9446],simde_mm_xor_si128(c2[2311],simde_mm_xor_si128(c2[4732],simde_mm_xor_si128(c2[5524],simde_mm_xor_si128(c2[7099],c2[11058])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 38
+     d2[684]=simde_mm_xor_si128(c2[11099],simde_mm_xor_si128(c2[324],simde_mm_xor_si128(c2[360],c2[432])));
+
+//row: 39
+     d2[702]=simde_mm_xor_si128(c2[7173],simde_mm_xor_si128(c2[108],simde_mm_xor_si128(c2[4222],c2[4651])));
+
+//row: 40
+     d2[720]=simde_mm_xor_si128(c2[6349],simde_mm_xor_si128(c2[297],c2[2196]));
+
+//row: 41
+     d2[738]=simde_mm_xor_si128(c2[36],simde_mm_xor_si128(c2[108],simde_mm_xor_si128(c2[324],c2[6203])));
+
+//row: 42
+     d2[756]=simde_mm_xor_si128(c2[5559],simde_mm_xor_si128(c2[5557],simde_mm_xor_si128(c2[3960],simde_mm_xor_si128(c2[4752],simde_mm_xor_si128(c2[11101],simde_mm_xor_si128(c2[11893],simde_mm_xor_si128(c2[8713],simde_mm_xor_si128(c2[7970],simde_mm_xor_si128(c2[7968],simde_mm_xor_si128(c2[8760],simde_mm_xor_si128(c2[3215],simde_mm_xor_si128(c2[4007],simde_mm_xor_si128(c2[86],simde_mm_xor_si128(c2[4836],simde_mm_xor_si128(c2[11967],simde_mm_xor_si128(c2[88],simde_mm_xor_si128(c2[11209],simde_mm_xor_si128(c2[2500],simde_mm_xor_si128(c2[7248],simde_mm_xor_si128(c2[8040],simde_mm_xor_si128(c2[3312],simde_mm_xor_si128(c2[6490],simde_mm_xor_si128(c2[7282],simde_mm_xor_si128(c2[12032],simde_mm_xor_si128(c2[153],simde_mm_xor_si128(c2[144],simde_mm_xor_si128(c2[12075],simde_mm_xor_si128(c2[3360],simde_mm_xor_si128(c2[4949],simde_mm_xor_si128(c2[5741],simde_mm_xor_si128(c2[6554],simde_mm_xor_si128(c2[10526],simde_mm_xor_si128(c2[11318],simde_mm_xor_si128(c2[4183],simde_mm_xor_si128(c2[4975],simde_mm_xor_si128(c2[259],simde_mm_xor_si128(c2[10563],simde_mm_xor_si128(c2[11355],simde_mm_xor_si128(c2[5806],simde_mm_xor_si128(c2[6598],simde_mm_xor_si128(c2[11390],simde_mm_xor_si128(c2[1878],simde_mm_xor_si128(c2[2670],simde_mm_xor_si128(c2[6638],simde_mm_xor_si128(c2[7430],simde_mm_xor_si128(c2[1120],simde_mm_xor_si128(c2[7461],simde_mm_xor_si128(c2[2702],simde_mm_xor_si128(c2[3494],simde_mm_xor_si128(c2[3539],simde_mm_xor_si128(c2[5114],simde_mm_xor_si128(c2[5906],simde_mm_xor_si128(c2[9868],simde_mm_xor_si128(c2[10660],simde_mm_xor_si128(c2[6741],simde_mm_xor_si128(c2[1197],simde_mm_xor_si128(c2[4368],simde_mm_xor_si128(c2[5160],simde_mm_xor_si128(c2[9951],simde_mm_xor_si128(c2[2811],simde_mm_xor_si128(c2[9941],simde_mm_xor_si128(c2[10733],simde_mm_xor_si128(c2[6021],simde_mm_xor_si128(c2[10770],simde_mm_xor_si128(c2[11562],simde_mm_xor_si128(c2[6016],simde_mm_xor_si128(c2[6808],simde_mm_xor_si128(c2[10014],simde_mm_xor_si128(c2[6856],simde_mm_xor_si128(c2[7648],simde_mm_xor_si128(c2[5265],simde_mm_xor_si128(c2[6057],simde_mm_xor_si128(c2[10852],simde_mm_xor_si128(c2[10055],simde_mm_xor_si128(c2[2918],simde_mm_xor_si128(c2[3710],simde_mm_xor_si128(c2[3755],simde_mm_xor_si128(c2[2172],simde_mm_xor_si128(c2[4553],simde_mm_xor_si128(c2[5345],simde_mm_xor_si128(c2[6167],simde_mm_xor_si128(c2[9339],simde_mm_xor_si128(c2[10131],simde_mm_xor_si128(c2[6960],simde_mm_xor_si128(c2[7752],simde_mm_xor_si128(c2[3032],simde_mm_xor_si128(c2[649],simde_mm_xor_si128(c2[1441],simde_mm_xor_si128(c2[3026],simde_mm_xor_si128(c2[3818],simde_mm_xor_si128(c2[10203],simde_mm_xor_si128(c2[696],simde_mm_xor_si128(c2[9410],simde_mm_xor_si128(c2[10202],simde_mm_xor_si128(c2[11022],simde_mm_xor_si128(c2[11032],simde_mm_xor_si128(c2[11824],simde_mm_xor_si128(c2[3897],simde_mm_xor_si128(c2[4689],simde_mm_xor_si128(c2[7884],simde_mm_xor_si128(c2[9477],simde_mm_xor_si128(c2[12644],c2[765]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 43
+     d2[774]=simde_mm_xor_si128(c2[1596],simde_mm_xor_si128(c2[1594],simde_mm_xor_si128(c2[807],simde_mm_xor_si128(c2[7138],simde_mm_xor_si128(c2[7930],simde_mm_xor_si128(c2[4007],simde_mm_xor_si128(c2[4797],simde_mm_xor_si128(c2[11923],simde_mm_xor_si128(c2[44],simde_mm_xor_si128(c2[3207],simde_mm_xor_si128(c2[8794],simde_mm_xor_si128(c2[873],simde_mm_xor_si128(c2[8796],simde_mm_xor_si128(c2[7246],simde_mm_xor_si128(c2[11208],simde_mm_xor_si128(c2[3285],simde_mm_xor_si128(c2[4077],simde_mm_xor_si128(c2[12038],simde_mm_xor_si128(c2[3319],simde_mm_xor_si128(c2[8069],simde_mm_xor_si128(c2[8861],simde_mm_xor_si128(c2[8112],simde_mm_xor_si128(c2[12068],simde_mm_xor_si128(c2[1778],simde_mm_xor_si128(c2[2609],simde_mm_xor_si128(c2[7355],simde_mm_xor_si128(c2[220],simde_mm_xor_si128(c2[1012],simde_mm_xor_si128(c2[8967],simde_mm_xor_si128(c2[7392],simde_mm_xor_si128(c2[1843],simde_mm_xor_si128(c2[2635],simde_mm_xor_si128(c2[7427],simde_mm_xor_si128(c2[11378],simde_mm_xor_si128(c2[2675],simde_mm_xor_si128(c2[3467],simde_mm_xor_si128(c2[9828],simde_mm_xor_si128(c2[3498],simde_mm_xor_si128(c2[12220],simde_mm_xor_si128(c2[12247],simde_mm_xor_si128(c2[1961],simde_mm_xor_si128(c2[5905],simde_mm_xor_si128(c2[6697],simde_mm_xor_si128(c2[2778],simde_mm_xor_si128(c2[9905],simde_mm_xor_si128(c2[405],simde_mm_xor_si128(c2[1197],simde_mm_xor_si128(c2[5988],simde_mm_xor_si128(c2[11537],simde_mm_xor_si128(c2[5978],simde_mm_xor_si128(c2[6770],simde_mm_xor_si128(c2[2058],simde_mm_xor_si128(c2[7599],simde_mm_xor_si128(c2[2053],simde_mm_xor_si128(c2[2845],simde_mm_xor_si128(c2[6051],simde_mm_xor_si128(c2[3685],simde_mm_xor_si128(c2[1302],simde_mm_xor_si128(c2[2094],simde_mm_xor_si128(c2[6889],simde_mm_xor_si128(c2[6092],simde_mm_xor_si128(c2[12436],simde_mm_xor_si128(c2[12463],simde_mm_xor_si128(c2[10880],simde_mm_xor_si128(c2[590],simde_mm_xor_si128(c2[1382],simde_mm_xor_si128(c2[2967],simde_mm_xor_si128(c2[2204],simde_mm_xor_si128(c2[6168],simde_mm_xor_si128(c2[2997],simde_mm_xor_si128(c2[3789],simde_mm_xor_si128(c2[11740],simde_mm_xor_si128(c2[10167],simde_mm_xor_si128(c2[11752],simde_mm_xor_si128(c2[12544],simde_mm_xor_si128(c2[648],simde_mm_xor_si128(c2[6240],simde_mm_xor_si128(c2[9404],simde_mm_xor_si128(c2[6239],simde_mm_xor_si128(c2[7059],simde_mm_xor_si128(c2[7861],simde_mm_xor_si128(c2[12605],simde_mm_xor_si128(c2[726],simde_mm_xor_si128(c2[3939],simde_mm_xor_si128(c2[5514],simde_mm_xor_si128(c2[8681],c2[9473]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 44
+     d2[792]=simde_mm_xor_si128(c2[9520],simde_mm_xor_si128(c2[9518],simde_mm_xor_si128(c2[8713],simde_mm_xor_si128(c2[3183],simde_mm_xor_si128(c2[1585],simde_mm_xor_si128(c2[11931],simde_mm_xor_si128(c2[50],simde_mm_xor_si128(c2[7968],simde_mm_xor_si128(c2[4047],simde_mm_xor_si128(c2[8797],simde_mm_xor_si128(c2[4049],simde_mm_xor_si128(c2[2499],simde_mm_xor_si128(c2[6461],simde_mm_xor_si128(c2[12001],simde_mm_xor_si128(c2[7273],simde_mm_xor_si128(c2[11243],simde_mm_xor_si128(c2[4114],simde_mm_xor_si128(c2[3365],simde_mm_xor_si128(c2[7321],simde_mm_xor_si128(c2[9684],simde_mm_xor_si128(c2[10515],simde_mm_xor_si128(c2[2608],simde_mm_xor_si128(c2[8936],simde_mm_xor_si128(c2[4220],simde_mm_xor_si128(c2[2645],simde_mm_xor_si128(c2[10559],simde_mm_xor_si128(c2[252],simde_mm_xor_si128(c2[2680],simde_mm_xor_si128(c2[6631],simde_mm_xor_si128(c2[11391],simde_mm_xor_si128(c2[5081],simde_mm_xor_si128(c2[11422],simde_mm_xor_si128(c2[7455],simde_mm_xor_si128(c2[324],simde_mm_xor_si128(c2[7500],simde_mm_xor_si128(c2[9867],simde_mm_xor_si128(c2[1950],simde_mm_xor_si128(c2[10702],simde_mm_xor_si128(c2[5158],simde_mm_xor_si128(c2[9121],simde_mm_xor_si128(c2[1241],simde_mm_xor_si128(c2[6772],simde_mm_xor_si128(c2[2023],simde_mm_xor_si128(c2[9982],simde_mm_xor_si128(c2[2852],simde_mm_xor_si128(c2[10769],simde_mm_xor_si128(c2[1304],simde_mm_xor_si128(c2[11609],simde_mm_xor_si128(c2[10018],simde_mm_xor_si128(c2[2124],simde_mm_xor_si128(c2[1345],simde_mm_xor_si128(c2[7671],simde_mm_xor_si128(c2[7716],simde_mm_xor_si128(c2[6133],simde_mm_xor_si128(c2[9288],simde_mm_xor_si128(c2[10128],simde_mm_xor_si128(c2[1421],simde_mm_xor_si128(c2[11713],simde_mm_xor_si128(c2[6993],simde_mm_xor_si128(c2[5402],simde_mm_xor_si128(c2[7779],simde_mm_xor_si128(c2[1493],simde_mm_xor_si128(c2[4657],simde_mm_xor_si128(c2[1492],simde_mm_xor_si128(c2[2312],simde_mm_xor_si128(c2[3096],simde_mm_xor_si128(c2[8650],simde_mm_xor_si128(c2[11845],simde_mm_xor_si128(c2[767],c2[4726])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 45
+     d2[810]=simde_mm_xor_si128(c2[6382],simde_mm_xor_si128(c2[216],c2[369]));
+  }
+}
+#endif
diff --git a/openair1/PHY/CODING/nrLDPC_encoder/ldpc320_byte.c b/openair1/PHY/CODING/nrLDPC_encoder/ldpc320_byte.c
index be5cbbdad8f2d213b31a031d3722d9dd4c301acf..231c19e504c28d45b24d124bf1a757146ce7c1bc 100644
--- a/openair1/PHY/CODING/nrLDPC_encoder/ldpc320_byte.c
+++ b/openair1/PHY/CODING/nrLDPC_encoder/ldpc320_byte.c
@@ -1,9 +1,10 @@
+#ifdef __AVX2__
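+// Build-dispatch note (editorial sketch, not generator output): under this
+// guard the 256-bit encoder below is compiled only when the compiler defines
+// __AVX2__; otherwise the companion file ldpc320_byte_128.c, guarded with
+// #ifndef __AVX2__, defines the same static ldpc320_byte() entry point using
+// 128-bit simde__m128i, which SIMDE can back with native SSE or ARM NEON.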
 #include "PHY/sse_intrin.h"
 // generated code for Zc=320, byte encoding
 static inline void ldpc320_byte(uint8_t *c,uint8_t *d) {
-  __m256i *csimd=(__m256i *)c,*dsimd=(__m256i *)d;
+  simde__m256i *csimd=(simde__m256i *)c,*dsimd=(simde__m256i *)d;
 
-  __m256i *c2,*d2;
+  simde__m256i *c2,*d2;
 
   int i2;
   for (i2=0; i2<10; i2++) {
@@ -149,3 +150,4 @@ static inline void ldpc320_byte(uint8_t *c,uint8_t *d) {
      d2[450]=simde_mm256_xor_si256(c2[2223],simde_mm256_xor_si256(c2[1887],c2[13403]));
   }
 }
+#endif
diff --git a/openair1/PHY/CODING/nrLDPC_encoder/ldpc320_byte_128.c b/openair1/PHY/CODING/nrLDPC_encoder/ldpc320_byte_128.c
new file mode 100644
index 0000000000000000000000000000000000000000..76ccd4c3f4b30c441007bdb9ab9038c10722bf7a
--- /dev/null
+++ b/openair1/PHY/CODING/nrLDPC_encoder/ldpc320_byte_128.c
@@ -0,0 +1,153 @@
+#ifndef __AVX2__
+#include "PHY/sse_intrin.h"
+// generated code for Zc=320, byte encoding
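+// Reading note (editorial, not generator output): this file is the
+// #ifndef __AVX2__ fallback for ldpc320_byte.c.  It computes the same parity
+// words with 128-bit simde__m128i vectors instead of simde__m256i, so the
+// outer loop below runs 20 iterations where the 256-bit variant needs only
+// 10 to cover the same Zc=320 data.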
+static inline void ldpc320_byte(uint8_t *c,uint8_t *d) {
+  simde__m128i *csimd=(simde__m128i *)c,*dsimd=(simde__m128i *)d;
+
+  simde__m128i *c2,*d2;
+
+  int i2;
+  for (i2=0; i2<20; i2++) {
+     c2=&csimd[i2];
+     d2=&dsimd[i2];
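+// Editorial note: c2/d2 are offset by the current 128-bit lane i2, so the
+// constant indices in the generated rows below are lane-relative word
+// offsets; each "row: N" statement XOR-accumulates the selected c2[] words
+// into a single output word d2[k].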
+
+//row: 0
+     d2[0]=simde_mm_xor_si128(c2[7924],simde_mm_xor_si128(c2[13218],simde_mm_xor_si128(c2[3524],simde_mm_xor_si128(c2[10573],simde_mm_xor_si128(c2[13240],simde_mm_xor_si128(c2[6200],simde_mm_xor_si128(c2[53],simde_mm_xor_si128(c2[6246],simde_mm_xor_si128(c2[5378],simde_mm_xor_si128(c2[85],simde_mm_xor_si128(c2[1003],simde_mm_xor_si128(c2[9801],simde_mm_xor_si128(c2[12441],simde_mm_xor_si128(c2[4576],simde_mm_xor_si128(c2[7217],simde_mm_xor_si128(c2[4572],simde_mm_xor_si128(c2[215],simde_mm_xor_si128(c2[1090],simde_mm_xor_si128(c2[5482],simde_mm_xor_si128(c2[6402],simde_mm_xor_si128(c2[2894],simde_mm_xor_si128(c2[11683],simde_mm_xor_si128(c2[4688],simde_mm_xor_si128(c2[9092],simde_mm_xor_si128(c2[13490],simde_mm_xor_si128(c2[3840],simde_mm_xor_si128(c2[7372],simde_mm_xor_si128(c2[13524],simde_mm_xor_si128(c2[13560],simde_mm_xor_si128(c2[365],simde_mm_xor_si128(c2[6524],simde_mm_xor_si128(c2[2170],simde_mm_xor_si128(c2[9206],simde_mm_xor_si128(c2[8337],simde_mm_xor_si128(c2[6613],simde_mm_xor_si128(c2[1328],simde_mm_xor_si128(c2[13658],simde_mm_xor_si128(c2[4010],simde_mm_xor_si128(c2[11058],simde_mm_xor_si128(c2[11935],simde_mm_xor_si128(c2[4928],simde_mm_xor_si128(c2[6698],simde_mm_xor_si128(c2[4050],simde_mm_xor_si128(c2[11124],simde_mm_xor_si128(c2[10257],simde_mm_xor_si128(c2[4963],simde_mm_xor_si128(c2[9418],simde_mm_xor_si128(c2[9416],simde_mm_xor_si128(c2[12058],simde_mm_xor_si128(c2[12966],simde_mm_xor_si128(c2[7684],simde_mm_xor_si128(c2[11202],simde_mm_xor_si128(c2[3325],simde_mm_xor_si128(c2[7731],simde_mm_xor_si128(c2[11241],simde_mm_xor_si128(c2[1607],simde_mm_xor_si128(c2[6015],simde_mm_xor_si128(c2[12164],simde_mm_xor_si128(c2[761],simde_mm_xor_si128(c2[4296],simde_mm_xor_si128(c2[6054],simde_mm_xor_si128(c2[12251],simde_mm_xor_si128(c2[4337],simde_mm_xor_si128(c2[14019],simde_mm_xor_si128(c2[842],simde_mm_xor_si128(c2[12298],c2[4364]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 1
+     d2[20]=simde_mm_xor_si128(c2[7924],simde_mm_xor_si128(c2[8804],simde_mm_xor_si128(c2[19],simde_mm_xor_si128(c2[4404],simde_mm_xor_si128(c2[11453],simde_mm_xor_si128(c2[13240],simde_mm_xor_si128(c2[41],simde_mm_xor_si128(c2[7080],simde_mm_xor_si128(c2[933],simde_mm_xor_si128(c2[6246],simde_mm_xor_si128(c2[7126],simde_mm_xor_si128(c2[6258],simde_mm_xor_si128(c2[965],simde_mm_xor_si128(c2[1003],simde_mm_xor_si128(c2[1883],simde_mm_xor_si128(c2[10681],simde_mm_xor_si128(c2[13321],simde_mm_xor_si128(c2[5456],simde_mm_xor_si128(c2[8097],simde_mm_xor_si128(c2[5452],simde_mm_xor_si128(c2[215],simde_mm_xor_si128(c2[1095],simde_mm_xor_si128(c2[1970],simde_mm_xor_si128(c2[6362],simde_mm_xor_si128(c2[6402],simde_mm_xor_si128(c2[7282],simde_mm_xor_si128(c2[3774],simde_mm_xor_si128(c2[12563],simde_mm_xor_si128(c2[5568],simde_mm_xor_si128(c2[9972],simde_mm_xor_si128(c2[291],simde_mm_xor_si128(c2[4720],simde_mm_xor_si128(c2[8252],simde_mm_xor_si128(c2[325],simde_mm_xor_si128(c2[13560],simde_mm_xor_si128(c2[361],simde_mm_xor_si128(c2[1245],simde_mm_xor_si128(c2[7404],simde_mm_xor_si128(c2[2170],simde_mm_xor_si128(c2[3050],simde_mm_xor_si128(c2[10086],simde_mm_xor_si128(c2[9217],simde_mm_xor_si128(c2[6613],simde_mm_xor_si128(c2[7493],simde_mm_xor_si128(c2[2208],simde_mm_xor_si128(c2[459],simde_mm_xor_si128(c2[4010],simde_mm_xor_si128(c2[4890],simde_mm_xor_si128(c2[11938],simde_mm_xor_si128(c2[12815],simde_mm_xor_si128(c2[4928],simde_mm_xor_si128(c2[5808],simde_mm_xor_si128(c2[7578],simde_mm_xor_si128(c2[4930],simde_mm_xor_si128(c2[12004],simde_mm_xor_si128(c2[11137],simde_mm_xor_si128(c2[5843],simde_mm_xor_si128(c2[9418],simde_mm_xor_si128(c2[10298],simde_mm_xor_si128(c2[10296],simde_mm_xor_si128(c2[12938],simde_mm_xor_si128(c2[12966],simde_mm_xor_si128(c2[13846],simde_mm_xor_si128(c2[8564],simde_mm_xor_si128(c2[12082],simde_mm_xor_si128(c2[4205],simde_mm_xor_si128(c2[8611],simde_mm_xor_si128(c2[12121],simde_mm_xor_si128(c2[1607],simde_mm_xor_si128(c2[2487],simde_mm_xor_si128(c2[6895],simde_mm_xor_si128(c2[13044],simde_mm_xor_si128(c2[761],simde_mm_xor_si128(c2[1641],simde_mm_xor_si128(c2[5176],simde_mm_xor_si128(c2[6934],simde_mm_xor_si128(c2[12251],simde_mm_xor_si128(c2[13131],simde_mm_xor_si128(c2[5217],simde_mm_xor_si128(c2[800],simde_mm_xor_si128(c2[842],simde_mm_xor_si128(c2[1722],simde_mm_xor_si128(c2[13178],c2[5244])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 2
+     d2[40]=simde_mm_xor_si128(c2[8804],simde_mm_xor_si128(c2[19],simde_mm_xor_si128(c2[3524],simde_mm_xor_si128(c2[4404],simde_mm_xor_si128(c2[10573],simde_mm_xor_si128(c2[11453],simde_mm_xor_si128(c2[41],simde_mm_xor_si128(c2[6200],simde_mm_xor_si128(c2[7080],simde_mm_xor_si128(c2[53],simde_mm_xor_si128(c2[933],simde_mm_xor_si128(c2[7126],simde_mm_xor_si128(c2[6258],simde_mm_xor_si128(c2[85],simde_mm_xor_si128(c2[965],simde_mm_xor_si128(c2[1883],simde_mm_xor_si128(c2[10681],simde_mm_xor_si128(c2[12441],simde_mm_xor_si128(c2[13321],simde_mm_xor_si128(c2[5456],simde_mm_xor_si128(c2[7217],simde_mm_xor_si128(c2[8097],simde_mm_xor_si128(c2[4572],simde_mm_xor_si128(c2[5452],simde_mm_xor_si128(c2[1095],simde_mm_xor_si128(c2[1970],simde_mm_xor_si128(c2[5482],simde_mm_xor_si128(c2[6362],simde_mm_xor_si128(c2[7282],simde_mm_xor_si128(c2[2894],simde_mm_xor_si128(c2[3774],simde_mm_xor_si128(c2[11683],simde_mm_xor_si128(c2[12563],simde_mm_xor_si128(c2[5568],simde_mm_xor_si128(c2[9092],simde_mm_xor_si128(c2[9972],simde_mm_xor_si128(c2[13490],simde_mm_xor_si128(c2[291],simde_mm_xor_si128(c2[4720],simde_mm_xor_si128(c2[7372],simde_mm_xor_si128(c2[8252],simde_mm_xor_si128(c2[13524],simde_mm_xor_si128(c2[325],simde_mm_xor_si128(c2[361],simde_mm_xor_si128(c2[1245],simde_mm_xor_si128(c2[6524],simde_mm_xor_si128(c2[7404],simde_mm_xor_si128(c2[3050],simde_mm_xor_si128(c2[9206],simde_mm_xor_si128(c2[10086],simde_mm_xor_si128(c2[8337],simde_mm_xor_si128(c2[9217],simde_mm_xor_si128(c2[7493],simde_mm_xor_si128(c2[2208],simde_mm_xor_si128(c2[13658],simde_mm_xor_si128(c2[459],simde_mm_xor_si128(c2[4890],simde_mm_xor_si128(c2[11938],simde_mm_xor_si128(c2[11935],simde_mm_xor_si128(c2[12815],simde_mm_xor_si128(c2[5808],simde_mm_xor_si128(c2[6698],simde_mm_xor_si128(c2[7578],simde_mm_xor_si128(c2[4050],simde_mm_xor_si128(c2[4930],simde_mm_xor_si128(c2[12004],simde_mm_xor_si128(c2[10257],simde_mm_xor_si128(c2[11137],simde_mm_xor_si128(c2[4963],simde_mm_xor_si128(c2[5843],simde_mm_xor_si128(c2[10298],simde_mm_xor_si128(c2[10296],simde_mm_xor_si128(c2[12058],simde_mm_xor_si128(c2[12938],simde_mm_xor_si128(c2[13846],simde_mm_xor_si128(c2[8564],simde_mm_xor_si128(c2[11202],simde_mm_xor_si128(c2[12082],simde_mm_xor_si128(c2[4205],simde_mm_xor_si128(c2[7731],simde_mm_xor_si128(c2[8611],simde_mm_xor_si128(c2[11241],simde_mm_xor_si128(c2[12121],simde_mm_xor_si128(c2[2487],simde_mm_xor_si128(c2[6015],simde_mm_xor_si128(c2[6895],simde_mm_xor_si128(c2[12164],simde_mm_xor_si128(c2[13044],simde_mm_xor_si128(c2[1641],simde_mm_xor_si128(c2[5176],simde_mm_xor_si128(c2[6054],simde_mm_xor_si128(c2[6934],simde_mm_xor_si128(c2[13131],simde_mm_xor_si128(c2[4337],simde_mm_xor_si128(c2[5217],simde_mm_xor_si128(c2[14019],simde_mm_xor_si128(c2[800],simde_mm_xor_si128(c2[1722],simde_mm_xor_si128(c2[13178],simde_mm_xor_si128(c2[4364],c2[5244]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 3
+     d2[60]=simde_mm_xor_si128(c2[8804],simde_mm_xor_si128(c2[19],simde_mm_xor_si128(c2[4404],simde_mm_xor_si128(c2[10573],simde_mm_xor_si128(c2[11453],simde_mm_xor_si128(c2[41],simde_mm_xor_si128(c2[7080],simde_mm_xor_si128(c2[53],simde_mm_xor_si128(c2[933],simde_mm_xor_si128(c2[7126],simde_mm_xor_si128(c2[6258],simde_mm_xor_si128(c2[965],simde_mm_xor_si128(c2[1883],simde_mm_xor_si128(c2[10681],simde_mm_xor_si128(c2[12441],simde_mm_xor_si128(c2[13321],simde_mm_xor_si128(c2[5456],simde_mm_xor_si128(c2[8097],simde_mm_xor_si128(c2[4572],simde_mm_xor_si128(c2[5452],simde_mm_xor_si128(c2[1095],simde_mm_xor_si128(c2[1970],simde_mm_xor_si128(c2[6362],simde_mm_xor_si128(c2[7282],simde_mm_xor_si128(c2[3774],simde_mm_xor_si128(c2[11683],simde_mm_xor_si128(c2[12563],simde_mm_xor_si128(c2[5568],simde_mm_xor_si128(c2[9972],simde_mm_xor_si128(c2[13490],simde_mm_xor_si128(c2[291],simde_mm_xor_si128(c2[4720],simde_mm_xor_si128(c2[8252],simde_mm_xor_si128(c2[13524],simde_mm_xor_si128(c2[325],simde_mm_xor_si128(c2[361],simde_mm_xor_si128(c2[1245],simde_mm_xor_si128(c2[7404],simde_mm_xor_si128(c2[3050],simde_mm_xor_si128(c2[10086],simde_mm_xor_si128(c2[8337],simde_mm_xor_si128(c2[9217],simde_mm_xor_si128(c2[7493],simde_mm_xor_si128(c2[2208],simde_mm_xor_si128(c2[13658],simde_mm_xor_si128(c2[459],simde_mm_xor_si128(c2[4890],simde_mm_xor_si128(c2[11938],simde_mm_xor_si128(c2[11935],simde_mm_xor_si128(c2[12815],simde_mm_xor_si128(c2[5808],simde_mm_xor_si128(c2[7578],simde_mm_xor_si128(c2[4050],simde_mm_xor_si128(c2[4930],simde_mm_xor_si128(c2[12004],simde_mm_xor_si128(c2[11137],simde_mm_xor_si128(c2[4963],simde_mm_xor_si128(c2[5843],simde_mm_xor_si128(c2[10298],simde_mm_xor_si128(c2[10296],simde_mm_xor_si128(c2[12938],simde_mm_xor_si128(c2[13846],simde_mm_xor_si128(c2[8564],simde_mm_xor_si128(c2[11202],simde_mm_xor_si128(c2[12082],simde_mm_xor_si128(c2[4205],simde_mm_xor_si128(c2[8611],simde_mm_xor_si128(c2[11241],simde_mm_xor_si128(c2[12121],simde_mm_xor_si128(c2[2487],simde_mm_xor_si128(c2[6895],simde_mm_xor_si128(c2[12164],simde_mm_xor_si128(c2[13044],simde_mm_xor_si128(c2[1641],simde_mm_xor_si128(c2[5176],simde_mm_xor_si128(c2[6934],simde_mm_xor_si128(c2[13131],simde_mm_xor_si128(c2[5217],simde_mm_xor_si128(c2[14019],simde_mm_xor_si128(c2[800],simde_mm_xor_si128(c2[1722],simde_mm_xor_si128(c2[13178],simde_mm_xor_si128(c2[4364],c2[5244])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 4
+     d2[80]=simde_mm_xor_si128(c2[7934],c2[11492]);
+
+//row: 5
+     d2[100]=simde_mm_xor_si128(c2[2],simde_mm_xor_si128(c2[5296],simde_mm_xor_si128(c2[9681],simde_mm_xor_si128(c2[2651],simde_mm_xor_si128(c2[2645],simde_mm_xor_si128(c2[5338],simde_mm_xor_si128(c2[12377],simde_mm_xor_si128(c2[6210],simde_mm_xor_si128(c2[3578],simde_mm_xor_si128(c2[12403],simde_mm_xor_si128(c2[11535],simde_mm_xor_si128(c2[6242],simde_mm_xor_si128(c2[7160],simde_mm_xor_si128(c2[1899],simde_mm_xor_si128(c2[4539],simde_mm_xor_si128(c2[1883],simde_mm_xor_si128(c2[10733],simde_mm_xor_si128(c2[13374],simde_mm_xor_si128(c2[10729],simde_mm_xor_si128(c2[6372],simde_mm_xor_si128(c2[7247],simde_mm_xor_si128(c2[11659],simde_mm_xor_si128(c2[12579],simde_mm_xor_si128(c2[9051],simde_mm_xor_si128(c2[3761],simde_mm_xor_si128(c2[10845],simde_mm_xor_si128(c2[1170],simde_mm_xor_si128(c2[5568],simde_mm_xor_si128(c2[10017],simde_mm_xor_si128(c2[13529],simde_mm_xor_si128(c2[5602],simde_mm_xor_si128(c2[5658],simde_mm_xor_si128(c2[6522],simde_mm_xor_si128(c2[12681],simde_mm_xor_si128(c2[8327],simde_mm_xor_si128(c2[1284],simde_mm_xor_si128(c2[415],simde_mm_xor_si128(c2[12770],simde_mm_xor_si128(c2[7485],simde_mm_xor_si128(c2[5736],simde_mm_xor_si128(c2[10167],simde_mm_xor_si128(c2[3136],simde_mm_xor_si128(c2[4013],simde_mm_xor_si128(c2[12819],simde_mm_xor_si128(c2[11085],simde_mm_xor_si128(c2[12855],simde_mm_xor_si128(c2[10207],simde_mm_xor_si128(c2[3202],simde_mm_xor_si128(c2[2335],simde_mm_xor_si128(c2[11120],simde_mm_xor_si128(c2[1496],simde_mm_xor_si128(c2[1494],simde_mm_xor_si128(c2[4136],simde_mm_xor_si128(c2[5044],simde_mm_xor_si128(c2[13841],simde_mm_xor_si128(c2[3280],simde_mm_xor_si128(c2[8572],simde_mm_xor_si128(c2[9482],simde_mm_xor_si128(c2[13888],simde_mm_xor_si128(c2[3339],simde_mm_xor_si128(c2[7764],simde_mm_xor_si128(c2[12172],simde_mm_xor_si128(c2[4242],simde_mm_xor_si128(c2[6938],simde_mm_xor_si128(c2[10453],simde_mm_xor_si128(c2[12211],simde_mm_xor_si128(c2[4329],simde_mm_xor_si128(c2[10494],simde_mm_xor_si128(c2[6097],simde_mm_xor_si128(c2[7019],simde_mm_xor_si128(c2[4376],simde_mm_xor_si128(c2[10521],c2[10536]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 6
+     d2[120]=simde_mm_xor_si128(c2[898],simde_mm_xor_si128(c2[4641],simde_mm_xor_si128(c2[4818],simde_mm_xor_si128(c2[11880],simde_mm_xor_si128(c2[7574],simde_mm_xor_si128(c2[13018],simde_mm_xor_si128(c2[9528],c2[10494])))))));
+
+//row: 7
+     d2[140]=simde_mm_xor_si128(c2[10560],simde_mm_xor_si128(c2[7085],simde_mm_xor_si128(c2[13372],simde_mm_xor_si128(c2[2043],simde_mm_xor_si128(c2[8241],c2[11124])))));
+
+//row: 8
+     d2[160]=simde_mm_xor_si128(c2[12339],simde_mm_xor_si128(c2[3534],simde_mm_xor_si128(c2[3534],simde_mm_xor_si128(c2[8808],simde_mm_xor_si128(c2[7939],simde_mm_xor_si128(c2[12333],simde_mm_xor_si128(c2[13213],simde_mm_xor_si128(c2[889],simde_mm_xor_si128(c2[5283],simde_mm_xor_si128(c2[6163],simde_mm_xor_si128(c2[6178],simde_mm_xor_si128(c2[3576],simde_mm_xor_si128(c2[8850],simde_mm_xor_si128(c2[10615],simde_mm_xor_si128(c2[930],simde_mm_xor_si128(c2[1810],simde_mm_xor_si128(c2[4448],simde_mm_xor_si128(c2[8842],simde_mm_xor_si128(c2[9722],simde_mm_xor_si128(c2[4448],simde_mm_xor_si128(c2[10641],simde_mm_xor_si128(c2[1856],simde_mm_xor_si128(c2[9773],simde_mm_xor_si128(c2[968],simde_mm_xor_si128(c2[4480],simde_mm_xor_si128(c2[8894],simde_mm_xor_si128(c2[9774],simde_mm_xor_si128(c2[5418],simde_mm_xor_si128(c2[10692],simde_mm_xor_si128(c2[137],simde_mm_xor_si128(c2[5411],simde_mm_xor_si128(c2[2777],simde_mm_xor_si128(c2[7171],simde_mm_xor_si128(c2[8051],simde_mm_xor_si128(c2[1888],simde_mm_xor_si128(c2[8971],simde_mm_xor_si128(c2[166],simde_mm_xor_si128(c2[11612],simde_mm_xor_si128(c2[1927],simde_mm_xor_si128(c2[2807],simde_mm_xor_si128(c2[8967],simde_mm_xor_si128(c2[13361],simde_mm_xor_si128(c2[162],simde_mm_xor_si128(c2[4610],simde_mm_xor_si128(c2[9884],simde_mm_xor_si128(c2[5485],simde_mm_xor_si128(c2[10779],simde_mm_xor_si128(c2[9897],simde_mm_xor_si128(c2[212],simde_mm_xor_si128(c2[1092],simde_mm_xor_si128(c2[10817],simde_mm_xor_si128(c2[2012],simde_mm_xor_si128(c2[7289],simde_mm_xor_si128(c2[11683],simde_mm_xor_si128(c2[12563],simde_mm_xor_si128(c2[2019],simde_mm_xor_si128(c2[6413],simde_mm_xor_si128(c2[7293],simde_mm_xor_si128(c2[9083],simde_mm_xor_si128(c2[298],simde_mm_xor_si128(c2[13487],simde_mm_xor_si128(c2[3802],simde_mm_xor_si128(c2[4682],simde_mm_xor_si128(c2[3806],simde_mm_xor_si128(c2[8200],simde_mm_xor_si128(c2[9080],simde_mm_xor_si128(c2[8255],simde_mm_xor_si128(c2[13529],simde_mm_xor_si128(c2[11767],simde_mm_xor_si128(c2[2082],simde_mm_xor_si128(c2[2962],simde_mm_xor_si128(c2[3840],simde_mm_xor_si128(c2[8254],simde_mm_xor_si128(c2[9134],simde_mm_xor_si128(c2[3896],simde_mm_xor_si128(c2[9170],simde_mm_xor_si128(c2[4760],simde_mm_xor_si128(c2[10054],simde_mm_xor_si128(c2[10939],simde_mm_xor_si128(c2[1254],simde_mm_xor_si128(c2[2134],simde_mm_xor_si128(c2[6565],simde_mm_xor_si128(c2[11859],simde_mm_xor_si128(c2[13601],simde_mm_xor_si128(c2[3936],simde_mm_xor_si128(c2[4816],simde_mm_xor_si128(c2[12732],simde_mm_xor_si128(c2[3047],simde_mm_xor_si128(c2[3927],simde_mm_xor_si128(c2[11008],simde_mm_xor_si128(c2[2203],simde_mm_xor_si128(c2[5723],simde_mm_xor_si128(c2[11017],simde_mm_xor_si128(c2[3974],simde_mm_xor_si128(c2[8368],simde_mm_xor_si128(c2[9248],simde_mm_xor_si128(c2[8405],simde_mm_xor_si128(c2[13699],simde_mm_xor_si128(c2[1374],simde_mm_xor_si128(c2[6648],simde_mm_xor_si128(c2[2251],simde_mm_xor_si128(c2[6645],simde_mm_xor_si128(c2[7525],simde_mm_xor_si128(c2[6654],simde_mm_xor_si128(c2[9323],simde_mm_xor_si128(c2[538],simde_mm_xor_si128(c2[11093],simde_mm_xor_si128(c2[1408],simde_mm_xor_si128(c2[2288],simde_mm_xor_si128(c2[8445],simde_mm_xor_si128(c2[12859],simde_mm_xor_si128(c2[13739],simde_mm_xor_si128(c2[1440],simde_mm_xor_si128(c2[6734],simde_mm_xor_si128(c2[573],simde_mm_xor_si128(c2[4967],simde_mm_xor_si128(c2[5847],simde_mm_xor_si128(c2[9378],simde_mm_xor_si128(c2[13772],simde_mm_xor_si128(c2[573],simde_mm_xor_si128(c2[13813],simde_mm_xor_si128(c2[5008],simde_mm_xor_si128(c2[13811],simde_mm_xor_si128(c2[5006],simde_mm_xor_si128(c2[2374],simde_mm_xor_si128(c2[6768],simde_mm_xor_si128(c2[7648],simde_mm_xor_si128(c2[3282],simde_mm_xor_si128(c2[8576],simde_mm_xor_si128(c2[12099],simde_mm_xor_si128(c2[3294],simde_mm_xor_si128(c2[1538],simde_mm_xor_si128(c2[5932],simde_mm_xor_si128(c2[6812],simde_mm_xor_si128(c2[7698],simde_mm_xor_si128(c2[7720],simde_mm_xor_si128(c2[13014],simde_mm_xor_si128(c2[12126],simde_mm_xor_si128(c2[2441],simde_mm_xor_si128(c2[3321],simde_mm_xor_si128(c2[1577],simde_mm_xor_si128(c2[5971],simde_mm_xor_si128(c2[6851],simde_mm_xor_si128(c2[6002],simde_mm_xor_si128(c2[11296],simde_mm_xor_si128(c2[10410],simde_mm_xor_si128(c2[725],simde_mm_xor_si128(c2[1605],simde_mm_xor_si128(c2[2480],simde_mm_xor_si128(c2[6894],simde_mm_xor_si128(c2[7774],simde_mm_xor_si128(c2[5176],simde_mm_xor_si128(c2[10450],simde_mm_xor_si128(c2[8691],simde_mm_xor_si128(c2[13965],simde_mm_xor_si128(c2[10449],simde_mm_xor_si128(c2[764],simde_mm_xor_si128(c2[1644],simde_mm_xor_si128(c2[13086],simde_mm_xor_si128(c2[2567],simde_mm_xor_si128(c2[7841],simde_mm_xor_si128(c2[8732],simde_mm_xor_si128(c2[13126],simde_mm_xor_si128(c2[14006],simde_mm_xor_si128(c2[4335],simde_mm_xor_si128(c2[8729],simde_mm_xor_si128(c2[9609],simde_mm_xor_si128(c2[5257],simde_mm_xor_si128(c2[10531],simde_mm_xor_si128(c2[2614],simde_mm_xor_si128(c2[7888],simde_mm_xor_si128(c2[8779],simde_mm_xor_si128(c2[13173],simde_mm_xor_si128(c2[14053],c2[12296]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 9
+     d2[180]=simde_mm_xor_si128(c2[11451],simde_mm_xor_si128(c2[3575],simde_mm_xor_si128(c2[3922],simde_mm_xor_si128(c2[12777],simde_mm_xor_si128(c2[6689],simde_mm_xor_si128(c2[10376],simde_mm_xor_si128(c2[6888],c2[1693])))))));
+
+//row: 10
+     d2[200]=simde_mm_xor_si128(c2[12360],simde_mm_xor_si128(c2[85],simde_mm_xor_si128(c2[2813],simde_mm_xor_si128(c2[9964],simde_mm_xor_si128(c2[1210],c2[6739])))));
+
+//row: 11
+     d2[220]=simde_mm_xor_si128(c2[3527],simde_mm_xor_si128(c2[882],simde_mm_xor_si128(c2[1762],simde_mm_xor_si128(c2[8801],simde_mm_xor_si128(c2[7056],simde_mm_xor_si128(c2[13206],simde_mm_xor_si128(c2[11441],simde_mm_xor_si128(c2[6176],simde_mm_xor_si128(c2[4411],simde_mm_xor_si128(c2[1],simde_mm_xor_si128(c2[8843],simde_mm_xor_si128(c2[6218],simde_mm_xor_si128(c2[7098],simde_mm_xor_si128(c2[1803],simde_mm_xor_si128(c2[58],simde_mm_xor_si128(c2[9735],simde_mm_xor_si128(c2[7970],simde_mm_xor_si128(c2[2689],simde_mm_xor_si128(c2[1849],simde_mm_xor_si128(c2[13283],simde_mm_xor_si128(c2[84],simde_mm_xor_si128(c2[961],simde_mm_xor_si128(c2[13295],simde_mm_xor_si128(c2[9767],simde_mm_xor_si128(c2[8002],simde_mm_xor_si128(c2[10685],simde_mm_xor_si128(c2[8040],simde_mm_xor_si128(c2[8920],simde_mm_xor_si128(c2[5404],simde_mm_xor_si128(c2[3659],simde_mm_xor_si128(c2[8044],simde_mm_xor_si128(c2[6299],simde_mm_xor_si128(c2[179],simde_mm_xor_si128(c2[12493],simde_mm_xor_si128(c2[2800],simde_mm_xor_si128(c2[1055],simde_mm_xor_si128(c2[175],simde_mm_xor_si128(c2[12489],simde_mm_xor_si128(c2[9897],simde_mm_xor_si128(c2[7252],simde_mm_xor_si128(c2[8132],simde_mm_xor_si128(c2[10772],simde_mm_xor_si128(c2[9007],simde_mm_xor_si128(c2[1085],simde_mm_xor_si128(c2[13419],simde_mm_xor_si128(c2[2005],simde_mm_xor_si128(c2[13459],simde_mm_xor_si128(c2[240],simde_mm_xor_si128(c2[12576],simde_mm_xor_si128(c2[10811],simde_mm_xor_si128(c2[7286],simde_mm_xor_si128(c2[5521],simde_mm_xor_si128(c2[291],simde_mm_xor_si128(c2[12605],simde_mm_xor_si128(c2[4695],simde_mm_xor_si128(c2[2930],simde_mm_xor_si128(c2[9093],simde_mm_xor_si128(c2[7328],simde_mm_xor_si128(c2[13522],simde_mm_xor_si128(c2[11777],simde_mm_xor_si128(c2[2975],simde_mm_xor_si128(c2[1210],simde_mm_xor_si128(c2[9127],simde_mm_xor_si128(c2[7362],simde_mm_xor_si128(c2[9163],simde_mm_xor_si128(c2[6538],simde_mm_xor_si128(c2[7418],simde_mm_xor_si128(c2[10047],simde_mm_xor_si128(c2[8282],simde_mm_xor_si128(c2[2127],simde_mm_xor_si128(c2[362],simde_mm_xor_si128(c2[11852],simde_mm_xor_si128(c2[9207],simde_mm_xor_si128(c2[10087],simde_mm_xor_si128(c2[4809],simde_mm_xor_si128(c2[3044],simde_mm_xor_si128(c2[3920],simde_mm_xor_si128(c2[2175],simde_mm_xor_si128(c2[2216],simde_mm_xor_si128(c2[13650],simde_mm_xor_si128(c2[451],simde_mm_xor_si128(c2[11010],simde_mm_xor_si128(c2[9245],simde_mm_xor_si128(c2[9241],simde_mm_xor_si128(c2[7496],simde_mm_xor_si128(c2[13692],simde_mm_xor_si128(c2[11047],simde_mm_xor_si128(c2[11927],simde_mm_xor_si128(c2[6641],simde_mm_xor_si128(c2[4896],simde_mm_xor_si128(c2[7538],simde_mm_xor_si128(c2[5773],simde_mm_xor_si128(c2[2258],simde_mm_xor_si128(c2[531],simde_mm_xor_si128(c2[11965],simde_mm_xor_si128(c2[12845],simde_mm_xor_si128(c2[2281],simde_mm_xor_si128(c2[536],simde_mm_xor_si128(c2[13732],simde_mm_xor_si128(c2[11967],simde_mm_xor_si128(c2[6727],simde_mm_xor_si128(c2[4962],simde_mm_xor_si128(c2[5840],simde_mm_xor_si128(c2[4095],simde_mm_xor_si128(c2[566],simde_mm_xor_si128(c2[12880],simde_mm_xor_si128(c2[5001],simde_mm_xor_si128(c2[2376],simde_mm_xor_si128(c2[3256],simde_mm_xor_si128(c2[5019],simde_mm_xor_si128(c2[3254],simde_mm_xor_si128(c2[7641],simde_mm_xor_si128(c2[5896],simde_mm_xor_si128(c2[8569],simde_mm_xor_si128(c2[5924],simde_mm_xor_si128(c2[6804],simde_mm_xor_si128(c2[3287],simde_mm_xor_si128(c2[1522],simde_mm_xor_si128(c2[6805],simde_mm_xor_si128(c2[5040],simde_mm_xor_si128(c2[1538],simde_mm_xor_si128(c2[13007],simde_mm_xor_si128(c2[11242],simde_mm_xor_si128(c2[3334],simde_mm_xor_si128(c2[1569],simde_mm_xor_si128(c2[6844],simde_mm_xor_si128(c2[5099],simde_mm_xor_si128(c2[11289],simde_mm_xor_si128(c2[8644],simde_mm_xor_si128(c2[9524],simde_mm_xor_si128(c2[1618],simde_mm_xor_si128(c2[13932],simde_mm_xor_si128(c2[7767],simde_mm_xor_si128(c2[6002],simde_mm_xor_si128(c2[10443],simde_mm_xor_si128(c2[7818],simde_mm_xor_si128(c2[8698],simde_mm_xor_si128(c2[13978],simde_mm_xor_si128(c2[12213],simde_mm_xor_si128(c2[1657],simde_mm_xor_si128(c2[13971],simde_mm_xor_si128(c2[7854],simde_mm_xor_si128(c2[5209],simde_mm_xor_si128(c2[6089],simde_mm_xor_si128(c2[14019],simde_mm_xor_si128(c2[12254],simde_mm_xor_si128(c2[9602],simde_mm_xor_si128(c2[7857],simde_mm_xor_si128(c2[10524],simde_mm_xor_si128(c2[7899],simde_mm_xor_si128(c2[8779],simde_mm_xor_si128(c2[7881],simde_mm_xor_si128(c2[6136],simde_mm_xor_si128(c2[14046],simde_mm_xor_si128(c2[12281],c2[1731])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 12
+     d2[240]=simde_mm_xor_si128(c2[4414],simde_mm_xor_si128(c2[9734],simde_mm_xor_si128(c2[8330],simde_mm_xor_si128(c2[443],simde_mm_xor_si128(c2[8446],c2[4243])))));
+
+//row: 13
+     d2[260]=simde_mm_xor_si128(c2[12326],simde_mm_xor_si128(c2[13206],simde_mm_xor_si128(c2[4401],simde_mm_xor_si128(c2[8806],simde_mm_xor_si128(c2[1776],simde_mm_xor_si128(c2[6162],simde_mm_xor_si128(c2[3563],simde_mm_xor_si128(c2[4443],simde_mm_xor_si128(c2[11482],simde_mm_xor_si128(c2[5335],simde_mm_xor_si128(c2[10648],simde_mm_xor_si128(c2[11528],simde_mm_xor_si128(c2[10640],simde_mm_xor_si128(c2[5367],simde_mm_xor_si128(c2[5405],simde_mm_xor_si128(c2[6285],simde_mm_xor_si128(c2[1004],simde_mm_xor_si128(c2[3644],simde_mm_xor_si128(c2[12458],simde_mm_xor_si128(c2[9858],simde_mm_xor_si128(c2[12499],simde_mm_xor_si128(c2[9854],simde_mm_xor_si128(c2[4617],simde_mm_xor_si128(c2[5497],simde_mm_xor_si128(c2[6372],simde_mm_xor_si128(c2[10764],simde_mm_xor_si128(c2[10804],simde_mm_xor_si128(c2[11684],simde_mm_xor_si128(c2[8176],simde_mm_xor_si128(c2[2886],simde_mm_xor_si128(c2[9970],simde_mm_xor_si128(c2[295],simde_mm_xor_si128(c2[4693],simde_mm_xor_si128(c2[13498],simde_mm_xor_si128(c2[9122],simde_mm_xor_si128(c2[12654],simde_mm_xor_si128(c2[4727],simde_mm_xor_si128(c2[3883],simde_mm_xor_si128(c2[4763],simde_mm_xor_si128(c2[5647],simde_mm_xor_si128(c2[11806],simde_mm_xor_si128(c2[6572],simde_mm_xor_si128(c2[7452],simde_mm_xor_si128(c2[409],simde_mm_xor_si128(c2[13619],simde_mm_xor_si128(c2[11015],simde_mm_xor_si128(c2[11895],simde_mm_xor_si128(c2[6610],simde_mm_xor_si128(c2[4841],simde_mm_xor_si128(c2[8412],simde_mm_xor_si128(c2[9292],simde_mm_xor_si128(c2[2241],simde_mm_xor_si128(c2[3138],simde_mm_xor_si128(c2[9330],simde_mm_xor_si128(c2[10210],simde_mm_xor_si128(c2[11960],simde_mm_xor_si128(c2[9332],simde_mm_xor_si128(c2[2327],simde_mm_xor_si128(c2[1440],simde_mm_xor_si128(c2[10245],simde_mm_xor_si128(c2[13800],simde_mm_xor_si128(c2[601],simde_mm_xor_si128(c2[619],simde_mm_xor_si128(c2[3241],simde_mm_xor_si128(c2[3289],simde_mm_xor_si128(c2[4169],simde_mm_xor_si128(c2[12966],simde_mm_xor_si128(c2[2405],simde_mm_xor_si128(c2[8607],simde_mm_xor_si128(c2[13013],simde_mm_xor_si128(c2[2444],simde_mm_xor_si128(c2[6009],simde_mm_xor_si128(c2[6889],simde_mm_xor_si128(c2[11297],simde_mm_xor_si128(c2[3367],simde_mm_xor_si128(c2[5163],simde_mm_xor_si128(c2[6043],simde_mm_xor_si128(c2[9578],simde_mm_xor_si128(c2[11336],simde_mm_xor_si128(c2[2574],simde_mm_xor_si128(c2[3454],simde_mm_xor_si128(c2[9619],simde_mm_xor_si128(c2[5202],simde_mm_xor_si128(c2[810],simde_mm_xor_si128(c2[5244],simde_mm_xor_si128(c2[6124],simde_mm_xor_si128(c2[3481],c2[9646])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 14
+     d2[280]=simde_mm_xor_si128(c2[12324],simde_mm_xor_si128(c2[10178],simde_mm_xor_si128(c2[5883],simde_mm_xor_si128(c2[12083],simde_mm_xor_si128(c2[3331],c2[2616])))));
+
+//row: 15
+     d2[300]=simde_mm_xor_si128(c2[11447],simde_mm_xor_si128(c2[2642],simde_mm_xor_si128(c2[7047],simde_mm_xor_si128(c2[13216],simde_mm_xor_si128(c2[17],simde_mm_xor_si128(c2[4414],simde_mm_xor_si128(c2[2684],simde_mm_xor_si128(c2[9723],simde_mm_xor_si128(c2[2696],simde_mm_xor_si128(c2[3576],simde_mm_xor_si128(c2[1818],simde_mm_xor_si128(c2[9769],simde_mm_xor_si128(c2[8881],simde_mm_xor_si128(c2[3608],simde_mm_xor_si128(c2[4526],simde_mm_xor_si128(c2[13324],simde_mm_xor_si128(c2[1005],simde_mm_xor_si128(c2[1885],simde_mm_xor_si128(c2[8099],simde_mm_xor_si128(c2[10720],simde_mm_xor_si128(c2[7215],simde_mm_xor_si128(c2[8095],simde_mm_xor_si128(c2[3738],simde_mm_xor_si128(c2[4613],simde_mm_xor_si128(c2[9005],simde_mm_xor_si128(c2[9925],simde_mm_xor_si128(c2[6417],simde_mm_xor_si128(c2[247],simde_mm_xor_si128(c2[1127],simde_mm_xor_si128(c2[8211],simde_mm_xor_si128(c2[12615],simde_mm_xor_si128(c2[2054],simde_mm_xor_si128(c2[2934],simde_mm_xor_si128(c2[7363],simde_mm_xor_si128(c2[10895],simde_mm_xor_si128(c2[2088],simde_mm_xor_si128(c2[2968],simde_mm_xor_si128(c2[3004],simde_mm_xor_si128(c2[3888],simde_mm_xor_si128(c2[10047],simde_mm_xor_si128(c2[5693],simde_mm_xor_si128(c2[12729],simde_mm_xor_si128(c2[10960],simde_mm_xor_si128(c2[11840],simde_mm_xor_si128(c2[10963],simde_mm_xor_si128(c2[10136],simde_mm_xor_si128(c2[4851],simde_mm_xor_si128(c2[2202],simde_mm_xor_si128(c2[3082],simde_mm_xor_si128(c2[7533],simde_mm_xor_si128(c2[482],simde_mm_xor_si128(c2[499],simde_mm_xor_si128(c2[1379],simde_mm_xor_si128(c2[8451],simde_mm_xor_si128(c2[10201],simde_mm_xor_si128(c2[6693],simde_mm_xor_si128(c2[7573],simde_mm_xor_si128(c2[2288],simde_mm_xor_si128(c2[568],simde_mm_xor_si128(c2[13760],simde_mm_xor_si128(c2[7606],simde_mm_xor_si128(c2[8486],simde_mm_xor_si128(c2[12921],simde_mm_xor_si128(c2[12939],simde_mm_xor_si128(c2[1482],simde_mm_xor_si128(c2[2410],simde_mm_xor_si128(c2[11207],simde_mm_xor_si128(c2[13845],simde_mm_xor_si128(c2[646],simde_mm_xor_si128(c2[6848],simde_mm_xor_si128(c2[11254],simde_mm_xor_si128(c2[13884],simde_mm_xor_si128(c2[685],simde_mm_xor_si128(c2[5130],simde_mm_xor_si128(c2[9538],simde_mm_xor_si128(c2[728],simde_mm_xor_si128(c2[1608],simde_mm_xor_si128(c2[7771],simde_mm_xor_si128(c2[4284],simde_mm_xor_si128(c2[7819],simde_mm_xor_si128(c2[9577],simde_mm_xor_si128(c2[1695],simde_mm_xor_si128(c2[7840],simde_mm_xor_si128(c2[2563],simde_mm_xor_si128(c2[3443],simde_mm_xor_si128(c2[4365],simde_mm_xor_si128(c2[1722],simde_mm_xor_si128(c2[7007],c2[7887]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 16
+     d2[320]=simde_mm_xor_si128(c2[4403],simde_mm_xor_si128(c2[9697],simde_mm_xor_si128(c2[3],simde_mm_xor_si128(c2[7052],simde_mm_xor_si128(c2[9739],simde_mm_xor_si128(c2[2699],simde_mm_xor_si128(c2[10611],simde_mm_xor_si128(c2[4444],simde_mm_xor_si128(c2[2725],simde_mm_xor_si128(c2[1857],simde_mm_xor_si128(c2[10643],simde_mm_xor_si128(c2[11561],simde_mm_xor_si128(c2[6280],simde_mm_xor_si128(c2[8920],simde_mm_xor_si128(c2[10688],simde_mm_xor_si128(c2[1055],simde_mm_xor_si128(c2[3696],simde_mm_xor_si128(c2[1051],simde_mm_xor_si128(c2[10773],simde_mm_xor_si128(c2[11648],simde_mm_xor_si128(c2[1961],simde_mm_xor_si128(c2[2881],simde_mm_xor_si128(c2[13452],simde_mm_xor_si128(c2[8162],simde_mm_xor_si128(c2[1167],simde_mm_xor_si128(c2[5571],simde_mm_xor_si128(c2[9969],simde_mm_xor_si128(c2[339],simde_mm_xor_si128(c2[3851],simde_mm_xor_si128(c2[10003],simde_mm_xor_si128(c2[10059],simde_mm_xor_si128(c2[10923],simde_mm_xor_si128(c2[3003],simde_mm_xor_si128(c2[12728],simde_mm_xor_si128(c2[5685],simde_mm_xor_si128(c2[4816],simde_mm_xor_si128(c2[3092],simde_mm_xor_si128(c2[11886],simde_mm_xor_si128(c2[10137],simde_mm_xor_si128(c2[11882],simde_mm_xor_si128(c2[489],simde_mm_xor_si128(c2[7537],simde_mm_xor_si128(c2[8414],simde_mm_xor_si128(c2[1407],simde_mm_xor_si128(c2[3177],simde_mm_xor_si128(c2[529],simde_mm_xor_si128(c2[7603],simde_mm_xor_si128(c2[6736],simde_mm_xor_si128(c2[1442],simde_mm_xor_si128(c2[5897],simde_mm_xor_si128(c2[5895],simde_mm_xor_si128(c2[8537],simde_mm_xor_si128(c2[9445],simde_mm_xor_si128(c2[4163],simde_mm_xor_si128(c2[7681],simde_mm_xor_si128(c2[13883],simde_mm_xor_si128(c2[4210],simde_mm_xor_si128(c2[7720],simde_mm_xor_si128(c2[12165],simde_mm_xor_si128(c2[2494],simde_mm_xor_si128(c2[8643],simde_mm_xor_si128(c2[11339],simde_mm_xor_si128(c2[775],simde_mm_xor_si128(c2[2533],simde_mm_xor_si128(c2[8730],simde_mm_xor_si128(c2[816],simde_mm_xor_si128(c2[10498],simde_mm_xor_si128(c2[3447],simde_mm_xor_si128(c2[11400],simde_mm_xor_si128(c2[8777],c2[843]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 17
+     d2[340]=simde_mm_xor_si128(c2[896],simde_mm_xor_si128(c2[3209],simde_mm_xor_si128(c2[648],simde_mm_xor_si128(c2[3323],c2[4374]))));
+
+//row: 18
+     d2[360]=simde_mm_xor_si128(c2[3576],simde_mm_xor_si128(c2[5778],simde_mm_xor_si128(c2[3178],simde_mm_xor_si128(c2[12168],c2[6938]))));
+
+//row: 19
+     d2[380]=simde_mm_xor_si128(c2[4],simde_mm_xor_si128(c2[4451],simde_mm_xor_si128(c2[4686],simde_mm_xor_si128(c2[12656],c2[8322]))));
+
+//row: 20
+     d2[400]=simde_mm_xor_si128(c2[2645],simde_mm_xor_si128(c2[7939],simde_mm_xor_si128(c2[12324],simde_mm_xor_si128(c2[5294],simde_mm_xor_si128(c2[11458],simde_mm_xor_si128(c2[7961],simde_mm_xor_si128(c2[921],simde_mm_xor_si128(c2[8853],simde_mm_xor_si128(c2[967],simde_mm_xor_si128(c2[99],simde_mm_xor_si128(c2[8885],simde_mm_xor_si128(c2[9803],simde_mm_xor_si128(c2[4522],simde_mm_xor_si128(c2[7162],simde_mm_xor_si128(c2[1890],simde_mm_xor_si128(c2[13376],simde_mm_xor_si128(c2[1938],simde_mm_xor_si128(c2[13372],simde_mm_xor_si128(c2[9015],simde_mm_xor_si128(c2[9890],simde_mm_xor_si128(c2[203],simde_mm_xor_si128(c2[1123],simde_mm_xor_si128(c2[11694],simde_mm_xor_si128(c2[6404],simde_mm_xor_si128(c2[13488],simde_mm_xor_si128(c2[3813],simde_mm_xor_si128(c2[8211],simde_mm_xor_si128(c2[12640],simde_mm_xor_si128(c2[2093],simde_mm_xor_si128(c2[8245],simde_mm_xor_si128(c2[8281],simde_mm_xor_si128(c2[9165],simde_mm_xor_si128(c2[1245],simde_mm_xor_si128(c2[7402],simde_mm_xor_si128(c2[10970],simde_mm_xor_si128(c2[3927],simde_mm_xor_si128(c2[3058],simde_mm_xor_si128(c2[1334],simde_mm_xor_si128(c2[10128],simde_mm_xor_si128(c2[8379],simde_mm_xor_si128(c2[2208],simde_mm_xor_si128(c2[12810],simde_mm_xor_si128(c2[5779],simde_mm_xor_si128(c2[6656],simde_mm_xor_si128(c2[13728],simde_mm_xor_si128(c2[1419],simde_mm_xor_si128(c2[12850],simde_mm_xor_si128(c2[5845],simde_mm_xor_si128(c2[4978],simde_mm_xor_si128(c2[13763],simde_mm_xor_si128(c2[4139],simde_mm_xor_si128(c2[4137],simde_mm_xor_si128(c2[6779],simde_mm_xor_si128(c2[7687],simde_mm_xor_si128(c2[2405],simde_mm_xor_si128(c2[5923],simde_mm_xor_si128(c2[12125],simde_mm_xor_si128(c2[2452],simde_mm_xor_si128(c2[5962],simde_mm_xor_si128(c2[10407],simde_mm_xor_si128(c2[736],simde_mm_xor_si128(c2[6885],simde_mm_xor_si128(c2[9561],simde_mm_xor_si128(c2[13096],simde_mm_xor_si128(c2[775],simde_mm_xor_si128(c2[6972],simde_mm_xor_si128(c2[13137],simde_mm_xor_si128(c2[8720],simde_mm_xor_si128(c2[9642],simde_mm_xor_si128(c2[7019],c2[13164]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 21
+     d2[420]=simde_mm_xor_si128(c2[13244],simde_mm_xor_si128(c2[13410],simde_mm_xor_si128(c2[4168],simde_mm_xor_si128(c2[10497],c2[7006]))));
+
+//row: 22
+     d2[440]=simde_mm_xor_si128(c2[891],simde_mm_xor_si128(c2[4001],simde_mm_xor_si128(c2[6683],c2[11259])));
+
+//row: 23
+     d2[460]=simde_mm_xor_si128(c2[7975],simde_mm_xor_si128(c2[1843],simde_mm_xor_si128(c2[4808],c2[8646])));
+
+//row: 24
+     d2[480]=simde_mm_xor_si128(c2[1762],simde_mm_xor_si128(c2[7056],simde_mm_xor_si128(c2[11441],simde_mm_xor_si128(c2[4411],simde_mm_xor_si128(c2[898],simde_mm_xor_si128(c2[7098],simde_mm_xor_si128(c2[58],simde_mm_xor_si128(c2[7970],simde_mm_xor_si128(c2[84],simde_mm_xor_si128(c2[13295],simde_mm_xor_si128(c2[8002],simde_mm_xor_si128(c2[8920],simde_mm_xor_si128(c2[3659],simde_mm_xor_si128(c2[6299],simde_mm_xor_si128(c2[7177],simde_mm_xor_si128(c2[12493],simde_mm_xor_si128(c2[1055],simde_mm_xor_si128(c2[12489],simde_mm_xor_si128(c2[12486],simde_mm_xor_si128(c2[8132],simde_mm_xor_si128(c2[9007],simde_mm_xor_si128(c2[13419],simde_mm_xor_si128(c2[240],simde_mm_xor_si128(c2[10811],simde_mm_xor_si128(c2[5521],simde_mm_xor_si128(c2[12605],simde_mm_xor_si128(c2[2930],simde_mm_xor_si128(c2[7328],simde_mm_xor_si128(c2[11777],simde_mm_xor_si128(c2[1210],simde_mm_xor_si128(c2[7362],simde_mm_xor_si128(c2[7418],simde_mm_xor_si128(c2[8282],simde_mm_xor_si128(c2[362],simde_mm_xor_si128(c2[10087],simde_mm_xor_si128(c2[3044],simde_mm_xor_si128(c2[2175],simde_mm_xor_si128(c2[451],simde_mm_xor_si128(c2[9245],simde_mm_xor_si128(c2[7496],simde_mm_xor_si128(c2[10131],simde_mm_xor_si128(c2[11927],simde_mm_xor_si128(c2[4896],simde_mm_xor_si128(c2[5773],simde_mm_xor_si128(c2[12845],simde_mm_xor_si128(c2[536],simde_mm_xor_si128(c2[11967],simde_mm_xor_si128(c2[4962],simde_mm_xor_si128(c2[4095],simde_mm_xor_si128(c2[12880],simde_mm_xor_si128(c2[3256],simde_mm_xor_si128(c2[3254],simde_mm_xor_si128(c2[5896],simde_mm_xor_si128(c2[6804],simde_mm_xor_si128(c2[1522],simde_mm_xor_si128(c2[5040],simde_mm_xor_si128(c2[11242],simde_mm_xor_si128(c2[1569],simde_mm_xor_si128(c2[5099],simde_mm_xor_si128(c2[9524],simde_mm_xor_si128(c2[13932],simde_mm_xor_si128(c2[6002],simde_mm_xor_si128(c2[8698],simde_mm_xor_si128(c2[12213],simde_mm_xor_si128(c2[13971],simde_mm_xor_si128(c2[6089],simde_mm_xor_si128(c2[12254],simde_mm_xor_si128(c2[7857],simde_mm_xor_si128(c2[8779],simde_mm_xor_si128(c2[6136],c2[12281]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 25
+     d2[500]=simde_mm_xor_si128(c2[10610],simde_mm_xor_si128(c2[6418],simde_mm_xor_si128(c2[286],c2[12882])));
+
+//row: 26
+     d2[520]=simde_mm_xor_si128(c2[12336],simde_mm_xor_si128(c2[12406],simde_mm_xor_si128(c2[12499],c2[3244])));
+
+//row: 27
+     d2[540]=simde_mm_xor_si128(c2[1813],simde_mm_xor_si128(c2[11681],c2[339]));
+
+//row: 28
+     d2[560]=simde_mm_xor_si128(c2[9680],simde_mm_xor_si128(c2[4578],simde_mm_xor_si128(c2[2523],c2[9654])));
+
+//row: 29
+     d2[580]=simde_mm_xor_si128(c2[6166],simde_mm_xor_si128(c2[11440],simde_mm_xor_si128(c2[1766],simde_mm_xor_si128(c2[7935],simde_mm_xor_si128(c2[8815],simde_mm_xor_si128(c2[11482],simde_mm_xor_si128(c2[4442],simde_mm_xor_si128(c2[11494],simde_mm_xor_si128(c2[12374],simde_mm_xor_si128(c2[9721],simde_mm_xor_si128(c2[4488],simde_mm_xor_si128(c2[3600],simde_mm_xor_si128(c2[12406],simde_mm_xor_si128(c2[13324],simde_mm_xor_si128(c2[8043],simde_mm_xor_si128(c2[9803],simde_mm_xor_si128(c2[10683],simde_mm_xor_si128(c2[2818],simde_mm_xor_si128(c2[5459],simde_mm_xor_si128(c2[1934],simde_mm_xor_si128(c2[2814],simde_mm_xor_si128(c2[12536],simde_mm_xor_si128(c2[13411],simde_mm_xor_si128(c2[3724],simde_mm_xor_si128(c2[4644],simde_mm_xor_si128(c2[1136],simde_mm_xor_si128(c2[9045],simde_mm_xor_si128(c2[9925],simde_mm_xor_si128(c2[2930],simde_mm_xor_si128(c2[7334],simde_mm_xor_si128(c2[10852],simde_mm_xor_si128(c2[11732],simde_mm_xor_si128(c2[2082],simde_mm_xor_si128(c2[5614],simde_mm_xor_si128(c2[10886],simde_mm_xor_si128(c2[11766],simde_mm_xor_si128(c2[11802],simde_mm_xor_si128(c2[12686],simde_mm_xor_si128(c2[4766],simde_mm_xor_si128(c2[412],simde_mm_xor_si128(c2[7448],simde_mm_xor_si128(c2[5699],simde_mm_xor_si128(c2[6579],simde_mm_xor_si128(c2[4855],simde_mm_xor_si128(c2[13649],simde_mm_xor_si128(c2[11000],simde_mm_xor_si128(c2[11880],simde_mm_xor_si128(c2[2252],simde_mm_xor_si128(c2[9280],simde_mm_xor_si128(c2[9297],simde_mm_xor_si128(c2[10177],simde_mm_xor_si128(c2[3170],simde_mm_xor_si128(c2[4920],simde_mm_xor_si128(c2[1412],simde_mm_xor_si128(c2[2292],simde_mm_xor_si128(c2[9366],simde_mm_xor_si128(c2[8499],simde_mm_xor_si128(c2[2325],simde_mm_xor_si128(c2[3205],simde_mm_xor_si128(c2[4099],simde_mm_xor_si128(c2[7640],simde_mm_xor_si128(c2[7658],simde_mm_xor_si128(c2[10280],simde_mm_xor_si128(c2[11208],simde_mm_xor_si128(c2[5926],simde_mm_xor_si128(c2[8564],simde_mm_xor_si128(c2[9444],simde_mm_xor_si128(c2[1567],simde_mm_xor_si128(c2[5973],simde_mm_xor_si128(c2[8603],simde_mm_xor_si128(c2[9483],simde_mm_xor_si128(c2[13928],simde_mm_xor_si128(c2[4257],simde_mm_xor_si128(c2[9526],simde_mm_xor_si128(c2[10406],simde_mm_xor_si128(c2[5127],simde_mm_xor_si128(c2[13082],simde_mm_xor_si128(c2[2538],simde_mm_xor_si128(c2[4296],simde_mm_xor_si128(c2[10493],simde_mm_xor_si128(c2[2579],simde_mm_xor_si128(c2[11361],simde_mm_xor_si128(c2[12241],simde_mm_xor_si128(c2[13163],simde_mm_xor_si128(c2[10520],simde_mm_xor_si128(c2[1726],c2[2606]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 30
+     d2[600]=simde_mm_xor_si128(c2[893],simde_mm_xor_si128(c2[6167],simde_mm_xor_si128(c2[9692],simde_mm_xor_si128(c2[10572],simde_mm_xor_si128(c2[2642],simde_mm_xor_si128(c2[3522],simde_mm_xor_si128(c2[9685],simde_mm_xor_si128(c2[6209],simde_mm_xor_si128(c2[12368],simde_mm_xor_si128(c2[13248],simde_mm_xor_si128(c2[6201],simde_mm_xor_si128(c2[7081],simde_mm_xor_si128(c2[13294],simde_mm_xor_si128(c2[12406],simde_mm_xor_si128(c2[6253],simde_mm_xor_si128(c2[7133],simde_mm_xor_si128(c2[8051],simde_mm_xor_si128(c2[2770],simde_mm_xor_si128(c2[4530],simde_mm_xor_si128(c2[5410],simde_mm_xor_si128(c2[11604],simde_mm_xor_si128(c2[13365],simde_mm_xor_si128(c2[166],simde_mm_xor_si128(c2[10720],simde_mm_xor_si128(c2[11600],simde_mm_xor_si128(c2[7243],simde_mm_xor_si128(c2[8138],simde_mm_xor_si128(c2[11650],simde_mm_xor_si128(c2[12530],simde_mm_xor_si128(c2[13450],simde_mm_xor_si128(c2[9042],simde_mm_xor_si128(c2[9922],simde_mm_xor_si128(c2[3772],simde_mm_xor_si128(c2[4652],simde_mm_xor_si128(c2[11736],simde_mm_xor_si128(c2[1161],simde_mm_xor_si128(c2[2041],simde_mm_xor_si128(c2[5579],simde_mm_xor_si128(c2[6459],simde_mm_xor_si128(c2[10888],simde_mm_xor_si128(c2[13520],simde_mm_xor_si128(c2[321],simde_mm_xor_si128(c2[5613],simde_mm_xor_si128(c2[6493],simde_mm_xor_si128(c2[6529],simde_mm_xor_si128(c2[7413],simde_mm_xor_si128(c2[12692],simde_mm_xor_si128(c2[13572],simde_mm_xor_si128(c2[9218],simde_mm_xor_si128(c2[1295],simde_mm_xor_si128(c2[2175],simde_mm_xor_si128(c2[406],simde_mm_xor_si128(c2[1286],simde_mm_xor_si128(c2[6561],simde_mm_xor_si128(c2[13641],simde_mm_xor_si128(c2[8376],simde_mm_xor_si128(c2[5727],simde_mm_xor_si128(c2[6607],simde_mm_xor_si128(c2[11058],simde_mm_xor_si128(c2[4007],simde_mm_xor_si128(c2[4004],simde_mm_xor_si128(c2[4884],simde_mm_xor_si128(c2[11976],simde_mm_xor_si128(c2[12846],simde_mm_xor_si128(c2[13726],simde_mm_xor_si128(c2[10218],simde_mm_xor_si128(c2[11098],simde_mm_xor_si128(c2[8446],simde_mm_xor_si128(c2[4093],simde_mm_xor_si128(c2[2326],simde_mm_xor_si128(c2[3206],simde_mm_xor_si128(c2[11131],simde_mm_xor_si128(c2[12011],simde_mm_xor_si128(c2[2367],simde_mm_xor_si128(c2[2365],simde_mm_xor_si128(c2[4127],simde_mm_xor_si128(c2[5007],simde_mm_xor_si128(c2[5935],simde_mm_xor_si128(c2[653],simde_mm_xor_si128(c2[3291],simde_mm_xor_si128(c2[4171],simde_mm_xor_si128(c2[10373],simde_mm_xor_si128(c2[13899],simde_mm_xor_si128(c2[680],simde_mm_xor_si128(c2[3330],simde_mm_xor_si128(c2[4210],simde_mm_xor_si128(c2[8655],simde_mm_xor_si128(c2[12163],simde_mm_xor_si128(c2[13043],simde_mm_xor_si128(c2[4253],simde_mm_xor_si128(c2[5133],simde_mm_xor_si128(c2[7809],simde_mm_xor_si128(c2[11324],simde_mm_xor_si128(c2[12202],simde_mm_xor_si128(c2[13082],simde_mm_xor_si128(c2[5200],simde_mm_xor_si128(c2[10485],simde_mm_xor_si128(c2[11365],simde_mm_xor_si128(c2[6088],simde_mm_xor_si128(c2[6968],simde_mm_xor_si128(c2[7890],simde_mm_xor_si128(c2[5247],simde_mm_xor_si128(c2[10532],c2[11412])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 31
+     d2[620]=simde_mm_xor_si128(c2[9688],simde_mm_xor_si128(c2[10574],simde_mm_xor_si128(c2[883],simde_mm_xor_si128(c2[1769],simde_mm_xor_si128(c2[5288],simde_mm_xor_si128(c2[6174],simde_mm_xor_si128(c2[12337],simde_mm_xor_si128(c2[12323],simde_mm_xor_si128(c2[13203],simde_mm_xor_si128(c2[925],simde_mm_xor_si128(c2[1811],simde_mm_xor_si128(c2[7964],simde_mm_xor_si128(c2[8850],simde_mm_xor_si128(c2[1817],simde_mm_xor_si128(c2[1803],simde_mm_xor_si128(c2[2683],simde_mm_xor_si128(c2[12373],simde_mm_xor_si128(c2[8010],simde_mm_xor_si128(c2[8896],simde_mm_xor_si128(c2[7122],simde_mm_xor_si128(c2[8008],simde_mm_xor_si128(c2[1849],simde_mm_xor_si128(c2[2735],simde_mm_xor_si128(c2[2767],simde_mm_xor_si128(c2[3653],simde_mm_xor_si128(c2[11565],simde_mm_xor_si128(c2[12451],simde_mm_xor_si128(c2[126],simde_mm_xor_si128(c2[132],simde_mm_xor_si128(c2[1012],simde_mm_xor_si128(c2[6320],simde_mm_xor_si128(c2[7206],simde_mm_xor_si128(c2[8961],simde_mm_xor_si128(c2[9847],simde_mm_xor_si128(c2[6336],simde_mm_xor_si128(c2[6322],simde_mm_xor_si128(c2[7202],simde_mm_xor_si128(c2[1979],simde_mm_xor_si128(c2[2845],simde_mm_xor_si128(c2[2854],simde_mm_xor_si128(c2[3720],simde_mm_xor_si128(c2[7246],simde_mm_xor_si128(c2[8132],simde_mm_xor_si128(c2[8166],simde_mm_xor_si128(c2[9052],simde_mm_xor_si128(c2[4658],simde_mm_xor_si128(c2[5524],simde_mm_xor_si128(c2[13447],simde_mm_xor_si128(c2[13453],simde_mm_xor_si128(c2[254],simde_mm_xor_si128(c2[6452],simde_mm_xor_si128(c2[7338],simde_mm_xor_si128(c2[10856],simde_mm_xor_si128(c2[11722],simde_mm_xor_si128(c2[1175],simde_mm_xor_si128(c2[1161],simde_mm_xor_si128(c2[2041],simde_mm_xor_si128(c2[3819],simde_mm_xor_si128(c2[5604],simde_mm_xor_si128(c2[6490],simde_mm_xor_si128(c2[9136],simde_mm_xor_si128(c2[10002],simde_mm_xor_si128(c2[1209],simde_mm_xor_si128(c2[1215],simde_mm_xor_si128(c2[2095],simde_mm_xor_si128(c2[1245],simde_mm_xor_si128(c2[2131],simde_mm_xor_si128(c2[2129],simde_mm_xor_si128(c2[3015],simde_mm_xor_si128(c2[8288],simde_mm_xor_si128(c2[9174],simde_mm_xor_si128(c2[3934],simde_mm_xor_si128(c2[4800],simde_mm_xor_si128(c2[10970],simde_mm_xor_si128(c2[11856],simde_mm_xor_si128(c2[10081],simde_mm_xor_si128(c2[10087],simde_mm_xor_si128(c2[10967],simde_mm_xor_si128(c2[8377],simde_mm_xor_si128(c2[9243],simde_mm_xor_si128(c2[3092],simde_mm_xor_si128(c2[3978],simde_mm_xor_si128(c2[1323],simde_mm_xor_si128(c2[1329],simde_mm_xor_si128(c2[2209],simde_mm_xor_si128(c2[5774],simde_mm_xor_si128(c2[6640],simde_mm_xor_si128(c2[12802],simde_mm_xor_si128(c2[13688],simde_mm_xor_si128(c2[13699],simde_mm_xor_si128(c2[13685],simde_mm_xor_si128(c2[486],simde_mm_xor_si128(c2[6692],simde_mm_xor_si128(c2[7578],simde_mm_xor_si128(c2[8442],simde_mm_xor_si128(c2[9328],simde_mm_xor_si128(c2[5814],simde_mm_xor_si128(c2[5800],simde_mm_xor_si128(c2[6680],simde_mm_xor_si128(c2[12888],simde_mm_xor_si128(c2[13774],simde_mm_xor_si128(c2[12001],simde_mm_xor_si128(c2[12887],simde_mm_xor_si128(c2[6727],simde_mm_xor_si128(c2[6733],simde_mm_xor_si128(c2[7613],simde_mm_xor_si128(c2[11162],simde_mm_xor_si128(c2[12048],simde_mm_xor_si128(c2[11160],simde_mm_xor_si128(c2[12046],simde_mm_xor_si128(c2[13802],simde_mm_xor_si128(c2[609],simde_mm_xor_si128(c2[651],simde_mm_xor_si128(c2[1537],simde_mm_xor_si128(c2[9448],simde_mm_xor_si128(c2[10334],simde_mm_xor_si128(c2[12966],simde_mm_xor_si128(c2[12972],simde_mm_xor_si128(c2[13852],simde_mm_xor_si128(c2[5089],simde_mm_xor_si128(c2[5975],simde_mm_xor_si128(c2[9495],simde_mm_xor_si128(c2[10361],simde_mm_xor_si128(c2[13005],simde_mm_xor_si128(c2[13011],simde_mm_xor_si128(c2[13891],simde_mm_xor_si128(c2[3371],simde_mm_xor_si128(c2[4257],simde_mm_xor_si128(c2[7779],simde_mm_xor_si128(c2[8645],simde_mm_xor_si128(c2[13928],simde_mm_xor_si128(c2[13934],simde_mm_xor_si128(c2[735],simde_mm_xor_si128(c2[2525],simde_mm_xor_si128(c2[3411],simde_mm_xor_si128(c2[6040],simde_mm_xor_si128(c2[6926],simde_mm_xor_si128(c2[7818],simde_mm_xor_si128(c2[8684],simde_mm_xor_si128(c2[14015],simde_mm_xor_si128(c2[802],simde_mm_xor_si128(c2[6081],simde_mm_xor_si128(c2[6967],simde_mm_xor_si128(c2[1684],simde_mm_xor_si128(c2[1690],simde_mm_xor_si128(c2[2570],simde_mm_xor_si128(c2[2606],simde_mm_xor_si128(c2[3492],simde_mm_xor_si128(c2[14042],simde_mm_xor_si128(c2[849],simde_mm_xor_si128(c2[6128],simde_mm_xor_si128(c2[6134],c2[7014]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 32
+     d2[640]=simde_mm_xor_si128(c2[2658],simde_mm_xor_si128(c2[7932],simde_mm_xor_si128(c2[11457],simde_mm_xor_si128(c2[12337],simde_mm_xor_si128(c2[4407],simde_mm_xor_si128(c2[5287],simde_mm_xor_si128(c2[1773],simde_mm_xor_si128(c2[7974],simde_mm_xor_si128(c2[54],simde_mm_xor_si128(c2[934],simde_mm_xor_si128(c2[7966],simde_mm_xor_si128(c2[8846],simde_mm_xor_si128(c2[960],simde_mm_xor_si128(c2[92],simde_mm_xor_si128(c2[8018],simde_mm_xor_si128(c2[8898],simde_mm_xor_si128(c2[9816],simde_mm_xor_si128(c2[4535],simde_mm_xor_si128(c2[6295],simde_mm_xor_si128(c2[7175],simde_mm_xor_si128(c2[13369],simde_mm_xor_si128(c2[1051],simde_mm_xor_si128(c2[1931],simde_mm_xor_si128(c2[12485],simde_mm_xor_si128(c2[13365],simde_mm_xor_si128(c2[9008],simde_mm_xor_si128(c2[9883],simde_mm_xor_si128(c2[13415],simde_mm_xor_si128(c2[216],simde_mm_xor_si128(c2[1136],simde_mm_xor_si128(c2[10807],simde_mm_xor_si128(c2[11687],simde_mm_xor_si128(c2[5537],simde_mm_xor_si128(c2[6417],simde_mm_xor_si128(c2[13481],simde_mm_xor_si128(c2[2926],simde_mm_xor_si128(c2[3806],simde_mm_xor_si128(c2[7324],simde_mm_xor_si128(c2[8204],simde_mm_xor_si128(c2[12653],simde_mm_xor_si128(c2[1206],simde_mm_xor_si128(c2[2086],simde_mm_xor_si128(c2[7378],simde_mm_xor_si128(c2[8258],simde_mm_xor_si128(c2[8294],simde_mm_xor_si128(c2[9178],simde_mm_xor_si128(c2[378],simde_mm_xor_si128(c2[1258],simde_mm_xor_si128(c2[10963],simde_mm_xor_si128(c2[3040],simde_mm_xor_si128(c2[3920],simde_mm_xor_si128(c2[2171],simde_mm_xor_si128(c2[3051],simde_mm_xor_si128(c2[1327],simde_mm_xor_si128(c2[10121],simde_mm_xor_si128(c2[7492],simde_mm_xor_si128(c2[8372],simde_mm_xor_si128(c2[12803],simde_mm_xor_si128(c2[5772],simde_mm_xor_si128(c2[5769],simde_mm_xor_si128(c2[6649],simde_mm_xor_si128(c2[5761],simde_mm_xor_si128(c2[13721],simde_mm_xor_si128(c2[532],simde_mm_xor_si128(c2[1412],simde_mm_xor_si128(c2[11963],simde_mm_xor_si128(c2[12843],simde_mm_xor_si128(c2[5858],simde_mm_xor_si128(c2[4091],simde_mm_xor_si128(c2[4971],simde_mm_xor_si128(c2[12896],simde_mm_xor_si128(c2[13776],simde_mm_xor_si128(c2[13776],simde_mm_xor_si128(c2[4132],simde_mm_xor_si128(c2[4130],simde_mm_xor_si128(c2[5892],simde_mm_xor_si128(c2[6772],simde_mm_xor_si128(c2[7680],simde_mm_xor_si128(c2[2418],simde_mm_xor_si128(c2[5056],simde_mm_xor_si128(c2[5936],simde_mm_xor_si128(c2[12138],simde_mm_xor_si128(c2[1565],simde_mm_xor_si128(c2[2445],simde_mm_xor_si128(c2[5095],simde_mm_xor_si128(c2[5975],simde_mm_xor_si128(c2[10400],simde_mm_xor_si128(c2[13928],simde_mm_xor_si128(c2[729],simde_mm_xor_si128(c2[6018],simde_mm_xor_si128(c2[6898],simde_mm_xor_si128(c2[9574],simde_mm_xor_si128(c2[13089],simde_mm_xor_si128(c2[13967],simde_mm_xor_si128(c2[768],simde_mm_xor_si128(c2[6965],simde_mm_xor_si128(c2[12250],simde_mm_xor_si128(c2[13130],simde_mm_xor_si128(c2[7853],simde_mm_xor_si128(c2[8733],simde_mm_xor_si128(c2[9655],simde_mm_xor_si128(c2[7012],simde_mm_xor_si128(c2[12297],c2[13177])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 33
+     d2[660]=simde_mm_xor_si128(c2[8850],simde_mm_xor_si128(c2[3601],simde_mm_xor_si128(c2[11008],c2[1722])));
+
+//row: 34
+     d2[680]=simde_mm_xor_si128(c2[9691],simde_mm_xor_si128(c2[7338],simde_mm_xor_si128(c2[5000],c2[11242])));
+
+//row: 35
+     d2[700]=simde_mm_xor_si128(c2[5282],simde_mm_xor_si128(c2[10576],simde_mm_xor_si128(c2[882],simde_mm_xor_si128(c2[7931],simde_mm_xor_si128(c2[10618],simde_mm_xor_si128(c2[3578],simde_mm_xor_si128(c2[11490],simde_mm_xor_si128(c2[13252],simde_mm_xor_si128(c2[3604],simde_mm_xor_si128(c2[2736],simde_mm_xor_si128(c2[11522],simde_mm_xor_si128(c2[12440],simde_mm_xor_si128(c2[7179],simde_mm_xor_si128(c2[9819],simde_mm_xor_si128(c2[1934],simde_mm_xor_si128(c2[4575],simde_mm_xor_si128(c2[1930],simde_mm_xor_si128(c2[11652],simde_mm_xor_si128(c2[12527],simde_mm_xor_si128(c2[2840],simde_mm_xor_si128(c2[3760],simde_mm_xor_si128(c2[252],simde_mm_xor_si128(c2[9041],simde_mm_xor_si128(c2[12569],simde_mm_xor_si128(c2[2046],simde_mm_xor_si128(c2[6450],simde_mm_xor_si128(c2[10848],simde_mm_xor_si128(c2[1218],simde_mm_xor_si128(c2[4730],simde_mm_xor_si128(c2[10882],simde_mm_xor_si128(c2[10938],simde_mm_xor_si128(c2[11802],simde_mm_xor_si128(c2[3882],simde_mm_xor_si128(c2[13607],simde_mm_xor_si128(c2[6564],simde_mm_xor_si128(c2[5695],simde_mm_xor_si128(c2[3971],simde_mm_xor_si128(c2[12765],simde_mm_xor_si128(c2[11016],simde_mm_xor_si128(c2[1368],simde_mm_xor_si128(c2[8416],simde_mm_xor_si128(c2[9293],simde_mm_xor_si128(c2[6643],simde_mm_xor_si128(c2[2286],simde_mm_xor_si128(c2[4056],simde_mm_xor_si128(c2[1408],simde_mm_xor_si128(c2[8482],simde_mm_xor_si128(c2[7615],simde_mm_xor_si128(c2[2321],simde_mm_xor_si128(c2[6776],simde_mm_xor_si128(c2[6774],simde_mm_xor_si128(c2[9416],simde_mm_xor_si128(c2[10324],simde_mm_xor_si128(c2[5042],simde_mm_xor_si128(c2[8560],simde_mm_xor_si128(c2[683],simde_mm_xor_si128(c2[5089],simde_mm_xor_si128(c2[8619],simde_mm_xor_si128(c2[13044],simde_mm_xor_si128(c2[3373],simde_mm_xor_si128(c2[9522],simde_mm_xor_si128(c2[12218],simde_mm_xor_si128(c2[1654],simde_mm_xor_si128(c2[3412],simde_mm_xor_si128(c2[9609],simde_mm_xor_si128(c2[1695],simde_mm_xor_si128(c2[11377],simde_mm_xor_si128(c2[12299],simde_mm_xor_si128(c2[9656],c2[1722])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 36
+     d2[720]=simde_mm_xor_si128(c2[2656],simde_mm_xor_si128(c2[3211],simde_mm_xor_si128(c2[2371],c2[730])));
+
+//row: 37
+     d2[740]=simde_mm_xor_si128(c2[10571],simde_mm_xor_si128(c2[11451],simde_mm_xor_si128(c2[2646],simde_mm_xor_si128(c2[7051],simde_mm_xor_si128(c2[1],simde_mm_xor_si128(c2[1808],simde_mm_xor_si128(c2[2688],simde_mm_xor_si128(c2[9727],simde_mm_xor_si128(c2[3560],simde_mm_xor_si128(c2[8858],simde_mm_xor_si128(c2[8893],simde_mm_xor_si128(c2[9773],simde_mm_xor_si128(c2[8885],simde_mm_xor_si128(c2[3612],simde_mm_xor_si128(c2[3650],simde_mm_xor_si128(c2[4530],simde_mm_xor_si128(c2[13328],simde_mm_xor_si128(c2[1889],simde_mm_xor_si128(c2[8083],simde_mm_xor_si128(c2[10724],simde_mm_xor_si128(c2[8099],simde_mm_xor_si128(c2[2842],simde_mm_xor_si128(c2[3722],simde_mm_xor_si128(c2[4617],simde_mm_xor_si128(c2[9009],simde_mm_xor_si128(c2[9049],simde_mm_xor_si128(c2[9929],simde_mm_xor_si128(c2[6401],simde_mm_xor_si128(c2[1131],simde_mm_xor_si128(c2[8215],simde_mm_xor_si128(c2[12619],simde_mm_xor_si128(c2[2938],simde_mm_xor_si128(c2[7367],simde_mm_xor_si128(c2[10899],simde_mm_xor_si128(c2[2972],simde_mm_xor_si128(c2[2128],simde_mm_xor_si128(c2[3008],simde_mm_xor_si128(c2[3892],simde_mm_xor_si128(c2[10051],simde_mm_xor_si128(c2[4817],simde_mm_xor_si128(c2[5697],simde_mm_xor_si128(c2[12733],simde_mm_xor_si128(c2[11844],simde_mm_xor_si128(c2[9240],simde_mm_xor_si128(c2[10120],simde_mm_xor_si128(c2[4855],simde_mm_xor_si128(c2[3086],simde_mm_xor_si128(c2[6657],simde_mm_xor_si128(c2[7537],simde_mm_xor_si128(c2[486],simde_mm_xor_si128(c2[1363],simde_mm_xor_si128(c2[7575],simde_mm_xor_si128(c2[8455],simde_mm_xor_si128(c2[10205],simde_mm_xor_si128(c2[7577],simde_mm_xor_si128(c2[13720],simde_mm_xor_si128(c2[572],simde_mm_xor_si128(c2[13764],simde_mm_xor_si128(c2[8490],simde_mm_xor_si128(c2[12045],simde_mm_xor_si128(c2[12925],simde_mm_xor_si128(c2[12923],simde_mm_xor_si128(c2[1486],simde_mm_xor_si128(c2[1534],simde_mm_xor_si128(c2[2414],simde_mm_xor_si128(c2[11211],simde_mm_xor_si128(c2[650],simde_mm_xor_si128(c2[6852],simde_mm_xor_si128(c2[11258],simde_mm_xor_si128(c2[689],simde_mm_xor_si128(c2[4254],simde_mm_xor_si128(c2[5134],simde_mm_xor_si128(c2[9522],simde_mm_xor_si128(c2[1612],simde_mm_xor_si128(c2[3408],simde_mm_xor_si128(c2[4288],simde_mm_xor_si128(c2[7803],simde_mm_xor_si128(c2[9561],simde_mm_xor_si128(c2[819],simde_mm_xor_si128(c2[1699],simde_mm_xor_si128(c2[7844],simde_mm_xor_si128(c2[3447],simde_mm_xor_si128(c2[3489],simde_mm_xor_si128(c2[4369],simde_mm_xor_si128(c2[1726],c2[7891])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 38
+     d2[760]=simde_mm_xor_si128(c2[6169],simde_mm_xor_si128(c2[3011],simde_mm_xor_si128(c2[404],c2[4891])));
+
+//row: 39
+     d2[780]=simde_mm_xor_si128(c2[5326],simde_mm_xor_si128(c2[11564],simde_mm_xor_si128(c2[292],c2[773])));
+
+//row: 40
+     d2[800]=simde_mm_xor_si128(c2[2],simde_mm_xor_si128(c2[325],c2[5092]));
+
+//row: 41
+     d2[820]=simde_mm_xor_si128(c2[8849],simde_mm_xor_si128(c2[13322],simde_mm_xor_si128(c2[10927],c2[13932])));
+
+//row: 42
+     d2[840]=simde_mm_xor_si128(c2[7052],simde_mm_xor_si128(c2[12326],simde_mm_xor_si128(c2[1772],simde_mm_xor_si128(c2[2652],simde_mm_xor_si128(c2[8801],simde_mm_xor_si128(c2[9681],simde_mm_xor_si128(c2[1774],simde_mm_xor_si128(c2[12368],simde_mm_xor_si128(c2[4448],simde_mm_xor_si128(c2[5328],simde_mm_xor_si128(c2[12360],simde_mm_xor_si128(c2[13240],simde_mm_xor_si128(c2[5374],simde_mm_xor_si128(c2[4486],simde_mm_xor_si128(c2[12412],simde_mm_xor_si128(c2[13292],simde_mm_xor_si128(c2[131],simde_mm_xor_si128(c2[8929],simde_mm_xor_si128(c2[10689],simde_mm_xor_si128(c2[11569],simde_mm_xor_si128(c2[3684],simde_mm_xor_si128(c2[5445],simde_mm_xor_si128(c2[6325],simde_mm_xor_si128(c2[2800],simde_mm_xor_si128(c2[3680],simde_mm_xor_si128(c2[1044],simde_mm_xor_si128(c2[13402],simde_mm_xor_si128(c2[218],simde_mm_xor_si128(c2[3730],simde_mm_xor_si128(c2[4610],simde_mm_xor_si128(c2[5530],simde_mm_xor_si128(c2[1122],simde_mm_xor_si128(c2[2002],simde_mm_xor_si128(c2[9931],simde_mm_xor_si128(c2[10811],simde_mm_xor_si128(c2[3816],simde_mm_xor_si128(c2[7320],simde_mm_xor_si128(c2[8200],simde_mm_xor_si128(c2[11738],simde_mm_xor_si128(c2[12618],simde_mm_xor_si128(c2[2968],simde_mm_xor_si128(c2[5600],simde_mm_xor_si128(c2[6480],simde_mm_xor_si128(c2[11772],simde_mm_xor_si128(c2[12652],simde_mm_xor_si128(c2[12688],simde_mm_xor_si128(c2[13572],simde_mm_xor_si128(c2[4772],simde_mm_xor_si128(c2[5652],simde_mm_xor_si128(c2[1298],simde_mm_xor_si128(c2[7454],simde_mm_xor_si128(c2[8334],simde_mm_xor_si128(c2[6565],simde_mm_xor_si128(c2[7445],simde_mm_xor_si128(c2[5721],simde_mm_xor_si128(c2[456],simde_mm_xor_si128(c2[11886],simde_mm_xor_si128(c2[12766],simde_mm_xor_si128(c2[3138],simde_mm_xor_si128(c2[10166],simde_mm_xor_si128(c2[10163],simde_mm_xor_si128(c2[11043],simde_mm_xor_si128(c2[4056],simde_mm_xor_si128(c2[4926],simde_mm_xor_si128(c2[5806],simde_mm_xor_si128(c2[2298],simde_mm_xor_si128(c2[3178],simde_mm_xor_si128(c2[10252],simde_mm_xor_si128(c2[8485],simde_mm_xor_si128(c2[9365],simde_mm_xor_si128(c2[3211],simde_mm_xor_si128(c2[4091],simde_mm_xor_si128(c2[8526],simde_mm_xor_si128(c2[8524],simde_mm_xor_si128(c2[10286],simde_mm_xor_si128(c2[11166],simde_mm_xor_si128(c2[12094],simde_mm_xor_si128(c2[6812],simde_mm_xor_si128(c2[9450],simde_mm_xor_si128(c2[10330],simde_mm_xor_si128(c2[2453],simde_mm_xor_si128(c2[5979],simde_mm_xor_si128(c2[6859],simde_mm_xor_si128(c2[9489],simde_mm_xor_si128(c2[10369],simde_mm_xor_si128(c2[735],simde_mm_xor_si128(c2[4243],simde_mm_xor_si128(c2[5123],simde_mm_xor_si128(c2[10412],simde_mm_xor_si128(c2[11292],simde_mm_xor_si128(c2[13968],simde_mm_xor_si128(c2[3404],simde_mm_xor_si128(c2[4282],simde_mm_xor_si128(c2[5162],simde_mm_xor_si128(c2[11379],simde_mm_xor_si128(c2[2565],simde_mm_xor_si128(c2[3445],simde_mm_xor_si128(c2[12247],simde_mm_xor_si128(c2[13127],simde_mm_xor_si128(c2[14049],simde_mm_xor_si128(c2[11406],simde_mm_xor_si128(c2[2612],c2[3492]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 43
+     d2[860]=simde_mm_xor_si128(c2[11],simde_mm_xor_si128(c2[5285],simde_mm_xor_si128(c2[9690],simde_mm_xor_si128(c2[1760],simde_mm_xor_si128(c2[2640],simde_mm_xor_si128(c2[5327],simde_mm_xor_si128(c2[12366],simde_mm_xor_si128(c2[5339],simde_mm_xor_si128(c2[6219],simde_mm_xor_si128(c2[3574],simde_mm_xor_si128(c2[12412],simde_mm_xor_si128(c2[11524],simde_mm_xor_si128(c2[6251],simde_mm_xor_si128(c2[7169],simde_mm_xor_si128(c2[1888],simde_mm_xor_si128(c2[3648],simde_mm_xor_si128(c2[4528],simde_mm_xor_si128(c2[10722],simde_mm_xor_si128(c2[13363],simde_mm_xor_si128(c2[9858],simde_mm_xor_si128(c2[10738],simde_mm_xor_si128(c2[6361],simde_mm_xor_si128(c2[7256],simde_mm_xor_si128(c2[11648],simde_mm_xor_si128(c2[12568],simde_mm_xor_si128(c2[9040],simde_mm_xor_si128(c2[2890],simde_mm_xor_si128(c2[3770],simde_mm_xor_si128(c2[10854],simde_mm_xor_si128(c2[1179],simde_mm_xor_si128(c2[4697],simde_mm_xor_si128(c2[5577],simde_mm_xor_si128(c2[10006],simde_mm_xor_si128(c2[13538],simde_mm_xor_si128(c2[4731],simde_mm_xor_si128(c2[5611],simde_mm_xor_si128(c2[5647],simde_mm_xor_si128(c2[6531],simde_mm_xor_si128(c2[12690],simde_mm_xor_si128(c2[8336],simde_mm_xor_si128(c2[1293],simde_mm_xor_si128(c2[13603],simde_mm_xor_si128(c2[404],simde_mm_xor_si128(c2[12779],simde_mm_xor_si128(c2[7494],simde_mm_xor_si128(c2[4845],simde_mm_xor_si128(c2[5725],simde_mm_xor_si128(c2[10176],simde_mm_xor_si128(c2[3125],simde_mm_xor_si128(c2[3122],simde_mm_xor_si128(c2[4002],simde_mm_xor_si128(c2[11094],simde_mm_xor_si128(c2[12844],simde_mm_xor_si128(c2[9336],simde_mm_xor_si128(c2[10216],simde_mm_xor_si128(c2[3211],simde_mm_xor_si128(c2[2324],simde_mm_xor_si128(c2[10249],simde_mm_xor_si128(c2[11129],simde_mm_xor_si128(c2[1485],simde_mm_xor_si128(c2[1483],simde_mm_xor_si128(c2[4125],simde_mm_xor_si128(c2[5053],simde_mm_xor_si128(c2[13850],simde_mm_xor_si128(c2[2409],simde_mm_xor_si128(c2[3289],simde_mm_xor_si128(c2[5044],simde_mm_xor_si128(c2[9491],simde_mm_xor_si128(c2[13897],simde_mm_xor_si128(c2[2448],simde_mm_xor_si128(c2[3328],simde_mm_xor_si128(c2[7773],simde_mm_xor_si128(c2[12161],simde_mm_xor_si128(c2[3371],simde_mm_xor_si128(c2[4251],simde_mm_xor_si128(c2[731],simde_mm_xor_si128(c2[6927],simde_mm_xor_si128(c2[10442],simde_mm_xor_si128(c2[12200],simde_mm_xor_si128(c2[4338],simde_mm_xor_si128(c2[10483],simde_mm_xor_si128(c2[5206],simde_mm_xor_si128(c2[6086],simde_mm_xor_si128(c2[7008],simde_mm_xor_si128(c2[4365],simde_mm_xor_si128(c2[9650],c2[10530]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 44
+     d2[880]=simde_mm_xor_si128(c2[11440],simde_mm_xor_si128(c2[2655],simde_mm_xor_si128(c2[7040],simde_mm_xor_si128(c2[10],simde_mm_xor_si128(c2[8814],simde_mm_xor_si128(c2[2697],simde_mm_xor_si128(c2[9736],simde_mm_xor_si128(c2[3569],simde_mm_xor_si128(c2[9762],simde_mm_xor_si128(c2[8894],simde_mm_xor_si128(c2[3601],simde_mm_xor_si128(c2[4539],simde_mm_xor_si128(c2[13337],simde_mm_xor_si128(c2[1898],simde_mm_xor_si128(c2[8092],simde_mm_xor_si128(c2[10733],simde_mm_xor_si128(c2[8088],simde_mm_xor_si128(c2[3731],simde_mm_xor_si128(c2[4606],simde_mm_xor_si128(c2[9018],simde_mm_xor_si128(c2[9938],simde_mm_xor_si128(c2[6410],simde_mm_xor_si128(c2[1120],simde_mm_xor_si128(c2[8204],simde_mm_xor_si128(c2[12608],simde_mm_xor_si128(c2[2927],simde_mm_xor_si128(c2[2934],simde_mm_xor_si128(c2[7376],simde_mm_xor_si128(c2[10888],simde_mm_xor_si128(c2[2961],simde_mm_xor_si128(c2[3017],simde_mm_xor_si128(c2[3881],simde_mm_xor_si128(c2[10040],simde_mm_xor_si128(c2[3016],simde_mm_xor_si128(c2[5686],simde_mm_xor_si128(c2[12722],simde_mm_xor_si128(c2[11853],simde_mm_xor_si128(c2[10129],simde_mm_xor_si128(c2[4844],simde_mm_xor_si128(c2[3095],simde_mm_xor_si128(c2[7526],simde_mm_xor_si128(c2[495],simde_mm_xor_si128(c2[1372],simde_mm_xor_si128(c2[8444],simde_mm_xor_si128(c2[10214],simde_mm_xor_si128(c2[7566],simde_mm_xor_si128(c2[561],simde_mm_xor_si128(c2[13773],simde_mm_xor_si128(c2[8499],simde_mm_xor_si128(c2[12934],simde_mm_xor_si128(c2[12932],simde_mm_xor_si128(c2[1495],simde_mm_xor_si128(c2[2403],simde_mm_xor_si128(c2[11200],simde_mm_xor_si128(c2[659],simde_mm_xor_si128(c2[6841],simde_mm_xor_si128(c2[11247],simde_mm_xor_si128(c2[698],simde_mm_xor_si128(c2[5123],simde_mm_xor_si128(c2[9531],simde_mm_xor_si128(c2[1601],simde_mm_xor_si128(c2[4297],simde_mm_xor_si128(c2[7812],simde_mm_xor_si128(c2[9570],simde_mm_xor_si128(c2[1688],simde_mm_xor_si128(c2[7853],simde_mm_xor_si128(c2[3456],simde_mm_xor_si128(c2[4378],simde_mm_xor_si128(c2[1735],c2[7880])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 45
+     d2[900]=simde_mm_xor_si128(c2[4446],simde_mm_xor_si128(c2[3774],c2[12727]));
+  }
+}
+#endif
diff --git a/openair1/PHY/CODING/nrLDPC_encoder/ldpc352_byte.c b/openair1/PHY/CODING/nrLDPC_encoder/ldpc352_byte.c
index 271070b71972499d7527a91afea584119452672e..e920501d8551ddb1e57f0b1db5548f0ed785755d 100644
--- a/openair1/PHY/CODING/nrLDPC_encoder/ldpc352_byte.c
+++ b/openair1/PHY/CODING/nrLDPC_encoder/ldpc352_byte.c
@@ -1,9 +1,10 @@
+#ifdef __AVX2__
 #include "PHY/sse_intrin.h"
 // generated code for Zc=352, byte encoding
 static inline void ldpc352_byte(uint8_t *c,uint8_t *d) {
-  __m256i *csimd=(__m256i *)c,*dsimd=(__m256i *)d;
+  simde__m256i *csimd=(simde__m256i *)c,*dsimd=(simde__m256i *)d;
 
-  __m256i *c2,*d2;
+  simde__m256i *c2,*d2;
 
   int i2;
   for (i2=0; i2<11; i2++) {
@@ -149,3 +150,4 @@ static inline void ldpc352_byte(uint8_t *c,uint8_t *d) {
      d2[495]=simde_mm256_xor_si256(c2[8736],simde_mm256_xor_si256(c2[1586],c2[5551]));
   }
 }
+#endif
diff --git a/openair1/PHY/CODING/nrLDPC_encoder/ldpc352_byte_128.c b/openair1/PHY/CODING/nrLDPC_encoder/ldpc352_byte_128.c
new file mode 100644
index 0000000000000000000000000000000000000000..a0add1832cdab88fd9bf7bd09aa1a13eb3498ee1
--- /dev/null
+++ b/openair1/PHY/CODING/nrLDPC_encoder/ldpc352_byte_128.c
@@ -0,0 +1,156 @@
+#ifndef __AVX2__
+#include "PHY/sse_intrin.h"
+// generated code for Zc=352, byte encoding
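+// Note (added for readability, describing the generated pattern below): each
+// "row: n" statement is one GF(2) accumulation -- for every vector lane i2,
+// d2[22*n] is the XOR of a fixed set of 128-bit words c2[k], the offsets k
+// having been produced by the code generator from the Zc=352 base-graph circulants.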
+static inline void ldpc352_byte(uint8_t *c,uint8_t *d) {
+  simde__m128i *csimd=(simde__m128i *)c,*dsimd=(simde__m128i *)d;
+
+  simde__m128i *c2,*d2;
+
+  int i2;
+  for (i2=0; i2<22; i2++) {
+     c2=&csimd[i2];
+     d2=&dsimd[i2];
+
+//row: 0
+     d2[0]=simde_mm_xor_si128(c2[5826],simde_mm_xor_si128(c2[12588],simde_mm_xor_si128(c2[1950],simde_mm_xor_si128(c2[974],simde_mm_xor_si128(c2[5859],simde_mm_xor_si128(c2[2950],simde_mm_xor_si128(c2[13601],simde_mm_xor_si128(c2[6874],simde_mm_xor_si128(c2[1070],simde_mm_xor_si128(c2[4941],simde_mm_xor_si128(c2[9832],simde_mm_xor_si128(c2[138],simde_mm_xor_si128(c2[1103],simde_mm_xor_si128(c2[2133],simde_mm_xor_si128(c2[13746],simde_mm_xor_si128(c2[6969],simde_mm_xor_si128(c2[14752],simde_mm_xor_si128(c2[11852],simde_mm_xor_si128(c2[14746],simde_mm_xor_si128(c2[5114],simde_mm_xor_si128(c2[8992],simde_mm_xor_si128(c2[10920],simde_mm_xor_si128(c2[315],simde_mm_xor_si128(c2[316],simde_mm_xor_si128(c2[6126],simde_mm_xor_si128(c2[13922],simde_mm_xor_si128(c2[12950],simde_mm_xor_si128(c2[11005],simde_mm_xor_si128(c2[3315],simde_mm_xor_si128(c2[2335],simde_mm_xor_si128(c2[6222],simde_mm_xor_si128(c2[10135],simde_mm_xor_si128(c2[14967],simde_mm_xor_si128(c2[10126],simde_mm_xor_si128(c2[1452],simde_mm_xor_si128(c2[7270],simde_mm_xor_si128(c2[6307],simde_mm_xor_si128(c2[3453],simde_mm_xor_si128(c2[13127],simde_mm_xor_si128(c2[9261],simde_mm_xor_si128(c2[9296],simde_mm_xor_si128(c2[14130],simde_mm_xor_si128(c2[13172],simde_mm_xor_si128(c2[14188],simde_mm_xor_si128(c2[14185],simde_mm_xor_si128(c2[9339],simde_mm_xor_si128(c2[5503],simde_mm_xor_si128(c2[2611],simde_mm_xor_si128(c2[13251],simde_mm_xor_si128(c2[11373],simde_mm_xor_si128(c2[1688],simde_mm_xor_si128(c2[9431],simde_mm_xor_si128(c2[5596],simde_mm_xor_si128(c2[3660],simde_mm_xor_si128(c2[7537],simde_mm_xor_si128(c2[811],simde_mm_xor_si128(c2[3706],simde_mm_xor_si128(c2[15320],simde_mm_xor_si128(c2[7622],simde_mm_xor_si128(c2[9548],simde_mm_xor_si128(c2[2785],simde_mm_xor_si128(c2[15402],simde_mm_xor_si128(c2[7656],simde_mm_xor_si128(c2[9599],simde_mm_xor_si128(c2[12551],simde_mm_xor_si128(c2[14494],c2[9643]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 1
+     d2[22]=simde_mm_xor_si128(c2[5826],simde_mm_xor_si128(c2[6794],simde_mm_xor_si128(c2[13556],simde_mm_xor_si128(c2[2918],simde_mm_xor_si128(c2[1942],simde_mm_xor_si128(c2[5859],simde_mm_xor_si128(c2[6827],simde_mm_xor_si128(c2[3918],simde_mm_xor_si128(c2[14569],simde_mm_xor_si128(c2[6874],simde_mm_xor_si128(c2[7842],simde_mm_xor_si128(c2[2038],simde_mm_xor_si128(c2[5909],simde_mm_xor_si128(c2[9832],simde_mm_xor_si128(c2[10800],simde_mm_xor_si128(c2[1106],simde_mm_xor_si128(c2[2071],simde_mm_xor_si128(c2[3101],simde_mm_xor_si128(c2[14714],simde_mm_xor_si128(c2[7937],simde_mm_xor_si128(c2[14752],simde_mm_xor_si128(c2[233],simde_mm_xor_si128(c2[12820],simde_mm_xor_si128(c2[227],simde_mm_xor_si128(c2[5114],simde_mm_xor_si128(c2[6082],simde_mm_xor_si128(c2[9960],simde_mm_xor_si128(c2[11888],simde_mm_xor_si128(c2[1283],simde_mm_xor_si128(c2[1284],simde_mm_xor_si128(c2[7094],simde_mm_xor_si128(c2[14890],simde_mm_xor_si128(c2[13918],simde_mm_xor_si128(c2[11973],simde_mm_xor_si128(c2[3315],simde_mm_xor_si128(c2[4283],simde_mm_xor_si128(c2[3303],simde_mm_xor_si128(c2[7190],simde_mm_xor_si128(c2[10135],simde_mm_xor_si128(c2[11103],simde_mm_xor_si128(c2[448],simde_mm_xor_si128(c2[11094],simde_mm_xor_si128(c2[1452],simde_mm_xor_si128(c2[2420],simde_mm_xor_si128(c2[8238],simde_mm_xor_si128(c2[7275],simde_mm_xor_si128(c2[3453],simde_mm_xor_si128(c2[4421],simde_mm_xor_si128(c2[14095],simde_mm_xor_si128(c2[10229],simde_mm_xor_si128(c2[9296],simde_mm_xor_si128(c2[10264],simde_mm_xor_si128(c2[15098],simde_mm_xor_si128(c2[14140],simde_mm_xor_si128(c2[15156],simde_mm_xor_si128(c2[15153],simde_mm_xor_si128(c2[10307],simde_mm_xor_si128(c2[5503],simde_mm_xor_si128(c2[6471],simde_mm_xor_si128(c2[3579],simde_mm_xor_si128(c2[14219],simde_mm_xor_si128(c2[11373],simde_mm_xor_si128(c2[12341],simde_mm_xor_si128(c2[2656],simde_mm_xor_si128(c2[10399],simde_mm_xor_si128(c2[6564],simde_mm_xor_si128(c2[4628],simde_mm_xor_si128(c2[8505],simde_mm_xor_si128(c2[811],simde_mm_xor_si128(c2[1779],simde_mm_xor_si128(c2[4674],simde_mm_xor_si128(c2[801],simde_mm_xor_si128(c2[7622],simde_mm_xor_si128(c2[8590],simde_mm_xor_si128(c2[10516],simde_mm_xor_si128(c2[3753],simde_mm_xor_si128(c2[15402],simde_mm_xor_si128(c2[883],simde_mm_xor_si128(c2[8624],simde_mm_xor_si128(c2[10567],simde_mm_xor_si128(c2[12551],simde_mm_xor_si128(c2[13519],simde_mm_xor_si128(c2[15462],c2[10611])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 2
+     d2[44]=simde_mm_xor_si128(c2[6794],simde_mm_xor_si128(c2[13556],simde_mm_xor_si128(c2[1950],simde_mm_xor_si128(c2[2918],simde_mm_xor_si128(c2[974],simde_mm_xor_si128(c2[1942],simde_mm_xor_si128(c2[6827],simde_mm_xor_si128(c2[2950],simde_mm_xor_si128(c2[3918],simde_mm_xor_si128(c2[13601],simde_mm_xor_si128(c2[14569],simde_mm_xor_si128(c2[7842],simde_mm_xor_si128(c2[2038],simde_mm_xor_si128(c2[4941],simde_mm_xor_si128(c2[5909],simde_mm_xor_si128(c2[10800],simde_mm_xor_si128(c2[1106],simde_mm_xor_si128(c2[1103],simde_mm_xor_si128(c2[2071],simde_mm_xor_si128(c2[3101],simde_mm_xor_si128(c2[13746],simde_mm_xor_si128(c2[14714],simde_mm_xor_si128(c2[6969],simde_mm_xor_si128(c2[7937],simde_mm_xor_si128(c2[233],simde_mm_xor_si128(c2[12820],simde_mm_xor_si128(c2[14746],simde_mm_xor_si128(c2[227],simde_mm_xor_si128(c2[6082],simde_mm_xor_si128(c2[8992],simde_mm_xor_si128(c2[9960],simde_mm_xor_si128(c2[10920],simde_mm_xor_si128(c2[11888],simde_mm_xor_si128(c2[1283],simde_mm_xor_si128(c2[316],simde_mm_xor_si128(c2[1284],simde_mm_xor_si128(c2[6126],simde_mm_xor_si128(c2[7094],simde_mm_xor_si128(c2[14890],simde_mm_xor_si128(c2[12950],simde_mm_xor_si128(c2[13918],simde_mm_xor_si128(c2[11005],simde_mm_xor_si128(c2[11973],simde_mm_xor_si128(c2[4283],simde_mm_xor_si128(c2[3303],simde_mm_xor_si128(c2[6222],simde_mm_xor_si128(c2[7190],simde_mm_xor_si128(c2[11103],simde_mm_xor_si128(c2[14967],simde_mm_xor_si128(c2[448],simde_mm_xor_si128(c2[10126],simde_mm_xor_si128(c2[11094],simde_mm_xor_si128(c2[2420],simde_mm_xor_si128(c2[8238],simde_mm_xor_si128(c2[6307],simde_mm_xor_si128(c2[7275],simde_mm_xor_si128(c2[4421],simde_mm_xor_si128(c2[14095],simde_mm_xor_si128(c2[9261],simde_mm_xor_si128(c2[10229],simde_mm_xor_si128(c2[10264],simde_mm_xor_si128(c2[14130],simde_mm_xor_si128(c2[15098],simde_mm_xor_si128(c2[13172],simde_mm_xor_si128(c2[14140],simde_mm_xor_si128(c2[15156],simde_mm_xor_si128(c2[14185],simde_mm_xor_si128(c2[15153],simde_mm_xor_si128(c2[9339],simde_mm_xor_si128(c2[10307],simde_mm_xor_si128(c2[6471],simde_mm_xor_si128(c2[3579],simde_mm_xor_si128(c2[13251],simde_mm_xor_si128(c2[14219],simde_mm_xor_si128(c2[12341],simde_mm_xor_si128(c2[2656],simde_mm_xor_si128(c2[9431],simde_mm_xor_si128(c2[10399],simde_mm_xor_si128(c2[6564],simde_mm_xor_si128(c2[3660],simde_mm_xor_si128(c2[4628],simde_mm_xor_si128(c2[7537],simde_mm_xor_si128(c2[8505],simde_mm_xor_si128(c2[1779],simde_mm_xor_si128(c2[3706],simde_mm_xor_si128(c2[4674],simde_mm_xor_si128(c2[15320],simde_mm_xor_si128(c2[801],simde_mm_xor_si128(c2[8590],simde_mm_xor_si128(c2[10516],simde_mm_xor_si128(c2[2785],simde_mm_xor_si128(c2[3753],simde_mm_xor_si128(c2[883],simde_mm_xor_si128(c2[7656],simde_mm_xor_si128(c2[8624],simde_mm_xor_si128(c2[9599],simde_mm_xor_si128(c2[10567],simde_mm_xor_si128(c2[13519],simde_mm_xor_si128(c2[15462],simde_mm_xor_si128(c2[9643],c2[10611]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 3
+     d2[66]=simde_mm_xor_si128(c2[6794],simde_mm_xor_si128(c2[13556],simde_mm_xor_si128(c2[2918],simde_mm_xor_si128(c2[974],simde_mm_xor_si128(c2[1942],simde_mm_xor_si128(c2[6827],simde_mm_xor_si128(c2[3918],simde_mm_xor_si128(c2[13601],simde_mm_xor_si128(c2[14569],simde_mm_xor_si128(c2[7842],simde_mm_xor_si128(c2[2038],simde_mm_xor_si128(c2[5909],simde_mm_xor_si128(c2[10800],simde_mm_xor_si128(c2[1106],simde_mm_xor_si128(c2[1103],simde_mm_xor_si128(c2[2071],simde_mm_xor_si128(c2[3101],simde_mm_xor_si128(c2[14714],simde_mm_xor_si128(c2[6969],simde_mm_xor_si128(c2[7937],simde_mm_xor_si128(c2[233],simde_mm_xor_si128(c2[12820],simde_mm_xor_si128(c2[227],simde_mm_xor_si128(c2[6082],simde_mm_xor_si128(c2[9960],simde_mm_xor_si128(c2[10920],simde_mm_xor_si128(c2[11888],simde_mm_xor_si128(c2[1283],simde_mm_xor_si128(c2[1284],simde_mm_xor_si128(c2[6126],simde_mm_xor_si128(c2[7094],simde_mm_xor_si128(c2[14890],simde_mm_xor_si128(c2[13918],simde_mm_xor_si128(c2[11005],simde_mm_xor_si128(c2[11973],simde_mm_xor_si128(c2[4283],simde_mm_xor_si128(c2[3303],simde_mm_xor_si128(c2[7190],simde_mm_xor_si128(c2[11103],simde_mm_xor_si128(c2[448],simde_mm_xor_si128(c2[10126],simde_mm_xor_si128(c2[11094],simde_mm_xor_si128(c2[2420],simde_mm_xor_si128(c2[8238],simde_mm_xor_si128(c2[6307],simde_mm_xor_si128(c2[7275],simde_mm_xor_si128(c2[4421],simde_mm_xor_si128(c2[14095],simde_mm_xor_si128(c2[9261],simde_mm_xor_si128(c2[10229],simde_mm_xor_si128(c2[10264],simde_mm_xor_si128(c2[15098],simde_mm_xor_si128(c2[13172],simde_mm_xor_si128(c2[14140],simde_mm_xor_si128(c2[15156],simde_mm_xor_si128(c2[15153],simde_mm_xor_si128(c2[9339],simde_mm_xor_si128(c2[10307],simde_mm_xor_si128(c2[6471],simde_mm_xor_si128(c2[3579],simde_mm_xor_si128(c2[14219],simde_mm_xor_si128(c2[12341],simde_mm_xor_si128(c2[2656],simde_mm_xor_si128(c2[9431],simde_mm_xor_si128(c2[10399],simde_mm_xor_si128(c2[6564],simde_mm_xor_si128(c2[4628],simde_mm_xor_si128(c2[7537],simde_mm_xor_si128(c2[8505],simde_mm_xor_si128(c2[1779],simde_mm_xor_si128(c2[4674],simde_mm_xor_si128(c2[15320],simde_mm_xor_si128(c2[801],simde_mm_xor_si128(c2[8590],simde_mm_xor_si128(c2[10516],simde_mm_xor_si128(c2[3753],simde_mm_xor_si128(c2[883],simde_mm_xor_si128(c2[8624],simde_mm_xor_si128(c2[9599],simde_mm_xor_si128(c2[10567],simde_mm_xor_si128(c2[13519],simde_mm_xor_si128(c2[15462],simde_mm_xor_si128(c2[9643],c2[10611])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 4
+     d2[88]=simde_mm_xor_si128(c2[9682],c2[60]);
+
+//row: 5
+     d2[110]=simde_mm_xor_si128(c2[982],simde_mm_xor_si128(c2[7744],simde_mm_xor_si128(c2[12593],simde_mm_xor_si128(c2[11617],simde_mm_xor_si128(c2[10661],simde_mm_xor_si128(c2[1015],simde_mm_xor_si128(c2[13615],simde_mm_xor_si128(c2[8757],simde_mm_xor_si128(c2[1988],simde_mm_xor_si128(c2[2030],simde_mm_xor_si128(c2[11713],simde_mm_xor_si128(c2[97],simde_mm_xor_si128(c2[4988],simde_mm_xor_si128(c2[10781],simde_mm_xor_si128(c2[11768],simde_mm_xor_si128(c2[10795],simde_mm_xor_si128(c2[12776],simde_mm_xor_si128(c2[8902],simde_mm_xor_si128(c2[2125],simde_mm_xor_si128(c2[9908],simde_mm_xor_si128(c2[7008],simde_mm_xor_si128(c2[9902],simde_mm_xor_si128(c2[270],simde_mm_xor_si128(c2[4148],simde_mm_xor_si128(c2[6076],simde_mm_xor_si128(c2[10958],simde_mm_xor_si128(c2[10959],simde_mm_xor_si128(c2[1282],simde_mm_xor_si128(c2[9078],simde_mm_xor_si128(c2[8106],simde_mm_xor_si128(c2[6161],simde_mm_xor_si128(c2[13958],simde_mm_xor_si128(c2[13000],simde_mm_xor_si128(c2[1378],simde_mm_xor_si128(c2[5291],simde_mm_xor_si128(c2[10123],simde_mm_xor_si128(c2[5282],simde_mm_xor_si128(c2[12117],simde_mm_xor_si128(c2[2426],simde_mm_xor_si128(c2[1463],simde_mm_xor_si128(c2[14096],simde_mm_xor_si128(c2[8283],simde_mm_xor_si128(c2[4417],simde_mm_xor_si128(c2[2484],simde_mm_xor_si128(c2[4452],simde_mm_xor_si128(c2[9286],simde_mm_xor_si128(c2[8328],simde_mm_xor_si128(c2[9344],simde_mm_xor_si128(c2[9341],simde_mm_xor_si128(c2[4495],simde_mm_xor_si128(c2[681],simde_mm_xor_si128(c2[13254],simde_mm_xor_si128(c2[8407],simde_mm_xor_si128(c2[6529],simde_mm_xor_si128(c2[12331],simde_mm_xor_si128(c2[4587],simde_mm_xor_si128(c2[7498],simde_mm_xor_si128(c2[752],simde_mm_xor_si128(c2[14303],simde_mm_xor_si128(c2[2693],simde_mm_xor_si128(c2[11454],simde_mm_xor_si128(c2[14349],simde_mm_xor_si128(c2[10476],simde_mm_xor_si128(c2[2778],simde_mm_xor_si128(c2[4726],simde_mm_xor_si128(c2[13428],simde_mm_xor_si128(c2[10580],simde_mm_xor_si128(c2[2834],simde_mm_xor_si128(c2[4755],simde_mm_xor_si128(c2[7707],simde_mm_xor_si128(c2[9650],simde_mm_xor_si128(c2[4799],c2[2876]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 6
+     d2[132]=simde_mm_xor_si128(c2[5826],simde_mm_xor_si128(c2[8980],simde_mm_xor_si128(c2[10140],simde_mm_xor_si128(c2[3394],simde_mm_xor_si128(c2[12198],simde_mm_xor_si128(c2[6565],simde_mm_xor_si128(c2[12425],c2[1867])))))));
+
+//row: 7
+     d2[154]=simde_mm_xor_si128(c2[2904],simde_mm_xor_si128(c2[6826],simde_mm_xor_si128(c2[190],simde_mm_xor_si128(c2[9038],simde_mm_xor_si128(c2[7141],c2[7394])))));
+
+//row: 8
+     d2[176]=simde_mm_xor_si128(c2[16],simde_mm_xor_si128(c2[10659],simde_mm_xor_si128(c2[6778],simde_mm_xor_si128(c2[1956],simde_mm_xor_si128(c2[11627],simde_mm_xor_si128(c2[5815],simde_mm_xor_si128(c2[6783],simde_mm_xor_si128(c2[10651],simde_mm_xor_si128(c2[4861],simde_mm_xor_si128(c2[5829],simde_mm_xor_si128(c2[11637],simde_mm_xor_si128(c2[49],simde_mm_xor_si128(c2[10692],simde_mm_xor_si128(c2[12649],simde_mm_xor_si128(c2[6837],simde_mm_xor_si128(c2[7805],simde_mm_xor_si128(c2[7791],simde_mm_xor_si128(c2[2001],simde_mm_xor_si128(c2[2969],simde_mm_xor_si128(c2[10696],simde_mm_xor_si128(c2[1064],simde_mm_xor_si128(c2[11707],simde_mm_xor_si128(c2[10747],simde_mm_xor_si128(c2[5903],simde_mm_xor_si128(c2[14618],simde_mm_xor_si128(c2[8806],simde_mm_xor_si128(c2[9774],simde_mm_xor_si128(c2[4022],simde_mm_xor_si128(c2[14665],simde_mm_xor_si128(c2[9815],simde_mm_xor_si128(c2[4993],simde_mm_xor_si128(c2[10780],simde_mm_xor_si128(c2[4990],simde_mm_xor_si128(c2[5958],simde_mm_xor_si128(c2[5941],simde_mm_xor_si128(c2[11810],simde_mm_xor_si128(c2[6966],simde_mm_xor_si128(c2[7936],simde_mm_xor_si128(c2[2124],simde_mm_xor_si128(c2[3092],simde_mm_xor_si128(c2[1159],simde_mm_xor_si128(c2[10834],simde_mm_xor_si128(c2[11802],simde_mm_xor_si128(c2[8942],simde_mm_xor_si128(c2[4098],simde_mm_xor_si128(c2[6042],simde_mm_xor_si128(c2[1198],simde_mm_xor_si128(c2[8936],simde_mm_xor_si128(c2[3124],simde_mm_xor_si128(c2[4092],simde_mm_xor_si128(c2[14791],simde_mm_xor_si128(c2[9947],simde_mm_xor_si128(c2[3182],simde_mm_xor_si128(c2[12857],simde_mm_xor_si128(c2[13825],simde_mm_xor_si128(c2[5110],simde_mm_xor_si128(c2[14785],simde_mm_xor_si128(c2[266],simde_mm_xor_si128(c2[9992],simde_mm_xor_si128(c2[5148],simde_mm_xor_si128(c2[9993],simde_mm_xor_si128(c2[4181],simde_mm_xor_si128(c2[5149],simde_mm_xor_si128(c2[316],simde_mm_xor_si128(c2[9991],simde_mm_xor_si128(c2[10959],simde_mm_xor_si128(c2[8112],simde_mm_xor_si128(c2[3268],simde_mm_xor_si128(c2[7140],simde_mm_xor_si128(c2[1328],simde_mm_xor_si128(c2[2296],simde_mm_xor_si128(c2[5195],simde_mm_xor_si128(c2[14892],simde_mm_xor_si128(c2[373],simde_mm_xor_si128(c2[12992],simde_mm_xor_si128(c2[8148],simde_mm_xor_si128(c2[12012],simde_mm_xor_si128(c2[7190],simde_mm_xor_si128(c2[412],simde_mm_xor_si128(c2[10087],simde_mm_xor_si128(c2[11055],simde_mm_xor_si128(c2[4325],simde_mm_xor_si128(c2[14968],simde_mm_xor_si128(c2[9157],simde_mm_xor_si128(c2[3345],simde_mm_xor_si128(c2[4313],simde_mm_xor_si128(c2[4316],simde_mm_xor_si128(c2[14013],simde_mm_xor_si128(c2[14981],simde_mm_xor_si128(c2[11151],simde_mm_xor_si128(c2[6307],simde_mm_xor_si128(c2[1460],simde_mm_xor_si128(c2[12103],simde_mm_xor_si128(c2[497],simde_mm_xor_si128(c2[10172],simde_mm_xor_si128(c2[11140],simde_mm_xor_si128(c2[13130],simde_mm_xor_si128(c2[8286],simde_mm_xor_si128(c2[7317],simde_mm_xor_si128(c2[2473],simde_mm_xor_si128(c2[3451],simde_mm_xor_si128(c2[13126],simde_mm_xor_si128(c2[14094],simde_mm_xor_si128(c2[8291],simde_mm_xor_si128(c2[3486],simde_mm_xor_si128(c2[14129],simde_mm_xor_si128(c2[8320],simde_mm_xor_si128(c2[2508],simde_mm_xor_si128(c2[3476],simde_mm_xor_si128(c2[7362],simde_mm_xor_si128(c2[1550],simde_mm_xor_si128(c2[2518],simde_mm_xor_si128(c2[8378],simde_mm_xor_si128(c2[3534],simde_mm_xor_si128(c2[8375],simde_mm_xor_si128(c2[2563],simde_mm_xor_si128(c2[3531],simde_mm_xor_si128(c2[3529],simde_mm_xor_si128(c2[13204],simde_mm_xor_si128(c2[14172],simde_mm_xor_si128(c2[15180],simde_mm_xor_si128(c2[10358],simde_mm_xor_si128(c2[12288],simde_mm_xor_si128(c2[7444],simde_mm_xor_si128(c2[7441],simde_mm_xor_si128(c2[1629],simde_mm_xor_si128(c2[2597],simde_mm_xor_si128(c2[5563],simde_mm_xor_si128(c2[719],simde_mm_xor_si128(c2[11365],simde_mm_xor_si128(c2[6521],simde_mm_xor_si128(c2[3621],simde_mm_xor_si128(c2[13296],simde_mm_xor_si128(c2[14264],simde_mm_xor_si128(c2[718],simde_mm_xor_si128(c2[15273],simde_mm_xor_si128(c2[10429],simde_mm_xor_si128(c2[13337],simde_mm_xor_si128(c2[7525],simde_mm_xor_si128(c2[8493],simde_mm_xor_si128(c2[1727],simde_mm_xor_si128(c2[11402],simde_mm_xor_si128(c2[12370],simde_mm_xor_si128(c2[10488],simde_mm_xor_si128(c2[5644],simde_mm_xor_si128(c2[13383],simde_mm_xor_si128(c2[7571],simde_mm_xor_si128(c2[8539],simde_mm_xor_si128(c2[9510],simde_mm_xor_si128(c2[3698],simde_mm_xor_si128(c2[4666],simde_mm_xor_si128(c2[1812],simde_mm_xor_si128(c2[12455],simde_mm_xor_si128(c2[3760],simde_mm_xor_si128(c2[14403],simde_mm_xor_si128(c2[12462],simde_mm_xor_si128(c2[6650],simde_mm_xor_si128(c2[7618],simde_mm_xor_si128(c2[1805],simde_mm_xor_si128(c2[9592],simde_mm_xor_si128(c2[4770],simde_mm_xor_si128(c2[1868],simde_mm_xor_si128(c2[11543],simde_mm_xor_si128(c2[12511],simde_mm_xor_si128(c2[3789],simde_mm_xor_si128(c2[13464],simde_mm_xor_si128(c2[14432],simde_mm_xor_si128(c2[6741],simde_mm_xor_si128(c2[1897],simde_mm_xor_si128(c2[8684],simde_mm_xor_si128(c2[3840],simde_mm_xor_si128(c2[3833],simde_mm_xor_si128(c2[13508],simde_mm_xor_si128(c2[14476],c2[11575]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 9
+     d2[198]=simde_mm_xor_si128(c2[11625],simde_mm_xor_si128(c2[7793],simde_mm_xor_si128(c2[5298],simde_mm_xor_si128(c2[15010],simde_mm_xor_si128(c2[12193],simde_mm_xor_si128(c2[8501],simde_mm_xor_si128(c2[7569],c2[1869])))))));
+
+//row: 10
+     d2[220]=simde_mm_xor_si128(c2[14574],simde_mm_xor_si128(c2[12687],simde_mm_xor_si128(c2[10825],simde_mm_xor_si128(c2[7098],simde_mm_xor_si128(c2[1323],c2[11280])))));
+
+//row: 11
+     d2[242]=simde_mm_xor_si128(c2[7761],simde_mm_xor_si128(c2[5816],simde_mm_xor_si128(c2[6784],simde_mm_xor_si128(c2[14523],simde_mm_xor_si128(c2[13568],simde_mm_xor_si128(c2[3885],simde_mm_xor_si128(c2[2908],simde_mm_xor_si128(c2[2909],simde_mm_xor_si128(c2[1954],simde_mm_xor_si128(c2[8713],simde_mm_xor_si128(c2[7794],simde_mm_xor_si128(c2[5871],simde_mm_xor_si128(c2[6839],simde_mm_xor_si128(c2[4885],simde_mm_xor_si128(c2[3930],simde_mm_xor_si128(c2[49],simde_mm_xor_si128(c2[14581],simde_mm_xor_si128(c2[2000],simde_mm_xor_si128(c2[8809],simde_mm_xor_si128(c2[6864],simde_mm_xor_si128(c2[7832],simde_mm_xor_si128(c2[3005],simde_mm_xor_si128(c2[2028],simde_mm_xor_si128(c2[6876],simde_mm_xor_si128(c2[5899],simde_mm_xor_si128(c2[11767],simde_mm_xor_si128(c2[9822],simde_mm_xor_si128(c2[10790],simde_mm_xor_si128(c2[2073],simde_mm_xor_si128(c2[1118],simde_mm_xor_si128(c2[3038],simde_mm_xor_si128(c2[2083],simde_mm_xor_si128(c2[4068],simde_mm_xor_si128(c2[3091],simde_mm_xor_si128(c2[194],simde_mm_xor_si128(c2[14704],simde_mm_xor_si128(c2[8904],simde_mm_xor_si128(c2[7927],simde_mm_xor_si128(c2[1200],simde_mm_xor_si128(c2[14742],simde_mm_xor_si128(c2[223],simde_mm_xor_si128(c2[13787],simde_mm_xor_si128(c2[12810],simde_mm_xor_si128(c2[1194],simde_mm_xor_si128(c2[239],simde_mm_xor_si128(c2[7049],simde_mm_xor_si128(c2[5104],simde_mm_xor_si128(c2[6072],simde_mm_xor_si128(c2[10927],simde_mm_xor_si128(c2[9950],simde_mm_xor_si128(c2[12855],simde_mm_xor_si128(c2[11900],simde_mm_xor_si128(c2[2250],simde_mm_xor_si128(c2[1295],simde_mm_xor_si128(c2[2251],simde_mm_xor_si128(c2[1296],simde_mm_xor_si128(c2[8061],simde_mm_xor_si128(c2[7084],simde_mm_xor_si128(c2[370],simde_mm_xor_si128(c2[14880],simde_mm_xor_si128(c2[14885],simde_mm_xor_si128(c2[13908],simde_mm_xor_si128(c2[12940],simde_mm_xor_si128(c2[11985],simde_mm_xor_si128(c2[5250],simde_mm_xor_si128(c2[3305],simde_mm_xor_si128(c2[4273],simde_mm_xor_si128(c2[4270],simde_mm_xor_si128(c2[3315],simde_mm_xor_si128(c2[8157],simde_mm_xor_si128(c2[7180],simde_mm_xor_si128(c2[12070],simde_mm_xor_si128(c2[10125],simde_mm_xor_si128(c2[11093],simde_mm_xor_si128(c2[1415],simde_mm_xor_si128(c2[460],simde_mm_xor_si128(c2[12061],simde_mm_xor_si128(c2[11106],simde_mm_xor_si128(c2[3409],simde_mm_xor_si128(c2[1464],simde_mm_xor_si128(c2[2432],simde_mm_xor_si128(c2[9205],simde_mm_xor_si128(c2[8228],simde_mm_xor_si128(c2[8242],simde_mm_xor_si128(c2[7265],simde_mm_xor_si128(c2[5388],simde_mm_xor_si128(c2[3443],simde_mm_xor_si128(c2[4411],simde_mm_xor_si128(c2[15062],simde_mm_xor_si128(c2[14085],simde_mm_xor_si128(c2[11196],simde_mm_xor_si128(c2[10219],simde_mm_xor_si128(c2[8284],simde_mm_xor_si128(c2[11231],simde_mm_xor_si128(c2[9286],simde_mm_xor_si128(c2[10254],simde_mm_xor_si128(c2[578],simde_mm_xor_si128(c2[15110],simde_mm_xor_si128(c2[15107],simde_mm_xor_si128(c2[14130],simde_mm_xor_si128(c2[636],simde_mm_xor_si128(c2[15146],simde_mm_xor_si128(c2[633],simde_mm_xor_si128(c2[15143],simde_mm_xor_si128(c2[11274],simde_mm_xor_si128(c2[10297],simde_mm_xor_si128(c2[7438],simde_mm_xor_si128(c2[5515],simde_mm_xor_si128(c2[6483],simde_mm_xor_si128(c2[4546],simde_mm_xor_si128(c2[3569],simde_mm_xor_si128(c2[15186],simde_mm_xor_si128(c2[14231],simde_mm_xor_si128(c2[13308],simde_mm_xor_si128(c2[11363],simde_mm_xor_si128(c2[12331],simde_mm_xor_si128(c2[3623],simde_mm_xor_si128(c2[2646],simde_mm_xor_si128(c2[11366],simde_mm_xor_si128(c2[10389],simde_mm_xor_si128(c2[15245],simde_mm_xor_si128(c2[7531],simde_mm_xor_si128(c2[6576],simde_mm_xor_si128(c2[5595],simde_mm_xor_si128(c2[4640],simde_mm_xor_si128(c2[9472],simde_mm_xor_si128(c2[8495],simde_mm_xor_si128(c2[2746],simde_mm_xor_si128(c2[801],simde_mm_xor_si128(c2[1769],simde_mm_xor_si128(c2[5641],simde_mm_xor_si128(c2[4664],simde_mm_xor_si128(c2[1768],simde_mm_xor_si128(c2[813],simde_mm_xor_si128(c2[9557],simde_mm_xor_si128(c2[7612],simde_mm_xor_si128(c2[8580],simde_mm_xor_si128(c2[11505],simde_mm_xor_si128(c2[10528],simde_mm_xor_si128(c2[4720],simde_mm_xor_si128(c2[3743],simde_mm_xor_si128(c2[1850],simde_mm_xor_si128(c2[15414],simde_mm_xor_si128(c2[895],simde_mm_xor_si128(c2[9613],simde_mm_xor_si128(c2[8636],simde_mm_xor_si128(c2[11534],simde_mm_xor_si128(c2[10579],simde_mm_xor_si128(c2[14486],simde_mm_xor_si128(c2[12541],simde_mm_xor_si128(c2[13509],simde_mm_xor_si128(c2[942],simde_mm_xor_si128(c2[15452],simde_mm_xor_si128(c2[11578],simde_mm_xor_si128(c2[10623],c2[6742])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 12
+     d2[264]=simde_mm_xor_si128(c2[10655],simde_mm_xor_si128(c2[8769],simde_mm_xor_si128(c2[14000],simde_mm_xor_si128(c2[14042],simde_mm_xor_si128(c2[583],c2[12412])))));
+
+//row: 13
+     d2[286]=simde_mm_xor_si128(c2[6777],simde_mm_xor_si128(c2[7745],simde_mm_xor_si128(c2[14529],simde_mm_xor_si128(c2[3891],simde_mm_xor_si128(c2[2915],simde_mm_xor_si128(c2[6795],simde_mm_xor_si128(c2[6832],simde_mm_xor_si128(c2[7800],simde_mm_xor_si128(c2[4891],simde_mm_xor_si128(c2[55],simde_mm_xor_si128(c2[7847],simde_mm_xor_si128(c2[8815],simde_mm_xor_si128(c2[3011],simde_mm_xor_si128(c2[6882],simde_mm_xor_si128(c2[10783],simde_mm_xor_si128(c2[11751],simde_mm_xor_si128(c2[2079],simde_mm_xor_si128(c2[3044],simde_mm_xor_si128(c2[10795],simde_mm_xor_si128(c2[4052],simde_mm_xor_si128(c2[178],simde_mm_xor_si128(c2[8888],simde_mm_xor_si128(c2[238],simde_mm_xor_si128(c2[1206],simde_mm_xor_si128(c2[13793],simde_mm_xor_si128(c2[1200],simde_mm_xor_si128(c2[6087],simde_mm_xor_si128(c2[7055],simde_mm_xor_si128(c2[10933],simde_mm_xor_si128(c2[12861],simde_mm_xor_si128(c2[2256],simde_mm_xor_si128(c2[2257],simde_mm_xor_si128(c2[8067],simde_mm_xor_si128(c2[9036],simde_mm_xor_si128(c2[354],simde_mm_xor_si128(c2[14891],simde_mm_xor_si128(c2[12946],simde_mm_xor_si128(c2[4288],simde_mm_xor_si128(c2[5256],simde_mm_xor_si128(c2[4276],simde_mm_xor_si128(c2[8141],simde_mm_xor_si128(c2[11108],simde_mm_xor_si128(c2[12076],simde_mm_xor_si128(c2[1421],simde_mm_xor_si128(c2[12067],simde_mm_xor_si128(c2[2425],simde_mm_xor_si128(c2[3393],simde_mm_xor_si128(c2[9211],simde_mm_xor_si128(c2[8248],simde_mm_xor_si128(c2[4404],simde_mm_xor_si128(c2[5372],simde_mm_xor_si128(c2[15068],simde_mm_xor_si128(c2[11180],simde_mm_xor_si128(c2[10269],simde_mm_xor_si128(c2[11237],simde_mm_xor_si128(c2[584],simde_mm_xor_si128(c2[15113],simde_mm_xor_si128(c2[620],simde_mm_xor_si128(c2[617],simde_mm_xor_si128(c2[11280],simde_mm_xor_si128(c2[6476],simde_mm_xor_si128(c2[7444],simde_mm_xor_si128(c2[4552],simde_mm_xor_si128(c2[15192],simde_mm_xor_si128(c2[12324],simde_mm_xor_si128(c2[13292],simde_mm_xor_si128(c2[3629],simde_mm_xor_si128(c2[11372],simde_mm_xor_si128(c2[7537],simde_mm_xor_si128(c2[5601],simde_mm_xor_si128(c2[9478],simde_mm_xor_si128(c2[1762],simde_mm_xor_si128(c2[2730],simde_mm_xor_si128(c2[5647],simde_mm_xor_si128(c2[1774],simde_mm_xor_si128(c2[8595],simde_mm_xor_si128(c2[9563],simde_mm_xor_si128(c2[11489],simde_mm_xor_si128(c2[4726],simde_mm_xor_si128(c2[888],simde_mm_xor_si128(c2[1856],simde_mm_xor_si128(c2[9597],simde_mm_xor_si128(c2[11540],simde_mm_xor_si128(c2[14437],simde_mm_xor_si128(c2[13524],simde_mm_xor_si128(c2[14492],simde_mm_xor_si128(c2[926],c2[11584])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 14
+     d2[308]=simde_mm_xor_si128(c2[5809],simde_mm_xor_si128(c2[2484],simde_mm_xor_si128(c2[5517],simde_mm_xor_si128(c2[12329],simde_mm_xor_si128(c2[2688],c2[14480])))));
+
+//row: 15
+     d2[330]=simde_mm_xor_si128(c2[7747],simde_mm_xor_si128(c2[14531],simde_mm_xor_si128(c2[3893],simde_mm_xor_si128(c2[1949],simde_mm_xor_si128(c2[2917],simde_mm_xor_si128(c2[11],simde_mm_xor_si128(c2[7802],simde_mm_xor_si128(c2[4893],simde_mm_xor_si128(c2[14576],simde_mm_xor_si128(c2[57],simde_mm_xor_si128(c2[11681],simde_mm_xor_si128(c2[8817],simde_mm_xor_si128(c2[3013],simde_mm_xor_si128(c2[6884],simde_mm_xor_si128(c2[11753],simde_mm_xor_si128(c2[2081],simde_mm_xor_si128(c2[2078],simde_mm_xor_si128(c2[3046],simde_mm_xor_si128(c2[4054],simde_mm_xor_si128(c2[180],simde_mm_xor_si128(c2[7922],simde_mm_xor_si128(c2[8890],simde_mm_xor_si128(c2[1208],simde_mm_xor_si128(c2[13773],simde_mm_xor_si128(c2[1202],simde_mm_xor_si128(c2[7057],simde_mm_xor_si128(c2[10913],simde_mm_xor_si128(c2[11895],simde_mm_xor_si128(c2[12863],simde_mm_xor_si128(c2[2258],simde_mm_xor_si128(c2[2259],simde_mm_xor_si128(c2[7101],simde_mm_xor_si128(c2[8069],simde_mm_xor_si128(c2[356],simde_mm_xor_si128(c2[14893],simde_mm_xor_si128(c2[11980],simde_mm_xor_si128(c2[12948],simde_mm_xor_si128(c2[5236],simde_mm_xor_si128(c2[4278],simde_mm_xor_si128(c2[8143],simde_mm_xor_si128(c2[12056],simde_mm_xor_si128(c2[1423],simde_mm_xor_si128(c2[11101],simde_mm_xor_si128(c2[12069],simde_mm_xor_si128(c2[14960],simde_mm_xor_si128(c2[3395],simde_mm_xor_si128(c2[9213],simde_mm_xor_si128(c2[7260],simde_mm_xor_si128(c2[8228],simde_mm_xor_si128(c2[5374],simde_mm_xor_si128(c2[15048],simde_mm_xor_si128(c2[10214],simde_mm_xor_si128(c2[11182],simde_mm_xor_si128(c2[11239],simde_mm_xor_si128(c2[586],simde_mm_xor_si128(c2[14125],simde_mm_xor_si128(c2[15093],simde_mm_xor_si128(c2[1545],simde_mm_xor_si128(c2[622],simde_mm_xor_si128(c2[619],simde_mm_xor_si128(c2[10314],simde_mm_xor_si128(c2[11282],simde_mm_xor_si128(c2[7446],simde_mm_xor_si128(c2[4532],simde_mm_xor_si128(c2[15194],simde_mm_xor_si128(c2[13294],simde_mm_xor_si128(c2[3609],simde_mm_xor_si128(c2[10384],simde_mm_xor_si128(c2[11352],simde_mm_xor_si128(c2[7539],simde_mm_xor_si128(c2[5603],simde_mm_xor_si128(c2[8512],simde_mm_xor_si128(c2[9480],simde_mm_xor_si128(c2[2732],simde_mm_xor_si128(c2[5649],simde_mm_xor_si128(c2[808],simde_mm_xor_si128(c2[1776],simde_mm_xor_si128(c2[803],simde_mm_xor_si128(c2[9565],simde_mm_xor_si128(c2[11491],simde_mm_xor_si128(c2[4728],simde_mm_xor_si128(c2[1858],simde_mm_xor_si128(c2[9599],simde_mm_xor_si128(c2[10574],simde_mm_xor_si128(c2[11542],simde_mm_xor_si128(c2[14494],simde_mm_xor_si128(c2[928],simde_mm_xor_si128(c2[10618],c2[11586]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 16
+     d2[352]=simde_mm_xor_si128(c2[7747],simde_mm_xor_si128(c2[14531],simde_mm_xor_si128(c2[3893],simde_mm_xor_si128(c2[2917],simde_mm_xor_si128(c2[7802],simde_mm_xor_si128(c2[4893],simde_mm_xor_si128(c2[57],simde_mm_xor_si128(c2[13607],simde_mm_xor_si128(c2[8817],simde_mm_xor_si128(c2[3013],simde_mm_xor_si128(c2[6884],simde_mm_xor_si128(c2[11753],simde_mm_xor_si128(c2[2081],simde_mm_xor_si128(c2[3046],simde_mm_xor_si128(c2[4990],simde_mm_xor_si128(c2[4054],simde_mm_xor_si128(c2[180],simde_mm_xor_si128(c2[8890],simde_mm_xor_si128(c2[1208],simde_mm_xor_si128(c2[13773],simde_mm_xor_si128(c2[1202],simde_mm_xor_si128(c2[7057],simde_mm_xor_si128(c2[10913],simde_mm_xor_si128(c2[12863],simde_mm_xor_si128(c2[2258],simde_mm_xor_si128(c2[2259],simde_mm_xor_si128(c2[8069],simde_mm_xor_si128(c2[356],simde_mm_xor_si128(c2[14893],simde_mm_xor_si128(c2[12948],simde_mm_xor_si128(c2[5236],simde_mm_xor_si128(c2[4278],simde_mm_xor_si128(c2[8143],simde_mm_xor_si128(c2[12056],simde_mm_xor_si128(c2[1423],simde_mm_xor_si128(c2[12069],simde_mm_xor_si128(c2[3395],simde_mm_xor_si128(c2[9213],simde_mm_xor_si128(c2[8228],simde_mm_xor_si128(c2[12120],simde_mm_xor_si128(c2[5374],simde_mm_xor_si128(c2[15048],simde_mm_xor_si128(c2[11182],simde_mm_xor_si128(c2[11239],simde_mm_xor_si128(c2[586],simde_mm_xor_si128(c2[15093],simde_mm_xor_si128(c2[622],simde_mm_xor_si128(c2[619],simde_mm_xor_si128(c2[11282],simde_mm_xor_si128(c2[7446],simde_mm_xor_si128(c2[4532],simde_mm_xor_si128(c2[15194],simde_mm_xor_si128(c2[13294],simde_mm_xor_si128(c2[3609],simde_mm_xor_si128(c2[11352],simde_mm_xor_si128(c2[7539],simde_mm_xor_si128(c2[5603],simde_mm_xor_si128(c2[9480],simde_mm_xor_si128(c2[2732],simde_mm_xor_si128(c2[5649],simde_mm_xor_si128(c2[1776],simde_mm_xor_si128(c2[9565],simde_mm_xor_si128(c2[11491],simde_mm_xor_si128(c2[4728],simde_mm_xor_si128(c2[1858],simde_mm_xor_si128(c2[9599],simde_mm_xor_si128(c2[11542],simde_mm_xor_si128(c2[11548],simde_mm_xor_si128(c2[14494],simde_mm_xor_si128(c2[928],c2[11586]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 17
+     d2[374]=simde_mm_xor_si128(c2[13558],simde_mm_xor_si128(c2[4502],simde_mm_xor_si128(c2[7495],simde_mm_xor_si128(c2[4627],c2[14487]))));
+
+//row: 18
+     d2[396]=simde_mm_xor_si128(c2[14566],simde_mm_xor_si128(c2[14097],simde_mm_xor_si128(c2[6395],simde_mm_xor_si128(c2[5643],c2[9552]))));
+
+//row: 19
+     d2[418]=simde_mm_xor_si128(c2[6781],simde_mm_xor_si128(c2[13602],simde_mm_xor_si128(c2[3221],simde_mm_xor_si128(c2[2304],c2[12068]))));
+
+//row: 20
+     d2[440]=simde_mm_xor_si128(c2[3892],simde_mm_xor_si128(c2[10654],simde_mm_xor_si128(c2[16],simde_mm_xor_si128(c2[14527],simde_mm_xor_si128(c2[8717],simde_mm_xor_si128(c2[3925],simde_mm_xor_si128(c2[1016],simde_mm_xor_si128(c2[11667],simde_mm_xor_si128(c2[4940],simde_mm_xor_si128(c2[14623],simde_mm_xor_si128(c2[3007],simde_mm_xor_si128(c2[7876],simde_mm_xor_si128(c2[13691],simde_mm_xor_si128(c2[14656],simde_mm_xor_si128(c2[1104],simde_mm_xor_si128(c2[177],simde_mm_xor_si128(c2[11812],simde_mm_xor_si128(c2[5035],simde_mm_xor_si128(c2[12818],simde_mm_xor_si128(c2[9918],simde_mm_xor_si128(c2[12812],simde_mm_xor_si128(c2[3180],simde_mm_xor_si128(c2[7058],simde_mm_xor_si128(c2[8986],simde_mm_xor_si128(c2[13868],simde_mm_xor_si128(c2[13869],simde_mm_xor_si128(c2[4192],simde_mm_xor_si128(c2[11988],simde_mm_xor_si128(c2[11016],simde_mm_xor_si128(c2[9071],simde_mm_xor_si128(c2[1381],simde_mm_xor_si128(c2[401],simde_mm_xor_si128(c2[4288],simde_mm_xor_si128(c2[11053],simde_mm_xor_si128(c2[8201],simde_mm_xor_si128(c2[13033],simde_mm_xor_si128(c2[8192],simde_mm_xor_si128(c2[15005],simde_mm_xor_si128(c2[5336],simde_mm_xor_si128(c2[4373],simde_mm_xor_si128(c2[4371],simde_mm_xor_si128(c2[1497],simde_mm_xor_si128(c2[11193],simde_mm_xor_si128(c2[7305],simde_mm_xor_si128(c2[7362],simde_mm_xor_si128(c2[12196],simde_mm_xor_si128(c2[11238],simde_mm_xor_si128(c2[12232],simde_mm_xor_si128(c2[12251],simde_mm_xor_si128(c2[7405],simde_mm_xor_si128(c2[3569],simde_mm_xor_si128(c2[677],simde_mm_xor_si128(c2[11317],simde_mm_xor_si128(c2[9417],simde_mm_xor_si128(c2[15241],simde_mm_xor_si128(c2[7497],simde_mm_xor_si128(c2[3662],simde_mm_xor_si128(c2[1726],simde_mm_xor_si128(c2[5603],simde_mm_xor_si128(c2[14364],simde_mm_xor_si128(c2[1772],simde_mm_xor_si128(c2[13386],simde_mm_xor_si128(c2[5688],simde_mm_xor_si128(c2[7614],simde_mm_xor_si128(c2[851],simde_mm_xor_si128(c2[13468],simde_mm_xor_si128(c2[5722],simde_mm_xor_si128(c2[7665],simde_mm_xor_si128(c2[10617],simde_mm_xor_si128(c2[12560],c2[7709]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 21
+     d2[462]=simde_mm_xor_si128(c2[1990],simde_mm_xor_si128(c2[7980],simde_mm_xor_si128(c2[10405],simde_mm_xor_si128(c2[15408],c2[13514]))));
+
+//row: 22
+     d2[484]=simde_mm_xor_si128(c2[7761],simde_mm_xor_si128(c2[13121],simde_mm_xor_si128(c2[12202],c2[1723])));
+
+//row: 23
+     d2[506]=simde_mm_xor_si128(c2[1981],simde_mm_xor_si128(c2[5896],simde_mm_xor_si128(c2[5291],c2[811])));
+
+//row: 24
+     d2[528]=simde_mm_xor_si128(c2[7752],simde_mm_xor_si128(c2[14536],simde_mm_xor_si128(c2[3876],simde_mm_xor_si128(c2[2922],simde_mm_xor_si128(c2[5810],simde_mm_xor_si128(c2[7807],simde_mm_xor_si128(c2[4898],simde_mm_xor_si128(c2[62],simde_mm_xor_si128(c2[8800],simde_mm_xor_si128(c2[2996],simde_mm_xor_si128(c2[6867],simde_mm_xor_si128(c2[11758],simde_mm_xor_si128(c2[2086],simde_mm_xor_si128(c2[3051],simde_mm_xor_si128(c2[9822],simde_mm_xor_si128(c2[4059],simde_mm_xor_si128(c2[185],simde_mm_xor_si128(c2[8895],simde_mm_xor_si128(c2[8903],simde_mm_xor_si128(c2[1191],simde_mm_xor_si128(c2[13778],simde_mm_xor_si128(c2[1207],simde_mm_xor_si128(c2[7040],simde_mm_xor_si128(c2[10918],simde_mm_xor_si128(c2[12868],simde_mm_xor_si128(c2[2263],simde_mm_xor_si128(c2[2264],simde_mm_xor_si128(c2[8052],simde_mm_xor_si128(c2[361],simde_mm_xor_si128(c2[14876],simde_mm_xor_si128(c2[12953],simde_mm_xor_si128(c2[5241],simde_mm_xor_si128(c2[4283],simde_mm_xor_si128(c2[8148],simde_mm_xor_si128(c2[12061],simde_mm_xor_si128(c2[1428],simde_mm_xor_si128(c2[12074],simde_mm_xor_si128(c2[3400],simde_mm_xor_si128(c2[9196],simde_mm_xor_si128(c2[8233],simde_mm_xor_si128(c2[502],simde_mm_xor_si128(c2[5379],simde_mm_xor_si128(c2[15053],simde_mm_xor_si128(c2[11187],simde_mm_xor_si128(c2[11222],simde_mm_xor_si128(c2[591],simde_mm_xor_si128(c2[15098],simde_mm_xor_si128(c2[627],simde_mm_xor_si128(c2[624],simde_mm_xor_si128(c2[11265],simde_mm_xor_si128(c2[7451],simde_mm_xor_si128(c2[4537],simde_mm_xor_si128(c2[15199],simde_mm_xor_si128(c2[13299],simde_mm_xor_si128(c2[3614],simde_mm_xor_si128(c2[11357],simde_mm_xor_si128(c2[7544],simde_mm_xor_si128(c2[5608],simde_mm_xor_si128(c2[9463],simde_mm_xor_si128(c2[2737],simde_mm_xor_si128(c2[5632],simde_mm_xor_si128(c2[1781],simde_mm_xor_si128(c2[9548],simde_mm_xor_si128(c2[11496],simde_mm_xor_si128(c2[4711],simde_mm_xor_si128(c2[1863],simde_mm_xor_si128(c2[9604],simde_mm_xor_si128(c2[11547],simde_mm_xor_si128(c2[14477],simde_mm_xor_si128(c2[933],c2[11591]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 25
+     d2[550]=simde_mm_xor_si128(c2[6837],simde_mm_xor_si128(c2[14799],simde_mm_xor_si128(c2[14834],c2[6427])));
+
+//row: 26
+     d2[572]=simde_mm_xor_si128(c2[4860],simde_mm_xor_si128(c2[5916],simde_mm_xor_si128(c2[2126],c2[3570])));
+
+//row: 27
+     d2[594]=simde_mm_xor_si128(c2[10697],simde_mm_xor_si128(c2[6092],c2[11984]));
+
+//row: 28
+     d2[616]=simde_mm_xor_si128(c2[5814],simde_mm_xor_si128(c2[1144],simde_mm_xor_si128(c2[8582],c2[7710])));
+
+//row: 29
+     d2[638]=simde_mm_xor_si128(c2[1939],simde_mm_xor_si128(c2[8723],simde_mm_xor_si128(c2[13572],simde_mm_xor_si128(c2[11628],simde_mm_xor_si128(c2[12596],simde_mm_xor_si128(c2[1994],simde_mm_xor_si128(c2[14572],simde_mm_xor_si128(c2[8768],simde_mm_xor_si128(c2[9736],simde_mm_xor_si128(c2[1029],simde_mm_xor_si128(c2[3009],simde_mm_xor_si128(c2[12692],simde_mm_xor_si128(c2[1076],simde_mm_xor_si128(c2[5945],simde_mm_xor_si128(c2[11760],simde_mm_xor_si128(c2[11757],simde_mm_xor_si128(c2[12725],simde_mm_xor_si128(c2[13733],simde_mm_xor_si128(c2[9859],simde_mm_xor_si128(c2[2114],simde_mm_xor_si128(c2[3082],simde_mm_xor_si128(c2[10887],simde_mm_xor_si128(c2[7965],simde_mm_xor_si128(c2[10881],simde_mm_xor_si128(c2[1249],simde_mm_xor_si128(c2[5105],simde_mm_xor_si128(c2[6087],simde_mm_xor_si128(c2[7055],simde_mm_xor_si128(c2[11937],simde_mm_xor_si128(c2[11938],simde_mm_xor_si128(c2[1293],simde_mm_xor_si128(c2[2261],simde_mm_xor_si128(c2[10035],simde_mm_xor_si128(c2[9085],simde_mm_xor_si128(c2[6172],simde_mm_xor_si128(c2[7140],simde_mm_xor_si128(c2[14937],simde_mm_xor_si128(c2[13957],simde_mm_xor_si128(c2[2335],simde_mm_xor_si128(c2[6248],simde_mm_xor_si128(c2[11102],simde_mm_xor_si128(c2[5293],simde_mm_xor_si128(c2[6261],simde_mm_xor_si128(c2[13074],simde_mm_xor_si128(c2[3405],simde_mm_xor_si128(c2[1452],simde_mm_xor_si128(c2[2420],simde_mm_xor_si128(c2[15053],simde_mm_xor_si128(c2[9240],simde_mm_xor_si128(c2[4406],simde_mm_xor_si128(c2[5374],simde_mm_xor_si128(c2[5431],simde_mm_xor_si128(c2[10265],simde_mm_xor_si128(c2[8317],simde_mm_xor_si128(c2[9285],simde_mm_xor_si128(c2[10301],simde_mm_xor_si128(c2[10298],simde_mm_xor_si128(c2[4506],simde_mm_xor_si128(c2[5474],simde_mm_xor_si128(c2[8366],simde_mm_xor_si128(c2[1638],simde_mm_xor_si128(c2[14233],simde_mm_xor_si128(c2[9386],simde_mm_xor_si128(c2[7486],simde_mm_xor_si128(c2[13288],simde_mm_xor_si128(c2[4576],simde_mm_xor_si128(c2[5544],simde_mm_xor_si128(c2[1731],simde_mm_xor_si128(c2[15282],simde_mm_xor_si128(c2[2704],simde_mm_xor_si128(c2[3672],simde_mm_xor_si128(c2[12411],simde_mm_xor_si128(c2[15328],simde_mm_xor_si128(c2[10487],simde_mm_xor_si128(c2[11455],simde_mm_xor_si128(c2[3711],simde_mm_xor_si128(c2[3757],simde_mm_xor_si128(c2[5683],simde_mm_xor_si128(c2[14407],simde_mm_xor_si128(c2[11537],simde_mm_xor_si128(c2[3791],simde_mm_xor_si128(c2[4766],simde_mm_xor_si128(c2[5734],simde_mm_xor_si128(c2[8686],simde_mm_xor_si128(c2[10607],simde_mm_xor_si128(c2[4810],c2[5778]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 30
+     d2[660]=simde_mm_xor_si128(c2[10657],simde_mm_xor_si128(c2[1954],simde_mm_xor_si128(c2[5813],simde_mm_xor_si128(c2[6781],simde_mm_xor_si128(c2[4859],simde_mm_xor_si128(c2[5827],simde_mm_xor_si128(c2[10658],simde_mm_xor_si128(c2[10712],simde_mm_xor_si128(c2[6835],simde_mm_xor_si128(c2[7803],simde_mm_xor_si128(c2[1999],simde_mm_xor_si128(c2[2967],simde_mm_xor_si128(c2[11705],simde_mm_xor_si128(c2[5901],simde_mm_xor_si128(c2[8804],simde_mm_xor_si128(c2[9772],simde_mm_xor_si128(c2[14663],simde_mm_xor_si128(c2[4991],simde_mm_xor_si128(c2[4988],simde_mm_xor_si128(c2[5956],simde_mm_xor_si128(c2[6964],simde_mm_xor_si128(c2[2122],simde_mm_xor_si128(c2[3090],simde_mm_xor_si128(c2[10832],simde_mm_xor_si128(c2[11800],simde_mm_xor_si128(c2[4096],simde_mm_xor_si128(c2[1196],simde_mm_xor_si128(c2[3144],simde_mm_xor_si128(c2[4112],simde_mm_xor_si128(c2[9945],simde_mm_xor_si128(c2[12855],simde_mm_xor_si128(c2[13823],simde_mm_xor_si128(c2[14805],simde_mm_xor_si128(c2[264],simde_mm_xor_si128(c2[5168],simde_mm_xor_si128(c2[4201],simde_mm_xor_si128(c2[5169],simde_mm_xor_si128(c2[9989],simde_mm_xor_si128(c2[10957],simde_mm_xor_si128(c2[3266],simde_mm_xor_si128(c2[1326],simde_mm_xor_si128(c2[2294],simde_mm_xor_si128(c2[14890],simde_mm_xor_si128(c2[371],simde_mm_xor_si128(c2[8146],simde_mm_xor_si128(c2[7188],simde_mm_xor_si128(c2[10085],simde_mm_xor_si128(c2[11053],simde_mm_xor_si128(c2[14966],simde_mm_xor_si128(c2[3365],simde_mm_xor_si128(c2[4333],simde_mm_xor_si128(c2[14011],simde_mm_xor_si128(c2[14979],simde_mm_xor_si128(c2[441],simde_mm_xor_si128(c2[6305],simde_mm_xor_si128(c2[12101],simde_mm_xor_si128(c2[10170],simde_mm_xor_si128(c2[11138],simde_mm_xor_si128(c2[8284],simde_mm_xor_si128(c2[2471],simde_mm_xor_si128(c2[13124],simde_mm_xor_si128(c2[14092],simde_mm_xor_si128(c2[14127],simde_mm_xor_si128(c2[2528],simde_mm_xor_si128(c2[3496],simde_mm_xor_si128(c2[1548],simde_mm_xor_si128(c2[2516],simde_mm_xor_si128(c2[15097],simde_mm_xor_si128(c2[3532],simde_mm_xor_si128(c2[2561],simde_mm_xor_si128(c2[3529],simde_mm_xor_si128(c2[13202],simde_mm_xor_si128(c2[14170],simde_mm_xor_si128(c2[10356],simde_mm_xor_si128(c2[7442],simde_mm_xor_si128(c2[1649],simde_mm_xor_si128(c2[2617],simde_mm_xor_si128(c2[717],simde_mm_xor_si128(c2[6519],simde_mm_xor_si128(c2[13294],simde_mm_xor_si128(c2[14262],simde_mm_xor_si128(c2[10449],simde_mm_xor_si128(c2[7545],simde_mm_xor_si128(c2[8513],simde_mm_xor_si128(c2[11400],simde_mm_xor_si128(c2[12368],simde_mm_xor_si128(c2[5642],simde_mm_xor_si128(c2[7569],simde_mm_xor_si128(c2[8537],simde_mm_xor_si128(c2[3696],simde_mm_xor_si128(c2[4664],simde_mm_xor_si128(c2[12453],simde_mm_xor_si128(c2[14401],simde_mm_xor_si128(c2[6648],simde_mm_xor_si128(c2[7616],simde_mm_xor_si128(c2[4768],simde_mm_xor_si128(c2[11541],simde_mm_xor_si128(c2[12509],simde_mm_xor_si128(c2[13484],simde_mm_xor_si128(c2[14452],simde_mm_xor_si128(c2[1895],simde_mm_xor_si128(c2[3838],simde_mm_xor_si128(c2[13528],c2[14496])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 31
+     d2[682]=simde_mm_xor_si128(c2[12597],simde_mm_xor_si128(c2[4850],simde_mm_xor_si128(c2[3872],simde_mm_xor_si128(c2[11634],simde_mm_xor_si128(c2[8721],simde_mm_xor_si128(c2[974],simde_mm_xor_si128(c2[7745],simde_mm_xor_si128(c2[14539],simde_mm_xor_si128(c2[20],simde_mm_xor_si128(c2[12630],simde_mm_xor_si128(c2[4905],simde_mm_xor_si128(c2[9743],simde_mm_xor_si128(c2[1996],simde_mm_xor_si128(c2[4885],simde_mm_xor_si128(c2[11679],simde_mm_xor_si128(c2[12647],simde_mm_xor_si128(c2[4890],simde_mm_xor_si128(c2[13645],simde_mm_xor_si128(c2[5898],simde_mm_xor_si128(c2[7841],simde_mm_xor_si128(c2[94],simde_mm_xor_si128(c2[11712],simde_mm_xor_si128(c2[3965],simde_mm_xor_si128(c2[1116],simde_mm_xor_si128(c2[8856],simde_mm_xor_si128(c2[6909],simde_mm_xor_si128(c2[14671],simde_mm_xor_si128(c2[7896],simde_mm_xor_si128(c2[14668],simde_mm_xor_si128(c2[149],simde_mm_xor_si128(c2[8904],simde_mm_xor_si128(c2[1157],simde_mm_xor_si128(c2[5030],simde_mm_xor_si128(c2[12770],simde_mm_xor_si128(c2[13740],simde_mm_xor_si128(c2[5025],simde_mm_xor_si128(c2[5993],simde_mm_xor_si128(c2[6036],simde_mm_xor_si128(c2[13776],simde_mm_xor_si128(c2[3136],simde_mm_xor_si128(c2[10876],simde_mm_xor_si128(c2[6030],simde_mm_xor_si128(c2[13792],simde_mm_xor_si128(c2[11885],simde_mm_xor_si128(c2[4138],simde_mm_xor_si128(c2[276],simde_mm_xor_si128(c2[8016],simde_mm_xor_si128(c2[2204],simde_mm_xor_si128(c2[8976],simde_mm_xor_si128(c2[9944],simde_mm_xor_si128(c2[7086],simde_mm_xor_si128(c2[14848],simde_mm_xor_si128(c2[7087],simde_mm_xor_si128(c2[14849],simde_mm_xor_si128(c2[12897],simde_mm_xor_si128(c2[4182],simde_mm_xor_si128(c2[5150],simde_mm_xor_si128(c2[9038],simde_mm_xor_si128(c2[5206],simde_mm_xor_si128(c2[12946],simde_mm_xor_si128(c2[4234],simde_mm_xor_si128(c2[11974],simde_mm_xor_si128(c2[2289],simde_mm_xor_si128(c2[9083],simde_mm_xor_si128(c2[10051],simde_mm_xor_si128(c2[10086],simde_mm_xor_si128(c2[2339],simde_mm_xor_si128(c2[9128],simde_mm_xor_si128(c2[1381],simde_mm_xor_si128(c2[12993],simde_mm_xor_si128(c2[5246],simde_mm_xor_si128(c2[1419],simde_mm_xor_si128(c2[9159],simde_mm_xor_si128(c2[6251],simde_mm_xor_si128(c2[14013],simde_mm_xor_si128(c2[1410],simde_mm_xor_si128(c2[8204],simde_mm_xor_si128(c2[9172],simde_mm_xor_si128(c2[8245],simde_mm_xor_si128(c2[498],simde_mm_xor_si128(c2[14041],simde_mm_xor_si128(c2[6294],simde_mm_xor_si128(c2[13078],simde_mm_xor_si128(c2[4363],simde_mm_xor_si128(c2[5331],simde_mm_xor_si128(c2[10224],simde_mm_xor_si128(c2[2477],simde_mm_xor_si128(c2[4411],simde_mm_xor_si128(c2[12151],simde_mm_xor_si128(c2[545],simde_mm_xor_si128(c2[7317],simde_mm_xor_si128(c2[8285],simde_mm_xor_si128(c2[580],simde_mm_xor_si128(c2[8320],simde_mm_xor_si128(c2[5414],simde_mm_xor_si128(c2[13176],simde_mm_xor_si128(c2[4456],simde_mm_xor_si128(c2[11228],simde_mm_xor_si128(c2[12196],simde_mm_xor_si128(c2[5472],simde_mm_xor_si128(c2[13212],simde_mm_xor_si128(c2[5469],simde_mm_xor_si128(c2[13209],simde_mm_xor_si128(c2[623],simde_mm_xor_si128(c2[7395],simde_mm_xor_si128(c2[8363],simde_mm_xor_si128(c2[12296],simde_mm_xor_si128(c2[4549],simde_mm_xor_si128(c2[9382],simde_mm_xor_si128(c2[1635],simde_mm_xor_si128(c2[4535],simde_mm_xor_si128(c2[12297],simde_mm_xor_si128(c2[2657],simde_mm_xor_si128(c2[10397],simde_mm_xor_si128(c2[8459],simde_mm_xor_si128(c2[712],simde_mm_xor_si128(c2[715],simde_mm_xor_si128(c2[7487],simde_mm_xor_si128(c2[8455],simde_mm_xor_si128(c2[12367],simde_mm_xor_si128(c2[4620],simde_mm_xor_si128(c2[10431],simde_mm_xor_si128(c2[2684],simde_mm_xor_si128(c2[14308],simde_mm_xor_si128(c2[5593],simde_mm_xor_si128(c2[6561],simde_mm_xor_si128(c2[7582],simde_mm_xor_si128(c2[15322],simde_mm_xor_si128(c2[10477],simde_mm_xor_si128(c2[2730],simde_mm_xor_si128(c2[6604],simde_mm_xor_si128(c2[13376],simde_mm_xor_si128(c2[14344],simde_mm_xor_si128(c2[14393],simde_mm_xor_si128(c2[6646],simde_mm_xor_si128(c2[854],simde_mm_xor_si128(c2[8594],simde_mm_xor_si128(c2[9556],simde_mm_xor_si128(c2[1809],simde_mm_xor_si128(c2[6708],simde_mm_xor_si128(c2[14448],simde_mm_xor_si128(c2[14449],simde_mm_xor_si128(c2[6702],simde_mm_xor_si128(c2[883],simde_mm_xor_si128(c2[7677],simde_mm_xor_si128(c2[8645],simde_mm_xor_si128(c2[3835],simde_mm_xor_si128(c2[11575],simde_mm_xor_si128(c2[5778],simde_mm_xor_si128(c2[13518],simde_mm_xor_si128(c2[927],simde_mm_xor_si128(c2[7721],c2[8689]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 32
+     d2[704]=simde_mm_xor_si128(c2[9681],simde_mm_xor_si128(c2[978],simde_mm_xor_si128(c2[4859],simde_mm_xor_si128(c2[5827],simde_mm_xor_si128(c2[3883],simde_mm_xor_si128(c2[4851],simde_mm_xor_si128(c2[14541],simde_mm_xor_si128(c2[9736],simde_mm_xor_si128(c2[5859],simde_mm_xor_si128(c2[6827],simde_mm_xor_si128(c2[1023],simde_mm_xor_si128(c2[1991],simde_mm_xor_si128(c2[10751],simde_mm_xor_si128(c2[4947],simde_mm_xor_si128(c2[7850],simde_mm_xor_si128(c2[8818],simde_mm_xor_si128(c2[13687],simde_mm_xor_si128(c2[4015],simde_mm_xor_si128(c2[4012],simde_mm_xor_si128(c2[4980],simde_mm_xor_si128(c2[5988],simde_mm_xor_si128(c2[1146],simde_mm_xor_si128(c2[2114],simde_mm_xor_si128(c2[9856],simde_mm_xor_si128(c2[10824],simde_mm_xor_si128(c2[3142],simde_mm_xor_si128(c2[220],simde_mm_xor_si128(c2[2168],simde_mm_xor_si128(c2[3136],simde_mm_xor_si128(c2[8991],simde_mm_xor_si128(c2[11901],simde_mm_xor_si128(c2[12869],simde_mm_xor_si128(c2[13829],simde_mm_xor_si128(c2[14797],simde_mm_xor_si128(c2[4192],simde_mm_xor_si128(c2[3225],simde_mm_xor_si128(c2[4193],simde_mm_xor_si128(c2[9035],simde_mm_xor_si128(c2[10003],simde_mm_xor_si128(c2[2290],simde_mm_xor_si128(c2[372],simde_mm_xor_si128(c2[1340],simde_mm_xor_si128(c2[13914],simde_mm_xor_si128(c2[14882],simde_mm_xor_si128(c2[7192],simde_mm_xor_si128(c2[6212],simde_mm_xor_si128(c2[9109],simde_mm_xor_si128(c2[10077],simde_mm_xor_si128(c2[14012],simde_mm_xor_si128(c2[2389],simde_mm_xor_si128(c2[3357],simde_mm_xor_si128(c2[13035],simde_mm_xor_si128(c2[14003],simde_mm_xor_si128(c2[5329],simde_mm_xor_si128(c2[11147],simde_mm_xor_si128(c2[9216],simde_mm_xor_si128(c2[10184],simde_mm_xor_si128(c2[7308],simde_mm_xor_si128(c2[1517],simde_mm_xor_si128(c2[12148],simde_mm_xor_si128(c2[13116],simde_mm_xor_si128(c2[9256],simde_mm_xor_si128(c2[13173],simde_mm_xor_si128(c2[1552],simde_mm_xor_si128(c2[2520],simde_mm_xor_si128(c2[572],simde_mm_xor_si128(c2[1540],simde_mm_xor_si128(c2[2556],simde_mm_xor_si128(c2[1585],simde_mm_xor_si128(c2[2553],simde_mm_xor_si128(c2[12248],simde_mm_xor_si128(c2[13216],simde_mm_xor_si128(c2[2573],simde_mm_xor_si128(c2[9380],simde_mm_xor_si128(c2[6488],simde_mm_xor_si128(c2[673],simde_mm_xor_si128(c2[1641],simde_mm_xor_si128(c2[15228],simde_mm_xor_si128(c2[5565],simde_mm_xor_si128(c2[12340],simde_mm_xor_si128(c2[13308],simde_mm_xor_si128(c2[9473],simde_mm_xor_si128(c2[6569],simde_mm_xor_si128(c2[7537],simde_mm_xor_si128(c2[10446],simde_mm_xor_si128(c2[11414],simde_mm_xor_si128(c2[4666],simde_mm_xor_si128(c2[6615],simde_mm_xor_si128(c2[7583],simde_mm_xor_si128(c2[2742],simde_mm_xor_si128(c2[3710],simde_mm_xor_si128(c2[11499],simde_mm_xor_si128(c2[13425],simde_mm_xor_si128(c2[5694],simde_mm_xor_si128(c2[6662],simde_mm_xor_si128(c2[3792],simde_mm_xor_si128(c2[10565],simde_mm_xor_si128(c2[11533],simde_mm_xor_si128(c2[12508],simde_mm_xor_si128(c2[13476],simde_mm_xor_si128(c2[941],simde_mm_xor_si128(c2[2862],simde_mm_xor_si128(c2[12552],c2[13520])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 33
+     d2[726]=simde_mm_xor_si128(c2[7791],simde_mm_xor_si128(c2[107],simde_mm_xor_si128(c2[13076],c2[5770])));
+
+//row: 34
+     d2[748]=simde_mm_xor_si128(c2[11619],simde_mm_xor_si128(c2[328],simde_mm_xor_si128(c2[667],c2[6559])));
+
+//row: 35
+     d2[770]=simde_mm_xor_si128(c2[1949],simde_mm_xor_si128(c2[8733],simde_mm_xor_si128(c2[13560],simde_mm_xor_si128(c2[12584],simde_mm_xor_si128(c2[1982],simde_mm_xor_si128(c2[14582],simde_mm_xor_si128(c2[9724],simde_mm_xor_si128(c2[3922],simde_mm_xor_si128(c2[2997],simde_mm_xor_si128(c2[12680],simde_mm_xor_si128(c2[1064],simde_mm_xor_si128(c2[5955],simde_mm_xor_si128(c2[11748],simde_mm_xor_si128(c2[12735],simde_mm_xor_si128(c2[13743],simde_mm_xor_si128(c2[9869],simde_mm_xor_si128(c2[3092],simde_mm_xor_si128(c2[10875],simde_mm_xor_si128(c2[7975],simde_mm_xor_si128(c2[10869],simde_mm_xor_si128(c2[1237],simde_mm_xor_si128(c2[5115],simde_mm_xor_si128(c2[7043],simde_mm_xor_si128(c2[2213],simde_mm_xor_si128(c2[11925],simde_mm_xor_si128(c2[11926],simde_mm_xor_si128(c2[2249],simde_mm_xor_si128(c2[10045],simde_mm_xor_si128(c2[9073],simde_mm_xor_si128(c2[7128],simde_mm_xor_si128(c2[14925],simde_mm_xor_si128(c2[13967],simde_mm_xor_si128(c2[2345],simde_mm_xor_si128(c2[6258],simde_mm_xor_si128(c2[11090],simde_mm_xor_si128(c2[6249],simde_mm_xor_si128(c2[13084],simde_mm_xor_si128(c2[3393],simde_mm_xor_si128(c2[2430],simde_mm_xor_si128(c2[15063],simde_mm_xor_si128(c2[9250],simde_mm_xor_si128(c2[5384],simde_mm_xor_si128(c2[3444],simde_mm_xor_si128(c2[5419],simde_mm_xor_si128(c2[10253],simde_mm_xor_si128(c2[9295],simde_mm_xor_si128(c2[10311],simde_mm_xor_si128(c2[10308],simde_mm_xor_si128(c2[5462],simde_mm_xor_si128(c2[1648],simde_mm_xor_si128(c2[14221],simde_mm_xor_si128(c2[9374],simde_mm_xor_si128(c2[7496],simde_mm_xor_si128(c2[13298],simde_mm_xor_si128(c2[5554],simde_mm_xor_si128(c2[1719],simde_mm_xor_si128(c2[15270],simde_mm_xor_si128(c2[3660],simde_mm_xor_si128(c2[12421],simde_mm_xor_si128(c2[15316],simde_mm_xor_si128(c2[11443],simde_mm_xor_si128(c2[3745],simde_mm_xor_si128(c2[5693],simde_mm_xor_si128(c2[14395],simde_mm_xor_si128(c2[11547],simde_mm_xor_si128(c2[3801],simde_mm_xor_si128(c2[5722],simde_mm_xor_si128(c2[8674],simde_mm_xor_si128(c2[10617],c2[5766])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 36
+     d2[792]=simde_mm_xor_si128(c2[6784],simde_mm_xor_si128(c2[15136],simde_mm_xor_si128(c2[3566],c2[12419])));
+
+//row: 37
+     d2[814]=simde_mm_xor_si128(c2[10649],simde_mm_xor_si128(c2[11617],simde_mm_xor_si128(c2[2914],simde_mm_xor_si128(c2[7763],simde_mm_xor_si128(c2[6787],simde_mm_xor_si128(c2[10704],simde_mm_xor_si128(c2[11672],simde_mm_xor_si128(c2[8763],simde_mm_xor_si128(c2[3927],simde_mm_xor_si128(c2[14583],simde_mm_xor_si128(c2[11719],simde_mm_xor_si128(c2[12687],simde_mm_xor_si128(c2[6883],simde_mm_xor_si128(c2[10754],simde_mm_xor_si128(c2[14655],simde_mm_xor_si128(c2[136],simde_mm_xor_si128(c2[5951],simde_mm_xor_si128(c2[6916],simde_mm_xor_si128(c2[7924],simde_mm_xor_si128(c2[4050],simde_mm_xor_si128(c2[12760],simde_mm_xor_si128(c2[4110],simde_mm_xor_si128(c2[5078],simde_mm_xor_si128(c2[2156],simde_mm_xor_si128(c2[5072],simde_mm_xor_si128(c2[9959],simde_mm_xor_si128(c2[10927],simde_mm_xor_si128(c2[14805],simde_mm_xor_si128(c2[1246],simde_mm_xor_si128(c2[6128],simde_mm_xor_si128(c2[6129],simde_mm_xor_si128(c2[11939],simde_mm_xor_si128(c2[4226],simde_mm_xor_si128(c2[3276],simde_mm_xor_si128(c2[1331],simde_mm_xor_si128(c2[8160],simde_mm_xor_si128(c2[9128],simde_mm_xor_si128(c2[8148],simde_mm_xor_si128(c2[12013],simde_mm_xor_si128(c2[14980],simde_mm_xor_si128(c2[461],simde_mm_xor_si128(c2[5293],simde_mm_xor_si128(c2[452],simde_mm_xor_si128(c2[6297],simde_mm_xor_si128(c2[7265],simde_mm_xor_si128(c2[13083],simde_mm_xor_si128(c2[12120],simde_mm_xor_si128(c2[8276],simde_mm_xor_si128(c2[9244],simde_mm_xor_si128(c2[3453],simde_mm_xor_si128(c2[15052],simde_mm_xor_si128(c2[14141],simde_mm_xor_si128(c2[15109],simde_mm_xor_si128(c2[4456],simde_mm_xor_si128(c2[3476],simde_mm_xor_si128(c2[12202],simde_mm_xor_si128(c2[4492],simde_mm_xor_si128(c2[4489],simde_mm_xor_si128(c2[15152],simde_mm_xor_si128(c2[10348],simde_mm_xor_si128(c2[11316],simde_mm_xor_si128(c2[8424],simde_mm_xor_si128(c2[3577],simde_mm_xor_si128(c2[709],simde_mm_xor_si128(c2[1677],simde_mm_xor_si128(c2[7501],simde_mm_xor_si128(c2[15244],simde_mm_xor_si128(c2[11409],simde_mm_xor_si128(c2[9473],simde_mm_xor_si128(c2[13350],simde_mm_xor_si128(c2[5634],simde_mm_xor_si128(c2[6602],simde_mm_xor_si128(c2[9519],simde_mm_xor_si128(c2[5646],simde_mm_xor_si128(c2[12467],simde_mm_xor_si128(c2[13435],simde_mm_xor_si128(c2[15361],simde_mm_xor_si128(c2[8598],simde_mm_xor_si128(c2[4760],simde_mm_xor_si128(c2[5728],simde_mm_xor_si128(c2[13469],simde_mm_xor_si128(c2[15412],simde_mm_xor_si128(c2[1909],simde_mm_xor_si128(c2[2877],simde_mm_xor_si128(c2[4798],c2[15456])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 38
+     d2[836]=simde_mm_xor_si128(c2[3882],simde_mm_xor_si128(c2[4280],simde_mm_xor_si128(c2[1421],c2[6351])));
+
+//row: 39
+     d2[858]=simde_mm_xor_si128(c2[11674],simde_mm_xor_si128(c2[7892],simde_mm_xor_si128(c2[5150],c2[853])));
+
+//row: 40
+     d2[880]=simde_mm_xor_si128(c2[19],simde_mm_xor_si128(c2[12950],c2[7532]));
+
+//row: 41
+     d2[902]=simde_mm_xor_si128(c2[10699],simde_mm_xor_si128(c2[12720],simde_mm_xor_si128(c2[9109],c2[809])));
+
+//row: 42
+     d2[924]=simde_mm_xor_si128(c2[7763],simde_mm_xor_si128(c2[14525],simde_mm_xor_si128(c2[2919],simde_mm_xor_si128(c2[3887],simde_mm_xor_si128(c2[1943],simde_mm_xor_si128(c2[2911],simde_mm_xor_si128(c2[18],simde_mm_xor_si128(c2[7796],simde_mm_xor_si128(c2[3919],simde_mm_xor_si128(c2[4887],simde_mm_xor_si128(c2[14570],simde_mm_xor_si128(c2[51],simde_mm_xor_si128(c2[8811],simde_mm_xor_si128(c2[3007],simde_mm_xor_si128(c2[5910],simde_mm_xor_si128(c2[6878],simde_mm_xor_si128(c2[11769],simde_mm_xor_si128(c2[2075],simde_mm_xor_si128(c2[2072],simde_mm_xor_si128(c2[3040],simde_mm_xor_si128(c2[4048],simde_mm_xor_si128(c2[14715],simde_mm_xor_si128(c2[196],simde_mm_xor_si128(c2[7938],simde_mm_xor_si128(c2[8906],simde_mm_xor_si128(c2[3085],simde_mm_xor_si128(c2[1202],simde_mm_xor_si128(c2[13789],simde_mm_xor_si128(c2[228],simde_mm_xor_si128(c2[1196],simde_mm_xor_si128(c2[7051],simde_mm_xor_si128(c2[9961],simde_mm_xor_si128(c2[10929],simde_mm_xor_si128(c2[11889],simde_mm_xor_si128(c2[12857],simde_mm_xor_si128(c2[2252],simde_mm_xor_si128(c2[1285],simde_mm_xor_si128(c2[2253],simde_mm_xor_si128(c2[7095],simde_mm_xor_si128(c2[8063],simde_mm_xor_si128(c2[372],simde_mm_xor_si128(c2[13919],simde_mm_xor_si128(c2[14887],simde_mm_xor_si128(c2[11974],simde_mm_xor_si128(c2[12942],simde_mm_xor_si128(c2[5252],simde_mm_xor_si128(c2[4272],simde_mm_xor_si128(c2[7191],simde_mm_xor_si128(c2[8159],simde_mm_xor_si128(c2[12072],simde_mm_xor_si128(c2[449],simde_mm_xor_si128(c2[1417],simde_mm_xor_si128(c2[11095],simde_mm_xor_si128(c2[12063],simde_mm_xor_si128(c2[3389],simde_mm_xor_si128(c2[9207],simde_mm_xor_si128(c2[7276],simde_mm_xor_si128(c2[8244],simde_mm_xor_si128(c2[5368],simde_mm_xor_si128(c2[15064],simde_mm_xor_si128(c2[10208],simde_mm_xor_si128(c2[11176],simde_mm_xor_si128(c2[11233],simde_mm_xor_si128(c2[15099],simde_mm_xor_si128(c2[580],simde_mm_xor_si128(c2[14141],simde_mm_xor_si128(c2[15109],simde_mm_xor_si128(c2[616],simde_mm_xor_si128(c2[15154],simde_mm_xor_si128(c2[635],simde_mm_xor_si128(c2[10308],simde_mm_xor_si128(c2[11276],simde_mm_xor_si128(c2[7440],simde_mm_xor_si128(c2[4548],simde_mm_xor_si128(c2[14220],simde_mm_xor_si128(c2[15188],simde_mm_xor_si128(c2[13288],simde_mm_xor_si128(c2[3625],simde_mm_xor_si128(c2[10400],simde_mm_xor_si128(c2[11368],simde_mm_xor_si128(c2[7533],simde_mm_xor_si128(c2[4629],simde_mm_xor_si128(c2[5597],simde_mm_xor_si128(c2[8506],simde_mm_xor_si128(c2[9474],simde_mm_xor_si128(c2[2748],simde_mm_xor_si128(c2[4675],simde_mm_xor_si128(c2[5643],simde_mm_xor_si128(c2[802],simde_mm_xor_si128(c2[1770],simde_mm_xor_si128(c2[9559],simde_mm_xor_si128(c2[11485],simde_mm_xor_si128(c2[3754],simde_mm_xor_si128(c2[4722],simde_mm_xor_si128(c2[1852],simde_mm_xor_si128(c2[8625],simde_mm_xor_si128(c2[9593],simde_mm_xor_si128(c2[10568],simde_mm_xor_si128(c2[11536],simde_mm_xor_si128(c2[14488],simde_mm_xor_si128(c2[944],simde_mm_xor_si128(c2[10612],c2[11580]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 43
+     d2[946]=simde_mm_xor_si128(c2[13558],simde_mm_xor_si128(c2[4855],simde_mm_xor_si128(c2[9682],simde_mm_xor_si128(c2[7760],simde_mm_xor_si128(c2[8728],simde_mm_xor_si128(c2[13613],simde_mm_xor_si128(c2[10704],simde_mm_xor_si128(c2[4900],simde_mm_xor_si128(c2[5868],simde_mm_xor_si128(c2[1993],simde_mm_xor_si128(c2[14628],simde_mm_xor_si128(c2[8802],simde_mm_xor_si128(c2[12673],simde_mm_xor_si128(c2[2077],simde_mm_xor_si128(c2[7892],simde_mm_xor_si128(c2[7889],simde_mm_xor_si128(c2[8857],simde_mm_xor_si128(c2[9865],simde_mm_xor_si128(c2[5991],simde_mm_xor_si128(c2[13733],simde_mm_xor_si128(c2[14701],simde_mm_xor_si128(c2[6997],simde_mm_xor_si128(c2[4097],simde_mm_xor_si128(c2[7013],simde_mm_xor_si128(c2[12868],simde_mm_xor_si128(c2[1237],simde_mm_xor_si128(c2[2219],simde_mm_xor_si128(c2[3187],simde_mm_xor_si128(c2[8069],simde_mm_xor_si128(c2[8070],simde_mm_xor_si128(c2[12912],simde_mm_xor_si128(c2[13880],simde_mm_xor_si128(c2[6167],simde_mm_xor_si128(c2[5195],simde_mm_xor_si128(c2[2304],simde_mm_xor_si128(c2[3272],simde_mm_xor_si128(c2[11047],simde_mm_xor_si128(c2[10089],simde_mm_xor_si128(c2[13954],simde_mm_xor_si128(c2[2380],simde_mm_xor_si128(c2[7234],simde_mm_xor_si128(c2[1425],simde_mm_xor_si128(c2[2393],simde_mm_xor_si128(c2[9206],simde_mm_xor_si128(c2[15024],simde_mm_xor_si128(c2[13071],simde_mm_xor_si128(c2[14039],simde_mm_xor_si128(c2[11185],simde_mm_xor_si128(c2[5372],simde_mm_xor_si128(c2[538],simde_mm_xor_si128(c2[1506],simde_mm_xor_si128(c2[1541],simde_mm_xor_si128(c2[6397],simde_mm_xor_si128(c2[4449],simde_mm_xor_si128(c2[5417],simde_mm_xor_si128(c2[6433],simde_mm_xor_si128(c2[6430],simde_mm_xor_si128(c2[616],simde_mm_xor_si128(c2[1584],simde_mm_xor_si128(c2[13257],simde_mm_xor_si128(c2[10343],simde_mm_xor_si128(c2[5518],simde_mm_xor_si128(c2[3618],simde_mm_xor_si128(c2[9420],simde_mm_xor_si128(c2[708],simde_mm_xor_si128(c2[1676],simde_mm_xor_si128(c2[3608],simde_mm_xor_si128(c2[13350],simde_mm_xor_si128(c2[11414],simde_mm_xor_si128(c2[14301],simde_mm_xor_si128(c2[15269],simde_mm_xor_si128(c2[8543],simde_mm_xor_si128(c2[11460],simde_mm_xor_si128(c2[6619],simde_mm_xor_si128(c2[7587],simde_mm_xor_si128(c2[5635],simde_mm_xor_si128(c2[15376],simde_mm_xor_si128(c2[1815],simde_mm_xor_si128(c2[10517],simde_mm_xor_si128(c2[7669],simde_mm_xor_si128(c2[15410],simde_mm_xor_si128(c2[898],simde_mm_xor_si128(c2[1866],simde_mm_xor_si128(c2[4796],simde_mm_xor_si128(c2[6739],simde_mm_xor_si128(c2[942],c2[1910]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 44
+     d2[968]=simde_mm_xor_si128(c2[5821],simde_mm_xor_si128(c2[12605],simde_mm_xor_si128(c2[1945],simde_mm_xor_si128(c2[969],simde_mm_xor_si128(c2[14524],simde_mm_xor_si128(c2[5854],simde_mm_xor_si128(c2[2967],simde_mm_xor_si128(c2[13596],simde_mm_xor_si128(c2[6869],simde_mm_xor_si128(c2[1065],simde_mm_xor_si128(c2[4936],simde_mm_xor_si128(c2[9827],simde_mm_xor_si128(c2[133],simde_mm_xor_si128(c2[1120],simde_mm_xor_si128(c2[2128],simde_mm_xor_si128(c2[13741],simde_mm_xor_si128(c2[6964],simde_mm_xor_si128(c2[14747],simde_mm_xor_si128(c2[11847],simde_mm_xor_si128(c2[14741],simde_mm_xor_si128(c2[5109],simde_mm_xor_si128(c2[8987],simde_mm_xor_si128(c2[10915],simde_mm_xor_si128(c2[310],simde_mm_xor_si128(c2[311],simde_mm_xor_si128(c2[6121],simde_mm_xor_si128(c2[4195],simde_mm_xor_si128(c2[13917],simde_mm_xor_si128(c2[12945],simde_mm_xor_si128(c2[11000],simde_mm_xor_si128(c2[3310],simde_mm_xor_si128(c2[2352],simde_mm_xor_si128(c2[6217],simde_mm_xor_si128(c2[5254],simde_mm_xor_si128(c2[10130],simde_mm_xor_si128(c2[14962],simde_mm_xor_si128(c2[10121],simde_mm_xor_si128(c2[1469],simde_mm_xor_si128(c2[7265],simde_mm_xor_si128(c2[6302],simde_mm_xor_si128(c2[3448],simde_mm_xor_si128(c2[13122],simde_mm_xor_si128(c2[9256],simde_mm_xor_si128(c2[9291],simde_mm_xor_si128(c2[14125],simde_mm_xor_si128(c2[13167],simde_mm_xor_si128(c2[14183],simde_mm_xor_si128(c2[14180],simde_mm_xor_si128(c2[9334],simde_mm_xor_si128(c2[5520],simde_mm_xor_si128(c2[2606],simde_mm_xor_si128(c2[13246],simde_mm_xor_si128(c2[11368],simde_mm_xor_si128(c2[1683],simde_mm_xor_si128(c2[9426],simde_mm_xor_si128(c2[5591],simde_mm_xor_si128(c2[3655],simde_mm_xor_si128(c2[7532],simde_mm_xor_si128(c2[806],simde_mm_xor_si128(c2[3701],simde_mm_xor_si128(c2[15315],simde_mm_xor_si128(c2[7617],simde_mm_xor_si128(c2[9565],simde_mm_xor_si128(c2[2780],simde_mm_xor_si128(c2[15419],simde_mm_xor_si128(c2[7673],simde_mm_xor_si128(c2[9594],simde_mm_xor_si128(c2[12546],simde_mm_xor_si128(c2[14489],c2[9638])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 45
+     d2[990]=simde_mm_xor_si128(c2[1985],simde_mm_xor_si128(c2[3172],c2[11102]));
+  }
+}
+#endif
diff --git a/openair1/PHY/CODING/nrLDPC_encoder/ldpc384_byte.c b/openair1/PHY/CODING/nrLDPC_encoder/ldpc384_byte.c
index fa906bbefb1fa4e41e9c91c61c780c1004d81e5d..7c1092276743f2787040f4bf1e2f672069848930 100644
--- a/openair1/PHY/CODING/nrLDPC_encoder/ldpc384_byte.c
+++ b/openair1/PHY/CODING/nrLDPC_encoder/ldpc384_byte.c
@@ -1,9 +1,10 @@
+#ifdef __AVX2__
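+// Only this AVX2 (256-bit) build of the encoder is compiled when the target
+// supports AVX2; otherwise the new ldpc384_byte_128.c below provides the
+// same ldpc384_byte() on 128-bit vectors.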
 #include "PHY/sse_intrin.h"
 // generated code for Zc=384, byte encoding
 static inline void ldpc384_byte(uint8_t *c,uint8_t *d) {
-  __m256i *csimd=(__m256i *)c,*dsimd=(__m256i *)d;
+  simde__m256i *csimd=(simde__m256i *)c,*dsimd=(simde__m256i *)d;
 
-  __m256i *c2,*d2;
+  simde__m256i *c2,*d2;
 
   int i2;
   for (i2=0; i2<12; i2++) {
@@ -149,3 +150,4 @@ static inline void ldpc384_byte(uint8_t *c,uint8_t *d) {
      d2[540]=simde_mm256_xor_si256(c2[3724],simde_mm256_xor_si256(c2[11236],c2[8160]));
   }
 }
+#endif
diff --git a/openair1/PHY/CODING/nrLDPC_encoder/ldpc384_byte_128.c b/openair1/PHY/CODING/nrLDPC_encoder/ldpc384_byte_128.c
new file mode 100644
index 0000000000000000000000000000000000000000..d22c054209d2b140deb3ed75b08541c3f6e864a0
--- /dev/null
+++ b/openair1/PHY/CODING/nrLDPC_encoder/ldpc384_byte_128.c
@@ -0,0 +1,153 @@
+#ifndef __AVX2__
+#include "PHY/sse_intrin.h"
+// generated code for Zc=384, byte encoding
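+// 128-bit fallback used when AVX2 is unavailable (e.g. ARM NEON through
+// SIMDE): same encoding as ldpc384_byte.c, but spread over twice as many
+// simde__m128i words per step (i2<24 instead of i2<12).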
+static inline void ldpc384_byte(uint8_t *c,uint8_t *d) {
+  simde__m128i *csimd=(simde__m128i *)c,*dsimd=(simde__m128i *)d;
+
+  simde__m128i *c2,*d2;
+
+  int i2;
+  for (i2=0; i2<24; i2++) {
+     c2=&csimd[i2];
+     d2=&dsimd[i2];
+
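+// Each "row" below XOR-accumulates a fixed set of 128-bit input words from
+// c2 into one output word of d2; the index lists are emitted offline by the
+// LDPC code generator (presumably encoding the base-graph circulant shifts).
+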
+//row: 0
+     d2[0]=simde_mm_xor_si128(c2[3187],simde_mm_xor_si128(c2[12676],simde_mm_xor_si128(c2[13740],simde_mm_xor_si128(c2[4241],simde_mm_xor_si128(c2[3217],simde_mm_xor_si128(c2[10623],simde_mm_xor_si128(c2[7445],simde_mm_xor_si128(c2[2211],simde_mm_xor_si128(c2[12772],simde_mm_xor_si128(c2[8564],simde_mm_xor_si128(c2[1223],simde_mm_xor_si128(c2[9652],simde_mm_xor_si128(c2[144],simde_mm_xor_si128(c2[210],simde_mm_xor_si128(c2[12884],simde_mm_xor_si128(c2[3377],simde_mm_xor_si128(c2[5531],simde_mm_xor_si128(c2[249],simde_mm_xor_si128(c2[256],simde_mm_xor_si128(c2[8749],simde_mm_xor_si128(c2[1354],simde_mm_xor_si128(c2[7692],simde_mm_xor_si128(c2[11972],simde_mm_xor_si128(c2[11968],simde_mm_xor_si128(c2[9849],simde_mm_xor_si128(c2[12020],simde_mm_xor_si128(c2[394],simde_mm_xor_si128(c2[8835],simde_mm_xor_si128(c2[14179],simde_mm_xor_si128(c2[2555],simde_mm_xor_si128(c2[16275],simde_mm_xor_si128(c2[498],simde_mm_xor_si128(c2[1544],simde_mm_xor_si128(c2[4712],simde_mm_xor_si128(c2[14262],simde_mm_xor_si128(c2[7938],simde_mm_xor_si128(c2[1603],simde_mm_xor_si128(c2[1633],simde_mm_xor_si128(c2[6933],simde_mm_xor_si128(c2[7982],simde_mm_xor_si128(c2[5926],simde_mm_xor_si128(c2[9084],simde_mm_xor_si128(c2[5925],simde_mm_xor_si128(c2[10189],simde_mm_xor_si128(c2[9125],simde_mm_xor_si128(c2[4909],simde_mm_xor_si128(c2[8125],simde_mm_xor_si128(c2[3894],simde_mm_xor_si128(c2[6003],simde_mm_xor_si128(c2[11334],simde_mm_xor_si128(c2[2902],simde_mm_xor_si128(c2[787],simde_mm_xor_si128(c2[2935],simde_mm_xor_si128(c2[3992],simde_mm_xor_si128(c2[13506],simde_mm_xor_si128(c2[2991],simde_mm_xor_si128(c2[879],simde_mm_xor_si128(c2[16720],simde_mm_xor_si128(c2[5147],simde_mm_xor_si128(c2[12548],simde_mm_xor_si128(c2[14652],simde_mm_xor_si128(c2[11540],simde_mm_xor_si128(c2[14688],simde_mm_xor_si128(c2[8354],simde_mm_xor_si128(c2[11589],simde_mm_xor_si128(c2[1015],c2[6310]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 1
+     d2[24]=simde_mm_xor_si128(c2[3187],simde_mm_xor_si128(c2[4243],simde_mm_xor_si128(c2[13732],simde_mm_xor_si128(c2[14796],simde_mm_xor_si128(c2[5297],simde_mm_xor_si128(c2[3217],simde_mm_xor_si128(c2[4273],simde_mm_xor_si128(c2[11679],simde_mm_xor_si128(c2[8501],simde_mm_xor_si128(c2[2211],simde_mm_xor_si128(c2[3267],simde_mm_xor_si128(c2[13828],simde_mm_xor_si128(c2[9620],simde_mm_xor_si128(c2[1223],simde_mm_xor_si128(c2[2279],simde_mm_xor_si128(c2[10708],simde_mm_xor_si128(c2[1200],simde_mm_xor_si128(c2[1266],simde_mm_xor_si128(c2[13940],simde_mm_xor_si128(c2[4433],simde_mm_xor_si128(c2[5531],simde_mm_xor_si128(c2[6587],simde_mm_xor_si128(c2[1305],simde_mm_xor_si128(c2[1312],simde_mm_xor_si128(c2[8749],simde_mm_xor_si128(c2[9805],simde_mm_xor_si128(c2[2410],simde_mm_xor_si128(c2[8748],simde_mm_xor_si128(c2[13028],simde_mm_xor_si128(c2[13024],simde_mm_xor_si128(c2[10905],simde_mm_xor_si128(c2[13076],simde_mm_xor_si128(c2[1450],simde_mm_xor_si128(c2[9891],simde_mm_xor_si128(c2[14179],simde_mm_xor_si128(c2[15235],simde_mm_xor_si128(c2[3611],simde_mm_xor_si128(c2[436],simde_mm_xor_si128(c2[498],simde_mm_xor_si128(c2[1554],simde_mm_xor_si128(c2[2600],simde_mm_xor_si128(c2[5768],simde_mm_xor_si128(c2[14262],simde_mm_xor_si128(c2[15318],simde_mm_xor_si128(c2[8994],simde_mm_xor_si128(c2[2659],simde_mm_xor_si128(c2[1633],simde_mm_xor_si128(c2[2689],simde_mm_xor_si128(c2[7989],simde_mm_xor_si128(c2[9038],simde_mm_xor_si128(c2[5926],simde_mm_xor_si128(c2[6982],simde_mm_xor_si128(c2[10140],simde_mm_xor_si128(c2[6981],simde_mm_xor_si128(c2[11245],simde_mm_xor_si128(c2[10181],simde_mm_xor_si128(c2[5965],simde_mm_xor_si128(c2[8125],simde_mm_xor_si128(c2[9181],simde_mm_xor_si128(c2[4950],simde_mm_xor_si128(c2[7059],simde_mm_xor_si128(c2[11334],simde_mm_xor_si128(c2[12390],simde_mm_xor_si128(c2[3958],simde_mm_xor_si128(c2[1843],simde_mm_xor_si128(c2[3991],simde_mm_xor_si128(c2[5048],simde_mm_xor_si128(c2[14562],simde_mm_xor_si128(c2[2991],simde_mm_xor_si128(c2[4047],simde_mm_xor_si128(c2[1935],simde_mm_xor_si128(c2[881],simde_mm_xor_si128(c2[5147],simde_mm_xor_si128(c2[6203],simde_mm_xor_si128(c2[13604],simde_mm_xor_si128(c2[15708],simde_mm_xor_si128(c2[11540],simde_mm_xor_si128(c2[12596],simde_mm_xor_si128(c2[15744],simde_mm_xor_si128(c2[9410],simde_mm_xor_si128(c2[11589],simde_mm_xor_si128(c2[12645],simde_mm_xor_si128(c2[2071],c2[7366])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 2
+     d2[48]=simde_mm_xor_si128(c2[4243],simde_mm_xor_si128(c2[13732],simde_mm_xor_si128(c2[13740],simde_mm_xor_si128(c2[14796],simde_mm_xor_si128(c2[4241],simde_mm_xor_si128(c2[5297],simde_mm_xor_si128(c2[4273],simde_mm_xor_si128(c2[10623],simde_mm_xor_si128(c2[11679],simde_mm_xor_si128(c2[7445],simde_mm_xor_si128(c2[8501],simde_mm_xor_si128(c2[3267],simde_mm_xor_si128(c2[13828],simde_mm_xor_si128(c2[8564],simde_mm_xor_si128(c2[9620],simde_mm_xor_si128(c2[2279],simde_mm_xor_si128(c2[10708],simde_mm_xor_si128(c2[144],simde_mm_xor_si128(c2[1200],simde_mm_xor_si128(c2[1266],simde_mm_xor_si128(c2[12884],simde_mm_xor_si128(c2[13940],simde_mm_xor_si128(c2[3377],simde_mm_xor_si128(c2[4433],simde_mm_xor_si128(c2[6587],simde_mm_xor_si128(c2[1305],simde_mm_xor_si128(c2[256],simde_mm_xor_si128(c2[1312],simde_mm_xor_si128(c2[9805],simde_mm_xor_si128(c2[1354],simde_mm_xor_si128(c2[2410],simde_mm_xor_si128(c2[7692],simde_mm_xor_si128(c2[8748],simde_mm_xor_si128(c2[13028],simde_mm_xor_si128(c2[11968],simde_mm_xor_si128(c2[13024],simde_mm_xor_si128(c2[9849],simde_mm_xor_si128(c2[10905],simde_mm_xor_si128(c2[13076],simde_mm_xor_si128(c2[394],simde_mm_xor_si128(c2[1450],simde_mm_xor_si128(c2[8835],simde_mm_xor_si128(c2[9891],simde_mm_xor_si128(c2[15235],simde_mm_xor_si128(c2[3611],simde_mm_xor_si128(c2[16275],simde_mm_xor_si128(c2[436],simde_mm_xor_si128(c2[1554],simde_mm_xor_si128(c2[1544],simde_mm_xor_si128(c2[2600],simde_mm_xor_si128(c2[4712],simde_mm_xor_si128(c2[5768],simde_mm_xor_si128(c2[15318],simde_mm_xor_si128(c2[8994],simde_mm_xor_si128(c2[1603],simde_mm_xor_si128(c2[2659],simde_mm_xor_si128(c2[2689],simde_mm_xor_si128(c2[7989],simde_mm_xor_si128(c2[7982],simde_mm_xor_si128(c2[9038],simde_mm_xor_si128(c2[6982],simde_mm_xor_si128(c2[9084],simde_mm_xor_si128(c2[10140],simde_mm_xor_si128(c2[5925],simde_mm_xor_si128(c2[6981],simde_mm_xor_si128(c2[11245],simde_mm_xor_si128(c2[9125],simde_mm_xor_si128(c2[10181],simde_mm_xor_si128(c2[4909],simde_mm_xor_si128(c2[5965],simde_mm_xor_si128(c2[9181],simde_mm_xor_si128(c2[4950],simde_mm_xor_si128(c2[6003],simde_mm_xor_si128(c2[7059],simde_mm_xor_si128(c2[12390],simde_mm_xor_si128(c2[3958],simde_mm_xor_si128(c2[787],simde_mm_xor_si128(c2[1843],simde_mm_xor_si128(c2[3991],simde_mm_xor_si128(c2[3992],simde_mm_xor_si128(c2[5048],simde_mm_xor_si128(c2[13506],simde_mm_xor_si128(c2[14562],simde_mm_xor_si128(c2[4047],simde_mm_xor_si128(c2[879],simde_mm_xor_si128(c2[1935],simde_mm_xor_si128(c2[16720],simde_mm_xor_si128(c2[881],simde_mm_xor_si128(c2[6203],simde_mm_xor_si128(c2[13604],simde_mm_xor_si128(c2[14652],simde_mm_xor_si128(c2[15708],simde_mm_xor_si128(c2[12596],simde_mm_xor_si128(c2[14688],simde_mm_xor_si128(c2[15744],simde_mm_xor_si128(c2[8354],simde_mm_xor_si128(c2[9410],simde_mm_xor_si128(c2[12645],simde_mm_xor_si128(c2[2071],simde_mm_xor_si128(c2[6310],c2[7366]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 3
+     d2[72]=simde_mm_xor_si128(c2[4243],simde_mm_xor_si128(c2[13732],simde_mm_xor_si128(c2[14796],simde_mm_xor_si128(c2[4241],simde_mm_xor_si128(c2[5297],simde_mm_xor_si128(c2[4273],simde_mm_xor_si128(c2[11679],simde_mm_xor_si128(c2[7445],simde_mm_xor_si128(c2[8501],simde_mm_xor_si128(c2[3267],simde_mm_xor_si128(c2[13828],simde_mm_xor_si128(c2[9620],simde_mm_xor_si128(c2[2279],simde_mm_xor_si128(c2[10708],simde_mm_xor_si128(c2[144],simde_mm_xor_si128(c2[1200],simde_mm_xor_si128(c2[1266],simde_mm_xor_si128(c2[13940],simde_mm_xor_si128(c2[3377],simde_mm_xor_si128(c2[4433],simde_mm_xor_si128(c2[6587],simde_mm_xor_si128(c2[1305],simde_mm_xor_si128(c2[1312],simde_mm_xor_si128(c2[9805],simde_mm_xor_si128(c2[2410],simde_mm_xor_si128(c2[7692],simde_mm_xor_si128(c2[8748],simde_mm_xor_si128(c2[13028],simde_mm_xor_si128(c2[13024],simde_mm_xor_si128(c2[9849],simde_mm_xor_si128(c2[10905],simde_mm_xor_si128(c2[13076],simde_mm_xor_si128(c2[1450],simde_mm_xor_si128(c2[8835],simde_mm_xor_si128(c2[9891],simde_mm_xor_si128(c2[15235],simde_mm_xor_si128(c2[3611],simde_mm_xor_si128(c2[436],simde_mm_xor_si128(c2[1554],simde_mm_xor_si128(c2[2600],simde_mm_xor_si128(c2[4712],simde_mm_xor_si128(c2[5768],simde_mm_xor_si128(c2[15318],simde_mm_xor_si128(c2[8994],simde_mm_xor_si128(c2[1603],simde_mm_xor_si128(c2[2659],simde_mm_xor_si128(c2[2689],simde_mm_xor_si128(c2[7989],simde_mm_xor_si128(c2[7982],simde_mm_xor_si128(c2[9038],simde_mm_xor_si128(c2[6982],simde_mm_xor_si128(c2[10140],simde_mm_xor_si128(c2[5925],simde_mm_xor_si128(c2[6981],simde_mm_xor_si128(c2[11245],simde_mm_xor_si128(c2[10181],simde_mm_xor_si128(c2[4909],simde_mm_xor_si128(c2[5965],simde_mm_xor_si128(c2[9181],simde_mm_xor_si128(c2[4950],simde_mm_xor_si128(c2[7059],simde_mm_xor_si128(c2[12390],simde_mm_xor_si128(c2[3958],simde_mm_xor_si128(c2[787],simde_mm_xor_si128(c2[1843],simde_mm_xor_si128(c2[3991],simde_mm_xor_si128(c2[5048],simde_mm_xor_si128(c2[13506],simde_mm_xor_si128(c2[14562],simde_mm_xor_si128(c2[4047],simde_mm_xor_si128(c2[1935],simde_mm_xor_si128(c2[16720],simde_mm_xor_si128(c2[881],simde_mm_xor_si128(c2[6203],simde_mm_xor_si128(c2[13604],simde_mm_xor_si128(c2[15708],simde_mm_xor_si128(c2[12596],simde_mm_xor_si128(c2[15744],simde_mm_xor_si128(c2[8354],simde_mm_xor_si128(c2[9410],simde_mm_xor_si128(c2[12645],simde_mm_xor_si128(c2[2071],simde_mm_xor_si128(c2[6310],c2[7366])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 4
+     d2[96]=simde_mm_xor_si128(c2[12692],c2[5339]);
+
+//row: 5
+     d2[120]=simde_mm_xor_si128(c2[5],simde_mm_xor_si128(c2[9518],simde_mm_xor_si128(c2[10582],simde_mm_xor_si128(c2[1059],simde_mm_xor_si128(c2[3180],simde_mm_xor_si128(c2[59],simde_mm_xor_si128(c2[7441],simde_mm_xor_si128(c2[4287],simde_mm_xor_si128(c2[14832],simde_mm_xor_si128(c2[15948],simde_mm_xor_si128(c2[9614],simde_mm_xor_si128(c2[5382],simde_mm_xor_si128(c2[14936],simde_mm_xor_si128(c2[6494],simde_mm_xor_si128(c2[13881],simde_mm_xor_si128(c2[3319],simde_mm_xor_si128(c2[13923],simde_mm_xor_si128(c2[9702],simde_mm_xor_si128(c2[195],simde_mm_xor_si128(c2[2373],simde_mm_xor_si128(c2[13986],simde_mm_xor_si128(c2[13969],simde_mm_xor_si128(c2[5591],simde_mm_xor_si128(c2[15091],simde_mm_xor_si128(c2[4534],simde_mm_xor_si128(c2[8790],simde_mm_xor_si128(c2[8786],simde_mm_xor_si128(c2[6691],simde_mm_xor_si128(c2[8838],simde_mm_xor_si128(c2[14131],simde_mm_xor_si128(c2[5677],simde_mm_xor_si128(c2[10997],simde_mm_xor_si128(c2[16292],simde_mm_xor_si128(c2[13117],simde_mm_xor_si128(c2[14211],simde_mm_xor_si128(c2[15281],simde_mm_xor_si128(c2[1554],simde_mm_xor_si128(c2[11104],simde_mm_xor_si128(c2[4756],simde_mm_xor_si128(c2[15316],simde_mm_xor_si128(c2[15370],simde_mm_xor_si128(c2[3751],simde_mm_xor_si128(c2[4800],simde_mm_xor_si128(c2[6922],simde_mm_xor_si128(c2[2744],simde_mm_xor_si128(c2[5926],simde_mm_xor_si128(c2[2743],simde_mm_xor_si128(c2[7031],simde_mm_xor_si128(c2[5967],simde_mm_xor_si128(c2[1751],simde_mm_xor_si128(c2[4967],simde_mm_xor_si128(c2[736],simde_mm_xor_si128(c2[2845],simde_mm_xor_si128(c2[8176],simde_mm_xor_si128(c2[16615],simde_mm_xor_si128(c2[14500],simde_mm_xor_si128(c2[1839],simde_mm_xor_si128(c2[16672],simde_mm_xor_si128(c2[834],simde_mm_xor_si128(c2[10324],simde_mm_xor_si128(c2[16704],simde_mm_xor_si128(c2[14592],simde_mm_xor_si128(c2[13538],simde_mm_xor_si128(c2[1989],simde_mm_xor_si128(c2[9366],simde_mm_xor_si128(c2[11494],simde_mm_xor_si128(c2[8358],simde_mm_xor_si128(c2[11530],simde_mm_xor_si128(c2[5196],simde_mm_xor_si128(c2[8407],simde_mm_xor_si128(c2[14752],simde_mm_xor_si128(c2[3128],c2[4179]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 6
+     d2[144]=simde_mm_xor_si128(c2[6353],simde_mm_xor_si128(c2[1360],simde_mm_xor_si128(c2[1536],simde_mm_xor_si128(c2[16389],simde_mm_xor_si128(c2[13301],simde_mm_xor_si128(c2[14559],simde_mm_xor_si128(c2[2977],c2[2030])))))));
+
+//row: 7
+     d2[168]=simde_mm_xor_si128(c2[9504],simde_mm_xor_si128(c2[14835],simde_mm_xor_si128(c2[12883],simde_mm_xor_si128(c2[14084],simde_mm_xor_si128(c2[2514],c2[2791])))));
+
+//row: 8
+     d2[192]=simde_mm_xor_si128(c2[6359],simde_mm_xor_si128(c2[14789],simde_mm_xor_si128(c2[15848],simde_mm_xor_si128(c2[7407],simde_mm_xor_si128(c2[17],simde_mm_xor_si128(c2[7415],simde_mm_xor_si128(c2[8471],simde_mm_xor_si128(c2[7413],simde_mm_xor_si128(c2[14787],simde_mm_xor_si128(c2[15843],simde_mm_xor_si128(c2[3187],simde_mm_xor_si128(c2[6389],simde_mm_xor_si128(c2[14843],simde_mm_xor_si128(c2[13795],simde_mm_xor_si128(c2[4274],simde_mm_xor_si128(c2[5330],simde_mm_xor_si128(c2[10617],simde_mm_xor_si128(c2[1120],simde_mm_xor_si128(c2[2176],simde_mm_xor_si128(c2[3227],simde_mm_xor_si128(c2[5383],simde_mm_xor_si128(c2[13837],simde_mm_xor_si128(c2[15944],simde_mm_xor_si128(c2[7503],simde_mm_xor_si128(c2[11712],simde_mm_xor_si128(c2[2215],simde_mm_xor_si128(c2[3271],simde_mm_xor_si128(c2[4371],simde_mm_xor_si128(c2[12825],simde_mm_xor_si128(c2[12824],simde_mm_xor_si128(c2[4383],simde_mm_xor_si128(c2[3316],simde_mm_xor_si128(c2[10714],simde_mm_xor_si128(c2[11770],simde_mm_xor_si128(c2[5434],simde_mm_xor_si128(c2[3382],simde_mm_xor_si128(c2[11812],simde_mm_xor_si128(c2[16032],simde_mm_xor_si128(c2[6535],simde_mm_xor_si128(c2[7591],simde_mm_xor_si128(c2[6549],simde_mm_xor_si128(c2[13923],simde_mm_xor_si128(c2[14979],simde_mm_xor_si128(c2[8703],simde_mm_xor_si128(c2[262],simde_mm_xor_si128(c2[3421],simde_mm_xor_si128(c2[11875],simde_mm_xor_si128(c2[3428],simde_mm_xor_si128(c2[10802],simde_mm_xor_si128(c2[11858],simde_mm_xor_si128(c2[11921],simde_mm_xor_si128(c2[3456],simde_mm_xor_si128(c2[4526],simde_mm_xor_si128(c2[11924],simde_mm_xor_si128(c2[12980],simde_mm_xor_si128(c2[10864],simde_mm_xor_si128(c2[1367],simde_mm_xor_si128(c2[2423],simde_mm_xor_si128(c2[15120],simde_mm_xor_si128(c2[6679],simde_mm_xor_si128(c2[15140],simde_mm_xor_si128(c2[5619],simde_mm_xor_si128(c2[6675],simde_mm_xor_si128(c2[13021],simde_mm_xor_si128(c2[3524],simde_mm_xor_si128(c2[4580],simde_mm_xor_si128(c2[15168],simde_mm_xor_si128(c2[6727],simde_mm_xor_si128(c2[3566],simde_mm_xor_si128(c2[10964],simde_mm_xor_si128(c2[12020],simde_mm_xor_si128(c2[12007],simde_mm_xor_si128(c2[2510],simde_mm_xor_si128(c2[3566],simde_mm_xor_si128(c2[432],simde_mm_xor_si128(c2[8886],simde_mm_xor_si128(c2[5727],simde_mm_xor_si128(c2[14181],simde_mm_xor_si128(c2[2552],simde_mm_xor_si128(c2[9950],simde_mm_xor_si128(c2[11006],simde_mm_xor_si128(c2[3670],simde_mm_xor_si128(c2[12100],simde_mm_xor_si128(c2[4716],simde_mm_xor_si128(c2[12114],simde_mm_xor_si128(c2[13170],simde_mm_xor_si128(c2[7884],simde_mm_xor_si128(c2[15282],simde_mm_xor_si128(c2[16338],simde_mm_xor_si128(c2[539],simde_mm_xor_si128(c2[8993],simde_mm_xor_si128(c2[11110],simde_mm_xor_si128(c2[2645],simde_mm_xor_si128(c2[4775],simde_mm_xor_si128(c2[12149],simde_mm_xor_si128(c2[13205],simde_mm_xor_si128(c2[4805],simde_mm_xor_si128(c2[13259],simde_mm_xor_si128(c2[10081],simde_mm_xor_si128(c2[1640],simde_mm_xor_si128(c2[11154],simde_mm_xor_si128(c2[1633],simde_mm_xor_si128(c2[2689],simde_mm_xor_si128(c2[2689],simde_mm_xor_si128(c2[9074],simde_mm_xor_si128(c2[633],simde_mm_xor_si128(c2[12256],simde_mm_xor_si128(c2[2759],simde_mm_xor_si128(c2[3815],simde_mm_xor_si128(c2[9073],simde_mm_xor_si128(c2[16471],simde_mm_xor_si128(c2[632],simde_mm_xor_si128(c2[13361],simde_mm_xor_si128(c2[4896],simde_mm_xor_si128(c2[12297],simde_mm_xor_si128(c2[2800],simde_mm_xor_si128(c2[3856],simde_mm_xor_si128(c2[8081],simde_mm_xor_si128(c2[15479],simde_mm_xor_si128(c2[16535],simde_mm_xor_si128(c2[11297],simde_mm_xor_si128(c2[2832],simde_mm_xor_si128(c2[7066],simde_mm_xor_si128(c2[15520],simde_mm_xor_si128(c2[9175],simde_mm_xor_si128(c2[16573],simde_mm_xor_si128(c2[734],simde_mm_xor_si128(c2[14506],simde_mm_xor_si128(c2[6065],simde_mm_xor_si128(c2[6050],simde_mm_xor_si128(c2[14504],simde_mm_xor_si128(c2[3959],simde_mm_xor_si128(c2[11333],simde_mm_xor_si128(c2[12389],simde_mm_xor_si128(c2[8162],simde_mm_xor_si128(c2[6107],simde_mm_xor_si128(c2[14561],simde_mm_xor_si128(c2[7164],simde_mm_xor_si128(c2[14562],simde_mm_xor_si128(c2[15618],simde_mm_xor_si128(c2[16678],simde_mm_xor_si128(c2[7157],simde_mm_xor_si128(c2[8213],simde_mm_xor_si128(c2[6163],simde_mm_xor_si128(c2[14593],simde_mm_xor_si128(c2[4051],simde_mm_xor_si128(c2[11425],simde_mm_xor_si128(c2[12481],simde_mm_xor_si128(c2[2997],simde_mm_xor_si128(c2[10371],simde_mm_xor_si128(c2[11427],simde_mm_xor_si128(c2[8319],simde_mm_xor_si128(c2[16773],simde_mm_xor_si128(c2[15696],simde_mm_xor_si128(c2[7255],simde_mm_xor_si128(c2[929],simde_mm_xor_si128(c2[8327],simde_mm_xor_si128(c2[9383],simde_mm_xor_si128(c2[926],simde_mm_xor_si128(c2[14688],simde_mm_xor_si128(c2[6247],simde_mm_xor_si128(c2[965],simde_mm_xor_si128(c2[8363],simde_mm_xor_si128(c2[9419],simde_mm_xor_si128(c2[11526],simde_mm_xor_si128(c2[2029],simde_mm_xor_si128(c2[3085],simde_mm_xor_si128(c2[14737],simde_mm_xor_si128(c2[6296],simde_mm_xor_si128(c2[4187],simde_mm_xor_si128(c2[12641],simde_mm_xor_si128(c2[9458],simde_mm_xor_si128(c2[16856],simde_mm_xor_si128(c2[1017],c2[1031])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 9
+     d2[216]=simde_mm_xor_si128(c2[14806],simde_mm_xor_si128(c2[8510],simde_mm_xor_si128(c2[1556],simde_mm_xor_si128(c2[5816],simde_mm_xor_si128(c2[10131],simde_mm_xor_si128(c2[16674],simde_mm_xor_si128(c2[16707],c2[3077])))))));
+
+//row: 10
+     d2[240]=simde_mm_xor_si128(c2[5334],simde_mm_xor_si128(c2[3285],simde_mm_xor_si128(c2[2321],simde_mm_xor_si128(c2[16182],simde_mm_xor_si128(c2[16247],c2[2806])))));
+
+//row: 11
+     d2[264]=simde_mm_xor_si128(c2[1072],simde_mm_xor_si128(c2[6338],simde_mm_xor_si128(c2[7394],simde_mm_xor_si128(c2[10561],simde_mm_xor_si128(c2[12],simde_mm_xor_si128(c2[11625],simde_mm_xor_si128(c2[1076],simde_mm_xor_si128(c2[2126],simde_mm_xor_si128(c2[8448],simde_mm_xor_si128(c2[3],simde_mm_xor_si128(c2[1126],simde_mm_xor_si128(c2[6392],simde_mm_xor_si128(c2[7448],simde_mm_xor_si128(c2[8508],simde_mm_xor_si128(c2[14854],simde_mm_xor_si128(c2[5330],simde_mm_xor_si128(c2[11676],simde_mm_xor_si128(c2[6390],simde_mm_xor_si128(c2[96],simde_mm_xor_si128(c2[5386],simde_mm_xor_si128(c2[6442],simde_mm_xor_si128(c2[10657],simde_mm_xor_si128(c2[108],simde_mm_xor_si128(c2[6449],simde_mm_xor_si128(c2[12771],simde_mm_xor_si128(c2[16003],simde_mm_xor_si128(c2[4374],simde_mm_xor_si128(c2[5430],simde_mm_xor_si128(c2[7537],simde_mm_xor_si128(c2[13883],simde_mm_xor_si128(c2[14948],simde_mm_xor_si128(c2[4375],simde_mm_xor_si128(c2[14990],simde_mm_xor_si128(c2[4417],simde_mm_xor_si128(c2[10769],simde_mm_xor_si128(c2[196],simde_mm_xor_si128(c2[1262],simde_mm_xor_si128(c2[7584],simde_mm_xor_si128(c2[3416],simde_mm_xor_si128(c2[8706],simde_mm_xor_si128(c2[9762],simde_mm_xor_si128(c2[15029],simde_mm_xor_si128(c2[4480],simde_mm_xor_si128(c2[15036],simde_mm_xor_si128(c2[4487],simde_mm_xor_si128(c2[6634],simde_mm_xor_si128(c2[11924],simde_mm_xor_si128(c2[12980],simde_mm_xor_si128(c2[16134],simde_mm_xor_si128(c2[5585],simde_mm_xor_si128(c2[5577],simde_mm_xor_si128(c2[11923],simde_mm_xor_si128(c2[9857],simde_mm_xor_si128(c2[16179],simde_mm_xor_si128(c2[9853],simde_mm_xor_si128(c2[16199],simde_mm_xor_si128(c2[7734],simde_mm_xor_si128(c2[14080],simde_mm_xor_si128(c2[9905],simde_mm_xor_si128(c2[16227],simde_mm_xor_si128(c2[15174],simde_mm_xor_si128(c2[4625],simde_mm_xor_si128(c2[6720],simde_mm_xor_si128(c2[13066],simde_mm_xor_si128(c2[12064],simde_mm_xor_si128(c2[435],simde_mm_xor_si128(c2[1491],simde_mm_xor_si128(c2[440],simde_mm_xor_si128(c2[6786],simde_mm_xor_si128(c2[14160],simde_mm_xor_si128(c2[3611],simde_mm_xor_si128(c2[15278],simde_mm_xor_si128(c2[3649],simde_mm_xor_si128(c2[4705],simde_mm_xor_si128(c2[16324],simde_mm_xor_si128(c2[5775],simde_mm_xor_si128(c2[2597],simde_mm_xor_si128(c2[8943],simde_mm_xor_si128(c2[12147],simde_mm_xor_si128(c2[542],simde_mm_xor_si128(c2[1598],simde_mm_xor_si128(c2[5823],simde_mm_xor_si128(c2[12145],simde_mm_xor_si128(c2[16383],simde_mm_xor_si128(c2[5810],simde_mm_xor_si128(c2[16437],simde_mm_xor_si128(c2[4808],simde_mm_xor_si128(c2[5864],simde_mm_xor_si128(c2[4818],simde_mm_xor_si128(c2[11140],simde_mm_xor_si128(c2[5867],simde_mm_xor_si128(c2[12213],simde_mm_xor_si128(c2[9024],simde_mm_xor_si128(c2[3811],simde_mm_xor_si128(c2[9077],simde_mm_xor_si128(c2[10133],simde_mm_xor_si128(c2[6969],simde_mm_xor_si128(c2[13315],simde_mm_xor_si128(c2[3810],simde_mm_xor_si128(c2[10132],simde_mm_xor_si128(c2[8074],simde_mm_xor_si128(c2[14420],simde_mm_xor_si128(c2[7010],simde_mm_xor_si128(c2[13356],simde_mm_xor_si128(c2[2794],simde_mm_xor_si128(c2[9140],simde_mm_xor_si128(c2[6010],simde_mm_xor_si128(c2[11300],simde_mm_xor_si128(c2[12356],simde_mm_xor_si128(c2[1779],simde_mm_xor_si128(c2[8125],simde_mm_xor_si128(c2[3888],simde_mm_xor_si128(c2[10234],simde_mm_xor_si128(c2[9219],simde_mm_xor_si128(c2[14509],simde_mm_xor_si128(c2[15565],simde_mm_xor_si128(c2[787],simde_mm_xor_si128(c2[7109],simde_mm_xor_si128(c2[15567],simde_mm_xor_si128(c2[4994],simde_mm_xor_si128(c2[16610],simde_mm_xor_si128(c2[820],simde_mm_xor_si128(c2[7166],simde_mm_xor_si128(c2[1877],simde_mm_xor_si128(c2[8223],simde_mm_xor_si128(c2[11391],simde_mm_xor_si128(c2[818],simde_mm_xor_si128(c2[876],simde_mm_xor_si128(c2[6166],simde_mm_xor_si128(c2[7222],simde_mm_xor_si128(c2[15659],simde_mm_xor_si128(c2[5110],simde_mm_xor_si128(c2[14605],simde_mm_xor_si128(c2[4032],simde_mm_xor_si128(c2[3032],simde_mm_xor_si128(c2[8322],simde_mm_xor_si128(c2[9378],simde_mm_xor_si128(c2[10433],simde_mm_xor_si128(c2[16755],simde_mm_xor_si128(c2[12537],simde_mm_xor_si128(c2[1988],simde_mm_xor_si128(c2[9425],simde_mm_xor_si128(c2[14691],simde_mm_xor_si128(c2[15747],simde_mm_xor_si128(c2[12597],simde_mm_xor_si128(c2[2024],simde_mm_xor_si128(c2[6263],simde_mm_xor_si128(c2[12585],simde_mm_xor_si128(c2[9474],simde_mm_xor_si128(c2[14740],simde_mm_xor_si128(c2[15796],simde_mm_xor_si128(c2[15795],simde_mm_xor_si128(c2[5246],simde_mm_xor_si128(c2[4195],simde_mm_xor_si128(c2[10517],c2[13691])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 12
+     d2[288]=simde_mm_xor_si128(c2[13732],simde_mm_xor_si128(c2[10619],simde_mm_xor_si128(c2[15274],simde_mm_xor_si128(c2[8990],simde_mm_xor_si128(c2[2739],c2[11428])))));
+
+//row: 13
+     d2[312]=simde_mm_xor_si128(c2[5298],simde_mm_xor_si128(c2[6354],simde_mm_xor_si128(c2[15843],simde_mm_xor_si128(c2[12],simde_mm_xor_si128(c2[7408],simde_mm_xor_si128(c2[9523],simde_mm_xor_si128(c2[5328],simde_mm_xor_si128(c2[6384],simde_mm_xor_si128(c2[13790],simde_mm_xor_si128(c2[10612],simde_mm_xor_si128(c2[4322],simde_mm_xor_si128(c2[5378],simde_mm_xor_si128(c2[15939],simde_mm_xor_si128(c2[11731],simde_mm_xor_si128(c2[3334],simde_mm_xor_si128(c2[4390],simde_mm_xor_si128(c2[12819],simde_mm_xor_si128(c2[3335],simde_mm_xor_si128(c2[1211],simde_mm_xor_si128(c2[3377],simde_mm_xor_si128(c2[16051],simde_mm_xor_si128(c2[6544],simde_mm_xor_si128(c2[7642],simde_mm_xor_si128(c2[8698],simde_mm_xor_si128(c2[3416],simde_mm_xor_si128(c2[3423],simde_mm_xor_si128(c2[10860],simde_mm_xor_si128(c2[11916],simde_mm_xor_si128(c2[4521],simde_mm_xor_si128(c2[10859],simde_mm_xor_si128(c2[15139],simde_mm_xor_si128(c2[15135],simde_mm_xor_si128(c2[13016],simde_mm_xor_si128(c2[10912],simde_mm_xor_si128(c2[15187],simde_mm_xor_si128(c2[3561],simde_mm_xor_si128(c2[12002],simde_mm_xor_si128(c2[16290],simde_mm_xor_si128(c2[451],simde_mm_xor_si128(c2[5722],simde_mm_xor_si128(c2[2547],simde_mm_xor_si128(c2[2609],simde_mm_xor_si128(c2[3665],simde_mm_xor_si128(c2[4711],simde_mm_xor_si128(c2[7879],simde_mm_xor_si128(c2[16373],simde_mm_xor_si128(c2[534],simde_mm_xor_si128(c2[11105],simde_mm_xor_si128(c2[4770],simde_mm_xor_si128(c2[3744],simde_mm_xor_si128(c2[4800],simde_mm_xor_si128(c2[10100],simde_mm_xor_si128(c2[11149],simde_mm_xor_si128(c2[8037],simde_mm_xor_si128(c2[9093],simde_mm_xor_si128(c2[12251],simde_mm_xor_si128(c2[9092],simde_mm_xor_si128(c2[13356],simde_mm_xor_si128(c2[12292],simde_mm_xor_si128(c2[8076],simde_mm_xor_si128(c2[10236],simde_mm_xor_si128(c2[11292],simde_mm_xor_si128(c2[7061],simde_mm_xor_si128(c2[9170],simde_mm_xor_si128(c2[13445],simde_mm_xor_si128(c2[14501],simde_mm_xor_si128(c2[6069],simde_mm_xor_si128(c2[3954],simde_mm_xor_si128(c2[6102],simde_mm_xor_si128(c2[7159],simde_mm_xor_si128(c2[16673],simde_mm_xor_si128(c2[5102],simde_mm_xor_si128(c2[6158],simde_mm_xor_si128(c2[4046],simde_mm_xor_si128(c2[2992],simde_mm_xor_si128(c2[7258],simde_mm_xor_si128(c2[8314],simde_mm_xor_si128(c2[15715],simde_mm_xor_si128(c2[924],simde_mm_xor_si128(c2[13651],simde_mm_xor_si128(c2[14707],simde_mm_xor_si128(c2[960],simde_mm_xor_si128(c2[11521],simde_mm_xor_si128(c2[4135],simde_mm_xor_si128(c2[13700],simde_mm_xor_si128(c2[14756],simde_mm_xor_si128(c2[4182],c2[9477])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 14
+     d2[336]=simde_mm_xor_si128(c2[14792],simde_mm_xor_si128(c2[9039],simde_mm_xor_si128(c2[10232],simde_mm_xor_si128(c2[10277],simde_mm_xor_si128(c2[12453],c2[13680])))));
+
+//row: 15
+     d2[360]=simde_mm_xor_si128(c2[1068],simde_mm_xor_si128(c2[10581],simde_mm_xor_si128(c2[11621],simde_mm_xor_si128(c2[1066],simde_mm_xor_si128(c2[2122],simde_mm_xor_si128(c2[1071],simde_mm_xor_si128(c2[1122],simde_mm_xor_si128(c2[8504],simde_mm_xor_si128(c2[4294],simde_mm_xor_si128(c2[5350],simde_mm_xor_si128(c2[2160],simde_mm_xor_si128(c2[116],simde_mm_xor_si128(c2[10677],simde_mm_xor_si128(c2[6445],simde_mm_xor_si128(c2[15999],simde_mm_xor_si128(c2[7557],simde_mm_xor_si128(c2[13888],simde_mm_xor_si128(c2[14944],simde_mm_xor_si128(c2[14986],simde_mm_xor_si128(c2[10765],simde_mm_xor_si128(c2[202],simde_mm_xor_si128(c2[1258],simde_mm_xor_si128(c2[3412],simde_mm_xor_si128(c2[15025],simde_mm_xor_si128(c2[15032],simde_mm_xor_si128(c2[6630],simde_mm_xor_si128(c2[16130],simde_mm_xor_si128(c2[4517],simde_mm_xor_si128(c2[5573],simde_mm_xor_si128(c2[9853],simde_mm_xor_si128(c2[9849],simde_mm_xor_si128(c2[6674],simde_mm_xor_si128(c2[7730],simde_mm_xor_si128(c2[9901],simde_mm_xor_si128(c2[15170],simde_mm_xor_si128(c2[5684],simde_mm_xor_si128(c2[6740],simde_mm_xor_si128(c2[12060],simde_mm_xor_si128(c2[436],simde_mm_xor_si128(c2[14180],simde_mm_xor_si128(c2[15274],simde_mm_xor_si128(c2[16320],simde_mm_xor_si128(c2[1537],simde_mm_xor_si128(c2[2593],simde_mm_xor_si128(c2[2605],simde_mm_xor_si128(c2[12167],simde_mm_xor_si128(c2[5819],simde_mm_xor_si128(c2[15323],simde_mm_xor_si128(c2[16379],simde_mm_xor_si128(c2[16433],simde_mm_xor_si128(c2[4814],simde_mm_xor_si128(c2[4807],simde_mm_xor_si128(c2[5863],simde_mm_xor_si128(c2[3807],simde_mm_xor_si128(c2[6965],simde_mm_xor_si128(c2[2750],simde_mm_xor_si128(c2[3806],simde_mm_xor_si128(c2[15427],simde_mm_xor_si128(c2[8070],simde_mm_xor_si128(c2[7030],simde_mm_xor_si128(c2[1734],simde_mm_xor_si128(c2[2790],simde_mm_xor_si128(c2[6006],simde_mm_xor_si128(c2[1799],simde_mm_xor_si128(c2[3908],simde_mm_xor_si128(c2[9239],simde_mm_xor_si128(c2[783],simde_mm_xor_si128(c2[14507],simde_mm_xor_si128(c2[15563],simde_mm_xor_si128(c2[816],simde_mm_xor_si128(c2[1873],simde_mm_xor_si128(c2[10331],simde_mm_xor_si128(c2[11387],simde_mm_xor_si128(c2[872],simde_mm_xor_si128(c2[15655],simde_mm_xor_si128(c2[13545],simde_mm_xor_si128(c2[14601],simde_mm_xor_si128(c2[8259],simde_mm_xor_si128(c2[3028],simde_mm_xor_si128(c2[10429],simde_mm_xor_si128(c2[12533],simde_mm_xor_si128(c2[9421],simde_mm_xor_si128(c2[12593],simde_mm_xor_si128(c2[5203],simde_mm_xor_si128(c2[6259],simde_mm_xor_si128(c2[9470],simde_mm_xor_si128(c2[15815],simde_mm_xor_si128(c2[3135],c2[4191]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 16
+     d2[384]=simde_mm_xor_si128(c2[12694],simde_mm_xor_si128(c2[5288],simde_mm_xor_si128(c2[6352],simde_mm_xor_si128(c2[13748],simde_mm_xor_si128(c2[12724],simde_mm_xor_si128(c2[3235],simde_mm_xor_si128(c2[57],simde_mm_xor_si128(c2[13776],simde_mm_xor_si128(c2[11718],simde_mm_xor_si128(c2[5384],simde_mm_xor_si128(c2[1152],simde_mm_xor_si128(c2[10706],simde_mm_xor_si128(c2[2264],simde_mm_xor_si128(c2[9651],simde_mm_xor_si128(c2[2277],simde_mm_xor_si128(c2[9717],simde_mm_xor_si128(c2[5472],simde_mm_xor_si128(c2[12884],simde_mm_xor_si128(c2[15038],simde_mm_xor_si128(c2[9756],simde_mm_xor_si128(c2[9763],simde_mm_xor_si128(c2[1361],simde_mm_xor_si128(c2[10861],simde_mm_xor_si128(c2[304],simde_mm_xor_si128(c2[4560],simde_mm_xor_si128(c2[4580],simde_mm_xor_si128(c2[2461],simde_mm_xor_si128(c2[4608],simde_mm_xor_si128(c2[9901],simde_mm_xor_si128(c2[1447],simde_mm_xor_si128(c2[6791],simde_mm_xor_si128(c2[12062],simde_mm_xor_si128(c2[8887],simde_mm_xor_si128(c2[10005],simde_mm_xor_si128(c2[11051],simde_mm_xor_si128(c2[14219],simde_mm_xor_si128(c2[6874],simde_mm_xor_si128(c2[550],simde_mm_xor_si128(c2[11110],simde_mm_xor_si128(c2[10035],simde_mm_xor_si128(c2[11140],simde_mm_xor_si128(c2[16416],simde_mm_xor_si128(c2[594],simde_mm_xor_si128(c2[15409],simde_mm_xor_si128(c2[1696],simde_mm_xor_si128(c2[15408],simde_mm_xor_si128(c2[2801],simde_mm_xor_si128(c2[1737],simde_mm_xor_si128(c2[14416],simde_mm_xor_si128(c2[737],simde_mm_xor_si128(c2[13401],simde_mm_xor_si128(c2[15510],simde_mm_xor_si128(c2[3946],simde_mm_xor_si128(c2[12385],simde_mm_xor_si128(c2[10294],simde_mm_xor_si128(c2[12442],simde_mm_xor_si128(c2[13499],simde_mm_xor_si128(c2[6118],simde_mm_xor_si128(c2[12498],simde_mm_xor_si128(c2[10386],simde_mm_xor_si128(c2[9332],simde_mm_xor_si128(c2[14654],simde_mm_xor_si128(c2[5136],simde_mm_xor_si128(c2[7264],simde_mm_xor_si128(c2[4128],simde_mm_xor_si128(c2[7300],simde_mm_xor_si128(c2[966],simde_mm_xor_si128(c2[2034],simde_mm_xor_si128(c2[4177],simde_mm_xor_si128(c2[10522],c2[15793]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 17
+     d2[408]=simde_mm_xor_si128(c2[4240],simde_mm_xor_si128(c2[16530],simde_mm_xor_si128(c2[1829],simde_mm_xor_si128(c2[7174],c2[8423]))));
+
+//row: 18
+     d2[432]=simde_mm_xor_si128(c2[2168],simde_mm_xor_si128(c2[3754],simde_mm_xor_si128(c2[9089],simde_mm_xor_si128(c2[5096],c2[5136]))));
+
+//row: 19
+     d2[456]=simde_mm_xor_si128(c2[1065],simde_mm_xor_si128(c2[5341],simde_mm_xor_si128(c2[8805],simde_mm_xor_si128(c2[2511],c2[5772]))));
+
+//row: 20
+     d2[480]=simde_mm_xor_si128(c2[14806],simde_mm_xor_si128(c2[7400],simde_mm_xor_si128(c2[8464],simde_mm_xor_si128(c2[15860],simde_mm_xor_si128(c2[11627],simde_mm_xor_si128(c2[14836],simde_mm_xor_si128(c2[5347],simde_mm_xor_si128(c2[2169],simde_mm_xor_si128(c2[13830],simde_mm_xor_si128(c2[7496],simde_mm_xor_si128(c2[3264],simde_mm_xor_si128(c2[12818],simde_mm_xor_si128(c2[4376],simde_mm_xor_si128(c2[11763],simde_mm_xor_si128(c2[14940],simde_mm_xor_si128(c2[11829],simde_mm_xor_si128(c2[7584],simde_mm_xor_si128(c2[14996],simde_mm_xor_si128(c2[255],simde_mm_xor_si128(c2[11868],simde_mm_xor_si128(c2[11875],simde_mm_xor_si128(c2[3473],simde_mm_xor_si128(c2[12973],simde_mm_xor_si128(c2[2416],simde_mm_xor_si128(c2[6672],simde_mm_xor_si128(c2[6692],simde_mm_xor_si128(c2[4573],simde_mm_xor_si128(c2[6720],simde_mm_xor_si128(c2[12013],simde_mm_xor_si128(c2[3559],simde_mm_xor_si128(c2[8903],simde_mm_xor_si128(c2[14174],simde_mm_xor_si128(c2[10999],simde_mm_xor_si128(c2[8896],simde_mm_xor_si128(c2[12117],simde_mm_xor_si128(c2[13163],simde_mm_xor_si128(c2[16331],simde_mm_xor_si128(c2[8986],simde_mm_xor_si128(c2[2662],simde_mm_xor_si128(c2[13222],simde_mm_xor_si128(c2[5829],simde_mm_xor_si128(c2[13252],simde_mm_xor_si128(c2[1633],simde_mm_xor_si128(c2[2706],simde_mm_xor_si128(c2[626],simde_mm_xor_si128(c2[3808],simde_mm_xor_si128(c2[625],simde_mm_xor_si128(c2[4913],simde_mm_xor_si128(c2[3849],simde_mm_xor_si128(c2[16528],simde_mm_xor_si128(c2[2849],simde_mm_xor_si128(c2[15513],simde_mm_xor_si128(c2[727],simde_mm_xor_si128(c2[6058],simde_mm_xor_si128(c2[14497],simde_mm_xor_si128(c2[12406],simde_mm_xor_si128(c2[14554],simde_mm_xor_si128(c2[15611],simde_mm_xor_si128(c2[8230],simde_mm_xor_si128(c2[14610],simde_mm_xor_si128(c2[12498],simde_mm_xor_si128(c2[11444],simde_mm_xor_si128(c2[16766],simde_mm_xor_si128(c2[7248],simde_mm_xor_si128(c2[9376],simde_mm_xor_si128(c2[6240],simde_mm_xor_si128(c2[9412],simde_mm_xor_si128(c2[3078],simde_mm_xor_si128(c2[6289],simde_mm_xor_si128(c2[12634],c2[1010]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 21
+     d2[504]=simde_mm_xor_si128(c2[13788],simde_mm_xor_si128(c2[6582],simde_mm_xor_si128(c2[9236],simde_mm_xor_si128(c2[6253],c2[2070]))));
+
+//row: 22
+     d2[528]=simde_mm_xor_si128(c2[14785],simde_mm_xor_si128(c2[12192],simde_mm_xor_si128(c2[10142],c2[7153])));
+
+//row: 23
+     d2[552]=simde_mm_xor_si128(c2[8497],simde_mm_xor_si128(c2[9605],simde_mm_xor_si128(c2[14211],c2[12481])));
+
+//row: 24
+     d2[576]=simde_mm_xor_si128(c2[13737],simde_mm_xor_si128(c2[6355],simde_mm_xor_si128(c2[7395],simde_mm_xor_si128(c2[14791],simde_mm_xor_si128(c2[10578],simde_mm_xor_si128(c2[13791],simde_mm_xor_si128(c2[4278],simde_mm_xor_si128(c2[1124],simde_mm_xor_si128(c2[12785],simde_mm_xor_si128(c2[6451],simde_mm_xor_si128(c2[2219],simde_mm_xor_si128(c2[11773],simde_mm_xor_si128(c2[3331],simde_mm_xor_si128(c2[10718],simde_mm_xor_si128(c2[14937],simde_mm_xor_si128(c2[10760],simde_mm_xor_si128(c2[6539],simde_mm_xor_si128(c2[13927],simde_mm_xor_si128(c2[11822],simde_mm_xor_si128(c2[16081],simde_mm_xor_si128(c2[10823],simde_mm_xor_si128(c2[10806],simde_mm_xor_si128(c2[2404],simde_mm_xor_si128(c2[11904],simde_mm_xor_si128(c2[1347],simde_mm_xor_si128(c2[5627],simde_mm_xor_si128(c2[5623],simde_mm_xor_si128(c2[3504],simde_mm_xor_si128(c2[5675],simde_mm_xor_si128(c2[10944],simde_mm_xor_si128(c2[2514],simde_mm_xor_si128(c2[7834],simde_mm_xor_si128(c2[13105],simde_mm_xor_si128(c2[9954],simde_mm_xor_si128(c2[11048],simde_mm_xor_si128(c2[12118],simde_mm_xor_si128(c2[15286],simde_mm_xor_si128(c2[7941],simde_mm_xor_si128(c2[1593],simde_mm_xor_si128(c2[12153],simde_mm_xor_si128(c2[3717],simde_mm_xor_si128(c2[12207],simde_mm_xor_si128(c2[588],simde_mm_xor_si128(c2[1637],simde_mm_xor_si128(c2[16476],simde_mm_xor_si128(c2[2739],simde_mm_xor_si128(c2[16475],simde_mm_xor_si128(c2[3844],simde_mm_xor_si128(c2[2804],simde_mm_xor_si128(c2[15459],simde_mm_xor_si128(c2[1780],simde_mm_xor_si128(c2[14468],simde_mm_xor_si128(c2[16577],simde_mm_xor_si128(c2[5013],simde_mm_xor_si128(c2[13452],simde_mm_xor_si128(c2[11337],simde_mm_xor_si128(c2[13509],simde_mm_xor_si128(c2[14566],simde_mm_xor_si128(c2[7161],simde_mm_xor_si128(c2[13541],simde_mm_xor_si128(c2[11429],simde_mm_xor_si128(c2[10375],simde_mm_xor_si128(c2[15697],simde_mm_xor_si128(c2[6203],simde_mm_xor_si128(c2[8307],simde_mm_xor_si128(c2[5195],simde_mm_xor_si128(c2[8367],simde_mm_xor_si128(c2[2033],simde_mm_xor_si128(c2[5244],simde_mm_xor_si128(c2[11589],c2[16860]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 25
+     d2[600]=simde_mm_xor_si128(c2[8500],simde_mm_xor_si128(c2[1345],simde_mm_xor_si128(c2[16199],c2[9139])));
+
+//row: 26
+     d2[624]=simde_mm_xor_si128(c2[7396],simde_mm_xor_si128(c2[1157],simde_mm_xor_si128(c2[12868],c2[9176])));
+
+//row: 27
+     d2[648]=simde_mm_xor_si128(c2[2172],simde_mm_xor_si128(c2[2412],c2[5670]));
+
+//row: 28
+     d2[672]=simde_mm_xor_si128(c2[14797],simde_mm_xor_si128(c2[3361],simde_mm_xor_si128(c2[5151],c2[3137])));
+
+//row: 29
+     d2[696]=simde_mm_xor_si128(c2[2112],simde_mm_xor_si128(c2[11625],simde_mm_xor_si128(c2[12689],simde_mm_xor_si128(c2[2134],simde_mm_xor_si128(c2[3190],simde_mm_xor_si128(c2[2166],simde_mm_xor_si128(c2[9572],simde_mm_xor_si128(c2[5338],simde_mm_xor_si128(c2[6394],simde_mm_xor_si128(c2[12735],simde_mm_xor_si128(c2[1160],simde_mm_xor_si128(c2[11721],simde_mm_xor_si128(c2[7489],simde_mm_xor_si128(c2[148],simde_mm_xor_si128(c2[8601],simde_mm_xor_si128(c2[14932],simde_mm_xor_si128(c2[15988],simde_mm_xor_si128(c2[16054],simde_mm_xor_si128(c2[11809],simde_mm_xor_si128(c2[1270],simde_mm_xor_si128(c2[2326],simde_mm_xor_si128(c2[4480],simde_mm_xor_si128(c2[16093],simde_mm_xor_si128(c2[16100],simde_mm_xor_si128(c2[7698],simde_mm_xor_si128(c2[303],simde_mm_xor_si128(c2[5585],simde_mm_xor_si128(c2[6641],simde_mm_xor_si128(c2[10897],simde_mm_xor_si128(c2[10917],simde_mm_xor_si128(c2[7742],simde_mm_xor_si128(c2[8798],simde_mm_xor_si128(c2[10945],simde_mm_xor_si128(c2[16238],simde_mm_xor_si128(c2[6728],simde_mm_xor_si128(c2[7784],simde_mm_xor_si128(c2[13104],simde_mm_xor_si128(c2[1504],simde_mm_xor_si128(c2[15224],simde_mm_xor_si128(c2[16342],simde_mm_xor_si128(c2[493],simde_mm_xor_si128(c2[2605],simde_mm_xor_si128(c2[3661],simde_mm_xor_si128(c2[13211],simde_mm_xor_si128(c2[6887],simde_mm_xor_si128(c2[16391],simde_mm_xor_si128(c2[528],simde_mm_xor_si128(c2[582],simde_mm_xor_si128(c2[5858],simde_mm_xor_si128(c2[5875],simde_mm_xor_si128(c2[6931],simde_mm_xor_si128(c2[4851],simde_mm_xor_si128(c2[8033],simde_mm_xor_si128(c2[3794],simde_mm_xor_si128(c2[4850],simde_mm_xor_si128(c2[9138],simde_mm_xor_si128(c2[8074],simde_mm_xor_si128(c2[2802],simde_mm_xor_si128(c2[3858],simde_mm_xor_si128(c2[5952],simde_mm_xor_si128(c2[7074],simde_mm_xor_si128(c2[2843],simde_mm_xor_si128(c2[4952],simde_mm_xor_si128(c2[10283],simde_mm_xor_si128(c2[1827],simde_mm_xor_si128(c2[15575],simde_mm_xor_si128(c2[16631],simde_mm_xor_si128(c2[1884],simde_mm_xor_si128(c2[2941],simde_mm_xor_si128(c2[11399],simde_mm_xor_si128(c2[12455],simde_mm_xor_si128(c2[1940],simde_mm_xor_si128(c2[16723],simde_mm_xor_si128(c2[14613],simde_mm_xor_si128(c2[15669],simde_mm_xor_si128(c2[4041],simde_mm_xor_si128(c2[4096],simde_mm_xor_si128(c2[11473],simde_mm_xor_si128(c2[13601],simde_mm_xor_si128(c2[10465],simde_mm_xor_si128(c2[13637],simde_mm_xor_si128(c2[6247],simde_mm_xor_si128(c2[7303],simde_mm_xor_si128(c2[10514],simde_mm_xor_si128(c2[16859],simde_mm_xor_si128(c2[4179],c2[5235]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 30
+     d2[720]=simde_mm_xor_si128(c2[14784],simde_mm_xor_si128(c2[7402],simde_mm_xor_si128(c2[7410],simde_mm_xor_si128(c2[8466],simde_mm_xor_si128(c2[14806],simde_mm_xor_si128(c2[15862],simde_mm_xor_si128(c2[15849],simde_mm_xor_si128(c2[14838],simde_mm_xor_si128(c2[4293],simde_mm_xor_si128(c2[5349],simde_mm_xor_si128(c2[1115],simde_mm_xor_si128(c2[2171],simde_mm_xor_si128(c2[13832],simde_mm_xor_si128(c2[7498],simde_mm_xor_si128(c2[2210],simde_mm_xor_si128(c2[3266],simde_mm_xor_si128(c2[12820],simde_mm_xor_si128(c2[4378],simde_mm_xor_si128(c2[10709],simde_mm_xor_si128(c2[11765],simde_mm_xor_si128(c2[11831],simde_mm_xor_si128(c2[6530],simde_mm_xor_si128(c2[7586],simde_mm_xor_si128(c2[13942],simde_mm_xor_si128(c2[14998],simde_mm_xor_si128(c2[257],simde_mm_xor_si128(c2[11870],simde_mm_xor_si128(c2[10821],simde_mm_xor_si128(c2[11877],simde_mm_xor_si128(c2[3475],simde_mm_xor_si128(c2[11919],simde_mm_xor_si128(c2[12975],simde_mm_xor_si128(c2[1362],simde_mm_xor_si128(c2[2418],simde_mm_xor_si128(c2[6674],simde_mm_xor_si128(c2[5638],simde_mm_xor_si128(c2[6694],simde_mm_xor_si128(c2[3519],simde_mm_xor_si128(c2[4575],simde_mm_xor_si128(c2[6722],simde_mm_xor_si128(c2[10959],simde_mm_xor_si128(c2[12015],simde_mm_xor_si128(c2[2505],simde_mm_xor_si128(c2[3561],simde_mm_xor_si128(c2[8881],simde_mm_xor_si128(c2[14176],simde_mm_xor_si128(c2[9945],simde_mm_xor_si128(c2[11001],simde_mm_xor_si128(c2[12119],simde_mm_xor_si128(c2[12109],simde_mm_xor_si128(c2[13165],simde_mm_xor_si128(c2[15277],simde_mm_xor_si128(c2[16333],simde_mm_xor_si128(c2[5774],simde_mm_xor_si128(c2[8988],simde_mm_xor_si128(c2[2640],simde_mm_xor_si128(c2[12144],simde_mm_xor_si128(c2[13200],simde_mm_xor_si128(c2[13254],simde_mm_xor_si128(c2[1635],simde_mm_xor_si128(c2[1652],simde_mm_xor_si128(c2[2708],simde_mm_xor_si128(c2[628],simde_mm_xor_si128(c2[2754],simde_mm_xor_si128(c2[3810],simde_mm_xor_si128(c2[16466],simde_mm_xor_si128(c2[627],simde_mm_xor_si128(c2[4864],simde_mm_xor_si128(c2[4915],simde_mm_xor_si128(c2[2795],simde_mm_xor_si128(c2[3851],simde_mm_xor_si128(c2[15474],simde_mm_xor_si128(c2[16530],simde_mm_xor_si128(c2[2851],simde_mm_xor_si128(c2[15515],simde_mm_xor_si128(c2[16568],simde_mm_xor_si128(c2[729],simde_mm_xor_si128(c2[6060],simde_mm_xor_si128(c2[14499],simde_mm_xor_si128(c2[11328],simde_mm_xor_si128(c2[12384],simde_mm_xor_si128(c2[14556],simde_mm_xor_si128(c2[14557],simde_mm_xor_si128(c2[15613],simde_mm_xor_si128(c2[7152],simde_mm_xor_si128(c2[8208],simde_mm_xor_si128(c2[14612],simde_mm_xor_si128(c2[11444],simde_mm_xor_si128(c2[12500],simde_mm_xor_si128(c2[10390],simde_mm_xor_si128(c2[11446],simde_mm_xor_si128(c2[16768],simde_mm_xor_si128(c2[7250],simde_mm_xor_si128(c2[8322],simde_mm_xor_si128(c2[9378],simde_mm_xor_si128(c2[6242],simde_mm_xor_si128(c2[8358],simde_mm_xor_si128(c2[9414],simde_mm_xor_si128(c2[2024],simde_mm_xor_si128(c2[3080],simde_mm_xor_si128(c2[6291],simde_mm_xor_si128(c2[12636],simde_mm_xor_si128(c2[16851],c2[1012])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 31
+     d2[744]=simde_mm_xor_si128(c2[5291],simde_mm_xor_si128(c2[4235],simde_mm_xor_si128(c2[14804],simde_mm_xor_si128(c2[13748],simde_mm_xor_si128(c2[15844],simde_mm_xor_si128(c2[14788],simde_mm_xor_si128(c2[6345],simde_mm_xor_si128(c2[4233],simde_mm_xor_si128(c2[5289],simde_mm_xor_si128(c2[5345],simde_mm_xor_si128(c2[4289],simde_mm_xor_si128(c2[12727],simde_mm_xor_si128(c2[11671],simde_mm_xor_si128(c2[9573],simde_mm_xor_si128(c2[7461],simde_mm_xor_si128(c2[8517],simde_mm_xor_si128(c2[4278],simde_mm_xor_si128(c2[4339],simde_mm_xor_si128(c2[3283],simde_mm_xor_si128(c2[14900],simde_mm_xor_si128(c2[13844],simde_mm_xor_si128(c2[10668],simde_mm_xor_si128(c2[9612],simde_mm_xor_si128(c2[3327],simde_mm_xor_si128(c2[2271],simde_mm_xor_si128(c2[11780],simde_mm_xor_si128(c2[10724],simde_mm_xor_si128(c2[2272],simde_mm_xor_si128(c2[160],simde_mm_xor_si128(c2[1216],simde_mm_xor_si128(c2[2314],simde_mm_xor_si128(c2[1258],simde_mm_xor_si128(c2[14988],simde_mm_xor_si128(c2[13932],simde_mm_xor_si128(c2[5481],simde_mm_xor_si128(c2[3369],simde_mm_xor_si128(c2[4425],simde_mm_xor_si128(c2[7635],simde_mm_xor_si128(c2[6579],simde_mm_xor_si128(c2[2353],simde_mm_xor_si128(c2[1297],simde_mm_xor_si128(c2[2360],simde_mm_xor_si128(c2[1304],simde_mm_xor_si128(c2[10853],simde_mm_xor_si128(c2[9797],simde_mm_xor_si128(c2[3458],simde_mm_xor_si128(c2[2402],simde_mm_xor_si128(c2[9796],simde_mm_xor_si128(c2[7684],simde_mm_xor_si128(c2[8740],simde_mm_xor_si128(c2[14076],simde_mm_xor_si128(c2[13020],simde_mm_xor_si128(c2[14072],simde_mm_xor_si128(c2[13016],simde_mm_xor_si128(c2[11953],simde_mm_xor_si128(c2[9841],simde_mm_xor_si128(c2[10897],simde_mm_xor_si128(c2[7741],simde_mm_xor_si128(c2[14124],simde_mm_xor_si128(c2[13068],simde_mm_xor_si128(c2[2498],simde_mm_xor_si128(c2[1442],simde_mm_xor_si128(c2[10963],simde_mm_xor_si128(c2[8851],simde_mm_xor_si128(c2[9907],simde_mm_xor_si128(c2[16283],simde_mm_xor_si128(c2[15227],simde_mm_xor_si128(c2[4659],simde_mm_xor_si128(c2[3603],simde_mm_xor_si128(c2[1508],simde_mm_xor_si128(c2[452],simde_mm_xor_si128(c2[2602],simde_mm_xor_si128(c2[1546],simde_mm_xor_si128(c2[3648],simde_mm_xor_si128(c2[2592],simde_mm_xor_si128(c2[6816],simde_mm_xor_si128(c2[4704],simde_mm_xor_si128(c2[5760],simde_mm_xor_si128(c2[16390],simde_mm_xor_si128(c2[15334],simde_mm_xor_si128(c2[10042],simde_mm_xor_si128(c2[8986],simde_mm_xor_si128(c2[3707],simde_mm_xor_si128(c2[1595],simde_mm_xor_si128(c2[2651],simde_mm_xor_si128(c2[3761],simde_mm_xor_si128(c2[2705],simde_mm_xor_si128(c2[9037],simde_mm_xor_si128(c2[7981],simde_mm_xor_si128(c2[10086],simde_mm_xor_si128(c2[7974],simde_mm_xor_si128(c2[9030],simde_mm_xor_si128(c2[8030],simde_mm_xor_si128(c2[6974],simde_mm_xor_si128(c2[11188],simde_mm_xor_si128(c2[10132],simde_mm_xor_si128(c2[8029],simde_mm_xor_si128(c2[5917],simde_mm_xor_si128(c2[6973],simde_mm_xor_si128(c2[12293],simde_mm_xor_si128(c2[11237],simde_mm_xor_si128(c2[11253],simde_mm_xor_si128(c2[10197],simde_mm_xor_si128(c2[7013],simde_mm_xor_si128(c2[4901],simde_mm_xor_si128(c2[5957],simde_mm_xor_si128(c2[10229],simde_mm_xor_si128(c2[9173],simde_mm_xor_si128(c2[6022],simde_mm_xor_si128(c2[4966],simde_mm_xor_si128(c2[8131],simde_mm_xor_si128(c2[7075],simde_mm_xor_si128(c2[13462],simde_mm_xor_si128(c2[12406],simde_mm_xor_si128(c2[5006],simde_mm_xor_si128(c2[3950],simde_mm_xor_si128(c2[2891],simde_mm_xor_si128(c2[779],simde_mm_xor_si128(c2[1835],simde_mm_xor_si128(c2[5063],simde_mm_xor_si128(c2[4007],simde_mm_xor_si128(c2[6096],simde_mm_xor_si128(c2[5040],simde_mm_xor_si128(c2[15610],simde_mm_xor_si128(c2[13498],simde_mm_xor_si128(c2[14554],simde_mm_xor_si128(c2[5095],simde_mm_xor_si128(c2[4039],simde_mm_xor_si128(c2[2983],simde_mm_xor_si128(c2[1927],simde_mm_xor_si128(c2[1929],simde_mm_xor_si128(c2[16712],simde_mm_xor_si128(c2[873],simde_mm_xor_si128(c2[7251],simde_mm_xor_si128(c2[6195],simde_mm_xor_si128(c2[14652],simde_mm_xor_si128(c2[13596],simde_mm_xor_si128(c2[16756],simde_mm_xor_si128(c2[15700],simde_mm_xor_si128(c2[13644],simde_mm_xor_si128(c2[12588],simde_mm_xor_si128(c2[16816],simde_mm_xor_si128(c2[15760],simde_mm_xor_si128(c2[10482],simde_mm_xor_si128(c2[8370],simde_mm_xor_si128(c2[9426],simde_mm_xor_si128(c2[13693],simde_mm_xor_si128(c2[12637],simde_mm_xor_si128(c2[3143],simde_mm_xor_si128(c2[2087],simde_mm_xor_si128(c2[8414],simde_mm_xor_si128(c2[6302],c2[7358]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 32
+     d2[768]=simde_mm_xor_si128(c2[3181],simde_mm_xor_si128(c2[12694],simde_mm_xor_si128(c2[12678],simde_mm_xor_si128(c2[13734],simde_mm_xor_si128(c2[3179],simde_mm_xor_si128(c2[4235],simde_mm_xor_si128(c2[6342],simde_mm_xor_si128(c2[3235],simde_mm_xor_si128(c2[9561],simde_mm_xor_si128(c2[10617],simde_mm_xor_si128(c2[6407],simde_mm_xor_si128(c2[7463],simde_mm_xor_si128(c2[2229],simde_mm_xor_si128(c2[12790],simde_mm_xor_si128(c2[7502],simde_mm_xor_si128(c2[8558],simde_mm_xor_si128(c2[1217],simde_mm_xor_si128(c2[9670],simde_mm_xor_si128(c2[16001],simde_mm_xor_si128(c2[162],simde_mm_xor_si128(c2[204],simde_mm_xor_si128(c2[11822],simde_mm_xor_si128(c2[12878],simde_mm_xor_si128(c2[2315],simde_mm_xor_si128(c2[3371],simde_mm_xor_si128(c2[5525],simde_mm_xor_si128(c2[243],simde_mm_xor_si128(c2[16089],simde_mm_xor_si128(c2[250],simde_mm_xor_si128(c2[8743],simde_mm_xor_si128(c2[292],simde_mm_xor_si128(c2[1348],simde_mm_xor_si128(c2[6630],simde_mm_xor_si128(c2[7686],simde_mm_xor_si128(c2[11966],simde_mm_xor_si128(c2[10906],simde_mm_xor_si128(c2[11962],simde_mm_xor_si128(c2[8787],simde_mm_xor_si128(c2[9843],simde_mm_xor_si128(c2[12014],simde_mm_xor_si128(c2[16227],simde_mm_xor_si128(c2[388],simde_mm_xor_si128(c2[7797],simde_mm_xor_si128(c2[8853],simde_mm_xor_si128(c2[14173],simde_mm_xor_si128(c2[2549],simde_mm_xor_si128(c2[15237],simde_mm_xor_si128(c2[16293],simde_mm_xor_si128(c2[492],simde_mm_xor_si128(c2[482],simde_mm_xor_si128(c2[1538],simde_mm_xor_si128(c2[3650],simde_mm_xor_si128(c2[4706],simde_mm_xor_si128(c2[14256],simde_mm_xor_si128(c2[7932],simde_mm_xor_si128(c2[541],simde_mm_xor_si128(c2[1597],simde_mm_xor_si128(c2[1651],simde_mm_xor_si128(c2[6927],simde_mm_xor_si128(c2[6920],simde_mm_xor_si128(c2[7976],simde_mm_xor_si128(c2[10092],simde_mm_xor_si128(c2[5920],simde_mm_xor_si128(c2[8022],simde_mm_xor_si128(c2[9078],simde_mm_xor_si128(c2[4863],simde_mm_xor_si128(c2[5919],simde_mm_xor_si128(c2[10183],simde_mm_xor_si128(c2[8087],simde_mm_xor_si128(c2[9143],simde_mm_xor_si128(c2[3847],simde_mm_xor_si128(c2[4903],simde_mm_xor_si128(c2[16522],simde_mm_xor_si128(c2[8119],simde_mm_xor_si128(c2[3888],simde_mm_xor_si128(c2[4965],simde_mm_xor_si128(c2[6021],simde_mm_xor_si128(c2[11328],simde_mm_xor_si128(c2[2896],simde_mm_xor_si128(c2[16620],simde_mm_xor_si128(c2[781],simde_mm_xor_si128(c2[2929],simde_mm_xor_si128(c2[2930],simde_mm_xor_si128(c2[3986],simde_mm_xor_si128(c2[12444],simde_mm_xor_si128(c2[13500],simde_mm_xor_si128(c2[2985],simde_mm_xor_si128(c2[16712],simde_mm_xor_si128(c2[873],simde_mm_xor_si128(c2[15658],simde_mm_xor_si128(c2[16714],simde_mm_xor_si128(c2[5141],simde_mm_xor_si128(c2[12542],simde_mm_xor_si128(c2[13590],simde_mm_xor_si128(c2[14646],simde_mm_xor_si128(c2[11534],simde_mm_xor_si128(c2[13650],simde_mm_xor_si128(c2[14706],simde_mm_xor_si128(c2[7316],simde_mm_xor_si128(c2[8372],simde_mm_xor_si128(c2[11583],simde_mm_xor_si128(c2[1009],simde_mm_xor_si128(c2[5248],c2[6304])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 33
+     d2[792]=simde_mm_xor_si128(c2[3236],simde_mm_xor_si128(c2[8544],simde_mm_xor_si128(c2[10054],c2[10518])));
+
+//row: 34
+     d2[816]=simde_mm_xor_si128(c2[6350],simde_mm_xor_si128(c2[4569],simde_mm_xor_si128(c2[11292],c2[9283])));
+
+//row: 35
+     d2[840]=simde_mm_xor_si128(c2[13739],simde_mm_xor_si128(c2[6357],simde_mm_xor_si128(c2[7397],simde_mm_xor_si128(c2[14793],simde_mm_xor_si128(c2[13793],simde_mm_xor_si128(c2[4280],simde_mm_xor_si128(c2[1126],simde_mm_xor_si128(c2[68],simde_mm_xor_si128(c2[12787],simde_mm_xor_si128(c2[6453],simde_mm_xor_si128(c2[2221],simde_mm_xor_si128(c2[11775],simde_mm_xor_si128(c2[3333],simde_mm_xor_si128(c2[10720],simde_mm_xor_si128(c2[10762],simde_mm_xor_si128(c2[6541],simde_mm_xor_si128(c2[13929],simde_mm_xor_si128(c2[16083],simde_mm_xor_si128(c2[10801],simde_mm_xor_si128(c2[10808],simde_mm_xor_si128(c2[2406],simde_mm_xor_si128(c2[11906],simde_mm_xor_si128(c2[1349],simde_mm_xor_si128(c2[16148],simde_mm_xor_si128(c2[5629],simde_mm_xor_si128(c2[5625],simde_mm_xor_si128(c2[3506],simde_mm_xor_si128(c2[5677],simde_mm_xor_si128(c2[10946],simde_mm_xor_si128(c2[2516],simde_mm_xor_si128(c2[7836],simde_mm_xor_si128(c2[13107],simde_mm_xor_si128(c2[9956],simde_mm_xor_si128(c2[11050],simde_mm_xor_si128(c2[12096],simde_mm_xor_si128(c2[15264],simde_mm_xor_si128(c2[7943],simde_mm_xor_si128(c2[1595],simde_mm_xor_si128(c2[12155],simde_mm_xor_si128(c2[12209],simde_mm_xor_si128(c2[590],simde_mm_xor_si128(c2[1639],simde_mm_xor_si128(c2[2688],simde_mm_xor_si128(c2[16478],simde_mm_xor_si128(c2[2741],simde_mm_xor_si128(c2[16477],simde_mm_xor_si128(c2[3846],simde_mm_xor_si128(c2[2806],simde_mm_xor_si128(c2[15461],simde_mm_xor_si128(c2[1782],simde_mm_xor_si128(c2[14470],simde_mm_xor_si128(c2[16579],simde_mm_xor_si128(c2[5015],simde_mm_xor_si128(c2[13454],simde_mm_xor_si128(c2[11339],simde_mm_xor_si128(c2[13511],simde_mm_xor_si128(c2[14544],simde_mm_xor_si128(c2[7163],simde_mm_xor_si128(c2[13543],simde_mm_xor_si128(c2[11431],simde_mm_xor_si128(c2[10377],simde_mm_xor_si128(c2[15699],simde_mm_xor_si128(c2[6205],simde_mm_xor_si128(c2[8309],simde_mm_xor_si128(c2[5197],simde_mm_xor_si128(c2[8369],simde_mm_xor_si128(c2[2035],simde_mm_xor_si128(c2[5246],simde_mm_xor_si128(c2[11591],c2[16862])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 36
+     d2[864]=simde_mm_xor_si128(c2[2125],simde_mm_xor_si128(c2[10195],simde_mm_xor_si128(c2[10242],c2[6145])));
+
+//row: 37
+     d2[888]=simde_mm_xor_si128(c2[6338],simde_mm_xor_si128(c2[7394],simde_mm_xor_si128(c2[12],simde_mm_xor_si128(c2[1076],simde_mm_xor_si128(c2[8448],simde_mm_xor_si128(c2[6392],simde_mm_xor_si128(c2[7448],simde_mm_xor_si128(c2[14854],simde_mm_xor_si128(c2[11676],simde_mm_xor_si128(c2[13792],simde_mm_xor_si128(c2[5386],simde_mm_xor_si128(c2[6442],simde_mm_xor_si128(c2[108],simde_mm_xor_si128(c2[12771],simde_mm_xor_si128(c2[4374],simde_mm_xor_si128(c2[5430],simde_mm_xor_si128(c2[13883],simde_mm_xor_si128(c2[4375],simde_mm_xor_si128(c2[4417],simde_mm_xor_si128(c2[196],simde_mm_xor_si128(c2[7584],simde_mm_xor_si128(c2[8706],simde_mm_xor_si128(c2[9762],simde_mm_xor_si128(c2[4480],simde_mm_xor_si128(c2[4487],simde_mm_xor_si128(c2[11924],simde_mm_xor_si128(c2[12980],simde_mm_xor_si128(c2[5585],simde_mm_xor_si128(c2[11923],simde_mm_xor_si128(c2[16179],simde_mm_xor_si128(c2[16199],simde_mm_xor_si128(c2[14080],simde_mm_xor_si128(c2[16227],simde_mm_xor_si128(c2[4625],simde_mm_xor_si128(c2[13066],simde_mm_xor_si128(c2[435],simde_mm_xor_si128(c2[1491],simde_mm_xor_si128(c2[6786],simde_mm_xor_si128(c2[3611],simde_mm_xor_si128(c2[3649],simde_mm_xor_si128(c2[4705],simde_mm_xor_si128(c2[5775],simde_mm_xor_si128(c2[8943],simde_mm_xor_si128(c2[542],simde_mm_xor_si128(c2[1598],simde_mm_xor_si128(c2[12145],simde_mm_xor_si128(c2[5810],simde_mm_xor_si128(c2[4808],simde_mm_xor_si128(c2[5864],simde_mm_xor_si128(c2[11140],simde_mm_xor_si128(c2[12213],simde_mm_xor_si128(c2[9077],simde_mm_xor_si128(c2[10133],simde_mm_xor_si128(c2[13315],simde_mm_xor_si128(c2[10132],simde_mm_xor_si128(c2[2741],simde_mm_xor_si128(c2[14420],simde_mm_xor_si128(c2[13356],simde_mm_xor_si128(c2[9140],simde_mm_xor_si128(c2[11300],simde_mm_xor_si128(c2[12356],simde_mm_xor_si128(c2[8125],simde_mm_xor_si128(c2[10234],simde_mm_xor_si128(c2[14509],simde_mm_xor_si128(c2[15565],simde_mm_xor_si128(c2[7109],simde_mm_xor_si128(c2[4994],simde_mm_xor_si128(c2[7166],simde_mm_xor_si128(c2[8223],simde_mm_xor_si128(c2[818],simde_mm_xor_si128(c2[6166],simde_mm_xor_si128(c2[7222],simde_mm_xor_si128(c2[5110],simde_mm_xor_si128(c2[4032],simde_mm_xor_si128(c2[8322],simde_mm_xor_si128(c2[9378],simde_mm_xor_si128(c2[16755],simde_mm_xor_si128(c2[1988],simde_mm_xor_si128(c2[14691],simde_mm_xor_si128(c2[15747],simde_mm_xor_si128(c2[2024],simde_mm_xor_si128(c2[12585],simde_mm_xor_si128(c2[14740],simde_mm_xor_si128(c2[15796],simde_mm_xor_si128(c2[5246],c2[10517])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 38
+     d2[912]=simde_mm_xor_si128(c2[9515],simde_mm_xor_si128(c2[1499],simde_mm_xor_si128(c2[1554],c2[6925])));
+
+//row: 39
+     d2[936]=simde_mm_xor_si128(c2[2176],simde_mm_xor_si128(c2[13877],simde_mm_xor_si128(c2[10917],c2[10434])));
+
+//row: 40
+     d2[960]=simde_mm_xor_si128(c2[15850],simde_mm_xor_si128(c2[5666],c2[9283]));
+
+//row: 41
+     d2[984]=simde_mm_xor_si128(c2[4275],simde_mm_xor_si128(c2[10723],simde_mm_xor_si128(c2[12056],c2[882])));
+
+//row: 42
+     d2[1008]=simde_mm_xor_si128(c2[14792],simde_mm_xor_si128(c2[7410],simde_mm_xor_si128(c2[7394],simde_mm_xor_si128(c2[8450],simde_mm_xor_si128(c2[14790],simde_mm_xor_si128(c2[15846],simde_mm_xor_si128(c2[1063],simde_mm_xor_si128(c2[14846],simde_mm_xor_si128(c2[4277],simde_mm_xor_si128(c2[5333],simde_mm_xor_si128(c2[1123],simde_mm_xor_si128(c2[2179],simde_mm_xor_si128(c2[13840],simde_mm_xor_si128(c2[7506],simde_mm_xor_si128(c2[2218],simde_mm_xor_si128(c2[3274],simde_mm_xor_si128(c2[12828],simde_mm_xor_si128(c2[4386],simde_mm_xor_si128(c2[10717],simde_mm_xor_si128(c2[11773],simde_mm_xor_si128(c2[11815],simde_mm_xor_si128(c2[6538],simde_mm_xor_si128(c2[7594],simde_mm_xor_si128(c2[13926],simde_mm_xor_si128(c2[14982],simde_mm_xor_si128(c2[14976],simde_mm_xor_si128(c2[241],simde_mm_xor_si128(c2[11878],simde_mm_xor_si128(c2[10805],simde_mm_xor_si128(c2[11861],simde_mm_xor_si128(c2[3459],simde_mm_xor_si128(c2[11927],simde_mm_xor_si128(c2[12983],simde_mm_xor_si128(c2[1346],simde_mm_xor_si128(c2[2402],simde_mm_xor_si128(c2[6682],simde_mm_xor_si128(c2[5622],simde_mm_xor_si128(c2[6678],simde_mm_xor_si128(c2[3527],simde_mm_xor_si128(c2[4583],simde_mm_xor_si128(c2[6730],simde_mm_xor_si128(c2[10967],simde_mm_xor_si128(c2[12023],simde_mm_xor_si128(c2[2513],simde_mm_xor_si128(c2[3569],simde_mm_xor_si128(c2[8889],simde_mm_xor_si128(c2[14160],simde_mm_xor_si128(c2[9953],simde_mm_xor_si128(c2[11009],simde_mm_xor_si128(c2[12103],simde_mm_xor_si128(c2[12117],simde_mm_xor_si128(c2[13173],simde_mm_xor_si128(c2[15285],simde_mm_xor_si128(c2[16341],simde_mm_xor_si128(c2[8996],simde_mm_xor_si128(c2[2648],simde_mm_xor_si128(c2[12152],simde_mm_xor_si128(c2[13208],simde_mm_xor_si128(c2[13262],simde_mm_xor_si128(c2[1643],simde_mm_xor_si128(c2[1636],simde_mm_xor_si128(c2[2692],simde_mm_xor_si128(c2[636],simde_mm_xor_si128(c2[2738],simde_mm_xor_si128(c2[3794],simde_mm_xor_si128(c2[16474],simde_mm_xor_si128(c2[635],simde_mm_xor_si128(c2[4899],simde_mm_xor_si128(c2[2803],simde_mm_xor_si128(c2[3859],simde_mm_xor_si128(c2[15458],simde_mm_xor_si128(c2[16514],simde_mm_xor_si128(c2[2835],simde_mm_xor_si128(c2[15523],simde_mm_xor_si128(c2[16576],simde_mm_xor_si128(c2[737],simde_mm_xor_si128(c2[6068],simde_mm_xor_si128(c2[14507],simde_mm_xor_si128(c2[11336],simde_mm_xor_si128(c2[12392],simde_mm_xor_si128(c2[14564],simde_mm_xor_si128(c2[14565],simde_mm_xor_si128(c2[15621],simde_mm_xor_si128(c2[7160],simde_mm_xor_si128(c2[8216],simde_mm_xor_si128(c2[14596],simde_mm_xor_si128(c2[11428],simde_mm_xor_si128(c2[12484],simde_mm_xor_si128(c2[10374],simde_mm_xor_si128(c2[11430],simde_mm_xor_si128(c2[16752],simde_mm_xor_si128(c2[7258],simde_mm_xor_si128(c2[8306],simde_mm_xor_si128(c2[9362],simde_mm_xor_si128(c2[6250],simde_mm_xor_si128(c2[8366],simde_mm_xor_si128(c2[9422],simde_mm_xor_si128(c2[2032],simde_mm_xor_si128(c2[3088],simde_mm_xor_si128(c2[6299],simde_mm_xor_si128(c2[12644],simde_mm_xor_si128(c2[16859],c2[1020]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 43
+     d2[1032]=simde_mm_xor_si128(c2[12677],simde_mm_xor_si128(c2[5295],simde_mm_xor_si128(c2[6359],simde_mm_xor_si128(c2[12675],simde_mm_xor_si128(c2[13731],simde_mm_xor_si128(c2[12731],simde_mm_xor_si128(c2[3218],simde_mm_xor_si128(c2[15903],simde_mm_xor_si128(c2[64],simde_mm_xor_si128(c2[1111],simde_mm_xor_si128(c2[11725],simde_mm_xor_si128(c2[5391],simde_mm_xor_si128(c2[1159],simde_mm_xor_si128(c2[10713],simde_mm_xor_si128(c2[2271],simde_mm_xor_si128(c2[8602],simde_mm_xor_si128(c2[9658],simde_mm_xor_si128(c2[9700],simde_mm_xor_si128(c2[5479],simde_mm_xor_si128(c2[11811],simde_mm_xor_si128(c2[12867],simde_mm_xor_si128(c2[15045],simde_mm_xor_si128(c2[9763],simde_mm_xor_si128(c2[9746],simde_mm_xor_si128(c2[1344],simde_mm_xor_si128(c2[10868],simde_mm_xor_si128(c2[16150],simde_mm_xor_si128(c2[311],simde_mm_xor_si128(c2[4567],simde_mm_xor_si128(c2[4563],simde_mm_xor_si128(c2[1412],simde_mm_xor_si128(c2[2468],simde_mm_xor_si128(c2[4615],simde_mm_xor_si128(c2[9908],simde_mm_xor_si128(c2[398],simde_mm_xor_si128(c2[1454],simde_mm_xor_si128(c2[6774],simde_mm_xor_si128(c2[12069],simde_mm_xor_si128(c2[8894],simde_mm_xor_si128(c2[9988],simde_mm_xor_si128(c2[11058],simde_mm_xor_si128(c2[13170],simde_mm_xor_si128(c2[14226],simde_mm_xor_si128(c2[6881],simde_mm_xor_si128(c2[533],simde_mm_xor_si128(c2[10037],simde_mm_xor_si128(c2[11093],simde_mm_xor_si128(c2[11147],simde_mm_xor_si128(c2[16423],simde_mm_xor_si128(c2[16416],simde_mm_xor_si128(c2[577],simde_mm_xor_si128(c2[15416],simde_mm_xor_si128(c2[1703],simde_mm_xor_si128(c2[14359],simde_mm_xor_si128(c2[15415],simde_mm_xor_si128(c2[2784],simde_mm_xor_si128(c2[1744],simde_mm_xor_si128(c2[13367],simde_mm_xor_si128(c2[14423],simde_mm_xor_si128(c2[720],simde_mm_xor_si128(c2[13408],simde_mm_xor_si128(c2[15517],simde_mm_xor_si128(c2[3953],simde_mm_xor_si128(c2[12392],simde_mm_xor_si128(c2[9221],simde_mm_xor_si128(c2[10277],simde_mm_xor_si128(c2[5000],simde_mm_xor_si128(c2[12449],simde_mm_xor_si128(c2[13506],simde_mm_xor_si128(c2[5045],simde_mm_xor_si128(c2[6101],simde_mm_xor_si128(c2[12481],simde_mm_xor_si128(c2[10369],simde_mm_xor_si128(c2[8259],simde_mm_xor_si128(c2[9315],simde_mm_xor_si128(c2[2983],simde_mm_xor_si128(c2[14661],simde_mm_xor_si128(c2[5143],simde_mm_xor_si128(c2[7271],simde_mm_xor_si128(c2[4135],simde_mm_xor_si128(c2[7307],simde_mm_xor_si128(c2[16812],simde_mm_xor_si128(c2[973],simde_mm_xor_si128(c2[4184],simde_mm_xor_si128(c2[10529],simde_mm_xor_si128(c2[14744],c2[15800]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 44
+     d2[1056]=simde_mm_xor_si128(c2[5292],simde_mm_xor_si128(c2[14805],simde_mm_xor_si128(c2[15845],simde_mm_xor_si128(c2[6346],simde_mm_xor_si128(c2[5],simde_mm_xor_si128(c2[5346],simde_mm_xor_si128(c2[12728],simde_mm_xor_si128(c2[9574],simde_mm_xor_si128(c2[4340],simde_mm_xor_si128(c2[14901],simde_mm_xor_si128(c2[10669],simde_mm_xor_si128(c2[3328],simde_mm_xor_si128(c2[11781],simde_mm_xor_si128(c2[2273],simde_mm_xor_si128(c2[2315],simde_mm_xor_si128(c2[14989],simde_mm_xor_si128(c2[5482],simde_mm_xor_si128(c2[7636],simde_mm_xor_si128(c2[2354],simde_mm_xor_si128(c2[2361],simde_mm_xor_si128(c2[10854],simde_mm_xor_si128(c2[3459],simde_mm_xor_si128(c2[9797],simde_mm_xor_si128(c2[14077],simde_mm_xor_si128(c2[14073],simde_mm_xor_si128(c2[11954],simde_mm_xor_si128(c2[15124],simde_mm_xor_si128(c2[14125],simde_mm_xor_si128(c2[2499],simde_mm_xor_si128(c2[10964],simde_mm_xor_si128(c2[16284],simde_mm_xor_si128(c2[4660],simde_mm_xor_si128(c2[1509],simde_mm_xor_si128(c2[3610],simde_mm_xor_si128(c2[2603],simde_mm_xor_si128(c2[3649],simde_mm_xor_si128(c2[6817],simde_mm_xor_si128(c2[16391],simde_mm_xor_si128(c2[10043],simde_mm_xor_si128(c2[3708],simde_mm_xor_si128(c2[3762],simde_mm_xor_si128(c2[9038],simde_mm_xor_si128(c2[10087],simde_mm_xor_si128(c2[8031],simde_mm_xor_si128(c2[11189],simde_mm_xor_si128(c2[8030],simde_mm_xor_si128(c2[12294],simde_mm_xor_si128(c2[11254],simde_mm_xor_si128(c2[7014],simde_mm_xor_si128(c2[10230],simde_mm_xor_si128(c2[6023],simde_mm_xor_si128(c2[8132],simde_mm_xor_si128(c2[13463],simde_mm_xor_si128(c2[5007],simde_mm_xor_si128(c2[2892],simde_mm_xor_si128(c2[5040],simde_mm_xor_si128(c2[6097],simde_mm_xor_si128(c2[15611],simde_mm_xor_si128(c2[5096],simde_mm_xor_si128(c2[2984],simde_mm_xor_si128(c2[1930],simde_mm_xor_si128(c2[7252],simde_mm_xor_si128(c2[14653],simde_mm_xor_si128(c2[16757],simde_mm_xor_si128(c2[13645],simde_mm_xor_si128(c2[16817],simde_mm_xor_si128(c2[10483],simde_mm_xor_si128(c2[13694],simde_mm_xor_si128(c2[3120],c2[8415])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 45
+     d2[1080]=simde_mm_xor_si128(c2[7448],simde_mm_xor_si128(c2[5577],c2[16320]));
+  }
+}
+#endif
diff --git a/openair1/PHY/CODING/nrLDPC_encoder/ldpc_BG2_Zc104_byte.c b/openair1/PHY/CODING/nrLDPC_encoder/ldpc_BG2_Zc104_byte.c
index 2eea416f89e44092f283fb4c9db9cad93698e12c..7562837edee93ba8c0c92ddf6389542324e41e73 100644
--- a/openair1/PHY/CODING/nrLDPC_encoder/ldpc_BG2_Zc104_byte.c
+++ b/openair1/PHY/CODING/nrLDPC_encoder/ldpc_BG2_Zc104_byte.c
@@ -1,9 +1,14 @@
 #include "PHY/sse_intrin.h"
 // generated code for Zc=104, byte encoding
 static inline __attribute__ ((no_sanitize("address", "undefined"))) void ldpc_BG2_Zc104_byte(uint8_t *c,uint8_t *d) {
-  __m64 *csimd=(__m64 *)c,*dsimd=(__m64 *)d;
+  simde__m64 *csimd=(simde__m64 *)c,*dsimd=(simde__m64 *)d;
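+  // simde__m64 and simde_mm_xor_si64 are SIMDE's portable equivalents of the
+  // MMX __m64 type and the _mm_xor_si64 intrinsic: they compile to the native
+  // instruction on x86 and to an emulated (e.g. NEON or scalar) form elsewhere.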
 
-  __m64 *c2,*d2;
+  simde__m64 *c2,*d2;
 
   int i2;
   for (i2=0; i2<13; i2++) {
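+     // 13 iterations of one 8-byte simde__m64 word cover the Zc=104 lifting
+     // block, presumably one byte per bit position (13 * 8 = 104).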
@@ -11,129 +16,132 @@ static inline __attribute__ ((no_sanitize("address", "undefined"))) void ldpc_BG
      d2=&dsimd[i2];
 
 //row: 0
-     d2[0]=_mm_xor_si64(c2[1564],_mm_xor_si64(c2[262],_mm_xor_si64(c2[1302],_mm_xor_si64(c2[548],_mm_xor_si64(c2[553],_mm_xor_si64(c2[810],_mm_xor_si64(c2[1880],_mm_xor_si64(c2[1877],_mm_xor_si64(c2[1125],_mm_xor_si64(c2[601],_mm_xor_si64(c2[1643],_mm_xor_si64(c2[624],_mm_xor_si64(c2[1673],_mm_xor_si64(c2[366],_mm_xor_si64(c2[1442],_mm_xor_si64(c2[1690],_mm_xor_si64(c2[947],_mm_xor_si64(c2[166],_mm_xor_si64(c2[688],_mm_xor_si64(c2[184],_mm_xor_si64(c2[702],_mm_xor_si64(c2[1509],_mm_xor_si64(c2[991],_mm_xor_si64(c2[1778],_mm_xor_si64(c2[1275],_mm_xor_si64(c2[1023],c2[2060]))))))))))))))))))))))))));
+     d2[0]=simde_mm_xor_si64(c2[1564],simde_mm_xor_si64(c2[262],simde_mm_xor_si64(c2[1302],simde_mm_xor_si64(c2[548],simde_mm_xor_si64(c2[553],simde_mm_xor_si64(c2[810],simde_mm_xor_si64(c2[1880],simde_mm_xor_si64(c2[1877],simde_mm_xor_si64(c2[1125],simde_mm_xor_si64(c2[601],simde_mm_xor_si64(c2[1643],simde_mm_xor_si64(c2[624],simde_mm_xor_si64(c2[1673],simde_mm_xor_si64(c2[366],simde_mm_xor_si64(c2[1442],simde_mm_xor_si64(c2[1690],simde_mm_xor_si64(c2[947],simde_mm_xor_si64(c2[166],simde_mm_xor_si64(c2[688],simde_mm_xor_si64(c2[184],simde_mm_xor_si64(c2[702],simde_mm_xor_si64(c2[1509],simde_mm_xor_si64(c2[991],simde_mm_xor_si64(c2[1778],simde_mm_xor_si64(c2[1275],simde_mm_xor_si64(c2[1023],c2[2060]))))))))))))))))))))))))));
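+     // Each output word d2[k] is the bytewise XOR (GF(2) sum) of the message
+     // words c2[...] selected by one row of the LDPC encoding matrix; the c2
+     // indices encode that row's circular shifts.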
 
 //row: 1
-     d2[13]=_mm_xor_si64(c2[1824],_mm_xor_si64(c2[1564],_mm_xor_si64(c2[262],_mm_xor_si64(c2[1302],_mm_xor_si64(c2[808],_mm_xor_si64(c2[548],_mm_xor_si64(c2[553],_mm_xor_si64(c2[810],_mm_xor_si64(c2[61],_mm_xor_si64(c2[1880],_mm_xor_si64(c2[1877],_mm_xor_si64(c2[1385],_mm_xor_si64(c2[1125],_mm_xor_si64(c2[601],_mm_xor_si64(c2[1643],_mm_xor_si64(c2[624],_mm_xor_si64(c2[1673],_mm_xor_si64(c2[366],_mm_xor_si64(c2[1442],_mm_xor_si64(c2[1690],_mm_xor_si64(c2[1207],_mm_xor_si64(c2[947],_mm_xor_si64(c2[166],_mm_xor_si64(c2[688],_mm_xor_si64(c2[184],_mm_xor_si64(c2[702],_mm_xor_si64(c2[1509],_mm_xor_si64(c2[991],_mm_xor_si64(c2[1778],_mm_xor_si64(c2[1535],_mm_xor_si64(c2[1275],_mm_xor_si64(c2[1023],c2[2060]))))))))))))))))))))))))))))))));
+     d2[13]=simde_mm_xor_si64(c2[1824],simde_mm_xor_si64(c2[1564],simde_mm_xor_si64(c2[262],simde_mm_xor_si64(c2[1302],simde_mm_xor_si64(c2[808],simde_mm_xor_si64(c2[548],simde_mm_xor_si64(c2[553],simde_mm_xor_si64(c2[810],simde_mm_xor_si64(c2[61],simde_mm_xor_si64(c2[1880],simde_mm_xor_si64(c2[1877],simde_mm_xor_si64(c2[1385],simde_mm_xor_si64(c2[1125],simde_mm_xor_si64(c2[601],simde_mm_xor_si64(c2[1643],simde_mm_xor_si64(c2[624],simde_mm_xor_si64(c2[1673],simde_mm_xor_si64(c2[366],simde_mm_xor_si64(c2[1442],simde_mm_xor_si64(c2[1690],simde_mm_xor_si64(c2[1207],simde_mm_xor_si64(c2[947],simde_mm_xor_si64(c2[166],simde_mm_xor_si64(c2[688],simde_mm_xor_si64(c2[184],simde_mm_xor_si64(c2[702],simde_mm_xor_si64(c2[1509],simde_mm_xor_si64(c2[991],simde_mm_xor_si64(c2[1778],simde_mm_xor_si64(c2[1535],simde_mm_xor_si64(c2[1275],simde_mm_xor_si64(c2[1023],c2[2060]))))))))))))))))))))))))))))))));
 
 //row: 2
-     d2[26]=_mm_xor_si64(c2[1824],_mm_xor_si64(c2[1564],_mm_xor_si64(c2[522],_mm_xor_si64(c2[262],_mm_xor_si64(c2[1302],_mm_xor_si64(c2[808],_mm_xor_si64(c2[548],_mm_xor_si64(c2[553],_mm_xor_si64(c2[810],_mm_xor_si64(c2[61],_mm_xor_si64(c2[1880],_mm_xor_si64(c2[1877],_mm_xor_si64(c2[1385],_mm_xor_si64(c2[1125],_mm_xor_si64(c2[861],_mm_xor_si64(c2[601],_mm_xor_si64(c2[1643],_mm_xor_si64(c2[884],_mm_xor_si64(c2[624],_mm_xor_si64(c2[1673],_mm_xor_si64(c2[366],_mm_xor_si64(c2[1702],_mm_xor_si64(c2[1442],_mm_xor_si64(c2[1690],_mm_xor_si64(c2[1207],_mm_xor_si64(c2[947],_mm_xor_si64(c2[426],_mm_xor_si64(c2[166],_mm_xor_si64(c2[688],_mm_xor_si64(c2[444],_mm_xor_si64(c2[184],_mm_xor_si64(c2[702],_mm_xor_si64(c2[1769],_mm_xor_si64(c2[1509],_mm_xor_si64(c2[991],_mm_xor_si64(c2[1778],_mm_xor_si64(c2[1535],_mm_xor_si64(c2[1275],_mm_xor_si64(c2[1283],_mm_xor_si64(c2[1023],c2[2060]))))))))))))))))))))))))))))))))))))))));
+     d2[26]=simde_mm_xor_si64(c2[1824],simde_mm_xor_si64(c2[1564],simde_mm_xor_si64(c2[522],simde_mm_xor_si64(c2[262],simde_mm_xor_si64(c2[1302],simde_mm_xor_si64(c2[808],simde_mm_xor_si64(c2[548],simde_mm_xor_si64(c2[553],simde_mm_xor_si64(c2[810],simde_mm_xor_si64(c2[61],simde_mm_xor_si64(c2[1880],simde_mm_xor_si64(c2[1877],simde_mm_xor_si64(c2[1385],simde_mm_xor_si64(c2[1125],simde_mm_xor_si64(c2[861],simde_mm_xor_si64(c2[601],simde_mm_xor_si64(c2[1643],simde_mm_xor_si64(c2[884],simde_mm_xor_si64(c2[624],simde_mm_xor_si64(c2[1673],simde_mm_xor_si64(c2[366],simde_mm_xor_si64(c2[1702],simde_mm_xor_si64(c2[1442],simde_mm_xor_si64(c2[1690],simde_mm_xor_si64(c2[1207],simde_mm_xor_si64(c2[947],simde_mm_xor_si64(c2[426],simde_mm_xor_si64(c2[166],simde_mm_xor_si64(c2[688],simde_mm_xor_si64(c2[444],simde_mm_xor_si64(c2[184],simde_mm_xor_si64(c2[702],simde_mm_xor_si64(c2[1769],simde_mm_xor_si64(c2[1509],simde_mm_xor_si64(c2[991],simde_mm_xor_si64(c2[1778],simde_mm_xor_si64(c2[1535],simde_mm_xor_si64(c2[1275],simde_mm_xor_si64(c2[1283],simde_mm_xor_si64(c2[1023],c2[2060]))))))))))))))))))))))))))))))))))))))));
 
 //row: 3
-     d2[39]=_mm_xor_si64(c2[1564],_mm_xor_si64(c2[262],_mm_xor_si64(c2[1302],_mm_xor_si64(c2[548],_mm_xor_si64(c2[553],_mm_xor_si64(c2[1070],_mm_xor_si64(c2[810],_mm_xor_si64(c2[1880],_mm_xor_si64(c2[58],_mm_xor_si64(c2[1877],_mm_xor_si64(c2[1125],_mm_xor_si64(c2[601],_mm_xor_si64(c2[1643],_mm_xor_si64(c2[624],_mm_xor_si64(c2[1673],_mm_xor_si64(c2[626],_mm_xor_si64(c2[366],_mm_xor_si64(c2[1442],_mm_xor_si64(c2[1950],_mm_xor_si64(c2[1690],_mm_xor_si64(c2[947],_mm_xor_si64(c2[166],_mm_xor_si64(c2[948],_mm_xor_si64(c2[688],_mm_xor_si64(c2[184],_mm_xor_si64(c2[962],_mm_xor_si64(c2[702],_mm_xor_si64(c2[1509],_mm_xor_si64(c2[991],_mm_xor_si64(c2[2038],_mm_xor_si64(c2[1778],_mm_xor_si64(c2[1275],_mm_xor_si64(c2[1023],_mm_xor_si64(c2[241],c2[2060]))))))))))))))))))))))))))))))))));
+     d2[39]=simde_mm_xor_si64(c2[1564],simde_mm_xor_si64(c2[262],simde_mm_xor_si64(c2[1302],simde_mm_xor_si64(c2[548],simde_mm_xor_si64(c2[553],simde_mm_xor_si64(c2[1070],simde_mm_xor_si64(c2[810],simde_mm_xor_si64(c2[1880],simde_mm_xor_si64(c2[58],simde_mm_xor_si64(c2[1877],simde_mm_xor_si64(c2[1125],simde_mm_xor_si64(c2[601],simde_mm_xor_si64(c2[1643],simde_mm_xor_si64(c2[624],simde_mm_xor_si64(c2[1673],simde_mm_xor_si64(c2[626],simde_mm_xor_si64(c2[366],simde_mm_xor_si64(c2[1442],simde_mm_xor_si64(c2[1950],simde_mm_xor_si64(c2[1690],simde_mm_xor_si64(c2[947],simde_mm_xor_si64(c2[166],simde_mm_xor_si64(c2[948],simde_mm_xor_si64(c2[688],simde_mm_xor_si64(c2[184],simde_mm_xor_si64(c2[962],simde_mm_xor_si64(c2[702],simde_mm_xor_si64(c2[1509],simde_mm_xor_si64(c2[991],simde_mm_xor_si64(c2[2038],simde_mm_xor_si64(c2[1778],simde_mm_xor_si64(c2[1275],simde_mm_xor_si64(c2[1023],simde_mm_xor_si64(c2[241],c2[2060]))))))))))))))))))))))))))))))))));
 
 //row: 4
-     d2[52]=_mm_xor_si64(c2[1046],_mm_xor_si64(c2[786],_mm_xor_si64(c2[1563],_mm_xor_si64(c2[524],_mm_xor_si64(c2[785],_mm_xor_si64(c2[30],_mm_xor_si64(c2[1849],_mm_xor_si64(c2[1854],_mm_xor_si64(c2[32],_mm_xor_si64(c2[809],_mm_xor_si64(c2[1362],_mm_xor_si64(c2[1102],_mm_xor_si64(c2[1099],_mm_xor_si64(c2[607],_mm_xor_si64(c2[347],_mm_xor_si64(c2[1902],_mm_xor_si64(c2[865],_mm_xor_si64(c2[1925],_mm_xor_si64(c2[895],_mm_xor_si64(c2[1667],_mm_xor_si64(c2[651],_mm_xor_si64(c2[912],_mm_xor_si64(c2[416],_mm_xor_si64(c2[156],_mm_xor_si64(c2[1467],_mm_xor_si64(c2[1976],_mm_xor_si64(c2[1485],_mm_xor_si64(c2[2003],_mm_xor_si64(c2[731],_mm_xor_si64(c2[213],_mm_xor_si64(c2[1000],_mm_xor_si64(c2[757],_mm_xor_si64(c2[497],_mm_xor_si64(c2[245],c2[1282]))))))))))))))))))))))))))))))))));
+     d2[52]=simde_mm_xor_si64(c2[1046],simde_mm_xor_si64(c2[786],simde_mm_xor_si64(c2[1563],simde_mm_xor_si64(c2[524],simde_mm_xor_si64(c2[785],simde_mm_xor_si64(c2[30],simde_mm_xor_si64(c2[1849],simde_mm_xor_si64(c2[1854],simde_mm_xor_si64(c2[32],simde_mm_xor_si64(c2[809],simde_mm_xor_si64(c2[1362],simde_mm_xor_si64(c2[1102],simde_mm_xor_si64(c2[1099],simde_mm_xor_si64(c2[607],simde_mm_xor_si64(c2[347],simde_mm_xor_si64(c2[1902],simde_mm_xor_si64(c2[865],simde_mm_xor_si64(c2[1925],simde_mm_xor_si64(c2[895],simde_mm_xor_si64(c2[1667],simde_mm_xor_si64(c2[651],simde_mm_xor_si64(c2[912],simde_mm_xor_si64(c2[416],simde_mm_xor_si64(c2[156],simde_mm_xor_si64(c2[1467],simde_mm_xor_si64(c2[1976],simde_mm_xor_si64(c2[1485],simde_mm_xor_si64(c2[2003],simde_mm_xor_si64(c2[731],simde_mm_xor_si64(c2[213],simde_mm_xor_si64(c2[1000],simde_mm_xor_si64(c2[757],simde_mm_xor_si64(c2[497],simde_mm_xor_si64(c2[245],c2[1282]))))))))))))))))))))))))))))))))));
 
 //row: 5
-     d2[65]=_mm_xor_si64(c2[1312],_mm_xor_si64(c2[1052],_mm_xor_si64(c2[1829],_mm_xor_si64(c2[790],_mm_xor_si64(c2[4],_mm_xor_si64(c2[296],_mm_xor_si64(c2[36],_mm_xor_si64(c2[28],_mm_xor_si64(c2[298],_mm_xor_si64(c2[292],_mm_xor_si64(c2[1615],_mm_xor_si64(c2[1355],_mm_xor_si64(c2[1352],_mm_xor_si64(c2[860],_mm_xor_si64(c2[600],_mm_xor_si64(c2[89],_mm_xor_si64(c2[1118],_mm_xor_si64(c2[112],_mm_xor_si64(c2[1148],_mm_xor_si64(c2[1933],_mm_xor_si64(c2[917],_mm_xor_si64(c2[1178],_mm_xor_si64(c2[1174],_mm_xor_si64(c2[682],_mm_xor_si64(c2[422],_mm_xor_si64(c2[1720],_mm_xor_si64(c2[163],_mm_xor_si64(c2[1751],_mm_xor_si64(c2[190],_mm_xor_si64(c2[1225],_mm_xor_si64(c2[997],_mm_xor_si64(c2[479],_mm_xor_si64(c2[1253],_mm_xor_si64(c2[1023],_mm_xor_si64(c2[763],_mm_xor_si64(c2[498],c2[1535]))))))))))))))))))))))))))))))))))));
+     d2[65]=simde_mm_xor_si64(c2[1312],simde_mm_xor_si64(c2[1052],simde_mm_xor_si64(c2[1829],simde_mm_xor_si64(c2[790],simde_mm_xor_si64(c2[4],simde_mm_xor_si64(c2[296],simde_mm_xor_si64(c2[36],simde_mm_xor_si64(c2[28],simde_mm_xor_si64(c2[298],simde_mm_xor_si64(c2[292],simde_mm_xor_si64(c2[1615],simde_mm_xor_si64(c2[1355],simde_mm_xor_si64(c2[1352],simde_mm_xor_si64(c2[860],simde_mm_xor_si64(c2[600],simde_mm_xor_si64(c2[89],simde_mm_xor_si64(c2[1118],simde_mm_xor_si64(c2[112],simde_mm_xor_si64(c2[1148],simde_mm_xor_si64(c2[1933],simde_mm_xor_si64(c2[917],simde_mm_xor_si64(c2[1178],simde_mm_xor_si64(c2[1174],simde_mm_xor_si64(c2[682],simde_mm_xor_si64(c2[422],simde_mm_xor_si64(c2[1720],simde_mm_xor_si64(c2[163],simde_mm_xor_si64(c2[1751],simde_mm_xor_si64(c2[190],simde_mm_xor_si64(c2[1225],simde_mm_xor_si64(c2[997],simde_mm_xor_si64(c2[479],simde_mm_xor_si64(c2[1253],simde_mm_xor_si64(c2[1023],simde_mm_xor_si64(c2[763],simde_mm_xor_si64(c2[498],c2[1535]))))))))))))))))))))))))))))))))))));
 
 //row: 6
-     d2[78]=_mm_xor_si64(c2[786],_mm_xor_si64(c2[526],_mm_xor_si64(c2[1303],_mm_xor_si64(c2[264],_mm_xor_si64(c2[1820],_mm_xor_si64(c2[1849],_mm_xor_si64(c2[1589],_mm_xor_si64(c2[1594],_mm_xor_si64(c2[1851],_mm_xor_si64(c2[1102],_mm_xor_si64(c2[842],_mm_xor_si64(c2[839],_mm_xor_si64(c2[347],_mm_xor_si64(c2[87],_mm_xor_si64(c2[1642],_mm_xor_si64(c2[605],_mm_xor_si64(c2[1665],_mm_xor_si64(c2[635],_mm_xor_si64(c2[1407],_mm_xor_si64(c2[391],_mm_xor_si64(c2[652],_mm_xor_si64(c2[654],_mm_xor_si64(c2[156],_mm_xor_si64(c2[1988],_mm_xor_si64(c2[1207],_mm_xor_si64(c2[1716],_mm_xor_si64(c2[1225],_mm_xor_si64(c2[1743],_mm_xor_si64(c2[1753],_mm_xor_si64(c2[471],_mm_xor_si64(c2[2032],_mm_xor_si64(c2[740],_mm_xor_si64(c2[497],_mm_xor_si64(c2[237],_mm_xor_si64(c2[2064],_mm_xor_si64(c2[1022],c2[242]))))))))))))))))))))))))))))))))))));
+     d2[78]=simde_mm_xor_si64(c2[786],simde_mm_xor_si64(c2[526],simde_mm_xor_si64(c2[1303],simde_mm_xor_si64(c2[264],simde_mm_xor_si64(c2[1820],simde_mm_xor_si64(c2[1849],simde_mm_xor_si64(c2[1589],simde_mm_xor_si64(c2[1594],simde_mm_xor_si64(c2[1851],simde_mm_xor_si64(c2[1102],simde_mm_xor_si64(c2[842],simde_mm_xor_si64(c2[839],simde_mm_xor_si64(c2[347],simde_mm_xor_si64(c2[87],simde_mm_xor_si64(c2[1642],simde_mm_xor_si64(c2[605],simde_mm_xor_si64(c2[1665],simde_mm_xor_si64(c2[635],simde_mm_xor_si64(c2[1407],simde_mm_xor_si64(c2[391],simde_mm_xor_si64(c2[652],simde_mm_xor_si64(c2[654],simde_mm_xor_si64(c2[156],simde_mm_xor_si64(c2[1988],simde_mm_xor_si64(c2[1207],simde_mm_xor_si64(c2[1716],simde_mm_xor_si64(c2[1225],simde_mm_xor_si64(c2[1743],simde_mm_xor_si64(c2[1753],simde_mm_xor_si64(c2[471],simde_mm_xor_si64(c2[2032],simde_mm_xor_si64(c2[740],simde_mm_xor_si64(c2[497],simde_mm_xor_si64(c2[237],simde_mm_xor_si64(c2[2064],simde_mm_xor_si64(c2[1022],c2[242]))))))))))))))))))))))))))))))))))));
 
 //row: 7
-     d2[91]=_mm_xor_si64(c2[525],_mm_xor_si64(c2[265],_mm_xor_si64(c2[12],_mm_xor_si64(c2[1042],_mm_xor_si64(c2[789],_mm_xor_si64(c2[3],_mm_xor_si64(c2[1829],_mm_xor_si64(c2[1588],_mm_xor_si64(c2[1328],_mm_xor_si64(c2[1075],_mm_xor_si64(c2[1333],_mm_xor_si64(c2[1067],_mm_xor_si64(c2[1590],_mm_xor_si64(c2[1597],_mm_xor_si64(c2[1337],_mm_xor_si64(c2[813],_mm_xor_si64(c2[841],_mm_xor_si64(c2[581],_mm_xor_si64(c2[315],_mm_xor_si64(c2[578],_mm_xor_si64(c2[572],_mm_xor_si64(c2[312],_mm_xor_si64(c2[86],_mm_xor_si64(c2[1905],_mm_xor_si64(c2[1639],_mm_xor_si64(c2[1381],_mm_xor_si64(c2[1128],_mm_xor_si64(c2[344],_mm_xor_si64(c2[78],_mm_xor_si64(c2[1404],_mm_xor_si64(c2[1151],_mm_xor_si64(c2[374],_mm_xor_si64(c2[108],_mm_xor_si64(c2[1146],_mm_xor_si64(c2[1153],_mm_xor_si64(c2[893],_mm_xor_si64(c2[130],_mm_xor_si64(c2[1956],_mm_xor_si64(c2[391],_mm_xor_si64(c2[398],_mm_xor_si64(c2[138],_mm_xor_si64(c2[1699],_mm_xor_si64(c2[1987],_mm_xor_si64(c2[1727],_mm_xor_si64(c2[1461],_mm_xor_si64(c2[946],_mm_xor_si64(c2[680],_mm_xor_si64(c2[1468],_mm_xor_si64(c2[1462],_mm_xor_si64(c2[1202],_mm_xor_si64(c2[964],_mm_xor_si64(c2[711],_mm_xor_si64(c2[1482],_mm_xor_si64(c2[1489],_mm_xor_si64(c2[1229],_mm_xor_si64(c2[2006],_mm_xor_si64(c2[210],_mm_xor_si64(c2[2036],_mm_xor_si64(c2[1771],_mm_xor_si64(c2[1518],_mm_xor_si64(c2[479],_mm_xor_si64(c2[473],_mm_xor_si64(c2[213],_mm_xor_si64(c2[236],_mm_xor_si64(c2[2055],_mm_xor_si64(c2[1802],_mm_xor_si64(c2[1803],_mm_xor_si64(c2[1537],_mm_xor_si64(c2[761],_mm_xor_si64(c2[755],c2[495]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+     d2[91]=simde_mm_xor_si64(c2[525],simde_mm_xor_si64(c2[265],simde_mm_xor_si64(c2[12],simde_mm_xor_si64(c2[1042],simde_mm_xor_si64(c2[789],simde_mm_xor_si64(c2[3],simde_mm_xor_si64(c2[1829],simde_mm_xor_si64(c2[1588],simde_mm_xor_si64(c2[1328],simde_mm_xor_si64(c2[1075],simde_mm_xor_si64(c2[1333],simde_mm_xor_si64(c2[1067],simde_mm_xor_si64(c2[1590],simde_mm_xor_si64(c2[1597],simde_mm_xor_si64(c2[1337],simde_mm_xor_si64(c2[813],simde_mm_xor_si64(c2[841],simde_mm_xor_si64(c2[581],simde_mm_xor_si64(c2[315],simde_mm_xor_si64(c2[578],simde_mm_xor_si64(c2[572],simde_mm_xor_si64(c2[312],simde_mm_xor_si64(c2[86],simde_mm_xor_si64(c2[1905],simde_mm_xor_si64(c2[1639],simde_mm_xor_si64(c2[1381],simde_mm_xor_si64(c2[1128],simde_mm_xor_si64(c2[344],simde_mm_xor_si64(c2[78],simde_mm_xor_si64(c2[1404],simde_mm_xor_si64(c2[1151],simde_mm_xor_si64(c2[374],simde_mm_xor_si64(c2[108],simde_mm_xor_si64(c2[1146],simde_mm_xor_si64(c2[1153],simde_mm_xor_si64(c2[893],simde_mm_xor_si64(c2[130],simde_mm_xor_si64(c2[1956],simde_mm_xor_si64(c2[391],simde_mm_xor_si64(c2[398],simde_mm_xor_si64(c2[138],simde_mm_xor_si64(c2[1699],simde_mm_xor_si64(c2[1987],simde_mm_xor_si64(c2[1727],simde_mm_xor_si64(c2[1461],simde_mm_xor_si64(c2[946],simde_mm_xor_si64(c2[680],simde_mm_xor_si64(c2[1468],simde_mm_xor_si64(c2[1462],simde_mm_xor_si64(c2[1202],simde_mm_xor_si64(c2[964],simde_mm_xor_si64(c2[711],simde_mm_xor_si64(c2[1482],simde_mm_xor_si64(c2[1489],simde_mm_xor_si64(c2[1229],simde_mm_xor_si64(c2[2006],simde_mm_xor_si64(c2[210],simde_mm_xor_si64(c2[2036],simde_mm_xor_si64(c2[1771],simde_mm_xor_si64(c2[1518],simde_mm_xor_si64(c2[479],simde_mm_xor_si64(c2[473],simde_mm_xor_si64(c2[213],simde_mm_xor_si64(c2[236],simde_mm_xor_si64(c2[2055],simde_mm_xor_si64(c2[1802],simde_mm_xor_si64(c2[1803],simde_mm_xor_si64(c2[1537],simde_mm_xor_si64(c2[761],simde_mm_xor_si64(c2[755],c2[495]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
 
 //row: 8
-     d2[104]=_mm_xor_si64(c2[1307],_mm_xor_si64(c2[1047],_mm_xor_si64(c2[5],_mm_xor_si64(c2[1824],_mm_xor_si64(c2[785],_mm_xor_si64(c2[1312],_mm_xor_si64(c2[291],_mm_xor_si64(c2[31],_mm_xor_si64(c2[36],_mm_xor_si64(c2[293],_mm_xor_si64(c2[295],_mm_xor_si64(c2[1623],_mm_xor_si64(c2[1363],_mm_xor_si64(c2[1360],_mm_xor_si64(c2[868],_mm_xor_si64(c2[608],_mm_xor_si64(c2[344],_mm_xor_si64(c2[84],_mm_xor_si64(c2[1126],_mm_xor_si64(c2[367],_mm_xor_si64(c2[107],_mm_xor_si64(c2[1156],_mm_xor_si64(c2[1928],_mm_xor_si64(c2[1172],_mm_xor_si64(c2[912],_mm_xor_si64(c2[1173],_mm_xor_si64(c2[677],_mm_xor_si64(c2[417],_mm_xor_si64(c2[1988],_mm_xor_si64(c2[1728],_mm_xor_si64(c2[158],_mm_xor_si64(c2[2006],_mm_xor_si64(c2[1746],_mm_xor_si64(c2[185],_mm_xor_si64(c2[1252],_mm_xor_si64(c2[992],_mm_xor_si64(c2[474],_mm_xor_si64(c2[1248],_mm_xor_si64(c2[1018],_mm_xor_si64(c2[758],_mm_xor_si64(c2[766],_mm_xor_si64(c2[506],c2[1543]))))))))))))))))))))))))))))))))))))))))));
+     d2[104]=simde_mm_xor_si64(c2[1307],simde_mm_xor_si64(c2[1047],simde_mm_xor_si64(c2[5],simde_mm_xor_si64(c2[1824],simde_mm_xor_si64(c2[785],simde_mm_xor_si64(c2[1312],simde_mm_xor_si64(c2[291],simde_mm_xor_si64(c2[31],simde_mm_xor_si64(c2[36],simde_mm_xor_si64(c2[293],simde_mm_xor_si64(c2[295],simde_mm_xor_si64(c2[1623],simde_mm_xor_si64(c2[1363],simde_mm_xor_si64(c2[1360],simde_mm_xor_si64(c2[868],simde_mm_xor_si64(c2[608],simde_mm_xor_si64(c2[344],simde_mm_xor_si64(c2[84],simde_mm_xor_si64(c2[1126],simde_mm_xor_si64(c2[367],simde_mm_xor_si64(c2[107],simde_mm_xor_si64(c2[1156],simde_mm_xor_si64(c2[1928],simde_mm_xor_si64(c2[1172],simde_mm_xor_si64(c2[912],simde_mm_xor_si64(c2[1173],simde_mm_xor_si64(c2[677],simde_mm_xor_si64(c2[417],simde_mm_xor_si64(c2[1988],simde_mm_xor_si64(c2[1728],simde_mm_xor_si64(c2[158],simde_mm_xor_si64(c2[2006],simde_mm_xor_si64(c2[1746],simde_mm_xor_si64(c2[185],simde_mm_xor_si64(c2[1252],simde_mm_xor_si64(c2[992],simde_mm_xor_si64(c2[474],simde_mm_xor_si64(c2[1248],simde_mm_xor_si64(c2[1018],simde_mm_xor_si64(c2[758],simde_mm_xor_si64(c2[766],simde_mm_xor_si64(c2[506],c2[1543]))))))))))))))))))))))))))))))))))))))))));
 
 //row: 9
-     d2[117]=_mm_xor_si64(c2[784],_mm_xor_si64(c2[2],_mm_xor_si64(c2[1821],_mm_xor_si64(c2[1561],_mm_xor_si64(c2[532],_mm_xor_si64(c2[522],_mm_xor_si64(c2[1572],_mm_xor_si64(c2[1847],_mm_xor_si64(c2[1078],_mm_xor_si64(c2[818],_mm_xor_si64(c2[1852],_mm_xor_si64(c2[810],_mm_xor_si64(c2[30],_mm_xor_si64(c2[1067],_mm_xor_si64(c2[556],_mm_xor_si64(c2[1100],_mm_xor_si64(c2[318],_mm_xor_si64(c2[58],_mm_xor_si64(c2[1097],_mm_xor_si64(c2[55],_mm_xor_si64(c2[345],_mm_xor_si64(c2[1642],_mm_xor_si64(c2[1382],_mm_xor_si64(c2[1900],_mm_xor_si64(c2[858],_mm_xor_si64(c2[863],_mm_xor_si64(c2[1900],_mm_xor_si64(c2[1936],_mm_xor_si64(c2[894],_mm_xor_si64(c2[893],_mm_xor_si64(c2[1930],_mm_xor_si64(c2[1665],_mm_xor_si64(c2[636],_mm_xor_si64(c2[662],_mm_xor_si64(c2[1699],_mm_xor_si64(c2[910],_mm_xor_si64(c2[1960],_mm_xor_si64(c2[167],_mm_xor_si64(c2[1464],_mm_xor_si64(c2[1204],_mm_xor_si64(c2[1465],_mm_xor_si64(c2[423],_mm_xor_si64(c2[1987],_mm_xor_si64(c2[945],_mm_xor_si64(c2[1483],_mm_xor_si64(c2[454],_mm_xor_si64(c2[2014],_mm_xor_si64(c2[972],_mm_xor_si64(c2[729],_mm_xor_si64(c2[1779],_mm_xor_si64(c2[211],_mm_xor_si64(c2[1248],_mm_xor_si64(c2[998],_mm_xor_si64(c2[2035],_mm_xor_si64(c2[991],_mm_xor_si64(c2[495],_mm_xor_si64(c2[1805],_mm_xor_si64(c2[1545],_mm_xor_si64(c2[243],_mm_xor_si64(c2[1280],_mm_xor_si64(c2[1280],c2[238])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+     d2[117]=simde_mm_xor_si64(c2[784],simde_mm_xor_si64(c2[2],simde_mm_xor_si64(c2[1821],simde_mm_xor_si64(c2[1561],simde_mm_xor_si64(c2[532],simde_mm_xor_si64(c2[522],simde_mm_xor_si64(c2[1572],simde_mm_xor_si64(c2[1847],simde_mm_xor_si64(c2[1078],simde_mm_xor_si64(c2[818],simde_mm_xor_si64(c2[1852],simde_mm_xor_si64(c2[810],simde_mm_xor_si64(c2[30],simde_mm_xor_si64(c2[1067],simde_mm_xor_si64(c2[556],simde_mm_xor_si64(c2[1100],simde_mm_xor_si64(c2[318],simde_mm_xor_si64(c2[58],simde_mm_xor_si64(c2[1097],simde_mm_xor_si64(c2[55],simde_mm_xor_si64(c2[345],simde_mm_xor_si64(c2[1642],simde_mm_xor_si64(c2[1382],simde_mm_xor_si64(c2[1900],simde_mm_xor_si64(c2[858],simde_mm_xor_si64(c2[863],simde_mm_xor_si64(c2[1900],simde_mm_xor_si64(c2[1936],simde_mm_xor_si64(c2[894],simde_mm_xor_si64(c2[893],simde_mm_xor_si64(c2[1930],simde_mm_xor_si64(c2[1665],simde_mm_xor_si64(c2[636],simde_mm_xor_si64(c2[662],simde_mm_xor_si64(c2[1699],simde_mm_xor_si64(c2[910],simde_mm_xor_si64(c2[1960],simde_mm_xor_si64(c2[167],simde_mm_xor_si64(c2[1464],simde_mm_xor_si64(c2[1204],simde_mm_xor_si64(c2[1465],simde_mm_xor_si64(c2[423],simde_mm_xor_si64(c2[1987],simde_mm_xor_si64(c2[945],simde_mm_xor_si64(c2[1483],simde_mm_xor_si64(c2[454],simde_mm_xor_si64(c2[2014],simde_mm_xor_si64(c2[972],simde_mm_xor_si64(c2[729],simde_mm_xor_si64(c2[1779],simde_mm_xor_si64(c2[211],simde_mm_xor_si64(c2[1248],simde_mm_xor_si64(c2[998],simde_mm_xor_si64(c2[2035],simde_mm_xor_si64(c2[991],simde_mm_xor_si64(c2[495],simde_mm_xor_si64(c2[1805],simde_mm_xor_si64(c2[1545],simde_mm_xor_si64(c2[243],simde_mm_xor_si64(c2[1280],simde_mm_xor_si64(c2[1280],c2[238])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
 
 //row: 10
-     d2[130]=_mm_xor_si64(c2[1302],_mm_xor_si64(c2[1073],_mm_xor_si64(c2[425],c2[968])));
+     d2[130]=simde_mm_xor_si64(c2[1302],simde_mm_xor_si64(c2[1073],simde_mm_xor_si64(c2[425],c2[968])));
 
 //row: 11
-     d2[143]=_mm_xor_si64(c2[1565],_mm_xor_si64(c2[263],_mm_xor_si64(c2[1303],_mm_xor_si64(c2[1824],_mm_xor_si64(c2[549],_mm_xor_si64(c2[554],_mm_xor_si64(c2[1071],_mm_xor_si64(c2[811],_mm_xor_si64(c2[1881],_mm_xor_si64(c2[59],_mm_xor_si64(c2[1878],_mm_xor_si64(c2[1126],_mm_xor_si64(c2[602],_mm_xor_si64(c2[1644],_mm_xor_si64(c2[625],_mm_xor_si64(c2[1674],_mm_xor_si64(c2[627],_mm_xor_si64(c2[367],_mm_xor_si64(c2[1430],_mm_xor_si64(c2[1951],_mm_xor_si64(c2[1691],_mm_xor_si64(c2[948],_mm_xor_si64(c2[167],_mm_xor_si64(c2[936],_mm_xor_si64(c2[676],_mm_xor_si64(c2[185],_mm_xor_si64(c2[963],_mm_xor_si64(c2[703],_mm_xor_si64(c2[1485],_mm_xor_si64(c2[1510],_mm_xor_si64(c2[992],_mm_xor_si64(c2[2039],_mm_xor_si64(c2[1779],_mm_xor_si64(c2[1276],_mm_xor_si64(c2[1024],_mm_xor_si64(c2[242],_mm_xor_si64(c2[2061],c2[1018])))))))))))))))))))))))))))))))))))));
+     d2[143]=simde_mm_xor_si64(c2[1565],simde_mm_xor_si64(c2[263],simde_mm_xor_si64(c2[1303],simde_mm_xor_si64(c2[1824],simde_mm_xor_si64(c2[549],simde_mm_xor_si64(c2[554],simde_mm_xor_si64(c2[1071],simde_mm_xor_si64(c2[811],simde_mm_xor_si64(c2[1881],simde_mm_xor_si64(c2[59],simde_mm_xor_si64(c2[1878],simde_mm_xor_si64(c2[1126],simde_mm_xor_si64(c2[602],simde_mm_xor_si64(c2[1644],simde_mm_xor_si64(c2[625],simde_mm_xor_si64(c2[1674],simde_mm_xor_si64(c2[627],simde_mm_xor_si64(c2[367],simde_mm_xor_si64(c2[1430],simde_mm_xor_si64(c2[1951],simde_mm_xor_si64(c2[1691],simde_mm_xor_si64(c2[948],simde_mm_xor_si64(c2[167],simde_mm_xor_si64(c2[936],simde_mm_xor_si64(c2[676],simde_mm_xor_si64(c2[185],simde_mm_xor_si64(c2[963],simde_mm_xor_si64(c2[703],simde_mm_xor_si64(c2[1485],simde_mm_xor_si64(c2[1510],simde_mm_xor_si64(c2[992],simde_mm_xor_si64(c2[2039],simde_mm_xor_si64(c2[1779],simde_mm_xor_si64(c2[1276],simde_mm_xor_si64(c2[1024],simde_mm_xor_si64(c2[242],simde_mm_xor_si64(c2[2061],c2[1018])))))))))))))))))))))))))))))))))))));
 
 //row: 12
-     d2[156]=_mm_xor_si64(c2[11],_mm_xor_si64(c2[1830],_mm_xor_si64(c2[528],_mm_xor_si64(c2[1568],_mm_xor_si64(c2[1074],_mm_xor_si64(c2[814],_mm_xor_si64(c2[806],_mm_xor_si64(c2[1076],_mm_xor_si64(c2[548],_mm_xor_si64(c2[314],_mm_xor_si64(c2[54],_mm_xor_si64(c2[64],_mm_xor_si64(c2[1638],_mm_xor_si64(c2[1378],_mm_xor_si64(c2[867],_mm_xor_si64(c2[1909],_mm_xor_si64(c2[864],_mm_xor_si64(c2[890],_mm_xor_si64(c2[1926],_mm_xor_si64(c2[632],_mm_xor_si64(c2[1695],_mm_xor_si64(c2[1956],_mm_xor_si64(c2[1460],_mm_xor_si64(c2[1200],_mm_xor_si64(c2[419],_mm_xor_si64(c2[941],_mm_xor_si64(c2[450],_mm_xor_si64(c2[968],_mm_xor_si64(c2[1775],_mm_xor_si64(c2[1257],_mm_xor_si64(c2[2031],_mm_xor_si64(c2[1801],_mm_xor_si64(c2[1541],_mm_xor_si64(c2[1276],c2[234]))))))))))))))))))))))))))))))))));
+     d2[156]=simde_mm_xor_si64(c2[11],simde_mm_xor_si64(c2[1830],simde_mm_xor_si64(c2[528],simde_mm_xor_si64(c2[1568],simde_mm_xor_si64(c2[1074],simde_mm_xor_si64(c2[814],simde_mm_xor_si64(c2[806],simde_mm_xor_si64(c2[1076],simde_mm_xor_si64(c2[548],simde_mm_xor_si64(c2[314],simde_mm_xor_si64(c2[54],simde_mm_xor_si64(c2[64],simde_mm_xor_si64(c2[1638],simde_mm_xor_si64(c2[1378],simde_mm_xor_si64(c2[867],simde_mm_xor_si64(c2[1909],simde_mm_xor_si64(c2[864],simde_mm_xor_si64(c2[890],simde_mm_xor_si64(c2[1926],simde_mm_xor_si64(c2[632],simde_mm_xor_si64(c2[1695],simde_mm_xor_si64(c2[1956],simde_mm_xor_si64(c2[1460],simde_mm_xor_si64(c2[1200],simde_mm_xor_si64(c2[419],simde_mm_xor_si64(c2[941],simde_mm_xor_si64(c2[450],simde_mm_xor_si64(c2[968],simde_mm_xor_si64(c2[1775],simde_mm_xor_si64(c2[1257],simde_mm_xor_si64(c2[2031],simde_mm_xor_si64(c2[1801],simde_mm_xor_si64(c2[1541],simde_mm_xor_si64(c2[1276],c2[234]))))))))))))))))))))))))))))))))));
 
 //row: 13
-     d2[169]=_mm_xor_si64(c2[2],_mm_xor_si64(c2[792],_mm_xor_si64(c2[1832],_mm_xor_si64(c2[4],_mm_xor_si64(c2[1078],_mm_xor_si64(c2[1070],_mm_xor_si64(c2[1587],_mm_xor_si64(c2[1327],_mm_xor_si64(c2[1332],_mm_xor_si64(c2[318],_mm_xor_si64(c2[575],_mm_xor_si64(c2[315],_mm_xor_si64(c2[1642],_mm_xor_si64(c2[1118],_mm_xor_si64(c2[81],_mm_xor_si64(c2[1154],_mm_xor_si64(c2[111],_mm_xor_si64(c2[1156],_mm_xor_si64(c2[896],_mm_xor_si64(c2[1959],_mm_xor_si64(c2[401],_mm_xor_si64(c2[141],_mm_xor_si64(c2[1464],_mm_xor_si64(c2[683],_mm_xor_si64(c2[1465],_mm_xor_si64(c2[1205],_mm_xor_si64(c2[714],_mm_xor_si64(c2[1492],_mm_xor_si64(c2[1232],_mm_xor_si64(c2[2039],_mm_xor_si64(c2[1508],_mm_xor_si64(c2[476],_mm_xor_si64(c2[216],_mm_xor_si64(c2[2039],_mm_xor_si64(c2[1805],_mm_xor_si64(c2[1540],_mm_xor_si64(c2[758],c2[498])))))))))))))))))))))))))))))))))))));
+     d2[169]=simde_mm_xor_si64(c2[2],simde_mm_xor_si64(c2[792],simde_mm_xor_si64(c2[1832],simde_mm_xor_si64(c2[4],simde_mm_xor_si64(c2[1078],simde_mm_xor_si64(c2[1070],simde_mm_xor_si64(c2[1587],simde_mm_xor_si64(c2[1327],simde_mm_xor_si64(c2[1332],simde_mm_xor_si64(c2[318],simde_mm_xor_si64(c2[575],simde_mm_xor_si64(c2[315],simde_mm_xor_si64(c2[1642],simde_mm_xor_si64(c2[1118],simde_mm_xor_si64(c2[81],simde_mm_xor_si64(c2[1154],simde_mm_xor_si64(c2[111],simde_mm_xor_si64(c2[1156],simde_mm_xor_si64(c2[896],simde_mm_xor_si64(c2[1959],simde_mm_xor_si64(c2[401],simde_mm_xor_si64(c2[141],simde_mm_xor_si64(c2[1464],simde_mm_xor_si64(c2[683],simde_mm_xor_si64(c2[1465],simde_mm_xor_si64(c2[1205],simde_mm_xor_si64(c2[714],simde_mm_xor_si64(c2[1492],simde_mm_xor_si64(c2[1232],simde_mm_xor_si64(c2[2039],simde_mm_xor_si64(c2[1508],simde_mm_xor_si64(c2[476],simde_mm_xor_si64(c2[216],simde_mm_xor_si64(c2[2039],simde_mm_xor_si64(c2[1805],simde_mm_xor_si64(c2[1540],simde_mm_xor_si64(c2[758],c2[498])))))))))))))))))))))))))))))))))))));
 
 //row: 14
-     d2[182]=_mm_xor_si64(c2[791],_mm_xor_si64(c2[531],_mm_xor_si64(c2[785],_mm_xor_si64(c2[1308],_mm_xor_si64(c2[1562],_mm_xor_si64(c2[269],_mm_xor_si64(c2[523],_mm_xor_si64(c2[1854],_mm_xor_si64(c2[1594],_mm_xor_si64(c2[1848],_mm_xor_si64(c2[1586],_mm_xor_si64(c2[1853],_mm_xor_si64(c2[1856],_mm_xor_si64(c2[291],_mm_xor_si64(c2[31],_mm_xor_si64(c2[817],_mm_xor_si64(c2[1094],_mm_xor_si64(c2[834],_mm_xor_si64(c2[1101],_mm_xor_si64(c2[844],_mm_xor_si64(c2[1358],_mm_xor_si64(c2[1098],_mm_xor_si64(c2[339],_mm_xor_si64(c2[79],_mm_xor_si64(c2[346],_mm_xor_si64(c2[1647],_mm_xor_si64(c2[1901],_mm_xor_si64(c2[610],_mm_xor_si64(c2[864],_mm_xor_si64(c2[1670],_mm_xor_si64(c2[1924],_mm_xor_si64(c2[627],_mm_xor_si64(c2[894],_mm_xor_si64(c2[1412],_mm_xor_si64(c2[1926],_mm_xor_si64(c2[1666],_mm_xor_si64(c2[396],_mm_xor_si64(c2[650],_mm_xor_si64(c2[657],_mm_xor_si64(c2[1171],_mm_xor_si64(c2[911],_mm_xor_si64(c2[161],_mm_xor_si64(c2[1980],_mm_xor_si64(c2[168],_mm_xor_si64(c2[1199],_mm_xor_si64(c2[1466],_mm_xor_si64(c2[1721],_mm_xor_si64(c2[156],_mm_xor_si64(c2[1988],_mm_xor_si64(c2[1198],_mm_xor_si64(c2[1230],_mm_xor_si64(c2[1484],_mm_xor_si64(c2[1748],_mm_xor_si64(c2[183],_mm_xor_si64(c2[2002],_mm_xor_si64(c2[476],_mm_xor_si64(c2[730],_mm_xor_si64(c2[2037],_mm_xor_si64(c2[212],_mm_xor_si64(c2[732],_mm_xor_si64(c2[1259],_mm_xor_si64(c2[999],_mm_xor_si64(c2[502],_mm_xor_si64(c2[242],_mm_xor_si64(c2[496],_mm_xor_si64(c2[2056],_mm_xor_si64(c2[244],_mm_xor_si64(c2[1014],_mm_xor_si64(c2[1541],c2[1281])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+     d2[182]=simde_mm_xor_si64(c2[791],simde_mm_xor_si64(c2[531],simde_mm_xor_si64(c2[785],simde_mm_xor_si64(c2[1308],simde_mm_xor_si64(c2[1562],simde_mm_xor_si64(c2[269],simde_mm_xor_si64(c2[523],simde_mm_xor_si64(c2[1854],simde_mm_xor_si64(c2[1594],simde_mm_xor_si64(c2[1848],simde_mm_xor_si64(c2[1586],simde_mm_xor_si64(c2[1853],simde_mm_xor_si64(c2[1856],simde_mm_xor_si64(c2[291],simde_mm_xor_si64(c2[31],simde_mm_xor_si64(c2[817],simde_mm_xor_si64(c2[1094],simde_mm_xor_si64(c2[834],simde_mm_xor_si64(c2[1101],simde_mm_xor_si64(c2[844],simde_mm_xor_si64(c2[1358],simde_mm_xor_si64(c2[1098],simde_mm_xor_si64(c2[339],simde_mm_xor_si64(c2[79],simde_mm_xor_si64(c2[346],simde_mm_xor_si64(c2[1647],simde_mm_xor_si64(c2[1901],simde_mm_xor_si64(c2[610],simde_mm_xor_si64(c2[864],simde_mm_xor_si64(c2[1670],simde_mm_xor_si64(c2[1924],simde_mm_xor_si64(c2[627],simde_mm_xor_si64(c2[894],simde_mm_xor_si64(c2[1412],simde_mm_xor_si64(c2[1926],simde_mm_xor_si64(c2[1666],simde_mm_xor_si64(c2[396],simde_mm_xor_si64(c2[650],simde_mm_xor_si64(c2[657],simde_mm_xor_si64(c2[1171],simde_mm_xor_si64(c2[911],simde_mm_xor_si64(c2[161],simde_mm_xor_si64(c2[1980],simde_mm_xor_si64(c2[168],simde_mm_xor_si64(c2[1199],simde_mm_xor_si64(c2[1466],simde_mm_xor_si64(c2[1721],simde_mm_xor_si64(c2[156],simde_mm_xor_si64(c2[1988],simde_mm_xor_si64(c2[1198],simde_mm_xor_si64(c2[1230],simde_mm_xor_si64(c2[1484],simde_mm_xor_si64(c2[1748],simde_mm_xor_si64(c2[183],simde_mm_xor_si64(c2[2002],simde_mm_xor_si64(c2[476],simde_mm_xor_si64(c2[730],simde_mm_xor_si64(c2[2037],simde_mm_xor_si64(c2[212],simde_mm_xor_si64(c2[732],simde_mm_xor_si64(c2[1259],simde_mm_xor_si64(c2[999],simde_mm_xor_si64(c2[502],simde_mm_xor_si64(c2[242],simde_mm_xor_si64(c2[496],simde_mm_xor_si64(c2[2056],simde_mm_xor_si64(c2[244],simde_mm_xor_si64(c2[1014],simde_mm_xor_si64(c2[1541],c2[1281])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
 
 //row: 15
-     d2[195]=_mm_xor_si64(c2[525],_mm_xor_si64(c2[1304],_mm_xor_si64(c2[1044],_mm_xor_si64(c2[1302],_mm_xor_si64(c2[1821],_mm_xor_si64(c2[263],_mm_xor_si64(c2[782],_mm_xor_si64(c2[1568],_mm_xor_si64(c2[1588],_mm_xor_si64(c2[288],_mm_xor_si64(c2[28],_mm_xor_si64(c2[1593],_mm_xor_si64(c2[33],_mm_xor_si64(c2[1850],_mm_xor_si64(c2[290],_mm_xor_si64(c2[841],_mm_xor_si64(c2[1620],_mm_xor_si64(c2[1360],_mm_xor_si64(c2[838],_mm_xor_si64(c2[1357],_mm_xor_si64(c2[86],_mm_xor_si64(c2[865],_mm_xor_si64(c2[605],_mm_xor_si64(c2[1641],_mm_xor_si64(c2[81],_mm_xor_si64(c2[604],_mm_xor_si64(c2[1123],_mm_xor_si64(c2[1664],_mm_xor_si64(c2[104],_mm_xor_si64(c2[634],_mm_xor_si64(c2[1153],_mm_xor_si64(c2[1406],_mm_xor_si64(c2[1925],_mm_xor_si64(c2[390],_mm_xor_si64(c2[922],_mm_xor_si64(c2[651],_mm_xor_si64(c2[1170],_mm_xor_si64(c2[1987],_mm_xor_si64(c2[687],_mm_xor_si64(c2[427],_mm_xor_si64(c2[1206],_mm_xor_si64(c2[1725],_mm_xor_si64(c2[1728],_mm_xor_si64(c2[168],_mm_xor_si64(c2[1224],_mm_xor_si64(c2[1743],_mm_xor_si64(c2[1742],_mm_xor_si64(c2[182],_mm_xor_si64(c2[470],_mm_xor_si64(c2[989],_mm_xor_si64(c2[2031],_mm_xor_si64(c2[471],_mm_xor_si64(c2[739],_mm_xor_si64(c2[1258],_mm_xor_si64(c2[236],_mm_xor_si64(c2[1015],_mm_xor_si64(c2[755],_mm_xor_si64(c2[2063],_mm_xor_si64(c2[503],_mm_xor_si64(c2[1021],c2[1540]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+     d2[195]=simde_mm_xor_si64(c2[525],simde_mm_xor_si64(c2[1304],simde_mm_xor_si64(c2[1044],simde_mm_xor_si64(c2[1302],simde_mm_xor_si64(c2[1821],simde_mm_xor_si64(c2[263],simde_mm_xor_si64(c2[782],simde_mm_xor_si64(c2[1568],simde_mm_xor_si64(c2[1588],simde_mm_xor_si64(c2[288],simde_mm_xor_si64(c2[28],simde_mm_xor_si64(c2[1593],simde_mm_xor_si64(c2[33],simde_mm_xor_si64(c2[1850],simde_mm_xor_si64(c2[290],simde_mm_xor_si64(c2[841],simde_mm_xor_si64(c2[1620],simde_mm_xor_si64(c2[1360],simde_mm_xor_si64(c2[838],simde_mm_xor_si64(c2[1357],simde_mm_xor_si64(c2[86],simde_mm_xor_si64(c2[865],simde_mm_xor_si64(c2[605],simde_mm_xor_si64(c2[1641],simde_mm_xor_si64(c2[81],simde_mm_xor_si64(c2[604],simde_mm_xor_si64(c2[1123],simde_mm_xor_si64(c2[1664],simde_mm_xor_si64(c2[104],simde_mm_xor_si64(c2[634],simde_mm_xor_si64(c2[1153],simde_mm_xor_si64(c2[1406],simde_mm_xor_si64(c2[1925],simde_mm_xor_si64(c2[390],simde_mm_xor_si64(c2[922],simde_mm_xor_si64(c2[651],simde_mm_xor_si64(c2[1170],simde_mm_xor_si64(c2[1987],simde_mm_xor_si64(c2[687],simde_mm_xor_si64(c2[427],simde_mm_xor_si64(c2[1206],simde_mm_xor_si64(c2[1725],simde_mm_xor_si64(c2[1728],simde_mm_xor_si64(c2[168],simde_mm_xor_si64(c2[1224],simde_mm_xor_si64(c2[1743],simde_mm_xor_si64(c2[1742],simde_mm_xor_si64(c2[182],simde_mm_xor_si64(c2[470],simde_mm_xor_si64(c2[989],simde_mm_xor_si64(c2[2031],simde_mm_xor_si64(c2[471],simde_mm_xor_si64(c2[739],simde_mm_xor_si64(c2[1258],simde_mm_xor_si64(c2[236],simde_mm_xor_si64(c2[1015],simde_mm_xor_si64(c2[755],simde_mm_xor_si64(c2[2063],simde_mm_xor_si64(c2[503],simde_mm_xor_si64(c2[1021],c2[1540]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
 
 //row: 16
-     d2[208]=_mm_xor_si64(c2[1311],_mm_xor_si64(c2[1051],_mm_xor_si64(c2[1820],_mm_xor_si64(c2[1560],_mm_xor_si64(c2[1828],_mm_xor_si64(c2[531],_mm_xor_si64(c2[271],_mm_xor_si64(c2[789],_mm_xor_si64(c2[1311],_mm_xor_si64(c2[295],_mm_xor_si64(c2[35],_mm_xor_si64(c2[817],_mm_xor_si64(c2[557],_mm_xor_si64(c2[27],_mm_xor_si64(c2[549],_mm_xor_si64(c2[297],_mm_xor_si64(c2[806],_mm_xor_si64(c2[1328],_mm_xor_si64(c2[1614],_mm_xor_si64(c2[1354],_mm_xor_si64(c2[57],_mm_xor_si64(c2[1876],_mm_xor_si64(c2[1364],_mm_xor_si64(c2[1873],_mm_xor_si64(c2[859],_mm_xor_si64(c2[599],_mm_xor_si64(c2[1381],_mm_xor_si64(c2[1121],_mm_xor_si64(c2[88],_mm_xor_si64(c2[870],_mm_xor_si64(c2[610],_mm_xor_si64(c2[1130],_mm_xor_si64(c2[1639],_mm_xor_si64(c2[111],_mm_xor_si64(c2[893],_mm_xor_si64(c2[633],_mm_xor_si64(c2[1147],_mm_xor_si64(c2[1669],_mm_xor_si64(c2[1932],_mm_xor_si64(c2[375],_mm_xor_si64(c2[916],_mm_xor_si64(c2[1698],_mm_xor_si64(c2[1438],_mm_xor_si64(c2[1177],_mm_xor_si64(c2[1699],_mm_xor_si64(c2[681],_mm_xor_si64(c2[421],_mm_xor_si64(c2[1203],_mm_xor_si64(c2[943],_mm_xor_si64(c2[1719],_mm_xor_si64(c2[422],_mm_xor_si64(c2[162],_mm_xor_si64(c2[162],_mm_xor_si64(c2[684],_mm_xor_si64(c2[1750],_mm_xor_si64(c2[453],_mm_xor_si64(c2[193],_mm_xor_si64(c2[189],_mm_xor_si64(c2[711],_mm_xor_si64(c2[996],_mm_xor_si64(c2[1778],_mm_xor_si64(c2[1518],_mm_xor_si64(c2[478],_mm_xor_si64(c2[1000],_mm_xor_si64(c2[1252],_mm_xor_si64(c2[1774],_mm_xor_si64(c2[1022],_mm_xor_si64(c2[762],_mm_xor_si64(c2[1544],_mm_xor_si64(c2[1284],_mm_xor_si64(c2[497],_mm_xor_si64(c2[1279],_mm_xor_si64(c2[1019],_mm_xor_si64(c2[1534],_mm_xor_si64(c2[2056],c2[2057])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+     d2[208]=simde_mm_xor_si64(c2[1311],simde_mm_xor_si64(c2[1051],simde_mm_xor_si64(c2[1820],simde_mm_xor_si64(c2[1560],simde_mm_xor_si64(c2[1828],simde_mm_xor_si64(c2[531],simde_mm_xor_si64(c2[271],simde_mm_xor_si64(c2[789],simde_mm_xor_si64(c2[1311],simde_mm_xor_si64(c2[295],simde_mm_xor_si64(c2[35],simde_mm_xor_si64(c2[817],simde_mm_xor_si64(c2[557],simde_mm_xor_si64(c2[27],simde_mm_xor_si64(c2[549],simde_mm_xor_si64(c2[297],simde_mm_xor_si64(c2[806],simde_mm_xor_si64(c2[1328],simde_mm_xor_si64(c2[1614],simde_mm_xor_si64(c2[1354],simde_mm_xor_si64(c2[57],simde_mm_xor_si64(c2[1876],simde_mm_xor_si64(c2[1364],simde_mm_xor_si64(c2[1873],simde_mm_xor_si64(c2[859],simde_mm_xor_si64(c2[599],simde_mm_xor_si64(c2[1381],simde_mm_xor_si64(c2[1121],simde_mm_xor_si64(c2[88],simde_mm_xor_si64(c2[870],simde_mm_xor_si64(c2[610],simde_mm_xor_si64(c2[1130],simde_mm_xor_si64(c2[1639],simde_mm_xor_si64(c2[111],simde_mm_xor_si64(c2[893],simde_mm_xor_si64(c2[633],simde_mm_xor_si64(c2[1147],simde_mm_xor_si64(c2[1669],simde_mm_xor_si64(c2[1932],simde_mm_xor_si64(c2[375],simde_mm_xor_si64(c2[916],simde_mm_xor_si64(c2[1698],simde_mm_xor_si64(c2[1438],simde_mm_xor_si64(c2[1177],simde_mm_xor_si64(c2[1699],simde_mm_xor_si64(c2[681],simde_mm_xor_si64(c2[421],simde_mm_xor_si64(c2[1203],simde_mm_xor_si64(c2[943],simde_mm_xor_si64(c2[1719],simde_mm_xor_si64(c2[422],simde_mm_xor_si64(c2[162],simde_mm_xor_si64(c2[162],simde_mm_xor_si64(c2[684],simde_mm_xor_si64(c2[1750],simde_mm_xor_si64(c2[453],simde_mm_xor_si64(c2[193],simde_mm_xor_si64(c2[189],simde_mm_xor_si64(c2[711],simde_mm_xor_si64(c2[996],simde_mm_xor_si64(c2[1778],simde_mm_xor_si64(c2[1518],simde_mm_xor_si64(c2[478],simde_mm_xor_si64(c2[1000],simde_mm_xor_si64(c2[1252],simde_mm_xor_si64(c2[1774],simde_mm_xor_si64(c2[1022],simde_mm_xor_si64(c2[762],simde_mm_xor_si64(c2[1544],simde_mm_xor_si64(c2[1284],simde_mm_xor_si64(c2[497],simde_mm_xor_si64(c2[1279],simde_mm_xor_si64(c2[1019],simde_mm_xor_si64(c2[1534],simde_mm_xor_si64(c2[2056],c2[2057])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
 
 //row: 17
-     d2[221]=_mm_xor_si64(c2[1309],_mm_xor_si64(c2[1049],_mm_xor_si64(c2[529],_mm_xor_si64(c2[269],_mm_xor_si64(c2[1826],_mm_xor_si64(c2[1306],_mm_xor_si64(c2[1046],_mm_xor_si64(c2[787],_mm_xor_si64(c2[7],_mm_xor_si64(c2[293],_mm_xor_si64(c2[33],_mm_xor_si64(c2[1592],_mm_xor_si64(c2[1332],_mm_xor_si64(c2[38],_mm_xor_si64(c2[1337],_mm_xor_si64(c2[295],_mm_xor_si64(c2[1594],_mm_xor_si64(c2[293],_mm_xor_si64(c2[1612],_mm_xor_si64(c2[1352],_mm_xor_si64(c2[832],_mm_xor_si64(c2[572],_mm_xor_si64(c2[1362],_mm_xor_si64(c2[582],_mm_xor_si64(c2[870],_mm_xor_si64(c2[610],_mm_xor_si64(c2[90],_mm_xor_si64(c2[1909],_mm_xor_si64(c2[86],_mm_xor_si64(c2[1645],_mm_xor_si64(c2[1385],_mm_xor_si64(c2[1128],_mm_xor_si64(c2[348],_mm_xor_si64(c2[109],_mm_xor_si64(c2[1668],_mm_xor_si64(c2[1408],_mm_xor_si64(c2[1145],_mm_xor_si64(c2[365],_mm_xor_si64(c2[1930],_mm_xor_si64(c2[1150],_mm_xor_si64(c2[914],_mm_xor_si64(c2[394],_mm_xor_si64(c2[134],_mm_xor_si64(c2[1175],_mm_xor_si64(c2[395],_mm_xor_si64(c2[402],_mm_xor_si64(c2[679],_mm_xor_si64(c2[419],_mm_xor_si64(c2[1978],_mm_xor_si64(c2[1718],_mm_xor_si64(c2[1717],_mm_xor_si64(c2[1197],_mm_xor_si64(c2[937],_mm_xor_si64(c2[160],_mm_xor_si64(c2[1459],_mm_xor_si64(c2[1748],_mm_xor_si64(c2[1228],_mm_xor_si64(c2[968],_mm_xor_si64(c2[187],_mm_xor_si64(c2[1486],_mm_xor_si64(c2[994],_mm_xor_si64(c2[474],_mm_xor_si64(c2[214],_mm_xor_si64(c2[476],_mm_xor_si64(c2[1775],_mm_xor_si64(c2[1250],_mm_xor_si64(c2[470],_mm_xor_si64(c2[1020],_mm_xor_si64(c2[760],_mm_xor_si64(c2[240],_mm_xor_si64(c2[2059],_mm_xor_si64(c2[495],_mm_xor_si64(c2[2054],_mm_xor_si64(c2[1794],_mm_xor_si64(c2[1545],c2[765])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+     d2[221]=simde_mm_xor_si64(c2[1309],simde_mm_xor_si64(c2[1049],simde_mm_xor_si64(c2[529],simde_mm_xor_si64(c2[269],simde_mm_xor_si64(c2[1826],simde_mm_xor_si64(c2[1306],simde_mm_xor_si64(c2[1046],simde_mm_xor_si64(c2[787],simde_mm_xor_si64(c2[7],simde_mm_xor_si64(c2[293],simde_mm_xor_si64(c2[33],simde_mm_xor_si64(c2[1592],simde_mm_xor_si64(c2[1332],simde_mm_xor_si64(c2[38],simde_mm_xor_si64(c2[1337],simde_mm_xor_si64(c2[295],simde_mm_xor_si64(c2[1594],simde_mm_xor_si64(c2[293],simde_mm_xor_si64(c2[1612],simde_mm_xor_si64(c2[1352],simde_mm_xor_si64(c2[832],simde_mm_xor_si64(c2[572],simde_mm_xor_si64(c2[1362],simde_mm_xor_si64(c2[582],simde_mm_xor_si64(c2[870],simde_mm_xor_si64(c2[610],simde_mm_xor_si64(c2[90],simde_mm_xor_si64(c2[1909],simde_mm_xor_si64(c2[86],simde_mm_xor_si64(c2[1645],simde_mm_xor_si64(c2[1385],simde_mm_xor_si64(c2[1128],simde_mm_xor_si64(c2[348],simde_mm_xor_si64(c2[109],simde_mm_xor_si64(c2[1668],simde_mm_xor_si64(c2[1408],simde_mm_xor_si64(c2[1145],simde_mm_xor_si64(c2[365],simde_mm_xor_si64(c2[1930],simde_mm_xor_si64(c2[1150],simde_mm_xor_si64(c2[914],simde_mm_xor_si64(c2[394],simde_mm_xor_si64(c2[134],simde_mm_xor_si64(c2[1175],simde_mm_xor_si64(c2[395],simde_mm_xor_si64(c2[402],simde_mm_xor_si64(c2[679],simde_mm_xor_si64(c2[419],simde_mm_xor_si64(c2[1978],simde_mm_xor_si64(c2[1718],simde_mm_xor_si64(c2[1717],simde_mm_xor_si64(c2[1197],simde_mm_xor_si64(c2[937],simde_mm_xor_si64(c2[160],simde_mm_xor_si64(c2[1459],simde_mm_xor_si64(c2[1748],simde_mm_xor_si64(c2[1228],simde_mm_xor_si64(c2[968],simde_mm_xor_si64(c2[187],simde_mm_xor_si64(c2[1486],simde_mm_xor_si64(c2[994],simde_mm_xor_si64(c2[474],simde_mm_xor_si64(c2[214],simde_mm_xor_si64(c2[476],simde_mm_xor_si64(c2[1775],simde_mm_xor_si64(c2[1250],simde_mm_xor_si64(c2[470],simde_mm_xor_si64(c2[1020],simde_mm_xor_si64(c2[760],simde_mm_xor_si64(c2[240],simde_mm_xor_si64(c2[2059],simde_mm_xor_si64(c2[495],simde_mm_xor_si64(c2[2054],simde_mm_xor_si64(c2[1794],simde_mm_xor_si64(c2[1545],c2[765])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
 
 //row: 18
-     d2[234]=_mm_xor_si64(c2[263],_mm_xor_si64(c2[948],c2[1226]));
+     d2[234]=simde_mm_xor_si64(c2[263],simde_mm_xor_si64(c2[948],c2[1226]));
 
 //row: 19
-     d2[247]=_mm_xor_si64(c2[531],_mm_xor_si64(c2[1308],_mm_xor_si64(c2[269],_mm_xor_si64(c2[1560],_mm_xor_si64(c2[1594],_mm_xor_si64(c2[1586],_mm_xor_si64(c2[1856],_mm_xor_si64(c2[1068],_mm_xor_si64(c2[834],_mm_xor_si64(c2[844],_mm_xor_si64(c2[79],_mm_xor_si64(c2[1647],_mm_xor_si64(c2[610],_mm_xor_si64(c2[1670],_mm_xor_si64(c2[627],_mm_xor_si64(c2[1412],_mm_xor_si64(c2[396],_mm_xor_si64(c2[657],_mm_xor_si64(c2[1980],_mm_xor_si64(c2[1199],_mm_xor_si64(c2[1721],_mm_xor_si64(c2[1230],_mm_xor_si64(c2[1748],_mm_xor_si64(c2[476],_mm_xor_si64(c2[2037],_mm_xor_si64(c2[732],_mm_xor_si64(c2[242],_mm_xor_si64(c2[2056],c2[1014]))))))))))))))))))))))))))));
+     d2[247]=simde_mm_xor_si64(c2[531],simde_mm_xor_si64(c2[1308],simde_mm_xor_si64(c2[269],simde_mm_xor_si64(c2[1560],simde_mm_xor_si64(c2[1594],simde_mm_xor_si64(c2[1586],simde_mm_xor_si64(c2[1856],simde_mm_xor_si64(c2[1068],simde_mm_xor_si64(c2[834],simde_mm_xor_si64(c2[844],simde_mm_xor_si64(c2[79],simde_mm_xor_si64(c2[1647],simde_mm_xor_si64(c2[610],simde_mm_xor_si64(c2[1670],simde_mm_xor_si64(c2[627],simde_mm_xor_si64(c2[1412],simde_mm_xor_si64(c2[396],simde_mm_xor_si64(c2[657],simde_mm_xor_si64(c2[1980],simde_mm_xor_si64(c2[1199],simde_mm_xor_si64(c2[1721],simde_mm_xor_si64(c2[1230],simde_mm_xor_si64(c2[1748],simde_mm_xor_si64(c2[476],simde_mm_xor_si64(c2[2037],simde_mm_xor_si64(c2[732],simde_mm_xor_si64(c2[242],simde_mm_xor_si64(c2[2056],c2[1014]))))))))))))))))))))))))))));
 
 //row: 20
-     d2[260]=_mm_xor_si64(c2[266],_mm_xor_si64(c2[6],_mm_xor_si64(c2[783],_mm_xor_si64(c2[1823],_mm_xor_si64(c2[1329],_mm_xor_si64(c2[1069],_mm_xor_si64(c2[1074],_mm_xor_si64(c2[1331],_mm_xor_si64(c2[1077],_mm_xor_si64(c2[582],_mm_xor_si64(c2[322],_mm_xor_si64(c2[319],_mm_xor_si64(c2[1906],_mm_xor_si64(c2[1646],_mm_xor_si64(c2[1122],_mm_xor_si64(c2[85],_mm_xor_si64(c2[1145],_mm_xor_si64(c2[115],_mm_xor_si64(c2[887],_mm_xor_si64(c2[888],_mm_xor_si64(c2[1950],_mm_xor_si64(c2[132],_mm_xor_si64(c2[1728],_mm_xor_si64(c2[1468],_mm_xor_si64(c2[687],_mm_xor_si64(c2[1196],_mm_xor_si64(c2[705],_mm_xor_si64(c2[1223],_mm_xor_si64(c2[2030],_mm_xor_si64(c2[1512],_mm_xor_si64(c2[220],_mm_xor_si64(c2[2056],_mm_xor_si64(c2[1796],_mm_xor_si64(c2[1544],c2[502]))))))))))))))))))))))))))))))))));
+     d2[260]=simde_mm_xor_si64(c2[266],simde_mm_xor_si64(c2[6],simde_mm_xor_si64(c2[783],simde_mm_xor_si64(c2[1823],simde_mm_xor_si64(c2[1329],simde_mm_xor_si64(c2[1069],simde_mm_xor_si64(c2[1074],simde_mm_xor_si64(c2[1331],simde_mm_xor_si64(c2[1077],simde_mm_xor_si64(c2[582],simde_mm_xor_si64(c2[322],simde_mm_xor_si64(c2[319],simde_mm_xor_si64(c2[1906],simde_mm_xor_si64(c2[1646],simde_mm_xor_si64(c2[1122],simde_mm_xor_si64(c2[85],simde_mm_xor_si64(c2[1145],simde_mm_xor_si64(c2[115],simde_mm_xor_si64(c2[887],simde_mm_xor_si64(c2[888],simde_mm_xor_si64(c2[1950],simde_mm_xor_si64(c2[132],simde_mm_xor_si64(c2[1728],simde_mm_xor_si64(c2[1468],simde_mm_xor_si64(c2[687],simde_mm_xor_si64(c2[1196],simde_mm_xor_si64(c2[705],simde_mm_xor_si64(c2[1223],simde_mm_xor_si64(c2[2030],simde_mm_xor_si64(c2[1512],simde_mm_xor_si64(c2[220],simde_mm_xor_si64(c2[2056],simde_mm_xor_si64(c2[1796],simde_mm_xor_si64(c2[1544],c2[502]))))))))))))))))))))))))))))))))));
 
 //row: 21
-     d2[273]=_mm_xor_si64(c2[1307],_mm_xor_si64(c2[5],_mm_xor_si64(c2[1045],_mm_xor_si64(c2[521],_mm_xor_si64(c2[291],_mm_xor_si64(c2[296],_mm_xor_si64(c2[813],_mm_xor_si64(c2[553],_mm_xor_si64(c2[1623],_mm_xor_si64(c2[1880],_mm_xor_si64(c2[1620],_mm_xor_si64(c2[868],_mm_xor_si64(c2[344],_mm_xor_si64(c2[1386],_mm_xor_si64(c2[367],_mm_xor_si64(c2[1416],_mm_xor_si64(c2[369],_mm_xor_si64(c2[109],_mm_xor_si64(c2[1172],_mm_xor_si64(c2[1693],_mm_xor_si64(c2[1433],_mm_xor_si64(c2[677],_mm_xor_si64(c2[1988],_mm_xor_si64(c2[678],_mm_xor_si64(c2[418],_mm_xor_si64(c2[2006],_mm_xor_si64(c2[705],_mm_xor_si64(c2[445],_mm_xor_si64(c2[1252],_mm_xor_si64(c2[734],_mm_xor_si64(c2[1768],_mm_xor_si64(c2[1508],_mm_xor_si64(c2[730],_mm_xor_si64(c2[1018],_mm_xor_si64(c2[766],_mm_xor_si64(c2[2063],c2[1803]))))))))))))))))))))))))))))))))))));
+     d2[273]=simde_mm_xor_si64(c2[1307],simde_mm_xor_si64(c2[5],simde_mm_xor_si64(c2[1045],simde_mm_xor_si64(c2[521],simde_mm_xor_si64(c2[291],simde_mm_xor_si64(c2[296],simde_mm_xor_si64(c2[813],simde_mm_xor_si64(c2[553],simde_mm_xor_si64(c2[1623],simde_mm_xor_si64(c2[1880],simde_mm_xor_si64(c2[1620],simde_mm_xor_si64(c2[868],simde_mm_xor_si64(c2[344],simde_mm_xor_si64(c2[1386],simde_mm_xor_si64(c2[367],simde_mm_xor_si64(c2[1416],simde_mm_xor_si64(c2[369],simde_mm_xor_si64(c2[109],simde_mm_xor_si64(c2[1172],simde_mm_xor_si64(c2[1693],simde_mm_xor_si64(c2[1433],simde_mm_xor_si64(c2[677],simde_mm_xor_si64(c2[1988],simde_mm_xor_si64(c2[678],simde_mm_xor_si64(c2[418],simde_mm_xor_si64(c2[2006],simde_mm_xor_si64(c2[705],simde_mm_xor_si64(c2[445],simde_mm_xor_si64(c2[1252],simde_mm_xor_si64(c2[734],simde_mm_xor_si64(c2[1768],simde_mm_xor_si64(c2[1508],simde_mm_xor_si64(c2[730],simde_mm_xor_si64(c2[1018],simde_mm_xor_si64(c2[766],simde_mm_xor_si64(c2[2063],c2[1803]))))))))))))))))))))))))))))))))))));
 
 //row: 22
-     d2[286]=_mm_xor_si64(c2[558],c2[1614]);
+     d2[286]=simde_mm_xor_si64(c2[558],c2[1614]);
 
 //row: 23
-     d2[299]=_mm_xor_si64(c2[1046],_mm_xor_si64(c2[606],c2[1431]));
+     d2[299]=simde_mm_xor_si64(c2[1046],simde_mm_xor_si64(c2[606],c2[1431]));
 
 //row: 24
-     d2[312]=_mm_xor_si64(c2[287],_mm_xor_si64(c2[319],c2[245]));
+     d2[312]=simde_mm_xor_si64(c2[287],simde_mm_xor_si64(c2[319],c2[245]));
 
 //row: 25
-     d2[325]=_mm_xor_si64(c2[1311],c2[661]);
+     d2[325]=simde_mm_xor_si64(c2[1311],c2[661]);
 
 //row: 26
-     d2[338]=_mm_xor_si64(c2[11],_mm_xor_si64(c2[1830],_mm_xor_si64(c2[1831],_mm_xor_si64(c2[788],_mm_xor_si64(c2[528],_mm_xor_si64(c2[529],_mm_xor_si64(c2[1568],_mm_xor_si64(c2[1569],_mm_xor_si64(c2[1074],_mm_xor_si64(c2[814],_mm_xor_si64(c2[815],_mm_xor_si64(c2[806],_mm_xor_si64(c2[807],_mm_xor_si64(c2[1076],_mm_xor_si64(c2[1337],_mm_xor_si64(c2[1077],_mm_xor_si64(c2[314],_mm_xor_si64(c2[54],_mm_xor_si64(c2[55],_mm_xor_si64(c2[64],_mm_xor_si64(c2[312],_mm_xor_si64(c2[52],_mm_xor_si64(c2[1099],_mm_xor_si64(c2[1638],_mm_xor_si64(c2[1378],_mm_xor_si64(c2[1379],_mm_xor_si64(c2[1127],_mm_xor_si64(c2[867],_mm_xor_si64(c2[868],_mm_xor_si64(c2[1909],_mm_xor_si64(c2[1910],_mm_xor_si64(c2[1150],_mm_xor_si64(c2[890],_mm_xor_si64(c2[891],_mm_xor_si64(c2[1926],_mm_xor_si64(c2[1927],_mm_xor_si64(c2[632],_mm_xor_si64(c2[893],_mm_xor_si64(c2[633],_mm_xor_si64(c2[1955],_mm_xor_si64(c2[1695],_mm_xor_si64(c2[1696],_mm_xor_si64(c2[1956],_mm_xor_si64(c2[138],_mm_xor_si64(c2[1957],_mm_xor_si64(c2[1460],_mm_xor_si64(c2[1200],_mm_xor_si64(c2[1201],_mm_xor_si64(c2[679],_mm_xor_si64(c2[419],_mm_xor_si64(c2[420],_mm_xor_si64(c2[941],_mm_xor_si64(c2[1202],_mm_xor_si64(c2[942],_mm_xor_si64(c2[710],_mm_xor_si64(c2[450],_mm_xor_si64(c2[451],_mm_xor_si64(c2[968],_mm_xor_si64(c2[1229],_mm_xor_si64(c2[969],_mm_xor_si64(c2[1230],_mm_xor_si64(c2[2035],_mm_xor_si64(c2[1775],_mm_xor_si64(c2[1776],_mm_xor_si64(c2[1257],_mm_xor_si64(c2[1258],_mm_xor_si64(c2[2031],_mm_xor_si64(c2[213],_mm_xor_si64(c2[2032],_mm_xor_si64(c2[1801],_mm_xor_si64(c2[1541],_mm_xor_si64(c2[1542],_mm_xor_si64(c2[1536],_mm_xor_si64(c2[1276],_mm_xor_si64(c2[1277],_mm_xor_si64(c2[234],_mm_xor_si64(c2[495],c2[235])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+     d2[338]=simde_mm_xor_si64(c2[11],simde_mm_xor_si64(c2[1830],simde_mm_xor_si64(c2[1831],simde_mm_xor_si64(c2[788],simde_mm_xor_si64(c2[528],simde_mm_xor_si64(c2[529],simde_mm_xor_si64(c2[1568],simde_mm_xor_si64(c2[1569],simde_mm_xor_si64(c2[1074],simde_mm_xor_si64(c2[814],simde_mm_xor_si64(c2[815],simde_mm_xor_si64(c2[806],simde_mm_xor_si64(c2[807],simde_mm_xor_si64(c2[1076],simde_mm_xor_si64(c2[1337],simde_mm_xor_si64(c2[1077],simde_mm_xor_si64(c2[314],simde_mm_xor_si64(c2[54],simde_mm_xor_si64(c2[55],simde_mm_xor_si64(c2[64],simde_mm_xor_si64(c2[312],simde_mm_xor_si64(c2[52],simde_mm_xor_si64(c2[1099],simde_mm_xor_si64(c2[1638],simde_mm_xor_si64(c2[1378],simde_mm_xor_si64(c2[1379],simde_mm_xor_si64(c2[1127],simde_mm_xor_si64(c2[867],simde_mm_xor_si64(c2[868],simde_mm_xor_si64(c2[1909],simde_mm_xor_si64(c2[1910],simde_mm_xor_si64(c2[1150],simde_mm_xor_si64(c2[890],simde_mm_xor_si64(c2[891],simde_mm_xor_si64(c2[1926],simde_mm_xor_si64(c2[1927],simde_mm_xor_si64(c2[632],simde_mm_xor_si64(c2[893],simde_mm_xor_si64(c2[633],simde_mm_xor_si64(c2[1955],simde_mm_xor_si64(c2[1695],simde_mm_xor_si64(c2[1696],simde_mm_xor_si64(c2[1956],simde_mm_xor_si64(c2[138],simde_mm_xor_si64(c2[1957],simde_mm_xor_si64(c2[1460],simde_mm_xor_si64(c2[1200],simde_mm_xor_si64(c2[1201],simde_mm_xor_si64(c2[679],simde_mm_xor_si64(c2[419],simde_mm_xor_si64(c2[420],simde_mm_xor_si64(c2[941],simde_mm_xor_si64(c2[1202],simde_mm_xor_si64(c2[942],simde_mm_xor_si64(c2[710],simde_mm_xor_si64(c2[450],simde_mm_xor_si64(c2[451],simde_mm_xor_si64(c2[968],simde_mm_xor_si64(c2[1229],simde_mm_xor_si64(c2[969],simde_mm_xor_si64(c2[1230],simde_mm_xor_si64(c2[2035],simde_mm_xor_si64(c2[1775],simde_mm_xor_si64(c2[1776],simde_mm_xor_si64(c2[1257],simde_mm_xor_si64(c2[1258],simde_mm_xor_si64(c2[2031],simde_mm_xor_si64(c2[213],simde_mm_xor_si64(c2[2032],simde_mm_xor_si64(c2[1801],simde_mm_xor_si64(c2[1541],simde_mm_xor_si64(c2[1542],simde_mm_xor_si64(c2[1536],simde_mm_xor_si64(c2[1276],simde_mm_xor_si64(c2[1277],simde_mm_xor_si64(c2[234],simde_mm_xor_si64(c2[495],c2[235])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
 
 //row: 27
-     d2[351]=_mm_xor_si64(c2[8],c2[427]);
+     d2[351]=simde_mm_xor_si64(c2[8],c2[427]);
 
 //row: 28
-     d2[364]=_mm_xor_si64(c2[1587],_mm_xor_si64(c2[582],c2[1695]));
+     d2[364]=simde_mm_xor_si64(c2[1587],simde_mm_xor_si64(c2[582],c2[1695]));
 
 //row: 29
-     d2[377]=_mm_xor_si64(c2[526],c2[887]);
+     d2[377]=simde_mm_xor_si64(c2[526],c2[887]);
 
 //row: 30
-     d2[390]=_mm_xor_si64(c2[1620],_mm_xor_si64(c2[392],_mm_xor_si64(c2[708],c2[1794])));
+     d2[390]=simde_mm_xor_si64(c2[1620],simde_mm_xor_si64(c2[392],simde_mm_xor_si64(c2[708],c2[1794])));
 
 //row: 31
-     d2[403]=_mm_xor_si64(c2[782],_mm_xor_si64(c2[1572],_mm_xor_si64(c2[520],_mm_xor_si64(c2[1858],_mm_xor_si64(c2[1850],_mm_xor_si64(c2[288],_mm_xor_si64(c2[28],_mm_xor_si64(c2[807],_mm_xor_si64(c2[1098],_mm_xor_si64(c2[1355],_mm_xor_si64(c2[1095],_mm_xor_si64(c2[343],_mm_xor_si64(c2[1898],_mm_xor_si64(c2[861],_mm_xor_si64(c2[1934],_mm_xor_si64(c2[891],_mm_xor_si64(c2[1936],_mm_xor_si64(c2[1676],_mm_xor_si64(c2[660],_mm_xor_si64(c2[1181],_mm_xor_si64(c2[921],_mm_xor_si64(c2[165],_mm_xor_si64(c2[1463],_mm_xor_si64(c2[166],_mm_xor_si64(c2[1985],_mm_xor_si64(c2[1494],_mm_xor_si64(c2[193],_mm_xor_si64(c2[2012],_mm_xor_si64(c2[740],_mm_xor_si64(c2[209],_mm_xor_si64(c2[1256],_mm_xor_si64(c2[996],_mm_xor_si64(c2[506],_mm_xor_si64(c2[241],_mm_xor_si64(c2[1538],c2[1278])))))))))))))))))))))))))))))))))));
+     d2[403]=simde_mm_xor_si64(c2[782],simde_mm_xor_si64(c2[1572],simde_mm_xor_si64(c2[520],simde_mm_xor_si64(c2[1858],simde_mm_xor_si64(c2[1850],simde_mm_xor_si64(c2[288],simde_mm_xor_si64(c2[28],simde_mm_xor_si64(c2[807],simde_mm_xor_si64(c2[1098],simde_mm_xor_si64(c2[1355],simde_mm_xor_si64(c2[1095],simde_mm_xor_si64(c2[343],simde_mm_xor_si64(c2[1898],simde_mm_xor_si64(c2[861],simde_mm_xor_si64(c2[1934],simde_mm_xor_si64(c2[891],simde_mm_xor_si64(c2[1936],simde_mm_xor_si64(c2[1676],simde_mm_xor_si64(c2[660],simde_mm_xor_si64(c2[1181],simde_mm_xor_si64(c2[921],simde_mm_xor_si64(c2[165],simde_mm_xor_si64(c2[1463],simde_mm_xor_si64(c2[166],simde_mm_xor_si64(c2[1985],simde_mm_xor_si64(c2[1494],simde_mm_xor_si64(c2[193],simde_mm_xor_si64(c2[2012],simde_mm_xor_si64(c2[740],simde_mm_xor_si64(c2[209],simde_mm_xor_si64(c2[1256],simde_mm_xor_si64(c2[996],simde_mm_xor_si64(c2[506],simde_mm_xor_si64(c2[241],simde_mm_xor_si64(c2[1538],c2[1278])))))))))))))))))))))))))))))))))));
 
 //row: 32
-     d2[416]=_mm_xor_si64(c2[532],_mm_xor_si64(c2[272],_mm_xor_si64(c2[1309],_mm_xor_si64(c2[1049],_mm_xor_si64(c2[10],_mm_xor_si64(c2[1560],_mm_xor_si64(c2[1595],_mm_xor_si64(c2[1335],_mm_xor_si64(c2[1327],_mm_xor_si64(c2[1597],_mm_xor_si64(c2[835],_mm_xor_si64(c2[575],_mm_xor_si64(c2[572],_mm_xor_si64(c2[80],_mm_xor_si64(c2[1899],_mm_xor_si64(c2[1648],_mm_xor_si64(c2[1388],_mm_xor_si64(c2[338],_mm_xor_si64(c2[1671],_mm_xor_si64(c2[1411],_mm_xor_si64(c2[368],_mm_xor_si64(c2[1153],_mm_xor_si64(c2[397],_mm_xor_si64(c2[137],_mm_xor_si64(c2[398],_mm_xor_si64(c2[130],_mm_xor_si64(c2[1981],_mm_xor_si64(c2[1721],_mm_xor_si64(c2[1200],_mm_xor_si64(c2[940],_mm_xor_si64(c2[1462],_mm_xor_si64(c2[1231],_mm_xor_si64(c2[971],_mm_xor_si64(c2[1489],_mm_xor_si64(c2[477],_mm_xor_si64(c2[217],_mm_xor_si64(c2[1778],_mm_xor_si64(c2[473],_mm_xor_si64(c2[243],_mm_xor_si64(c2[2062],_mm_xor_si64(c2[2057],_mm_xor_si64(c2[1797],c2[755]))))))))))))))))))))))))))))))))))))))))));
+     d2[416]=simde_mm_xor_si64(c2[532],simde_mm_xor_si64(c2[272],simde_mm_xor_si64(c2[1309],simde_mm_xor_si64(c2[1049],simde_mm_xor_si64(c2[10],simde_mm_xor_si64(c2[1560],simde_mm_xor_si64(c2[1595],simde_mm_xor_si64(c2[1335],simde_mm_xor_si64(c2[1327],simde_mm_xor_si64(c2[1597],simde_mm_xor_si64(c2[835],simde_mm_xor_si64(c2[575],simde_mm_xor_si64(c2[572],simde_mm_xor_si64(c2[80],simde_mm_xor_si64(c2[1899],simde_mm_xor_si64(c2[1648],simde_mm_xor_si64(c2[1388],simde_mm_xor_si64(c2[338],simde_mm_xor_si64(c2[1671],simde_mm_xor_si64(c2[1411],simde_mm_xor_si64(c2[368],simde_mm_xor_si64(c2[1153],simde_mm_xor_si64(c2[397],simde_mm_xor_si64(c2[137],simde_mm_xor_si64(c2[398],simde_mm_xor_si64(c2[130],simde_mm_xor_si64(c2[1981],simde_mm_xor_si64(c2[1721],simde_mm_xor_si64(c2[1200],simde_mm_xor_si64(c2[940],simde_mm_xor_si64(c2[1462],simde_mm_xor_si64(c2[1231],simde_mm_xor_si64(c2[971],simde_mm_xor_si64(c2[1489],simde_mm_xor_si64(c2[477],simde_mm_xor_si64(c2[217],simde_mm_xor_si64(c2[1778],simde_mm_xor_si64(c2[473],simde_mm_xor_si64(c2[243],simde_mm_xor_si64(c2[2062],simde_mm_xor_si64(c2[2057],simde_mm_xor_si64(c2[1797],c2[755]))))))))))))))))))))))))))))))))))))))))));
 
 //row: 33
-     d2[429]=_mm_xor_si64(c2[261],_mm_xor_si64(c2[1051],_mm_xor_si64(c2[12],_mm_xor_si64(c2[1337],_mm_xor_si64(c2[1329],_mm_xor_si64(c2[1586],_mm_xor_si64(c2[577],_mm_xor_si64(c2[574],_mm_xor_si64(c2[839],_mm_xor_si64(c2[1901],_mm_xor_si64(c2[1390],_mm_xor_si64(c2[340],_mm_xor_si64(c2[1413],_mm_xor_si64(c2[370],_mm_xor_si64(c2[1155],_mm_xor_si64(c2[139],_mm_xor_si64(c2[400],_mm_xor_si64(c2[1723],_mm_xor_si64(c2[942],_mm_xor_si64(c2[1464],_mm_xor_si64(c2[973],_mm_xor_si64(c2[1491],_mm_xor_si64(c2[1490],_mm_xor_si64(c2[219],_mm_xor_si64(c2[1780],_mm_xor_si64(c2[475],_mm_xor_si64(c2[2064],_mm_xor_si64(c2[1799],c2[757]))))))))))))))))))))))))))));
+     d2[429]=simde_mm_xor_si64(c2[261],simde_mm_xor_si64(c2[1051],simde_mm_xor_si64(c2[12],simde_mm_xor_si64(c2[1337],simde_mm_xor_si64(c2[1329],simde_mm_xor_si64(c2[1586],simde_mm_xor_si64(c2[577],simde_mm_xor_si64(c2[574],simde_mm_xor_si64(c2[839],simde_mm_xor_si64(c2[1901],simde_mm_xor_si64(c2[1390],simde_mm_xor_si64(c2[340],simde_mm_xor_si64(c2[1413],simde_mm_xor_si64(c2[370],simde_mm_xor_si64(c2[1155],simde_mm_xor_si64(c2[139],simde_mm_xor_si64(c2[400],simde_mm_xor_si64(c2[1723],simde_mm_xor_si64(c2[942],simde_mm_xor_si64(c2[1464],simde_mm_xor_si64(c2[973],simde_mm_xor_si64(c2[1491],simde_mm_xor_si64(c2[1490],simde_mm_xor_si64(c2[219],simde_mm_xor_si64(c2[1780],simde_mm_xor_si64(c2[475],simde_mm_xor_si64(c2[2064],simde_mm_xor_si64(c2[1799],c2[757]))))))))))))))))))))))))))));
 
 //row: 34
-     d2[442]=_mm_xor_si64(c2[1562],_mm_xor_si64(c2[1302],_mm_xor_si64(c2[1822],_mm_xor_si64(c2[260],_mm_xor_si64(c2[0],_mm_xor_si64(c2[520],_mm_xor_si64(c2[1040],_mm_xor_si64(c2[1560],_mm_xor_si64(c2[1311],_mm_xor_si64(c2[546],_mm_xor_si64(c2[286],_mm_xor_si64(c2[806],_mm_xor_si64(c2[291],_mm_xor_si64(c2[811],_mm_xor_si64(c2[548],_mm_xor_si64(c2[1328],_mm_xor_si64(c2[1068],_mm_xor_si64(c2[1878],_mm_xor_si64(c2[1618],_mm_xor_si64(c2[59],_mm_xor_si64(c2[1615],_mm_xor_si64(c2[316],_mm_xor_si64(c2[56],_mm_xor_si64(c2[1123],_mm_xor_si64(c2[863],_mm_xor_si64(c2[1383],_mm_xor_si64(c2[599],_mm_xor_si64(c2[339],_mm_xor_si64(c2[859],_mm_xor_si64(c2[1381],_mm_xor_si64(c2[1901],_mm_xor_si64(c2[635],_mm_xor_si64(c2[375],_mm_xor_si64(c2[895],_mm_xor_si64(c2[1411],_mm_xor_si64(c2[1931],_mm_xor_si64(c2[104],_mm_xor_si64(c2[884],_mm_xor_si64(c2[624],_mm_xor_si64(c2[1440],_mm_xor_si64(c2[1180],_mm_xor_si64(c2[1700],_mm_xor_si64(c2[1441],_mm_xor_si64(c2[142],_mm_xor_si64(c2[1961],_mm_xor_si64(c2[945],_mm_xor_si64(c2[685],_mm_xor_si64(c2[1205],_mm_xor_si64(c2[164],_mm_xor_si64(c2[1983],_mm_xor_si64(c2[424],_mm_xor_si64(c2[426],_mm_xor_si64(c2[1206],_mm_xor_si64(c2[946],_mm_xor_si64(c2[182],_mm_xor_si64(c2[2014],_mm_xor_si64(c2[442],_mm_xor_si64(c2[453],_mm_xor_si64(c2[1233],_mm_xor_si64(c2[973],_mm_xor_si64(c2[1520],_mm_xor_si64(c2[1260],_mm_xor_si64(c2[1780],_mm_xor_si64(c2[729],_mm_xor_si64(c2[1249],_mm_xor_si64(c2[1516],_mm_xor_si64(c2[217],_mm_xor_si64(c2[2036],_mm_xor_si64(c2[1286],_mm_xor_si64(c2[1026],_mm_xor_si64(c2[1546],_mm_xor_si64(c2[1021],_mm_xor_si64(c2[761],_mm_xor_si64(c2[1281],_mm_xor_si64(c2[1798],_mm_xor_si64(c2[499],c2[239]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+     d2[442]=simde_mm_xor_si64(c2[1562],simde_mm_xor_si64(c2[1302],simde_mm_xor_si64(c2[1822],simde_mm_xor_si64(c2[260],simde_mm_xor_si64(c2[0],simde_mm_xor_si64(c2[520],simde_mm_xor_si64(c2[1040],simde_mm_xor_si64(c2[1560],simde_mm_xor_si64(c2[1311],simde_mm_xor_si64(c2[546],simde_mm_xor_si64(c2[286],simde_mm_xor_si64(c2[806],simde_mm_xor_si64(c2[291],simde_mm_xor_si64(c2[811],simde_mm_xor_si64(c2[548],simde_mm_xor_si64(c2[1328],simde_mm_xor_si64(c2[1068],simde_mm_xor_si64(c2[1878],simde_mm_xor_si64(c2[1618],simde_mm_xor_si64(c2[59],simde_mm_xor_si64(c2[1615],simde_mm_xor_si64(c2[316],simde_mm_xor_si64(c2[56],simde_mm_xor_si64(c2[1123],simde_mm_xor_si64(c2[863],simde_mm_xor_si64(c2[1383],simde_mm_xor_si64(c2[599],simde_mm_xor_si64(c2[339],simde_mm_xor_si64(c2[859],simde_mm_xor_si64(c2[1381],simde_mm_xor_si64(c2[1901],simde_mm_xor_si64(c2[635],simde_mm_xor_si64(c2[375],simde_mm_xor_si64(c2[895],simde_mm_xor_si64(c2[1411],simde_mm_xor_si64(c2[1931],simde_mm_xor_si64(c2[104],simde_mm_xor_si64(c2[884],simde_mm_xor_si64(c2[624],simde_mm_xor_si64(c2[1440],simde_mm_xor_si64(c2[1180],simde_mm_xor_si64(c2[1700],simde_mm_xor_si64(c2[1441],simde_mm_xor_si64(c2[142],simde_mm_xor_si64(c2[1961],simde_mm_xor_si64(c2[945],simde_mm_xor_si64(c2[685],simde_mm_xor_si64(c2[1205],simde_mm_xor_si64(c2[164],simde_mm_xor_si64(c2[1983],simde_mm_xor_si64(c2[424],simde_mm_xor_si64(c2[426],simde_mm_xor_si64(c2[1206],simde_mm_xor_si64(c2[946],simde_mm_xor_si64(c2[182],simde_mm_xor_si64(c2[2014],simde_mm_xor_si64(c2[442],simde_mm_xor_si64(c2[453],simde_mm_xor_si64(c2[1233],simde_mm_xor_si64(c2[973],simde_mm_xor_si64(c2[1520],simde_mm_xor_si64(c2[1260],simde_mm_xor_si64(c2[1780],simde_mm_xor_si64(c2[729],simde_mm_xor_si64(c2[1249],simde_mm_xor_si64(c2[1516],simde_mm_xor_si64(c2[217],simde_mm_xor_si64(c2[2036],simde_mm_xor_si64(c2[1286],simde_mm_xor_si64(c2[1026],simde_mm_xor_si64(c2[1546],simde_mm_xor_si64(c2[1021],simde_mm_xor_si64(c2[761],simde_mm_xor_si64(c2[1281],simde_mm_xor_si64(c2[1798],simde_mm_xor_si64(c2[499],c2[239]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
 
 //row: 35
-     d2[455]=_mm_xor_si64(c2[1041],_mm_xor_si64(c2[781],_mm_xor_si64(c2[1571],_mm_xor_si64(c2[532],_mm_xor_si64(c2[38],_mm_xor_si64(c2[1857],_mm_xor_si64(c2[1849],_mm_xor_si64(c2[27],_mm_xor_si64(c2[1332],_mm_xor_si64(c2[1357],_mm_xor_si64(c2[1097],_mm_xor_si64(c2[1094],_mm_xor_si64(c2[602],_mm_xor_si64(c2[342],_mm_xor_si64(c2[1910],_mm_xor_si64(c2[860],_mm_xor_si64(c2[1933],_mm_xor_si64(c2[890],_mm_xor_si64(c2[1675],_mm_xor_si64(c2[659],_mm_xor_si64(c2[920],_mm_xor_si64(c2[1957],_mm_xor_si64(c2[424],_mm_xor_si64(c2[164],_mm_xor_si64(c2[1462],_mm_xor_si64(c2[1984],_mm_xor_si64(c2[1493],_mm_xor_si64(c2[2011],_mm_xor_si64(c2[739],_mm_xor_si64(c2[208],_mm_xor_si64(c2[995],_mm_xor_si64(c2[765],_mm_xor_si64(c2[505],_mm_xor_si64(c2[240],c2[1277]))))))))))))))))))))))))))))))))));
+     d2[455]=simde_mm_xor_si64(c2[1041],simde_mm_xor_si64(c2[781],simde_mm_xor_si64(c2[1571],simde_mm_xor_si64(c2[532],simde_mm_xor_si64(c2[38],simde_mm_xor_si64(c2[1857],simde_mm_xor_si64(c2[1849],simde_mm_xor_si64(c2[27],simde_mm_xor_si64(c2[1332],simde_mm_xor_si64(c2[1357],simde_mm_xor_si64(c2[1097],simde_mm_xor_si64(c2[1094],simde_mm_xor_si64(c2[602],simde_mm_xor_si64(c2[342],simde_mm_xor_si64(c2[1910],simde_mm_xor_si64(c2[860],simde_mm_xor_si64(c2[1933],simde_mm_xor_si64(c2[890],simde_mm_xor_si64(c2[1675],simde_mm_xor_si64(c2[659],simde_mm_xor_si64(c2[920],simde_mm_xor_si64(c2[1957],simde_mm_xor_si64(c2[424],simde_mm_xor_si64(c2[164],simde_mm_xor_si64(c2[1462],simde_mm_xor_si64(c2[1984],simde_mm_xor_si64(c2[1493],simde_mm_xor_si64(c2[2011],simde_mm_xor_si64(c2[739],simde_mm_xor_si64(c2[208],simde_mm_xor_si64(c2[995],simde_mm_xor_si64(c2[765],simde_mm_xor_si64(c2[505],simde_mm_xor_si64(c2[240],c2[1277]))))))))))))))))))))))))))))))))));
 
 //row: 36
-     d2[468]=_mm_xor_si64(c2[1311],_mm_xor_si64(c2[1879],c2[971]));
+     d2[468]=simde_mm_xor_si64(c2[1311],simde_mm_xor_si64(c2[1879],c2[971]));
 
 //row: 37
-     d2[481]=_mm_xor_si64(c2[781],_mm_xor_si64(c2[1822],_mm_xor_si64(c2[1571],_mm_xor_si64(c2[520],_mm_xor_si64(c2[532],_mm_xor_si64(c2[1560],_mm_xor_si64(c2[1857],_mm_xor_si64(c2[806],_mm_xor_si64(c2[1849],_mm_xor_si64(c2[811],_mm_xor_si64(c2[27],_mm_xor_si64(c2[1328],_mm_xor_si64(c2[1068],_mm_xor_si64(c2[1097],_mm_xor_si64(c2[59],_mm_xor_si64(c2[1094],_mm_xor_si64(c2[316],_mm_xor_si64(c2[56],_mm_xor_si64(c2[342],_mm_xor_si64(c2[1383],_mm_xor_si64(c2[1910],_mm_xor_si64(c2[859],_mm_xor_si64(c2[860],_mm_xor_si64(c2[1901],_mm_xor_si64(c2[1933],_mm_xor_si64(c2[895],_mm_xor_si64(c2[890],_mm_xor_si64(c2[1931],_mm_xor_si64(c2[1675],_mm_xor_si64(c2[884],_mm_xor_si64(c2[624],_mm_xor_si64(c2[659],_mm_xor_si64(c2[1700],_mm_xor_si64(c2[920],_mm_xor_si64(c2[142],_mm_xor_si64(c2[1961],_mm_xor_si64(c2[164],_mm_xor_si64(c2[1205],_mm_xor_si64(c2[1462],_mm_xor_si64(c2[424],_mm_xor_si64(c2[1984],_mm_xor_si64(c2[1206],_mm_xor_si64(c2[946],_mm_xor_si64(c2[1493],_mm_xor_si64(c2[442],_mm_xor_si64(c2[2011],_mm_xor_si64(c2[1233],_mm_xor_si64(c2[973],_mm_xor_si64(c2[739],_mm_xor_si64(c2[1780],_mm_xor_si64(c2[208],_mm_xor_si64(c2[1249],_mm_xor_si64(c2[995],_mm_xor_si64(c2[217],_mm_xor_si64(c2[2036],_mm_xor_si64(c2[505],_mm_xor_si64(c2[1546],_mm_xor_si64(c2[240],_mm_xor_si64(c2[1281],_mm_xor_si64(c2[1277],_mm_xor_si64(c2[499],c2[239])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+     d2[481]=simde_mm_xor_si64(c2[781],simde_mm_xor_si64(c2[1822],simde_mm_xor_si64(c2[1571],simde_mm_xor_si64(c2[520],simde_mm_xor_si64(c2[532],simde_mm_xor_si64(c2[1560],simde_mm_xor_si64(c2[1857],simde_mm_xor_si64(c2[806],simde_mm_xor_si64(c2[1849],simde_mm_xor_si64(c2[811],simde_mm_xor_si64(c2[27],simde_mm_xor_si64(c2[1328],simde_mm_xor_si64(c2[1068],simde_mm_xor_si64(c2[1097],simde_mm_xor_si64(c2[59],simde_mm_xor_si64(c2[1094],simde_mm_xor_si64(c2[316],simde_mm_xor_si64(c2[56],simde_mm_xor_si64(c2[342],simde_mm_xor_si64(c2[1383],simde_mm_xor_si64(c2[1910],simde_mm_xor_si64(c2[859],simde_mm_xor_si64(c2[860],simde_mm_xor_si64(c2[1901],simde_mm_xor_si64(c2[1933],simde_mm_xor_si64(c2[895],simde_mm_xor_si64(c2[890],simde_mm_xor_si64(c2[1931],simde_mm_xor_si64(c2[1675],simde_mm_xor_si64(c2[884],simde_mm_xor_si64(c2[624],simde_mm_xor_si64(c2[659],simde_mm_xor_si64(c2[1700],simde_mm_xor_si64(c2[920],simde_mm_xor_si64(c2[142],simde_mm_xor_si64(c2[1961],simde_mm_xor_si64(c2[164],simde_mm_xor_si64(c2[1205],simde_mm_xor_si64(c2[1462],simde_mm_xor_si64(c2[424],simde_mm_xor_si64(c2[1984],simde_mm_xor_si64(c2[1206],simde_mm_xor_si64(c2[946],simde_mm_xor_si64(c2[1493],simde_mm_xor_si64(c2[442],simde_mm_xor_si64(c2[2011],simde_mm_xor_si64(c2[1233],simde_mm_xor_si64(c2[973],simde_mm_xor_si64(c2[739],simde_mm_xor_si64(c2[1780],simde_mm_xor_si64(c2[208],simde_mm_xor_si64(c2[1249],simde_mm_xor_si64(c2[995],simde_mm_xor_si64(c2[217],simde_mm_xor_si64(c2[2036],simde_mm_xor_si64(c2[505],simde_mm_xor_si64(c2[1546],simde_mm_xor_si64(c2[240],simde_mm_xor_si64(c2[1281],simde_mm_xor_si64(c2[1277],simde_mm_xor_si64(c2[499],c2[239])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
 
 //row: 38
-     d2[494]=_mm_xor_si64(c2[1562],_mm_xor_si64(c2[1302],_mm_xor_si64(c2[0],_mm_xor_si64(c2[1040],_mm_xor_si64(c2[546],_mm_xor_si64(c2[286],_mm_xor_si64(c2[291],_mm_xor_si64(c2[548],_mm_xor_si64(c2[1332],_mm_xor_si64(c2[1878],_mm_xor_si64(c2[1618],_mm_xor_si64(c2[1615],_mm_xor_si64(c2[1123],_mm_xor_si64(c2[863],_mm_xor_si64(c2[339],_mm_xor_si64(c2[1381],_mm_xor_si64(c2[375],_mm_xor_si64(c2[1411],_mm_xor_si64(c2[104],_mm_xor_si64(c2[1180],_mm_xor_si64(c2[1441],_mm_xor_si64(c2[1438],_mm_xor_si64(c2[945],_mm_xor_si64(c2[685],_mm_xor_si64(c2[1983],_mm_xor_si64(c2[426],_mm_xor_si64(c2[2014],_mm_xor_si64(c2[453],_mm_xor_si64(c2[1260],_mm_xor_si64(c2[729],_mm_xor_si64(c2[1516],_mm_xor_si64(c2[1286],_mm_xor_si64(c2[1026],_mm_xor_si64(c2[761],c2[1798]))))))))))))))))))))))))))))))))));
+     d2[494]=simde_mm_xor_si64(c2[1562],simde_mm_xor_si64(c2[1302],simde_mm_xor_si64(c2[0],simde_mm_xor_si64(c2[1040],simde_mm_xor_si64(c2[546],simde_mm_xor_si64(c2[286],simde_mm_xor_si64(c2[291],simde_mm_xor_si64(c2[548],simde_mm_xor_si64(c2[1332],simde_mm_xor_si64(c2[1878],simde_mm_xor_si64(c2[1618],simde_mm_xor_si64(c2[1615],simde_mm_xor_si64(c2[1123],simde_mm_xor_si64(c2[863],simde_mm_xor_si64(c2[339],simde_mm_xor_si64(c2[1381],simde_mm_xor_si64(c2[375],simde_mm_xor_si64(c2[1411],simde_mm_xor_si64(c2[104],simde_mm_xor_si64(c2[1180],simde_mm_xor_si64(c2[1441],simde_mm_xor_si64(c2[1438],simde_mm_xor_si64(c2[945],simde_mm_xor_si64(c2[685],simde_mm_xor_si64(c2[1983],simde_mm_xor_si64(c2[426],simde_mm_xor_si64(c2[2014],simde_mm_xor_si64(c2[453],simde_mm_xor_si64(c2[1260],simde_mm_xor_si64(c2[729],simde_mm_xor_si64(c2[1516],simde_mm_xor_si64(c2[1286],simde_mm_xor_si64(c2[1026],simde_mm_xor_si64(c2[761],c2[1798]))))))))))))))))))))))))))))))))));
 
 //row: 39
-     d2[507]=_mm_xor_si64(c2[1040],_mm_xor_si64(c2[780],_mm_xor_si64(c2[1830],_mm_xor_si64(c2[1570],_mm_xor_si64(c2[531],_mm_xor_si64(c2[1309],_mm_xor_si64(c2[37],_mm_xor_si64(c2[1856],_mm_xor_si64(c2[1848],_mm_xor_si64(c2[26],_mm_xor_si64(c2[1356],_mm_xor_si64(c2[1096],_mm_xor_si64(c2[1093],_mm_xor_si64(c2[601],_mm_xor_si64(c2[341],_mm_xor_si64(c2[90],_mm_xor_si64(c2[1909],_mm_xor_si64(c2[859],_mm_xor_si64(c2[113],_mm_xor_si64(c2[1932],_mm_xor_si64(c2[889],_mm_xor_si64(c2[1674],_mm_xor_si64(c2[918],_mm_xor_si64(c2[658],_mm_xor_si64(c2[919],_mm_xor_si64(c2[423],_mm_xor_si64(c2[163],_mm_xor_si64(c2[1721],_mm_xor_si64(c2[1461],_mm_xor_si64(c2[1983],_mm_xor_si64(c2[1752],_mm_xor_si64(c2[1492],_mm_xor_si64(c2[2010],_mm_xor_si64(c2[1488],_mm_xor_si64(c2[998],_mm_xor_si64(c2[738],_mm_xor_si64(c2[220],_mm_xor_si64(c2[994],_mm_xor_si64(c2[764],_mm_xor_si64(c2[504],_mm_xor_si64(c2[499],_mm_xor_si64(c2[239],c2[1276]))))))))))))))))))))))))))))))))))))))))));
+     d2[507]=simde_mm_xor_si64(c2[1040],simde_mm_xor_si64(c2[780],simde_mm_xor_si64(c2[1830],simde_mm_xor_si64(c2[1570],simde_mm_xor_si64(c2[531],simde_mm_xor_si64(c2[1309],simde_mm_xor_si64(c2[37],simde_mm_xor_si64(c2[1856],simde_mm_xor_si64(c2[1848],simde_mm_xor_si64(c2[26],simde_mm_xor_si64(c2[1356],simde_mm_xor_si64(c2[1096],simde_mm_xor_si64(c2[1093],simde_mm_xor_si64(c2[601],simde_mm_xor_si64(c2[341],simde_mm_xor_si64(c2[90],simde_mm_xor_si64(c2[1909],simde_mm_xor_si64(c2[859],simde_mm_xor_si64(c2[113],simde_mm_xor_si64(c2[1932],simde_mm_xor_si64(c2[889],simde_mm_xor_si64(c2[1674],simde_mm_xor_si64(c2[918],simde_mm_xor_si64(c2[658],simde_mm_xor_si64(c2[919],simde_mm_xor_si64(c2[423],simde_mm_xor_si64(c2[163],simde_mm_xor_si64(c2[1721],simde_mm_xor_si64(c2[1461],simde_mm_xor_si64(c2[1983],simde_mm_xor_si64(c2[1752],simde_mm_xor_si64(c2[1492],simde_mm_xor_si64(c2[2010],simde_mm_xor_si64(c2[1488],simde_mm_xor_si64(c2[998],simde_mm_xor_si64(c2[738],simde_mm_xor_si64(c2[220],simde_mm_xor_si64(c2[994],simde_mm_xor_si64(c2[764],simde_mm_xor_si64(c2[504],simde_mm_xor_si64(c2[499],simde_mm_xor_si64(c2[239],c2[1276]))))))))))))))))))))))))))))))))))))))))));
 
 //row: 40
-     d2[520]=_mm_xor_si64(c2[272],_mm_xor_si64(c2[261],_mm_xor_si64(c2[1049],_mm_xor_si64(c2[1051],_mm_xor_si64(c2[10],_mm_xor_si64(c2[12],_mm_xor_si64(c2[1335],_mm_xor_si64(c2[1337],_mm_xor_si64(c2[1327],_mm_xor_si64(c2[1329],_mm_xor_si64(c2[1597],_mm_xor_si64(c2[1846],_mm_xor_si64(c2[1586],_mm_xor_si64(c2[575],_mm_xor_si64(c2[577],_mm_xor_si64(c2[572],_mm_xor_si64(c2[834],_mm_xor_si64(c2[574],_mm_xor_si64(c2[323],_mm_xor_si64(c2[1899],_mm_xor_si64(c2[1901],_mm_xor_si64(c2[1388],_mm_xor_si64(c2[1390],_mm_xor_si64(c2[338],_mm_xor_si64(c2[340],_mm_xor_si64(c2[1411],_mm_xor_si64(c2[1413],_mm_xor_si64(c2[368],_mm_xor_si64(c2[370],_mm_xor_si64(c2[1153],_mm_xor_si64(c2[1415],_mm_xor_si64(c2[1155],_mm_xor_si64(c2[137],_mm_xor_si64(c2[139],_mm_xor_si64(c2[398],_mm_xor_si64(c2[660],_mm_xor_si64(c2[400],_mm_xor_si64(c2[1721],_mm_xor_si64(c2[1723],_mm_xor_si64(c2[940],_mm_xor_si64(c2[942],_mm_xor_si64(c2[1462],_mm_xor_si64(c2[1724],_mm_xor_si64(c2[1464],_mm_xor_si64(c2[971],_mm_xor_si64(c2[973],_mm_xor_si64(c2[1489],_mm_xor_si64(c2[1751],_mm_xor_si64(c2[1491],_mm_xor_si64(c2[217],_mm_xor_si64(c2[219],_mm_xor_si64(c2[1778],_mm_xor_si64(c2[1780],_mm_xor_si64(c2[473],_mm_xor_si64(c2[735],_mm_xor_si64(c2[475],_mm_xor_si64(c2[2062],_mm_xor_si64(c2[2064],_mm_xor_si64(c2[1797],_mm_xor_si64(c2[1799],_mm_xor_si64(c2[755],_mm_xor_si64(c2[1017],c2[757]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+     d2[520]=simde_mm_xor_si64(c2[272],simde_mm_xor_si64(c2[261],simde_mm_xor_si64(c2[1049],simde_mm_xor_si64(c2[1051],simde_mm_xor_si64(c2[10],simde_mm_xor_si64(c2[12],simde_mm_xor_si64(c2[1335],simde_mm_xor_si64(c2[1337],simde_mm_xor_si64(c2[1327],simde_mm_xor_si64(c2[1329],simde_mm_xor_si64(c2[1597],simde_mm_xor_si64(c2[1846],simde_mm_xor_si64(c2[1586],simde_mm_xor_si64(c2[575],simde_mm_xor_si64(c2[577],simde_mm_xor_si64(c2[572],simde_mm_xor_si64(c2[834],simde_mm_xor_si64(c2[574],simde_mm_xor_si64(c2[323],simde_mm_xor_si64(c2[1899],simde_mm_xor_si64(c2[1901],simde_mm_xor_si64(c2[1388],simde_mm_xor_si64(c2[1390],simde_mm_xor_si64(c2[338],simde_mm_xor_si64(c2[340],simde_mm_xor_si64(c2[1411],simde_mm_xor_si64(c2[1413],simde_mm_xor_si64(c2[368],simde_mm_xor_si64(c2[370],simde_mm_xor_si64(c2[1153],simde_mm_xor_si64(c2[1415],simde_mm_xor_si64(c2[1155],simde_mm_xor_si64(c2[137],simde_mm_xor_si64(c2[139],simde_mm_xor_si64(c2[398],simde_mm_xor_si64(c2[660],simde_mm_xor_si64(c2[400],simde_mm_xor_si64(c2[1721],simde_mm_xor_si64(c2[1723],simde_mm_xor_si64(c2[940],simde_mm_xor_si64(c2[942],simde_mm_xor_si64(c2[1462],simde_mm_xor_si64(c2[1724],simde_mm_xor_si64(c2[1464],simde_mm_xor_si64(c2[971],simde_mm_xor_si64(c2[973],simde_mm_xor_si64(c2[1489],simde_mm_xor_si64(c2[1751],simde_mm_xor_si64(c2[1491],simde_mm_xor_si64(c2[217],simde_mm_xor_si64(c2[219],simde_mm_xor_si64(c2[1778],simde_mm_xor_si64(c2[1780],simde_mm_xor_si64(c2[473],simde_mm_xor_si64(c2[735],simde_mm_xor_si64(c2[475],simde_mm_xor_si64(c2[2062],simde_mm_xor_si64(c2[2064],simde_mm_xor_si64(c2[1797],simde_mm_xor_si64(c2[1799],simde_mm_xor_si64(c2[755],simde_mm_xor_si64(c2[1017],c2[757]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
 
 //row: 41
-     d2[533]=_mm_xor_si64(c2[1572],_mm_xor_si64(c2[1312],_mm_xor_si64(c2[10],_mm_xor_si64(c2[1050],_mm_xor_si64(c2[556],_mm_xor_si64(c2[296],_mm_xor_si64(c2[288],_mm_xor_si64(c2[558],_mm_xor_si64(c2[1856],_mm_xor_si64(c2[1875],_mm_xor_si64(c2[1615],_mm_xor_si64(c2[1612],_mm_xor_si64(c2[1120],_mm_xor_si64(c2[860],_mm_xor_si64(c2[349],_mm_xor_si64(c2[1378],_mm_xor_si64(c2[372],_mm_xor_si64(c2[1408],_mm_xor_si64(c2[114],_mm_xor_si64(c2[1177],_mm_xor_si64(c2[1438],_mm_xor_si64(c2[1441],_mm_xor_si64(c2[942],_mm_xor_si64(c2[682],_mm_xor_si64(c2[1980],_mm_xor_si64(c2[423],_mm_xor_si64(c2[2011],_mm_xor_si64(c2[450],_mm_xor_si64(c2[1257],_mm_xor_si64(c2[739],_mm_xor_si64(c2[1513],_mm_xor_si64(c2[1283],_mm_xor_si64(c2[1023],_mm_xor_si64(c2[758],c2[1795]))))))))))))))))))))))))))))))))));
+     d2[533]=simde_mm_xor_si64(c2[1572],simde_mm_xor_si64(c2[1312],simde_mm_xor_si64(c2[10],simde_mm_xor_si64(c2[1050],simde_mm_xor_si64(c2[556],simde_mm_xor_si64(c2[296],simde_mm_xor_si64(c2[288],simde_mm_xor_si64(c2[558],simde_mm_xor_si64(c2[1856],simde_mm_xor_si64(c2[1875],simde_mm_xor_si64(c2[1615],simde_mm_xor_si64(c2[1612],simde_mm_xor_si64(c2[1120],simde_mm_xor_si64(c2[860],simde_mm_xor_si64(c2[349],simde_mm_xor_si64(c2[1378],simde_mm_xor_si64(c2[372],simde_mm_xor_si64(c2[1408],simde_mm_xor_si64(c2[114],simde_mm_xor_si64(c2[1177],simde_mm_xor_si64(c2[1438],simde_mm_xor_si64(c2[1441],simde_mm_xor_si64(c2[942],simde_mm_xor_si64(c2[682],simde_mm_xor_si64(c2[1980],simde_mm_xor_si64(c2[423],simde_mm_xor_si64(c2[2011],simde_mm_xor_si64(c2[450],simde_mm_xor_si64(c2[1257],simde_mm_xor_si64(c2[739],simde_mm_xor_si64(c2[1513],simde_mm_xor_si64(c2[1283],simde_mm_xor_si64(c2[1023],simde_mm_xor_si64(c2[758],c2[1795]))))))))))))))))))))))))))))))))));
   }
 }
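
The substitution pattern above repeats mechanically across all of the generated base-graph files: each x86 intrinsic keeps its name but gains a simde_ prefix, and the vector types __m64/__m128i become simde__m64/simde__m128i, so the long generated XOR chains compile unchanged on non-x86 hosts (e.g. aarch64). Below is a minimal standalone sketch, not part of the patch, illustrating the 128-bit case; it assumes the SIMDE headers are installed and reachable as <simde/x86/sse2.h> (the 64-bit simde_mm_xor_si64 variant maps the same way via <simde/x86/mmx.h>).

#include <stdio.h>
#include <stdint.h>
#include <simde/x86/sse2.h>   /* portable simde__m128i and simde_mm_* operations */

int main(void) {
  uint8_t a[16], b[16], out[16];
  for (int i = 0; i < 16; i++) { a[i] = (uint8_t)i; b[i] = 0xF0u; }

  /* Unaligned loads; the encoder casts its uint8_t buffers to vector
     pointers directly, so no alignment is assumed here either. */
  simde__m128i va = simde_mm_loadu_si128((const simde__m128i *)a);
  simde__m128i vb = simde_mm_loadu_si128((const simde__m128i *)b);

  /* Same call shape as the generated d2[k]=simde_mm_xor_si128(...) chains:
     on x86 SIMDE forwards this to the native PXOR instruction, while on
     ARM it is lowered to the equivalent NEON EOR. */
  simde__m128i vx = simde_mm_xor_si128(va, vb);

  simde_mm_storeu_si128((simde__m128i *)out, vx);
  for (int i = 0; i < 16; i++) printf("%02x", out[i]);
  putchar('\n');
  return 0;
}

Because the renaming is strictly one-to-one, patches like the hunks above can be produced by a simple textual rewrite over the generated files, with no change to the parity-check logic itself.
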
diff --git a/openair1/PHY/CODING/nrLDPC_encoder/ldpc_BG2_Zc112_byte.c b/openair1/PHY/CODING/nrLDPC_encoder/ldpc_BG2_Zc112_byte.c
index 8808a1b9151551322ce92fe3fd3a84d15e1dd14e..06982400a32e778836180ccf71677a8b0ab0a669 100644
--- a/openair1/PHY/CODING/nrLDPC_encoder/ldpc_BG2_Zc112_byte.c
+++ b/openair1/PHY/CODING/nrLDPC_encoder/ldpc_BG2_Zc112_byte.c
@@ -1,9 +1,9 @@
 #include "PHY/sse_intrin.h"
 // generated code for Zc=112, byte encoding
 static inline __attribute__ ((no_sanitize("address", "undefined"))) void ldpc_BG2_Zc112_byte(uint8_t *c,uint8_t *d) {
-  __m128i *csimd=(__m128i *)c,*dsimd=(__m128i *)d;
+  simde__m128i *csimd=(simde__m128i *)c,*dsimd=(simde__m128i *)d;
 
-  __m128i *c2,*d2;
+  simde__m128i *c2,*d2;
 
   int i2;
   for (i2=0; i2<7; i2++) {
@@ -11,129 +11,129 @@ static inline __attribute__ ((no_sanitize("address", "undefined"))) void ldpc_BG
      d2=&dsimd[i2];
 
 //row: 0
-     d2[0]=_mm_xor_si128(c2[1124],_mm_xor_si128(c2[703],_mm_xor_si128(c2[1122],_mm_xor_si128(c2[1980],_mm_xor_si128(c2[435],_mm_xor_si128(c2[1278],_mm_xor_si128(c2[1009],_mm_xor_si128(c2[868],_mm_xor_si128(c2[746],_mm_xor_si128(c2[1724],_mm_xor_si128(c2[2004],_mm_xor_si128(c2[476],_mm_xor_si128(c2[2160],_mm_xor_si128(c2[618],_mm_xor_si128(c2[1752],_mm_xor_si128(c2[1750],_mm_xor_si128(c2[2189],_mm_xor_si128(c2[504],_mm_xor_si128(c2[1764],_mm_xor_si128(c2[1223],_mm_xor_si128(c2[2064],_mm_xor_si128(c2[1933],_mm_xor_si128(c2[1657],_mm_xor_si128(c2[1794],_mm_xor_si128(c2[1246],_mm_xor_si128(c2[2227],c2[827]))))))))))))))))))))))))));
+     d2[0]=simde_mm_xor_si128(c2[1124],simde_mm_xor_si128(c2[703],simde_mm_xor_si128(c2[1122],simde_mm_xor_si128(c2[1980],simde_mm_xor_si128(c2[435],simde_mm_xor_si128(c2[1278],simde_mm_xor_si128(c2[1009],simde_mm_xor_si128(c2[868],simde_mm_xor_si128(c2[746],simde_mm_xor_si128(c2[1724],simde_mm_xor_si128(c2[2004],simde_mm_xor_si128(c2[476],simde_mm_xor_si128(c2[2160],simde_mm_xor_si128(c2[618],simde_mm_xor_si128(c2[1752],simde_mm_xor_si128(c2[1750],simde_mm_xor_si128(c2[2189],simde_mm_xor_si128(c2[504],simde_mm_xor_si128(c2[1764],simde_mm_xor_si128(c2[1223],simde_mm_xor_si128(c2[2064],simde_mm_xor_si128(c2[1933],simde_mm_xor_si128(c2[1657],simde_mm_xor_si128(c2[1794],simde_mm_xor_si128(c2[1246],simde_mm_xor_si128(c2[2227],c2[827]))))))))))))))))))))))))));
 
 //row: 1
-     d2[7]=_mm_xor_si128(c2[1124],_mm_xor_si128(c2[1264],_mm_xor_si128(c2[843],_mm_xor_si128(c2[1262],_mm_xor_si128(c2[1980],_mm_xor_si128(c2[2120],_mm_xor_si128(c2[575],_mm_xor_si128(c2[1418],_mm_xor_si128(c2[1009],_mm_xor_si128(c2[1149],_mm_xor_si128(c2[1008],_mm_xor_si128(c2[746],_mm_xor_si128(c2[886],_mm_xor_si128(c2[1864],_mm_xor_si128(c2[2144],_mm_xor_si128(c2[616],_mm_xor_si128(c2[61],_mm_xor_si128(c2[758],_mm_xor_si128(c2[1892],_mm_xor_si128(c2[1890],_mm_xor_si128(c2[2189],_mm_xor_si128(c2[90],_mm_xor_si128(c2[644],_mm_xor_si128(c2[1904],_mm_xor_si128(c2[1363],_mm_xor_si128(c2[2204],_mm_xor_si128(c2[2073],_mm_xor_si128(c2[1797],_mm_xor_si128(c2[1934],_mm_xor_si128(c2[1246],_mm_xor_si128(c2[1386],_mm_xor_si128(c2[128],c2[967]))))))))))))))))))))))))))))))));
+     d2[7]=simde_mm_xor_si128(c2[1124],simde_mm_xor_si128(c2[1264],simde_mm_xor_si128(c2[843],simde_mm_xor_si128(c2[1262],simde_mm_xor_si128(c2[1980],simde_mm_xor_si128(c2[2120],simde_mm_xor_si128(c2[575],simde_mm_xor_si128(c2[1418],simde_mm_xor_si128(c2[1009],simde_mm_xor_si128(c2[1149],simde_mm_xor_si128(c2[1008],simde_mm_xor_si128(c2[746],simde_mm_xor_si128(c2[886],simde_mm_xor_si128(c2[1864],simde_mm_xor_si128(c2[2144],simde_mm_xor_si128(c2[616],simde_mm_xor_si128(c2[61],simde_mm_xor_si128(c2[758],simde_mm_xor_si128(c2[1892],simde_mm_xor_si128(c2[1890],simde_mm_xor_si128(c2[2189],simde_mm_xor_si128(c2[90],simde_mm_xor_si128(c2[644],simde_mm_xor_si128(c2[1904],simde_mm_xor_si128(c2[1363],simde_mm_xor_si128(c2[2204],simde_mm_xor_si128(c2[2073],simde_mm_xor_si128(c2[1797],simde_mm_xor_si128(c2[1934],simde_mm_xor_si128(c2[1246],simde_mm_xor_si128(c2[1386],simde_mm_xor_si128(c2[128],c2[967]))))))))))))))))))))))))))))))));
 
 //row: 2
-     d2[14]=_mm_xor_si128(c2[1124],_mm_xor_si128(c2[1264],_mm_xor_si128(c2[703],_mm_xor_si128(c2[843],_mm_xor_si128(c2[1262],_mm_xor_si128(c2[1980],_mm_xor_si128(c2[2120],_mm_xor_si128(c2[575],_mm_xor_si128(c2[1418],_mm_xor_si128(c2[1009],_mm_xor_si128(c2[1149],_mm_xor_si128(c2[1008],_mm_xor_si128(c2[746],_mm_xor_si128(c2[886],_mm_xor_si128(c2[1724],_mm_xor_si128(c2[1864],_mm_xor_si128(c2[2144],_mm_xor_si128(c2[476],_mm_xor_si128(c2[616],_mm_xor_si128(c2[61],_mm_xor_si128(c2[758],_mm_xor_si128(c2[1752],_mm_xor_si128(c2[1892],_mm_xor_si128(c2[1890],_mm_xor_si128(c2[2189],_mm_xor_si128(c2[90],_mm_xor_si128(c2[504],_mm_xor_si128(c2[644],_mm_xor_si128(c2[1904],_mm_xor_si128(c2[1223],_mm_xor_si128(c2[1363],_mm_xor_si128(c2[2204],_mm_xor_si128(c2[1933],_mm_xor_si128(c2[2073],_mm_xor_si128(c2[1797],_mm_xor_si128(c2[1934],_mm_xor_si128(c2[1246],_mm_xor_si128(c2[1386],_mm_xor_si128(c2[2227],_mm_xor_si128(c2[128],c2[967]))))))))))))))))))))))))))))))))))))))));
+     d2[14]=simde_mm_xor_si128(c2[1124],simde_mm_xor_si128(c2[1264],simde_mm_xor_si128(c2[703],simde_mm_xor_si128(c2[843],simde_mm_xor_si128(c2[1262],simde_mm_xor_si128(c2[1980],simde_mm_xor_si128(c2[2120],simde_mm_xor_si128(c2[575],simde_mm_xor_si128(c2[1418],simde_mm_xor_si128(c2[1009],simde_mm_xor_si128(c2[1149],simde_mm_xor_si128(c2[1008],simde_mm_xor_si128(c2[746],simde_mm_xor_si128(c2[886],simde_mm_xor_si128(c2[1724],simde_mm_xor_si128(c2[1864],simde_mm_xor_si128(c2[2144],simde_mm_xor_si128(c2[476],simde_mm_xor_si128(c2[616],simde_mm_xor_si128(c2[61],simde_mm_xor_si128(c2[758],simde_mm_xor_si128(c2[1752],simde_mm_xor_si128(c2[1892],simde_mm_xor_si128(c2[1890],simde_mm_xor_si128(c2[2189],simde_mm_xor_si128(c2[90],simde_mm_xor_si128(c2[504],simde_mm_xor_si128(c2[644],simde_mm_xor_si128(c2[1904],simde_mm_xor_si128(c2[1223],simde_mm_xor_si128(c2[1363],simde_mm_xor_si128(c2[2204],simde_mm_xor_si128(c2[1933],simde_mm_xor_si128(c2[2073],simde_mm_xor_si128(c2[1797],simde_mm_xor_si128(c2[1934],simde_mm_xor_si128(c2[1246],simde_mm_xor_si128(c2[1386],simde_mm_xor_si128(c2[2227],simde_mm_xor_si128(c2[128],c2[967]))))))))))))))))))))))))))))))))))))))));
 
 //row: 3
-     d2[21]=_mm_xor_si128(c2[1264],_mm_xor_si128(c2[843],_mm_xor_si128(c2[1262],_mm_xor_si128(c2[2120],_mm_xor_si128(c2[575],_mm_xor_si128(c2[1278],_mm_xor_si128(c2[1418],_mm_xor_si128(c2[1149],_mm_xor_si128(c2[868],_mm_xor_si128(c2[1008],_mm_xor_si128(c2[886],_mm_xor_si128(c2[1864],_mm_xor_si128(c2[2144],_mm_xor_si128(c2[616],_mm_xor_si128(c2[61],_mm_xor_si128(c2[618],_mm_xor_si128(c2[758],_mm_xor_si128(c2[1892],_mm_xor_si128(c2[1750],_mm_xor_si128(c2[1890],_mm_xor_si128(c2[90],_mm_xor_si128(c2[644],_mm_xor_si128(c2[1764],_mm_xor_si128(c2[1904],_mm_xor_si128(c2[1363],_mm_xor_si128(c2[2064],_mm_xor_si128(c2[2204],_mm_xor_si128(c2[2073],_mm_xor_si128(c2[1797],_mm_xor_si128(c2[1794],_mm_xor_si128(c2[1934],_mm_xor_si128(c2[1386],_mm_xor_si128(c2[128],_mm_xor_si128(c2[827],c2[967]))))))))))))))))))))))))))))))))));
+     d2[21]=simde_mm_xor_si128(c2[1264],simde_mm_xor_si128(c2[843],simde_mm_xor_si128(c2[1262],simde_mm_xor_si128(c2[2120],simde_mm_xor_si128(c2[575],simde_mm_xor_si128(c2[1278],simde_mm_xor_si128(c2[1418],simde_mm_xor_si128(c2[1149],simde_mm_xor_si128(c2[868],simde_mm_xor_si128(c2[1008],simde_mm_xor_si128(c2[886],simde_mm_xor_si128(c2[1864],simde_mm_xor_si128(c2[2144],simde_mm_xor_si128(c2[616],simde_mm_xor_si128(c2[61],simde_mm_xor_si128(c2[618],simde_mm_xor_si128(c2[758],simde_mm_xor_si128(c2[1892],simde_mm_xor_si128(c2[1750],simde_mm_xor_si128(c2[1890],simde_mm_xor_si128(c2[90],simde_mm_xor_si128(c2[644],simde_mm_xor_si128(c2[1764],simde_mm_xor_si128(c2[1904],simde_mm_xor_si128(c2[1363],simde_mm_xor_si128(c2[2064],simde_mm_xor_si128(c2[2204],simde_mm_xor_si128(c2[2073],simde_mm_xor_si128(c2[1797],simde_mm_xor_si128(c2[1794],simde_mm_xor_si128(c2[1934],simde_mm_xor_si128(c2[1386],simde_mm_xor_si128(c2[128],simde_mm_xor_si128(c2[827],c2[967]))))))))))))))))))))))))))))))))));
 
 //row: 4
-     d2[28]=_mm_xor_si128(c2[1823],_mm_xor_si128(c2[1963],_mm_xor_si128(c2[1542],_mm_xor_si128(c2[1961],_mm_xor_si128(c2[1125],_mm_xor_si128(c2[440],_mm_xor_si128(c2[580],_mm_xor_si128(c2[1274],_mm_xor_si128(c2[2117],_mm_xor_si128(c2[15],_mm_xor_si128(c2[1708],_mm_xor_si128(c2[1848],_mm_xor_si128(c2[1714],_mm_xor_si128(c2[1445],_mm_xor_si128(c2[1585],_mm_xor_si128(c2[324],_mm_xor_si128(c2[604],_mm_xor_si128(c2[1322],_mm_xor_si128(c2[760],_mm_xor_si128(c2[1457],_mm_xor_si128(c2[352],_mm_xor_si128(c2[350],_mm_xor_si128(c2[649],_mm_xor_si128(c2[789],_mm_xor_si128(c2[1350],_mm_xor_si128(c2[364],_mm_xor_si128(c2[2062],_mm_xor_si128(c2[664],_mm_xor_si128(c2[533],_mm_xor_si128(c2[257],_mm_xor_si128(c2[394],_mm_xor_si128(c2[1952],_mm_xor_si128(c2[2092],_mm_xor_si128(c2[827],c2[1666]))))))))))))))))))))))))))))))))));
+     d2[28]=simde_mm_xor_si128(c2[1823],simde_mm_xor_si128(c2[1963],simde_mm_xor_si128(c2[1542],simde_mm_xor_si128(c2[1961],simde_mm_xor_si128(c2[1125],simde_mm_xor_si128(c2[440],simde_mm_xor_si128(c2[580],simde_mm_xor_si128(c2[1274],simde_mm_xor_si128(c2[2117],simde_mm_xor_si128(c2[15],simde_mm_xor_si128(c2[1708],simde_mm_xor_si128(c2[1848],simde_mm_xor_si128(c2[1714],simde_mm_xor_si128(c2[1445],simde_mm_xor_si128(c2[1585],simde_mm_xor_si128(c2[324],simde_mm_xor_si128(c2[604],simde_mm_xor_si128(c2[1322],simde_mm_xor_si128(c2[760],simde_mm_xor_si128(c2[1457],simde_mm_xor_si128(c2[352],simde_mm_xor_si128(c2[350],simde_mm_xor_si128(c2[649],simde_mm_xor_si128(c2[789],simde_mm_xor_si128(c2[1350],simde_mm_xor_si128(c2[364],simde_mm_xor_si128(c2[2062],simde_mm_xor_si128(c2[664],simde_mm_xor_si128(c2[533],simde_mm_xor_si128(c2[257],simde_mm_xor_si128(c2[394],simde_mm_xor_si128(c2[1952],simde_mm_xor_si128(c2[2092],simde_mm_xor_si128(c2[827],c2[1666]))))))))))))))))))))))))))))))))));
 
 //row: 5
-     d2[35]=_mm_xor_si128(c2[1262],_mm_xor_si128(c2[1402],_mm_xor_si128(c2[981],_mm_xor_si128(c2[1400],_mm_xor_si128(c2[1264],_mm_xor_si128(c2[2118],_mm_xor_si128(c2[19],_mm_xor_si128(c2[720],_mm_xor_si128(c2[1556],_mm_xor_si128(c2[1415],_mm_xor_si128(c2[1154],_mm_xor_si128(c2[1294],_mm_xor_si128(c2[1153],_mm_xor_si128(c2[884],_mm_xor_si128(c2[1024],_mm_xor_si128(c2[2002],_mm_xor_si128(c2[43],_mm_xor_si128(c2[761],_mm_xor_si128(c2[199],_mm_xor_si128(c2[896],_mm_xor_si128(c2[2030],_mm_xor_si128(c2[2035],_mm_xor_si128(c2[1473],_mm_xor_si128(c2[88],_mm_xor_si128(c2[228],_mm_xor_si128(c2[789],_mm_xor_si128(c2[2049],_mm_xor_si128(c2[1501],_mm_xor_si128(c2[103],_mm_xor_si128(c2[1644],_mm_xor_si128(c2[2218],_mm_xor_si128(c2[1935],_mm_xor_si128(c2[2072],_mm_xor_si128(c2[1391],_mm_xor_si128(c2[1531],_mm_xor_si128(c2[266],c2[1112]))))))))))))))))))))))))))))))))))));
+     d2[35]=simde_mm_xor_si128(c2[1262],simde_mm_xor_si128(c2[1402],simde_mm_xor_si128(c2[981],simde_mm_xor_si128(c2[1400],simde_mm_xor_si128(c2[1264],simde_mm_xor_si128(c2[2118],simde_mm_xor_si128(c2[19],simde_mm_xor_si128(c2[720],simde_mm_xor_si128(c2[1556],simde_mm_xor_si128(c2[1415],simde_mm_xor_si128(c2[1154],simde_mm_xor_si128(c2[1294],simde_mm_xor_si128(c2[1153],simde_mm_xor_si128(c2[884],simde_mm_xor_si128(c2[1024],simde_mm_xor_si128(c2[2002],simde_mm_xor_si128(c2[43],simde_mm_xor_si128(c2[761],simde_mm_xor_si128(c2[199],simde_mm_xor_si128(c2[896],simde_mm_xor_si128(c2[2030],simde_mm_xor_si128(c2[2035],simde_mm_xor_si128(c2[1473],simde_mm_xor_si128(c2[88],simde_mm_xor_si128(c2[228],simde_mm_xor_si128(c2[789],simde_mm_xor_si128(c2[2049],simde_mm_xor_si128(c2[1501],simde_mm_xor_si128(c2[103],simde_mm_xor_si128(c2[1644],simde_mm_xor_si128(c2[2218],simde_mm_xor_si128(c2[1935],simde_mm_xor_si128(c2[2072],simde_mm_xor_si128(c2[1391],simde_mm_xor_si128(c2[1531],simde_mm_xor_si128(c2[266],c2[1112]))))))))))))))))))))))))))))))))))));
 
 //row: 6
-     d2[42]=_mm_xor_si128(c2[1263],_mm_xor_si128(c2[1403],_mm_xor_si128(c2[982],_mm_xor_si128(c2[1401],_mm_xor_si128(c2[1540],_mm_xor_si128(c2[2119],_mm_xor_si128(c2[20],_mm_xor_si128(c2[714],_mm_xor_si128(c2[1557],_mm_xor_si128(c2[1148],_mm_xor_si128(c2[1288],_mm_xor_si128(c2[1154],_mm_xor_si128(c2[885],_mm_xor_si128(c2[1025],_mm_xor_si128(c2[2003],_mm_xor_si128(c2[44],_mm_xor_si128(c2[762],_mm_xor_si128(c2[200],_mm_xor_si128(c2[897],_mm_xor_si128(c2[2031],_mm_xor_si128(c2[2036],_mm_xor_si128(c2[1053],_mm_xor_si128(c2[89],_mm_xor_si128(c2[229],_mm_xor_si128(c2[790],_mm_xor_si128(c2[2050],_mm_xor_si128(c2[1502],_mm_xor_si128(c2[104],_mm_xor_si128(c2[2199],_mm_xor_si128(c2[2212],_mm_xor_si128(c2[1936],_mm_xor_si128(c2[2073],_mm_xor_si128(c2[1392],_mm_xor_si128(c2[1532],_mm_xor_si128(c2[267],_mm_xor_si128(c2[1106],c2[2092]))))))))))))))))))))))))))))))))))));
+     d2[42]=simde_mm_xor_si128(c2[1263],simde_mm_xor_si128(c2[1403],simde_mm_xor_si128(c2[982],simde_mm_xor_si128(c2[1401],simde_mm_xor_si128(c2[1540],simde_mm_xor_si128(c2[2119],simde_mm_xor_si128(c2[20],simde_mm_xor_si128(c2[714],simde_mm_xor_si128(c2[1557],simde_mm_xor_si128(c2[1148],simde_mm_xor_si128(c2[1288],simde_mm_xor_si128(c2[1154],simde_mm_xor_si128(c2[885],simde_mm_xor_si128(c2[1025],simde_mm_xor_si128(c2[2003],simde_mm_xor_si128(c2[44],simde_mm_xor_si128(c2[762],simde_mm_xor_si128(c2[200],simde_mm_xor_si128(c2[897],simde_mm_xor_si128(c2[2031],simde_mm_xor_si128(c2[2036],simde_mm_xor_si128(c2[1053],simde_mm_xor_si128(c2[89],simde_mm_xor_si128(c2[229],simde_mm_xor_si128(c2[790],simde_mm_xor_si128(c2[2050],simde_mm_xor_si128(c2[1502],simde_mm_xor_si128(c2[104],simde_mm_xor_si128(c2[2199],simde_mm_xor_si128(c2[2212],simde_mm_xor_si128(c2[1936],simde_mm_xor_si128(c2[2073],simde_mm_xor_si128(c2[1392],simde_mm_xor_si128(c2[1532],simde_mm_xor_si128(c2[267],simde_mm_xor_si128(c2[1106],c2[2092]))))))))))))))))))))))))))))))))))));
 
 //row: 7
-     d2[49]=_mm_xor_si128(c2[1966],_mm_xor_si128(c2[2106],_mm_xor_si128(c2[1961],_mm_xor_si128(c2[1685],_mm_xor_si128(c2[1540],_mm_xor_si128(c2[2104],_mm_xor_si128(c2[1966],_mm_xor_si128(c2[576],_mm_xor_si128(c2[716],_mm_xor_si128(c2[578],_mm_xor_si128(c2[1417],_mm_xor_si128(c2[1279],_mm_xor_si128(c2[14],_mm_xor_si128(c2[1975],_mm_xor_si128(c2[2115],_mm_xor_si128(c2[1000],_mm_xor_si128(c2[1851],_mm_xor_si128(c2[1991],_mm_xor_si128(c2[1853],_mm_xor_si128(c2[1850],_mm_xor_si128(c2[1572],_mm_xor_si128(c2[1712],_mm_xor_si128(c2[1588],_mm_xor_si128(c2[1728],_mm_xor_si128(c2[1583],_mm_xor_si128(c2[467],_mm_xor_si128(c2[322],_mm_xor_si128(c2[747],_mm_xor_si128(c2[602],_mm_xor_si128(c2[1458],_mm_xor_si128(c2[1320],_mm_xor_si128(c2[896],_mm_xor_si128(c2[758],_mm_xor_si128(c2[1600],_mm_xor_si128(c2[1322],_mm_xor_si128(c2[1462],_mm_xor_si128(c2[495],_mm_xor_si128(c2[350],_mm_xor_si128(c2[493],_mm_xor_si128(c2[215],_mm_xor_si128(c2[355],_mm_xor_si128(c2[1890],_mm_xor_si128(c2[785],_mm_xor_si128(c2[925],_mm_xor_si128(c2[787],_mm_xor_si128(c2[1486],_mm_xor_si128(c2[1348],_mm_xor_si128(c2[507],_mm_xor_si128(c2[229],_mm_xor_si128(c2[369],_mm_xor_si128(c2[2198],_mm_xor_si128(c2[2060],_mm_xor_si128(c2[800],_mm_xor_si128(c2[522],_mm_xor_si128(c2[662],_mm_xor_si128(c2[1364],_mm_xor_si128(c2[676],_mm_xor_si128(c2[538],_mm_xor_si128(c2[393],_mm_xor_si128(c2[255],_mm_xor_si128(c2[537],_mm_xor_si128(c2[252],_mm_xor_si128(c2[392],_mm_xor_si128(c2[2088],_mm_xor_si128(c2[2228],_mm_xor_si128(c2[2090],_mm_xor_si128(c2[970],_mm_xor_si128(c2[832],_mm_xor_si128(c2[1809],_mm_xor_si128(c2[1531],c2[1671]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+     d2[49]=simde_mm_xor_si128(c2[1966],simde_mm_xor_si128(c2[2106],simde_mm_xor_si128(c2[1961],simde_mm_xor_si128(c2[1685],simde_mm_xor_si128(c2[1540],simde_mm_xor_si128(c2[2104],simde_mm_xor_si128(c2[1966],simde_mm_xor_si128(c2[576],simde_mm_xor_si128(c2[716],simde_mm_xor_si128(c2[578],simde_mm_xor_si128(c2[1417],simde_mm_xor_si128(c2[1279],simde_mm_xor_si128(c2[14],simde_mm_xor_si128(c2[1975],simde_mm_xor_si128(c2[2115],simde_mm_xor_si128(c2[1000],simde_mm_xor_si128(c2[1851],simde_mm_xor_si128(c2[1991],simde_mm_xor_si128(c2[1853],simde_mm_xor_si128(c2[1850],simde_mm_xor_si128(c2[1572],simde_mm_xor_si128(c2[1712],simde_mm_xor_si128(c2[1588],simde_mm_xor_si128(c2[1728],simde_mm_xor_si128(c2[1583],simde_mm_xor_si128(c2[467],simde_mm_xor_si128(c2[322],simde_mm_xor_si128(c2[747],simde_mm_xor_si128(c2[602],simde_mm_xor_si128(c2[1458],simde_mm_xor_si128(c2[1320],simde_mm_xor_si128(c2[896],simde_mm_xor_si128(c2[758],simde_mm_xor_si128(c2[1600],simde_mm_xor_si128(c2[1322],simde_mm_xor_si128(c2[1462],simde_mm_xor_si128(c2[495],simde_mm_xor_si128(c2[350],simde_mm_xor_si128(c2[493],simde_mm_xor_si128(c2[215],simde_mm_xor_si128(c2[355],simde_mm_xor_si128(c2[1890],simde_mm_xor_si128(c2[785],simde_mm_xor_si128(c2[925],simde_mm_xor_si128(c2[787],simde_mm_xor_si128(c2[1486],simde_mm_xor_si128(c2[1348],simde_mm_xor_si128(c2[507],simde_mm_xor_si128(c2[229],simde_mm_xor_si128(c2[369],simde_mm_xor_si128(c2[2198],simde_mm_xor_si128(c2[2060],simde_mm_xor_si128(c2[800],simde_mm_xor_si128(c2[522],simde_mm_xor_si128(c2[662],simde_mm_xor_si128(c2[1364],simde_mm_xor_si128(c2[676],simde_mm_xor_si128(c2[538],simde_mm_xor_si128(c2[393],simde_mm_xor_si128(c2[255],simde_mm_xor_si128(c2[537],simde_mm_xor_si128(c2[252],simde_mm_xor_si128(c2[392],simde_mm_xor_si128(c2[2088],simde_mm_xor_si128(c2[2228],simde_mm_xor_si128(c2[2090],simde_mm_xor_si128(c2[970],simde_mm_xor_si128(c2[832],simde_mm_xor_si128(c2[1809],simde_mm_xor_si128(c2[1531],c2[1671]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
 
 //row: 8
-     d2[56]=_mm_xor_si128(c2[0],_mm_xor_si128(c2[140],_mm_xor_si128(c2[1825],_mm_xor_si128(c2[1965],_mm_xor_si128(c2[145],_mm_xor_si128(c2[422],_mm_xor_si128(c2[856],_mm_xor_si128(c2[996],_mm_xor_si128(c2[1697],_mm_xor_si128(c2[294],_mm_xor_si128(c2[1556],_mm_xor_si128(c2[2131],_mm_xor_si128(c2[32],_mm_xor_si128(c2[2130],_mm_xor_si128(c2[1868],_mm_xor_si128(c2[2008],_mm_xor_si128(c2[607],_mm_xor_si128(c2[747],_mm_xor_si128(c2[1027],_mm_xor_si128(c2[1598],_mm_xor_si128(c2[1738],_mm_xor_si128(c2[1176],_mm_xor_si128(c2[1880],_mm_xor_si128(c2[635],_mm_xor_si128(c2[775],_mm_xor_si128(c2[773],_mm_xor_si128(c2[1065],_mm_xor_si128(c2[1205],_mm_xor_si128(c2[1626],_mm_xor_si128(c2[1766],_mm_xor_si128(c2[787],_mm_xor_si128(c2[99],_mm_xor_si128(c2[239],_mm_xor_si128(c2[1080],_mm_xor_si128(c2[816],_mm_xor_si128(c2[956],_mm_xor_si128(c2[673],_mm_xor_si128(c2[817],_mm_xor_si128(c2[129],_mm_xor_si128(c2[269],_mm_xor_si128(c2[1110],_mm_xor_si128(c2[1250],c2[2089]))))))))))))))))))))))))))))))))))))))))));
+     d2[56]=simde_mm_xor_si128(c2[0],simde_mm_xor_si128(c2[140],simde_mm_xor_si128(c2[1825],simde_mm_xor_si128(c2[1965],simde_mm_xor_si128(c2[145],simde_mm_xor_si128(c2[422],simde_mm_xor_si128(c2[856],simde_mm_xor_si128(c2[996],simde_mm_xor_si128(c2[1697],simde_mm_xor_si128(c2[294],simde_mm_xor_si128(c2[1556],simde_mm_xor_si128(c2[2131],simde_mm_xor_si128(c2[32],simde_mm_xor_si128(c2[2130],simde_mm_xor_si128(c2[1868],simde_mm_xor_si128(c2[2008],simde_mm_xor_si128(c2[607],simde_mm_xor_si128(c2[747],simde_mm_xor_si128(c2[1027],simde_mm_xor_si128(c2[1598],simde_mm_xor_si128(c2[1738],simde_mm_xor_si128(c2[1176],simde_mm_xor_si128(c2[1880],simde_mm_xor_si128(c2[635],simde_mm_xor_si128(c2[775],simde_mm_xor_si128(c2[773],simde_mm_xor_si128(c2[1065],simde_mm_xor_si128(c2[1205],simde_mm_xor_si128(c2[1626],simde_mm_xor_si128(c2[1766],simde_mm_xor_si128(c2[787],simde_mm_xor_si128(c2[99],simde_mm_xor_si128(c2[239],simde_mm_xor_si128(c2[1080],simde_mm_xor_si128(c2[816],simde_mm_xor_si128(c2[956],simde_mm_xor_si128(c2[673],simde_mm_xor_si128(c2[817],simde_mm_xor_si128(c2[129],simde_mm_xor_si128(c2[269],simde_mm_xor_si128(c2[1110],simde_mm_xor_si128(c2[1250],c2[2089]))))))))))))))))))))))))))))))))))))))))));
 
 //row: 9
-     d2[63]=_mm_xor_si128(c2[2],_mm_xor_si128(c2[1542],_mm_xor_si128(c2[1682],_mm_xor_si128(c2[1820],_mm_xor_si128(c2[1261],_mm_xor_si128(c2[0],_mm_xor_si128(c2[1680],_mm_xor_si128(c2[858],_mm_xor_si128(c2[159],_mm_xor_si128(c2[299],_mm_xor_si128(c2[1559],_mm_xor_si128(c2[1000],_mm_xor_si128(c2[156],_mm_xor_si128(c2[1836],_mm_xor_si128(c2[294],_mm_xor_si128(c2[2133],_mm_xor_si128(c2[1434],_mm_xor_si128(c2[1574],_mm_xor_si128(c2[1992],_mm_xor_si128(c2[1433],_mm_xor_si128(c2[1863],_mm_xor_si128(c2[1164],_mm_xor_si128(c2[1304],_mm_xor_si128(c2[602],_mm_xor_si128(c2[43],_mm_xor_si128(c2[882],_mm_xor_si128(c2[323],_mm_xor_si128(c2[1600],_mm_xor_si128(c2[1041],_mm_xor_si128(c2[1038],_mm_xor_si128(c2[479],_mm_xor_si128(c2[1742],_mm_xor_si128(c2[1176],_mm_xor_si128(c2[630],_mm_xor_si128(c2[71],_mm_xor_si128(c2[635],_mm_xor_si128(c2[76],_mm_xor_si128(c2[1067],_mm_xor_si128(c2[368],_mm_xor_si128(c2[508],_mm_xor_si128(c2[1628],_mm_xor_si128(c2[1069],_mm_xor_si128(c2[649],_mm_xor_si128(c2[90],_mm_xor_si128(c2[101],_mm_xor_si128(c2[1781],_mm_xor_si128(c2[942],_mm_xor_si128(c2[383],_mm_xor_si128(c2[818],_mm_xor_si128(c2[252],_mm_xor_si128(c2[535],_mm_xor_si128(c2[2215],_mm_xor_si128(c2[672],_mm_xor_si128(c2[113],_mm_xor_si128(c2[2073],_mm_xor_si128(c2[131],_mm_xor_si128(c2[1671],_mm_xor_si128(c2[1811],_mm_xor_si128(c2[1112],_mm_xor_si128(c2[546],_mm_xor_si128(c2[1951],c2[1392])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+     d2[63]=simde_mm_xor_si128(c2[2],simde_mm_xor_si128(c2[1542],simde_mm_xor_si128(c2[1682],simde_mm_xor_si128(c2[1820],simde_mm_xor_si128(c2[1261],simde_mm_xor_si128(c2[0],simde_mm_xor_si128(c2[1680],simde_mm_xor_si128(c2[858],simde_mm_xor_si128(c2[159],simde_mm_xor_si128(c2[299],simde_mm_xor_si128(c2[1559],simde_mm_xor_si128(c2[1000],simde_mm_xor_si128(c2[156],simde_mm_xor_si128(c2[1836],simde_mm_xor_si128(c2[294],simde_mm_xor_si128(c2[2133],simde_mm_xor_si128(c2[1434],simde_mm_xor_si128(c2[1574],simde_mm_xor_si128(c2[1992],simde_mm_xor_si128(c2[1433],simde_mm_xor_si128(c2[1863],simde_mm_xor_si128(c2[1164],simde_mm_xor_si128(c2[1304],simde_mm_xor_si128(c2[602],simde_mm_xor_si128(c2[43],simde_mm_xor_si128(c2[882],simde_mm_xor_si128(c2[323],simde_mm_xor_si128(c2[1600],simde_mm_xor_si128(c2[1041],simde_mm_xor_si128(c2[1038],simde_mm_xor_si128(c2[479],simde_mm_xor_si128(c2[1742],simde_mm_xor_si128(c2[1176],simde_mm_xor_si128(c2[630],simde_mm_xor_si128(c2[71],simde_mm_xor_si128(c2[635],simde_mm_xor_si128(c2[76],simde_mm_xor_si128(c2[1067],simde_mm_xor_si128(c2[368],simde_mm_xor_si128(c2[508],simde_mm_xor_si128(c2[1628],simde_mm_xor_si128(c2[1069],simde_mm_xor_si128(c2[649],simde_mm_xor_si128(c2[90],simde_mm_xor_si128(c2[101],simde_mm_xor_si128(c2[1781],simde_mm_xor_si128(c2[942],simde_mm_xor_si128(c2[383],simde_mm_xor_si128(c2[818],simde_mm_xor_si128(c2[252],simde_mm_xor_si128(c2[535],simde_mm_xor_si128(c2[2215],simde_mm_xor_si128(c2[672],simde_mm_xor_si128(c2[113],simde_mm_xor_si128(c2[2073],simde_mm_xor_si128(c2[131],simde_mm_xor_si128(c2[1671],simde_mm_xor_si128(c2[1811],simde_mm_xor_si128(c2[1112],simde_mm_xor_si128(c2[546],simde_mm_xor_si128(c2[1951],c2[1392])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
 
 //row: 10
-     d2[70]=_mm_xor_si128(c2[1963],_mm_xor_si128(c2[856],_mm_xor_si128(c2[1204],c2[1221])));
+     d2[70]=simde_mm_xor_si128(c2[1963],simde_mm_xor_si128(c2[856],simde_mm_xor_si128(c2[1204],c2[1221])));
 
 //row: 11
-     d2[77]=_mm_xor_si128(c2[1683],_mm_xor_si128(c2[1262],_mm_xor_si128(c2[1681],_mm_xor_si128(c2[426],_mm_xor_si128(c2[300],_mm_xor_si128(c2[994],_mm_xor_si128(c2[1697],_mm_xor_si128(c2[1837],_mm_xor_si128(c2[1568],_mm_xor_si128(c2[1294],_mm_xor_si128(c2[1434],_mm_xor_si128(c2[1305],_mm_xor_si128(c2[44],_mm_xor_si128(c2[324],_mm_xor_si128(c2[1042],_mm_xor_si128(c2[480],_mm_xor_si128(c2[1037],_mm_xor_si128(c2[1177],_mm_xor_si128(c2[72],_mm_xor_si128(c2[2176],_mm_xor_si128(c2[70],_mm_xor_si128(c2[509],_mm_xor_si128(c2[1070],_mm_xor_si128(c2[2190],_mm_xor_si128(c2[84],_mm_xor_si128(c2[1782],_mm_xor_si128(c2[244],_mm_xor_si128(c2[384],_mm_xor_si128(c2[1499],_mm_xor_si128(c2[253],_mm_xor_si128(c2[2216],_mm_xor_si128(c2[2213],_mm_xor_si128(c2[114],_mm_xor_si128(c2[1812],_mm_xor_si128(c2[547],_mm_xor_si128(c2[1246],_mm_xor_si128(c2[1386],c2[2092])))))))))))))))))))))))))))))))))))));
+     d2[77]=simde_mm_xor_si128(c2[1683],simde_mm_xor_si128(c2[1262],simde_mm_xor_si128(c2[1681],simde_mm_xor_si128(c2[426],simde_mm_xor_si128(c2[300],simde_mm_xor_si128(c2[994],simde_mm_xor_si128(c2[1697],simde_mm_xor_si128(c2[1837],simde_mm_xor_si128(c2[1568],simde_mm_xor_si128(c2[1294],simde_mm_xor_si128(c2[1434],simde_mm_xor_si128(c2[1305],simde_mm_xor_si128(c2[44],simde_mm_xor_si128(c2[324],simde_mm_xor_si128(c2[1042],simde_mm_xor_si128(c2[480],simde_mm_xor_si128(c2[1037],simde_mm_xor_si128(c2[1177],simde_mm_xor_si128(c2[72],simde_mm_xor_si128(c2[2176],simde_mm_xor_si128(c2[70],simde_mm_xor_si128(c2[509],simde_mm_xor_si128(c2[1070],simde_mm_xor_si128(c2[2190],simde_mm_xor_si128(c2[84],simde_mm_xor_si128(c2[1782],simde_mm_xor_si128(c2[244],simde_mm_xor_si128(c2[384],simde_mm_xor_si128(c2[1499],simde_mm_xor_si128(c2[253],simde_mm_xor_si128(c2[2216],simde_mm_xor_si128(c2[2213],simde_mm_xor_si128(c2[114],simde_mm_xor_si128(c2[1812],simde_mm_xor_si128(c2[547],simde_mm_xor_si128(c2[1246],simde_mm_xor_si128(c2[1386],c2[2092])))))))))))))))))))))))))))))))))))));
 
 //row: 12
-     d2[84]=_mm_xor_si128(c2[704],_mm_xor_si128(c2[844],_mm_xor_si128(c2[423],_mm_xor_si128(c2[842],_mm_xor_si128(c2[1560],_mm_xor_si128(c2[1700],_mm_xor_si128(c2[155],_mm_xor_si128(c2[998],_mm_xor_si128(c2[1976],_mm_xor_si128(c2[589],_mm_xor_si128(c2[729],_mm_xor_si128(c2[588],_mm_xor_si128(c2[326],_mm_xor_si128(c2[466],_mm_xor_si128(c2[1444],_mm_xor_si128(c2[1724],_mm_xor_si128(c2[1308],_mm_xor_si128(c2[196],_mm_xor_si128(c2[1880],_mm_xor_si128(c2[338],_mm_xor_si128(c2[1472],_mm_xor_si128(c2[1470],_mm_xor_si128(c2[1769],_mm_xor_si128(c2[1909],_mm_xor_si128(c2[224],_mm_xor_si128(c2[1484],_mm_xor_si128(c2[943],_mm_xor_si128(c2[1784],_mm_xor_si128(c2[1653],_mm_xor_si128(c2[1377],_mm_xor_si128(c2[1514],_mm_xor_si128(c2[826],_mm_xor_si128(c2[966],_mm_xor_si128(c2[1947],c2[547]))))))))))))))))))))))))))))))))));
+     d2[84]=simde_mm_xor_si128(c2[704],simde_mm_xor_si128(c2[844],simde_mm_xor_si128(c2[423],simde_mm_xor_si128(c2[842],simde_mm_xor_si128(c2[1560],simde_mm_xor_si128(c2[1700],simde_mm_xor_si128(c2[155],simde_mm_xor_si128(c2[998],simde_mm_xor_si128(c2[1976],simde_mm_xor_si128(c2[589],simde_mm_xor_si128(c2[729],simde_mm_xor_si128(c2[588],simde_mm_xor_si128(c2[326],simde_mm_xor_si128(c2[466],simde_mm_xor_si128(c2[1444],simde_mm_xor_si128(c2[1724],simde_mm_xor_si128(c2[1308],simde_mm_xor_si128(c2[196],simde_mm_xor_si128(c2[1880],simde_mm_xor_si128(c2[338],simde_mm_xor_si128(c2[1472],simde_mm_xor_si128(c2[1470],simde_mm_xor_si128(c2[1769],simde_mm_xor_si128(c2[1909],simde_mm_xor_si128(c2[224],simde_mm_xor_si128(c2[1484],simde_mm_xor_si128(c2[943],simde_mm_xor_si128(c2[1784],simde_mm_xor_si128(c2[1653],simde_mm_xor_si128(c2[1377],simde_mm_xor_si128(c2[1514],simde_mm_xor_si128(c2[826],simde_mm_xor_si128(c2[966],simde_mm_xor_si128(c2[1947],c2[547]))))))))))))))))))))))))))))))))));
 
 //row: 13
-     d2[91]=_mm_xor_si128(c2[706],_mm_xor_si128(c2[285],_mm_xor_si128(c2[704],_mm_xor_si128(c2[702],_mm_xor_si128(c2[1555],_mm_xor_si128(c2[17],_mm_xor_si128(c2[720],_mm_xor_si128(c2[860],_mm_xor_si128(c2[154],_mm_xor_si128(c2[591],_mm_xor_si128(c2[310],_mm_xor_si128(c2[450],_mm_xor_si128(c2[328],_mm_xor_si128(c2[1306],_mm_xor_si128(c2[1586],_mm_xor_si128(c2[58],_mm_xor_si128(c2[1742],_mm_xor_si128(c2[60],_mm_xor_si128(c2[200],_mm_xor_si128(c2[1334],_mm_xor_si128(c2[1192],_mm_xor_si128(c2[1332],_mm_xor_si128(c2[1764],_mm_xor_si128(c2[86],_mm_xor_si128(c2[1206],_mm_xor_si128(c2[1346],_mm_xor_si128(c2[798],_mm_xor_si128(c2[1499],_mm_xor_si128(c2[1639],_mm_xor_si128(c2[1515],_mm_xor_si128(c2[1232],_mm_xor_si128(c2[1236],_mm_xor_si128(c2[1376],_mm_xor_si128(c2[2213],_mm_xor_si128(c2[828],_mm_xor_si128(c2[1809],_mm_xor_si128(c2[269],c2[409])))))))))))))))))))))))))))))))))))));
+     d2[91]=simde_mm_xor_si128(c2[706],simde_mm_xor_si128(c2[285],simde_mm_xor_si128(c2[704],simde_mm_xor_si128(c2[702],simde_mm_xor_si128(c2[1555],simde_mm_xor_si128(c2[17],simde_mm_xor_si128(c2[720],simde_mm_xor_si128(c2[860],simde_mm_xor_si128(c2[154],simde_mm_xor_si128(c2[591],simde_mm_xor_si128(c2[310],simde_mm_xor_si128(c2[450],simde_mm_xor_si128(c2[328],simde_mm_xor_si128(c2[1306],simde_mm_xor_si128(c2[1586],simde_mm_xor_si128(c2[58],simde_mm_xor_si128(c2[1742],simde_mm_xor_si128(c2[60],simde_mm_xor_si128(c2[200],simde_mm_xor_si128(c2[1334],simde_mm_xor_si128(c2[1192],simde_mm_xor_si128(c2[1332],simde_mm_xor_si128(c2[1764],simde_mm_xor_si128(c2[86],simde_mm_xor_si128(c2[1206],simde_mm_xor_si128(c2[1346],simde_mm_xor_si128(c2[798],simde_mm_xor_si128(c2[1499],simde_mm_xor_si128(c2[1639],simde_mm_xor_si128(c2[1515],simde_mm_xor_si128(c2[1232],simde_mm_xor_si128(c2[1236],simde_mm_xor_si128(c2[1376],simde_mm_xor_si128(c2[2213],simde_mm_xor_si128(c2[828],simde_mm_xor_si128(c2[1809],simde_mm_xor_si128(c2[269],c2[409])))))))))))))))))))))))))))))))))))));
 
 //row: 14
-     d2[98]=_mm_xor_si128(c2[0],_mm_xor_si128(c2[140],_mm_xor_si128(c2[1824],_mm_xor_si128(c2[1965],_mm_xor_si128(c2[1403],_mm_xor_si128(c2[145],_mm_xor_si128(c2[1822],_mm_xor_si128(c2[856],_mm_xor_si128(c2[996],_mm_xor_si128(c2[434],_mm_xor_si128(c2[1697],_mm_xor_si128(c2[1135],_mm_xor_si128(c2[294],_mm_xor_si128(c2[1838],_mm_xor_si128(c2[1978],_mm_xor_si128(c2[576],_mm_xor_si128(c2[2131],_mm_xor_si128(c2[32],_mm_xor_si128(c2[1709],_mm_xor_si128(c2[2130],_mm_xor_si128(c2[1428],_mm_xor_si128(c2[1568],_mm_xor_si128(c2[1868],_mm_xor_si128(c2[2008],_mm_xor_si128(c2[1446],_mm_xor_si128(c2[747],_mm_xor_si128(c2[185],_mm_xor_si128(c2[1027],_mm_xor_si128(c2[465],_mm_xor_si128(c2[1738],_mm_xor_si128(c2[1176],_mm_xor_si128(c2[1176],_mm_xor_si128(c2[621],_mm_xor_si128(c2[1880],_mm_xor_si128(c2[1178],_mm_xor_si128(c2[1318],_mm_xor_si128(c2[775],_mm_xor_si128(c2[213],_mm_xor_si128(c2[773],_mm_xor_si128(c2[71],_mm_xor_si128(c2[211],_mm_xor_si128(c2[1065],_mm_xor_si128(c2[1205],_mm_xor_si128(c2[650],_mm_xor_si128(c2[1766],_mm_xor_si128(c2[1204],_mm_xor_si128(c2[787],_mm_xor_si128(c2[85],_mm_xor_si128(c2[225],_mm_xor_si128(c2[2189],_mm_xor_si128(c2[239],_mm_xor_si128(c2[1923],_mm_xor_si128(c2[1080],_mm_xor_si128(c2[378],_mm_xor_si128(c2[518],_mm_xor_si128(c2[956],_mm_xor_si128(c2[394],_mm_xor_si128(c2[673],_mm_xor_si128(c2[118],_mm_xor_si128(c2[817],_mm_xor_si128(c2[115],_mm_xor_si128(c2[255],_mm_xor_si128(c2[129],_mm_xor_si128(c2[269],_mm_xor_si128(c2[1946],_mm_xor_si128(c2[1250],_mm_xor_si128(c2[688],_mm_xor_si128(c2[2089],_mm_xor_si128(c2[1387],c2[1527])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+     d2[98]=simde_mm_xor_si128(c2[0],simde_mm_xor_si128(c2[140],simde_mm_xor_si128(c2[1824],simde_mm_xor_si128(c2[1965],simde_mm_xor_si128(c2[1403],simde_mm_xor_si128(c2[145],simde_mm_xor_si128(c2[1822],simde_mm_xor_si128(c2[856],simde_mm_xor_si128(c2[996],simde_mm_xor_si128(c2[434],simde_mm_xor_si128(c2[1697],simde_mm_xor_si128(c2[1135],simde_mm_xor_si128(c2[294],simde_mm_xor_si128(c2[1838],simde_mm_xor_si128(c2[1978],simde_mm_xor_si128(c2[576],simde_mm_xor_si128(c2[2131],simde_mm_xor_si128(c2[32],simde_mm_xor_si128(c2[1709],simde_mm_xor_si128(c2[2130],simde_mm_xor_si128(c2[1428],simde_mm_xor_si128(c2[1568],simde_mm_xor_si128(c2[1868],simde_mm_xor_si128(c2[2008],simde_mm_xor_si128(c2[1446],simde_mm_xor_si128(c2[747],simde_mm_xor_si128(c2[185],simde_mm_xor_si128(c2[1027],simde_mm_xor_si128(c2[465],simde_mm_xor_si128(c2[1738],simde_mm_xor_si128(c2[1176],simde_mm_xor_si128(c2[1176],simde_mm_xor_si128(c2[621],simde_mm_xor_si128(c2[1880],simde_mm_xor_si128(c2[1178],simde_mm_xor_si128(c2[1318],simde_mm_xor_si128(c2[775],simde_mm_xor_si128(c2[213],simde_mm_xor_si128(c2[773],simde_mm_xor_si128(c2[71],simde_mm_xor_si128(c2[211],simde_mm_xor_si128(c2[1065],simde_mm_xor_si128(c2[1205],simde_mm_xor_si128(c2[650],simde_mm_xor_si128(c2[1766],simde_mm_xor_si128(c2[1204],simde_mm_xor_si128(c2[787],simde_mm_xor_si128(c2[85],simde_mm_xor_si128(c2[225],simde_mm_xor_si128(c2[2189],simde_mm_xor_si128(c2[239],simde_mm_xor_si128(c2[1923],simde_mm_xor_si128(c2[1080],simde_mm_xor_si128(c2[378],simde_mm_xor_si128(c2[518],simde_mm_xor_si128(c2[956],simde_mm_xor_si128(c2[394],simde_mm_xor_si128(c2[673],simde_mm_xor_si128(c2[118],simde_mm_xor_si128(c2[817],simde_mm_xor_si128(c2[115],simde_mm_xor_si128(c2[255],simde_mm_xor_si128(c2[129],simde_mm_xor_si128(c2[269],simde_mm_xor_si128(c2[1946],simde_mm_xor_si128(c2[1250],simde_mm_xor_si128(c2[688],simde_mm_xor_si128(c2[2089],simde_mm_xor_si128(c2[1387],c2[1527])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
 
 //row: 15
-     d2[105]=_mm_xor_si128(c2[3],_mm_xor_si128(c2[844],_mm_xor_si128(c2[984],_mm_xor_si128(c2[1821],_mm_xor_si128(c2[563],_mm_xor_si128(c2[1],_mm_xor_si128(c2[982],_mm_xor_si128(c2[560],_mm_xor_si128(c2[859],_mm_xor_si128(c2[1700],_mm_xor_si128(c2[1840],_mm_xor_si128(c2[1560],_mm_xor_si128(c2[295],_mm_xor_si128(c2[157],_mm_xor_si128(c2[1138],_mm_xor_si128(c2[2134],_mm_xor_si128(c2[729],_mm_xor_si128(c2[869],_mm_xor_si128(c2[1993],_mm_xor_si128(c2[728],_mm_xor_si128(c2[1864],_mm_xor_si128(c2[466],_mm_xor_si128(c2[606],_mm_xor_si128(c2[603],_mm_xor_si128(c2[1584],_mm_xor_si128(c2[883],_mm_xor_si128(c2[1864],_mm_xor_si128(c2[1601],_mm_xor_si128(c2[336],_mm_xor_si128(c2[1039],_mm_xor_si128(c2[2020],_mm_xor_si128(c2[1736],_mm_xor_si128(c2[478],_mm_xor_si128(c2[631],_mm_xor_si128(c2[1612],_mm_xor_si128(c2[636],_mm_xor_si128(c2[1610],_mm_xor_si128(c2[1068],_mm_xor_si128(c2[1909],_mm_xor_si128(c2[2049],_mm_xor_si128(c2[1629],_mm_xor_si128(c2[364],_mm_xor_si128(c2[650],_mm_xor_si128(c2[1624],_mm_xor_si128(c2[102],_mm_xor_si128(c2[1083],_mm_xor_si128(c2[943],_mm_xor_si128(c2[1924],_mm_xor_si128(c2[812],_mm_xor_si128(c2[1793],_mm_xor_si128(c2[536],_mm_xor_si128(c2[1517],_mm_xor_si128(c2[673],_mm_xor_si128(c2[1654],_mm_xor_si128(c2[132],_mm_xor_si128(c2[966],_mm_xor_si128(c2[1106],_mm_xor_si128(c2[1106],_mm_xor_si128(c2[2087],_mm_xor_si128(c2[1952],c2[687]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+     d2[105]=simde_mm_xor_si128(c2[3],simde_mm_xor_si128(c2[844],simde_mm_xor_si128(c2[984],simde_mm_xor_si128(c2[1821],simde_mm_xor_si128(c2[563],simde_mm_xor_si128(c2[1],simde_mm_xor_si128(c2[982],simde_mm_xor_si128(c2[560],simde_mm_xor_si128(c2[859],simde_mm_xor_si128(c2[1700],simde_mm_xor_si128(c2[1840],simde_mm_xor_si128(c2[1560],simde_mm_xor_si128(c2[295],simde_mm_xor_si128(c2[157],simde_mm_xor_si128(c2[1138],simde_mm_xor_si128(c2[2134],simde_mm_xor_si128(c2[729],simde_mm_xor_si128(c2[869],simde_mm_xor_si128(c2[1993],simde_mm_xor_si128(c2[728],simde_mm_xor_si128(c2[1864],simde_mm_xor_si128(c2[466],simde_mm_xor_si128(c2[606],simde_mm_xor_si128(c2[603],simde_mm_xor_si128(c2[1584],simde_mm_xor_si128(c2[883],simde_mm_xor_si128(c2[1864],simde_mm_xor_si128(c2[1601],simde_mm_xor_si128(c2[336],simde_mm_xor_si128(c2[1039],simde_mm_xor_si128(c2[2020],simde_mm_xor_si128(c2[1736],simde_mm_xor_si128(c2[478],simde_mm_xor_si128(c2[631],simde_mm_xor_si128(c2[1612],simde_mm_xor_si128(c2[636],simde_mm_xor_si128(c2[1610],simde_mm_xor_si128(c2[1068],simde_mm_xor_si128(c2[1909],simde_mm_xor_si128(c2[2049],simde_mm_xor_si128(c2[1629],simde_mm_xor_si128(c2[364],simde_mm_xor_si128(c2[650],simde_mm_xor_si128(c2[1624],simde_mm_xor_si128(c2[102],simde_mm_xor_si128(c2[1083],simde_mm_xor_si128(c2[943],simde_mm_xor_si128(c2[1924],simde_mm_xor_si128(c2[812],simde_mm_xor_si128(c2[1793],simde_mm_xor_si128(c2[536],simde_mm_xor_si128(c2[1517],simde_mm_xor_si128(c2[673],simde_mm_xor_si128(c2[1654],simde_mm_xor_si128(c2[132],simde_mm_xor_si128(c2[966],simde_mm_xor_si128(c2[1106],simde_mm_xor_si128(c2[1106],simde_mm_xor_si128(c2[2087],simde_mm_xor_si128(c2[1952],c2[687]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
 
 //row: 16
-     d2[112]=_mm_xor_si128(c2[1965],_mm_xor_si128(c2[2105],_mm_xor_si128(c2[1263],_mm_xor_si128(c2[1403],_mm_xor_si128(c2[1684],_mm_xor_si128(c2[842],_mm_xor_si128(c2[982],_mm_xor_si128(c2[2103],_mm_xor_si128(c2[1401],_mm_xor_si128(c2[575],_mm_xor_si128(c2[715],_mm_xor_si128(c2[2119],_mm_xor_si128(c2[20],_mm_xor_si128(c2[1416],_mm_xor_si128(c2[714],_mm_xor_si128(c2[20],_mm_xor_si128(c2[1557],_mm_xor_si128(c2[1558],_mm_xor_si128(c2[1850],_mm_xor_si128(c2[1990],_mm_xor_si128(c2[1148],_mm_xor_si128(c2[1288],_mm_xor_si128(c2[1849],_mm_xor_si128(c2[1154],_mm_xor_si128(c2[1587],_mm_xor_si128(c2[1727],_mm_xor_si128(c2[885],_mm_xor_si128(c2[1025],_mm_xor_si128(c2[466],_mm_xor_si128(c2[1863],_mm_xor_si128(c2[2003],_mm_xor_si128(c2[746],_mm_xor_si128(c2[44],_mm_xor_si128(c2[1457],_mm_xor_si128(c2[622],_mm_xor_si128(c2[762],_mm_xor_si128(c2[902],_mm_xor_si128(c2[200],_mm_xor_si128(c2[1599],_mm_xor_si128(c2[897],_mm_xor_si128(c2[494],_mm_xor_si128(c2[1891],_mm_xor_si128(c2[2031],_mm_xor_si128(c2[492],_mm_xor_si128(c2[2036],_mm_xor_si128(c2[784],_mm_xor_si128(c2[924],_mm_xor_si128(c2[89],_mm_xor_si128(c2[229],_mm_xor_si128(c2[1485],_mm_xor_si128(c2[650],_mm_xor_si128(c2[790],_mm_xor_si128(c2[506],_mm_xor_si128(c2[2050],_mm_xor_si128(c2[2204],_mm_xor_si128(c2[1362],_mm_xor_si128(c2[1502],_mm_xor_si128(c2[799],_mm_xor_si128(c2[104],_mm_xor_si128(c2[675],_mm_xor_si128(c2[2072],_mm_xor_si128(c2[2212],_mm_xor_si128(c2[392],_mm_xor_si128(c2[1936],_mm_xor_si128(c2[536],_mm_xor_si128(c2[2073],_mm_xor_si128(c2[2087],_mm_xor_si128(c2[2227],_mm_xor_si128(c2[1392],_mm_xor_si128(c2[1532],_mm_xor_si128(c2[969],_mm_xor_si128(c2[127],_mm_xor_si128(c2[267],_mm_xor_si128(c2[1808],_mm_xor_si128(c2[1106],c2[2088])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+     d2[112]=simde_mm_xor_si128(c2[1965],simde_mm_xor_si128(c2[2105],simde_mm_xor_si128(c2[1263],simde_mm_xor_si128(c2[1403],simde_mm_xor_si128(c2[1684],simde_mm_xor_si128(c2[842],simde_mm_xor_si128(c2[982],simde_mm_xor_si128(c2[2103],simde_mm_xor_si128(c2[1401],simde_mm_xor_si128(c2[575],simde_mm_xor_si128(c2[715],simde_mm_xor_si128(c2[2119],simde_mm_xor_si128(c2[20],simde_mm_xor_si128(c2[1416],simde_mm_xor_si128(c2[714],simde_mm_xor_si128(c2[20],simde_mm_xor_si128(c2[1557],simde_mm_xor_si128(c2[1558],simde_mm_xor_si128(c2[1850],simde_mm_xor_si128(c2[1990],simde_mm_xor_si128(c2[1148],simde_mm_xor_si128(c2[1288],simde_mm_xor_si128(c2[1849],simde_mm_xor_si128(c2[1154],simde_mm_xor_si128(c2[1587],simde_mm_xor_si128(c2[1727],simde_mm_xor_si128(c2[885],simde_mm_xor_si128(c2[1025],simde_mm_xor_si128(c2[466],simde_mm_xor_si128(c2[1863],simde_mm_xor_si128(c2[2003],simde_mm_xor_si128(c2[746],simde_mm_xor_si128(c2[44],simde_mm_xor_si128(c2[1457],simde_mm_xor_si128(c2[622],simde_mm_xor_si128(c2[762],simde_mm_xor_si128(c2[902],simde_mm_xor_si128(c2[200],simde_mm_xor_si128(c2[1599],simde_mm_xor_si128(c2[897],simde_mm_xor_si128(c2[494],simde_mm_xor_si128(c2[1891],simde_mm_xor_si128(c2[2031],simde_mm_xor_si128(c2[492],simde_mm_xor_si128(c2[2036],simde_mm_xor_si128(c2[784],simde_mm_xor_si128(c2[924],simde_mm_xor_si128(c2[89],simde_mm_xor_si128(c2[229],simde_mm_xor_si128(c2[1485],simde_mm_xor_si128(c2[650],simde_mm_xor_si128(c2[790],simde_mm_xor_si128(c2[506],simde_mm_xor_si128(c2[2050],simde_mm_xor_si128(c2[2204],simde_mm_xor_si128(c2[1362],simde_mm_xor_si128(c2[1502],simde_mm_xor_si128(c2[799],simde_mm_xor_si128(c2[104],simde_mm_xor_si128(c2[675],simde_mm_xor_si128(c2[2072],simde_mm_xor_si128(c2[2212],simde_mm_xor_si128(c2[392],simde_mm_xor_si128(c2[1936],simde_mm_xor_si128(c2[536],simde_mm_xor_si128(c2[2073],simde_mm_xor_si128(c2[2087],simde_mm_xor_si128(c2[2227],simde_mm_xor_si128(c2[1392],simde_mm_xor_si128(c2[1532],simde_mm_xor_si128(c2[969],simde_mm_xor_si128(c2[127],simde_mm_xor_si128(c2[267],simde_mm_xor_si128(c2[1808],simde_mm_xor_si128(c2[1106],c2[2088])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
 
 //row: 17
-     d2[119]=_mm_xor_si128(c2[843],_mm_xor_si128(c2[983],_mm_xor_si128(c2[1404],_mm_xor_si128(c2[1544],_mm_xor_si128(c2[562],_mm_xor_si128(c2[983],_mm_xor_si128(c2[1123],_mm_xor_si128(c2[981],_mm_xor_si128(c2[1542],_mm_xor_si128(c2[1699],_mm_xor_si128(c2[1839],_mm_xor_si128(c2[14],_mm_xor_si128(c2[154],_mm_xor_si128(c2[294],_mm_xor_si128(c2[855],_mm_xor_si128(c2[1137],_mm_xor_si128(c2[1698],_mm_xor_si128(c2[17],_mm_xor_si128(c2[728],_mm_xor_si128(c2[868],_mm_xor_si128(c2[1289],_mm_xor_si128(c2[1429],_mm_xor_si128(c2[734],_mm_xor_si128(c2[1288],_mm_xor_si128(c2[465],_mm_xor_si128(c2[605],_mm_xor_si128(c2[1026],_mm_xor_si128(c2[1166],_mm_xor_si128(c2[1583],_mm_xor_si128(c2[2004],_mm_xor_si128(c2[2144],_mm_xor_si128(c2[1863],_mm_xor_si128(c2[185],_mm_xor_si128(c2[342],_mm_xor_si128(c2[756],_mm_xor_si128(c2[896],_mm_xor_si128(c2[2019],_mm_xor_si128(c2[341],_mm_xor_si128(c2[477],_mm_xor_si128(c2[1038],_mm_xor_si128(c2[1611],_mm_xor_si128(c2[2032],_mm_xor_si128(c2[2172],_mm_xor_si128(c2[1616],_mm_xor_si128(c2[2170],_mm_xor_si128(c2[631],_mm_xor_si128(c2[1908],_mm_xor_si128(c2[2048],_mm_xor_si128(c2[230],_mm_xor_si128(c2[370],_mm_xor_si128(c2[370],_mm_xor_si128(c2[784],_mm_xor_si128(c2[924],_mm_xor_si128(c2[1630],_mm_xor_si128(c2[2184],_mm_xor_si128(c2[1082],_mm_xor_si128(c2[1503],_mm_xor_si128(c2[1643],_mm_xor_si128(c2[1923],_mm_xor_si128(c2[238],_mm_xor_si128(c2[1792],_mm_xor_si128(c2[2213],_mm_xor_si128(c2[114],_mm_xor_si128(c2[1516],_mm_xor_si128(c2[2077],_mm_xor_si128(c2[1653],_mm_xor_si128(c2[2214],_mm_xor_si128(c2[972],_mm_xor_si128(c2[1112],_mm_xor_si128(c2[1526],_mm_xor_si128(c2[1666],_mm_xor_si128(c2[2086],_mm_xor_si128(c2[268],_mm_xor_si128(c2[408],_mm_xor_si128(c2[686],c2[1247])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+     d2[119]=simde_mm_xor_si128(c2[843],simde_mm_xor_si128(c2[983],simde_mm_xor_si128(c2[1404],simde_mm_xor_si128(c2[1544],simde_mm_xor_si128(c2[562],simde_mm_xor_si128(c2[983],simde_mm_xor_si128(c2[1123],simde_mm_xor_si128(c2[981],simde_mm_xor_si128(c2[1542],simde_mm_xor_si128(c2[1699],simde_mm_xor_si128(c2[1839],simde_mm_xor_si128(c2[14],simde_mm_xor_si128(c2[154],simde_mm_xor_si128(c2[294],simde_mm_xor_si128(c2[855],simde_mm_xor_si128(c2[1137],simde_mm_xor_si128(c2[1698],simde_mm_xor_si128(c2[17],simde_mm_xor_si128(c2[728],simde_mm_xor_si128(c2[868],simde_mm_xor_si128(c2[1289],simde_mm_xor_si128(c2[1429],simde_mm_xor_si128(c2[734],simde_mm_xor_si128(c2[1288],simde_mm_xor_si128(c2[465],simde_mm_xor_si128(c2[605],simde_mm_xor_si128(c2[1026],simde_mm_xor_si128(c2[1166],simde_mm_xor_si128(c2[1583],simde_mm_xor_si128(c2[2004],simde_mm_xor_si128(c2[2144],simde_mm_xor_si128(c2[1863],simde_mm_xor_si128(c2[185],simde_mm_xor_si128(c2[342],simde_mm_xor_si128(c2[756],simde_mm_xor_si128(c2[896],simde_mm_xor_si128(c2[2019],simde_mm_xor_si128(c2[341],simde_mm_xor_si128(c2[477],simde_mm_xor_si128(c2[1038],simde_mm_xor_si128(c2[1611],simde_mm_xor_si128(c2[2032],simde_mm_xor_si128(c2[2172],simde_mm_xor_si128(c2[1616],simde_mm_xor_si128(c2[2170],simde_mm_xor_si128(c2[631],simde_mm_xor_si128(c2[1908],simde_mm_xor_si128(c2[2048],simde_mm_xor_si128(c2[230],simde_mm_xor_si128(c2[370],simde_mm_xor_si128(c2[370],simde_mm_xor_si128(c2[784],simde_mm_xor_si128(c2[924],simde_mm_xor_si128(c2[1630],simde_mm_xor_si128(c2[2184],simde_mm_xor_si128(c2[1082],simde_mm_xor_si128(c2[1503],simde_mm_xor_si128(c2[1643],simde_mm_xor_si128(c2[1923],simde_mm_xor_si128(c2[238],simde_mm_xor_si128(c2[1792],simde_mm_xor_si128(c2[2213],simde_mm_xor_si128(c2[114],simde_mm_xor_si128(c2[1516],simde_mm_xor_si128(c2[2077],simde_mm_xor_si128(c2[1653],simde_mm_xor_si128(c2[2214],simde_mm_xor_si128(c2[972],simde_mm_xor_si128(c2[1112],simde_mm_xor_si128(c2[1526],simde_mm_xor_si128(c2[1666],simde_mm_xor_si128(c2[2086],simde_mm_xor_si128(c2[268],simde_mm_xor_si128(c2[408],simde_mm_xor_si128(c2[686],c2[1247])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
 
 //row: 18
-     d2[126]=_mm_xor_si128(c2[564],_mm_xor_si128(c2[85],c2[1780]));
+     d2[126]=simde_mm_xor_si128(c2[564],simde_mm_xor_si128(c2[85],c2[1780]));
 
 //row: 19
-     d2[133]=_mm_xor_si128(c2[1962],_mm_xor_si128(c2[1541],_mm_xor_si128(c2[1960],_mm_xor_si128(c2[422],_mm_xor_si128(c2[579],_mm_xor_si128(c2[1280],_mm_xor_si128(c2[2116],_mm_xor_si128(c2[1415],_mm_xor_si128(c2[1854],_mm_xor_si128(c2[1713],_mm_xor_si128(c2[1584],_mm_xor_si128(c2[323],_mm_xor_si128(c2[603],_mm_xor_si128(c2[1321],_mm_xor_si128(c2[759],_mm_xor_si128(c2[1456],_mm_xor_si128(c2[351],_mm_xor_si128(c2[356],_mm_xor_si128(c2[788],_mm_xor_si128(c2[1349],_mm_xor_si128(c2[370],_mm_xor_si128(c2[2061],_mm_xor_si128(c2[663],_mm_xor_si128(c2[532],_mm_xor_si128(c2[256],_mm_xor_si128(c2[393],_mm_xor_si128(c2[2091],_mm_xor_si128(c2[826],c2[1672]))))))))))))))))))))))))))));
+     d2[133]=simde_mm_xor_si128(c2[1962],simde_mm_xor_si128(c2[1541],simde_mm_xor_si128(c2[1960],simde_mm_xor_si128(c2[422],simde_mm_xor_si128(c2[579],simde_mm_xor_si128(c2[1280],simde_mm_xor_si128(c2[2116],simde_mm_xor_si128(c2[1415],simde_mm_xor_si128(c2[1854],simde_mm_xor_si128(c2[1713],simde_mm_xor_si128(c2[1584],simde_mm_xor_si128(c2[323],simde_mm_xor_si128(c2[603],simde_mm_xor_si128(c2[1321],simde_mm_xor_si128(c2[759],simde_mm_xor_si128(c2[1456],simde_mm_xor_si128(c2[351],simde_mm_xor_si128(c2[356],simde_mm_xor_si128(c2[788],simde_mm_xor_si128(c2[1349],simde_mm_xor_si128(c2[370],simde_mm_xor_si128(c2[2061],simde_mm_xor_si128(c2[663],simde_mm_xor_si128(c2[532],simde_mm_xor_si128(c2[256],simde_mm_xor_si128(c2[393],simde_mm_xor_si128(c2[2091],simde_mm_xor_si128(c2[826],c2[1672]))))))))))))))))))))))))))));
 
 //row: 20
-     d2[140]=_mm_xor_si128(c2[706],_mm_xor_si128(c2[846],_mm_xor_si128(c2[425],_mm_xor_si128(c2[844],_mm_xor_si128(c2[1555],_mm_xor_si128(c2[1695],_mm_xor_si128(c2[157],_mm_xor_si128(c2[1000],_mm_xor_si128(c2[854],_mm_xor_si128(c2[591],_mm_xor_si128(c2[731],_mm_xor_si128(c2[590],_mm_xor_si128(c2[328],_mm_xor_si128(c2[468],_mm_xor_si128(c2[1446],_mm_xor_si128(c2[1726],_mm_xor_si128(c2[198],_mm_xor_si128(c2[1882],_mm_xor_si128(c2[340],_mm_xor_si128(c2[617],_mm_xor_si128(c2[1474],_mm_xor_si128(c2[1472],_mm_xor_si128(c2[1764],_mm_xor_si128(c2[1904],_mm_xor_si128(c2[226],_mm_xor_si128(c2[1486],_mm_xor_si128(c2[938],_mm_xor_si128(c2[1779],_mm_xor_si128(c2[1655],_mm_xor_si128(c2[1372],_mm_xor_si128(c2[1516],_mm_xor_si128(c2[828],_mm_xor_si128(c2[968],_mm_xor_si128(c2[1949],c2[549]))))))))))))))))))))))))))))))))));
+     d2[140]=simde_mm_xor_si128(c2[706],simde_mm_xor_si128(c2[846],simde_mm_xor_si128(c2[425],simde_mm_xor_si128(c2[844],simde_mm_xor_si128(c2[1555],simde_mm_xor_si128(c2[1695],simde_mm_xor_si128(c2[157],simde_mm_xor_si128(c2[1000],simde_mm_xor_si128(c2[854],simde_mm_xor_si128(c2[591],simde_mm_xor_si128(c2[731],simde_mm_xor_si128(c2[590],simde_mm_xor_si128(c2[328],simde_mm_xor_si128(c2[468],simde_mm_xor_si128(c2[1446],simde_mm_xor_si128(c2[1726],simde_mm_xor_si128(c2[198],simde_mm_xor_si128(c2[1882],simde_mm_xor_si128(c2[340],simde_mm_xor_si128(c2[617],simde_mm_xor_si128(c2[1474],simde_mm_xor_si128(c2[1472],simde_mm_xor_si128(c2[1764],simde_mm_xor_si128(c2[1904],simde_mm_xor_si128(c2[226],simde_mm_xor_si128(c2[1486],simde_mm_xor_si128(c2[938],simde_mm_xor_si128(c2[1779],simde_mm_xor_si128(c2[1655],simde_mm_xor_si128(c2[1372],simde_mm_xor_si128(c2[1516],simde_mm_xor_si128(c2[828],simde_mm_xor_si128(c2[968],simde_mm_xor_si128(c2[1949],c2[549]))))))))))))))))))))))))))))))))));
 
 //row: 21
-     d2[147]=_mm_xor_si128(c2[1402],_mm_xor_si128(c2[981],_mm_xor_si128(c2[1400],_mm_xor_si128(c2[5],_mm_xor_si128(c2[19],_mm_xor_si128(c2[720],_mm_xor_si128(c2[1416],_mm_xor_si128(c2[1556],_mm_xor_si128(c2[1294],_mm_xor_si128(c2[1013],_mm_xor_si128(c2[1153],_mm_xor_si128(c2[1024],_mm_xor_si128(c2[2002],_mm_xor_si128(c2[43],_mm_xor_si128(c2[761],_mm_xor_si128(c2[199],_mm_xor_si128(c2[756],_mm_xor_si128(c2[896],_mm_xor_si128(c2[2030],_mm_xor_si128(c2[1895],_mm_xor_si128(c2[2035],_mm_xor_si128(c2[228],_mm_xor_si128(c2[789],_mm_xor_si128(c2[1909],_mm_xor_si128(c2[2049],_mm_xor_si128(c2[1501],_mm_xor_si128(c2[2202],_mm_xor_si128(c2[103],_mm_xor_si128(c2[2218],_mm_xor_si128(c2[1935],_mm_xor_si128(c2[1932],_mm_xor_si128(c2[2072],_mm_xor_si128(c2[1654],_mm_xor_si128(c2[1531],_mm_xor_si128(c2[266],_mm_xor_si128(c2[972],c2[1112]))))))))))))))))))))))))))))))))))));
+     d2[147]=simde_mm_xor_si128(c2[1402],simde_mm_xor_si128(c2[981],simde_mm_xor_si128(c2[1400],simde_mm_xor_si128(c2[5],simde_mm_xor_si128(c2[19],simde_mm_xor_si128(c2[720],simde_mm_xor_si128(c2[1416],simde_mm_xor_si128(c2[1556],simde_mm_xor_si128(c2[1294],simde_mm_xor_si128(c2[1013],simde_mm_xor_si128(c2[1153],simde_mm_xor_si128(c2[1024],simde_mm_xor_si128(c2[2002],simde_mm_xor_si128(c2[43],simde_mm_xor_si128(c2[761],simde_mm_xor_si128(c2[199],simde_mm_xor_si128(c2[756],simde_mm_xor_si128(c2[896],simde_mm_xor_si128(c2[2030],simde_mm_xor_si128(c2[1895],simde_mm_xor_si128(c2[2035],simde_mm_xor_si128(c2[228],simde_mm_xor_si128(c2[789],simde_mm_xor_si128(c2[1909],simde_mm_xor_si128(c2[2049],simde_mm_xor_si128(c2[1501],simde_mm_xor_si128(c2[2202],simde_mm_xor_si128(c2[103],simde_mm_xor_si128(c2[2218],simde_mm_xor_si128(c2[1935],simde_mm_xor_si128(c2[1932],simde_mm_xor_si128(c2[2072],simde_mm_xor_si128(c2[1654],simde_mm_xor_si128(c2[1531],simde_mm_xor_si128(c2[266],simde_mm_xor_si128(c2[972],c2[1112]))))))))))))))))))))))))))))))))))));
 
 //row: 22
-     d2[154]=_mm_xor_si128(c2[157],c2[168]);
+     d2[154]=simde_mm_xor_si128(c2[157],c2[168]);
 
 //row: 23
-     d2[161]=_mm_xor_si128(c2[1682],_mm_xor_si128(c2[885],c2[911]));
+     d2[161]=simde_mm_xor_si128(c2[1682],simde_mm_xor_si128(c2[885],c2[911]));
 
 //row: 24
-     d2[168]=_mm_xor_si128(c2[1276],_mm_xor_si128(c2[1153],c2[2229]));
+     d2[168]=simde_mm_xor_si128(c2[1276],simde_mm_xor_si128(c2[1153],c2[2229]));
 
 //row: 25
-     d2[175]=_mm_xor_si128(c2[426],c2[2035]);
+     d2[175]=simde_mm_xor_si128(c2[426],c2[2035]);
 
 //row: 26
-     d2[182]=_mm_xor_si128(c2[704],_mm_xor_si128(c2[844],_mm_xor_si128(c2[2104],_mm_xor_si128(c2[283],_mm_xor_si128(c2[423],_mm_xor_si128(c2[1683],_mm_xor_si128(c2[842],_mm_xor_si128(c2[2102],_mm_xor_si128(c2[1560],_mm_xor_si128(c2[1700],_mm_xor_si128(c2[714],_mm_xor_si128(c2[155],_mm_xor_si128(c2[1415],_mm_xor_si128(c2[998],_mm_xor_si128(c2[2118],_mm_xor_si128(c2[19],_mm_xor_si128(c2[589],_mm_xor_si128(c2[729],_mm_xor_si128(c2[1989],_mm_xor_si128(c2[588],_mm_xor_si128(c2[1708],_mm_xor_si128(c2[1848],_mm_xor_si128(c2[1433],_mm_xor_si128(c2[326],_mm_xor_si128(c2[466],_mm_xor_si128(c2[1726],_mm_xor_si128(c2[1304],_mm_xor_si128(c2[1444],_mm_xor_si128(c2[465],_mm_xor_si128(c2[1724],_mm_xor_si128(c2[745],_mm_xor_si128(c2[56],_mm_xor_si128(c2[196],_mm_xor_si128(c2[1456],_mm_xor_si128(c2[1880],_mm_xor_si128(c2[901],_mm_xor_si128(c2[338],_mm_xor_si128(c2[1458],_mm_xor_si128(c2[1598],_mm_xor_si128(c2[1332],_mm_xor_si128(c2[1472],_mm_xor_si128(c2[493],_mm_xor_si128(c2[1470],_mm_xor_si128(c2[351],_mm_xor_si128(c2[491],_mm_xor_si128(c2[1769],_mm_xor_si128(c2[1909],_mm_xor_si128(c2[930],_mm_xor_si128(c2[84],_mm_xor_si128(c2[224],_mm_xor_si128(c2[1484],_mm_xor_si128(c2[1484],_mm_xor_si128(c2[365],_mm_xor_si128(c2[505],_mm_xor_si128(c2[803],_mm_xor_si128(c2[943],_mm_xor_si128(c2[2203],_mm_xor_si128(c2[1784],_mm_xor_si128(c2[658],_mm_xor_si128(c2[798],_mm_xor_si128(c2[938],_mm_xor_si128(c2[1513],_mm_xor_si128(c2[1653],_mm_xor_si128(c2[674],_mm_xor_si128(c2[1377],_mm_xor_si128(c2[398],_mm_xor_si128(c2[1514],_mm_xor_si128(c2[395],_mm_xor_si128(c2[535],_mm_xor_si128(c2[826],_mm_xor_si128(c2[966],_mm_xor_si128(c2[2226],_mm_xor_si128(c2[1807],_mm_xor_si128(c2[1947],_mm_xor_si128(c2[968],_mm_xor_si128(c2[547],_mm_xor_si128(c2[1667],c2[1807])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+     d2[182]=simde_mm_xor_si128(c2[704],simde_mm_xor_si128(c2[844],simde_mm_xor_si128(c2[2104],simde_mm_xor_si128(c2[283],simde_mm_xor_si128(c2[423],simde_mm_xor_si128(c2[1683],simde_mm_xor_si128(c2[842],simde_mm_xor_si128(c2[2102],simde_mm_xor_si128(c2[1560],simde_mm_xor_si128(c2[1700],simde_mm_xor_si128(c2[714],simde_mm_xor_si128(c2[155],simde_mm_xor_si128(c2[1415],simde_mm_xor_si128(c2[998],simde_mm_xor_si128(c2[2118],simde_mm_xor_si128(c2[19],simde_mm_xor_si128(c2[589],simde_mm_xor_si128(c2[729],simde_mm_xor_si128(c2[1989],simde_mm_xor_si128(c2[588],simde_mm_xor_si128(c2[1708],simde_mm_xor_si128(c2[1848],simde_mm_xor_si128(c2[1433],simde_mm_xor_si128(c2[326],simde_mm_xor_si128(c2[466],simde_mm_xor_si128(c2[1726],simde_mm_xor_si128(c2[1304],simde_mm_xor_si128(c2[1444],simde_mm_xor_si128(c2[465],simde_mm_xor_si128(c2[1724],simde_mm_xor_si128(c2[745],simde_mm_xor_si128(c2[56],simde_mm_xor_si128(c2[196],simde_mm_xor_si128(c2[1456],simde_mm_xor_si128(c2[1880],simde_mm_xor_si128(c2[901],simde_mm_xor_si128(c2[338],simde_mm_xor_si128(c2[1458],simde_mm_xor_si128(c2[1598],simde_mm_xor_si128(c2[1332],simde_mm_xor_si128(c2[1472],simde_mm_xor_si128(c2[493],simde_mm_xor_si128(c2[1470],simde_mm_xor_si128(c2[351],simde_mm_xor_si128(c2[491],simde_mm_xor_si128(c2[1769],simde_mm_xor_si128(c2[1909],simde_mm_xor_si128(c2[930],simde_mm_xor_si128(c2[84],simde_mm_xor_si128(c2[224],simde_mm_xor_si128(c2[1484],simde_mm_xor_si128(c2[1484],simde_mm_xor_si128(c2[365],simde_mm_xor_si128(c2[505],simde_mm_xor_si128(c2[803],simde_mm_xor_si128(c2[943],simde_mm_xor_si128(c2[2203],simde_mm_xor_si128(c2[1784],simde_mm_xor_si128(c2[658],simde_mm_xor_si128(c2[798],simde_mm_xor_si128(c2[938],simde_mm_xor_si128(c2[1513],simde_mm_xor_si128(c2[1653],simde_mm_xor_si128(c2[674],simde_mm_xor_si128(c2[1377],simde_mm_xor_si128(c2[398],simde_mm_xor_si128(c2[1514],simde_mm_xor_si128(c2[395],simde_mm_xor_si128(c2[535],simde_mm_xor_si128(c2[826],simde_mm_xor_si128(c2[966],simde_mm_xor_si128(c2[2226],simde_mm_xor_si128(c2[1807],simde_mm_xor_si128(c2[1947],simde_mm_xor_si128(c2[968],simde_mm_xor_si128(c2[547],simde_mm_xor_si128(c2[1667],c2[1807])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
 
 //row: 27
-     d2[189]=_mm_xor_si128(c2[1541],c2[924]);
+     d2[189]=simde_mm_xor_si128(c2[1541],c2[924]);
 
 //row: 28
-     d2[196]=_mm_xor_si128(c2[1140],_mm_xor_si128(c2[594],c2[215]));
+     d2[196]=simde_mm_xor_si128(c2[1140],simde_mm_xor_si128(c2[594],c2[215]));
 
 //row: 29
-     d2[203]=_mm_xor_si128(c2[1686],c2[1879]);
+     d2[203]=simde_mm_xor_si128(c2[1686],c2[1879]);
 
 //row: 30
-     d2[210]=_mm_xor_si128(c2[1434],_mm_xor_si128(c2[1752],_mm_xor_si128(c2[1362],c2[130])));
+     d2[210]=simde_mm_xor_si128(c2[1434],simde_mm_xor_si128(c2[1752],simde_mm_xor_si128(c2[1362],c2[130])));
 
 //row: 31
-     d2[217]=_mm_xor_si128(c2[2101],_mm_xor_si128(c2[1680],_mm_xor_si128(c2[2106],_mm_xor_si128(c2[718],_mm_xor_si128(c2[1419],_mm_xor_si128(c2[2115],_mm_xor_si128(c2[16],_mm_xor_si128(c2[436],_mm_xor_si128(c2[1993],_mm_xor_si128(c2[1712],_mm_xor_si128(c2[1852],_mm_xor_si128(c2[1723],_mm_xor_si128(c2[462],_mm_xor_si128(c2[742],_mm_xor_si128(c2[1460],_mm_xor_si128(c2[898],_mm_xor_si128(c2[1462],_mm_xor_si128(c2[1602],_mm_xor_si128(c2[490],_mm_xor_si128(c2[355],_mm_xor_si128(c2[495],_mm_xor_si128(c2[927],_mm_xor_si128(c2[1488],_mm_xor_si128(c2[369],_mm_xor_si128(c2[509],_mm_xor_si128(c2[2200],_mm_xor_si128(c2[662],_mm_xor_si128(c2[802],_mm_xor_si128(c2[678],_mm_xor_si128(c2[395],_mm_xor_si128(c2[392],_mm_xor_si128(c2[532],_mm_xor_si128(c2[2230],_mm_xor_si128(c2[972],_mm_xor_si128(c2[1671],c2[1811])))))))))))))))))))))))))))))))))));
+     d2[217]=simde_mm_xor_si128(c2[2101],simde_mm_xor_si128(c2[1680],simde_mm_xor_si128(c2[2106],simde_mm_xor_si128(c2[718],simde_mm_xor_si128(c2[1419],simde_mm_xor_si128(c2[2115],simde_mm_xor_si128(c2[16],simde_mm_xor_si128(c2[436],simde_mm_xor_si128(c2[1993],simde_mm_xor_si128(c2[1712],simde_mm_xor_si128(c2[1852],simde_mm_xor_si128(c2[1723],simde_mm_xor_si128(c2[462],simde_mm_xor_si128(c2[742],simde_mm_xor_si128(c2[1460],simde_mm_xor_si128(c2[898],simde_mm_xor_si128(c2[1462],simde_mm_xor_si128(c2[1602],simde_mm_xor_si128(c2[490],simde_mm_xor_si128(c2[355],simde_mm_xor_si128(c2[495],simde_mm_xor_si128(c2[927],simde_mm_xor_si128(c2[1488],simde_mm_xor_si128(c2[369],simde_mm_xor_si128(c2[509],simde_mm_xor_si128(c2[2200],simde_mm_xor_si128(c2[662],simde_mm_xor_si128(c2[802],simde_mm_xor_si128(c2[678],simde_mm_xor_si128(c2[395],simde_mm_xor_si128(c2[392],simde_mm_xor_si128(c2[532],simde_mm_xor_si128(c2[2230],simde_mm_xor_si128(c2[972],simde_mm_xor_si128(c2[1671],c2[1811])))))))))))))))))))))))))))))))))));
 
 //row: 32
-     d2[224]=_mm_xor_si128(c2[844],_mm_xor_si128(c2[984],_mm_xor_si128(c2[423],_mm_xor_si128(c2[563],_mm_xor_si128(c2[982],_mm_xor_si128(c2[1686],_mm_xor_si128(c2[1700],_mm_xor_si128(c2[1840],_mm_xor_si128(c2[295],_mm_xor_si128(c2[1138],_mm_xor_si128(c2[729],_mm_xor_si128(c2[869],_mm_xor_si128(c2[728],_mm_xor_si128(c2[466],_mm_xor_si128(c2[606],_mm_xor_si128(c2[1444],_mm_xor_si128(c2[1584],_mm_xor_si128(c2[1864],_mm_xor_si128(c2[196],_mm_xor_si128(c2[336],_mm_xor_si128(c2[2020],_mm_xor_si128(c2[478],_mm_xor_si128(c2[1472],_mm_xor_si128(c2[1612],_mm_xor_si128(c2[1610],_mm_xor_si128(c2[771],_mm_xor_si128(c2[1909],_mm_xor_si128(c2[2049],_mm_xor_si128(c2[224],_mm_xor_si128(c2[364],_mm_xor_si128(c2[1624],_mm_xor_si128(c2[943],_mm_xor_si128(c2[1083],_mm_xor_si128(c2[1924],_mm_xor_si128(c2[1653],_mm_xor_si128(c2[1793],_mm_xor_si128(c2[1517],_mm_xor_si128(c2[1654],_mm_xor_si128(c2[966],_mm_xor_si128(c2[1106],_mm_xor_si128(c2[1947],_mm_xor_si128(c2[2087],c2[687]))))))))))))))))))))))))))))))))))))))))));
+     d2[224]=simde_mm_xor_si128(c2[844],simde_mm_xor_si128(c2[984],simde_mm_xor_si128(c2[423],simde_mm_xor_si128(c2[563],simde_mm_xor_si128(c2[982],simde_mm_xor_si128(c2[1686],simde_mm_xor_si128(c2[1700],simde_mm_xor_si128(c2[1840],simde_mm_xor_si128(c2[295],simde_mm_xor_si128(c2[1138],simde_mm_xor_si128(c2[729],simde_mm_xor_si128(c2[869],simde_mm_xor_si128(c2[728],simde_mm_xor_si128(c2[466],simde_mm_xor_si128(c2[606],simde_mm_xor_si128(c2[1444],simde_mm_xor_si128(c2[1584],simde_mm_xor_si128(c2[1864],simde_mm_xor_si128(c2[196],simde_mm_xor_si128(c2[336],simde_mm_xor_si128(c2[2020],simde_mm_xor_si128(c2[478],simde_mm_xor_si128(c2[1472],simde_mm_xor_si128(c2[1612],simde_mm_xor_si128(c2[1610],simde_mm_xor_si128(c2[771],simde_mm_xor_si128(c2[1909],simde_mm_xor_si128(c2[2049],simde_mm_xor_si128(c2[224],simde_mm_xor_si128(c2[364],simde_mm_xor_si128(c2[1624],simde_mm_xor_si128(c2[943],simde_mm_xor_si128(c2[1083],simde_mm_xor_si128(c2[1924],simde_mm_xor_si128(c2[1653],simde_mm_xor_si128(c2[1793],simde_mm_xor_si128(c2[1517],simde_mm_xor_si128(c2[1654],simde_mm_xor_si128(c2[966],simde_mm_xor_si128(c2[1106],simde_mm_xor_si128(c2[1947],simde_mm_xor_si128(c2[2087],c2[687]))))))))))))))))))))))))))))))))))))))))));
 
 //row: 33
-     d2[231]=_mm_xor_si128(c2[704],_mm_xor_si128(c2[283],_mm_xor_si128(c2[702],_mm_xor_si128(c2[1560],_mm_xor_si128(c2[15],_mm_xor_si128(c2[858],_mm_xor_si128(c2[589],_mm_xor_si128(c2[448],_mm_xor_si128(c2[1012],_mm_xor_si128(c2[326],_mm_xor_si128(c2[1304],_mm_xor_si128(c2[1584],_mm_xor_si128(c2[56],_mm_xor_si128(c2[1740],_mm_xor_si128(c2[198],_mm_xor_si128(c2[1332],_mm_xor_si128(c2[1330],_mm_xor_si128(c2[1769],_mm_xor_si128(c2[84],_mm_xor_si128(c2[1344],_mm_xor_si128(c2[803],_mm_xor_si128(c2[1644],_mm_xor_si128(c2[1778],_mm_xor_si128(c2[1513],_mm_xor_si128(c2[1237],_mm_xor_si128(c2[1374],_mm_xor_si128(c2[826],_mm_xor_si128(c2[1807],c2[407]))))))))))))))))))))))))))));
+     d2[231]=simde_mm_xor_si128(c2[704],simde_mm_xor_si128(c2[283],simde_mm_xor_si128(c2[702],simde_mm_xor_si128(c2[1560],simde_mm_xor_si128(c2[15],simde_mm_xor_si128(c2[858],simde_mm_xor_si128(c2[589],simde_mm_xor_si128(c2[448],simde_mm_xor_si128(c2[1012],simde_mm_xor_si128(c2[326],simde_mm_xor_si128(c2[1304],simde_mm_xor_si128(c2[1584],simde_mm_xor_si128(c2[56],simde_mm_xor_si128(c2[1740],simde_mm_xor_si128(c2[198],simde_mm_xor_si128(c2[1332],simde_mm_xor_si128(c2[1330],simde_mm_xor_si128(c2[1769],simde_mm_xor_si128(c2[84],simde_mm_xor_si128(c2[1344],simde_mm_xor_si128(c2[803],simde_mm_xor_si128(c2[1644],simde_mm_xor_si128(c2[1778],simde_mm_xor_si128(c2[1513],simde_mm_xor_si128(c2[1237],simde_mm_xor_si128(c2[1374],simde_mm_xor_si128(c2[826],simde_mm_xor_si128(c2[1807],c2[407]))))))))))))))))))))))))))));
 
 //row: 34
-     d2[238]=_mm_xor_si128(c2[143],_mm_xor_si128(c2[283],_mm_xor_si128(c2[1961],_mm_xor_si128(c2[1961],_mm_xor_si128(c2[2101],_mm_xor_si128(c2[1540],_mm_xor_si128(c2[281],_mm_xor_si128(c2[1966],_mm_xor_si128(c2[1821],_mm_xor_si128(c2[999],_mm_xor_si128(c2[1139],_mm_xor_si128(c2[578],_mm_xor_si128(c2[1840],_mm_xor_si128(c2[1279],_mm_xor_si128(c2[437],_mm_xor_si128(c2[1975],_mm_xor_si128(c2[2115],_mm_xor_si128(c2[28],_mm_xor_si128(c2[168],_mm_xor_si128(c2[1853],_mm_xor_si128(c2[34],_mm_xor_si128(c2[1572],_mm_xor_si128(c2[1712],_mm_xor_si128(c2[2004],_mm_xor_si128(c2[2144],_mm_xor_si128(c2[1583],_mm_xor_si128(c2[743],_mm_xor_si128(c2[883],_mm_xor_si128(c2[322],_mm_xor_si128(c2[1163],_mm_xor_si128(c2[602],_mm_xor_si128(c2[1741],_mm_xor_si128(c2[1881],_mm_xor_si128(c2[1320],_mm_xor_si128(c2[1319],_mm_xor_si128(c2[758],_mm_xor_si128(c2[2016],_mm_xor_si128(c2[1322],_mm_xor_si128(c2[1462],_mm_xor_si128(c2[771],_mm_xor_si128(c2[911],_mm_xor_si128(c2[350],_mm_xor_si128(c2[916],_mm_xor_si128(c2[215],_mm_xor_si128(c2[355],_mm_xor_si128(c2[1208],_mm_xor_si128(c2[1348],_mm_xor_si128(c2[787],_mm_xor_si128(c2[1769],_mm_xor_si128(c2[1909],_mm_xor_si128(c2[1348],_mm_xor_si128(c2[930],_mm_xor_si128(c2[229],_mm_xor_si128(c2[369],_mm_xor_si128(c2[242],_mm_xor_si128(c2[382],_mm_xor_si128(c2[2060],_mm_xor_si128(c2[1223],_mm_xor_si128(c2[522],_mm_xor_si128(c2[662],_mm_xor_si128(c2[952],_mm_xor_si128(c2[1092],_mm_xor_si128(c2[538],_mm_xor_si128(c2[816],_mm_xor_si128(c2[255],_mm_xor_si128(c2[953],_mm_xor_si128(c2[252],_mm_xor_si128(c2[392],_mm_xor_si128(c2[272],_mm_xor_si128(c2[412],_mm_xor_si128(c2[2090],_mm_xor_si128(c2[1246],_mm_xor_si128(c2[1386],_mm_xor_si128(c2[832],_mm_xor_si128(c2[2232],_mm_xor_si128(c2[1531],c2[1671]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+     d2[238]=simde_mm_xor_si128(c2[143],simde_mm_xor_si128(c2[283],simde_mm_xor_si128(c2[1961],simde_mm_xor_si128(c2[1961],simde_mm_xor_si128(c2[2101],simde_mm_xor_si128(c2[1540],simde_mm_xor_si128(c2[281],simde_mm_xor_si128(c2[1966],simde_mm_xor_si128(c2[1821],simde_mm_xor_si128(c2[999],simde_mm_xor_si128(c2[1139],simde_mm_xor_si128(c2[578],simde_mm_xor_si128(c2[1840],simde_mm_xor_si128(c2[1279],simde_mm_xor_si128(c2[437],simde_mm_xor_si128(c2[1975],simde_mm_xor_si128(c2[2115],simde_mm_xor_si128(c2[28],simde_mm_xor_si128(c2[168],simde_mm_xor_si128(c2[1853],simde_mm_xor_si128(c2[34],simde_mm_xor_si128(c2[1572],simde_mm_xor_si128(c2[1712],simde_mm_xor_si128(c2[2004],simde_mm_xor_si128(c2[2144],simde_mm_xor_si128(c2[1583],simde_mm_xor_si128(c2[743],simde_mm_xor_si128(c2[883],simde_mm_xor_si128(c2[322],simde_mm_xor_si128(c2[1163],simde_mm_xor_si128(c2[602],simde_mm_xor_si128(c2[1741],simde_mm_xor_si128(c2[1881],simde_mm_xor_si128(c2[1320],simde_mm_xor_si128(c2[1319],simde_mm_xor_si128(c2[758],simde_mm_xor_si128(c2[2016],simde_mm_xor_si128(c2[1322],simde_mm_xor_si128(c2[1462],simde_mm_xor_si128(c2[771],simde_mm_xor_si128(c2[911],simde_mm_xor_si128(c2[350],simde_mm_xor_si128(c2[916],simde_mm_xor_si128(c2[215],simde_mm_xor_si128(c2[355],simde_mm_xor_si128(c2[1208],simde_mm_xor_si128(c2[1348],simde_mm_xor_si128(c2[787],simde_mm_xor_si128(c2[1769],simde_mm_xor_si128(c2[1909],simde_mm_xor_si128(c2[1348],simde_mm_xor_si128(c2[930],simde_mm_xor_si128(c2[229],simde_mm_xor_si128(c2[369],simde_mm_xor_si128(c2[242],simde_mm_xor_si128(c2[382],simde_mm_xor_si128(c2[2060],simde_mm_xor_si128(c2[1223],simde_mm_xor_si128(c2[522],simde_mm_xor_si128(c2[662],simde_mm_xor_si128(c2[952],simde_mm_xor_si128(c2[1092],simde_mm_xor_si128(c2[538],simde_mm_xor_si128(c2[816],simde_mm_xor_si128(c2[255],simde_mm_xor_si128(c2[953],simde_mm_xor_si128(c2[252],simde_mm_xor_si128(c2[392],simde_mm_xor_si128(c2[272],simde_mm_xor_si128(c2[412],simde_mm_xor_si128(c2[2090],simde_mm_xor_si128(c2[1246],simde_mm_xor_si128(c2[1386],simde_mm_xor_si128(c2[832],simde_mm_xor_si128(c2[2232],simde_mm_xor_si128(c2[1531],c2[1671]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
 
 //row: 35
-     d2[245]=_mm_xor_si128(c2[2100],_mm_xor_si128(c2[1],_mm_xor_si128(c2[1826],_mm_xor_si128(c2[6],_mm_xor_si128(c2[717],_mm_xor_si128(c2[857],_mm_xor_si128(c2[1558],_mm_xor_si128(c2[155],_mm_xor_si128(c2[1559],_mm_xor_si128(c2[1992],_mm_xor_si128(c2[2132],_mm_xor_si128(c2[1991],_mm_xor_si128(c2[1722],_mm_xor_si128(c2[1862],_mm_xor_si128(c2[608],_mm_xor_si128(c2[888],_mm_xor_si128(c2[1599],_mm_xor_si128(c2[1037],_mm_xor_si128(c2[1741],_mm_xor_si128(c2[636],_mm_xor_si128(c2[634],_mm_xor_si128(c2[773],_mm_xor_si128(c2[926],_mm_xor_si128(c2[1066],_mm_xor_si128(c2[1627],_mm_xor_si128(c2[648],_mm_xor_si128(c2[100],_mm_xor_si128(c2[941],_mm_xor_si128(c2[817],_mm_xor_si128(c2[534],_mm_xor_si128(c2[678],_mm_xor_si128(c2[2229],_mm_xor_si128(c2[130],_mm_xor_si128(c2[1111],c2[1950]))))))))))))))))))))))))))))))))));
+     d2[245]=simde_mm_xor_si128(c2[2100],simde_mm_xor_si128(c2[1],simde_mm_xor_si128(c2[1826],simde_mm_xor_si128(c2[6],simde_mm_xor_si128(c2[717],simde_mm_xor_si128(c2[857],simde_mm_xor_si128(c2[1558],simde_mm_xor_si128(c2[155],simde_mm_xor_si128(c2[1559],simde_mm_xor_si128(c2[1992],simde_mm_xor_si128(c2[2132],simde_mm_xor_si128(c2[1991],simde_mm_xor_si128(c2[1722],simde_mm_xor_si128(c2[1862],simde_mm_xor_si128(c2[608],simde_mm_xor_si128(c2[888],simde_mm_xor_si128(c2[1599],simde_mm_xor_si128(c2[1037],simde_mm_xor_si128(c2[1741],simde_mm_xor_si128(c2[636],simde_mm_xor_si128(c2[634],simde_mm_xor_si128(c2[773],simde_mm_xor_si128(c2[926],simde_mm_xor_si128(c2[1066],simde_mm_xor_si128(c2[1627],simde_mm_xor_si128(c2[648],simde_mm_xor_si128(c2[100],simde_mm_xor_si128(c2[941],simde_mm_xor_si128(c2[817],simde_mm_xor_si128(c2[534],simde_mm_xor_si128(c2[678],simde_mm_xor_si128(c2[2229],simde_mm_xor_si128(c2[130],simde_mm_xor_si128(c2[1111],c2[1950]))))))))))))))))))))))))))))))))));
 
 //row: 36
-     d2[252]=_mm_xor_si128(c2[140],_mm_xor_si128(c2[2131],c2[523]));
+     d2[252]=simde_mm_xor_si128(c2[140],simde_mm_xor_si128(c2[2131],c2[523]));
 
 //row: 37
-     d2[259]=_mm_xor_si128(c2[0],_mm_xor_si128(c2[705],_mm_xor_si128(c2[1825],_mm_xor_si128(c2[284],_mm_xor_si128(c2[5],_mm_xor_si128(c2[703],_mm_xor_si128(c2[856],_mm_xor_si128(c2[1554],_mm_xor_si128(c2[1557],_mm_xor_si128(c2[16],_mm_xor_si128(c2[154],_mm_xor_si128(c2[719],_mm_xor_si128(c2[859],_mm_xor_si128(c2[2131],_mm_xor_si128(c2[590],_mm_xor_si128(c2[1990],_mm_xor_si128(c2[309],_mm_xor_si128(c2[449],_mm_xor_si128(c2[1868],_mm_xor_si128(c2[327],_mm_xor_si128(c2[607],_mm_xor_si128(c2[1305],_mm_xor_si128(c2[887],_mm_xor_si128(c2[1585],_mm_xor_si128(c2[1598],_mm_xor_si128(c2[57],_mm_xor_si128(c2[1036],_mm_xor_si128(c2[1741],_mm_xor_si128(c2[1740],_mm_xor_si128(c2[59],_mm_xor_si128(c2[199],_mm_xor_si128(c2[635],_mm_xor_si128(c2[1333],_mm_xor_si128(c2[633],_mm_xor_si128(c2[1191],_mm_xor_si128(c2[1331],_mm_xor_si128(c2[1065],_mm_xor_si128(c2[1770],_mm_xor_si128(c2[1626],_mm_xor_si128(c2[85],_mm_xor_si128(c2[647],_mm_xor_si128(c2[1205],_mm_xor_si128(c2[1345],_mm_xor_si128(c2[99],_mm_xor_si128(c2[804],_mm_xor_si128(c2[940],_mm_xor_si128(c2[1498],_mm_xor_si128(c2[1638],_mm_xor_si128(c2[816],_mm_xor_si128(c2[1514],_mm_xor_si128(c2[533],_mm_xor_si128(c2[1238],_mm_xor_si128(c2[677],_mm_xor_si128(c2[1235],_mm_xor_si128(c2[1375],_mm_xor_si128(c2[129],_mm_xor_si128(c2[827],_mm_xor_si128(c2[1110],_mm_xor_si128(c2[1808],_mm_xor_si128(c2[1949],_mm_xor_si128(c2[268],c2[408])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+     d2[259]=simde_mm_xor_si128(c2[0],simde_mm_xor_si128(c2[705],simde_mm_xor_si128(c2[1825],simde_mm_xor_si128(c2[284],simde_mm_xor_si128(c2[5],simde_mm_xor_si128(c2[703],simde_mm_xor_si128(c2[856],simde_mm_xor_si128(c2[1554],simde_mm_xor_si128(c2[1557],simde_mm_xor_si128(c2[16],simde_mm_xor_si128(c2[154],simde_mm_xor_si128(c2[719],simde_mm_xor_si128(c2[859],simde_mm_xor_si128(c2[2131],simde_mm_xor_si128(c2[590],simde_mm_xor_si128(c2[1990],simde_mm_xor_si128(c2[309],simde_mm_xor_si128(c2[449],simde_mm_xor_si128(c2[1868],simde_mm_xor_si128(c2[327],simde_mm_xor_si128(c2[607],simde_mm_xor_si128(c2[1305],simde_mm_xor_si128(c2[887],simde_mm_xor_si128(c2[1585],simde_mm_xor_si128(c2[1598],simde_mm_xor_si128(c2[57],simde_mm_xor_si128(c2[1036],simde_mm_xor_si128(c2[1741],simde_mm_xor_si128(c2[1740],simde_mm_xor_si128(c2[59],simde_mm_xor_si128(c2[199],simde_mm_xor_si128(c2[635],simde_mm_xor_si128(c2[1333],simde_mm_xor_si128(c2[633],simde_mm_xor_si128(c2[1191],simde_mm_xor_si128(c2[1331],simde_mm_xor_si128(c2[1065],simde_mm_xor_si128(c2[1770],simde_mm_xor_si128(c2[1626],simde_mm_xor_si128(c2[85],simde_mm_xor_si128(c2[647],simde_mm_xor_si128(c2[1205],simde_mm_xor_si128(c2[1345],simde_mm_xor_si128(c2[99],simde_mm_xor_si128(c2[804],simde_mm_xor_si128(c2[940],simde_mm_xor_si128(c2[1498],simde_mm_xor_si128(c2[1638],simde_mm_xor_si128(c2[816],simde_mm_xor_si128(c2[1514],simde_mm_xor_si128(c2[533],simde_mm_xor_si128(c2[1238],simde_mm_xor_si128(c2[677],simde_mm_xor_si128(c2[1235],simde_mm_xor_si128(c2[1375],simde_mm_xor_si128(c2[129],simde_mm_xor_si128(c2[827],simde_mm_xor_si128(c2[1110],simde_mm_xor_si128(c2[1808],simde_mm_xor_si128(c2[1949],simde_mm_xor_si128(c2[268],c2[408])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
 
 //row: 38
-     d2[266]=_mm_xor_si128(c2[840],_mm_xor_si128(c2[980],_mm_xor_si128(c2[566],_mm_xor_si128(c2[985],_mm_xor_si128(c2[1696],_mm_xor_si128(c2[1836],_mm_xor_si128(c2[298],_mm_xor_si128(c2[1134],_mm_xor_si128(c2[716],_mm_xor_si128(c2[732],_mm_xor_si128(c2[872],_mm_xor_si128(c2[731],_mm_xor_si128(c2[462],_mm_xor_si128(c2[602],_mm_xor_si128(c2[1587],_mm_xor_si128(c2[1867],_mm_xor_si128(c2[339],_mm_xor_si128(c2[2016],_mm_xor_si128(c2[481],_mm_xor_si128(c2[1615],_mm_xor_si128(c2[1613],_mm_xor_si128(c2[216],_mm_xor_si128(c2[1905],_mm_xor_si128(c2[2045],_mm_xor_si128(c2[367],_mm_xor_si128(c2[1627],_mm_xor_si128(c2[1079],_mm_xor_si128(c2[1920],_mm_xor_si128(c2[1796],_mm_xor_si128(c2[1513],_mm_xor_si128(c2[1657],_mm_xor_si128(c2[969],_mm_xor_si128(c2[1109],_mm_xor_si128(c2[2090],c2[690]))))))))))))))))))))))))))))))))));
+     d2[266]=simde_mm_xor_si128(c2[840],simde_mm_xor_si128(c2[980],simde_mm_xor_si128(c2[566],simde_mm_xor_si128(c2[985],simde_mm_xor_si128(c2[1696],simde_mm_xor_si128(c2[1836],simde_mm_xor_si128(c2[298],simde_mm_xor_si128(c2[1134],simde_mm_xor_si128(c2[716],simde_mm_xor_si128(c2[732],simde_mm_xor_si128(c2[872],simde_mm_xor_si128(c2[731],simde_mm_xor_si128(c2[462],simde_mm_xor_si128(c2[602],simde_mm_xor_si128(c2[1587],simde_mm_xor_si128(c2[1867],simde_mm_xor_si128(c2[339],simde_mm_xor_si128(c2[2016],simde_mm_xor_si128(c2[481],simde_mm_xor_si128(c2[1615],simde_mm_xor_si128(c2[1613],simde_mm_xor_si128(c2[216],simde_mm_xor_si128(c2[1905],simde_mm_xor_si128(c2[2045],simde_mm_xor_si128(c2[367],simde_mm_xor_si128(c2[1627],simde_mm_xor_si128(c2[1079],simde_mm_xor_si128(c2[1920],simde_mm_xor_si128(c2[1796],simde_mm_xor_si128(c2[1513],simde_mm_xor_si128(c2[1657],simde_mm_xor_si128(c2[969],simde_mm_xor_si128(c2[1109],simde_mm_xor_si128(c2[2090],c2[690]))))))))))))))))))))))))))))))))));
 
 //row: 39
-     d2[273]=_mm_xor_si128(c2[280],_mm_xor_si128(c2[420],_mm_xor_si128(c2[2105],_mm_xor_si128(c2[6],_mm_xor_si128(c2[425],_mm_xor_si128(c2[1406],_mm_xor_si128(c2[1136],_mm_xor_si128(c2[1276],_mm_xor_si128(c2[1977],_mm_xor_si128(c2[574],_mm_xor_si128(c2[172],_mm_xor_si128(c2[312],_mm_xor_si128(c2[171],_mm_xor_si128(c2[2148],_mm_xor_si128(c2[42],_mm_xor_si128(c2[887],_mm_xor_si128(c2[1027],_mm_xor_si128(c2[1307],_mm_xor_si128(c2[1878],_mm_xor_si128(c2[2018],_mm_xor_si128(c2[1456],_mm_xor_si128(c2[2160],_mm_xor_si128(c2[915],_mm_xor_si128(c2[1055],_mm_xor_si128(c2[1053],_mm_xor_si128(c2[1345],_mm_xor_si128(c2[1485],_mm_xor_si128(c2[1906],_mm_xor_si128(c2[2046],_mm_xor_si128(c2[1067],_mm_xor_si128(c2[379],_mm_xor_si128(c2[519],_mm_xor_si128(c2[1360],_mm_xor_si128(c2[802],_mm_xor_si128(c2[1096],_mm_xor_si128(c2[1236],_mm_xor_si128(c2[953],_mm_xor_si128(c2[1097],_mm_xor_si128(c2[409],_mm_xor_si128(c2[549],_mm_xor_si128(c2[1390],_mm_xor_si128(c2[1530],c2[130]))))))))))))))))))))))))))))))))))))))))));
+     d2[273]=simde_mm_xor_si128(c2[280],simde_mm_xor_si128(c2[420],simde_mm_xor_si128(c2[2105],simde_mm_xor_si128(c2[6],simde_mm_xor_si128(c2[425],simde_mm_xor_si128(c2[1406],simde_mm_xor_si128(c2[1136],simde_mm_xor_si128(c2[1276],simde_mm_xor_si128(c2[1977],simde_mm_xor_si128(c2[574],simde_mm_xor_si128(c2[172],simde_mm_xor_si128(c2[312],simde_mm_xor_si128(c2[171],simde_mm_xor_si128(c2[2148],simde_mm_xor_si128(c2[42],simde_mm_xor_si128(c2[887],simde_mm_xor_si128(c2[1027],simde_mm_xor_si128(c2[1307],simde_mm_xor_si128(c2[1878],simde_mm_xor_si128(c2[2018],simde_mm_xor_si128(c2[1456],simde_mm_xor_si128(c2[2160],simde_mm_xor_si128(c2[915],simde_mm_xor_si128(c2[1055],simde_mm_xor_si128(c2[1053],simde_mm_xor_si128(c2[1345],simde_mm_xor_si128(c2[1485],simde_mm_xor_si128(c2[1906],simde_mm_xor_si128(c2[2046],simde_mm_xor_si128(c2[1067],simde_mm_xor_si128(c2[379],simde_mm_xor_si128(c2[519],simde_mm_xor_si128(c2[1360],simde_mm_xor_si128(c2[802],simde_mm_xor_si128(c2[1096],simde_mm_xor_si128(c2[1236],simde_mm_xor_si128(c2[953],simde_mm_xor_si128(c2[1097],simde_mm_xor_si128(c2[409],simde_mm_xor_si128(c2[549],simde_mm_xor_si128(c2[1390],simde_mm_xor_si128(c2[1530],c2[130]))))))))))))))))))))))))))))))))))))))))));
 
 //row: 40
-     d2[280]=_mm_xor_si128(c2[1546],_mm_xor_si128(c2[1826],_mm_xor_si128(c2[1125],_mm_xor_si128(c2[1405],_mm_xor_si128(c2[1544],_mm_xor_si128(c2[1824],_mm_xor_si128(c2[156],_mm_xor_si128(c2[436],_mm_xor_si128(c2[857],_mm_xor_si128(c2[1137],_mm_xor_si128(c2[1700],_mm_xor_si128(c2[1840],_mm_xor_si128(c2[1980],_mm_xor_si128(c2[1431],_mm_xor_si128(c2[1711],_mm_xor_si128(c2[1290],_mm_xor_si128(c2[1430],_mm_xor_si128(c2[1570],_mm_xor_si128(c2[314],_mm_xor_si128(c2[1168],_mm_xor_si128(c2[1448],_mm_xor_si128(c2[2146],_mm_xor_si128(c2[187],_mm_xor_si128(c2[187],_mm_xor_si128(c2[467],_mm_xor_si128(c2[898],_mm_xor_si128(c2[1178],_mm_xor_si128(c2[336],_mm_xor_si128(c2[616],_mm_xor_si128(c2[1040],_mm_xor_si128(c2[1180],_mm_xor_si128(c2[1320],_mm_xor_si128(c2[2174],_mm_xor_si128(c2[215],_mm_xor_si128(c2[2172],_mm_xor_si128(c2[73],_mm_xor_si128(c2[213],_mm_xor_si128(c2[365],_mm_xor_si128(c2[645],_mm_xor_si128(c2[926],_mm_xor_si128(c2[1206],_mm_xor_si128(c2[2186],_mm_xor_si128(c2[87],_mm_xor_si128(c2[227],_mm_xor_si128(c2[1638],_mm_xor_si128(c2[1918],_mm_xor_si128(c2[240],_mm_xor_si128(c2[380],_mm_xor_si128(c2[520],_mm_xor_si128(c2[116],_mm_xor_si128(c2[396],_mm_xor_si128(c2[2072],_mm_xor_si128(c2[113],_mm_xor_si128(c2[2216],_mm_xor_si128(c2[117],_mm_xor_si128(c2[257],_mm_xor_si128(c2[1668],_mm_xor_si128(c2[1948],_mm_xor_si128(c2[410],_mm_xor_si128(c2[690],_mm_xor_si128(c2[1249],_mm_xor_si128(c2[1389],c2[1529]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+     d2[280]=simde_mm_xor_si128(c2[1546],simde_mm_xor_si128(c2[1826],simde_mm_xor_si128(c2[1125],simde_mm_xor_si128(c2[1405],simde_mm_xor_si128(c2[1544],simde_mm_xor_si128(c2[1824],simde_mm_xor_si128(c2[156],simde_mm_xor_si128(c2[436],simde_mm_xor_si128(c2[857],simde_mm_xor_si128(c2[1137],simde_mm_xor_si128(c2[1700],simde_mm_xor_si128(c2[1840],simde_mm_xor_si128(c2[1980],simde_mm_xor_si128(c2[1431],simde_mm_xor_si128(c2[1711],simde_mm_xor_si128(c2[1290],simde_mm_xor_si128(c2[1430],simde_mm_xor_si128(c2[1570],simde_mm_xor_si128(c2[314],simde_mm_xor_si128(c2[1168],simde_mm_xor_si128(c2[1448],simde_mm_xor_si128(c2[2146],simde_mm_xor_si128(c2[187],simde_mm_xor_si128(c2[187],simde_mm_xor_si128(c2[467],simde_mm_xor_si128(c2[898],simde_mm_xor_si128(c2[1178],simde_mm_xor_si128(c2[336],simde_mm_xor_si128(c2[616],simde_mm_xor_si128(c2[1040],simde_mm_xor_si128(c2[1180],simde_mm_xor_si128(c2[1320],simde_mm_xor_si128(c2[2174],simde_mm_xor_si128(c2[215],simde_mm_xor_si128(c2[2172],simde_mm_xor_si128(c2[73],simde_mm_xor_si128(c2[213],simde_mm_xor_si128(c2[365],simde_mm_xor_si128(c2[645],simde_mm_xor_si128(c2[926],simde_mm_xor_si128(c2[1206],simde_mm_xor_si128(c2[2186],simde_mm_xor_si128(c2[87],simde_mm_xor_si128(c2[227],simde_mm_xor_si128(c2[1638],simde_mm_xor_si128(c2[1918],simde_mm_xor_si128(c2[240],simde_mm_xor_si128(c2[380],simde_mm_xor_si128(c2[520],simde_mm_xor_si128(c2[116],simde_mm_xor_si128(c2[396],simde_mm_xor_si128(c2[2072],simde_mm_xor_si128(c2[113],simde_mm_xor_si128(c2[2216],simde_mm_xor_si128(c2[117],simde_mm_xor_si128(c2[257],simde_mm_xor_si128(c2[1668],simde_mm_xor_si128(c2[1948],simde_mm_xor_si128(c2[410],simde_mm_xor_si128(c2[690],simde_mm_xor_si128(c2[1249],simde_mm_xor_si128(c2[1389],c2[1529]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
 
 //row: 41
-     d2[287]=_mm_xor_si128(c2[1262],_mm_xor_si128(c2[1402],_mm_xor_si128(c2[981],_mm_xor_si128(c2[1400],_mm_xor_si128(c2[2118],_mm_xor_si128(c2[19],_mm_xor_si128(c2[720],_mm_xor_si128(c2[1556],_mm_xor_si128(c2[1134],_mm_xor_si128(c2[1154],_mm_xor_si128(c2[1294],_mm_xor_si128(c2[1153],_mm_xor_si128(c2[884],_mm_xor_si128(c2[1024],_mm_xor_si128(c2[2002],_mm_xor_si128(c2[43],_mm_xor_si128(c2[761],_mm_xor_si128(c2[199],_mm_xor_si128(c2[896],_mm_xor_si128(c2[2030],_mm_xor_si128(c2[2035],_mm_xor_si128(c2[776],_mm_xor_si128(c2[88],_mm_xor_si128(c2[228],_mm_xor_si128(c2[789],_mm_xor_si128(c2[2049],_mm_xor_si128(c2[1501],_mm_xor_si128(c2[103],_mm_xor_si128(c2[2218],_mm_xor_si128(c2[1935],_mm_xor_si128(c2[2072],_mm_xor_si128(c2[1391],_mm_xor_si128(c2[1531],_mm_xor_si128(c2[266],c2[1112]))))))))))))))))))))))))))))))))));
+     d2[287]=simde_mm_xor_si128(c2[1262],simde_mm_xor_si128(c2[1402],simde_mm_xor_si128(c2[981],simde_mm_xor_si128(c2[1400],simde_mm_xor_si128(c2[2118],simde_mm_xor_si128(c2[19],simde_mm_xor_si128(c2[720],simde_mm_xor_si128(c2[1556],simde_mm_xor_si128(c2[1134],simde_mm_xor_si128(c2[1154],simde_mm_xor_si128(c2[1294],simde_mm_xor_si128(c2[1153],simde_mm_xor_si128(c2[884],simde_mm_xor_si128(c2[1024],simde_mm_xor_si128(c2[2002],simde_mm_xor_si128(c2[43],simde_mm_xor_si128(c2[761],simde_mm_xor_si128(c2[199],simde_mm_xor_si128(c2[896],simde_mm_xor_si128(c2[2030],simde_mm_xor_si128(c2[2035],simde_mm_xor_si128(c2[776],simde_mm_xor_si128(c2[88],simde_mm_xor_si128(c2[228],simde_mm_xor_si128(c2[789],simde_mm_xor_si128(c2[2049],simde_mm_xor_si128(c2[1501],simde_mm_xor_si128(c2[103],simde_mm_xor_si128(c2[2218],simde_mm_xor_si128(c2[1935],simde_mm_xor_si128(c2[2072],simde_mm_xor_si128(c2[1391],simde_mm_xor_si128(c2[1531],simde_mm_xor_si128(c2[266],c2[1112]))))))))))))))))))))))))))))))))));
   }
 }
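
Editorial note (not part of the patch): the hunks above and below apply a single mechanical rule. Each Intel vector type and intrinsic gains a simde_ prefix (_mm_xor_si128 becomes simde_mm_xor_si128, __m64 becomes simde__m64), so the generated LDPC parity tables compile unchanged on ARM, where SIMDE supplies NEON or portable scalar fallbacks. The standalone sketch below illustrates both patterns: the 128-bit XOR chains used in the rows above and the 64-bit variant used by the byte-encoding file that follows. It assumes only a stock SIMDE checkout on the include path; OAI itself reaches the same definitions through PHY/sse_intrin.h.

#include <stdint.h>
#include <stdio.h>
#include <string.h>
#include "simde/x86/sse2.h" /* simde__m128i, simde_mm_xor_si128 */
#include "simde/x86/mmx.h"  /* simde__m64,   simde_mm_xor_si64  */

int main(void) {
  uint8_t a[16], b[16], out[16];
  for (int i = 0; i < 16; i++) { a[i] = (uint8_t)i; b[i] = 0xFF; }

  /* 128-bit XOR: the shape of the simde_mm_xor_si128 chains above.
     With SIMDE_X86_SSE2_NATIVE defined this is the native SSE2
     instruction; on ARM, SIMDE substitutes a NEON or scalar version. */
  simde__m128i va = simde_mm_loadu_si128((const simde__m128i *)a);
  simde__m128i vb = simde_mm_loadu_si128((const simde__m128i *)b);
  simde_mm_storeu_si128((simde__m128i *)out, simde_mm_xor_si128(va, vb));

  /* 64-bit XOR: the shape of the ldpc_BG2_Zc120_byte hunk below, which
     recasts its uint8_t buffers as simde__m64 lanes. memcpy is used
     here only to sidestep alignment concerns in a standalone demo. */
  simde__m64 ma, mb, mx;
  memcpy(&ma, a, sizeof ma);
  memcpy(&mb, b, sizeof mb);
  mx = simde_mm_xor_si64(ma, mb);
  memcpy(out, &mx, sizeof mx);

  for (int i = 0; i < 16; i++) printf("%02x ", out[i]);
  printf("\n");
  return 0;
}

Because each simde_* call resolves to the corresponding native instruction whenever the matching SIMDE_X86_*_NATIVE macro is defined, the substitution should be behavior-preserving on x86 while unblocking the ARM builds.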
diff --git a/openair1/PHY/CODING/nrLDPC_encoder/ldpc_BG2_Zc120_byte.c b/openair1/PHY/CODING/nrLDPC_encoder/ldpc_BG2_Zc120_byte.c
index 21e79114d00a9cce21ebde35d8ad16829db9db08..cb4cc6771988896103163f9c37c0a1de3583e25b 100644
--- a/openair1/PHY/CODING/nrLDPC_encoder/ldpc_BG2_Zc120_byte.c
+++ b/openair1/PHY/CODING/nrLDPC_encoder/ldpc_BG2_Zc120_byte.c
@@ -1,9 +1,9 @@
 #include "PHY/sse_intrin.h"
 // generated code for Zc=120, byte encoding
 static inline __attribute__ ((no_sanitize("address", "undefined"))) void ldpc_BG2_Zc120_byte(uint8_t *c,uint8_t *d) {
-  __m64 *csimd=(__m64 *)c,*dsimd=(__m64 *)d;
+  simde__m64 *csimd=(simde__m64 *)c,*dsimd=(simde__m64 *)d;
 
-  __m64 *c2,*d2;
+  simde__m64 *c2,*d2;
 
   int i2;
   for (i2=0; i2<15; i2++) {
@@ -11,129 +11,129 @@ static inline __attribute__ ((no_sanitize("address", "undefined"))) void ldpc_BG
      d2=&dsimd[i2];
 
 //row: 0
-     d2[0]=_mm_xor_si64(c2[303],_mm_xor_si64(c2[1802],_mm_xor_si64(c2[609],_mm_xor_si64(c2[931],_mm_xor_si64(c2[2133],_mm_xor_si64(c2[2144],_mm_xor_si64(c2[2168],_mm_xor_si64(c2[966],_mm_xor_si64(c2[1592],_mm_xor_si64(c2[1896],_mm_xor_si64(c2[990],_mm_xor_si64(c2[2227],_mm_xor_si64(c2[1626],_mm_xor_si64(c2[2231],_mm_xor_si64(c2[1053],_mm_xor_si64(c2[1963],_mm_xor_si64(c2[2282],_mm_xor_si64(c2[192],_mm_xor_si64(c2[2284],_mm_xor_si64(c2[2312],_mm_xor_si64(c2[2319],_mm_xor_si64(c2[541],_mm_xor_si64(c2[1144],_mm_xor_si64(c2[1145],_mm_xor_si64(c2[284],_mm_xor_si64(c2[2375],c2[1179]))))))))))))))))))))))))));
+     d2[0]=simde_mm_xor_si64(c2[303],simde_mm_xor_si64(c2[1802],simde_mm_xor_si64(c2[609],simde_mm_xor_si64(c2[931],simde_mm_xor_si64(c2[2133],simde_mm_xor_si64(c2[2144],simde_mm_xor_si64(c2[2168],simde_mm_xor_si64(c2[966],simde_mm_xor_si64(c2[1592],simde_mm_xor_si64(c2[1896],simde_mm_xor_si64(c2[990],simde_mm_xor_si64(c2[2227],simde_mm_xor_si64(c2[1626],simde_mm_xor_si64(c2[2231],simde_mm_xor_si64(c2[1053],simde_mm_xor_si64(c2[1963],simde_mm_xor_si64(c2[2282],simde_mm_xor_si64(c2[192],simde_mm_xor_si64(c2[2284],simde_mm_xor_si64(c2[2312],simde_mm_xor_si64(c2[2319],simde_mm_xor_si64(c2[541],simde_mm_xor_si64(c2[1144],simde_mm_xor_si64(c2[1145],simde_mm_xor_si64(c2[284],simde_mm_xor_si64(c2[2375],c2[1179]))))))))))))))))))))))))));
 
 //row: 1
-     d2[15]=_mm_xor_si64(c2[303],_mm_xor_si64(c2[603],_mm_xor_si64(c2[2102],_mm_xor_si64(c2[909],_mm_xor_si64(c2[931],_mm_xor_si64(c2[1231],_mm_xor_si64(c2[34],_mm_xor_si64(c2[30],_mm_xor_si64(c2[2168],_mm_xor_si64(c2[69],_mm_xor_si64(c2[1266],_mm_xor_si64(c2[1592],_mm_xor_si64(c2[1892],_mm_xor_si64(c2[2196],_mm_xor_si64(c2[1290],_mm_xor_si64(c2[128],_mm_xor_si64(c2[1926],_mm_xor_si64(c2[132],_mm_xor_si64(c2[1353],_mm_xor_si64(c2[2263],_mm_xor_si64(c2[2282],_mm_xor_si64(c2[183],_mm_xor_si64(c2[492],_mm_xor_si64(c2[185],_mm_xor_si64(c2[213],_mm_xor_si64(c2[220],_mm_xor_si64(c2[841],_mm_xor_si64(c2[1444],_mm_xor_si64(c2[1445],_mm_xor_si64(c2[284],_mm_xor_si64(c2[584],_mm_xor_si64(c2[276],c2[1479]))))))))))))))))))))))))))))))));
+     d2[15]=simde_mm_xor_si64(c2[303],simde_mm_xor_si64(c2[603],simde_mm_xor_si64(c2[2102],simde_mm_xor_si64(c2[909],simde_mm_xor_si64(c2[931],simde_mm_xor_si64(c2[1231],simde_mm_xor_si64(c2[34],simde_mm_xor_si64(c2[30],simde_mm_xor_si64(c2[2168],simde_mm_xor_si64(c2[69],simde_mm_xor_si64(c2[1266],simde_mm_xor_si64(c2[1592],simde_mm_xor_si64(c2[1892],simde_mm_xor_si64(c2[2196],simde_mm_xor_si64(c2[1290],simde_mm_xor_si64(c2[128],simde_mm_xor_si64(c2[1926],simde_mm_xor_si64(c2[132],simde_mm_xor_si64(c2[1353],simde_mm_xor_si64(c2[2263],simde_mm_xor_si64(c2[2282],simde_mm_xor_si64(c2[183],simde_mm_xor_si64(c2[492],simde_mm_xor_si64(c2[185],simde_mm_xor_si64(c2[213],simde_mm_xor_si64(c2[220],simde_mm_xor_si64(c2[841],simde_mm_xor_si64(c2[1444],simde_mm_xor_si64(c2[1445],simde_mm_xor_si64(c2[284],simde_mm_xor_si64(c2[584],simde_mm_xor_si64(c2[276],c2[1479]))))))))))))))))))))))))))))))));
 
 //row: 2
-     d2[30]=_mm_xor_si64(c2[303],_mm_xor_si64(c2[603],_mm_xor_si64(c2[1802],_mm_xor_si64(c2[2102],_mm_xor_si64(c2[909],_mm_xor_si64(c2[931],_mm_xor_si64(c2[1231],_mm_xor_si64(c2[34],_mm_xor_si64(c2[30],_mm_xor_si64(c2[2168],_mm_xor_si64(c2[69],_mm_xor_si64(c2[1266],_mm_xor_si64(c2[1592],_mm_xor_si64(c2[1892],_mm_xor_si64(c2[1896],_mm_xor_si64(c2[2196],_mm_xor_si64(c2[1290],_mm_xor_si64(c2[2227],_mm_xor_si64(c2[128],_mm_xor_si64(c2[1926],_mm_xor_si64(c2[132],_mm_xor_si64(c2[1053],_mm_xor_si64(c2[1353],_mm_xor_si64(c2[2263],_mm_xor_si64(c2[2282],_mm_xor_si64(c2[183],_mm_xor_si64(c2[192],_mm_xor_si64(c2[492],_mm_xor_si64(c2[185],_mm_xor_si64(c2[2312],_mm_xor_si64(c2[213],_mm_xor_si64(c2[220],_mm_xor_si64(c2[541],_mm_xor_si64(c2[841],_mm_xor_si64(c2[1444],_mm_xor_si64(c2[1445],_mm_xor_si64(c2[284],_mm_xor_si64(c2[584],_mm_xor_si64(c2[2375],_mm_xor_si64(c2[276],c2[1479]))))))))))))))))))))))))))))))))))))))));
+     d2[30]=simde_mm_xor_si64(c2[303],simde_mm_xor_si64(c2[603],simde_mm_xor_si64(c2[1802],simde_mm_xor_si64(c2[2102],simde_mm_xor_si64(c2[909],simde_mm_xor_si64(c2[931],simde_mm_xor_si64(c2[1231],simde_mm_xor_si64(c2[34],simde_mm_xor_si64(c2[30],simde_mm_xor_si64(c2[2168],simde_mm_xor_si64(c2[69],simde_mm_xor_si64(c2[1266],simde_mm_xor_si64(c2[1592],simde_mm_xor_si64(c2[1892],simde_mm_xor_si64(c2[1896],simde_mm_xor_si64(c2[2196],simde_mm_xor_si64(c2[1290],simde_mm_xor_si64(c2[2227],simde_mm_xor_si64(c2[128],simde_mm_xor_si64(c2[1926],simde_mm_xor_si64(c2[132],simde_mm_xor_si64(c2[1053],simde_mm_xor_si64(c2[1353],simde_mm_xor_si64(c2[2263],simde_mm_xor_si64(c2[2282],simde_mm_xor_si64(c2[183],simde_mm_xor_si64(c2[192],simde_mm_xor_si64(c2[492],simde_mm_xor_si64(c2[185],simde_mm_xor_si64(c2[2312],simde_mm_xor_si64(c2[213],simde_mm_xor_si64(c2[220],simde_mm_xor_si64(c2[541],simde_mm_xor_si64(c2[841],simde_mm_xor_si64(c2[1444],simde_mm_xor_si64(c2[1445],simde_mm_xor_si64(c2[284],simde_mm_xor_si64(c2[584],simde_mm_xor_si64(c2[2375],simde_mm_xor_si64(c2[276],c2[1479]))))))))))))))))))))))))))))))))))))))));
 
 //row: 3
-     d2[45]=_mm_xor_si64(c2[603],_mm_xor_si64(c2[2102],_mm_xor_si64(c2[909],_mm_xor_si64(c2[1231],_mm_xor_si64(c2[34],_mm_xor_si64(c2[2144],_mm_xor_si64(c2[30],_mm_xor_si64(c2[69],_mm_xor_si64(c2[966],_mm_xor_si64(c2[1266],_mm_xor_si64(c2[1892],_mm_xor_si64(c2[2196],_mm_xor_si64(c2[1290],_mm_xor_si64(c2[128],_mm_xor_si64(c2[1926],_mm_xor_si64(c2[2231],_mm_xor_si64(c2[132],_mm_xor_si64(c2[1353],_mm_xor_si64(c2[1963],_mm_xor_si64(c2[2263],_mm_xor_si64(c2[183],_mm_xor_si64(c2[492],_mm_xor_si64(c2[2284],_mm_xor_si64(c2[185],_mm_xor_si64(c2[213],_mm_xor_si64(c2[2319],_mm_xor_si64(c2[220],_mm_xor_si64(c2[841],_mm_xor_si64(c2[1444],_mm_xor_si64(c2[1145],_mm_xor_si64(c2[1445],_mm_xor_si64(c2[584],_mm_xor_si64(c2[276],_mm_xor_si64(c2[1179],c2[1479]))))))))))))))))))))))))))))))))));
+     d2[45]=simde_mm_xor_si64(c2[603],simde_mm_xor_si64(c2[2102],simde_mm_xor_si64(c2[909],simde_mm_xor_si64(c2[1231],simde_mm_xor_si64(c2[34],simde_mm_xor_si64(c2[2144],simde_mm_xor_si64(c2[30],simde_mm_xor_si64(c2[69],simde_mm_xor_si64(c2[966],simde_mm_xor_si64(c2[1266],simde_mm_xor_si64(c2[1892],simde_mm_xor_si64(c2[2196],simde_mm_xor_si64(c2[1290],simde_mm_xor_si64(c2[128],simde_mm_xor_si64(c2[1926],simde_mm_xor_si64(c2[2231],simde_mm_xor_si64(c2[132],simde_mm_xor_si64(c2[1353],simde_mm_xor_si64(c2[1963],simde_mm_xor_si64(c2[2263],simde_mm_xor_si64(c2[183],simde_mm_xor_si64(c2[492],simde_mm_xor_si64(c2[2284],simde_mm_xor_si64(c2[185],simde_mm_xor_si64(c2[213],simde_mm_xor_si64(c2[2319],simde_mm_xor_si64(c2[220],simde_mm_xor_si64(c2[841],simde_mm_xor_si64(c2[1444],simde_mm_xor_si64(c2[1145],simde_mm_xor_si64(c2[1445],simde_mm_xor_si64(c2[584],simde_mm_xor_si64(c2[276],simde_mm_xor_si64(c2[1179],c2[1479]))))))))))))))))))))))))))))))))));
 
 //row: 4
-     d2[60]=_mm_xor_si64(c2[1510],_mm_xor_si64(c2[1810],_mm_xor_si64(c2[910],_mm_xor_si64(c2[2101],_mm_xor_si64(c2[1503],_mm_xor_si64(c2[2138],_mm_xor_si64(c2[39],_mm_xor_si64(c2[1241],_mm_xor_si64(c2[1237],_mm_xor_si64(c2[1232],_mm_xor_si64(c2[961],_mm_xor_si64(c2[1261],_mm_xor_si64(c2[74],_mm_xor_si64(c2[400],_mm_xor_si64(c2[700],_mm_xor_si64(c2[1004],_mm_xor_si64(c2[98],_mm_xor_si64(c2[1320],_mm_xor_si64(c2[734],_mm_xor_si64(c2[1324],_mm_xor_si64(c2[161],_mm_xor_si64(c2[1056],_mm_xor_si64(c2[1090],_mm_xor_si64(c2[1390],_mm_xor_si64(c2[1684],_mm_xor_si64(c2[1392],_mm_xor_si64(c2[1420],_mm_xor_si64(c2[1412],_mm_xor_si64(c2[2048],_mm_xor_si64(c2[252],_mm_xor_si64(c2[253],_mm_xor_si64(c2[1476],_mm_xor_si64(c2[1776],_mm_xor_si64(c2[1483],c2[272]))))))))))))))))))))))))))))))))));
+     d2[60]=simde_mm_xor_si64(c2[1510],simde_mm_xor_si64(c2[1810],simde_mm_xor_si64(c2[910],simde_mm_xor_si64(c2[2101],simde_mm_xor_si64(c2[1503],simde_mm_xor_si64(c2[2138],simde_mm_xor_si64(c2[39],simde_mm_xor_si64(c2[1241],simde_mm_xor_si64(c2[1237],simde_mm_xor_si64(c2[1232],simde_mm_xor_si64(c2[961],simde_mm_xor_si64(c2[1261],simde_mm_xor_si64(c2[74],simde_mm_xor_si64(c2[400],simde_mm_xor_si64(c2[700],simde_mm_xor_si64(c2[1004],simde_mm_xor_si64(c2[98],simde_mm_xor_si64(c2[1320],simde_mm_xor_si64(c2[734],simde_mm_xor_si64(c2[1324],simde_mm_xor_si64(c2[161],simde_mm_xor_si64(c2[1056],simde_mm_xor_si64(c2[1090],simde_mm_xor_si64(c2[1390],simde_mm_xor_si64(c2[1684],simde_mm_xor_si64(c2[1392],simde_mm_xor_si64(c2[1420],simde_mm_xor_si64(c2[1412],simde_mm_xor_si64(c2[2048],simde_mm_xor_si64(c2[252],simde_mm_xor_si64(c2[253],simde_mm_xor_si64(c2[1476],simde_mm_xor_si64(c2[1776],simde_mm_xor_si64(c2[1483],c2[272]))))))))))))))))))))))))))))))))));
 
 //row: 5
-     d2[75]=_mm_xor_si64(c2[14],_mm_xor_si64(c2[314],_mm_xor_si64(c2[1813],_mm_xor_si64(c2[605],_mm_xor_si64(c2[300],_mm_xor_si64(c2[642],_mm_xor_si64(c2[942],_mm_xor_si64(c2[2144],_mm_xor_si64(c2[2140],_mm_xor_si64(c2[335],_mm_xor_si64(c2[1864],_mm_xor_si64(c2[2164],_mm_xor_si64(c2[962],_mm_xor_si64(c2[1303],_mm_xor_si64(c2[1603],_mm_xor_si64(c2[1892],_mm_xor_si64(c2[1001],_mm_xor_si64(c2[2223],_mm_xor_si64(c2[1622],_mm_xor_si64(c2[2227],_mm_xor_si64(c2[1064],_mm_xor_si64(c2[1959],_mm_xor_si64(c2[456],_mm_xor_si64(c2[1993],_mm_xor_si64(c2[2293],_mm_xor_si64(c2[188],_mm_xor_si64(c2[2280],_mm_xor_si64(c2[2323],_mm_xor_si64(c2[2315],_mm_xor_si64(c2[221],_mm_xor_si64(c2[552],_mm_xor_si64(c2[1140],_mm_xor_si64(c2[1141],_mm_xor_si64(c2[2379],_mm_xor_si64(c2[280],_mm_xor_si64(c2[2371],c2[1175]))))))))))))))))))))))))))))))))))));
+     d2[75]=simde_mm_xor_si64(c2[14],simde_mm_xor_si64(c2[314],simde_mm_xor_si64(c2[1813],simde_mm_xor_si64(c2[605],simde_mm_xor_si64(c2[300],simde_mm_xor_si64(c2[642],simde_mm_xor_si64(c2[942],simde_mm_xor_si64(c2[2144],simde_mm_xor_si64(c2[2140],simde_mm_xor_si64(c2[335],simde_mm_xor_si64(c2[1864],simde_mm_xor_si64(c2[2164],simde_mm_xor_si64(c2[962],simde_mm_xor_si64(c2[1303],simde_mm_xor_si64(c2[1603],simde_mm_xor_si64(c2[1892],simde_mm_xor_si64(c2[1001],simde_mm_xor_si64(c2[2223],simde_mm_xor_si64(c2[1622],simde_mm_xor_si64(c2[2227],simde_mm_xor_si64(c2[1064],simde_mm_xor_si64(c2[1959],simde_mm_xor_si64(c2[456],simde_mm_xor_si64(c2[1993],simde_mm_xor_si64(c2[2293],simde_mm_xor_si64(c2[188],simde_mm_xor_si64(c2[2280],simde_mm_xor_si64(c2[2323],simde_mm_xor_si64(c2[2315],simde_mm_xor_si64(c2[221],simde_mm_xor_si64(c2[552],simde_mm_xor_si64(c2[1140],simde_mm_xor_si64(c2[1141],simde_mm_xor_si64(c2[2379],simde_mm_xor_si64(c2[280],simde_mm_xor_si64(c2[2371],c2[1175]))))))))))))))))))))))))))))))))))));
 
 //row: 6
-     d2[90]=_mm_xor_si64(c2[310],_mm_xor_si64(c2[610],_mm_xor_si64(c2[2109],_mm_xor_si64(c2[901],_mm_xor_si64(c2[302],_mm_xor_si64(c2[938],_mm_xor_si64(c2[1238],_mm_xor_si64(c2[41],_mm_xor_si64(c2[37],_mm_xor_si64(c2[2160],_mm_xor_si64(c2[61],_mm_xor_si64(c2[1273],_mm_xor_si64(c2[1599],_mm_xor_si64(c2[1899],_mm_xor_si64(c2[2203],_mm_xor_si64(c2[1297],_mm_xor_si64(c2[120],_mm_xor_si64(c2[1933],_mm_xor_si64(c2[124],_mm_xor_si64(c2[1360],_mm_xor_si64(c2[2255],_mm_xor_si64(c2[159],_mm_xor_si64(c2[2289],_mm_xor_si64(c2[190],_mm_xor_si64(c2[484],_mm_xor_si64(c2[192],_mm_xor_si64(c2[220],_mm_xor_si64(c2[212],_mm_xor_si64(c2[1416],_mm_xor_si64(c2[848],_mm_xor_si64(c2[1451],_mm_xor_si64(c2[1452],_mm_xor_si64(c2[276],_mm_xor_si64(c2[576],_mm_xor_si64(c2[283],_mm_xor_si64(c2[1471],c2[1470]))))))))))))))))))))))))))))))))))));
+     d2[90]=simde_mm_xor_si64(c2[310],simde_mm_xor_si64(c2[610],simde_mm_xor_si64(c2[2109],simde_mm_xor_si64(c2[901],simde_mm_xor_si64(c2[302],simde_mm_xor_si64(c2[938],simde_mm_xor_si64(c2[1238],simde_mm_xor_si64(c2[41],simde_mm_xor_si64(c2[37],simde_mm_xor_si64(c2[2160],simde_mm_xor_si64(c2[61],simde_mm_xor_si64(c2[1273],simde_mm_xor_si64(c2[1599],simde_mm_xor_si64(c2[1899],simde_mm_xor_si64(c2[2203],simde_mm_xor_si64(c2[1297],simde_mm_xor_si64(c2[120],simde_mm_xor_si64(c2[1933],simde_mm_xor_si64(c2[124],simde_mm_xor_si64(c2[1360],simde_mm_xor_si64(c2[2255],simde_mm_xor_si64(c2[159],simde_mm_xor_si64(c2[2289],simde_mm_xor_si64(c2[190],simde_mm_xor_si64(c2[484],simde_mm_xor_si64(c2[192],simde_mm_xor_si64(c2[220],simde_mm_xor_si64(c2[212],simde_mm_xor_si64(c2[1416],simde_mm_xor_si64(c2[848],simde_mm_xor_si64(c2[1451],simde_mm_xor_si64(c2[1452],simde_mm_xor_si64(c2[276],simde_mm_xor_si64(c2[576],simde_mm_xor_si64(c2[283],simde_mm_xor_si64(c2[1471],c2[1470]))))))))))))))))))))))))))))))))))));
 
 //row: 7
-     d2[105]=_mm_xor_si64(c2[1509],_mm_xor_si64(c2[1809],_mm_xor_si64(c2[1207],_mm_xor_si64(c2[909],_mm_xor_si64(c2[307],_mm_xor_si64(c2[2100],_mm_xor_si64(c2[1513],_mm_xor_si64(c2[2137],_mm_xor_si64(c2[38],_mm_xor_si64(c2[1835],_mm_xor_si64(c2[1240],_mm_xor_si64(c2[638],_mm_xor_si64(c2[1236],_mm_xor_si64(c2[334],_mm_xor_si64(c2[634],_mm_xor_si64(c2[1840],_mm_xor_si64(c2[960],_mm_xor_si64(c2[1260],_mm_xor_si64(c2[673],_mm_xor_si64(c2[73],_mm_xor_si64(c2[1570],_mm_xor_si64(c2[1870],_mm_xor_si64(c2[399],_mm_xor_si64(c2[699],_mm_xor_si64(c2[97],_mm_xor_si64(c2[1003],_mm_xor_si64(c2[401],_mm_xor_si64(c2[97],_mm_xor_si64(c2[1894],_mm_xor_si64(c2[1334],_mm_xor_si64(c2[732],_mm_xor_si64(c2[733],_mm_xor_si64(c2[131],_mm_xor_si64(c2[1323],_mm_xor_si64(c2[421],_mm_xor_si64(c2[721],_mm_xor_si64(c2[160],_mm_xor_si64(c2[1957],_mm_xor_si64(c2[1055],_mm_xor_si64(c2[153],_mm_xor_si64(c2[453],_mm_xor_si64(c2[758],_mm_xor_si64(c2[1089],_mm_xor_si64(c2[1389],_mm_xor_si64(c2[787],_mm_xor_si64(c2[1683],_mm_xor_si64(c2[1081],_mm_xor_si64(c2[1391],_mm_xor_si64(c2[489],_mm_xor_si64(c2[789],_mm_xor_si64(c2[1419],_mm_xor_si64(c2[817],_mm_xor_si64(c2[1411],_mm_xor_si64(c2[524],_mm_xor_si64(c2[824],_mm_xor_si64(c2[2320],_mm_xor_si64(c2[2047],_mm_xor_si64(c2[1445],_mm_xor_si64(c2[251],_mm_xor_si64(c2[2048],_mm_xor_si64(c2[252],_mm_xor_si64(c2[1749],_mm_xor_si64(c2[2049],_mm_xor_si64(c2[1475],_mm_xor_si64(c2[1775],_mm_xor_si64(c2[1173],_mm_xor_si64(c2[1482],_mm_xor_si64(c2[880],_mm_xor_si64(c2[271],_mm_xor_si64(c2[1783],c2[2083]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+     d2[105]=simde_mm_xor_si64(c2[1509],simde_mm_xor_si64(c2[1809],simde_mm_xor_si64(c2[1207],simde_mm_xor_si64(c2[909],simde_mm_xor_si64(c2[307],simde_mm_xor_si64(c2[2100],simde_mm_xor_si64(c2[1513],simde_mm_xor_si64(c2[2137],simde_mm_xor_si64(c2[38],simde_mm_xor_si64(c2[1835],simde_mm_xor_si64(c2[1240],simde_mm_xor_si64(c2[638],simde_mm_xor_si64(c2[1236],simde_mm_xor_si64(c2[334],simde_mm_xor_si64(c2[634],simde_mm_xor_si64(c2[1840],simde_mm_xor_si64(c2[960],simde_mm_xor_si64(c2[1260],simde_mm_xor_si64(c2[673],simde_mm_xor_si64(c2[73],simde_mm_xor_si64(c2[1570],simde_mm_xor_si64(c2[1870],simde_mm_xor_si64(c2[399],simde_mm_xor_si64(c2[699],simde_mm_xor_si64(c2[97],simde_mm_xor_si64(c2[1003],simde_mm_xor_si64(c2[401],simde_mm_xor_si64(c2[97],simde_mm_xor_si64(c2[1894],simde_mm_xor_si64(c2[1334],simde_mm_xor_si64(c2[732],simde_mm_xor_si64(c2[733],simde_mm_xor_si64(c2[131],simde_mm_xor_si64(c2[1323],simde_mm_xor_si64(c2[421],simde_mm_xor_si64(c2[721],simde_mm_xor_si64(c2[160],simde_mm_xor_si64(c2[1957],simde_mm_xor_si64(c2[1055],simde_mm_xor_si64(c2[153],simde_mm_xor_si64(c2[453],simde_mm_xor_si64(c2[758],simde_mm_xor_si64(c2[1089],simde_mm_xor_si64(c2[1389],simde_mm_xor_si64(c2[787],simde_mm_xor_si64(c2[1683],simde_mm_xor_si64(c2[1081],simde_mm_xor_si64(c2[1391],simde_mm_xor_si64(c2[489],simde_mm_xor_si64(c2[789],simde_mm_xor_si64(c2[1419],simde_mm_xor_si64(c2[817],simde_mm_xor_si64(c2[1411],simde_mm_xor_si64(c2[524],simde_mm_xor_si64(c2[824],simde_mm_xor_si64(c2[2320],simde_mm_xor_si64(c2[2047],simde_mm_xor_si64(c2[1445],simde_mm_xor_si64(c2[251],simde_mm_xor_si64(c2[2048],simde_mm_xor_si64(c2[252],simde_mm_xor_si64(c2[1749],simde_mm_xor_si64(c2[2049],simde_mm_xor_si64(c2[1475],simde_mm_xor_si64(c2[1775],simde_mm_xor_si64(c2[1173],simde_mm_xor_si64(c2[1482],simde_mm_xor_si64(c2[880],simde_mm_xor_si64(c2[271],simde_mm_xor_si64(c2[1783],c2[2083]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
 
 //row: 8
-     d2[120]=_mm_xor_si64(c2[601],_mm_xor_si64(c2[901],_mm_xor_si64(c2[2100],_mm_xor_si64(c2[1],_mm_xor_si64(c2[1207],_mm_xor_si64(c2[7],_mm_xor_si64(c2[1244],_mm_xor_si64(c2[1544],_mm_xor_si64(c2[332],_mm_xor_si64(c2[343],_mm_xor_si64(c2[336],_mm_xor_si64(c2[67],_mm_xor_si64(c2[367],_mm_xor_si64(c2[1564],_mm_xor_si64(c2[1890],_mm_xor_si64(c2[2190],_mm_xor_si64(c2[2194],_mm_xor_si64(c2[95],_mm_xor_si64(c2[1603],_mm_xor_si64(c2[126],_mm_xor_si64(c2[426],_mm_xor_si64(c2[2224],_mm_xor_si64(c2[430],_mm_xor_si64(c2[1351],_mm_xor_si64(c2[1651],_mm_xor_si64(c2[162],_mm_xor_si64(c2[181],_mm_xor_si64(c2[481],_mm_xor_si64(c2[490],_mm_xor_si64(c2[790],_mm_xor_si64(c2[483],_mm_xor_si64(c2[211],_mm_xor_si64(c2[511],_mm_xor_si64(c2[518],_mm_xor_si64(c2[854],_mm_xor_si64(c2[1154],_mm_xor_si64(c2[1742],_mm_xor_si64(c2[1743],_mm_xor_si64(c2[582],_mm_xor_si64(c2[882],_mm_xor_si64(c2[274],_mm_xor_si64(c2[574],c2[1777]))))))))))))))))))))))))))))))))))))))))));
+     d2[120]=simde_mm_xor_si64(c2[601],simde_mm_xor_si64(c2[901],simde_mm_xor_si64(c2[2100],simde_mm_xor_si64(c2[1],simde_mm_xor_si64(c2[1207],simde_mm_xor_si64(c2[7],simde_mm_xor_si64(c2[1244],simde_mm_xor_si64(c2[1544],simde_mm_xor_si64(c2[332],simde_mm_xor_si64(c2[343],simde_mm_xor_si64(c2[336],simde_mm_xor_si64(c2[67],simde_mm_xor_si64(c2[367],simde_mm_xor_si64(c2[1564],simde_mm_xor_si64(c2[1890],simde_mm_xor_si64(c2[2190],simde_mm_xor_si64(c2[2194],simde_mm_xor_si64(c2[95],simde_mm_xor_si64(c2[1603],simde_mm_xor_si64(c2[126],simde_mm_xor_si64(c2[426],simde_mm_xor_si64(c2[2224],simde_mm_xor_si64(c2[430],simde_mm_xor_si64(c2[1351],simde_mm_xor_si64(c2[1651],simde_mm_xor_si64(c2[162],simde_mm_xor_si64(c2[181],simde_mm_xor_si64(c2[481],simde_mm_xor_si64(c2[490],simde_mm_xor_si64(c2[790],simde_mm_xor_si64(c2[483],simde_mm_xor_si64(c2[211],simde_mm_xor_si64(c2[511],simde_mm_xor_si64(c2[518],simde_mm_xor_si64(c2[854],simde_mm_xor_si64(c2[1154],simde_mm_xor_si64(c2[1742],simde_mm_xor_si64(c2[1743],simde_mm_xor_si64(c2[582],simde_mm_xor_si64(c2[882],simde_mm_xor_si64(c2[274],simde_mm_xor_si64(c2[574],c2[1777]))))))))))))))))))))))))))))))))))))))))));
 
 //row: 9
-     d2[135]=_mm_xor_si64(c2[309],_mm_xor_si64(c2[1511],_mm_xor_si64(c2[1811],_mm_xor_si64(c2[1808],_mm_xor_si64(c2[911],_mm_xor_si64(c2[600],_mm_xor_si64(c2[2102],_mm_xor_si64(c2[937],_mm_xor_si64(c2[2139],_mm_xor_si64(c2[40],_mm_xor_si64(c2[2139],_mm_xor_si64(c2[1242],_mm_xor_si64(c2[2135],_mm_xor_si64(c2[1238],_mm_xor_si64(c2[2135],_mm_xor_si64(c2[2174],_mm_xor_si64(c2[962],_mm_xor_si64(c2[1262],_mm_xor_si64(c2[972],_mm_xor_si64(c2[60],_mm_xor_si64(c2[1598],_mm_xor_si64(c2[401],_mm_xor_si64(c2[701],_mm_xor_si64(c2[1902],_mm_xor_si64(c2[990],_mm_xor_si64(c2[996],_mm_xor_si64(c2[99],_mm_xor_si64(c2[2233],_mm_xor_si64(c2[1321],_mm_xor_si64(c2[1632],_mm_xor_si64(c2[720],_mm_xor_si64(c2[2222],_mm_xor_si64(c2[1325],_mm_xor_si64(c2[1059],_mm_xor_si64(c2[162],_mm_xor_si64(c2[1954],_mm_xor_si64(c2[1057],_mm_xor_si64(c2[2288],_mm_xor_si64(c2[1091],_mm_xor_si64(c2[1391],_mm_xor_si64(c2[183],_mm_xor_si64(c2[1685],_mm_xor_si64(c2[2290],_mm_xor_si64(c2[1393],_mm_xor_si64(c2[2318],_mm_xor_si64(c2[1421],_mm_xor_si64(c2[2310],_mm_xor_si64(c2[1413],_mm_xor_si64(c2[547],_mm_xor_si64(c2[2049],_mm_xor_si64(c2[1150],_mm_xor_si64(c2[253],_mm_xor_si64(c2[1151],_mm_xor_si64(c2[254],_mm_xor_si64(c2[2054],_mm_xor_si64(c2[275],_mm_xor_si64(c2[1477],_mm_xor_si64(c2[1777],_mm_xor_si64(c2[2381],_mm_xor_si64(c2[1484],_mm_xor_si64(c2[1170],c2[273])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+     d2[135]=simde_mm_xor_si64(c2[309],simde_mm_xor_si64(c2[1511],simde_mm_xor_si64(c2[1811],simde_mm_xor_si64(c2[1808],simde_mm_xor_si64(c2[911],simde_mm_xor_si64(c2[600],simde_mm_xor_si64(c2[2102],simde_mm_xor_si64(c2[937],simde_mm_xor_si64(c2[2139],simde_mm_xor_si64(c2[40],simde_mm_xor_si64(c2[2139],simde_mm_xor_si64(c2[1242],simde_mm_xor_si64(c2[2135],simde_mm_xor_si64(c2[1238],simde_mm_xor_si64(c2[2135],simde_mm_xor_si64(c2[2174],simde_mm_xor_si64(c2[962],simde_mm_xor_si64(c2[1262],simde_mm_xor_si64(c2[972],simde_mm_xor_si64(c2[60],simde_mm_xor_si64(c2[1598],simde_mm_xor_si64(c2[401],simde_mm_xor_si64(c2[701],simde_mm_xor_si64(c2[1902],simde_mm_xor_si64(c2[990],simde_mm_xor_si64(c2[996],simde_mm_xor_si64(c2[99],simde_mm_xor_si64(c2[2233],simde_mm_xor_si64(c2[1321],simde_mm_xor_si64(c2[1632],simde_mm_xor_si64(c2[720],simde_mm_xor_si64(c2[2222],simde_mm_xor_si64(c2[1325],simde_mm_xor_si64(c2[1059],simde_mm_xor_si64(c2[162],simde_mm_xor_si64(c2[1954],simde_mm_xor_si64(c2[1057],simde_mm_xor_si64(c2[2288],simde_mm_xor_si64(c2[1091],simde_mm_xor_si64(c2[1391],simde_mm_xor_si64(c2[183],simde_mm_xor_si64(c2[1685],simde_mm_xor_si64(c2[2290],simde_mm_xor_si64(c2[1393],simde_mm_xor_si64(c2[2318],simde_mm_xor_si64(c2[1421],simde_mm_xor_si64(c2[2310],simde_mm_xor_si64(c2[1413],simde_mm_xor_si64(c2[547],simde_mm_xor_si64(c2[2049],simde_mm_xor_si64(c2[1150],simde_mm_xor_si64(c2[253],simde_mm_xor_si64(c2[1151],simde_mm_xor_si64(c2[254],simde_mm_xor_si64(c2[2054],simde_mm_xor_si64(c2[275],simde_mm_xor_si64(c2[1477],simde_mm_xor_si64(c2[1777],simde_mm_xor_si64(c2[2381],simde_mm_xor_si64(c2[1484],simde_mm_xor_si64(c2[1170],c2[273])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
 
 //row: 10
-     d2[150]=_mm_xor_si64(c2[1804],_mm_xor_si64(c2[342],_mm_xor_si64(c2[191],c2[224])));
+     d2[150]=simde_mm_xor_si64(c2[1804],simde_mm_xor_si64(c2[342],simde_mm_xor_si64(c2[191],c2[224])));
 
 //row: 11
-     d2[165]=_mm_xor_si64(c2[1509],_mm_xor_si64(c2[609],_mm_xor_si64(c2[1800],_mm_xor_si64(c2[607],_mm_xor_si64(c2[2137],_mm_xor_si64(c2[940],_mm_xor_si64(c2[636],_mm_xor_si64(c2[936],_mm_xor_si64(c2[960],_mm_xor_si64(c2[1872],_mm_xor_si64(c2[2172],_mm_xor_si64(c2[399],_mm_xor_si64(c2[703],_mm_xor_si64(c2[2196],_mm_xor_si64(c2[1034],_mm_xor_si64(c2[433],_mm_xor_si64(c2[723],_mm_xor_si64(c2[1023],_mm_xor_si64(c2[2259],_mm_xor_si64(c2[455],_mm_xor_si64(c2[755],_mm_xor_si64(c2[1089],_mm_xor_si64(c2[1383],_mm_xor_si64(c2[791],_mm_xor_si64(c2[1091],_mm_xor_si64(c2[1119],_mm_xor_si64(c2[811],_mm_xor_si64(c2[1111],_mm_xor_si64(c2[2021],_mm_xor_si64(c2[1747],_mm_xor_si64(c2[2350],_mm_xor_si64(c2[2051],_mm_xor_si64(c2[2351],_mm_xor_si64(c2[1475],_mm_xor_si64(c2[1182],_mm_xor_si64(c2[2070],_mm_xor_si64(c2[2370],c2[276])))))))))))))))))))))))))))))))))))));
+     d2[165]=simde_mm_xor_si64(c2[1509],simde_mm_xor_si64(c2[609],simde_mm_xor_si64(c2[1800],simde_mm_xor_si64(c2[607],simde_mm_xor_si64(c2[2137],simde_mm_xor_si64(c2[940],simde_mm_xor_si64(c2[636],simde_mm_xor_si64(c2[936],simde_mm_xor_si64(c2[960],simde_mm_xor_si64(c2[1872],simde_mm_xor_si64(c2[2172],simde_mm_xor_si64(c2[399],simde_mm_xor_si64(c2[703],simde_mm_xor_si64(c2[2196],simde_mm_xor_si64(c2[1034],simde_mm_xor_si64(c2[433],simde_mm_xor_si64(c2[723],simde_mm_xor_si64(c2[1023],simde_mm_xor_si64(c2[2259],simde_mm_xor_si64(c2[455],simde_mm_xor_si64(c2[755],simde_mm_xor_si64(c2[1089],simde_mm_xor_si64(c2[1383],simde_mm_xor_si64(c2[791],simde_mm_xor_si64(c2[1091],simde_mm_xor_si64(c2[1119],simde_mm_xor_si64(c2[811],simde_mm_xor_si64(c2[1111],simde_mm_xor_si64(c2[2021],simde_mm_xor_si64(c2[1747],simde_mm_xor_si64(c2[2350],simde_mm_xor_si64(c2[2051],simde_mm_xor_si64(c2[2351],simde_mm_xor_si64(c2[1475],simde_mm_xor_si64(c2[1182],simde_mm_xor_si64(c2[2070],simde_mm_xor_si64(c2[2370],c2[276])))))))))))))))))))))))))))))))))))));
 
 //row: 12
-     d2[180]=_mm_xor_si64(c2[312],_mm_xor_si64(c2[612],_mm_xor_si64(c2[2111],_mm_xor_si64(c2[903],_mm_xor_si64(c2[940],_mm_xor_si64(c2[1240],_mm_xor_si64(c2[43],_mm_xor_si64(c2[39],_mm_xor_si64(c2[1230],_mm_xor_si64(c2[2162],_mm_xor_si64(c2[63],_mm_xor_si64(c2[1260],_mm_xor_si64(c2[1601],_mm_xor_si64(c2[1901],_mm_xor_si64(c2[2190],_mm_xor_si64(c2[1299],_mm_xor_si64(c2[690],_mm_xor_si64(c2[122],_mm_xor_si64(c2[1920],_mm_xor_si64(c2[126],_mm_xor_si64(c2[1362],_mm_xor_si64(c2[2257],_mm_xor_si64(c2[2291],_mm_xor_si64(c2[192],_mm_xor_si64(c2[486],_mm_xor_si64(c2[194],_mm_xor_si64(c2[222],_mm_xor_si64(c2[214],_mm_xor_si64(c2[850],_mm_xor_si64(c2[1453],_mm_xor_si64(c2[1454],_mm_xor_si64(c2[278],_mm_xor_si64(c2[578],_mm_xor_si64(c2[270],c2[1473]))))))))))))))))))))))))))))))))));
+     d2[180]=simde_mm_xor_si64(c2[312],simde_mm_xor_si64(c2[612],simde_mm_xor_si64(c2[2111],simde_mm_xor_si64(c2[903],simde_mm_xor_si64(c2[940],simde_mm_xor_si64(c2[1240],simde_mm_xor_si64(c2[43],simde_mm_xor_si64(c2[39],simde_mm_xor_si64(c2[1230],simde_mm_xor_si64(c2[2162],simde_mm_xor_si64(c2[63],simde_mm_xor_si64(c2[1260],simde_mm_xor_si64(c2[1601],simde_mm_xor_si64(c2[1901],simde_mm_xor_si64(c2[2190],simde_mm_xor_si64(c2[1299],simde_mm_xor_si64(c2[690],simde_mm_xor_si64(c2[122],simde_mm_xor_si64(c2[1920],simde_mm_xor_si64(c2[126],simde_mm_xor_si64(c2[1362],simde_mm_xor_si64(c2[2257],simde_mm_xor_si64(c2[2291],simde_mm_xor_si64(c2[192],simde_mm_xor_si64(c2[486],simde_mm_xor_si64(c2[194],simde_mm_xor_si64(c2[222],simde_mm_xor_si64(c2[214],simde_mm_xor_si64(c2[850],simde_mm_xor_si64(c2[1453],simde_mm_xor_si64(c2[1454],simde_mm_xor_si64(c2[278],simde_mm_xor_si64(c2[578],simde_mm_xor_si64(c2[270],c2[1473]))))))))))))))))))))))))))))))))));
 
 //row: 13
-     d2[195]=_mm_xor_si64(c2[1500],_mm_xor_si64(c2[600],_mm_xor_si64(c2[1806],_mm_xor_si64(c2[6],_mm_xor_si64(c2[2143],_mm_xor_si64(c2[931],_mm_xor_si64(c2[642],_mm_xor_si64(c2[942],_mm_xor_si64(c2[337],_mm_xor_si64(c2[966],_mm_xor_si64(c2[1863],_mm_xor_si64(c2[2163],_mm_xor_si64(c2[390],_mm_xor_si64(c2[694],_mm_xor_si64(c2[2202],_mm_xor_si64(c2[1025],_mm_xor_si64(c2[424],_mm_xor_si64(c2[729],_mm_xor_si64(c2[1029],_mm_xor_si64(c2[2250],_mm_xor_si64(c2[461],_mm_xor_si64(c2[761],_mm_xor_si64(c2[1080],_mm_xor_si64(c2[1389],_mm_xor_si64(c2[782],_mm_xor_si64(c2[1082],_mm_xor_si64(c2[1110],_mm_xor_si64(c2[817],_mm_xor_si64(c2[1117],_mm_xor_si64(c2[1753],_mm_xor_si64(c2[2341],_mm_xor_si64(c2[2042],_mm_xor_si64(c2[2342],_mm_xor_si64(c2[2345],_mm_xor_si64(c2[1481],_mm_xor_si64(c2[1173],_mm_xor_si64(c2[2076],c2[2376])))))))))))))))))))))))))))))))))))));
+     d2[195]=simde_mm_xor_si64(c2[1500],simde_mm_xor_si64(c2[600],simde_mm_xor_si64(c2[1806],simde_mm_xor_si64(c2[6],simde_mm_xor_si64(c2[2143],simde_mm_xor_si64(c2[931],simde_mm_xor_si64(c2[642],simde_mm_xor_si64(c2[942],simde_mm_xor_si64(c2[337],simde_mm_xor_si64(c2[966],simde_mm_xor_si64(c2[1863],simde_mm_xor_si64(c2[2163],simde_mm_xor_si64(c2[390],simde_mm_xor_si64(c2[694],simde_mm_xor_si64(c2[2202],simde_mm_xor_si64(c2[1025],simde_mm_xor_si64(c2[424],simde_mm_xor_si64(c2[729],simde_mm_xor_si64(c2[1029],simde_mm_xor_si64(c2[2250],simde_mm_xor_si64(c2[461],simde_mm_xor_si64(c2[761],simde_mm_xor_si64(c2[1080],simde_mm_xor_si64(c2[1389],simde_mm_xor_si64(c2[782],simde_mm_xor_si64(c2[1082],simde_mm_xor_si64(c2[1110],simde_mm_xor_si64(c2[817],simde_mm_xor_si64(c2[1117],simde_mm_xor_si64(c2[1753],simde_mm_xor_si64(c2[2341],simde_mm_xor_si64(c2[2042],simde_mm_xor_si64(c2[2342],simde_mm_xor_si64(c2[2345],simde_mm_xor_si64(c2[1481],simde_mm_xor_si64(c2[1173],simde_mm_xor_si64(c2[2076],c2[2376])))))))))))))))))))))))))))))))))))));
 
 //row: 14
-     d2[210]=_mm_xor_si64(c2[1513],_mm_xor_si64(c2[1813],_mm_xor_si64(c2[1208],_mm_xor_si64(c2[913],_mm_xor_si64(c2[308],_mm_xor_si64(c2[2104],_mm_xor_si64(c2[1514],_mm_xor_si64(c2[2141],_mm_xor_si64(c2[42],_mm_xor_si64(c2[1836],_mm_xor_si64(c2[1244],_mm_xor_si64(c2[639],_mm_xor_si64(c2[1240],_mm_xor_si64(c2[335],_mm_xor_si64(c2[635],_mm_xor_si64(c2[640],_mm_xor_si64(c2[964],_mm_xor_si64(c2[1264],_mm_xor_si64(c2[674],_mm_xor_si64(c2[62],_mm_xor_si64(c2[1571],_mm_xor_si64(c2[1871],_mm_xor_si64(c2[403],_mm_xor_si64(c2[703],_mm_xor_si64(c2[98],_mm_xor_si64(c2[992],_mm_xor_si64(c2[402],_mm_xor_si64(c2[101],_mm_xor_si64(c2[1895],_mm_xor_si64(c2[1323],_mm_xor_si64(c2[733],_mm_xor_si64(c2[722],_mm_xor_si64(c2[132],_mm_xor_si64(c2[1327],_mm_xor_si64(c2[422],_mm_xor_si64(c2[722],_mm_xor_si64(c2[164],_mm_xor_si64(c2[1958],_mm_xor_si64(c2[1059],_mm_xor_si64(c2[154],_mm_xor_si64(c2[454],_mm_xor_si64(c2[1093],_mm_xor_si64(c2[1393],_mm_xor_si64(c2[788],_mm_xor_si64(c2[1687],_mm_xor_si64(c2[1082],_mm_xor_si64(c2[1380],_mm_xor_si64(c2[490],_mm_xor_si64(c2[790],_mm_xor_si64(c2[194],_mm_xor_si64(c2[1423],_mm_xor_si64(c2[818],_mm_xor_si64(c2[1415],_mm_xor_si64(c2[510],_mm_xor_si64(c2[810],_mm_xor_si64(c2[2051],_mm_xor_si64(c2[1446],_mm_xor_si64(c2[240],_mm_xor_si64(c2[2049],_mm_xor_si64(c2[241],_mm_xor_si64(c2[1750],_mm_xor_si64(c2[2050],_mm_xor_si64(c2[1479],_mm_xor_si64(c2[1779],_mm_xor_si64(c2[1174],_mm_xor_si64(c2[1471],_mm_xor_si64(c2[881],_mm_xor_si64(c2[275],_mm_xor_si64(c2[1784],c2[2084])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+     d2[210]=simde_mm_xor_si64(c2[1513],simde_mm_xor_si64(c2[1813],simde_mm_xor_si64(c2[1208],simde_mm_xor_si64(c2[913],simde_mm_xor_si64(c2[308],simde_mm_xor_si64(c2[2104],simde_mm_xor_si64(c2[1514],simde_mm_xor_si64(c2[2141],simde_mm_xor_si64(c2[42],simde_mm_xor_si64(c2[1836],simde_mm_xor_si64(c2[1244],simde_mm_xor_si64(c2[639],simde_mm_xor_si64(c2[1240],simde_mm_xor_si64(c2[335],simde_mm_xor_si64(c2[635],simde_mm_xor_si64(c2[640],simde_mm_xor_si64(c2[964],simde_mm_xor_si64(c2[1264],simde_mm_xor_si64(c2[674],simde_mm_xor_si64(c2[62],simde_mm_xor_si64(c2[1571],simde_mm_xor_si64(c2[1871],simde_mm_xor_si64(c2[403],simde_mm_xor_si64(c2[703],simde_mm_xor_si64(c2[98],simde_mm_xor_si64(c2[992],simde_mm_xor_si64(c2[402],simde_mm_xor_si64(c2[101],simde_mm_xor_si64(c2[1895],simde_mm_xor_si64(c2[1323],simde_mm_xor_si64(c2[733],simde_mm_xor_si64(c2[722],simde_mm_xor_si64(c2[132],simde_mm_xor_si64(c2[1327],simde_mm_xor_si64(c2[422],simde_mm_xor_si64(c2[722],simde_mm_xor_si64(c2[164],simde_mm_xor_si64(c2[1958],simde_mm_xor_si64(c2[1059],simde_mm_xor_si64(c2[154],simde_mm_xor_si64(c2[454],simde_mm_xor_si64(c2[1093],simde_mm_xor_si64(c2[1393],simde_mm_xor_si64(c2[788],simde_mm_xor_si64(c2[1687],simde_mm_xor_si64(c2[1082],simde_mm_xor_si64(c2[1380],simde_mm_xor_si64(c2[490],simde_mm_xor_si64(c2[790],simde_mm_xor_si64(c2[194],simde_mm_xor_si64(c2[1423],simde_mm_xor_si64(c2[818],simde_mm_xor_si64(c2[1415],simde_mm_xor_si64(c2[510],simde_mm_xor_si64(c2[810],simde_mm_xor_si64(c2[2051],simde_mm_xor_si64(c2[1446],simde_mm_xor_si64(c2[240],simde_mm_xor_si64(c2[2049],simde_mm_xor_si64(c2[241],simde_mm_xor_si64(c2[1750],simde_mm_xor_si64(c2[2050],simde_mm_xor_si64(c2[1479],simde_mm_xor_si64(c2[1779],simde_mm_xor_si64(c2[1174],simde_mm_xor_si64(c2[1471],simde_mm_xor_si64(c2[881],simde_mm_xor_si64(c2[275],simde_mm_xor_si64(c2[1784],c2[2084])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
 
 //row: 15
-     d2[225]=_mm_xor_si64(c2[600],_mm_xor_si64(c2[1807],_mm_xor_si64(c2[2107],_mm_xor_si64(c2[2114],_mm_xor_si64(c2[1207],_mm_xor_si64(c2[906],_mm_xor_si64(c2[14],_mm_xor_si64(c2[1804],_mm_xor_si64(c2[1243],_mm_xor_si64(c2[36],_mm_xor_si64(c2[336],_mm_xor_si64(c2[31],_mm_xor_si64(c2[1538],_mm_xor_si64(c2[42],_mm_xor_si64(c2[1534],_mm_xor_si64(c2[66],_mm_xor_si64(c2[1273],_mm_xor_si64(c2[1573],_mm_xor_si64(c2[1263],_mm_xor_si64(c2[371],_mm_xor_si64(c2[1904],_mm_xor_si64(c2[697],_mm_xor_si64(c2[997],_mm_xor_si64(c2[2193],_mm_xor_si64(c2[1301],_mm_xor_si64(c2[1302],_mm_xor_si64(c2[395],_mm_xor_si64(c2[125],_mm_xor_si64(c2[1632],_mm_xor_si64(c2[1923],_mm_xor_si64(c2[1031],_mm_xor_si64(c2[129],_mm_xor_si64(c2[1621],_mm_xor_si64(c2[1350],_mm_xor_si64(c2[458],_mm_xor_si64(c2[2260],_mm_xor_si64(c2[1353],_mm_xor_si64(c2[180],_mm_xor_si64(c2[1387],_mm_xor_si64(c2[1687],_mm_xor_si64(c2[489],_mm_xor_si64(c2[1981],_mm_xor_si64(c2[182],_mm_xor_si64(c2[1689],_mm_xor_si64(c2[210],_mm_xor_si64(c2[1717],_mm_xor_si64(c2[217],_mm_xor_si64(c2[1724],_mm_xor_si64(c2[853],_mm_xor_si64(c2[2345],_mm_xor_si64(c2[1441],_mm_xor_si64(c2[549],_mm_xor_si64(c2[1442],_mm_xor_si64(c2[550],_mm_xor_si64(c2[581],_mm_xor_si64(c2[1773],_mm_xor_si64(c2[2073],_mm_xor_si64(c2[273],_mm_xor_si64(c2[1780],_mm_xor_si64(c2[1476],c2[584]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+     d2[225]=simde_mm_xor_si64(c2[600],simde_mm_xor_si64(c2[1807],simde_mm_xor_si64(c2[2107],simde_mm_xor_si64(c2[2114],simde_mm_xor_si64(c2[1207],simde_mm_xor_si64(c2[906],simde_mm_xor_si64(c2[14],simde_mm_xor_si64(c2[1804],simde_mm_xor_si64(c2[1243],simde_mm_xor_si64(c2[36],simde_mm_xor_si64(c2[336],simde_mm_xor_si64(c2[31],simde_mm_xor_si64(c2[1538],simde_mm_xor_si64(c2[42],simde_mm_xor_si64(c2[1534],simde_mm_xor_si64(c2[66],simde_mm_xor_si64(c2[1273],simde_mm_xor_si64(c2[1573],simde_mm_xor_si64(c2[1263],simde_mm_xor_si64(c2[371],simde_mm_xor_si64(c2[1904],simde_mm_xor_si64(c2[697],simde_mm_xor_si64(c2[997],simde_mm_xor_si64(c2[2193],simde_mm_xor_si64(c2[1301],simde_mm_xor_si64(c2[1302],simde_mm_xor_si64(c2[395],simde_mm_xor_si64(c2[125],simde_mm_xor_si64(c2[1632],simde_mm_xor_si64(c2[1923],simde_mm_xor_si64(c2[1031],simde_mm_xor_si64(c2[129],simde_mm_xor_si64(c2[1621],simde_mm_xor_si64(c2[1350],simde_mm_xor_si64(c2[458],simde_mm_xor_si64(c2[2260],simde_mm_xor_si64(c2[1353],simde_mm_xor_si64(c2[180],simde_mm_xor_si64(c2[1387],simde_mm_xor_si64(c2[1687],simde_mm_xor_si64(c2[489],simde_mm_xor_si64(c2[1981],simde_mm_xor_si64(c2[182],simde_mm_xor_si64(c2[1689],simde_mm_xor_si64(c2[210],simde_mm_xor_si64(c2[1717],simde_mm_xor_si64(c2[217],simde_mm_xor_si64(c2[1724],simde_mm_xor_si64(c2[853],simde_mm_xor_si64(c2[2345],simde_mm_xor_si64(c2[1441],simde_mm_xor_si64(c2[549],simde_mm_xor_si64(c2[1442],simde_mm_xor_si64(c2[550],simde_mm_xor_si64(c2[581],simde_mm_xor_si64(c2[1773],simde_mm_xor_si64(c2[2073],simde_mm_xor_si64(c2[273],simde_mm_xor_si64(c2[1780],simde_mm_xor_si64(c2[1476],c2[584]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
 
 //row: 16
-     d2[240]=_mm_xor_si64(c2[10],_mm_xor_si64(c2[310],_mm_xor_si64(c2[913],_mm_xor_si64(c2[1213],_mm_xor_si64(c2[1809],_mm_xor_si64(c2[13],_mm_xor_si64(c2[313],_mm_xor_si64(c2[601],_mm_xor_si64(c2[1504],_mm_xor_si64(c2[638],_mm_xor_si64(c2[938],_mm_xor_si64(c2[1541],_mm_xor_si64(c2[1841],_mm_xor_si64(c2[2140],_mm_xor_si64(c2[644],_mm_xor_si64(c2[2136],_mm_xor_si64(c2[640],_mm_xor_si64(c2[636],_mm_xor_si64(c2[1860],_mm_xor_si64(c2[2160],_mm_xor_si64(c2[364],_mm_xor_si64(c2[664],_mm_xor_si64(c2[973],_mm_xor_si64(c2[1861],_mm_xor_si64(c2[1299],_mm_xor_si64(c2[1599],_mm_xor_si64(c2[2202],_mm_xor_si64(c2[103],_mm_xor_si64(c2[1903],_mm_xor_si64(c2[92],_mm_xor_si64(c2[392],_mm_xor_si64(c2[997],_mm_xor_si64(c2[1900],_mm_xor_si64(c2[2234],_mm_xor_si64(c2[423],_mm_xor_si64(c2[723],_mm_xor_si64(c2[1633],_mm_xor_si64(c2[122],_mm_xor_si64(c2[2223],_mm_xor_si64(c2[727],_mm_xor_si64(c2[1060],_mm_xor_si64(c2[1663],_mm_xor_si64(c2[1963],_mm_xor_si64(c2[1955],_mm_xor_si64(c2[459],_mm_xor_si64(c2[1989],_mm_xor_si64(c2[2289],_mm_xor_si64(c2[493],_mm_xor_si64(c2[793],_mm_xor_si64(c2[184],_mm_xor_si64(c2[787],_mm_xor_si64(c2[1087],_mm_xor_si64(c2[2291],_mm_xor_si64(c2[780],_mm_xor_si64(c2[2319],_mm_xor_si64(c2[523],_mm_xor_si64(c2[823],_mm_xor_si64(c2[2311],_mm_xor_si64(c2[815],_mm_xor_si64(c2[548],_mm_xor_si64(c2[1151],_mm_xor_si64(c2[1451],_mm_xor_si64(c2[1151],_mm_xor_si64(c2[2054],_mm_xor_si64(c2[1152],_mm_xor_si64(c2[2040],_mm_xor_si64(c2[2375],_mm_xor_si64(c2[276],_mm_xor_si64(c2[879],_mm_xor_si64(c2[1179],_mm_xor_si64(c2[2382],_mm_xor_si64(c2[571],_mm_xor_si64(c2[871],_mm_xor_si64(c2[1171],_mm_xor_si64(c2[2074],c2[2372])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+     d2[240]=simde_mm_xor_si64(c2[10],simde_mm_xor_si64(c2[310],simde_mm_xor_si64(c2[913],simde_mm_xor_si64(c2[1213],simde_mm_xor_si64(c2[1809],simde_mm_xor_si64(c2[13],simde_mm_xor_si64(c2[313],simde_mm_xor_si64(c2[601],simde_mm_xor_si64(c2[1504],simde_mm_xor_si64(c2[638],simde_mm_xor_si64(c2[938],simde_mm_xor_si64(c2[1541],simde_mm_xor_si64(c2[1841],simde_mm_xor_si64(c2[2140],simde_mm_xor_si64(c2[644],simde_mm_xor_si64(c2[2136],simde_mm_xor_si64(c2[640],simde_mm_xor_si64(c2[636],simde_mm_xor_si64(c2[1860],simde_mm_xor_si64(c2[2160],simde_mm_xor_si64(c2[364],simde_mm_xor_si64(c2[664],simde_mm_xor_si64(c2[973],simde_mm_xor_si64(c2[1861],simde_mm_xor_si64(c2[1299],simde_mm_xor_si64(c2[1599],simde_mm_xor_si64(c2[2202],simde_mm_xor_si64(c2[103],simde_mm_xor_si64(c2[1903],simde_mm_xor_si64(c2[92],simde_mm_xor_si64(c2[392],simde_mm_xor_si64(c2[997],simde_mm_xor_si64(c2[1900],simde_mm_xor_si64(c2[2234],simde_mm_xor_si64(c2[423],simde_mm_xor_si64(c2[723],simde_mm_xor_si64(c2[1633],simde_mm_xor_si64(c2[122],simde_mm_xor_si64(c2[2223],simde_mm_xor_si64(c2[727],simde_mm_xor_si64(c2[1060],simde_mm_xor_si64(c2[1663],simde_mm_xor_si64(c2[1963],simde_mm_xor_si64(c2[1955],simde_mm_xor_si64(c2[459],simde_mm_xor_si64(c2[1989],simde_mm_xor_si64(c2[2289],simde_mm_xor_si64(c2[493],simde_mm_xor_si64(c2[793],simde_mm_xor_si64(c2[184],simde_mm_xor_si64(c2[787],simde_mm_xor_si64(c2[1087],simde_mm_xor_si64(c2[2291],simde_mm_xor_si64(c2[780],simde_mm_xor_si64(c2[2319],simde_mm_xor_si64(c2[523],simde_mm_xor_si64(c2[823],simde_mm_xor_si64(c2[2311],simde_mm_xor_si64(c2[815],simde_mm_xor_si64(c2[548],simde_mm_xor_si64(c2[1151],simde_mm_xor_si64(c2[1451],simde_mm_xor_si64(c2[1151],simde_mm_xor_si64(c2[2054],simde_mm_xor_si64(c2[1152],simde_mm_xor_si64(c2[2040],simde_mm_xor_si64(c2[2375],simde_mm_xor_si64(c2[276],simde_mm_xor_si64(c2[879],simde_mm_xor_si64(c2[1179],simde_mm_xor_si64(c2[2382],simde_mm_xor_si64(c2[571],simde_mm_xor_si64(c2[871],simde_mm_xor_si64(c2[1171],simde_mm_xor_si64(c2[2074],c2[2372])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
 
 //row: 17
-     d2[255]=_mm_xor_si64(c2[1212],_mm_xor_si64(c2[1512],_mm_xor_si64(c2[900],_mm_xor_si64(c2[1200],_mm_xor_si64(c2[612],_mm_xor_si64(c2[0],_mm_xor_si64(c2[300],_mm_xor_si64(c2[1803],_mm_xor_si64(c2[1506],_mm_xor_si64(c2[1840],_mm_xor_si64(c2[2140],_mm_xor_si64(c2[1543],_mm_xor_si64(c2[1843],_mm_xor_si64(c2[943],_mm_xor_si64(c2[631],_mm_xor_si64(c2[939],_mm_xor_si64(c2[642],_mm_xor_si64(c2[1239],_mm_xor_si64(c2[663],_mm_xor_si64(c2[963],_mm_xor_si64(c2[366],_mm_xor_si64(c2[666],_mm_xor_si64(c2[2160],_mm_xor_si64(c2[1863],_mm_xor_si64(c2[102],_mm_xor_si64(c2[402],_mm_xor_si64(c2[2204],_mm_xor_si64(c2[90],_mm_xor_si64(c2[691],_mm_xor_si64(c2[94],_mm_xor_si64(c2[394],_mm_xor_si64(c2[2199],_mm_xor_si64(c2[1902],_mm_xor_si64(c2[1022],_mm_xor_si64(c2[425],_mm_xor_si64(c2[725],_mm_xor_si64(c2[421],_mm_xor_si64(c2[124],_mm_xor_si64(c2[1026],_mm_xor_si64(c2[729],_mm_xor_si64(c2[2262],_mm_xor_si64(c2[1650],_mm_xor_si64(c2[1950],_mm_xor_si64(c2[758],_mm_xor_si64(c2[461],_mm_xor_si64(c2[1656],_mm_xor_si64(c2[792],_mm_xor_si64(c2[1092],_mm_xor_si64(c2[480],_mm_xor_si64(c2[780],_mm_xor_si64(c2[1386],_mm_xor_si64(c2[789],_mm_xor_si64(c2[1089],_mm_xor_si64(c2[1094],_mm_xor_si64(c2[782],_mm_xor_si64(c2[1122],_mm_xor_si64(c2[510],_mm_xor_si64(c2[810],_mm_xor_si64(c2[1114],_mm_xor_si64(c2[817],_mm_xor_si64(c2[1750],_mm_xor_si64(c2[1153],_mm_xor_si64(c2[1453],_mm_xor_si64(c2[2353],_mm_xor_si64(c2[2041],_mm_xor_si64(c2[2354],_mm_xor_si64(c2[2042],_mm_xor_si64(c2[1178],_mm_xor_si64(c2[1478],_mm_xor_si64(c2[881],_mm_xor_si64(c2[1181],_mm_xor_si64(c2[1170],_mm_xor_si64(c2[573],_mm_xor_si64(c2[873],_mm_xor_si64(c2[2373],c2[2076])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+     d2[255]=simde_mm_xor_si64(c2[1212],simde_mm_xor_si64(c2[1512],simde_mm_xor_si64(c2[900],simde_mm_xor_si64(c2[1200],simde_mm_xor_si64(c2[612],simde_mm_xor_si64(c2[0],simde_mm_xor_si64(c2[300],simde_mm_xor_si64(c2[1803],simde_mm_xor_si64(c2[1506],simde_mm_xor_si64(c2[1840],simde_mm_xor_si64(c2[2140],simde_mm_xor_si64(c2[1543],simde_mm_xor_si64(c2[1843],simde_mm_xor_si64(c2[943],simde_mm_xor_si64(c2[631],simde_mm_xor_si64(c2[939],simde_mm_xor_si64(c2[642],simde_mm_xor_si64(c2[1239],simde_mm_xor_si64(c2[663],simde_mm_xor_si64(c2[963],simde_mm_xor_si64(c2[366],simde_mm_xor_si64(c2[666],simde_mm_xor_si64(c2[2160],simde_mm_xor_si64(c2[1863],simde_mm_xor_si64(c2[102],simde_mm_xor_si64(c2[402],simde_mm_xor_si64(c2[2204],simde_mm_xor_si64(c2[90],simde_mm_xor_si64(c2[691],simde_mm_xor_si64(c2[94],simde_mm_xor_si64(c2[394],simde_mm_xor_si64(c2[2199],simde_mm_xor_si64(c2[1902],simde_mm_xor_si64(c2[1022],simde_mm_xor_si64(c2[425],simde_mm_xor_si64(c2[725],simde_mm_xor_si64(c2[421],simde_mm_xor_si64(c2[124],simde_mm_xor_si64(c2[1026],simde_mm_xor_si64(c2[729],simde_mm_xor_si64(c2[2262],simde_mm_xor_si64(c2[1650],simde_mm_xor_si64(c2[1950],simde_mm_xor_si64(c2[758],simde_mm_xor_si64(c2[461],simde_mm_xor_si64(c2[1656],simde_mm_xor_si64(c2[792],simde_mm_xor_si64(c2[1092],simde_mm_xor_si64(c2[480],simde_mm_xor_si64(c2[780],simde_mm_xor_si64(c2[1386],simde_mm_xor_si64(c2[789],simde_mm_xor_si64(c2[1089],simde_mm_xor_si64(c2[1094],simde_mm_xor_si64(c2[782],simde_mm_xor_si64(c2[1122],simde_mm_xor_si64(c2[510],simde_mm_xor_si64(c2[810],simde_mm_xor_si64(c2[1114],simde_mm_xor_si64(c2[817],simde_mm_xor_si64(c2[1750],simde_mm_xor_si64(c2[1153],simde_mm_xor_si64(c2[1453],simde_mm_xor_si64(c2[2353],simde_mm_xor_si64(c2[2041],simde_mm_xor_si64(c2[2354],simde_mm_xor_si64(c2[2042],simde_mm_xor_si64(c2[1178],simde_mm_xor_si64(c2[1478],simde_mm_xor_si64(c2[881],simde_mm_xor_si64(c2[1181],simde_mm_xor_si64(c2[1170],simde_mm_xor_si64(c2[573],simde_mm_xor_si64(c2[873],simde_mm_xor_si64(c2[2373],c2[2076])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
 
 //row: 18
-     d2[270]=_mm_xor_si64(c2[1],_mm_xor_si64(c2[1091],c2[821]));
+     d2[270]=simde_mm_xor_si64(c2[1],simde_mm_xor_si64(c2[1091],c2[821]));
 
 //row: 19
-     d2[285]=_mm_xor_si64(c2[314],_mm_xor_si64(c2[1813],_mm_xor_si64(c2[605],_mm_xor_si64(c2[2104],_mm_xor_si64(c2[942],_mm_xor_si64(c2[2144],_mm_xor_si64(c2[2140],_mm_xor_si64(c2[1240],_mm_xor_si64(c2[2164],_mm_xor_si64(c2[962],_mm_xor_si64(c2[1603],_mm_xor_si64(c2[1892],_mm_xor_si64(c2[1001],_mm_xor_si64(c2[2223],_mm_xor_si64(c2[1622],_mm_xor_si64(c2[2227],_mm_xor_si64(c2[1064],_mm_xor_si64(c2[1959],_mm_xor_si64(c2[2293],_mm_xor_si64(c2[188],_mm_xor_si64(c2[2280],_mm_xor_si64(c2[2323],_mm_xor_si64(c2[2315],_mm_xor_si64(c2[552],_mm_xor_si64(c2[1140],_mm_xor_si64(c2[1141],_mm_xor_si64(c2[280],_mm_xor_si64(c2[2371],c2[1175]))))))))))))))))))))))))))));
+     d2[285]=simde_mm_xor_si64(c2[314],simde_mm_xor_si64(c2[1813],simde_mm_xor_si64(c2[605],simde_mm_xor_si64(c2[2104],simde_mm_xor_si64(c2[942],simde_mm_xor_si64(c2[2144],simde_mm_xor_si64(c2[2140],simde_mm_xor_si64(c2[1240],simde_mm_xor_si64(c2[2164],simde_mm_xor_si64(c2[962],simde_mm_xor_si64(c2[1603],simde_mm_xor_si64(c2[1892],simde_mm_xor_si64(c2[1001],simde_mm_xor_si64(c2[2223],simde_mm_xor_si64(c2[1622],simde_mm_xor_si64(c2[2227],simde_mm_xor_si64(c2[1064],simde_mm_xor_si64(c2[1959],simde_mm_xor_si64(c2[2293],simde_mm_xor_si64(c2[188],simde_mm_xor_si64(c2[2280],simde_mm_xor_si64(c2[2323],simde_mm_xor_si64(c2[2315],simde_mm_xor_si64(c2[552],simde_mm_xor_si64(c2[1140],simde_mm_xor_si64(c2[1141],simde_mm_xor_si64(c2[280],simde_mm_xor_si64(c2[2371],c2[1175]))))))))))))))))))))))))))));
 
 //row: 20
-     d2[300]=_mm_xor_si64(c2[2103],_mm_xor_si64(c2[4],_mm_xor_si64(c2[1503],_mm_xor_si64(c2[310],_mm_xor_si64(c2[332],_mm_xor_si64(c2[632],_mm_xor_si64(c2[1834],_mm_xor_si64(c2[1830],_mm_xor_si64(c2[1544],_mm_xor_si64(c2[1569],_mm_xor_si64(c2[1869],_mm_xor_si64(c2[667],_mm_xor_si64(c2[993],_mm_xor_si64(c2[1293],_mm_xor_si64(c2[1597],_mm_xor_si64(c2[691],_mm_xor_si64(c2[1928],_mm_xor_si64(c2[1327],_mm_xor_si64(c2[1932],_mm_xor_si64(c2[1033],_mm_xor_si64(c2[754],_mm_xor_si64(c2[1664],_mm_xor_si64(c2[1683],_mm_xor_si64(c2[1983],_mm_xor_si64(c2[2292],_mm_xor_si64(c2[1985],_mm_xor_si64(c2[2013],_mm_xor_si64(c2[2020],_mm_xor_si64(c2[242],_mm_xor_si64(c2[845],_mm_xor_si64(c2[846],_mm_xor_si64(c2[2084],_mm_xor_si64(c2[2384],_mm_xor_si64(c2[2076],c2[880]))))))))))))))))))))))))))))))))));
+     d2[300]=simde_mm_xor_si64(c2[2103],simde_mm_xor_si64(c2[4],simde_mm_xor_si64(c2[1503],simde_mm_xor_si64(c2[310],simde_mm_xor_si64(c2[332],simde_mm_xor_si64(c2[632],simde_mm_xor_si64(c2[1834],simde_mm_xor_si64(c2[1830],simde_mm_xor_si64(c2[1544],simde_mm_xor_si64(c2[1569],simde_mm_xor_si64(c2[1869],simde_mm_xor_si64(c2[667],simde_mm_xor_si64(c2[993],simde_mm_xor_si64(c2[1293],simde_mm_xor_si64(c2[1597],simde_mm_xor_si64(c2[691],simde_mm_xor_si64(c2[1928],simde_mm_xor_si64(c2[1327],simde_mm_xor_si64(c2[1932],simde_mm_xor_si64(c2[1033],simde_mm_xor_si64(c2[754],simde_mm_xor_si64(c2[1664],simde_mm_xor_si64(c2[1683],simde_mm_xor_si64(c2[1983],simde_mm_xor_si64(c2[2292],simde_mm_xor_si64(c2[1985],simde_mm_xor_si64(c2[2013],simde_mm_xor_si64(c2[2020],simde_mm_xor_si64(c2[242],simde_mm_xor_si64(c2[845],simde_mm_xor_si64(c2[846],simde_mm_xor_si64(c2[2084],simde_mm_xor_si64(c2[2384],simde_mm_xor_si64(c2[2076],c2[880]))))))))))))))))))))))))))))))))));
 
 //row: 21
-     d2[315]=_mm_xor_si64(c2[1504],_mm_xor_si64(c2[604],_mm_xor_si64(c2[1810],_mm_xor_si64(c2[1814],_mm_xor_si64(c2[2132],_mm_xor_si64(c2[935],_mm_xor_si64(c2[631],_mm_xor_si64(c2[931],_mm_xor_si64(c2[970],_mm_xor_si64(c2[1867],_mm_xor_si64(c2[2167],_mm_xor_si64(c2[394],_mm_xor_si64(c2[698],_mm_xor_si64(c2[2191],_mm_xor_si64(c2[1029],_mm_xor_si64(c2[428],_mm_xor_si64(c2[733],_mm_xor_si64(c2[1033],_mm_xor_si64(c2[2254],_mm_xor_si64(c2[450],_mm_xor_si64(c2[750],_mm_xor_si64(c2[1084],_mm_xor_si64(c2[1393],_mm_xor_si64(c2[786],_mm_xor_si64(c2[1086],_mm_xor_si64(c2[1114],_mm_xor_si64(c2[821],_mm_xor_si64(c2[1121],_mm_xor_si64(c2[1742],_mm_xor_si64(c2[2345],_mm_xor_si64(c2[2046],_mm_xor_si64(c2[2346],_mm_xor_si64(c2[1741],_mm_xor_si64(c2[1470],_mm_xor_si64(c2[1177],_mm_xor_si64(c2[2080],c2[2380]))))))))))))))))))))))))))))))))))));
+     d2[315]=simde_mm_xor_si64(c2[1504],simde_mm_xor_si64(c2[604],simde_mm_xor_si64(c2[1810],simde_mm_xor_si64(c2[1814],simde_mm_xor_si64(c2[2132],simde_mm_xor_si64(c2[935],simde_mm_xor_si64(c2[631],simde_mm_xor_si64(c2[931],simde_mm_xor_si64(c2[970],simde_mm_xor_si64(c2[1867],simde_mm_xor_si64(c2[2167],simde_mm_xor_si64(c2[394],simde_mm_xor_si64(c2[698],simde_mm_xor_si64(c2[2191],simde_mm_xor_si64(c2[1029],simde_mm_xor_si64(c2[428],simde_mm_xor_si64(c2[733],simde_mm_xor_si64(c2[1033],simde_mm_xor_si64(c2[2254],simde_mm_xor_si64(c2[450],simde_mm_xor_si64(c2[750],simde_mm_xor_si64(c2[1084],simde_mm_xor_si64(c2[1393],simde_mm_xor_si64(c2[786],simde_mm_xor_si64(c2[1086],simde_mm_xor_si64(c2[1114],simde_mm_xor_si64(c2[821],simde_mm_xor_si64(c2[1121],simde_mm_xor_si64(c2[1742],simde_mm_xor_si64(c2[2345],simde_mm_xor_si64(c2[2046],simde_mm_xor_si64(c2[2346],simde_mm_xor_si64(c2[1741],simde_mm_xor_si64(c2[1470],simde_mm_xor_si64(c2[1177],simde_mm_xor_si64(c2[2080],c2[2380]))))))))))))))))))))))))))))))))))));
 
 //row: 22
-     d2[330]=_mm_xor_si64(c2[939],c2[1265]);
+     d2[330]=simde_mm_xor_si64(c2[939],c2[1265]);
 
 //row: 23
-     d2[345]=_mm_xor_si64(c2[1500],_mm_xor_si64(c2[1901],c2[2263]));
+     d2[345]=simde_mm_xor_si64(c2[1500],simde_mm_xor_si64(c2[1901],c2[2263]));
 
 //row: 24
-     d2[360]=_mm_xor_si64(c2[340],_mm_xor_si64(c2[962],c2[871]));
+     d2[360]=simde_mm_xor_si64(c2[340],simde_mm_xor_si64(c2[962],c2[871]));
 
 //row: 25
-     d2[375]=_mm_xor_si64(c2[608],c2[2261]);
+     d2[375]=simde_mm_xor_si64(c2[608],c2[2261]);
 
 //row: 26
-     d2[390]=_mm_xor_si64(c2[2111],_mm_xor_si64(c2[12],_mm_xor_si64(c2[14],_mm_xor_si64(c2[1211],_mm_xor_si64(c2[1511],_mm_xor_si64(c2[1513],_mm_xor_si64(c2[303],_mm_xor_si64(c2[305],_mm_xor_si64(c2[340],_mm_xor_si64(c2[640],_mm_xor_si64(c2[642],_mm_xor_si64(c2[1842],_mm_xor_si64(c2[1844],_mm_xor_si64(c2[1838],_mm_xor_si64(c2[1540],_mm_xor_si64(c2[1840],_mm_xor_si64(c2[1562],_mm_xor_si64(c2[1862],_mm_xor_si64(c2[1864],_mm_xor_si64(c2[660],_mm_xor_si64(c2[362],_mm_xor_si64(c2[662],_mm_xor_si64(c2[663],_mm_xor_si64(c2[1001],_mm_xor_si64(c2[1301],_mm_xor_si64(c2[1303],_mm_xor_si64(c2[1290],_mm_xor_si64(c2[1590],_mm_xor_si64(c2[1592],_mm_xor_si64(c2[699],_mm_xor_si64(c2[701],_mm_xor_si64(c2[1621],_mm_xor_si64(c2[1921],_mm_xor_si64(c2[1923],_mm_xor_si64(c2[1320],_mm_xor_si64(c2[1322],_mm_xor_si64(c2[1925],_mm_xor_si64(c2[1627],_mm_xor_si64(c2[1927],_mm_xor_si64(c2[462],_mm_xor_si64(c2[762],_mm_xor_si64(c2[764],_mm_xor_si64(c2[1657],_mm_xor_si64(c2[1359],_mm_xor_si64(c2[1659],_mm_xor_si64(c2[1691],_mm_xor_si64(c2[1991],_mm_xor_si64(c2[1993],_mm_xor_si64(c2[1985],_mm_xor_si64(c2[2285],_mm_xor_si64(c2[2287],_mm_xor_si64(c2[1993],_mm_xor_si64(c2[1680],_mm_xor_si64(c2[1980],_mm_xor_si64(c2[1721],_mm_xor_si64(c2[2021],_mm_xor_si64(c2[2023],_mm_xor_si64(c2[2013],_mm_xor_si64(c2[1715],_mm_xor_si64(c2[2015],_mm_xor_si64(c2[818],_mm_xor_si64(c2[2349],_mm_xor_si64(c2[250],_mm_xor_si64(c2[252],_mm_xor_si64(c2[853],_mm_xor_si64(c2[840],_mm_xor_si64(c2[854],_mm_xor_si64(c2[541],_mm_xor_si64(c2[841],_mm_xor_si64(c2[2077],_mm_xor_si64(c2[2377],_mm_xor_si64(c2[2379],_mm_xor_si64(c2[1784],_mm_xor_si64(c2[2084],_mm_xor_si64(c2[2071],_mm_xor_si64(c2[873],_mm_xor_si64(c2[575],c2[875])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+     d2[390]=simde_mm_xor_si64(c2[2111],simde_mm_xor_si64(c2[12],simde_mm_xor_si64(c2[14],simde_mm_xor_si64(c2[1211],simde_mm_xor_si64(c2[1511],simde_mm_xor_si64(c2[1513],simde_mm_xor_si64(c2[303],simde_mm_xor_si64(c2[305],simde_mm_xor_si64(c2[340],simde_mm_xor_si64(c2[640],simde_mm_xor_si64(c2[642],simde_mm_xor_si64(c2[1842],simde_mm_xor_si64(c2[1844],simde_mm_xor_si64(c2[1838],simde_mm_xor_si64(c2[1540],simde_mm_xor_si64(c2[1840],simde_mm_xor_si64(c2[1562],simde_mm_xor_si64(c2[1862],simde_mm_xor_si64(c2[1864],simde_mm_xor_si64(c2[660],simde_mm_xor_si64(c2[362],simde_mm_xor_si64(c2[662],simde_mm_xor_si64(c2[663],simde_mm_xor_si64(c2[1001],simde_mm_xor_si64(c2[1301],simde_mm_xor_si64(c2[1303],simde_mm_xor_si64(c2[1290],simde_mm_xor_si64(c2[1590],simde_mm_xor_si64(c2[1592],simde_mm_xor_si64(c2[699],simde_mm_xor_si64(c2[701],simde_mm_xor_si64(c2[1621],simde_mm_xor_si64(c2[1921],simde_mm_xor_si64(c2[1923],simde_mm_xor_si64(c2[1320],simde_mm_xor_si64(c2[1322],simde_mm_xor_si64(c2[1925],simde_mm_xor_si64(c2[1627],simde_mm_xor_si64(c2[1927],simde_mm_xor_si64(c2[462],simde_mm_xor_si64(c2[762],simde_mm_xor_si64(c2[764],simde_mm_xor_si64(c2[1657],simde_mm_xor_si64(c2[1359],simde_mm_xor_si64(c2[1659],simde_mm_xor_si64(c2[1691],simde_mm_xor_si64(c2[1991],simde_mm_xor_si64(c2[1993],simde_mm_xor_si64(c2[1985],simde_mm_xor_si64(c2[2285],simde_mm_xor_si64(c2[2287],simde_mm_xor_si64(c2[1993],simde_mm_xor_si64(c2[1680],simde_mm_xor_si64(c2[1980],simde_mm_xor_si64(c2[1721],simde_mm_xor_si64(c2[2021],simde_mm_xor_si64(c2[2023],simde_mm_xor_si64(c2[2013],simde_mm_xor_si64(c2[1715],simde_mm_xor_si64(c2[2015],simde_mm_xor_si64(c2[818],simde_mm_xor_si64(c2[2349],simde_mm_xor_si64(c2[250],simde_mm_xor_si64(c2[252],simde_mm_xor_si64(c2[853],simde_mm_xor_si64(c2[840],simde_mm_xor_si64(c2[854],simde_mm_xor_si64(c2[541],simde_mm_xor_si64(c2[841],simde_mm_xor_si64(c2[2077],simde_mm_xor_si64(c2[2377],simde_mm_xor_si64(c2[2379],simde_mm_xor_si64(c2[1784],simde_mm_xor_si64(c2[2084],simde_mm_xor_si64(c2[2071],simde_mm_xor_si64(c2[873],simde_mm_xor_si64(c2[575],c2[875])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
 
 //row: 27
-     d2[405]=_mm_xor_si64(c2[8],c2[1687]);
+     d2[405]=simde_mm_xor_si64(c2[8],c2[1687]);
 
 //row: 28
-     d2[420]=_mm_xor_si64(c2[2130],_mm_xor_si64(c2[63],c2[152]));
+     d2[420]=simde_mm_xor_si64(c2[2130],simde_mm_xor_si64(c2[63],c2[152]));
 
 //row: 29
-     d2[435]=_mm_xor_si64(c2[303],c2[427]);
+     d2[435]=simde_mm_xor_si64(c2[303],c2[427]);
 
 //row: 30
-     d2[450]=_mm_xor_si64(c2[1564],_mm_xor_si64(c2[1052],_mm_xor_si64(c2[1722],c2[572])));
+     d2[450]=simde_mm_xor_si64(c2[1564],simde_mm_xor_si64(c2[1052],simde_mm_xor_si64(c2[1722],c2[572])));
 
 //row: 31
-     d2[465]=_mm_xor_si64(c2[9],_mm_xor_si64(c2[1508],_mm_xor_si64(c2[300],_mm_xor_si64(c2[637],_mm_xor_si64(c2[1839],_mm_xor_si64(c2[1535],_mm_xor_si64(c2[1835],_mm_xor_si64(c2[340],_mm_xor_si64(c2[1874],_mm_xor_si64(c2[372],_mm_xor_si64(c2[672],_mm_xor_si64(c2[1298],_mm_xor_si64(c2[1602],_mm_xor_si64(c2[696],_mm_xor_si64(c2[1933],_mm_xor_si64(c2[1332],_mm_xor_si64(c2[1622],_mm_xor_si64(c2[1922],_mm_xor_si64(c2[759],_mm_xor_si64(c2[1354],_mm_xor_si64(c2[1654],_mm_xor_si64(c2[1988],_mm_xor_si64(c2[2282],_mm_xor_si64(c2[1690],_mm_xor_si64(c2[1990],_mm_xor_si64(c2[2018],_mm_xor_si64(c2[1710],_mm_xor_si64(c2[2010],_mm_xor_si64(c2[247],_mm_xor_si64(c2[850],_mm_xor_si64(c2[551],_mm_xor_si64(c2[851],_mm_xor_si64(c2[2374],_mm_xor_si64(c2[2081],_mm_xor_si64(c2[570],c2[870])))))))))))))))))))))))))))))))))));
+     d2[465]=simde_mm_xor_si64(c2[9],simde_mm_xor_si64(c2[1508],simde_mm_xor_si64(c2[300],simde_mm_xor_si64(c2[637],simde_mm_xor_si64(c2[1839],simde_mm_xor_si64(c2[1535],simde_mm_xor_si64(c2[1835],simde_mm_xor_si64(c2[340],simde_mm_xor_si64(c2[1874],simde_mm_xor_si64(c2[372],simde_mm_xor_si64(c2[672],simde_mm_xor_si64(c2[1298],simde_mm_xor_si64(c2[1602],simde_mm_xor_si64(c2[696],simde_mm_xor_si64(c2[1933],simde_mm_xor_si64(c2[1332],simde_mm_xor_si64(c2[1622],simde_mm_xor_si64(c2[1922],simde_mm_xor_si64(c2[759],simde_mm_xor_si64(c2[1354],simde_mm_xor_si64(c2[1654],simde_mm_xor_si64(c2[1988],simde_mm_xor_si64(c2[2282],simde_mm_xor_si64(c2[1690],simde_mm_xor_si64(c2[1990],simde_mm_xor_si64(c2[2018],simde_mm_xor_si64(c2[1710],simde_mm_xor_si64(c2[2010],simde_mm_xor_si64(c2[247],simde_mm_xor_si64(c2[850],simde_mm_xor_si64(c2[551],simde_mm_xor_si64(c2[851],simde_mm_xor_si64(c2[2374],simde_mm_xor_si64(c2[2081],simde_mm_xor_si64(c2[570],c2[870])))))))))))))))))))))))))))))))))));
 
 //row: 32
-     d2[480]=_mm_xor_si64(c2[1502],_mm_xor_si64(c2[1802],_mm_xor_si64(c2[602],_mm_xor_si64(c2[902],_mm_xor_si64(c2[2108],_mm_xor_si64(c2[907],_mm_xor_si64(c2[2130],_mm_xor_si64(c2[31],_mm_xor_si64(c2[1233],_mm_xor_si64(c2[1244],_mm_xor_si64(c2[968],_mm_xor_si64(c2[1268],_mm_xor_si64(c2[66],_mm_xor_si64(c2[392],_mm_xor_si64(c2[692],_mm_xor_si64(c2[696],_mm_xor_si64(c2[996],_mm_xor_si64(c2[90],_mm_xor_si64(c2[1027],_mm_xor_si64(c2[1327],_mm_xor_si64(c2[726],_mm_xor_si64(c2[1331],_mm_xor_si64(c2[2252],_mm_xor_si64(c2[153],_mm_xor_si64(c2[1063],_mm_xor_si64(c2[1951],_mm_xor_si64(c2[1082],_mm_xor_si64(c2[1382],_mm_xor_si64(c2[1391],_mm_xor_si64(c2[1691],_mm_xor_si64(c2[1384],_mm_xor_si64(c2[1112],_mm_xor_si64(c2[1412],_mm_xor_si64(c2[1419],_mm_xor_si64(c2[1740],_mm_xor_si64(c2[2040],_mm_xor_si64(c2[244],_mm_xor_si64(c2[245],_mm_xor_si64(c2[1483],_mm_xor_si64(c2[1783],_mm_xor_si64(c2[1175],_mm_xor_si64(c2[1475],c2[279]))))))))))))))))))))))))))))))))))))))))));
+     d2[480]=simde_mm_xor_si64(c2[1502],simde_mm_xor_si64(c2[1802],simde_mm_xor_si64(c2[602],simde_mm_xor_si64(c2[902],simde_mm_xor_si64(c2[2108],simde_mm_xor_si64(c2[907],simde_mm_xor_si64(c2[2130],simde_mm_xor_si64(c2[31],simde_mm_xor_si64(c2[1233],simde_mm_xor_si64(c2[1244],simde_mm_xor_si64(c2[968],simde_mm_xor_si64(c2[1268],simde_mm_xor_si64(c2[66],simde_mm_xor_si64(c2[392],simde_mm_xor_si64(c2[692],simde_mm_xor_si64(c2[696],simde_mm_xor_si64(c2[996],simde_mm_xor_si64(c2[90],simde_mm_xor_si64(c2[1027],simde_mm_xor_si64(c2[1327],simde_mm_xor_si64(c2[726],simde_mm_xor_si64(c2[1331],simde_mm_xor_si64(c2[2252],simde_mm_xor_si64(c2[153],simde_mm_xor_si64(c2[1063],simde_mm_xor_si64(c2[1951],simde_mm_xor_si64(c2[1082],simde_mm_xor_si64(c2[1382],simde_mm_xor_si64(c2[1391],simde_mm_xor_si64(c2[1691],simde_mm_xor_si64(c2[1384],simde_mm_xor_si64(c2[1112],simde_mm_xor_si64(c2[1412],simde_mm_xor_si64(c2[1419],simde_mm_xor_si64(c2[1740],simde_mm_xor_si64(c2[2040],simde_mm_xor_si64(c2[244],simde_mm_xor_si64(c2[245],simde_mm_xor_si64(c2[1483],simde_mm_xor_si64(c2[1783],simde_mm_xor_si64(c2[1175],simde_mm_xor_si64(c2[1475],c2[279]))))))))))))))))))))))))))))))))))))))))));
 
 //row: 33
-     d2[495]=_mm_xor_si64(c2[901],_mm_xor_si64(c2[1],_mm_xor_si64(c2[1207],_mm_xor_si64(c2[1544],_mm_xor_si64(c2[332],_mm_xor_si64(c2[343],_mm_xor_si64(c2[367],_mm_xor_si64(c2[1564],_mm_xor_si64(c2[1865],_mm_xor_si64(c2[2190],_mm_xor_si64(c2[95],_mm_xor_si64(c2[1603],_mm_xor_si64(c2[426],_mm_xor_si64(c2[2224],_mm_xor_si64(c2[430],_mm_xor_si64(c2[1651],_mm_xor_si64(c2[162],_mm_xor_si64(c2[481],_mm_xor_si64(c2[790],_mm_xor_si64(c2[483],_mm_xor_si64(c2[511],_mm_xor_si64(c2[518],_mm_xor_si64(c2[810],_mm_xor_si64(c2[1154],_mm_xor_si64(c2[1742],_mm_xor_si64(c2[1743],_mm_xor_si64(c2[882],_mm_xor_si64(c2[574],c2[1777]))))))))))))))))))))))))))));
+     d2[495]=simde_mm_xor_si64(c2[901],simde_mm_xor_si64(c2[1],simde_mm_xor_si64(c2[1207],simde_mm_xor_si64(c2[1544],simde_mm_xor_si64(c2[332],simde_mm_xor_si64(c2[343],simde_mm_xor_si64(c2[367],simde_mm_xor_si64(c2[1564],simde_mm_xor_si64(c2[1865],simde_mm_xor_si64(c2[2190],simde_mm_xor_si64(c2[95],simde_mm_xor_si64(c2[1603],simde_mm_xor_si64(c2[426],simde_mm_xor_si64(c2[2224],simde_mm_xor_si64(c2[430],simde_mm_xor_si64(c2[1651],simde_mm_xor_si64(c2[162],simde_mm_xor_si64(c2[481],simde_mm_xor_si64(c2[790],simde_mm_xor_si64(c2[483],simde_mm_xor_si64(c2[511],simde_mm_xor_si64(c2[518],simde_mm_xor_si64(c2[810],simde_mm_xor_si64(c2[1154],simde_mm_xor_si64(c2[1742],simde_mm_xor_si64(c2[1743],simde_mm_xor_si64(c2[882],simde_mm_xor_si64(c2[574],c2[1777]))))))))))))))))))))))))))));
 
 //row: 34
-     d2[510]=_mm_xor_si64(c2[5],_mm_xor_si64(c2[305],_mm_xor_si64(c2[2105],_mm_xor_si64(c2[1504],_mm_xor_si64(c2[1804],_mm_xor_si64(c2[1205],_mm_xor_si64(c2[611],_mm_xor_si64(c2[12],_mm_xor_si64(c2[8],_mm_xor_si64(c2[633],_mm_xor_si64(c2[933],_mm_xor_si64(c2[334],_mm_xor_si64(c2[2135],_mm_xor_si64(c2[1536],_mm_xor_si64(c2[2131],_mm_xor_si64(c2[1232],_mm_xor_si64(c2[1532],_mm_xor_si64(c2[1870],_mm_xor_si64(c2[2170],_mm_xor_si64(c2[1571],_mm_xor_si64(c2[968],_mm_xor_si64(c2[69],_mm_xor_si64(c2[369],_mm_xor_si64(c2[1294],_mm_xor_si64(c2[1594],_mm_xor_si64(c2[995],_mm_xor_si64(c2[1598],_mm_xor_si64(c2[1898],_mm_xor_si64(c2[1299],_mm_xor_si64(c2[992],_mm_xor_si64(c2[393],_mm_xor_si64(c2[1929],_mm_xor_si64(c2[2229],_mm_xor_si64(c2[1630],_mm_xor_si64(c2[1628],_mm_xor_si64(c2[1029],_mm_xor_si64(c2[2233],_mm_xor_si64(c2[1334],_mm_xor_si64(c2[1634],_mm_xor_si64(c2[755],_mm_xor_si64(c2[1055],_mm_xor_si64(c2[456],_mm_xor_si64(c2[1950],_mm_xor_si64(c2[1051],_mm_xor_si64(c2[1351],_mm_xor_si64(c2[1984],_mm_xor_si64(c2[2284],_mm_xor_si64(c2[1685],_mm_xor_si64(c2[2293],_mm_xor_si64(c2[194],_mm_xor_si64(c2[1994],_mm_xor_si64(c2[2286],_mm_xor_si64(c2[1387],_mm_xor_si64(c2[1687],_mm_xor_si64(c2[2014],_mm_xor_si64(c2[2314],_mm_xor_si64(c2[1715],_mm_xor_si64(c2[2321],_mm_xor_si64(c2[1422],_mm_xor_si64(c2[1722],_mm_xor_si64(c2[243],_mm_xor_si64(c2[543],_mm_xor_si64(c2[2343],_mm_xor_si64(c2[1146],_mm_xor_si64(c2[547],_mm_xor_si64(c2[1147],_mm_xor_si64(c2[248],_mm_xor_si64(c2[548],_mm_xor_si64(c2[2370],_mm_xor_si64(c2[271],_mm_xor_si64(c2[2071],_mm_xor_si64(c2[2077],_mm_xor_si64(c2[2377],_mm_xor_si64(c2[1778],_mm_xor_si64(c2[1181],_mm_xor_si64(c2[282],c2[582]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+     d2[510]=simde_mm_xor_si64(c2[5],simde_mm_xor_si64(c2[305],simde_mm_xor_si64(c2[2105],simde_mm_xor_si64(c2[1504],simde_mm_xor_si64(c2[1804],simde_mm_xor_si64(c2[1205],simde_mm_xor_si64(c2[611],simde_mm_xor_si64(c2[12],simde_mm_xor_si64(c2[8],simde_mm_xor_si64(c2[633],simde_mm_xor_si64(c2[933],simde_mm_xor_si64(c2[334],simde_mm_xor_si64(c2[2135],simde_mm_xor_si64(c2[1536],simde_mm_xor_si64(c2[2131],simde_mm_xor_si64(c2[1232],simde_mm_xor_si64(c2[1532],simde_mm_xor_si64(c2[1870],simde_mm_xor_si64(c2[2170],simde_mm_xor_si64(c2[1571],simde_mm_xor_si64(c2[968],simde_mm_xor_si64(c2[69],simde_mm_xor_si64(c2[369],simde_mm_xor_si64(c2[1294],simde_mm_xor_si64(c2[1594],simde_mm_xor_si64(c2[995],simde_mm_xor_si64(c2[1598],simde_mm_xor_si64(c2[1898],simde_mm_xor_si64(c2[1299],simde_mm_xor_si64(c2[992],simde_mm_xor_si64(c2[393],simde_mm_xor_si64(c2[1929],simde_mm_xor_si64(c2[2229],simde_mm_xor_si64(c2[1630],simde_mm_xor_si64(c2[1628],simde_mm_xor_si64(c2[1029],simde_mm_xor_si64(c2[2233],simde_mm_xor_si64(c2[1334],simde_mm_xor_si64(c2[1634],simde_mm_xor_si64(c2[755],simde_mm_xor_si64(c2[1055],simde_mm_xor_si64(c2[456],simde_mm_xor_si64(c2[1950],simde_mm_xor_si64(c2[1051],simde_mm_xor_si64(c2[1351],simde_mm_xor_si64(c2[1984],simde_mm_xor_si64(c2[2284],simde_mm_xor_si64(c2[1685],simde_mm_xor_si64(c2[2293],simde_mm_xor_si64(c2[194],simde_mm_xor_si64(c2[1994],simde_mm_xor_si64(c2[2286],simde_mm_xor_si64(c2[1387],simde_mm_xor_si64(c2[1687],simde_mm_xor_si64(c2[2014],simde_mm_xor_si64(c2[2314],simde_mm_xor_si64(c2[1715],simde_mm_xor_si64(c2[2321],simde_mm_xor_si64(c2[1422],simde_mm_xor_si64(c2[1722],simde_mm_xor_si64(c2[243],simde_mm_xor_si64(c2[543],simde_mm_xor_si64(c2[2343],simde_mm_xor_si64(c2[1146],simde_mm_xor_si64(c2[547],simde_mm_xor_si64(c2[1147],simde_mm_xor_si64(c2[248],simde_mm_xor_si64(c2[548],simde_mm_xor_si64(c2[2370],simde_mm_xor_si64(c2[271],simde_mm_xor_si64(c2[2071],simde_mm_xor_si64(c2[2077],simde_mm_xor_si64(c2[2377],simde_mm_xor_si64(c2[1778],simde_mm_xor_si64(c2[1181],simde_mm_xor_si64(c2[282],c2[582]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
 
 //row: 35
-     d2[525]=_mm_xor_si64(c2[10],_mm_xor_si64(c2[310],_mm_xor_si64(c2[1809],_mm_xor_si64(c2[601],_mm_xor_si64(c2[638],_mm_xor_si64(c2[938],_mm_xor_si64(c2[2140],_mm_xor_si64(c2[2136],_mm_xor_si64(c2[1540],_mm_xor_si64(c2[1860],_mm_xor_si64(c2[2160],_mm_xor_si64(c2[973],_mm_xor_si64(c2[1299],_mm_xor_si64(c2[1599],_mm_xor_si64(c2[1903],_mm_xor_si64(c2[997],_mm_xor_si64(c2[2234],_mm_xor_si64(c2[1633],_mm_xor_si64(c2[2223],_mm_xor_si64(c2[1060],_mm_xor_si64(c2[1955],_mm_xor_si64(c2[463],_mm_xor_si64(c2[1989],_mm_xor_si64(c2[2289],_mm_xor_si64(c2[184],_mm_xor_si64(c2[2291],_mm_xor_si64(c2[2319],_mm_xor_si64(c2[2311],_mm_xor_si64(c2[548],_mm_xor_si64(c2[1151],_mm_xor_si64(c2[1152],_mm_xor_si64(c2[2375],_mm_xor_si64(c2[276],_mm_xor_si64(c2[2382],c2[1171]))))))))))))))))))))))))))))))))));
+     d2[525]=simde_mm_xor_si64(c2[10],simde_mm_xor_si64(c2[310],simde_mm_xor_si64(c2[1809],simde_mm_xor_si64(c2[601],simde_mm_xor_si64(c2[638],simde_mm_xor_si64(c2[938],simde_mm_xor_si64(c2[2140],simde_mm_xor_si64(c2[2136],simde_mm_xor_si64(c2[1540],simde_mm_xor_si64(c2[1860],simde_mm_xor_si64(c2[2160],simde_mm_xor_si64(c2[973],simde_mm_xor_si64(c2[1299],simde_mm_xor_si64(c2[1599],simde_mm_xor_si64(c2[1903],simde_mm_xor_si64(c2[997],simde_mm_xor_si64(c2[2234],simde_mm_xor_si64(c2[1633],simde_mm_xor_si64(c2[2223],simde_mm_xor_si64(c2[1060],simde_mm_xor_si64(c2[1955],simde_mm_xor_si64(c2[463],simde_mm_xor_si64(c2[1989],simde_mm_xor_si64(c2[2289],simde_mm_xor_si64(c2[184],simde_mm_xor_si64(c2[2291],simde_mm_xor_si64(c2[2319],simde_mm_xor_si64(c2[2311],simde_mm_xor_si64(c2[548],simde_mm_xor_si64(c2[1151],simde_mm_xor_si64(c2[1152],simde_mm_xor_si64(c2[2375],simde_mm_xor_si64(c2[276],simde_mm_xor_si64(c2[2382],c2[1171]))))))))))))))))))))))))))))))))));
 
 //row: 36
-     d2[540]=_mm_xor_si64(c2[607],_mm_xor_si64(c2[74],c2[823]));
+     d2[540]=simde_mm_xor_si64(c2[607],simde_mm_xor_si64(c2[74],c2[823]));
 
 //row: 37
-     d2[555]=_mm_xor_si64(c2[907],_mm_xor_si64(c2[1202],_mm_xor_si64(c2[7],_mm_xor_si64(c2[302],_mm_xor_si64(c2[1213],_mm_xor_si64(c2[1508],_mm_xor_si64(c2[1535],_mm_xor_si64(c2[1830],_mm_xor_si64(c2[338],_mm_xor_si64(c2[633],_mm_xor_si64(c2[334],_mm_xor_si64(c2[344],_mm_xor_si64(c2[644],_mm_xor_si64(c2[373],_mm_xor_si64(c2[668],_mm_xor_si64(c2[1570],_mm_xor_si64(c2[1565],_mm_xor_si64(c2[1865],_mm_xor_si64(c2[2196],_mm_xor_si64(c2[92],_mm_xor_si64(c2[101],_mm_xor_si64(c2[396],_mm_xor_si64(c2[1594],_mm_xor_si64(c2[1904],_mm_xor_si64(c2[432],_mm_xor_si64(c2[727],_mm_xor_si64(c2[2230],_mm_xor_si64(c2[126],_mm_xor_si64(c2[421],_mm_xor_si64(c2[431],_mm_xor_si64(c2[731],_mm_xor_si64(c2[1657],_mm_xor_si64(c2[1952],_mm_xor_si64(c2[153],_mm_xor_si64(c2[163],_mm_xor_si64(c2[463],_mm_xor_si64(c2[487],_mm_xor_si64(c2[782],_mm_xor_si64(c2[781],_mm_xor_si64(c2[1091],_mm_xor_si64(c2[489],_mm_xor_si64(c2[484],_mm_xor_si64(c2[784],_mm_xor_si64(c2[517],_mm_xor_si64(c2[812],_mm_xor_si64(c2[524],_mm_xor_si64(c2[519],_mm_xor_si64(c2[819],_mm_xor_si64(c2[1145],_mm_xor_si64(c2[1440],_mm_xor_si64(c2[1748],_mm_xor_si64(c2[2043],_mm_xor_si64(c2[1749],_mm_xor_si64(c2[1744],_mm_xor_si64(c2[2044],_mm_xor_si64(c2[873],_mm_xor_si64(c2[1183],_mm_xor_si64(c2[580],_mm_xor_si64(c2[875],_mm_xor_si64(c2[1783],_mm_xor_si64(c2[1778],c2[2078])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+     d2[555]=simde_mm_xor_si64(c2[907],simde_mm_xor_si64(c2[1202],simde_mm_xor_si64(c2[7],simde_mm_xor_si64(c2[302],simde_mm_xor_si64(c2[1213],simde_mm_xor_si64(c2[1508],simde_mm_xor_si64(c2[1535],simde_mm_xor_si64(c2[1830],simde_mm_xor_si64(c2[338],simde_mm_xor_si64(c2[633],simde_mm_xor_si64(c2[334],simde_mm_xor_si64(c2[344],simde_mm_xor_si64(c2[644],simde_mm_xor_si64(c2[373],simde_mm_xor_si64(c2[668],simde_mm_xor_si64(c2[1570],simde_mm_xor_si64(c2[1565],simde_mm_xor_si64(c2[1865],simde_mm_xor_si64(c2[2196],simde_mm_xor_si64(c2[92],simde_mm_xor_si64(c2[101],simde_mm_xor_si64(c2[396],simde_mm_xor_si64(c2[1594],simde_mm_xor_si64(c2[1904],simde_mm_xor_si64(c2[432],simde_mm_xor_si64(c2[727],simde_mm_xor_si64(c2[2230],simde_mm_xor_si64(c2[126],simde_mm_xor_si64(c2[421],simde_mm_xor_si64(c2[431],simde_mm_xor_si64(c2[731],simde_mm_xor_si64(c2[1657],simde_mm_xor_si64(c2[1952],simde_mm_xor_si64(c2[153],simde_mm_xor_si64(c2[163],simde_mm_xor_si64(c2[463],simde_mm_xor_si64(c2[487],simde_mm_xor_si64(c2[782],simde_mm_xor_si64(c2[781],simde_mm_xor_si64(c2[1091],simde_mm_xor_si64(c2[489],simde_mm_xor_si64(c2[484],simde_mm_xor_si64(c2[784],simde_mm_xor_si64(c2[517],simde_mm_xor_si64(c2[812],simde_mm_xor_si64(c2[524],simde_mm_xor_si64(c2[519],simde_mm_xor_si64(c2[819],simde_mm_xor_si64(c2[1145],simde_mm_xor_si64(c2[1440],simde_mm_xor_si64(c2[1748],simde_mm_xor_si64(c2[2043],simde_mm_xor_si64(c2[1749],simde_mm_xor_si64(c2[1744],simde_mm_xor_si64(c2[2044],simde_mm_xor_si64(c2[873],simde_mm_xor_si64(c2[1183],simde_mm_xor_si64(c2[580],simde_mm_xor_si64(c2[875],simde_mm_xor_si64(c2[1783],simde_mm_xor_si64(c2[1778],c2[2078])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
 
 //row: 38
-     d2[570]=_mm_xor_si64(c2[601],_mm_xor_si64(c2[901],_mm_xor_si64(c2[1],_mm_xor_si64(c2[1207],_mm_xor_si64(c2[1244],_mm_xor_si64(c2[1544],_mm_xor_si64(c2[332],_mm_xor_si64(c2[343],_mm_xor_si64(c2[635],_mm_xor_si64(c2[67],_mm_xor_si64(c2[367],_mm_xor_si64(c2[1564],_mm_xor_si64(c2[1890],_mm_xor_si64(c2[2190],_mm_xor_si64(c2[95],_mm_xor_si64(c2[1603],_mm_xor_si64(c2[426],_mm_xor_si64(c2[2224],_mm_xor_si64(c2[430],_mm_xor_si64(c2[1651],_mm_xor_si64(c2[162],_mm_xor_si64(c2[455],_mm_xor_si64(c2[181],_mm_xor_si64(c2[481],_mm_xor_si64(c2[790],_mm_xor_si64(c2[483],_mm_xor_si64(c2[511],_mm_xor_si64(c2[518],_mm_xor_si64(c2[1154],_mm_xor_si64(c2[1742],_mm_xor_si64(c2[1743],_mm_xor_si64(c2[582],_mm_xor_si64(c2[882],_mm_xor_si64(c2[574],c2[1777]))))))))))))))))))))))))))))))))));
+     d2[570]=simde_mm_xor_si64(c2[601],simde_mm_xor_si64(c2[901],simde_mm_xor_si64(c2[1],simde_mm_xor_si64(c2[1207],simde_mm_xor_si64(c2[1244],simde_mm_xor_si64(c2[1544],simde_mm_xor_si64(c2[332],simde_mm_xor_si64(c2[343],simde_mm_xor_si64(c2[635],simde_mm_xor_si64(c2[67],simde_mm_xor_si64(c2[367],simde_mm_xor_si64(c2[1564],simde_mm_xor_si64(c2[1890],simde_mm_xor_si64(c2[2190],simde_mm_xor_si64(c2[95],simde_mm_xor_si64(c2[1603],simde_mm_xor_si64(c2[426],simde_mm_xor_si64(c2[2224],simde_mm_xor_si64(c2[430],simde_mm_xor_si64(c2[1651],simde_mm_xor_si64(c2[162],simde_mm_xor_si64(c2[455],simde_mm_xor_si64(c2[181],simde_mm_xor_si64(c2[481],simde_mm_xor_si64(c2[790],simde_mm_xor_si64(c2[483],simde_mm_xor_si64(c2[511],simde_mm_xor_si64(c2[518],simde_mm_xor_si64(c2[1154],simde_mm_xor_si64(c2[1742],simde_mm_xor_si64(c2[1743],simde_mm_xor_si64(c2[582],simde_mm_xor_si64(c2[882],simde_mm_xor_si64(c2[574],c2[1777]))))))))))))))))))))))))))))))))));
 
 //row: 39
-     d2[585]=_mm_xor_si64(c2[1811],_mm_xor_si64(c2[2111],_mm_xor_si64(c2[911],_mm_xor_si64(c2[1211],_mm_xor_si64(c2[3],_mm_xor_si64(c2[2105],_mm_xor_si64(c2[40],_mm_xor_si64(c2[340],_mm_xor_si64(c2[1542],_mm_xor_si64(c2[1538],_mm_xor_si64(c2[1262],_mm_xor_si64(c2[1562],_mm_xor_si64(c2[360],_mm_xor_si64(c2[701],_mm_xor_si64(c2[1001],_mm_xor_si64(c2[990],_mm_xor_si64(c2[1290],_mm_xor_si64(c2[399],_mm_xor_si64(c2[1321],_mm_xor_si64(c2[1621],_mm_xor_si64(c2[1020],_mm_xor_si64(c2[1625],_mm_xor_si64(c2[162],_mm_xor_si64(c2[462],_mm_xor_si64(c2[1357],_mm_xor_si64(c2[1391],_mm_xor_si64(c2[1691],_mm_xor_si64(c2[1685],_mm_xor_si64(c2[1985],_mm_xor_si64(c2[1693],_mm_xor_si64(c2[1421],_mm_xor_si64(c2[1721],_mm_xor_si64(c2[1713],_mm_xor_si64(c2[1715],_mm_xor_si64(c2[2049],_mm_xor_si64(c2[2349],_mm_xor_si64(c2[553],_mm_xor_si64(c2[554],_mm_xor_si64(c2[1777],_mm_xor_si64(c2[2077],_mm_xor_si64(c2[1484],_mm_xor_si64(c2[1784],c2[573]))))))))))))))))))))))))))))))))))))))))));
+     d2[585]=simde_mm_xor_si64(c2[1811],simde_mm_xor_si64(c2[2111],simde_mm_xor_si64(c2[911],simde_mm_xor_si64(c2[1211],simde_mm_xor_si64(c2[3],simde_mm_xor_si64(c2[2105],simde_mm_xor_si64(c2[40],simde_mm_xor_si64(c2[340],simde_mm_xor_si64(c2[1542],simde_mm_xor_si64(c2[1538],simde_mm_xor_si64(c2[1262],simde_mm_xor_si64(c2[1562],simde_mm_xor_si64(c2[360],simde_mm_xor_si64(c2[701],simde_mm_xor_si64(c2[1001],simde_mm_xor_si64(c2[990],simde_mm_xor_si64(c2[1290],simde_mm_xor_si64(c2[399],simde_mm_xor_si64(c2[1321],simde_mm_xor_si64(c2[1621],simde_mm_xor_si64(c2[1020],simde_mm_xor_si64(c2[1625],simde_mm_xor_si64(c2[162],simde_mm_xor_si64(c2[462],simde_mm_xor_si64(c2[1357],simde_mm_xor_si64(c2[1391],simde_mm_xor_si64(c2[1691],simde_mm_xor_si64(c2[1685],simde_mm_xor_si64(c2[1985],simde_mm_xor_si64(c2[1693],simde_mm_xor_si64(c2[1421],simde_mm_xor_si64(c2[1721],simde_mm_xor_si64(c2[1713],simde_mm_xor_si64(c2[1715],simde_mm_xor_si64(c2[2049],simde_mm_xor_si64(c2[2349],simde_mm_xor_si64(c2[553],simde_mm_xor_si64(c2[554],simde_mm_xor_si64(c2[1777],simde_mm_xor_si64(c2[2077],simde_mm_xor_si64(c2[1484],simde_mm_xor_si64(c2[1784],c2[573]))))))))))))))))))))))))))))))))))))))))));
 
 //row: 40
-     d2[600]=_mm_xor_si64(c2[1211],_mm_xor_si64(c2[1810],_mm_xor_si64(c2[311],_mm_xor_si64(c2[910],_mm_xor_si64(c2[1502],_mm_xor_si64(c2[2101],_mm_xor_si64(c2[1839],_mm_xor_si64(c2[39],_mm_xor_si64(c2[642],_mm_xor_si64(c2[1241],_mm_xor_si64(c2[638],_mm_xor_si64(c2[937],_mm_xor_si64(c2[1237],_mm_xor_si64(c2[662],_mm_xor_si64(c2[1261],_mm_xor_si64(c2[1874],_mm_xor_si64(c2[2173],_mm_xor_si64(c2[74],_mm_xor_si64(c2[1869],_mm_xor_si64(c2[101],_mm_xor_si64(c2[700],_mm_xor_si64(c2[390],_mm_xor_si64(c2[1004],_mm_xor_si64(c2[1898],_mm_xor_si64(c2[98],_mm_xor_si64(c2[721],_mm_xor_si64(c2[1320],_mm_xor_si64(c2[120],_mm_xor_si64(c2[734],_mm_xor_si64(c2[725],_mm_xor_si64(c2[1024],_mm_xor_si64(c2[1324],_mm_xor_si64(c2[1961],_mm_xor_si64(c2[161],_mm_xor_si64(c2[457],_mm_xor_si64(c2[756],_mm_xor_si64(c2[1056],_mm_xor_si64(c2[791],_mm_xor_si64(c2[1390],_mm_xor_si64(c2[1085],_mm_xor_si64(c2[1684],_mm_xor_si64(c2[793],_mm_xor_si64(c2[1092],_mm_xor_si64(c2[1392],_mm_xor_si64(c2[821],_mm_xor_si64(c2[1420],_mm_xor_si64(c2[813],_mm_xor_si64(c2[1112],_mm_xor_si64(c2[1412],_mm_xor_si64(c2[1449],_mm_xor_si64(c2[2048],_mm_xor_si64(c2[2052],_mm_xor_si64(c2[252],_mm_xor_si64(c2[2053],_mm_xor_si64(c2[2352],_mm_xor_si64(c2[253],_mm_xor_si64(c2[1177],_mm_xor_si64(c2[1776],_mm_xor_si64(c2[884],_mm_xor_si64(c2[1483],_mm_xor_si64(c2[2072],_mm_xor_si64(c2[2371],c2[272]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+     d2[600]=simde_mm_xor_si64(c2[1211],simde_mm_xor_si64(c2[1810],simde_mm_xor_si64(c2[311],simde_mm_xor_si64(c2[910],simde_mm_xor_si64(c2[1502],simde_mm_xor_si64(c2[2101],simde_mm_xor_si64(c2[1839],simde_mm_xor_si64(c2[39],simde_mm_xor_si64(c2[642],simde_mm_xor_si64(c2[1241],simde_mm_xor_si64(c2[638],simde_mm_xor_si64(c2[937],simde_mm_xor_si64(c2[1237],simde_mm_xor_si64(c2[662],simde_mm_xor_si64(c2[1261],simde_mm_xor_si64(c2[1874],simde_mm_xor_si64(c2[2173],simde_mm_xor_si64(c2[74],simde_mm_xor_si64(c2[1869],simde_mm_xor_si64(c2[101],simde_mm_xor_si64(c2[700],simde_mm_xor_si64(c2[390],simde_mm_xor_si64(c2[1004],simde_mm_xor_si64(c2[1898],simde_mm_xor_si64(c2[98],simde_mm_xor_si64(c2[721],simde_mm_xor_si64(c2[1320],simde_mm_xor_si64(c2[120],simde_mm_xor_si64(c2[734],simde_mm_xor_si64(c2[725],simde_mm_xor_si64(c2[1024],simde_mm_xor_si64(c2[1324],simde_mm_xor_si64(c2[1961],simde_mm_xor_si64(c2[161],simde_mm_xor_si64(c2[457],simde_mm_xor_si64(c2[756],simde_mm_xor_si64(c2[1056],simde_mm_xor_si64(c2[791],simde_mm_xor_si64(c2[1390],simde_mm_xor_si64(c2[1085],simde_mm_xor_si64(c2[1684],simde_mm_xor_si64(c2[793],simde_mm_xor_si64(c2[1092],simde_mm_xor_si64(c2[1392],simde_mm_xor_si64(c2[821],simde_mm_xor_si64(c2[1420],simde_mm_xor_si64(c2[813],simde_mm_xor_si64(c2[1112],simde_mm_xor_si64(c2[1412],simde_mm_xor_si64(c2[1449],simde_mm_xor_si64(c2[2048],simde_mm_xor_si64(c2[2052],simde_mm_xor_si64(c2[252],simde_mm_xor_si64(c2[2053],simde_mm_xor_si64(c2[2352],simde_mm_xor_si64(c2[253],simde_mm_xor_si64(c2[1177],simde_mm_xor_si64(c2[1776],simde_mm_xor_si64(c2[884],simde_mm_xor_si64(c2[1483],simde_mm_xor_si64(c2[2072],simde_mm_xor_si64(c2[2371],c2[272]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
 
 //row: 41
-     d2[615]=_mm_xor_si64(c2[2101],_mm_xor_si64(c2[2],_mm_xor_si64(c2[1501],_mm_xor_si64(c2[308],_mm_xor_si64(c2[330],_mm_xor_si64(c2[630],_mm_xor_si64(c2[1832],_mm_xor_si64(c2[1843],_mm_xor_si64(c2[1536],_mm_xor_si64(c2[1567],_mm_xor_si64(c2[1867],_mm_xor_si64(c2[665],_mm_xor_si64(c2[991],_mm_xor_si64(c2[1291],_mm_xor_si64(c2[1595],_mm_xor_si64(c2[704],_mm_xor_si64(c2[1926],_mm_xor_si64(c2[1325],_mm_xor_si64(c2[1930],_mm_xor_si64(c2[752],_mm_xor_si64(c2[1662],_mm_xor_si64(c2[2261],_mm_xor_si64(c2[1681],_mm_xor_si64(c2[1981],_mm_xor_si64(c2[2290],_mm_xor_si64(c2[1983],_mm_xor_si64(c2[2011],_mm_xor_si64(c2[2018],_mm_xor_si64(c2[240],_mm_xor_si64(c2[843],_mm_xor_si64(c2[844],_mm_xor_si64(c2[2082],_mm_xor_si64(c2[2382],_mm_xor_si64(c2[2074],c2[878]))))))))))))))))))))))))))))))))));
+     d2[615]=simde_mm_xor_si64(c2[2101],simde_mm_xor_si64(c2[2],simde_mm_xor_si64(c2[1501],simde_mm_xor_si64(c2[308],simde_mm_xor_si64(c2[330],simde_mm_xor_si64(c2[630],simde_mm_xor_si64(c2[1832],simde_mm_xor_si64(c2[1843],simde_mm_xor_si64(c2[1536],simde_mm_xor_si64(c2[1567],simde_mm_xor_si64(c2[1867],simde_mm_xor_si64(c2[665],simde_mm_xor_si64(c2[991],simde_mm_xor_si64(c2[1291],simde_mm_xor_si64(c2[1595],simde_mm_xor_si64(c2[704],simde_mm_xor_si64(c2[1926],simde_mm_xor_si64(c2[1325],simde_mm_xor_si64(c2[1930],simde_mm_xor_si64(c2[752],simde_mm_xor_si64(c2[1662],simde_mm_xor_si64(c2[2261],simde_mm_xor_si64(c2[1681],simde_mm_xor_si64(c2[1981],simde_mm_xor_si64(c2[2290],simde_mm_xor_si64(c2[1983],simde_mm_xor_si64(c2[2011],simde_mm_xor_si64(c2[2018],simde_mm_xor_si64(c2[240],simde_mm_xor_si64(c2[843],simde_mm_xor_si64(c2[844],simde_mm_xor_si64(c2[2082],simde_mm_xor_si64(c2[2382],simde_mm_xor_si64(c2[2074],c2[878]))))))))))))))))))))))))))))))))));
   }
 }
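
Note on the mechanical rename above: every `_mm_xor_si64` becomes `simde_mm_xor_si64`, so these generated parity rows no longer require x86 MMX. Below is a minimal, self-contained sketch of what that one call does, assuming SIMDE's standard header layout (`simde/x86/mmx.h`); it is illustrative only and not part of the patch:

    #include <stdint.h>
    #include <stdio.h>
    #include <string.h>
    #include <simde/x86/mmx.h>

    int main(void) {
      /* two 64-bit operands, standing in for the c2[]/d2[] parity words above */
      uint64_t a = 0x00FF00FF00FF00FFULL, b = 0x0F0F0F0F0F0F0F0FULL;
      simde__m64 va, vb, vr;
      memcpy(&va, &a, sizeof va);
      memcpy(&vb, &b, sizeof vb);
      /* lowers to PXOR on x86 with MMX; plain 64-bit XOR (or NEON) elsewhere */
      vr = simde_mm_xor_si64(va, vb);
      uint64_t r;
      memcpy(&r, &vr, sizeof r);
      printf("%016llx\n", (unsigned long long)r); /* prints 0ff00ff00ff00ff0 */
      return 0;
    }

Because XOR is the whole of the parity accumulation, the rename changes nothing numerically; it only swaps the intrinsic provider.
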
diff --git a/openair1/PHY/CODING/nrLDPC_encoder/ldpc_BG2_Zc128_byte.c b/openair1/PHY/CODING/nrLDPC_encoder/ldpc_BG2_Zc128_byte.c
index f82e92c6b346f6beb0f75f887b5684ff0830c93b..90195387524187387a5b02366c7813522bcaed3f 100644
--- a/openair1/PHY/CODING/nrLDPC_encoder/ldpc_BG2_Zc128_byte.c
+++ b/openair1/PHY/CODING/nrLDPC_encoder/ldpc_BG2_Zc128_byte.c
@@ -1,9 +1,9 @@
+#ifdef __AVX2__ // AVX2 builds keep this 256-bit body; the fallback lives in ldpc_BG2_Zc128_byte_128.c
 #include "PHY/sse_intrin.h"
 // generated code for Zc=128, byte encoding
 static inline __attribute__ ((no_sanitize("address", "undefined"))) void ldpc_BG2_Zc128_byte(uint8_t *c,uint8_t *d) {
-  __m256i *csimd=(__m256i *)c,*dsimd=(__m256i *)d;
-
-  __m256i *c2,*d2;
+  simde__m256i *csimd=(simde__m256i *)c,*dsimd=(simde__m256i *)d; // SIMDE types instead of raw __m256i, so non-x86 hosts compile via emulation
+  simde__m256i *c2,*d2;
 
   int i2;
   for (i2=0; i2<4; i2++) {
@@ -137,3 +137,4 @@ static inline __attribute__ ((no_sanitize("address", "undefined"))) void ldpc_BG
      d2[164]=simde_mm256_xor_si256(c2[2483],simde_mm256_xor_si256(c2[2403],simde_mm256_xor_si256(c2[2240],simde_mm256_xor_si256(c2[482],simde_mm256_xor_si256(c2[891],simde_mm256_xor_si256(c2[811],simde_mm256_xor_si256(c2[571],simde_mm256_xor_si256(c2[2331],simde_mm256_xor_si256(c2[88],simde_mm256_xor_si256(c2[178],simde_mm256_xor_si256(c2[98],simde_mm256_xor_si256(c2[1217],simde_mm256_xor_si256(c2[1304],simde_mm256_xor_si256(c2[1224],simde_mm256_xor_si256(c2[2184],simde_mm256_xor_si256(c2[105],simde_mm256_xor_si256(c2[1475],simde_mm256_xor_si256(c2[753],simde_mm256_xor_si256(c2[1552],simde_mm256_xor_si256(c2[1483],simde_mm256_xor_si256(c2[2362],simde_mm256_xor_si256(c2[443],simde_mm256_xor_si256(c2[1569],simde_mm256_xor_si256(c2[1489],simde_mm256_xor_si256(c2[1890],simde_mm256_xor_si256(c2[530],simde_mm256_xor_si256(c2[1416],simde_mm256_xor_si256(c2[937],simde_mm256_xor_si256(c2[1746],simde_mm256_xor_si256(c2[467],simde_mm256_xor_si256(c2[624],simde_mm256_xor_si256(c2[314],simde_mm256_xor_si256(c2[234],simde_mm256_xor_si256(c2[1435],c2[1755]))))))))))))))))))))))))))))))))));
   }
 }
+#endif // __AVX2__
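
The `#ifdef __AVX2__` guard above pairs with the `#ifndef __AVX2__` guard in the new `ldpc_BG2_Zc128_byte_128.c` below, so exactly one body of `ldpc_BG2_Zc128_byte()` survives preprocessing in any given build. A minimal sketch of the same compile-time dispatch idiom, with a hypothetical function name (`encoder_variant` is not from the patch):

    #include <stdio.h>

    /* gcc -mavx2 defines __AVX2__ and selects the 256-bit body;
       a plain build falls through to the 128-bit one. */
    #ifdef __AVX2__
    static const char *encoder_variant(void) { return "simde__m256i, 4 iterations"; }
    #else
    static const char *encoder_variant(void) { return "simde__m128i, 8 iterations"; }
    #endif

    int main(void) {
      printf("ldpc_BG2_Zc128_byte body selected: %s\n", encoder_variant());
      return 0;
    }

Keeping both bodies `static inline` in separate translation units also avoids any multiple-definition conflict at link time.
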
diff --git a/openair1/PHY/CODING/nrLDPC_encoder/ldpc_BG2_Zc128_byte_128.c b/openair1/PHY/CODING/nrLDPC_encoder/ldpc_BG2_Zc128_byte_128.c
new file mode 100644
index 0000000000000000000000000000000000000000..9eac179fc3f432c7d282d9785fe3c74483848894
--- /dev/null
+++ b/openair1/PHY/CODING/nrLDPC_encoder/ldpc_BG2_Zc128_byte_128.c
@@ -0,0 +1,141 @@
+#ifndef __AVX2__ // non-AVX2 fallback: 128-bit SIMDE variant of the same encoder
+#include "PHY/sse_intrin.h"
+// generated code for Zc=128, byte encoding (128-bit SIMD path)
+static inline void ldpc_BG2_Zc128_byte(uint8_t *c,uint8_t *d) {
+  simde__m128i *csimd=(simde__m128i *)c,*dsimd=(simde__m128i *)d;
+
+  simde__m128i *c2,*d2;
+
+  int i2;
+  for (i2=0; i2<8; i2++) { // 8 x 128-bit blocks cover the same data as the AVX2 file's 4 x 256-bit
+     c2=&csimd[i2];
+     d2=&dsimd[i2];
+
+//row: 0
+     d2[0]=simde_mm_xor_si128(c2[1280],simde_mm_xor_si128(c2[962],simde_mm_xor_si128(c2[5],simde_mm_xor_si128(c2[663],simde_mm_xor_si128(c2[183],simde_mm_xor_si128(c2[1136],simde_mm_xor_si128(c2[1796],simde_mm_xor_si128(c2[1475],simde_mm_xor_si128(c2[1489],simde_mm_xor_si128(c2[850],simde_mm_xor_si128(c2[1810],simde_mm_xor_si128(c2[1991],simde_mm_xor_si128(c2[547],simde_mm_xor_si128(c2[2145],simde_mm_xor_si128(c2[2007],simde_mm_xor_si128(c2[1206],simde_mm_xor_si128(c2[2019],simde_mm_xor_si128(c2[262],simde_mm_xor_si128(c2[101],simde_mm_xor_si128(c2[1873],simde_mm_xor_si128(c2[915],simde_mm_xor_si128(c2[2533],simde_mm_xor_si128(c2[2534],simde_mm_xor_si128(c2[289],simde_mm_xor_si128(c2[2068],simde_mm_xor_si128(c2[1911],c2[2551]))))))))))))))))))))))))));
+
+//row: 1
+     d2[8]=simde_mm_xor_si128(c2[1440],simde_mm_xor_si128(c2[1280],simde_mm_xor_si128(c2[962],simde_mm_xor_si128(c2[5],simde_mm_xor_si128(c2[823],simde_mm_xor_si128(c2[663],simde_mm_xor_si128(c2[183],simde_mm_xor_si128(c2[1136],simde_mm_xor_si128(c2[1956],simde_mm_xor_si128(c2[1796],simde_mm_xor_si128(c2[1475],simde_mm_xor_si128(c2[1649],simde_mm_xor_si128(c2[1489],simde_mm_xor_si128(c2[850],simde_mm_xor_si128(c2[1810],simde_mm_xor_si128(c2[1991],simde_mm_xor_si128(c2[547],simde_mm_xor_si128(c2[2145],simde_mm_xor_si128(c2[2007],simde_mm_xor_si128(c2[1206],simde_mm_xor_si128(c2[2179],simde_mm_xor_si128(c2[2019],simde_mm_xor_si128(c2[262],simde_mm_xor_si128(c2[101],simde_mm_xor_si128(c2[1873],simde_mm_xor_si128(c2[915],simde_mm_xor_si128(c2[2533],simde_mm_xor_si128(c2[2534],simde_mm_xor_si128(c2[289],simde_mm_xor_si128(c2[2228],simde_mm_xor_si128(c2[2068],simde_mm_xor_si128(c2[1911],c2[2551]))))))))))))))))))))))))))))))));
+
+//row: 2
+     d2[16]=simde_mm_xor_si128(c2[1440],simde_mm_xor_si128(c2[1280],simde_mm_xor_si128(c2[1122],simde_mm_xor_si128(c2[962],simde_mm_xor_si128(c2[5],simde_mm_xor_si128(c2[823],simde_mm_xor_si128(c2[663],simde_mm_xor_si128(c2[183],simde_mm_xor_si128(c2[1136],simde_mm_xor_si128(c2[1956],simde_mm_xor_si128(c2[1796],simde_mm_xor_si128(c2[1475],simde_mm_xor_si128(c2[1649],simde_mm_xor_si128(c2[1489],simde_mm_xor_si128(c2[1010],simde_mm_xor_si128(c2[850],simde_mm_xor_si128(c2[1810],simde_mm_xor_si128(c2[2151],simde_mm_xor_si128(c2[1991],simde_mm_xor_si128(c2[547],simde_mm_xor_si128(c2[2145],simde_mm_xor_si128(c2[2167],simde_mm_xor_si128(c2[2007],simde_mm_xor_si128(c2[1206],simde_mm_xor_si128(c2[2179],simde_mm_xor_si128(c2[2019],simde_mm_xor_si128(c2[422],simde_mm_xor_si128(c2[262],simde_mm_xor_si128(c2[101],simde_mm_xor_si128(c2[2033],simde_mm_xor_si128(c2[1873],simde_mm_xor_si128(c2[915],simde_mm_xor_si128(c2[134],simde_mm_xor_si128(c2[2533],simde_mm_xor_si128(c2[2534],simde_mm_xor_si128(c2[289],simde_mm_xor_si128(c2[2228],simde_mm_xor_si128(c2[2068],simde_mm_xor_si128(c2[2071],simde_mm_xor_si128(c2[1911],c2[2551]))))))))))))))))))))))))))))))))))))))));
+
+//row: 3
+     d2[24]=simde_mm_xor_si128(c2[1280],simde_mm_xor_si128(c2[962],simde_mm_xor_si128(c2[5],simde_mm_xor_si128(c2[663],simde_mm_xor_si128(c2[183],simde_mm_xor_si128(c2[1296],simde_mm_xor_si128(c2[1136],simde_mm_xor_si128(c2[1796],simde_mm_xor_si128(c2[1635],simde_mm_xor_si128(c2[1475],simde_mm_xor_si128(c2[1489],simde_mm_xor_si128(c2[850],simde_mm_xor_si128(c2[1810],simde_mm_xor_si128(c2[1991],simde_mm_xor_si128(c2[547],simde_mm_xor_si128(c2[2305],simde_mm_xor_si128(c2[2145],simde_mm_xor_si128(c2[2007],simde_mm_xor_si128(c2[1366],simde_mm_xor_si128(c2[1206],simde_mm_xor_si128(c2[2019],simde_mm_xor_si128(c2[262],simde_mm_xor_si128(c2[261],simde_mm_xor_si128(c2[101],simde_mm_xor_si128(c2[1873],simde_mm_xor_si128(c2[1075],simde_mm_xor_si128(c2[915],simde_mm_xor_si128(c2[2533],simde_mm_xor_si128(c2[2534],simde_mm_xor_si128(c2[449],simde_mm_xor_si128(c2[289],simde_mm_xor_si128(c2[2068],simde_mm_xor_si128(c2[1911],simde_mm_xor_si128(c2[144],c2[2551]))))))))))))))))))))))))))))))))));
+
+//row: 4
+     d2[32]=simde_mm_xor_si128(c2[5],simde_mm_xor_si128(c2[2404],simde_mm_xor_si128(c2[2086],simde_mm_xor_si128(c2[1121],simde_mm_xor_si128(c2[483],simde_mm_xor_si128(c2[1939],simde_mm_xor_si128(c2[1779],simde_mm_xor_si128(c2[1299],simde_mm_xor_si128(c2[2260],simde_mm_xor_si128(c2[981],simde_mm_xor_si128(c2[513],simde_mm_xor_si128(c2[353],simde_mm_xor_si128(c2[32],simde_mm_xor_si128(c2[214],simde_mm_xor_si128(c2[54],simde_mm_xor_si128(c2[1974],simde_mm_xor_si128(c2[375],simde_mm_xor_si128(c2[548],simde_mm_xor_si128(c2[1671],simde_mm_xor_si128(c2[710],simde_mm_xor_si128(c2[564],simde_mm_xor_si128(c2[2322],simde_mm_xor_si128(c2[736],simde_mm_xor_si128(c2[576],simde_mm_xor_si128(c2[1378],simde_mm_xor_si128(c2[1217],simde_mm_xor_si128(c2[438],simde_mm_xor_si128(c2[2039],simde_mm_xor_si128(c2[1090],simde_mm_xor_si128(c2[1091],simde_mm_xor_si128(c2[1413],simde_mm_xor_si128(c2[785],simde_mm_xor_si128(c2[625],simde_mm_xor_si128(c2[468],c2[1108]))))))))))))))))))))))))))))))))));
+
+//row: 5
+     d2[40]=simde_mm_xor_si128(c2[7],simde_mm_xor_si128(c2[2406],simde_mm_xor_si128(c2[2080],simde_mm_xor_si128(c2[1123],simde_mm_xor_si128(c2[1126],simde_mm_xor_si128(c2[1941],simde_mm_xor_si128(c2[1781],simde_mm_xor_si128(c2[1301],simde_mm_xor_si128(c2[2262],simde_mm_xor_si128(c2[1458],simde_mm_xor_si128(c2[515],simde_mm_xor_si128(c2[355],simde_mm_xor_si128(c2[34],simde_mm_xor_si128(c2[208],simde_mm_xor_si128(c2[48],simde_mm_xor_si128(c2[1968],simde_mm_xor_si128(c2[369],simde_mm_xor_si128(c2[550],simde_mm_xor_si128(c2[1665],simde_mm_xor_si128(c2[704],simde_mm_xor_si128(c2[566],simde_mm_xor_si128(c2[2324],simde_mm_xor_si128(c2[404],simde_mm_xor_si128(c2[738],simde_mm_xor_si128(c2[578],simde_mm_xor_si128(c2[1380],simde_mm_xor_si128(c2[1219],simde_mm_xor_si128(c2[432],simde_mm_xor_si128(c2[2033],simde_mm_xor_si128(c2[2513],simde_mm_xor_si128(c2[1092],simde_mm_xor_si128(c2[1093],simde_mm_xor_si128(c2[1415],simde_mm_xor_si128(c2[787],simde_mm_xor_si128(c2[627],simde_mm_xor_si128(c2[470],c2[1110]))))))))))))))))))))))))))))))))))));
+
+//row: 6
+     d2[48]=simde_mm_xor_si128(c2[1122],simde_mm_xor_si128(c2[962],simde_mm_xor_si128(c2[644],simde_mm_xor_si128(c2[2246],simde_mm_xor_si128(c2[1761],simde_mm_xor_si128(c2[497],simde_mm_xor_si128(c2[337],simde_mm_xor_si128(c2[2416],simde_mm_xor_si128(c2[818],simde_mm_xor_si128(c2[1638],simde_mm_xor_si128(c2[1478],simde_mm_xor_si128(c2[1157],simde_mm_xor_si128(c2[1331],simde_mm_xor_si128(c2[1171],simde_mm_xor_si128(c2[532],simde_mm_xor_si128(c2[1492],simde_mm_xor_si128(c2[1665],simde_mm_xor_si128(c2[229],simde_mm_xor_si128(c2[1827],simde_mm_xor_si128(c2[1681],simde_mm_xor_si128(c2[880],simde_mm_xor_si128(c2[726],simde_mm_xor_si128(c2[1861],simde_mm_xor_si128(c2[1701],simde_mm_xor_si128(c2[2503],simde_mm_xor_si128(c2[2342],simde_mm_xor_si128(c2[1555],simde_mm_xor_si128(c2[597],simde_mm_xor_si128(c2[2194],simde_mm_xor_si128(c2[2215],simde_mm_xor_si128(c2[2208],simde_mm_xor_si128(c2[2530],simde_mm_xor_si128(c2[1910],simde_mm_xor_si128(c2[1750],simde_mm_xor_si128(c2[1585],simde_mm_xor_si128(c2[2225],c2[2065]))))))))))))))))))))))))))))))))))));
+
+//row: 7
+     d2[56]=simde_mm_xor_si128(c2[1920],simde_mm_xor_si128(c2[1760],simde_mm_xor_si128(c2[1927],simde_mm_xor_si128(c2[1442],simde_mm_xor_si128(c2[1601],simde_mm_xor_si128(c2[485],simde_mm_xor_si128(c2[644],simde_mm_xor_si128(c2[1303],simde_mm_xor_si128(c2[1143],simde_mm_xor_si128(c2[1302],simde_mm_xor_si128(c2[663],simde_mm_xor_si128(c2[822],simde_mm_xor_si128(c2[1943],simde_mm_xor_si128(c2[1616],simde_mm_xor_si128(c2[1783],simde_mm_xor_si128(c2[176],simde_mm_xor_si128(c2[2436],simde_mm_xor_si128(c2[2276],simde_mm_xor_si128(c2[2435],simde_mm_xor_si128(c2[2274],simde_mm_xor_si128(c2[1955],simde_mm_xor_si128(c2[2114],simde_mm_xor_si128(c2[2129],simde_mm_xor_si128(c2[1969],simde_mm_xor_si128(c2[2128],simde_mm_xor_si128(c2[1330],simde_mm_xor_si128(c2[1489],simde_mm_xor_si128(c2[2290],simde_mm_xor_si128(c2[2449],simde_mm_xor_si128(c2[2471],simde_mm_xor_si128(c2[71],simde_mm_xor_si128(c2[1027],simde_mm_xor_si128(c2[1186],simde_mm_xor_si128(c2[385],simde_mm_xor_si128(c2[66],simde_mm_xor_si128(c2[225],simde_mm_xor_si128(c2[2487],simde_mm_xor_si128(c2[87],simde_mm_xor_si128(c2[2005],simde_mm_xor_si128(c2[1686],simde_mm_xor_si128(c2[1845],simde_mm_xor_si128(c2[561],simde_mm_xor_si128(c2[100],simde_mm_xor_si128(c2[2499],simde_mm_xor_si128(c2[99],simde_mm_xor_si128(c2[742],simde_mm_xor_si128(c2[901],simde_mm_xor_si128(c2[900],simde_mm_xor_si128(c2[581],simde_mm_xor_si128(c2[740],simde_mm_xor_si128(c2[2353],simde_mm_xor_si128(c2[2512],simde_mm_xor_si128(c2[1714],simde_mm_xor_si128(c2[1395],simde_mm_xor_si128(c2[1554],simde_mm_xor_si128(c2[2032],simde_mm_xor_si128(c2[454],simde_mm_xor_si128(c2[613],simde_mm_xor_si128(c2[455],simde_mm_xor_si128(c2[614],simde_mm_xor_si128(c2[1088],simde_mm_xor_si128(c2[769],simde_mm_xor_si128(c2[928],simde_mm_xor_si128(c2[149],simde_mm_xor_si128(c2[2548],simde_mm_xor_si128(c2[148],simde_mm_xor_si128(c2[2391],simde_mm_xor_si128(c2[2550],simde_mm_xor_si128(c2[791],simde_mm_xor_si128(c2[464],c2[631]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 8
+     d2[64]=simde_mm_xor_si128(c2[2406],simde_mm_xor_si128(c2[2246],simde_mm_xor_si128(c2[2080],simde_mm_xor_si128(c2[1920],simde_mm_xor_si128(c2[963],simde_mm_xor_si128(c2[2240],simde_mm_xor_si128(c2[1781],simde_mm_xor_si128(c2[1621],simde_mm_xor_si128(c2[1141],simde_mm_xor_si128(c2[2102],simde_mm_xor_si128(c2[2261],simde_mm_xor_si128(c2[355],simde_mm_xor_si128(c2[195],simde_mm_xor_si128(c2[2433],simde_mm_xor_si128(c2[48],simde_mm_xor_si128(c2[2455],simde_mm_xor_si128(c2[1968],simde_mm_xor_si128(c2[1808],simde_mm_xor_si128(c2[209],simde_mm_xor_si128(c2[550],simde_mm_xor_si128(c2[390],simde_mm_xor_si128(c2[1505],simde_mm_xor_si128(c2[544],simde_mm_xor_si128(c2[566],simde_mm_xor_si128(c2[406],simde_mm_xor_si128(c2[2164],simde_mm_xor_si128(c2[578],simde_mm_xor_si128(c2[418],simde_mm_xor_si128(c2[1380],simde_mm_xor_si128(c2[1220],simde_mm_xor_si128(c2[1059],simde_mm_xor_si128(c2[432],simde_mm_xor_si128(c2[272],simde_mm_xor_si128(c2[1873],simde_mm_xor_si128(c2[1092],simde_mm_xor_si128(c2[932],simde_mm_xor_si128(c2[933],simde_mm_xor_si128(c2[1255],simde_mm_xor_si128(c2[627],simde_mm_xor_si128(c2[467],simde_mm_xor_si128(c2[470],simde_mm_xor_si128(c2[310],c2[950]))))))))))))))))))))))))))))))))))))))))));
+
+//row: 9
+     d2[72]=simde_mm_xor_si128(c2[0],simde_mm_xor_si128(c2[804],simde_mm_xor_si128(c2[2407],simde_mm_xor_si128(c2[486],simde_mm_xor_si128(c2[2081],simde_mm_xor_si128(c2[2080],simde_mm_xor_si128(c2[1124],simde_mm_xor_si128(c2[1942],simde_mm_xor_si128(c2[179],simde_mm_xor_si128(c2[1782],simde_mm_xor_si128(c2[2258],simde_mm_xor_si128(c2[1302],simde_mm_xor_si128(c2[660],simde_mm_xor_si128(c2[2263],simde_mm_xor_si128(c2[1780],simde_mm_xor_si128(c2[516],simde_mm_xor_si128(c2[1312],simde_mm_xor_si128(c2[356],simde_mm_xor_si128(c2[999],simde_mm_xor_si128(c2[35],simde_mm_xor_si128(c2[209],simde_mm_xor_si128(c2[1013],simde_mm_xor_si128(c2[49],simde_mm_xor_si128(c2[374],simde_mm_xor_si128(c2[1969],simde_mm_xor_si128(c2[1334],simde_mm_xor_si128(c2[370],simde_mm_xor_si128(c2[1507],simde_mm_xor_si128(c2[551],simde_mm_xor_si128(c2[71],simde_mm_xor_si128(c2[1666],simde_mm_xor_si128(c2[1669],simde_mm_xor_si128(c2[705],simde_mm_xor_si128(c2[1523],simde_mm_xor_si128(c2[567],simde_mm_xor_si128(c2[722],simde_mm_xor_si128(c2[2325],simde_mm_xor_si128(c2[739],simde_mm_xor_si128(c2[1543],simde_mm_xor_si128(c2[579],simde_mm_xor_si128(c2[2337],simde_mm_xor_si128(c2[1381],simde_mm_xor_si128(c2[2176],simde_mm_xor_si128(c2[1220],simde_mm_xor_si128(c2[1397],simde_mm_xor_si128(c2[433],simde_mm_xor_si128(c2[439],simde_mm_xor_si128(c2[2034],simde_mm_xor_si128(c2[2049],simde_mm_xor_si128(c2[1093],simde_mm_xor_si128(c2[2050],simde_mm_xor_si128(c2[1094],simde_mm_xor_si128(c2[2372],simde_mm_xor_si128(c2[1408],simde_mm_xor_si128(c2[2212],simde_mm_xor_si128(c2[788],simde_mm_xor_si128(c2[1584],simde_mm_xor_si128(c2[628],simde_mm_xor_si128(c2[1427],simde_mm_xor_si128(c2[471],simde_mm_xor_si128(c2[2067],c2[1111])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 10
+     d2[80]=simde_mm_xor_si128(c2[1760],simde_mm_xor_si128(c2[1459],simde_mm_xor_si128(c2[96],c2[919])));
+
+//row: 11
+     d2[88]=simde_mm_xor_si128(c2[4],simde_mm_xor_si128(c2[2245],simde_mm_xor_si128(c2[1280],simde_mm_xor_si128(c2[1760],simde_mm_xor_si128(c2[1938],simde_mm_xor_si128(c2[1458],simde_mm_xor_si128(c2[20],simde_mm_xor_si128(c2[2419],simde_mm_xor_si128(c2[512],simde_mm_xor_si128(c2[359],simde_mm_xor_si128(c2[199],simde_mm_xor_si128(c2[213],simde_mm_xor_si128(c2[2133],simde_mm_xor_si128(c2[534],simde_mm_xor_si128(c2[707],simde_mm_xor_si128(c2[1830],simde_mm_xor_si128(c2[1029],simde_mm_xor_si128(c2[869],simde_mm_xor_si128(c2[723],simde_mm_xor_si128(c2[82],simde_mm_xor_si128(c2[2481],simde_mm_xor_si128(c2[743],simde_mm_xor_si128(c2[1537],simde_mm_xor_si128(c2[1536],simde_mm_xor_si128(c2[1376],simde_mm_xor_si128(c2[597],simde_mm_xor_si128(c2[2358],simde_mm_xor_si128(c2[2198],simde_mm_xor_si128(c2[2038],simde_mm_xor_si128(c2[1249],simde_mm_xor_si128(c2[1250],simde_mm_xor_si128(c2[1732],simde_mm_xor_si128(c2[1572],simde_mm_xor_si128(c2[784],simde_mm_xor_si128(c2[627],simde_mm_xor_si128(c2[1427],simde_mm_xor_si128(c2[1267],c2[469])))))))))))))))))))))))))))))))))))));
+
+//row: 12
+     d2[96]=simde_mm_xor_si128(c2[1121],simde_mm_xor_si128(c2[961],simde_mm_xor_si128(c2[643],simde_mm_xor_si128(c2[2245],simde_mm_xor_si128(c2[496],simde_mm_xor_si128(c2[336],simde_mm_xor_si128(c2[2423],simde_mm_xor_si128(c2[817],simde_mm_xor_si128(c2[2419],simde_mm_xor_si128(c2[1637],simde_mm_xor_si128(c2[1477],simde_mm_xor_si128(c2[1156],simde_mm_xor_si128(c2[1330],simde_mm_xor_si128(c2[1170],simde_mm_xor_si128(c2[531],simde_mm_xor_si128(c2[1491],simde_mm_xor_si128(c2[2454],simde_mm_xor_si128(c2[1664],simde_mm_xor_si128(c2[228],simde_mm_xor_si128(c2[1826],simde_mm_xor_si128(c2[1680],simde_mm_xor_si128(c2[887],simde_mm_xor_si128(c2[1860],simde_mm_xor_si128(c2[1700],simde_mm_xor_si128(c2[2502],simde_mm_xor_si128(c2[2341],simde_mm_xor_si128(c2[1554],simde_mm_xor_si128(c2[596],simde_mm_xor_si128(c2[2214],simde_mm_xor_si128(c2[2215],simde_mm_xor_si128(c2[2529],simde_mm_xor_si128(c2[1909],simde_mm_xor_si128(c2[1749],simde_mm_xor_si128(c2[1584],c2[2224]))))))))))))))))))))))))))))))))));
+
+//row: 13
+     d2[104]=simde_mm_xor_si128(c2[966],simde_mm_xor_si128(c2[640],simde_mm_xor_si128(c2[2242],simde_mm_xor_si128(c2[485],simde_mm_xor_si128(c2[341],simde_mm_xor_si128(c2[2420],simde_mm_xor_si128(c2[982],simde_mm_xor_si128(c2[822],simde_mm_xor_si128(c2[336],simde_mm_xor_si128(c2[1474],simde_mm_xor_si128(c2[1313],simde_mm_xor_si128(c2[1153],simde_mm_xor_si128(c2[1175],simde_mm_xor_si128(c2[528],simde_mm_xor_si128(c2[1488],simde_mm_xor_si128(c2[1669],simde_mm_xor_si128(c2[225],simde_mm_xor_si128(c2[1991],simde_mm_xor_si128(c2[1831],simde_mm_xor_si128(c2[1685],simde_mm_xor_si128(c2[1044],simde_mm_xor_si128(c2[884],simde_mm_xor_si128(c2[1697],simde_mm_xor_si128(c2[2499],simde_mm_xor_si128(c2[2498],simde_mm_xor_si128(c2[2338],simde_mm_xor_si128(c2[1559],simde_mm_xor_si128(c2[753],simde_mm_xor_si128(c2[593],simde_mm_xor_si128(c2[2211],simde_mm_xor_si128(c2[2212],simde_mm_xor_si128(c2[135],simde_mm_xor_si128(c2[2534],simde_mm_xor_si128(c2[1090],simde_mm_xor_si128(c2[1746],simde_mm_xor_si128(c2[1589],simde_mm_xor_si128(c2[2389],c2[2229])))))))))))))))))))))))))))))))))))));
+
+//row: 14
+     d2[112]=simde_mm_xor_si128(c2[1920],simde_mm_xor_si128(c2[1760],simde_mm_xor_si128(c2[7],simde_mm_xor_si128(c2[1442],simde_mm_xor_si128(c2[2240],simde_mm_xor_si128(c2[485],simde_mm_xor_si128(c2[1283],simde_mm_xor_si128(c2[1303],simde_mm_xor_si128(c2[1143],simde_mm_xor_si128(c2[1941],simde_mm_xor_si128(c2[663],simde_mm_xor_si128(c2[1461],simde_mm_xor_si128(c2[23],simde_mm_xor_si128(c2[1616],simde_mm_xor_si128(c2[2422],simde_mm_xor_si128(c2[503],simde_mm_xor_si128(c2[2436],simde_mm_xor_si128(c2[2276],simde_mm_xor_si128(c2[515],simde_mm_xor_si128(c2[354],simde_mm_xor_si128(c2[1955],simde_mm_xor_si128(c2[194],simde_mm_xor_si128(c2[2129],simde_mm_xor_si128(c2[1969],simde_mm_xor_si128(c2[208],simde_mm_xor_si128(c2[1330],simde_mm_xor_si128(c2[2128],simde_mm_xor_si128(c2[2290],simde_mm_xor_si128(c2[529],simde_mm_xor_si128(c2[2471],simde_mm_xor_si128(c2[710],simde_mm_xor_si128(c2[1027],simde_mm_xor_si128(c2[1825],simde_mm_xor_si128(c2[1024],simde_mm_xor_si128(c2[66],simde_mm_xor_si128(c2[864],simde_mm_xor_si128(c2[2487],simde_mm_xor_si128(c2[726],simde_mm_xor_si128(c2[85],simde_mm_xor_si128(c2[1686],simde_mm_xor_si128(c2[2484],simde_mm_xor_si128(c2[100],simde_mm_xor_si128(c2[2499],simde_mm_xor_si128(c2[738],simde_mm_xor_si128(c2[742],simde_mm_xor_si128(c2[1540],simde_mm_xor_si128(c2[1539],simde_mm_xor_si128(c2[581],simde_mm_xor_si128(c2[1379],simde_mm_xor_si128(c2[257],simde_mm_xor_si128(c2[2353],simde_mm_xor_si128(c2[592],simde_mm_xor_si128(c2[2353],simde_mm_xor_si128(c2[1395],simde_mm_xor_si128(c2[2193],simde_mm_xor_si128(c2[454],simde_mm_xor_si128(c2[1252],simde_mm_xor_si128(c2[455],simde_mm_xor_si128(c2[1253],simde_mm_xor_si128(c2[1735],simde_mm_xor_si128(c2[769],simde_mm_xor_si128(c2[1575],simde_mm_xor_si128(c2[149],simde_mm_xor_si128(c2[2548],simde_mm_xor_si128(c2[787],simde_mm_xor_si128(c2[2391],simde_mm_xor_si128(c2[630],simde_mm_xor_si128(c2[1430],simde_mm_xor_si128(c2[464],c2[1270])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 15
+     d2[120]=simde_mm_xor_si128(c2[2245],simde_mm_xor_si128(c2[1123],simde_mm_xor_si128(c2[2085],simde_mm_xor_si128(c2[805],simde_mm_xor_si128(c2[1767],simde_mm_xor_si128(c2[2407],simde_mm_xor_si128(c2[802],simde_mm_xor_si128(c2[483],simde_mm_xor_si128(c2[1620],simde_mm_xor_si128(c2[498],simde_mm_xor_si128(c2[1460],simde_mm_xor_si128(c2[18],simde_mm_xor_si128(c2[980],simde_mm_xor_si128(c2[979],simde_mm_xor_si128(c2[1941],simde_mm_xor_si128(c2[194],simde_mm_xor_si128(c2[1639],simde_mm_xor_si128(c2[34],simde_mm_xor_si128(c2[1318],simde_mm_xor_si128(c2[2272],simde_mm_xor_si128(c2[2454],simde_mm_xor_si128(c2[1332],simde_mm_xor_si128(c2[2294],simde_mm_xor_si128(c2[693],simde_mm_xor_si128(c2[1655],simde_mm_xor_si128(c2[1653],simde_mm_xor_si128(c2[48],simde_mm_xor_si128(c2[1826],simde_mm_xor_si128(c2[229],simde_mm_xor_si128(c2[390],simde_mm_xor_si128(c2[1344],simde_mm_xor_si128(c2[1988],simde_mm_xor_si128(c2[391],simde_mm_xor_si128(c2[1842],simde_mm_xor_si128(c2[245],simde_mm_xor_si128(c2[1041],simde_mm_xor_si128(c2[2003],simde_mm_xor_si128(c2[417],simde_mm_xor_si128(c2[1862],simde_mm_xor_si128(c2[257],simde_mm_xor_si128(c2[97],simde_mm_xor_si128(c2[1059],simde_mm_xor_si128(c2[2503],simde_mm_xor_si128(c2[898],simde_mm_xor_si128(c2[1716],simde_mm_xor_si128(c2[119],simde_mm_xor_si128(c2[758],simde_mm_xor_si128(c2[1712],simde_mm_xor_si128(c2[2368],simde_mm_xor_si128(c2[771],simde_mm_xor_si128(c2[2369],simde_mm_xor_si128(c2[772],simde_mm_xor_si128(c2[132],simde_mm_xor_si128(c2[1094],simde_mm_xor_si128(c2[466],simde_mm_xor_si128(c2[1911],simde_mm_xor_si128(c2[306],simde_mm_xor_si128(c2[1746],simde_mm_xor_si128(c2[149],simde_mm_xor_si128(c2[2386],c2[789]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 16
+     d2[128]=simde_mm_xor_si128(c2[161],simde_mm_xor_si128(c2[1767],simde_mm_xor_si128(c2[1],simde_mm_xor_si128(c2[1607],simde_mm_xor_si128(c2[1441],simde_mm_xor_si128(c2[2242],simde_mm_xor_si128(c2[1281],simde_mm_xor_si128(c2[1285],simde_mm_xor_si128(c2[324],simde_mm_xor_si128(c2[2103],simde_mm_xor_si128(c2[1142],simde_mm_xor_si128(c2[1943],simde_mm_xor_si128(c2[982],simde_mm_xor_si128(c2[1463],simde_mm_xor_si128(c2[502],simde_mm_xor_si128(c2[2416],simde_mm_xor_si128(c2[1463],simde_mm_xor_si128(c2[1780],simde_mm_xor_si128(c2[677],simde_mm_xor_si128(c2[2275],simde_mm_xor_si128(c2[517],simde_mm_xor_si128(c2[2115],simde_mm_xor_si128(c2[196],simde_mm_xor_si128(c2[1794],simde_mm_xor_si128(c2[370],simde_mm_xor_si128(c2[1968],simde_mm_xor_si128(c2[210],simde_mm_xor_si128(c2[1808],simde_mm_xor_si128(c2[1329],simde_mm_xor_si128(c2[2130],simde_mm_xor_si128(c2[1169],simde_mm_xor_si128(c2[531],simde_mm_xor_si128(c2[2129],simde_mm_xor_si128(c2[2470],simde_mm_xor_si128(c2[704],simde_mm_xor_si128(c2[2310],simde_mm_xor_si128(c2[1827],simde_mm_xor_si128(c2[866],simde_mm_xor_si128(c2[866],simde_mm_xor_si128(c2[2464],simde_mm_xor_si128(c2[2486],simde_mm_xor_si128(c2[720],simde_mm_xor_si128(c2[2326],simde_mm_xor_si128(c2[2486],simde_mm_xor_si128(c2[1525],simde_mm_xor_si128(c2[900],simde_mm_xor_si128(c2[2498],simde_mm_xor_si128(c2[740],simde_mm_xor_si128(c2[2338],simde_mm_xor_si128(c2[741],simde_mm_xor_si128(c2[1542],simde_mm_xor_si128(c2[581],simde_mm_xor_si128(c2[1381],simde_mm_xor_si128(c2[420],simde_mm_xor_si128(c2[2352],simde_mm_xor_si128(c2[594],simde_mm_xor_si128(c2[2192],simde_mm_xor_si128(c2[2195],simde_mm_xor_si128(c2[1234],simde_mm_xor_si128(c2[453],simde_mm_xor_si128(c2[1254],simde_mm_xor_si128(c2[293],simde_mm_xor_si128(c2[1255],simde_mm_xor_si128(c2[294],simde_mm_xor_si128(c2[1569],simde_mm_xor_si128(c2[608],simde_mm_xor_si128(c2[949],simde_mm_xor_si128(c2[2547],simde_mm_xor_si128(c2[789],simde_mm_xor_si128(c2[2387],simde_mm_xor_si128(c2[2390],simde_mm_xor_si128(c2[624],simde_mm_xor_si128(c2[2230],simde_mm_xor_si128(c2[1264],simde_mm_xor_si128(c2[311],c2[2384])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 17
+     d2[136]=simde_mm_xor_si128(c2[1767],simde_mm_xor_si128(c2[1444],simde_mm_xor_si128(c2[1607],simde_mm_xor_si128(c2[1284],simde_mm_xor_si128(c2[1126],simde_mm_xor_si128(c2[1281],simde_mm_xor_si128(c2[966],simde_mm_xor_si128(c2[324],simde_mm_xor_si128(c2[1],simde_mm_xor_si128(c2[1142],simde_mm_xor_si128(c2[819],simde_mm_xor_si128(c2[982],simde_mm_xor_si128(c2[659],simde_mm_xor_si128(c2[502],simde_mm_xor_si128(c2[179],simde_mm_xor_si128(c2[1463],simde_mm_xor_si128(c2[1140],simde_mm_xor_si128(c2[2263],simde_mm_xor_si128(c2[2275],simde_mm_xor_si128(c2[1952],simde_mm_xor_si128(c2[2115],simde_mm_xor_si128(c2[1792],simde_mm_xor_si128(c2[1794],simde_mm_xor_si128(c2[1479],simde_mm_xor_si128(c2[1968],simde_mm_xor_si128(c2[1653],simde_mm_xor_si128(c2[1808],simde_mm_xor_si128(c2[1493],simde_mm_xor_si128(c2[1014],simde_mm_xor_si128(c2[1169],simde_mm_xor_si128(c2[854],simde_mm_xor_si128(c2[2129],simde_mm_xor_si128(c2[1814],simde_mm_xor_si128(c2[2147],simde_mm_xor_si128(c2[2310],simde_mm_xor_si128(c2[1987],simde_mm_xor_si128(c2[866],simde_mm_xor_si128(c2[551],simde_mm_xor_si128(c2[2464],simde_mm_xor_si128(c2[2149],simde_mm_xor_si128(c2[2163],simde_mm_xor_si128(c2[2326],simde_mm_xor_si128(c2[2003],simde_mm_xor_si128(c2[1525],simde_mm_xor_si128(c2[1202],simde_mm_xor_si128(c2[2007],simde_mm_xor_si128(c2[2498],simde_mm_xor_si128(c2[2183],simde_mm_xor_si128(c2[2338],simde_mm_xor_si128(c2[2023],simde_mm_xor_si128(c2[418],simde_mm_xor_si128(c2[581],simde_mm_xor_si128(c2[258],simde_mm_xor_si128(c2[420],simde_mm_xor_si128(c2[97],simde_mm_xor_si128(c2[2037],simde_mm_xor_si128(c2[2192],simde_mm_xor_si128(c2[1877],simde_mm_xor_si128(c2[1234],simde_mm_xor_si128(c2[919],simde_mm_xor_si128(c2[130],simde_mm_xor_si128(c2[293],simde_mm_xor_si128(c2[2529],simde_mm_xor_si128(c2[294],simde_mm_xor_si128(c2[2530],simde_mm_xor_si128(c2[608],simde_mm_xor_si128(c2[293],simde_mm_xor_si128(c2[2547],simde_mm_xor_si128(c2[2224],simde_mm_xor_si128(c2[2387],simde_mm_xor_si128(c2[2064],simde_mm_xor_si128(c2[2067],simde_mm_xor_si128(c2[2230],simde_mm_xor_si128(c2[1907],simde_mm_xor_si128(c2[311],c2[2547])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 18
+     d2[144]=simde_mm_xor_si128(c2[1925],simde_mm_xor_si128(c2[420],c2[435]));
+
+//row: 19
+     d2[152]=simde_mm_xor_si128(c2[164],simde_mm_xor_si128(c2[2405],simde_mm_xor_si128(c2[1440],simde_mm_xor_si128(c2[1125],simde_mm_xor_si128(c2[2098],simde_mm_xor_si128(c2[1618],simde_mm_xor_si128(c2[20],simde_mm_xor_si128(c2[657],simde_mm_xor_si128(c2[672],simde_mm_xor_si128(c2[359],simde_mm_xor_si128(c2[373],simde_mm_xor_si128(c2[2293],simde_mm_xor_si128(c2[694],simde_mm_xor_si128(c2[867],simde_mm_xor_si128(c2[1990],simde_mm_xor_si128(c2[1029],simde_mm_xor_si128(c2[883],simde_mm_xor_si128(c2[82],simde_mm_xor_si128(c2[903],simde_mm_xor_si128(c2[1697],simde_mm_xor_si128(c2[1536],simde_mm_xor_si128(c2[757],simde_mm_xor_si128(c2[2358],simde_mm_xor_si128(c2[1409],simde_mm_xor_si128(c2[1410],simde_mm_xor_si128(c2[1732],simde_mm_xor_si128(c2[944],simde_mm_xor_si128(c2[787],c2[1427]))))))))))))))))))))))))))));
+
+//row: 20
+     d2[160]=simde_mm_xor_si128(c2[962],simde_mm_xor_si128(c2[802],simde_mm_xor_si128(c2[484],simde_mm_xor_si128(c2[2086],simde_mm_xor_si128(c2[337],simde_mm_xor_si128(c2[177],simde_mm_xor_si128(c2[2256],simde_mm_xor_si128(c2[658],simde_mm_xor_si128(c2[1617],simde_mm_xor_si128(c2[1478],simde_mm_xor_si128(c2[1318],simde_mm_xor_si128(c2[997],simde_mm_xor_si128(c2[1171],simde_mm_xor_si128(c2[1011],simde_mm_xor_si128(c2[372],simde_mm_xor_si128(c2[1332],simde_mm_xor_si128(c2[1505],simde_mm_xor_si128(c2[69],simde_mm_xor_si128(c2[1667],simde_mm_xor_si128(c2[1510],simde_mm_xor_si128(c2[1521],simde_mm_xor_si128(c2[720],simde_mm_xor_si128(c2[1701],simde_mm_xor_si128(c2[1541],simde_mm_xor_si128(c2[2343],simde_mm_xor_si128(c2[2182],simde_mm_xor_si128(c2[1395],simde_mm_xor_si128(c2[437],simde_mm_xor_si128(c2[2055],simde_mm_xor_si128(c2[2048],simde_mm_xor_si128(c2[2370],simde_mm_xor_si128(c2[1750],simde_mm_xor_si128(c2[1590],simde_mm_xor_si128(c2[1425],c2[2065]))))))))))))))))))))))))))))))))));
+
+//row: 21
+     d2[168]=simde_mm_xor_si128(c2[1605],simde_mm_xor_si128(c2[1287],simde_mm_xor_si128(c2[322],simde_mm_xor_si128(c2[1924],simde_mm_xor_si128(c2[980],simde_mm_xor_si128(c2[500],simde_mm_xor_si128(c2[1621],simde_mm_xor_si128(c2[1461],simde_mm_xor_si128(c2[2113],simde_mm_xor_si128(c2[1952],simde_mm_xor_si128(c2[1792],simde_mm_xor_si128(c2[1814],simde_mm_xor_si128(c2[1175],simde_mm_xor_si128(c2[2135],simde_mm_xor_si128(c2[2308],simde_mm_xor_si128(c2[864],simde_mm_xor_si128(c2[71],simde_mm_xor_si128(c2[2470],simde_mm_xor_si128(c2[2324],simde_mm_xor_si128(c2[1683],simde_mm_xor_si128(c2[1523],simde_mm_xor_si128(c2[2336],simde_mm_xor_si128(c2[579],simde_mm_xor_si128(c2[578],simde_mm_xor_si128(c2[418],simde_mm_xor_si128(c2[2198],simde_mm_xor_si128(c2[1392],simde_mm_xor_si128(c2[1232],simde_mm_xor_si128(c2[291],simde_mm_xor_si128(c2[292],simde_mm_xor_si128(c2[774],simde_mm_xor_si128(c2[614],simde_mm_xor_si128(c2[1730],simde_mm_xor_si128(c2[2385],simde_mm_xor_si128(c2[2228],simde_mm_xor_si128(c2[469],c2[309]))))))))))))))))))))))))))))))))))));
+
+//row: 22
+     d2[176]=simde_mm_xor_si128(c2[2261],c2[2435]);
+
+//row: 23
+     d2[184]=simde_mm_xor_si128(c2[1121],simde_mm_xor_si128(c2[1814],c2[2326]));
+
+//row: 24
+     d2[192]=simde_mm_xor_si128(c2[2258],simde_mm_xor_si128(c2[1792],c2[1424]));
+
+//row: 25
+     d2[200]=simde_mm_xor_si128(c2[646],c2[2001]);
+
+//row: 26
+     d2[208]=simde_mm_xor_si128(c2[1442],simde_mm_xor_si128(c2[1282],simde_mm_xor_si128(c2[320],simde_mm_xor_si128(c2[1124],simde_mm_xor_si128(c2[964],simde_mm_xor_si128(c2[2],simde_mm_xor_si128(c2[7],simde_mm_xor_si128(c2[1604],simde_mm_xor_si128(c2[817],simde_mm_xor_si128(c2[657],simde_mm_xor_si128(c2[2262],simde_mm_xor_si128(c2[177],simde_mm_xor_si128(c2[1782],simde_mm_xor_si128(c2[336],simde_mm_xor_si128(c2[1138],simde_mm_xor_si128(c2[176],simde_mm_xor_si128(c2[1958],simde_mm_xor_si128(c2[1798],simde_mm_xor_si128(c2[836],simde_mm_xor_si128(c2[675],simde_mm_xor_si128(c2[1477],simde_mm_xor_si128(c2[515],simde_mm_xor_si128(c2[2113],simde_mm_xor_si128(c2[1651],simde_mm_xor_si128(c2[1491],simde_mm_xor_si128(c2[529],simde_mm_xor_si128(c2[1012],simde_mm_xor_si128(c2[852],simde_mm_xor_si128(c2[2449],simde_mm_xor_si128(c2[1812],simde_mm_xor_si128(c2[850],simde_mm_xor_si128(c2[2145],simde_mm_xor_si128(c2[1985],simde_mm_xor_si128(c2[1031],simde_mm_xor_si128(c2[549],simde_mm_xor_si128(c2[2146],simde_mm_xor_si128(c2[1345],simde_mm_xor_si128(c2[2147],simde_mm_xor_si128(c2[1185],simde_mm_xor_si128(c2[2161],simde_mm_xor_si128(c2[2001],simde_mm_xor_si128(c2[1047],simde_mm_xor_si128(c2[406],simde_mm_xor_si128(c2[1200],simde_mm_xor_si128(c2[246],simde_mm_xor_si128(c2[2181],simde_mm_xor_si128(c2[2021],simde_mm_xor_si128(c2[1059],simde_mm_xor_si128(c2[416],simde_mm_xor_si128(c2[256],simde_mm_xor_si128(c2[1861],simde_mm_xor_si128(c2[1860],simde_mm_xor_si128(c2[103],simde_mm_xor_si128(c2[1700],simde_mm_xor_si128(c2[2035],simde_mm_xor_si128(c2[1875],simde_mm_xor_si128(c2[913],simde_mm_xor_si128(c2[115],simde_mm_xor_si128(c2[917],simde_mm_xor_si128(c2[2514],simde_mm_xor_si128(c2[2512],simde_mm_xor_si128(c2[128],simde_mm_xor_si128(c2[2535],simde_mm_xor_si128(c2[1573],simde_mm_xor_si128(c2[2528],simde_mm_xor_si128(c2[1574],simde_mm_xor_si128(c2[2048],simde_mm_xor_si128(c2[291],simde_mm_xor_si128(c2[1888],simde_mm_xor_si128(c2[2230],simde_mm_xor_si128(c2[2070],simde_mm_xor_si128(c2[1108],simde_mm_xor_si128(c2[2065],simde_mm_xor_si128(c2[1905],simde_mm_xor_si128(c2[951],simde_mm_xor_si128(c2[1751],simde_mm_xor_si128(c2[2545],c2[1591])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 27
+     d2[216]=simde_mm_xor_si128(c2[1280],c2[1217]);
+
+//row: 28
+     d2[224]=simde_mm_xor_si128(c2[342],simde_mm_xor_si128(c2[838],c2[1200]));
+
+//row: 29
+     d2[232]=simde_mm_xor_si128(c2[321],c2[1985]);
+
+//row: 30
+     d2[240]=simde_mm_xor_si128(c2[1156],simde_mm_xor_si128(c2[87],simde_mm_xor_si128(c2[1552],c2[789])));
+
+//row: 31
+     d2[248]=simde_mm_xor_si128(c2[1440],simde_mm_xor_si128(c2[1122],simde_mm_xor_si128(c2[165],simde_mm_xor_si128(c2[823],simde_mm_xor_si128(c2[343],simde_mm_xor_si128(c2[1456],simde_mm_xor_si128(c2[1296],simde_mm_xor_si128(c2[1622],simde_mm_xor_si128(c2[1956],simde_mm_xor_si128(c2[1795],simde_mm_xor_si128(c2[1635],simde_mm_xor_si128(c2[1649],simde_mm_xor_si128(c2[1010],simde_mm_xor_si128(c2[1970],simde_mm_xor_si128(c2[2151],simde_mm_xor_si128(c2[707],simde_mm_xor_si128(c2[2465],simde_mm_xor_si128(c2[2305],simde_mm_xor_si128(c2[2167],simde_mm_xor_si128(c2[1526],simde_mm_xor_si128(c2[1366],simde_mm_xor_si128(c2[2179],simde_mm_xor_si128(c2[422],simde_mm_xor_si128(c2[421],simde_mm_xor_si128(c2[261],simde_mm_xor_si128(c2[2033],simde_mm_xor_si128(c2[1235],simde_mm_xor_si128(c2[1075],simde_mm_xor_si128(c2[134],simde_mm_xor_si128(c2[135],simde_mm_xor_si128(c2[609],simde_mm_xor_si128(c2[449],simde_mm_xor_si128(c2[2228],simde_mm_xor_si128(c2[2071],simde_mm_xor_si128(c2[304],c2[144])))))))))))))))))))))))))))))))))));
+
+//row: 32
+     d2[256]=simde_mm_xor_si128(c2[2402],simde_mm_xor_si128(c2[2242],simde_mm_xor_si128(c2[2084],simde_mm_xor_si128(c2[1924],simde_mm_xor_si128(c2[967],simde_mm_xor_si128(c2[327],simde_mm_xor_si128(c2[1777],simde_mm_xor_si128(c2[1617],simde_mm_xor_si128(c2[1137],simde_mm_xor_si128(c2[2098],simde_mm_xor_si128(c2[359],simde_mm_xor_si128(c2[199],simde_mm_xor_si128(c2[2437],simde_mm_xor_si128(c2[52],simde_mm_xor_si128(c2[2451],simde_mm_xor_si128(c2[1972],simde_mm_xor_si128(c2[1812],simde_mm_xor_si128(c2[213],simde_mm_xor_si128(c2[546],simde_mm_xor_si128(c2[386],simde_mm_xor_si128(c2[1509],simde_mm_xor_si128(c2[548],simde_mm_xor_si128(c2[562],simde_mm_xor_si128(c2[402],simde_mm_xor_si128(c2[2160],simde_mm_xor_si128(c2[2002],simde_mm_xor_si128(c2[582],simde_mm_xor_si128(c2[422],simde_mm_xor_si128(c2[1376],simde_mm_xor_si128(c2[1216],simde_mm_xor_si128(c2[1063],simde_mm_xor_si128(c2[436],simde_mm_xor_si128(c2[276],simde_mm_xor_si128(c2[1877],simde_mm_xor_si128(c2[1088],simde_mm_xor_si128(c2[928],simde_mm_xor_si128(c2[929],simde_mm_xor_si128(c2[1251],simde_mm_xor_si128(c2[631],simde_mm_xor_si128(c2[471],simde_mm_xor_si128(c2[466],simde_mm_xor_si128(c2[306],c2[946]))))))))))))))))))))))))))))))))))))))))));
+
+//row: 33
+     d2[264]=simde_mm_xor_si128(c2[487],simde_mm_xor_si128(c2[161],simde_mm_xor_si128(c2[1763],simde_mm_xor_si128(c2[2421],simde_mm_xor_si128(c2[1941],simde_mm_xor_si128(c2[343],simde_mm_xor_si128(c2[995],simde_mm_xor_si128(c2[674],simde_mm_xor_si128(c2[672],simde_mm_xor_si128(c2[688],simde_mm_xor_si128(c2[49],simde_mm_xor_si128(c2[1009],simde_mm_xor_si128(c2[1190],simde_mm_xor_si128(c2[2305],simde_mm_xor_si128(c2[1344],simde_mm_xor_si128(c2[1206],simde_mm_xor_si128(c2[405],simde_mm_xor_si128(c2[1218],simde_mm_xor_si128(c2[2020],simde_mm_xor_si128(c2[1859],simde_mm_xor_si128(c2[1072],simde_mm_xor_si128(c2[114],simde_mm_xor_si128(c2[754],simde_mm_xor_si128(c2[1732],simde_mm_xor_si128(c2[1733],simde_mm_xor_si128(c2[2055],simde_mm_xor_si128(c2[1267],simde_mm_xor_si128(c2[1110],c2[1750]))))))))))))))))))))))))))));
+
+//row: 34
+     d2[272]=simde_mm_xor_si128(c2[2245],simde_mm_xor_si128(c2[2085],simde_mm_xor_si128(c2[1922],simde_mm_xor_si128(c2[1927],simde_mm_xor_si128(c2[1767],simde_mm_xor_si128(c2[1604],simde_mm_xor_si128(c2[802],simde_mm_xor_si128(c2[647],simde_mm_xor_si128(c2[481],simde_mm_xor_si128(c2[1620],simde_mm_xor_si128(c2[1460],simde_mm_xor_si128(c2[1297],simde_mm_xor_si128(c2[980],simde_mm_xor_si128(c2[817],simde_mm_xor_si128(c2[1938],simde_mm_xor_si128(c2[1941],simde_mm_xor_si128(c2[1778],simde_mm_xor_si128(c2[194],simde_mm_xor_si128(c2[34],simde_mm_xor_si128(c2[2438],simde_mm_xor_si128(c2[2277],simde_mm_xor_si128(c2[2272],simde_mm_xor_si128(c2[2117],simde_mm_xor_si128(c2[2454],simde_mm_xor_si128(c2[2294],simde_mm_xor_si128(c2[2131],simde_mm_xor_si128(c2[1815],simde_mm_xor_si128(c2[1655],simde_mm_xor_si128(c2[1492],simde_mm_xor_si128(c2[48],simde_mm_xor_si128(c2[2452],simde_mm_xor_si128(c2[389],simde_mm_xor_si128(c2[229],simde_mm_xor_si128(c2[66],simde_mm_xor_si128(c2[1344],simde_mm_xor_si128(c2[1189],simde_mm_xor_si128(c2[388],simde_mm_xor_si128(c2[391],simde_mm_xor_si128(c2[228],simde_mm_xor_si128(c2[405],simde_mm_xor_si128(c2[245],simde_mm_xor_si128(c2[82],simde_mm_xor_si128(c2[2000],simde_mm_xor_si128(c2[2003],simde_mm_xor_si128(c2[1840],simde_mm_xor_si128(c2[417],simde_mm_xor_si128(c2[257],simde_mm_xor_si128(c2[102],simde_mm_xor_si128(c2[1219],simde_mm_xor_si128(c2[1059],simde_mm_xor_si128(c2[896],simde_mm_xor_si128(c2[903],simde_mm_xor_si128(c2[898],simde_mm_xor_si128(c2[743],simde_mm_xor_si128(c2[279],simde_mm_xor_si128(c2[119],simde_mm_xor_si128(c2[2515],simde_mm_xor_si128(c2[1717],simde_mm_xor_si128(c2[1712],simde_mm_xor_si128(c2[1557],simde_mm_xor_si128(c2[931],simde_mm_xor_si128(c2[771],simde_mm_xor_si128(c2[608],simde_mm_xor_si128(c2[772],simde_mm_xor_si128(c2[609],simde_mm_xor_si128(c2[1091],simde_mm_xor_si128(c2[1094],simde_mm_xor_si128(c2[931],simde_mm_xor_si128(c2[466],simde_mm_xor_si128(c2[306],simde_mm_xor_si128(c2[151],simde_mm_xor_si128(c2[309],simde_mm_xor_si128(c2[149],simde_mm_xor_si128(c2[2545],simde_mm_xor_si128(c2[786],simde_mm_xor_si128(c2[789],c2[626]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 35
+     d2[280]=simde_mm_xor_si128(c2[1284],simde_mm_xor_si128(c2[1124],simde_mm_xor_si128(c2[806],simde_mm_xor_si128(c2[2400],simde_mm_xor_si128(c2[659],simde_mm_xor_si128(c2[499],simde_mm_xor_si128(c2[19],simde_mm_xor_si128(c2[980],simde_mm_xor_si128(c2[1459],simde_mm_xor_si128(c2[1792],simde_mm_xor_si128(c2[1632],simde_mm_xor_si128(c2[1319],simde_mm_xor_si128(c2[1493],simde_mm_xor_si128(c2[1333],simde_mm_xor_si128(c2[694],simde_mm_xor_si128(c2[1654],simde_mm_xor_si128(c2[1827],simde_mm_xor_si128(c2[391],simde_mm_xor_si128(c2[1989],simde_mm_xor_si128(c2[1843],simde_mm_xor_si128(c2[1042],simde_mm_xor_si128(c2[1362],simde_mm_xor_si128(c2[2023],simde_mm_xor_si128(c2[1863],simde_mm_xor_si128(c2[98],simde_mm_xor_si128(c2[2496],simde_mm_xor_si128(c2[1717],simde_mm_xor_si128(c2[759],simde_mm_xor_si128(c2[2369],simde_mm_xor_si128(c2[2370],simde_mm_xor_si128(c2[133],simde_mm_xor_si128(c2[2064],simde_mm_xor_si128(c2[1904],simde_mm_xor_si128(c2[1747],c2[2387]))))))))))))))))))))))))))))))))));
+
+//row: 36
+     d2[288]=simde_mm_xor_si128(c2[1920],simde_mm_xor_si128(c2[994],c2[1713]));
+
+//row: 37
+     d2[296]=simde_mm_xor_si128(c2[486],simde_mm_xor_si128(c2[2401],simde_mm_xor_si128(c2[160],simde_mm_xor_si128(c2[2083],simde_mm_xor_si128(c2[1762],simde_mm_xor_si128(c2[1126],simde_mm_xor_si128(c2[2420],simde_mm_xor_si128(c2[1776],simde_mm_xor_si128(c2[1940],simde_mm_xor_si128(c2[1296],simde_mm_xor_si128(c2[2417],simde_mm_xor_si128(c2[342],simde_mm_xor_si128(c2[2257],simde_mm_xor_si128(c2[994],simde_mm_xor_si128(c2[358],simde_mm_xor_si128(c2[197],simde_mm_xor_si128(c2[673],simde_mm_xor_si128(c2[37],simde_mm_xor_si128(c2[695],simde_mm_xor_si128(c2[51],simde_mm_xor_si128(c2[48],simde_mm_xor_si128(c2[1971],simde_mm_xor_si128(c2[1008],simde_mm_xor_si128(c2[372],simde_mm_xor_si128(c2[1189],simde_mm_xor_si128(c2[545],simde_mm_xor_si128(c2[2304],simde_mm_xor_si128(c2[1668],simde_mm_xor_si128(c2[867],simde_mm_xor_si128(c2[1351],simde_mm_xor_si128(c2[707],simde_mm_xor_si128(c2[1205],simde_mm_xor_si128(c2[561],simde_mm_xor_si128(c2[2487],simde_mm_xor_si128(c2[404],simde_mm_xor_si128(c2[2327],simde_mm_xor_si128(c2[1217],simde_mm_xor_si128(c2[581],simde_mm_xor_si128(c2[2019],simde_mm_xor_si128(c2[1383],simde_mm_xor_si128(c2[1382],simde_mm_xor_si128(c2[1858],simde_mm_xor_si128(c2[1222],simde_mm_xor_si128(c2[1079],simde_mm_xor_si128(c2[435],simde_mm_xor_si128(c2[2196],simde_mm_xor_si128(c2[113],simde_mm_xor_si128(c2[2036],simde_mm_xor_si128(c2[1731],simde_mm_xor_si128(c2[1095],simde_mm_xor_si128(c2[1732],simde_mm_xor_si128(c2[1088],simde_mm_xor_si128(c2[1570],simde_mm_xor_si128(c2[2054],simde_mm_xor_si128(c2[1410],simde_mm_xor_si128(c2[1266],simde_mm_xor_si128(c2[630],simde_mm_xor_si128(c2[1109],simde_mm_xor_si128(c2[465],simde_mm_xor_si128(c2[1265],simde_mm_xor_si128(c2[1749],c2[1105])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 38
+     d2[304]=simde_mm_xor_si128(c2[2402],simde_mm_xor_si128(c2[2242],simde_mm_xor_si128(c2[1924],simde_mm_xor_si128(c2[967],simde_mm_xor_si128(c2[1777],simde_mm_xor_si128(c2[1617],simde_mm_xor_si128(c2[1137],simde_mm_xor_si128(c2[2098],simde_mm_xor_si128(c2[2417],simde_mm_xor_si128(c2[359],simde_mm_xor_si128(c2[199],simde_mm_xor_si128(c2[2437],simde_mm_xor_si128(c2[52],simde_mm_xor_si128(c2[2451],simde_mm_xor_si128(c2[1812],simde_mm_xor_si128(c2[213],simde_mm_xor_si128(c2[386],simde_mm_xor_si128(c2[1509],simde_mm_xor_si128(c2[548],simde_mm_xor_si128(c2[402],simde_mm_xor_si128(c2[2160],simde_mm_xor_si128(c2[404],simde_mm_xor_si128(c2[582],simde_mm_xor_si128(c2[422],simde_mm_xor_si128(c2[1216],simde_mm_xor_si128(c2[1063],simde_mm_xor_si128(c2[276],simde_mm_xor_si128(c2[1877],simde_mm_xor_si128(c2[928],simde_mm_xor_si128(c2[929],simde_mm_xor_si128(c2[1251],simde_mm_xor_si128(c2[631],simde_mm_xor_si128(c2[471],simde_mm_xor_si128(c2[306],c2[946]))))))))))))))))))))))))))))))))));
+
+//row: 39
+     d2[312]=simde_mm_xor_si128(c2[1762],simde_mm_xor_si128(c2[1602],simde_mm_xor_si128(c2[1444],simde_mm_xor_si128(c2[1284],simde_mm_xor_si128(c2[327],simde_mm_xor_si128(c2[2406],simde_mm_xor_si128(c2[1137],simde_mm_xor_si128(c2[977],simde_mm_xor_si128(c2[497],simde_mm_xor_si128(c2[1458],simde_mm_xor_si128(c2[2278],simde_mm_xor_si128(c2[2118],simde_mm_xor_si128(c2[1797],simde_mm_xor_si128(c2[1971],simde_mm_xor_si128(c2[1811],simde_mm_xor_si128(c2[1332],simde_mm_xor_si128(c2[1172],simde_mm_xor_si128(c2[2132],simde_mm_xor_si128(c2[2465],simde_mm_xor_si128(c2[2305],simde_mm_xor_si128(c2[869],simde_mm_xor_si128(c2[2467],simde_mm_xor_si128(c2[2481],simde_mm_xor_si128(c2[2321],simde_mm_xor_si128(c2[1520],simde_mm_xor_si128(c2[2501],simde_mm_xor_si128(c2[2341],simde_mm_xor_si128(c2[736],simde_mm_xor_si128(c2[576],simde_mm_xor_si128(c2[423],simde_mm_xor_si128(c2[2355],simde_mm_xor_si128(c2[2195],simde_mm_xor_si128(c2[1237],simde_mm_xor_si128(c2[2034],simde_mm_xor_si128(c2[448],simde_mm_xor_si128(c2[288],simde_mm_xor_si128(c2[289],simde_mm_xor_si128(c2[611],simde_mm_xor_si128(c2[2550],simde_mm_xor_si128(c2[2390],simde_mm_xor_si128(c2[2385],simde_mm_xor_si128(c2[2225],c2[306]))))))))))))))))))))))))))))))))))))))))));
+
+//row: 40
+     d2[320]=simde_mm_xor_si128(c2[485],simde_mm_xor_si128(c2[0],simde_mm_xor_si128(c2[167],simde_mm_xor_si128(c2[2241],simde_mm_xor_si128(c2[1761],simde_mm_xor_si128(c2[1284],simde_mm_xor_si128(c2[2419],simde_mm_xor_si128(c2[1942],simde_mm_xor_si128(c2[1939],simde_mm_xor_si128(c2[1462],simde_mm_xor_si128(c2[16],simde_mm_xor_si128(c2[341],simde_mm_xor_si128(c2[2423],simde_mm_xor_si128(c2[993],simde_mm_xor_si128(c2[516],simde_mm_xor_si128(c2[355],simde_mm_xor_si128(c2[672],simde_mm_xor_si128(c2[195],simde_mm_xor_si128(c2[32],simde_mm_xor_si128(c2[694],simde_mm_xor_si128(c2[209],simde_mm_xor_si128(c2[55],simde_mm_xor_si128(c2[2129],simde_mm_xor_si128(c2[1015],simde_mm_xor_si128(c2[530],simde_mm_xor_si128(c2[1188],simde_mm_xor_si128(c2[711],simde_mm_xor_si128(c2[2311],simde_mm_xor_si128(c2[1826],simde_mm_xor_si128(c2[1025],simde_mm_xor_si128(c2[1350],simde_mm_xor_si128(c2[865],simde_mm_xor_si128(c2[1204],simde_mm_xor_si128(c2[727],simde_mm_xor_si128(c2[86],simde_mm_xor_si128(c2[403],simde_mm_xor_si128(c2[2485],simde_mm_xor_si128(c2[1216],simde_mm_xor_si128(c2[739],simde_mm_xor_si128(c2[2018],simde_mm_xor_si128(c2[1541],simde_mm_xor_si128(c2[1540],simde_mm_xor_si128(c2[1857],simde_mm_xor_si128(c2[1380],simde_mm_xor_si128(c2[1078],simde_mm_xor_si128(c2[593],simde_mm_xor_si128(c2[2354],simde_mm_xor_si128(c2[112],simde_mm_xor_si128(c2[2194],simde_mm_xor_si128(c2[1730],simde_mm_xor_si128(c2[1253],simde_mm_xor_si128(c2[1731],simde_mm_xor_si128(c2[1254],simde_mm_xor_si128(c2[1728],simde_mm_xor_si128(c2[2053],simde_mm_xor_si128(c2[1568],simde_mm_xor_si128(c2[1265],simde_mm_xor_si128(c2[788],simde_mm_xor_si128(c2[1108],simde_mm_xor_si128(c2[631],simde_mm_xor_si128(c2[1431],simde_mm_xor_si128(c2[1748],c2[1271]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 41
+     d2[328]=simde_mm_xor_si128(c2[2407],simde_mm_xor_si128(c2[2247],simde_mm_xor_si128(c2[1921],simde_mm_xor_si128(c2[964],simde_mm_xor_si128(c2[1782],simde_mm_xor_si128(c2[1622],simde_mm_xor_si128(c2[1142],simde_mm_xor_si128(c2[2103],simde_mm_xor_si128(c2[176],simde_mm_xor_si128(c2[356],simde_mm_xor_si128(c2[196],simde_mm_xor_si128(c2[2434],simde_mm_xor_si128(c2[49],simde_mm_xor_si128(c2[2448],simde_mm_xor_si128(c2[1809],simde_mm_xor_si128(c2[210],simde_mm_xor_si128(c2[391],simde_mm_xor_si128(c2[1506],simde_mm_xor_si128(c2[545],simde_mm_xor_si128(c2[407],simde_mm_xor_si128(c2[2165],simde_mm_xor_si128(c2[886],simde_mm_xor_si128(c2[579],simde_mm_xor_si128(c2[419],simde_mm_xor_si128(c2[1221],simde_mm_xor_si128(c2[1060],simde_mm_xor_si128(c2[273],simde_mm_xor_si128(c2[1874],simde_mm_xor_si128(c2[933],simde_mm_xor_si128(c2[934],simde_mm_xor_si128(c2[1248],simde_mm_xor_si128(c2[628],simde_mm_xor_si128(c2[468],simde_mm_xor_si128(c2[311],c2[951]))))))))))))))))))))))))))))))))));
+  }
+}
+#endif
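(For context on the pattern repeated throughout these generated encoder files: each `d2[k]` parity block is the XOR accumulation of circulant-shifted message blocks `c2[i]`, and the patch mechanically replaces the x86-only `__m128i`/`_mm_xor_si128` with their SIMDE equivalents so the same generated code also builds on ARM. A minimal sketch of that portable idiom, assuming the SIMDE headers are available; the `xor_blocks_128` helper below is illustrative only and is not part of the patch:

    #include <stdint.h>
    #include <simde/x86/sse2.h>  /* simde__m128i, simde_mm_xor_si128 */

    /* XOR-accumulate n 128-bit blocks of src into dst. SIMDE lowers the
       intrinsic to SSE2 on x86 and to NEON (or scalar code) elsewhere,
       which is why the generated encoders need no per-architecture path. */
    static void xor_blocks_128(uint8_t *dst, const uint8_t *src, int n) {
      simde__m128i *d = (simde__m128i *)dst;
      const simde__m128i *s = (const simde__m128i *)src;
      for (int i = 0; i < n; i++)
        d[i] = simde_mm_xor_si128(d[i], s[i]);
    }

In the actual files the include goes through "PHY/sse_intrin.h", which pulls in SIMDE, so the generated bodies only needed the type and intrinsic renames shown in the hunks below.)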
diff --git a/openair1/PHY/CODING/nrLDPC_encoder/ldpc_BG2_Zc144_byte.c b/openair1/PHY/CODING/nrLDPC_encoder/ldpc_BG2_Zc144_byte.c
index 1e26b3535d5c4435de7da87c0737afeb95b8dc13..4cbc6fdf0cbd6503cebc3d27a3a6be0880afd2a7 100644
--- a/openair1/PHY/CODING/nrLDPC_encoder/ldpc_BG2_Zc144_byte.c
+++ b/openair1/PHY/CODING/nrLDPC_encoder/ldpc_BG2_Zc144_byte.c
@@ -1,9 +1,8 @@
 #include "PHY/sse_intrin.h"
 // generated code for Zc=144, byte encoding
 static inline __attribute__ ((no_sanitize("address", "undefined"))) void ldpc_BG2_Zc144_byte(uint8_t *c,uint8_t *d) {
-  __m128i *csimd=(__m128i *)c,*dsimd=(__m128i *)d;
-
-  __m128i *c2,*d2;
+  simde__m128i *csimd=(simde__m128i *)c,*dsimd=(simde__m128i *)d;
+  simde__m128i *c2,*d2;
 
   int i2;
   for (i2=0; i2<9; i2++) {
@@ -11,129 +10,129 @@ static inline __attribute__ ((no_sanitize("address", "undefined"))) void ldpc_BG
      d2=&dsimd[i2];
 
 //row: 0
-     d2[0]=_mm_xor_si128(c2[360],_mm_xor_si128(c2[361],_mm_xor_si128(c2[2525],_mm_xor_si128(c2[1639],_mm_xor_si128(c2[1644],_mm_xor_si128(c2[1285],_mm_xor_si128(c2[759],_mm_xor_si128(c2[1483],_mm_xor_si128(c2[416],_mm_xor_si128(c2[2399],_mm_xor_si128(c2[2039],_mm_xor_si128(c2[1338],_mm_xor_si128(c2[2418],_mm_xor_si128(c2[973],_mm_xor_si128(c2[274],_mm_xor_si128(c2[630],_mm_xor_si128(c2[475],_mm_xor_si128(c2[653],_mm_xor_si128(c2[1552],_mm_xor_si128(c2[312],_mm_xor_si128(c2[129],_mm_xor_si128(c2[868],_mm_xor_si128(c2[2670],_mm_xor_si128(c2[2851],_mm_xor_si128(c2[2689],_mm_xor_si128(c2[345],c2[2686]))))))))))))))))))))))))));
+     d2[0]=simde_mm_xor_si128(c2[360],simde_mm_xor_si128(c2[361],simde_mm_xor_si128(c2[2525],simde_mm_xor_si128(c2[1639],simde_mm_xor_si128(c2[1644],simde_mm_xor_si128(c2[1285],simde_mm_xor_si128(c2[759],simde_mm_xor_si128(c2[1483],simde_mm_xor_si128(c2[416],simde_mm_xor_si128(c2[2399],simde_mm_xor_si128(c2[2039],simde_mm_xor_si128(c2[1338],simde_mm_xor_si128(c2[2418],simde_mm_xor_si128(c2[973],simde_mm_xor_si128(c2[274],simde_mm_xor_si128(c2[630],simde_mm_xor_si128(c2[475],simde_mm_xor_si128(c2[653],simde_mm_xor_si128(c2[1552],simde_mm_xor_si128(c2[312],simde_mm_xor_si128(c2[129],simde_mm_xor_si128(c2[868],simde_mm_xor_si128(c2[2670],simde_mm_xor_si128(c2[2851],simde_mm_xor_si128(c2[2689],simde_mm_xor_si128(c2[345],c2[2686]))))))))))))))))))))))))));
 
 //row: 1
-     d2[9]=_mm_xor_si128(c2[540],_mm_xor_si128(c2[360],_mm_xor_si128(c2[361],_mm_xor_si128(c2[2525],_mm_xor_si128(c2[1819],_mm_xor_si128(c2[1639],_mm_xor_si128(c2[1644],_mm_xor_si128(c2[1285],_mm_xor_si128(c2[939],_mm_xor_si128(c2[759],_mm_xor_si128(c2[1483],_mm_xor_si128(c2[596],_mm_xor_si128(c2[416],_mm_xor_si128(c2[2399],_mm_xor_si128(c2[2039],_mm_xor_si128(c2[1338],_mm_xor_si128(c2[2418],_mm_xor_si128(c2[973],_mm_xor_si128(c2[274],_mm_xor_si128(c2[630],_mm_xor_si128(c2[655],_mm_xor_si128(c2[475],_mm_xor_si128(c2[653],_mm_xor_si128(c2[1552],_mm_xor_si128(c2[312],_mm_xor_si128(c2[129],_mm_xor_si128(c2[868],_mm_xor_si128(c2[2670],_mm_xor_si128(c2[2851],_mm_xor_si128(c2[2869],_mm_xor_si128(c2[2689],_mm_xor_si128(c2[345],c2[2686]))))))))))))))))))))))))))))))));
+     d2[9]=simde_mm_xor_si128(c2[540],simde_mm_xor_si128(c2[360],simde_mm_xor_si128(c2[361],simde_mm_xor_si128(c2[2525],simde_mm_xor_si128(c2[1819],simde_mm_xor_si128(c2[1639],simde_mm_xor_si128(c2[1644],simde_mm_xor_si128(c2[1285],simde_mm_xor_si128(c2[939],simde_mm_xor_si128(c2[759],simde_mm_xor_si128(c2[1483],simde_mm_xor_si128(c2[596],simde_mm_xor_si128(c2[416],simde_mm_xor_si128(c2[2399],simde_mm_xor_si128(c2[2039],simde_mm_xor_si128(c2[1338],simde_mm_xor_si128(c2[2418],simde_mm_xor_si128(c2[973],simde_mm_xor_si128(c2[274],simde_mm_xor_si128(c2[630],simde_mm_xor_si128(c2[655],simde_mm_xor_si128(c2[475],simde_mm_xor_si128(c2[653],simde_mm_xor_si128(c2[1552],simde_mm_xor_si128(c2[312],simde_mm_xor_si128(c2[129],simde_mm_xor_si128(c2[868],simde_mm_xor_si128(c2[2670],simde_mm_xor_si128(c2[2851],simde_mm_xor_si128(c2[2869],simde_mm_xor_si128(c2[2689],simde_mm_xor_si128(c2[345],c2[2686]))))))))))))))))))))))))))))))));
 
 //row: 2
-     d2[18]=_mm_xor_si128(c2[540],_mm_xor_si128(c2[360],_mm_xor_si128(c2[541],_mm_xor_si128(c2[361],_mm_xor_si128(c2[2525],_mm_xor_si128(c2[1819],_mm_xor_si128(c2[1639],_mm_xor_si128(c2[1644],_mm_xor_si128(c2[1285],_mm_xor_si128(c2[939],_mm_xor_si128(c2[759],_mm_xor_si128(c2[1483],_mm_xor_si128(c2[596],_mm_xor_si128(c2[416],_mm_xor_si128(c2[2579],_mm_xor_si128(c2[2399],_mm_xor_si128(c2[2039],_mm_xor_si128(c2[1518],_mm_xor_si128(c2[1338],_mm_xor_si128(c2[2418],_mm_xor_si128(c2[973],_mm_xor_si128(c2[454],_mm_xor_si128(c2[274],_mm_xor_si128(c2[630],_mm_xor_si128(c2[655],_mm_xor_si128(c2[475],_mm_xor_si128(c2[833],_mm_xor_si128(c2[653],_mm_xor_si128(c2[1552],_mm_xor_si128(c2[492],_mm_xor_si128(c2[312],_mm_xor_si128(c2[129],_mm_xor_si128(c2[1048],_mm_xor_si128(c2[868],_mm_xor_si128(c2[2670],_mm_xor_si128(c2[2851],_mm_xor_si128(c2[2869],_mm_xor_si128(c2[2689],_mm_xor_si128(c2[525],_mm_xor_si128(c2[345],c2[2686]))))))))))))))))))))))))))))))))))))))));
+     d2[18]=simde_mm_xor_si128(c2[540],simde_mm_xor_si128(c2[360],simde_mm_xor_si128(c2[541],simde_mm_xor_si128(c2[361],simde_mm_xor_si128(c2[2525],simde_mm_xor_si128(c2[1819],simde_mm_xor_si128(c2[1639],simde_mm_xor_si128(c2[1644],simde_mm_xor_si128(c2[1285],simde_mm_xor_si128(c2[939],simde_mm_xor_si128(c2[759],simde_mm_xor_si128(c2[1483],simde_mm_xor_si128(c2[596],simde_mm_xor_si128(c2[416],simde_mm_xor_si128(c2[2579],simde_mm_xor_si128(c2[2399],simde_mm_xor_si128(c2[2039],simde_mm_xor_si128(c2[1518],simde_mm_xor_si128(c2[1338],simde_mm_xor_si128(c2[2418],simde_mm_xor_si128(c2[973],simde_mm_xor_si128(c2[454],simde_mm_xor_si128(c2[274],simde_mm_xor_si128(c2[630],simde_mm_xor_si128(c2[655],simde_mm_xor_si128(c2[475],simde_mm_xor_si128(c2[833],simde_mm_xor_si128(c2[653],simde_mm_xor_si128(c2[1552],simde_mm_xor_si128(c2[492],simde_mm_xor_si128(c2[312],simde_mm_xor_si128(c2[129],simde_mm_xor_si128(c2[1048],simde_mm_xor_si128(c2[868],simde_mm_xor_si128(c2[2670],simde_mm_xor_si128(c2[2851],simde_mm_xor_si128(c2[2869],simde_mm_xor_si128(c2[2689],simde_mm_xor_si128(c2[525],simde_mm_xor_si128(c2[345],c2[2686]))))))))))))))))))))))))))))))))))))))));
 
 //row: 3
-     d2[27]=_mm_xor_si128(c2[360],_mm_xor_si128(c2[361],_mm_xor_si128(c2[2525],_mm_xor_si128(c2[1639],_mm_xor_si128(c2[1644],_mm_xor_si128(c2[1465],_mm_xor_si128(c2[1285],_mm_xor_si128(c2[759],_mm_xor_si128(c2[1663],_mm_xor_si128(c2[1483],_mm_xor_si128(c2[416],_mm_xor_si128(c2[2399],_mm_xor_si128(c2[2039],_mm_xor_si128(c2[1338],_mm_xor_si128(c2[2418],_mm_xor_si128(c2[1153],_mm_xor_si128(c2[973],_mm_xor_si128(c2[274],_mm_xor_si128(c2[810],_mm_xor_si128(c2[630],_mm_xor_si128(c2[475],_mm_xor_si128(c2[653],_mm_xor_si128(c2[1732],_mm_xor_si128(c2[1552],_mm_xor_si128(c2[312],_mm_xor_si128(c2[309],_mm_xor_si128(c2[129],_mm_xor_si128(c2[868],_mm_xor_si128(c2[2670],_mm_xor_si128(c2[152],_mm_xor_si128(c2[2851],_mm_xor_si128(c2[2689],_mm_xor_si128(c2[345],_mm_xor_si128(c2[2866],c2[2686]))))))))))))))))))))))))))))))))));
+     d2[27]=simde_mm_xor_si128(c2[360],simde_mm_xor_si128(c2[361],simde_mm_xor_si128(c2[2525],simde_mm_xor_si128(c2[1639],simde_mm_xor_si128(c2[1644],simde_mm_xor_si128(c2[1465],simde_mm_xor_si128(c2[1285],simde_mm_xor_si128(c2[759],simde_mm_xor_si128(c2[1663],simde_mm_xor_si128(c2[1483],simde_mm_xor_si128(c2[416],simde_mm_xor_si128(c2[2399],simde_mm_xor_si128(c2[2039],simde_mm_xor_si128(c2[1338],simde_mm_xor_si128(c2[2418],simde_mm_xor_si128(c2[1153],simde_mm_xor_si128(c2[973],simde_mm_xor_si128(c2[274],simde_mm_xor_si128(c2[810],simde_mm_xor_si128(c2[630],simde_mm_xor_si128(c2[475],simde_mm_xor_si128(c2[653],simde_mm_xor_si128(c2[1732],simde_mm_xor_si128(c2[1552],simde_mm_xor_si128(c2[312],simde_mm_xor_si128(c2[309],simde_mm_xor_si128(c2[129],simde_mm_xor_si128(c2[868],simde_mm_xor_si128(c2[2670],simde_mm_xor_si128(c2[152],simde_mm_xor_si128(c2[2851],simde_mm_xor_si128(c2[2689],simde_mm_xor_si128(c2[345],simde_mm_xor_si128(c2[2866],c2[2686]))))))))))))))))))))))))))))))))));
 
 //row: 4
-     d2[36]=_mm_xor_si128(c2[1083],_mm_xor_si128(c2[903],_mm_xor_si128(c2[904],_mm_xor_si128(c2[180],_mm_xor_si128(c2[1802],_mm_xor_si128(c2[2362],_mm_xor_si128(c2[2182],_mm_xor_si128(c2[2178],_mm_xor_si128(c2[1819],_mm_xor_si128(c2[1459],_mm_xor_si128(c2[1482],_mm_xor_si128(c2[1302],_mm_xor_si128(c2[2017],_mm_xor_si128(c2[1139],_mm_xor_si128(c2[959],_mm_xor_si128(c2[54],_mm_xor_si128(c2[2582],_mm_xor_si128(c2[1872],_mm_xor_si128(c2[73],_mm_xor_si128(c2[1516],_mm_xor_si128(c2[817],_mm_xor_si128(c2[1173],_mm_xor_si128(c2[1189],_mm_xor_si128(c2[1009],_mm_xor_si128(c2[1196],_mm_xor_si128(c2[2095],_mm_xor_si128(c2[846],_mm_xor_si128(c2[672],_mm_xor_si128(c2[1411],_mm_xor_si128(c2[325],_mm_xor_si128(c2[506],_mm_xor_si128(c2[524],_mm_xor_si128(c2[344],_mm_xor_si128(c2[888],c2[350]))))))))))))))))))))))))))))))))));
+     d2[36]=simde_mm_xor_si128(c2[1083],simde_mm_xor_si128(c2[903],simde_mm_xor_si128(c2[904],simde_mm_xor_si128(c2[180],simde_mm_xor_si128(c2[1802],simde_mm_xor_si128(c2[2362],simde_mm_xor_si128(c2[2182],simde_mm_xor_si128(c2[2178],simde_mm_xor_si128(c2[1819],simde_mm_xor_si128(c2[1459],simde_mm_xor_si128(c2[1482],simde_mm_xor_si128(c2[1302],simde_mm_xor_si128(c2[2017],simde_mm_xor_si128(c2[1139],simde_mm_xor_si128(c2[959],simde_mm_xor_si128(c2[54],simde_mm_xor_si128(c2[2582],simde_mm_xor_si128(c2[1872],simde_mm_xor_si128(c2[73],simde_mm_xor_si128(c2[1516],simde_mm_xor_si128(c2[817],simde_mm_xor_si128(c2[1173],simde_mm_xor_si128(c2[1189],simde_mm_xor_si128(c2[1009],simde_mm_xor_si128(c2[1196],simde_mm_xor_si128(c2[2095],simde_mm_xor_si128(c2[846],simde_mm_xor_si128(c2[672],simde_mm_xor_si128(c2[1411],simde_mm_xor_si128(c2[325],simde_mm_xor_si128(c2[506],simde_mm_xor_si128(c2[524],simde_mm_xor_si128(c2[344],simde_mm_xor_si128(c2[888],c2[350]))))))))))))))))))))))))))))))))));
 
 //row: 5
-     d2[45]=_mm_xor_si128(c2[1804],_mm_xor_si128(c2[1624],_mm_xor_si128(c2[1625],_mm_xor_si128(c2[901],_mm_xor_si128(c2[1442],_mm_xor_si128(c2[204],_mm_xor_si128(c2[24],_mm_xor_si128(c2[20],_mm_xor_si128(c2[2540],_mm_xor_si128(c2[2186],_mm_xor_si128(c2[2203],_mm_xor_si128(c2[2023],_mm_xor_si128(c2[2738],_mm_xor_si128(c2[1860],_mm_xor_si128(c2[1680],_mm_xor_si128(c2[775],_mm_xor_si128(c2[415],_mm_xor_si128(c2[2593],_mm_xor_si128(c2[794],_mm_xor_si128(c2[2237],_mm_xor_si128(c2[1538],_mm_xor_si128(c2[1894],_mm_xor_si128(c2[815],_mm_xor_si128(c2[1910],_mm_xor_si128(c2[1730],_mm_xor_si128(c2[1908],_mm_xor_si128(c2[2816],_mm_xor_si128(c2[1567],_mm_xor_si128(c2[1393],_mm_xor_si128(c2[1754],_mm_xor_si128(c2[2132],_mm_xor_si128(c2[1046],_mm_xor_si128(c2[1227],_mm_xor_si128(c2[1245],_mm_xor_si128(c2[1065],_mm_xor_si128(c2[1609],c2[1062]))))))))))))))))))))))))))))))))))));
+     d2[45]=simde_mm_xor_si128(c2[1804],simde_mm_xor_si128(c2[1624],simde_mm_xor_si128(c2[1625],simde_mm_xor_si128(c2[901],simde_mm_xor_si128(c2[1442],simde_mm_xor_si128(c2[204],simde_mm_xor_si128(c2[24],simde_mm_xor_si128(c2[20],simde_mm_xor_si128(c2[2540],simde_mm_xor_si128(c2[2186],simde_mm_xor_si128(c2[2203],simde_mm_xor_si128(c2[2023],simde_mm_xor_si128(c2[2738],simde_mm_xor_si128(c2[1860],simde_mm_xor_si128(c2[1680],simde_mm_xor_si128(c2[775],simde_mm_xor_si128(c2[415],simde_mm_xor_si128(c2[2593],simde_mm_xor_si128(c2[794],simde_mm_xor_si128(c2[2237],simde_mm_xor_si128(c2[1538],simde_mm_xor_si128(c2[1894],simde_mm_xor_si128(c2[815],simde_mm_xor_si128(c2[1910],simde_mm_xor_si128(c2[1730],simde_mm_xor_si128(c2[1908],simde_mm_xor_si128(c2[2816],simde_mm_xor_si128(c2[1567],simde_mm_xor_si128(c2[1393],simde_mm_xor_si128(c2[1754],simde_mm_xor_si128(c2[2132],simde_mm_xor_si128(c2[1046],simde_mm_xor_si128(c2[1227],simde_mm_xor_si128(c2[1245],simde_mm_xor_si128(c2[1065],simde_mm_xor_si128(c2[1609],c2[1062]))))))))))))))))))))))))))))))))))));
 
 //row: 6
-     d2[54]=_mm_xor_si128(c2[2528],_mm_xor_si128(c2[2348],_mm_xor_si128(c2[2340],_mm_xor_si128(c2[1625],_mm_xor_si128(c2[2346],_mm_xor_si128(c2[919],_mm_xor_si128(c2[739],_mm_xor_si128(c2[744],_mm_xor_si128(c2[385],_mm_xor_si128(c2[39],_mm_xor_si128(c2[2738],_mm_xor_si128(c2[583],_mm_xor_si128(c2[2575],_mm_xor_si128(c2[2395],_mm_xor_si128(c2[1499],_mm_xor_si128(c2[1139],_mm_xor_si128(c2[438],_mm_xor_si128(c2[1518],_mm_xor_si128(c2[73],_mm_xor_si128(c2[2253],_mm_xor_si128(c2[2618],_mm_xor_si128(c2[1355],_mm_xor_si128(c2[2634],_mm_xor_si128(c2[2454],_mm_xor_si128(c2[2632],_mm_xor_si128(c2[652],_mm_xor_si128(c2[2291],_mm_xor_si128(c2[2108],_mm_xor_si128(c2[2112],_mm_xor_si128(c2[2847],_mm_xor_si128(c2[1770],_mm_xor_si128(c2[1951],_mm_xor_si128(c2[1969],_mm_xor_si128(c2[1789],_mm_xor_si128(c2[2324],_mm_xor_si128(c2[1786],c2[1070]))))))))))))))))))))))))))))))))))));
+     d2[54]=simde_mm_xor_si128(c2[2528],simde_mm_xor_si128(c2[2348],simde_mm_xor_si128(c2[2340],simde_mm_xor_si128(c2[1625],simde_mm_xor_si128(c2[2346],simde_mm_xor_si128(c2[919],simde_mm_xor_si128(c2[739],simde_mm_xor_si128(c2[744],simde_mm_xor_si128(c2[385],simde_mm_xor_si128(c2[39],simde_mm_xor_si128(c2[2738],simde_mm_xor_si128(c2[583],simde_mm_xor_si128(c2[2575],simde_mm_xor_si128(c2[2395],simde_mm_xor_si128(c2[1499],simde_mm_xor_si128(c2[1139],simde_mm_xor_si128(c2[438],simde_mm_xor_si128(c2[1518],simde_mm_xor_si128(c2[73],simde_mm_xor_si128(c2[2253],simde_mm_xor_si128(c2[2618],simde_mm_xor_si128(c2[1355],simde_mm_xor_si128(c2[2634],simde_mm_xor_si128(c2[2454],simde_mm_xor_si128(c2[2632],simde_mm_xor_si128(c2[652],simde_mm_xor_si128(c2[2291],simde_mm_xor_si128(c2[2108],simde_mm_xor_si128(c2[2112],simde_mm_xor_si128(c2[2847],simde_mm_xor_si128(c2[1770],simde_mm_xor_si128(c2[1951],simde_mm_xor_si128(c2[1969],simde_mm_xor_si128(c2[1789],simde_mm_xor_si128(c2[2324],simde_mm_xor_si128(c2[1786],c2[1070]))))))))))))))))))))))))))))))))))));
 
 //row: 7
-     d2[63]=_mm_xor_si128(c2[2706],_mm_xor_si128(c2[2526],_mm_xor_si128(c2[544],_mm_xor_si128(c2[2527],_mm_xor_si128(c2[545],_mm_xor_si128(c2[1803],_mm_xor_si128(c2[2700],_mm_xor_si128(c2[1106],_mm_xor_si128(c2[926],_mm_xor_si128(c2[1823],_mm_xor_si128(c2[922],_mm_xor_si128(c2[1819],_mm_xor_si128(c2[563],_mm_xor_si128(c2[1640],_mm_xor_si128(c2[1460],_mm_xor_si128(c2[204],_mm_xor_si128(c2[217],_mm_xor_si128(c2[37],_mm_xor_si128(c2[943],_mm_xor_si128(c2[761],_mm_xor_si128(c2[1838],_mm_xor_si128(c2[1658],_mm_xor_si128(c2[2762],_mm_xor_si128(c2[2582],_mm_xor_si128(c2[600],_mm_xor_si128(c2[1677],_mm_xor_si128(c2[2574],_mm_xor_si128(c2[1317],_mm_xor_si128(c2[2214],_mm_xor_si128(c2[616],_mm_xor_si128(c2[1513],_mm_xor_si128(c2[1696],_mm_xor_si128(c2[2593],_mm_xor_si128(c2[260],_mm_xor_si128(c2[1337],_mm_xor_si128(c2[1157],_mm_xor_si128(c2[2431],_mm_xor_si128(c2[458],_mm_xor_si128(c2[2796],_mm_xor_si128(c2[994],_mm_xor_si128(c2[814],_mm_xor_si128(c2[1358],_mm_xor_si128(c2[2812],_mm_xor_si128(c2[2632],_mm_xor_si128(c2[650],_mm_xor_si128(c2[2810],_mm_xor_si128(c2[828],_mm_xor_si128(c2[830],_mm_xor_si128(c2[1916],_mm_xor_si128(c2[1736],_mm_xor_si128(c2[2469],_mm_xor_si128(c2[487],_mm_xor_si128(c2[2286],_mm_xor_si128(c2[493],_mm_xor_si128(c2[313],_mm_xor_si128(c2[668],_mm_xor_si128(c2[146],_mm_xor_si128(c2[1052],_mm_xor_si128(c2[1948],_mm_xor_si128(c2[2845],_mm_xor_si128(c2[2129],_mm_xor_si128(c2[327],_mm_xor_si128(c2[147],_mm_xor_si128(c2[2147],_mm_xor_si128(c2[1967],_mm_xor_si128(c2[2864],_mm_xor_si128(c2[2502],_mm_xor_si128(c2[529],_mm_xor_si128(c2[1964],_mm_xor_si128(c2[162],c2[2870]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+     d2[63]=simde_mm_xor_si128(c2[2706],simde_mm_xor_si128(c2[2526],simde_mm_xor_si128(c2[544],simde_mm_xor_si128(c2[2527],simde_mm_xor_si128(c2[545],simde_mm_xor_si128(c2[1803],simde_mm_xor_si128(c2[2700],simde_mm_xor_si128(c2[1106],simde_mm_xor_si128(c2[926],simde_mm_xor_si128(c2[1823],simde_mm_xor_si128(c2[922],simde_mm_xor_si128(c2[1819],simde_mm_xor_si128(c2[563],simde_mm_xor_si128(c2[1640],simde_mm_xor_si128(c2[1460],simde_mm_xor_si128(c2[204],simde_mm_xor_si128(c2[217],simde_mm_xor_si128(c2[37],simde_mm_xor_si128(c2[943],simde_mm_xor_si128(c2[761],simde_mm_xor_si128(c2[1838],simde_mm_xor_si128(c2[1658],simde_mm_xor_si128(c2[2762],simde_mm_xor_si128(c2[2582],simde_mm_xor_si128(c2[600],simde_mm_xor_si128(c2[1677],simde_mm_xor_si128(c2[2574],simde_mm_xor_si128(c2[1317],simde_mm_xor_si128(c2[2214],simde_mm_xor_si128(c2[616],simde_mm_xor_si128(c2[1513],simde_mm_xor_si128(c2[1696],simde_mm_xor_si128(c2[2593],simde_mm_xor_si128(c2[260],simde_mm_xor_si128(c2[1337],simde_mm_xor_si128(c2[1157],simde_mm_xor_si128(c2[2431],simde_mm_xor_si128(c2[458],simde_mm_xor_si128(c2[2796],simde_mm_xor_si128(c2[994],simde_mm_xor_si128(c2[814],simde_mm_xor_si128(c2[1358],simde_mm_xor_si128(c2[2812],simde_mm_xor_si128(c2[2632],simde_mm_xor_si128(c2[650],simde_mm_xor_si128(c2[2810],simde_mm_xor_si128(c2[828],simde_mm_xor_si128(c2[830],simde_mm_xor_si128(c2[1916],simde_mm_xor_si128(c2[1736],simde_mm_xor_si128(c2[2469],simde_mm_xor_si128(c2[487],simde_mm_xor_si128(c2[2286],simde_mm_xor_si128(c2[493],simde_mm_xor_si128(c2[313],simde_mm_xor_si128(c2[668],simde_mm_xor_si128(c2[146],simde_mm_xor_si128(c2[1052],simde_mm_xor_si128(c2[1948],simde_mm_xor_si128(c2[2845],simde_mm_xor_si128(c2[2129],simde_mm_xor_si128(c2[327],simde_mm_xor_si128(c2[147],simde_mm_xor_si128(c2[2147],simde_mm_xor_si128(c2[1967],simde_mm_xor_si128(c2[2864],simde_mm_xor_si128(c2[2502],simde_mm_xor_si128(c2[529],simde_mm_xor_si128(c2[1964],simde_mm_xor_si128(c2[162],c2[2870]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
 
 //row: 8
-     d2[72]=_mm_xor_si128(c2[1985],_mm_xor_si128(c2[1805],_mm_xor_si128(c2[1986],_mm_xor_si128(c2[1806],_mm_xor_si128(c2[1082],_mm_xor_si128(c2[1084],_mm_xor_si128(c2[385],_mm_xor_si128(c2[205],_mm_xor_si128(c2[201],_mm_xor_si128(c2[2721],_mm_xor_si128(c2[922],_mm_xor_si128(c2[2384],_mm_xor_si128(c2[2204],_mm_xor_si128(c2[40],_mm_xor_si128(c2[2041],_mm_xor_si128(c2[1861],_mm_xor_si128(c2[1136],_mm_xor_si128(c2[956],_mm_xor_si128(c2[596],_mm_xor_si128(c2[75],_mm_xor_si128(c2[2774],_mm_xor_si128(c2[975],_mm_xor_si128(c2[2418],_mm_xor_si128(c2[1890],_mm_xor_si128(c2[1710],_mm_xor_si128(c2[2075],_mm_xor_si128(c2[2091],_mm_xor_si128(c2[1911],_mm_xor_si128(c2[2269],_mm_xor_si128(c2[2089],_mm_xor_si128(c2[109],_mm_xor_si128(c2[1928],_mm_xor_si128(c2[1748],_mm_xor_si128(c2[1574],_mm_xor_si128(c2[2484],_mm_xor_si128(c2[2304],_mm_xor_si128(c2[1227],_mm_xor_si128(c2[1408],_mm_xor_si128(c2[1426],_mm_xor_si128(c2[1246],_mm_xor_si128(c2[1970],_mm_xor_si128(c2[1790],c2[1243]))))))))))))))))))))))))))))))))))))))))));
+     d2[72]=simde_mm_xor_si128(c2[1985],simde_mm_xor_si128(c2[1805],simde_mm_xor_si128(c2[1986],simde_mm_xor_si128(c2[1806],simde_mm_xor_si128(c2[1082],simde_mm_xor_si128(c2[1084],simde_mm_xor_si128(c2[385],simde_mm_xor_si128(c2[205],simde_mm_xor_si128(c2[201],simde_mm_xor_si128(c2[2721],simde_mm_xor_si128(c2[922],simde_mm_xor_si128(c2[2384],simde_mm_xor_si128(c2[2204],simde_mm_xor_si128(c2[40],simde_mm_xor_si128(c2[2041],simde_mm_xor_si128(c2[1861],simde_mm_xor_si128(c2[1136],simde_mm_xor_si128(c2[956],simde_mm_xor_si128(c2[596],simde_mm_xor_si128(c2[75],simde_mm_xor_si128(c2[2774],simde_mm_xor_si128(c2[975],simde_mm_xor_si128(c2[2418],simde_mm_xor_si128(c2[1890],simde_mm_xor_si128(c2[1710],simde_mm_xor_si128(c2[2075],simde_mm_xor_si128(c2[2091],simde_mm_xor_si128(c2[1911],simde_mm_xor_si128(c2[2269],simde_mm_xor_si128(c2[2089],simde_mm_xor_si128(c2[109],simde_mm_xor_si128(c2[1928],simde_mm_xor_si128(c2[1748],simde_mm_xor_si128(c2[1574],simde_mm_xor_si128(c2[2484],simde_mm_xor_si128(c2[2304],simde_mm_xor_si128(c2[1227],simde_mm_xor_si128(c2[1408],simde_mm_xor_si128(c2[1426],simde_mm_xor_si128(c2[1246],simde_mm_xor_si128(c2[1970],simde_mm_xor_si128(c2[1790],c2[1243]))))))))))))))))))))))))))))))))))))))))));
 
 //row: 9
-     d2[81]=_mm_xor_si128(c2[1801],_mm_xor_si128(c2[723],_mm_xor_si128(c2[543],_mm_xor_si128(c2[1802],_mm_xor_si128(c2[544],_mm_xor_si128(c2[1087],_mm_xor_si128(c2[2708],_mm_xor_si128(c2[201],_mm_xor_si128(c2[2002],_mm_xor_si128(c2[1822],_mm_xor_si128(c2[206],_mm_xor_si128(c2[1818],_mm_xor_si128(c2[2726],_mm_xor_si128(c2[1459],_mm_xor_si128(c2[204],_mm_xor_si128(c2[2200],_mm_xor_si128(c2[1122],_mm_xor_si128(c2[942],_mm_xor_si128(c2[36],_mm_xor_si128(c2[1657],_mm_xor_si128(c2[1857],_mm_xor_si128(c2[779],_mm_xor_si128(c2[599],_mm_xor_si128(c2[961],_mm_xor_si128(c2[2582],_mm_xor_si128(c2[601],_mm_xor_si128(c2[2222],_mm_xor_si128(c2[2779],_mm_xor_si128(c2[1512],_mm_xor_si128(c2[980],_mm_xor_si128(c2[2592],_mm_xor_si128(c2[2414],_mm_xor_si128(c2[1156],_mm_xor_si128(c2[1715],_mm_xor_si128(c2[457],_mm_xor_si128(c2[2071],_mm_xor_si128(c2[813],_mm_xor_si128(c2[1916],_mm_xor_si128(c2[829],_mm_xor_si128(c2[649],_mm_xor_si128(c2[2094],_mm_xor_si128(c2[836],_mm_xor_si128(c2[114],_mm_xor_si128(c2[1735],_mm_xor_si128(c2[1753],_mm_xor_si128(c2[486],_mm_xor_si128(c2[1570],_mm_xor_si128(c2[312],_mm_xor_si128(c2[2309],_mm_xor_si128(c2[1051],_mm_xor_si128(c2[1232],_mm_xor_si128(c2[2844],_mm_xor_si128(c2[1404],_mm_xor_si128(c2[146],_mm_xor_si128(c2[1586],_mm_xor_si128(c2[1242],_mm_xor_si128(c2[164],_mm_xor_si128(c2[2863],_mm_xor_si128(c2[1786],_mm_xor_si128(c2[528],_mm_xor_si128(c2[1248],c2[2869])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+     d2[81]=simde_mm_xor_si128(c2[1801],simde_mm_xor_si128(c2[723],simde_mm_xor_si128(c2[543],simde_mm_xor_si128(c2[1802],simde_mm_xor_si128(c2[544],simde_mm_xor_si128(c2[1087],simde_mm_xor_si128(c2[2708],simde_mm_xor_si128(c2[201],simde_mm_xor_si128(c2[2002],simde_mm_xor_si128(c2[1822],simde_mm_xor_si128(c2[206],simde_mm_xor_si128(c2[1818],simde_mm_xor_si128(c2[2726],simde_mm_xor_si128(c2[1459],simde_mm_xor_si128(c2[204],simde_mm_xor_si128(c2[2200],simde_mm_xor_si128(c2[1122],simde_mm_xor_si128(c2[942],simde_mm_xor_si128(c2[36],simde_mm_xor_si128(c2[1657],simde_mm_xor_si128(c2[1857],simde_mm_xor_si128(c2[779],simde_mm_xor_si128(c2[599],simde_mm_xor_si128(c2[961],simde_mm_xor_si128(c2[2582],simde_mm_xor_si128(c2[601],simde_mm_xor_si128(c2[2222],simde_mm_xor_si128(c2[2779],simde_mm_xor_si128(c2[1512],simde_mm_xor_si128(c2[980],simde_mm_xor_si128(c2[2592],simde_mm_xor_si128(c2[2414],simde_mm_xor_si128(c2[1156],simde_mm_xor_si128(c2[1715],simde_mm_xor_si128(c2[457],simde_mm_xor_si128(c2[2071],simde_mm_xor_si128(c2[813],simde_mm_xor_si128(c2[1916],simde_mm_xor_si128(c2[829],simde_mm_xor_si128(c2[649],simde_mm_xor_si128(c2[2094],simde_mm_xor_si128(c2[836],simde_mm_xor_si128(c2[114],simde_mm_xor_si128(c2[1735],simde_mm_xor_si128(c2[1753],simde_mm_xor_si128(c2[486],simde_mm_xor_si128(c2[1570],simde_mm_xor_si128(c2[312],simde_mm_xor_si128(c2[2309],simde_mm_xor_si128(c2[1051],simde_mm_xor_si128(c2[1232],simde_mm_xor_si128(c2[2844],simde_mm_xor_si128(c2[1404],simde_mm_xor_si128(c2[146],simde_mm_xor_si128(c2[1586],simde_mm_xor_si128(c2[1242],simde_mm_xor_si128(c2[164],simde_mm_xor_si128(c2[2863],simde_mm_xor_si128(c2[1786],simde_mm_xor_si128(c2[528],simde_mm_xor_si128(c2[1248],c2[2869])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
 
 //row: 10
-     d2[90]=_mm_xor_si128(c2[2522],_mm_xor_si128(c2[1640],_mm_xor_si128(c2[1014],c2[132])));
+     d2[90]=simde_mm_xor_si128(c2[2522],simde_mm_xor_si128(c2[1640],simde_mm_xor_si128(c2[1014],c2[132])));
 
 //row: 11
-     d2[99]=_mm_xor_si128(c2[364],_mm_xor_si128(c2[365],_mm_xor_si128(c2[2520],_mm_xor_si128(c2[2161],_mm_xor_si128(c2[1643],_mm_xor_si128(c2[1639],_mm_xor_si128(c2[1460],_mm_xor_si128(c2[1280],_mm_xor_si128(c2[763],_mm_xor_si128(c2[1658],_mm_xor_si128(c2[1478],_mm_xor_si128(c2[420],_mm_xor_si128(c2[2394],_mm_xor_si128(c2[2034],_mm_xor_si128(c2[1333],_mm_xor_si128(c2[2413],_mm_xor_si128(c2[1157],_mm_xor_si128(c2[977],_mm_xor_si128(c2[278],_mm_xor_si128(c2[814],_mm_xor_si128(c2[634],_mm_xor_si128(c2[470],_mm_xor_si128(c2[648],_mm_xor_si128(c2[1736],_mm_xor_si128(c2[1556],_mm_xor_si128(c2[307],_mm_xor_si128(c2[313],_mm_xor_si128(c2[133],_mm_xor_si128(c2[2647],_mm_xor_si128(c2[872],_mm_xor_si128(c2[2665],_mm_xor_si128(c2[147],_mm_xor_si128(c2[2846],_mm_xor_si128(c2[2684],_mm_xor_si128(c2[349],_mm_xor_si128(c2[2870],_mm_xor_si128(c2[2690],c2[889])))))))))))))))))))))))))))))))))))));
+     d2[99]=simde_mm_xor_si128(c2[364],simde_mm_xor_si128(c2[365],simde_mm_xor_si128(c2[2520],simde_mm_xor_si128(c2[2161],simde_mm_xor_si128(c2[1643],simde_mm_xor_si128(c2[1639],simde_mm_xor_si128(c2[1460],simde_mm_xor_si128(c2[1280],simde_mm_xor_si128(c2[763],simde_mm_xor_si128(c2[1658],simde_mm_xor_si128(c2[1478],simde_mm_xor_si128(c2[420],simde_mm_xor_si128(c2[2394],simde_mm_xor_si128(c2[2034],simde_mm_xor_si128(c2[1333],simde_mm_xor_si128(c2[2413],simde_mm_xor_si128(c2[1157],simde_mm_xor_si128(c2[977],simde_mm_xor_si128(c2[278],simde_mm_xor_si128(c2[814],simde_mm_xor_si128(c2[634],simde_mm_xor_si128(c2[470],simde_mm_xor_si128(c2[648],simde_mm_xor_si128(c2[1736],simde_mm_xor_si128(c2[1556],simde_mm_xor_si128(c2[307],simde_mm_xor_si128(c2[313],simde_mm_xor_si128(c2[133],simde_mm_xor_si128(c2[2647],simde_mm_xor_si128(c2[872],simde_mm_xor_si128(c2[2665],simde_mm_xor_si128(c2[147],simde_mm_xor_si128(c2[2846],simde_mm_xor_si128(c2[2684],simde_mm_xor_si128(c2[349],simde_mm_xor_si128(c2[2870],simde_mm_xor_si128(c2[2690],c2[889])))))))))))))))))))))))))))))))))))));
 
 //row: 12
-     d2[108]=_mm_xor_si128(c2[1088],_mm_xor_si128(c2[908],_mm_xor_si128(c2[900],_mm_xor_si128(c2[185],_mm_xor_si128(c2[2358],_mm_xor_si128(c2[2178],_mm_xor_si128(c2[2183],_mm_xor_si128(c2[1824],_mm_xor_si128(c2[200],_mm_xor_si128(c2[1478],_mm_xor_si128(c2[1298],_mm_xor_si128(c2[2022],_mm_xor_si128(c2[1135],_mm_xor_si128(c2[955],_mm_xor_si128(c2[59],_mm_xor_si128(c2[2578],_mm_xor_si128(c2[1861],_mm_xor_si128(c2[1877],_mm_xor_si128(c2[78],_mm_xor_si128(c2[1512],_mm_xor_si128(c2[813],_mm_xor_si128(c2[1178],_mm_xor_si128(c2[1194],_mm_xor_si128(c2[1014],_mm_xor_si128(c2[1192],_mm_xor_si128(c2[2091],_mm_xor_si128(c2[851],_mm_xor_si128(c2[668],_mm_xor_si128(c2[1407],_mm_xor_si128(c2[330],_mm_xor_si128(c2[511],_mm_xor_si128(c2[529],_mm_xor_si128(c2[349],_mm_xor_si128(c2[884],c2[346]))))))))))))))))))))))))))))))))));
+     d2[108]=simde_mm_xor_si128(c2[1088],simde_mm_xor_si128(c2[908],simde_mm_xor_si128(c2[900],simde_mm_xor_si128(c2[185],simde_mm_xor_si128(c2[2358],simde_mm_xor_si128(c2[2178],simde_mm_xor_si128(c2[2183],simde_mm_xor_si128(c2[1824],simde_mm_xor_si128(c2[200],simde_mm_xor_si128(c2[1478],simde_mm_xor_si128(c2[1298],simde_mm_xor_si128(c2[2022],simde_mm_xor_si128(c2[1135],simde_mm_xor_si128(c2[955],simde_mm_xor_si128(c2[59],simde_mm_xor_si128(c2[2578],simde_mm_xor_si128(c2[1861],simde_mm_xor_si128(c2[1877],simde_mm_xor_si128(c2[78],simde_mm_xor_si128(c2[1512],simde_mm_xor_si128(c2[813],simde_mm_xor_si128(c2[1178],simde_mm_xor_si128(c2[1194],simde_mm_xor_si128(c2[1014],simde_mm_xor_si128(c2[1192],simde_mm_xor_si128(c2[2091],simde_mm_xor_si128(c2[851],simde_mm_xor_si128(c2[668],simde_mm_xor_si128(c2[1407],simde_mm_xor_si128(c2[330],simde_mm_xor_si128(c2[511],simde_mm_xor_si128(c2[529],simde_mm_xor_si128(c2[349],simde_mm_xor_si128(c2[884],c2[346]))))))))))))))))))))))))))))))))));
 
 //row: 13
-     d2[117]=_mm_xor_si128(c2[183],_mm_xor_si128(c2[184],_mm_xor_si128(c2[2348],_mm_xor_si128(c2[2164],_mm_xor_si128(c2[1462],_mm_xor_si128(c2[1458],_mm_xor_si128(c2[1279],_mm_xor_si128(c2[1099],_mm_xor_si128(c2[920],_mm_xor_si128(c2[582],_mm_xor_si128(c2[1477],_mm_xor_si128(c2[1297],_mm_xor_si128(c2[239],_mm_xor_si128(c2[2222],_mm_xor_si128(c2[1862],_mm_xor_si128(c2[1152],_mm_xor_si128(c2[2232],_mm_xor_si128(c2[976],_mm_xor_si128(c2[796],_mm_xor_si128(c2[97],_mm_xor_si128(c2[633],_mm_xor_si128(c2[453],_mm_xor_si128(c2[289],_mm_xor_si128(c2[476],_mm_xor_si128(c2[1555],_mm_xor_si128(c2[1375],_mm_xor_si128(c2[126],_mm_xor_si128(c2[132],_mm_xor_si128(c2[2831],_mm_xor_si128(c2[691],_mm_xor_si128(c2[2484],_mm_xor_si128(c2[2845],_mm_xor_si128(c2[2665],_mm_xor_si128(c2[2667],_mm_xor_si128(c2[2503],_mm_xor_si128(c2[168],_mm_xor_si128(c2[2689],c2[2509])))))))))))))))))))))))))))))))))))));
+     d2[117]=simde_mm_xor_si128(c2[183],simde_mm_xor_si128(c2[184],simde_mm_xor_si128(c2[2348],simde_mm_xor_si128(c2[2164],simde_mm_xor_si128(c2[1462],simde_mm_xor_si128(c2[1458],simde_mm_xor_si128(c2[1279],simde_mm_xor_si128(c2[1099],simde_mm_xor_si128(c2[920],simde_mm_xor_si128(c2[582],simde_mm_xor_si128(c2[1477],simde_mm_xor_si128(c2[1297],simde_mm_xor_si128(c2[239],simde_mm_xor_si128(c2[2222],simde_mm_xor_si128(c2[1862],simde_mm_xor_si128(c2[1152],simde_mm_xor_si128(c2[2232],simde_mm_xor_si128(c2[976],simde_mm_xor_si128(c2[796],simde_mm_xor_si128(c2[97],simde_mm_xor_si128(c2[633],simde_mm_xor_si128(c2[453],simde_mm_xor_si128(c2[289],simde_mm_xor_si128(c2[476],simde_mm_xor_si128(c2[1555],simde_mm_xor_si128(c2[1375],simde_mm_xor_si128(c2[126],simde_mm_xor_si128(c2[132],simde_mm_xor_si128(c2[2831],simde_mm_xor_si128(c2[691],simde_mm_xor_si128(c2[2484],simde_mm_xor_si128(c2[2845],simde_mm_xor_si128(c2[2665],simde_mm_xor_si128(c2[2667],simde_mm_xor_si128(c2[2503],simde_mm_xor_si128(c2[168],simde_mm_xor_si128(c2[2689],c2[2509])))))))))))))))))))))))))))))))))))));
 
 //row: 14
-     d2[126]=_mm_xor_si128(c2[908],_mm_xor_si128(c2[728],_mm_xor_si128(c2[546],_mm_xor_si128(c2[720],_mm_xor_si128(c2[547],_mm_xor_si128(c2[5],_mm_xor_si128(c2[2702],_mm_xor_si128(c2[2178],_mm_xor_si128(c2[1998],_mm_xor_si128(c2[1825],_mm_xor_si128(c2[2003],_mm_xor_si128(c2[1821],_mm_xor_si128(c2[1644],_mm_xor_si128(c2[1642],_mm_xor_si128(c2[1462],_mm_xor_si128(c2[2726],_mm_xor_si128(c2[1298],_mm_xor_si128(c2[1118],_mm_xor_si128(c2[936],_mm_xor_si128(c2[1842],_mm_xor_si128(c2[1840],_mm_xor_si128(c2[1660],_mm_xor_si128(c2[955],_mm_xor_si128(c2[775],_mm_xor_si128(c2[602],_mm_xor_si128(c2[2758],_mm_xor_si128(c2[2576],_mm_xor_si128(c2[2398],_mm_xor_si128(c2[2216],_mm_xor_si128(c2[1697],_mm_xor_si128(c2[1515],_mm_xor_si128(c2[2777],_mm_xor_si128(c2[2595],_mm_xor_si128(c2[1332],_mm_xor_si128(c2[1339],_mm_xor_si128(c2[1159],_mm_xor_si128(c2[633],_mm_xor_si128(c2[451],_mm_xor_si128(c2[998],_mm_xor_si128(c2[996],_mm_xor_si128(c2[816],_mm_xor_si128(c2[1014],_mm_xor_si128(c2[834],_mm_xor_si128(c2[652],_mm_xor_si128(c2[1012],_mm_xor_si128(c2[830],_mm_xor_si128(c2[1911],_mm_xor_si128(c2[1909],_mm_xor_si128(c2[1729],_mm_xor_si128(c2[651],_mm_xor_si128(c2[671],_mm_xor_si128(c2[489],_mm_xor_si128(c2[488],_mm_xor_si128(c2[486],_mm_xor_si128(c2[306],_mm_xor_si128(c2[1227],_mm_xor_si128(c2[1045],_mm_xor_si128(c2[150],_mm_xor_si128(c2[2847],_mm_xor_si128(c2[331],_mm_xor_si128(c2[329],_mm_xor_si128(c2[149],_mm_xor_si128(c2[349],_mm_xor_si128(c2[169],_mm_xor_si128(c2[2866],_mm_xor_si128(c2[704],_mm_xor_si128(c2[522],_mm_xor_si128(c2[166],_mm_xor_si128(c2[164],c2[2863])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+     d2[126]=simde_mm_xor_si128(c2[908],simde_mm_xor_si128(c2[728],simde_mm_xor_si128(c2[546],simde_mm_xor_si128(c2[720],simde_mm_xor_si128(c2[547],simde_mm_xor_si128(c2[5],simde_mm_xor_si128(c2[2702],simde_mm_xor_si128(c2[2178],simde_mm_xor_si128(c2[1998],simde_mm_xor_si128(c2[1825],simde_mm_xor_si128(c2[2003],simde_mm_xor_si128(c2[1821],simde_mm_xor_si128(c2[1644],simde_mm_xor_si128(c2[1642],simde_mm_xor_si128(c2[1462],simde_mm_xor_si128(c2[2726],simde_mm_xor_si128(c2[1298],simde_mm_xor_si128(c2[1118],simde_mm_xor_si128(c2[936],simde_mm_xor_si128(c2[1842],simde_mm_xor_si128(c2[1840],simde_mm_xor_si128(c2[1660],simde_mm_xor_si128(c2[955],simde_mm_xor_si128(c2[775],simde_mm_xor_si128(c2[602],simde_mm_xor_si128(c2[2758],simde_mm_xor_si128(c2[2576],simde_mm_xor_si128(c2[2398],simde_mm_xor_si128(c2[2216],simde_mm_xor_si128(c2[1697],simde_mm_xor_si128(c2[1515],simde_mm_xor_si128(c2[2777],simde_mm_xor_si128(c2[2595],simde_mm_xor_si128(c2[1332],simde_mm_xor_si128(c2[1339],simde_mm_xor_si128(c2[1159],simde_mm_xor_si128(c2[633],simde_mm_xor_si128(c2[451],simde_mm_xor_si128(c2[998],simde_mm_xor_si128(c2[996],simde_mm_xor_si128(c2[816],simde_mm_xor_si128(c2[1014],simde_mm_xor_si128(c2[834],simde_mm_xor_si128(c2[652],simde_mm_xor_si128(c2[1012],simde_mm_xor_si128(c2[830],simde_mm_xor_si128(c2[1911],simde_mm_xor_si128(c2[1909],simde_mm_xor_si128(c2[1729],simde_mm_xor_si128(c2[651],simde_mm_xor_si128(c2[671],simde_mm_xor_si128(c2[489],simde_mm_xor_si128(c2[488],simde_mm_xor_si128(c2[486],simde_mm_xor_si128(c2[306],simde_mm_xor_si128(c2[1227],simde_mm_xor_si128(c2[1045],simde_mm_xor_si128(c2[150],simde_mm_xor_si128(c2[2847],simde_mm_xor_si128(c2[331],simde_mm_xor_si128(c2[329],simde_mm_xor_si128(c2[149],simde_mm_xor_si128(c2[349],simde_mm_xor_si128(c2[169],simde_mm_xor_si128(c2[2866],simde_mm_xor_si128(c2[704],simde_mm_xor_si128(c2[522],simde_mm_xor_si128(c2[166],simde_mm_xor_si128(c2[164],c2[2863])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
 
 //row: 15
-     d2[135]=_mm_xor_si128(c2[366],_mm_xor_si128(c2[548],_mm_xor_si128(c2[368],_mm_xor_si128(c2[367],_mm_xor_si128(c2[360],_mm_xor_si128(c2[2522],_mm_xor_si128(c2[2524],_mm_xor_si128(c2[1988],_mm_xor_si128(c2[1645],_mm_xor_si128(c2[1818],_mm_xor_si128(c2[1638],_mm_xor_si128(c2[1641],_mm_xor_si128(c2[1643],_mm_xor_si128(c2[1282],_mm_xor_si128(c2[1284],_mm_xor_si128(c2[756],_mm_xor_si128(c2[938],_mm_xor_si128(c2[758],_mm_xor_si128(c2[1480],_mm_xor_si128(c2[1482],_mm_xor_si128(c2[422],_mm_xor_si128(c2[595],_mm_xor_si128(c2[415],_mm_xor_si128(c2[2396],_mm_xor_si128(c2[2398],_mm_xor_si128(c2[2036],_mm_xor_si128(c2[2038],_mm_xor_si128(c2[1335],_mm_xor_si128(c2[1337],_mm_xor_si128(c2[2415],_mm_xor_si128(c2[2417],_mm_xor_si128(c2[979],_mm_xor_si128(c2[972],_mm_xor_si128(c2[271],_mm_xor_si128(c2[273],_mm_xor_si128(c2[636],_mm_xor_si128(c2[638],_mm_xor_si128(c2[472],_mm_xor_si128(c2[654],_mm_xor_si128(c2[474],_mm_xor_si128(c2[650],_mm_xor_si128(c2[652],_mm_xor_si128(c2[1549],_mm_xor_si128(c2[1551],_mm_xor_si128(c2[309],_mm_xor_si128(c2[311],_mm_xor_si128(c2[126],_mm_xor_si128(c2[128],_mm_xor_si128(c2[865],_mm_xor_si128(c2[867],_mm_xor_si128(c2[2667],_mm_xor_si128(c2[2669],_mm_xor_si128(c2[2848],_mm_xor_si128(c2[2850],_mm_xor_si128(c2[2686],_mm_xor_si128(c2[2868],_mm_xor_si128(c2[2688],_mm_xor_si128(c2[342],_mm_xor_si128(c2[344],_mm_xor_si128(c2[2683],c2[2685]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+     d2[135]=simde_mm_xor_si128(c2[366],simde_mm_xor_si128(c2[548],simde_mm_xor_si128(c2[368],simde_mm_xor_si128(c2[367],simde_mm_xor_si128(c2[360],simde_mm_xor_si128(c2[2522],simde_mm_xor_si128(c2[2524],simde_mm_xor_si128(c2[1988],simde_mm_xor_si128(c2[1645],simde_mm_xor_si128(c2[1818],simde_mm_xor_si128(c2[1638],simde_mm_xor_si128(c2[1641],simde_mm_xor_si128(c2[1643],simde_mm_xor_si128(c2[1282],simde_mm_xor_si128(c2[1284],simde_mm_xor_si128(c2[756],simde_mm_xor_si128(c2[938],simde_mm_xor_si128(c2[758],simde_mm_xor_si128(c2[1480],simde_mm_xor_si128(c2[1482],simde_mm_xor_si128(c2[422],simde_mm_xor_si128(c2[595],simde_mm_xor_si128(c2[415],simde_mm_xor_si128(c2[2396],simde_mm_xor_si128(c2[2398],simde_mm_xor_si128(c2[2036],simde_mm_xor_si128(c2[2038],simde_mm_xor_si128(c2[1335],simde_mm_xor_si128(c2[1337],simde_mm_xor_si128(c2[2415],simde_mm_xor_si128(c2[2417],simde_mm_xor_si128(c2[979],simde_mm_xor_si128(c2[972],simde_mm_xor_si128(c2[271],simde_mm_xor_si128(c2[273],simde_mm_xor_si128(c2[636],simde_mm_xor_si128(c2[638],simde_mm_xor_si128(c2[472],simde_mm_xor_si128(c2[654],simde_mm_xor_si128(c2[474],simde_mm_xor_si128(c2[650],simde_mm_xor_si128(c2[652],simde_mm_xor_si128(c2[1549],simde_mm_xor_si128(c2[1551],simde_mm_xor_si128(c2[309],simde_mm_xor_si128(c2[311],simde_mm_xor_si128(c2[126],simde_mm_xor_si128(c2[128],simde_mm_xor_si128(c2[865],simde_mm_xor_si128(c2[867],simde_mm_xor_si128(c2[2667],simde_mm_xor_si128(c2[2669],simde_mm_xor_si128(c2[2848],simde_mm_xor_si128(c2[2850],simde_mm_xor_si128(c2[2686],simde_mm_xor_si128(c2[2868],simde_mm_xor_si128(c2[2688],simde_mm_xor_si128(c2[342],simde_mm_xor_si128(c2[344],simde_mm_xor_si128(c2[2683],c2[2685]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
 
 //row: 16
-     d2[144]=_mm_xor_si128(c2[2701],_mm_xor_si128(c2[2521],_mm_xor_si128(c2[1980],_mm_xor_si128(c2[1800],_mm_xor_si128(c2[2522],_mm_xor_si128(c2[1981],_mm_xor_si128(c2[1801],_mm_xor_si128(c2[1807],_mm_xor_si128(c2[1086],_mm_xor_si128(c2[1101],_mm_xor_si128(c2[921],_mm_xor_si128(c2[380],_mm_xor_si128(c2[200],_mm_xor_si128(c2[926],_mm_xor_si128(c2[205],_mm_xor_si128(c2[558],_mm_xor_si128(c2[2725],_mm_xor_si128(c2[21],_mm_xor_si128(c2[221],_mm_xor_si128(c2[41],_mm_xor_si128(c2[2379],_mm_xor_si128(c2[2199],_mm_xor_si128(c2[756],_mm_xor_si128(c2[44],_mm_xor_si128(c2[2757],_mm_xor_si128(c2[2577],_mm_xor_si128(c2[2036],_mm_xor_si128(c2[1856],_mm_xor_si128(c2[1681],_mm_xor_si128(c2[1140],_mm_xor_si128(c2[960],_mm_xor_si128(c2[1321],_mm_xor_si128(c2[600],_mm_xor_si128(c2[620],_mm_xor_si128(c2[79],_mm_xor_si128(c2[2778],_mm_xor_si128(c2[1700],_mm_xor_si128(c2[979],_mm_xor_si128(c2[255],_mm_xor_si128(c2[2413],_mm_xor_si128(c2[2435],_mm_xor_si128(c2[1894],_mm_xor_si128(c2[1714],_mm_xor_si128(c2[2791],_mm_xor_si128(c2[2070],_mm_xor_si128(c2[2816],_mm_xor_si128(c2[2636],_mm_xor_si128(c2[2095],_mm_xor_si128(c2[1915],_mm_xor_si128(c2[2814],_mm_xor_si128(c2[2273],_mm_xor_si128(c2[2093],_mm_xor_si128(c2[834],_mm_xor_si128(c2[113],_mm_xor_si128(c2[2473],_mm_xor_si128(c2[1932],_mm_xor_si128(c2[1752],_mm_xor_si128(c2[2290],_mm_xor_si128(c2[1569],_mm_xor_si128(c2[150],_mm_xor_si128(c2[2488],_mm_xor_si128(c2[2308],_mm_xor_si128(c2[1952],_mm_xor_si128(c2[1231],_mm_xor_si128(c2[2124],_mm_xor_si128(c2[1412],_mm_xor_si128(c2[2142],_mm_xor_si128(c2[1962],_mm_xor_si128(c2[1430],_mm_xor_si128(c2[1250],_mm_xor_si128(c2[2506],_mm_xor_si128(c2[1965],_mm_xor_si128(c2[1785],_mm_xor_si128(c2[1968],_mm_xor_si128(c2[1247],c2[1782])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+     d2[144]=simde_mm_xor_si128(c2[2701],simde_mm_xor_si128(c2[2521],simde_mm_xor_si128(c2[1980],simde_mm_xor_si128(c2[1800],simde_mm_xor_si128(c2[2522],simde_mm_xor_si128(c2[1981],simde_mm_xor_si128(c2[1801],simde_mm_xor_si128(c2[1807],simde_mm_xor_si128(c2[1086],simde_mm_xor_si128(c2[1101],simde_mm_xor_si128(c2[921],simde_mm_xor_si128(c2[380],simde_mm_xor_si128(c2[200],simde_mm_xor_si128(c2[926],simde_mm_xor_si128(c2[205],simde_mm_xor_si128(c2[558],simde_mm_xor_si128(c2[2725],simde_mm_xor_si128(c2[21],simde_mm_xor_si128(c2[221],simde_mm_xor_si128(c2[41],simde_mm_xor_si128(c2[2379],simde_mm_xor_si128(c2[2199],simde_mm_xor_si128(c2[756],simde_mm_xor_si128(c2[44],simde_mm_xor_si128(c2[2757],simde_mm_xor_si128(c2[2577],simde_mm_xor_si128(c2[2036],simde_mm_xor_si128(c2[1856],simde_mm_xor_si128(c2[1681],simde_mm_xor_si128(c2[1140],simde_mm_xor_si128(c2[960],simde_mm_xor_si128(c2[1321],simde_mm_xor_si128(c2[600],simde_mm_xor_si128(c2[620],simde_mm_xor_si128(c2[79],simde_mm_xor_si128(c2[2778],simde_mm_xor_si128(c2[1700],simde_mm_xor_si128(c2[979],simde_mm_xor_si128(c2[255],simde_mm_xor_si128(c2[2413],simde_mm_xor_si128(c2[2435],simde_mm_xor_si128(c2[1894],simde_mm_xor_si128(c2[1714],simde_mm_xor_si128(c2[2791],simde_mm_xor_si128(c2[2070],simde_mm_xor_si128(c2[2816],simde_mm_xor_si128(c2[2636],simde_mm_xor_si128(c2[2095],simde_mm_xor_si128(c2[1915],simde_mm_xor_si128(c2[2814],simde_mm_xor_si128(c2[2273],simde_mm_xor_si128(c2[2093],simde_mm_xor_si128(c2[834],simde_mm_xor_si128(c2[113],simde_mm_xor_si128(c2[2473],simde_mm_xor_si128(c2[1932],simde_mm_xor_si128(c2[1752],simde_mm_xor_si128(c2[2290],simde_mm_xor_si128(c2[1569],simde_mm_xor_si128(c2[150],simde_mm_xor_si128(c2[2488],simde_mm_xor_si128(c2[2308],simde_mm_xor_si128(c2[1952],simde_mm_xor_si128(c2[1231],simde_mm_xor_si128(c2[2124],simde_mm_xor_si128(c2[1412],simde_mm_xor_si128(c2[2142],simde_mm_xor_si128(c2[1962],simde_mm_xor_si128(c2[1430],simde_mm_xor_si128(c2[1250],simde_mm_xor_si128(c2[2506],simde_mm_xor_si128(c2[1965],simde_mm_xor_si128(c2[1785],simde_mm_xor_si128(c2[1968],simde_mm_xor_si128(c2[1247],c2[1782])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
 
 //row: 17
-     d2[153]=_mm_xor_si128(c2[724],_mm_xor_si128(c2[544],_mm_xor_si128(c2[2342],_mm_xor_si128(c2[2162],_mm_xor_si128(c2[545],_mm_xor_si128(c2[2343],_mm_xor_si128(c2[2163],_mm_xor_si128(c2[2700],_mm_xor_si128(c2[1448],_mm_xor_si128(c2[2003],_mm_xor_si128(c2[1823],_mm_xor_si128(c2[742],_mm_xor_si128(c2[562],_mm_xor_si128(c2[1819],_mm_xor_si128(c2[558],_mm_xor_si128(c2[1460],_mm_xor_si128(c2[199],_mm_xor_si128(c2[1465],_mm_xor_si128(c2[1123],_mm_xor_si128(c2[943],_mm_xor_si128(c2[2741],_mm_xor_si128(c2[2561],_mm_xor_si128(c2[1658],_mm_xor_si128(c2[397],_mm_xor_si128(c2[780],_mm_xor_si128(c2[600],_mm_xor_si128(c2[2398],_mm_xor_si128(c2[2218],_mm_xor_si128(c2[2574],_mm_xor_si128(c2[1502],_mm_xor_si128(c2[1322],_mm_xor_si128(c2[2214],_mm_xor_si128(c2[962],_mm_xor_si128(c2[1513],_mm_xor_si128(c2[432],_mm_xor_si128(c2[252],_mm_xor_si128(c2[2593],_mm_xor_si128(c2[1332],_mm_xor_si128(c2[1157],_mm_xor_si128(c2[2775],_mm_xor_si128(c2[458],_mm_xor_si128(c2[2256],_mm_xor_si128(c2[2076],_mm_xor_si128(c2[814],_mm_xor_si128(c2[2432],_mm_xor_si128(c2[2072],_mm_xor_si128(c2[830],_mm_xor_si128(c2[650],_mm_xor_si128(c2[2448],_mm_xor_si128(c2[2268],_mm_xor_si128(c2[828],_mm_xor_si128(c2[2635],_mm_xor_si128(c2[2455],_mm_xor_si128(c2[1736],_mm_xor_si128(c2[475],_mm_xor_si128(c2[487],_mm_xor_si128(c2[2294],_mm_xor_si128(c2[2114],_mm_xor_si128(c2[313],_mm_xor_si128(c2[1931],_mm_xor_si128(c2[1052],_mm_xor_si128(c2[2850],_mm_xor_si128(c2[2670],_mm_xor_si128(c2[2845],_mm_xor_si128(c2[1584],_mm_xor_si128(c2[147],_mm_xor_si128(c2[1765],_mm_xor_si128(c2[165],_mm_xor_si128(c2[2864],_mm_xor_si128(c2[1783],_mm_xor_si128(c2[1603],_mm_xor_si128(c2[529],_mm_xor_si128(c2[2327],_mm_xor_si128(c2[2147],_mm_xor_si128(c2[2870],c2[1609])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+     d2[153]=simde_mm_xor_si128(c2[724],simde_mm_xor_si128(c2[544],simde_mm_xor_si128(c2[2342],simde_mm_xor_si128(c2[2162],simde_mm_xor_si128(c2[545],simde_mm_xor_si128(c2[2343],simde_mm_xor_si128(c2[2163],simde_mm_xor_si128(c2[2700],simde_mm_xor_si128(c2[1448],simde_mm_xor_si128(c2[2003],simde_mm_xor_si128(c2[1823],simde_mm_xor_si128(c2[742],simde_mm_xor_si128(c2[562],simde_mm_xor_si128(c2[1819],simde_mm_xor_si128(c2[558],simde_mm_xor_si128(c2[1460],simde_mm_xor_si128(c2[199],simde_mm_xor_si128(c2[1465],simde_mm_xor_si128(c2[1123],simde_mm_xor_si128(c2[943],simde_mm_xor_si128(c2[2741],simde_mm_xor_si128(c2[2561],simde_mm_xor_si128(c2[1658],simde_mm_xor_si128(c2[397],simde_mm_xor_si128(c2[780],simde_mm_xor_si128(c2[600],simde_mm_xor_si128(c2[2398],simde_mm_xor_si128(c2[2218],simde_mm_xor_si128(c2[2574],simde_mm_xor_si128(c2[1502],simde_mm_xor_si128(c2[1322],simde_mm_xor_si128(c2[2214],simde_mm_xor_si128(c2[962],simde_mm_xor_si128(c2[1513],simde_mm_xor_si128(c2[432],simde_mm_xor_si128(c2[252],simde_mm_xor_si128(c2[2593],simde_mm_xor_si128(c2[1332],simde_mm_xor_si128(c2[1157],simde_mm_xor_si128(c2[2775],simde_mm_xor_si128(c2[458],simde_mm_xor_si128(c2[2256],simde_mm_xor_si128(c2[2076],simde_mm_xor_si128(c2[814],simde_mm_xor_si128(c2[2432],simde_mm_xor_si128(c2[2072],simde_mm_xor_si128(c2[830],simde_mm_xor_si128(c2[650],simde_mm_xor_si128(c2[2448],simde_mm_xor_si128(c2[2268],simde_mm_xor_si128(c2[828],simde_mm_xor_si128(c2[2635],simde_mm_xor_si128(c2[2455],simde_mm_xor_si128(c2[1736],simde_mm_xor_si128(c2[475],simde_mm_xor_si128(c2[487],simde_mm_xor_si128(c2[2294],simde_mm_xor_si128(c2[2114],simde_mm_xor_si128(c2[313],simde_mm_xor_si128(c2[1931],simde_mm_xor_si128(c2[1052],simde_mm_xor_si128(c2[2850],simde_mm_xor_si128(c2[2670],simde_mm_xor_si128(c2[2845],simde_mm_xor_si128(c2[1584],simde_mm_xor_si128(c2[147],simde_mm_xor_si128(c2[1765],simde_mm_xor_si128(c2[165],simde_mm_xor_si128(c2[2864],simde_mm_xor_si128(c2[1783],simde_mm_xor_si128(c2[1603],simde_mm_xor_si128(c2[529],simde_mm_xor_si128(c2[2327],simde_mm_xor_si128(c2[2147],simde_mm_xor_si128(c2[2870],c2[1609])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
 
 //row: 18
-     d2[162]=_mm_xor_si128(c2[181],_mm_xor_si128(c2[1914],c2[2654]));
+     d2[162]=simde_mm_xor_si128(c2[181],simde_mm_xor_si128(c2[1914],c2[2654]));
 
 //row: 19
-     d2[171]=_mm_xor_si128(c2[1982],_mm_xor_si128(c2[1983],_mm_xor_si128(c2[1268],_mm_xor_si128(c2[2704],_mm_xor_si128(c2[382],_mm_xor_si128(c2[378],_mm_xor_si128(c2[19],_mm_xor_si128(c2[2179],_mm_xor_si128(c2[2381],_mm_xor_si128(c2[217],_mm_xor_si128(c2[2038],_mm_xor_si128(c2[1142],_mm_xor_si128(c2[782],_mm_xor_si128(c2[72],_mm_xor_si128(c2[1152],_mm_xor_si128(c2[2595],_mm_xor_si128(c2[1896],_mm_xor_si128(c2[2252],_mm_xor_si128(c2[2088],_mm_xor_si128(c2[2275],_mm_xor_si128(c2[295],_mm_xor_si128(c2[1934],_mm_xor_si128(c2[1751],_mm_xor_si128(c2[2490],_mm_xor_si128(c2[1404],_mm_xor_si128(c2[1585],_mm_xor_si128(c2[1423],_mm_xor_si128(c2[1967],c2[1429]))))))))))))))))))))))))))));
+     d2[171]=simde_mm_xor_si128(c2[1982],simde_mm_xor_si128(c2[1983],simde_mm_xor_si128(c2[1268],simde_mm_xor_si128(c2[2704],simde_mm_xor_si128(c2[382],simde_mm_xor_si128(c2[378],simde_mm_xor_si128(c2[19],simde_mm_xor_si128(c2[2179],simde_mm_xor_si128(c2[2381],simde_mm_xor_si128(c2[217],simde_mm_xor_si128(c2[2038],simde_mm_xor_si128(c2[1142],simde_mm_xor_si128(c2[782],simde_mm_xor_si128(c2[72],simde_mm_xor_si128(c2[1152],simde_mm_xor_si128(c2[2595],simde_mm_xor_si128(c2[1896],simde_mm_xor_si128(c2[2252],simde_mm_xor_si128(c2[2088],simde_mm_xor_si128(c2[2275],simde_mm_xor_si128(c2[295],simde_mm_xor_si128(c2[1934],simde_mm_xor_si128(c2[1751],simde_mm_xor_si128(c2[2490],simde_mm_xor_si128(c2[1404],simde_mm_xor_si128(c2[1585],simde_mm_xor_si128(c2[1423],simde_mm_xor_si128(c2[1967],c2[1429]))))))))))))))))))))))))))));
 
 //row: 20
-     d2[180]=_mm_xor_si128(c2[185],_mm_xor_si128(c2[5],_mm_xor_si128(c2[6],_mm_xor_si128(c2[2161],_mm_xor_si128(c2[1464],_mm_xor_si128(c2[1284],_mm_xor_si128(c2[1280],_mm_xor_si128(c2[921],_mm_xor_si128(c2[378],_mm_xor_si128(c2[584],_mm_xor_si128(c2[404],_mm_xor_si128(c2[1119],_mm_xor_si128(c2[241],_mm_xor_si128(c2[61],_mm_xor_si128(c2[2035],_mm_xor_si128(c2[1675],_mm_xor_si128(c2[974],_mm_xor_si128(c2[2054],_mm_xor_si128(c2[618],_mm_xor_si128(c2[1338],_mm_xor_si128(c2[2798],_mm_xor_si128(c2[275],_mm_xor_si128(c2[291],_mm_xor_si128(c2[111],_mm_xor_si128(c2[289],_mm_xor_si128(c2[1188],_mm_xor_si128(c2[2827],_mm_xor_si128(c2[2653],_mm_xor_si128(c2[504],_mm_xor_si128(c2[2306],_mm_xor_si128(c2[2487],_mm_xor_si128(c2[2505],_mm_xor_si128(c2[2325],_mm_xor_si128(c2[2869],c2[2322]))))))))))))))))))))))))))))))))));
+     d2[180]=simde_mm_xor_si128(c2[185],simde_mm_xor_si128(c2[5],simde_mm_xor_si128(c2[6],simde_mm_xor_si128(c2[2161],simde_mm_xor_si128(c2[1464],simde_mm_xor_si128(c2[1284],simde_mm_xor_si128(c2[1280],simde_mm_xor_si128(c2[921],simde_mm_xor_si128(c2[378],simde_mm_xor_si128(c2[584],simde_mm_xor_si128(c2[404],simde_mm_xor_si128(c2[1119],simde_mm_xor_si128(c2[241],simde_mm_xor_si128(c2[61],simde_mm_xor_si128(c2[2035],simde_mm_xor_si128(c2[1675],simde_mm_xor_si128(c2[974],simde_mm_xor_si128(c2[2054],simde_mm_xor_si128(c2[618],simde_mm_xor_si128(c2[1338],simde_mm_xor_si128(c2[2798],simde_mm_xor_si128(c2[275],simde_mm_xor_si128(c2[291],simde_mm_xor_si128(c2[111],simde_mm_xor_si128(c2[289],simde_mm_xor_si128(c2[1188],simde_mm_xor_si128(c2[2827],simde_mm_xor_si128(c2[2653],simde_mm_xor_si128(c2[504],simde_mm_xor_si128(c2[2306],simde_mm_xor_si128(c2[2487],simde_mm_xor_si128(c2[2505],simde_mm_xor_si128(c2[2325],simde_mm_xor_si128(c2[2869],c2[2322]))))))))))))))))))))))))))))))))));
 
 //row: 21
-     d2[189]=_mm_xor_si128(c2[545],_mm_xor_si128(c2[546],_mm_xor_si128(c2[2701],_mm_xor_si128(c2[1985],_mm_xor_si128(c2[1824],_mm_xor_si128(c2[1820],_mm_xor_si128(c2[1641],_mm_xor_si128(c2[1461],_mm_xor_si128(c2[944],_mm_xor_si128(c2[1839],_mm_xor_si128(c2[1659],_mm_xor_si128(c2[601],_mm_xor_si128(c2[2575],_mm_xor_si128(c2[2215],_mm_xor_si128(c2[1514],_mm_xor_si128(c2[2594],_mm_xor_si128(c2[1338],_mm_xor_si128(c2[1158],_mm_xor_si128(c2[450],_mm_xor_si128(c2[995],_mm_xor_si128(c2[815],_mm_xor_si128(c2[651],_mm_xor_si128(c2[829],_mm_xor_si128(c2[1908],_mm_xor_si128(c2[1728],_mm_xor_si128(c2[488],_mm_xor_si128(c2[494],_mm_xor_si128(c2[314],_mm_xor_si128(c2[1044],_mm_xor_si128(c2[2846],_mm_xor_si128(c2[328],_mm_xor_si128(c2[148],_mm_xor_si128(c2[2128],_mm_xor_si128(c2[2865],_mm_xor_si128(c2[530],_mm_xor_si128(c2[163],c2[2862]))))))))))))))))))))))))))))))))))));
+     d2[189]=simde_mm_xor_si128(c2[545],simde_mm_xor_si128(c2[546],simde_mm_xor_si128(c2[2701],simde_mm_xor_si128(c2[1985],simde_mm_xor_si128(c2[1824],simde_mm_xor_si128(c2[1820],simde_mm_xor_si128(c2[1641],simde_mm_xor_si128(c2[1461],simde_mm_xor_si128(c2[944],simde_mm_xor_si128(c2[1839],simde_mm_xor_si128(c2[1659],simde_mm_xor_si128(c2[601],simde_mm_xor_si128(c2[2575],simde_mm_xor_si128(c2[2215],simde_mm_xor_si128(c2[1514],simde_mm_xor_si128(c2[2594],simde_mm_xor_si128(c2[1338],simde_mm_xor_si128(c2[1158],simde_mm_xor_si128(c2[450],simde_mm_xor_si128(c2[995],simde_mm_xor_si128(c2[815],simde_mm_xor_si128(c2[651],simde_mm_xor_si128(c2[829],simde_mm_xor_si128(c2[1908],simde_mm_xor_si128(c2[1728],simde_mm_xor_si128(c2[488],simde_mm_xor_si128(c2[494],simde_mm_xor_si128(c2[314],simde_mm_xor_si128(c2[1044],simde_mm_xor_si128(c2[2846],simde_mm_xor_si128(c2[328],simde_mm_xor_si128(c2[148],simde_mm_xor_si128(c2[2128],simde_mm_xor_si128(c2[2865],simde_mm_xor_si128(c2[530],simde_mm_xor_si128(c2[163],c2[2862]))))))))))))))))))))))))))))))))))));
 
 //row: 22
-     d2[198]=_mm_xor_si128(c2[1101],c2[764]);
+     d2[198]=simde_mm_xor_si128(c2[1101],c2[764]);
 
 //row: 23
-     d2[207]=_mm_xor_si128(c2[724],_mm_xor_si128(c2[601],c2[1533]));
+     d2[207]=simde_mm_xor_si128(c2[724],simde_mm_xor_si128(c2[601],c2[1533]));
 
 //row: 24
-     d2[216]=_mm_xor_si128(c2[2539],_mm_xor_si128(c2[1838],c2[1068]));
+     d2[216]=simde_mm_xor_si128(c2[2539],simde_mm_xor_si128(c2[1838],c2[1068]));
 
 //row: 25
-     d2[225]=_mm_xor_si128(c2[8],c2[2793]);
+     d2[225]=simde_mm_xor_si128(c2[8],c2[2793]);
 
 //row: 26
-     d2[234]=_mm_xor_si128(c2[1266],_mm_xor_si128(c2[1086],_mm_xor_si128(c2[1268],_mm_xor_si128(c2[1267],_mm_xor_si128(c2[1087],_mm_xor_si128(c2[1260],_mm_xor_si128(c2[363],_mm_xor_si128(c2[545],_mm_xor_si128(c2[2545],_mm_xor_si128(c2[2365],_mm_xor_si128(c2[2538],_mm_xor_si128(c2[2361],_mm_xor_si128(c2[2543],_mm_xor_si128(c2[2002],_mm_xor_si128(c2[2364],_mm_xor_si128(c2[2184],_mm_xor_si128(c2[1656],_mm_xor_si128(c2[1476],_mm_xor_si128(c2[1658],_mm_xor_si128(c2[2200],_mm_xor_si128(c2[2562],_mm_xor_si128(c2[2382],_mm_xor_si128(c2[2564],_mm_xor_si128(c2[1322],_mm_xor_si128(c2[1142],_mm_xor_si128(c2[1315],_mm_xor_si128(c2[417],_mm_xor_si128(c2[237],_mm_xor_si128(c2[419],_mm_xor_si128(c2[2756],_mm_xor_si128(c2[59],_mm_xor_si128(c2[2235],_mm_xor_si128(c2[2055],_mm_xor_si128(c2[2237],_mm_xor_si128(c2[256],_mm_xor_si128(c2[438],_mm_xor_si128(c2[1699],_mm_xor_si128(c2[2052],_mm_xor_si128(c2[1872],_mm_xor_si128(c2[1171],_mm_xor_si128(c2[991],_mm_xor_si128(c2[1173],_mm_xor_si128(c2[1356],_mm_xor_si128(c2[1718],_mm_xor_si128(c2[1538],_mm_xor_si128(c2[1372],_mm_xor_si128(c2[1192],_mm_xor_si128(c2[1374],_mm_xor_si128(c2[1550],_mm_xor_si128(c2[1370],_mm_xor_si128(c2[1552],_mm_xor_si128(c2[2269],_mm_xor_si128(c2[2631],_mm_xor_si128(c2[2451],_mm_xor_si128(c2[1209],_mm_xor_si128(c2[1029],_mm_xor_si128(c2[1211],_mm_xor_si128(c2[846],_mm_xor_si128(c2[1208],_mm_xor_si128(c2[1028],_mm_xor_si128(c2[2287],_mm_xor_si128(c2[1765],_mm_xor_si128(c2[1585],_mm_xor_si128(c2[1767],_mm_xor_si128(c2[508],_mm_xor_si128(c2[690],_mm_xor_si128(c2[689],_mm_xor_si128(c2[1051],_mm_xor_si128(c2[871],_mm_xor_si128(c2[707],_mm_xor_si128(c2[527],_mm_xor_si128(c2[709],_mm_xor_si128(c2[1242],_mm_xor_si128(c2[1062],_mm_xor_si128(c2[1244],_mm_xor_si128(c2[524],_mm_xor_si128(c2[886],c2[706])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+     d2[234]=simde_mm_xor_si128(c2[1266],simde_mm_xor_si128(c2[1086],simde_mm_xor_si128(c2[1268],simde_mm_xor_si128(c2[1267],simde_mm_xor_si128(c2[1087],simde_mm_xor_si128(c2[1260],simde_mm_xor_si128(c2[363],simde_mm_xor_si128(c2[545],simde_mm_xor_si128(c2[2545],simde_mm_xor_si128(c2[2365],simde_mm_xor_si128(c2[2538],simde_mm_xor_si128(c2[2361],simde_mm_xor_si128(c2[2543],simde_mm_xor_si128(c2[2002],simde_mm_xor_si128(c2[2364],simde_mm_xor_si128(c2[2184],simde_mm_xor_si128(c2[1656],simde_mm_xor_si128(c2[1476],simde_mm_xor_si128(c2[1658],simde_mm_xor_si128(c2[2200],simde_mm_xor_si128(c2[2562],simde_mm_xor_si128(c2[2382],simde_mm_xor_si128(c2[2564],simde_mm_xor_si128(c2[1322],simde_mm_xor_si128(c2[1142],simde_mm_xor_si128(c2[1315],simde_mm_xor_si128(c2[417],simde_mm_xor_si128(c2[237],simde_mm_xor_si128(c2[419],simde_mm_xor_si128(c2[2756],simde_mm_xor_si128(c2[59],simde_mm_xor_si128(c2[2235],simde_mm_xor_si128(c2[2055],simde_mm_xor_si128(c2[2237],simde_mm_xor_si128(c2[256],simde_mm_xor_si128(c2[438],simde_mm_xor_si128(c2[1699],simde_mm_xor_si128(c2[2052],simde_mm_xor_si128(c2[1872],simde_mm_xor_si128(c2[1171],simde_mm_xor_si128(c2[991],simde_mm_xor_si128(c2[1173],simde_mm_xor_si128(c2[1356],simde_mm_xor_si128(c2[1718],simde_mm_xor_si128(c2[1538],simde_mm_xor_si128(c2[1372],simde_mm_xor_si128(c2[1192],simde_mm_xor_si128(c2[1374],simde_mm_xor_si128(c2[1550],simde_mm_xor_si128(c2[1370],simde_mm_xor_si128(c2[1552],simde_mm_xor_si128(c2[2269],simde_mm_xor_si128(c2[2631],simde_mm_xor_si128(c2[2451],simde_mm_xor_si128(c2[1209],simde_mm_xor_si128(c2[1029],simde_mm_xor_si128(c2[1211],simde_mm_xor_si128(c2[846],simde_mm_xor_si128(c2[1208],simde_mm_xor_si128(c2[1028],simde_mm_xor_si128(c2[2287],simde_mm_xor_si128(c2[1765],simde_mm_xor_si128(c2[1585],simde_mm_xor_si128(c2[1767],simde_mm_xor_si128(c2[508],simde_mm_xor_si128(c2[690],simde_mm_xor_si128(c2[689],simde_mm_xor_si128(c2[1051],simde_mm_xor_si128(c2[871],simde_mm_xor_si128(c2[707],simde_mm_xor_si128(c2[527],simde_mm_xor_si128(c2[709],simde_mm_xor_si128(c2[1242],simde_mm_xor_si128(c2[1062],simde_mm_xor_si128(c2[1244],simde_mm_xor_si128(c2[524],simde_mm_xor_si128(c2[886],c2[706])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
 
 //row: 27
-     d2[243]=_mm_xor_si128(c2[2340],c2[1908]);
+     d2[243]=simde_mm_xor_si128(c2[2340],c2[1908]);
 
 //row: 28
-     d2[252]=_mm_xor_si128(c2[1824],_mm_xor_si128(c2[2380],c2[2072]));
+     d2[252]=simde_mm_xor_si128(c2[1824],simde_mm_xor_si128(c2[2380],c2[2072]));
 
 //row: 29
-     d2[261]=_mm_xor_si128(c2[908],c2[1693]);
+     d2[261]=simde_mm_xor_si128(c2[908],c2[1693]);
 
 //row: 30
-     d2[270]=_mm_xor_si128(c2[1301],_mm_xor_si128(c2[1533],_mm_xor_si128(c2[1572],c2[1246])));
+     d2[270]=simde_mm_xor_si128(c2[1301],simde_mm_xor_si128(c2[1533],simde_mm_xor_si128(c2[1572],c2[1246])));
 
 //row: 31
-     d2[279]=_mm_xor_si128(c2[2348],_mm_xor_si128(c2[2340],_mm_xor_si128(c2[1625],_mm_xor_si128(c2[739],_mm_xor_si128(c2[744],_mm_xor_si128(c2[565],_mm_xor_si128(c2[385],_mm_xor_si128(c2[23],_mm_xor_si128(c2[2738],_mm_xor_si128(c2[763],_mm_xor_si128(c2[583],_mm_xor_si128(c2[2395],_mm_xor_si128(c2[1499],_mm_xor_si128(c2[1139],_mm_xor_si128(c2[438],_mm_xor_si128(c2[1518],_mm_xor_si128(c2[253],_mm_xor_si128(c2[73],_mm_xor_si128(c2[2253],_mm_xor_si128(c2[2798],_mm_xor_si128(c2[2618],_mm_xor_si128(c2[2454],_mm_xor_si128(c2[2632],_mm_xor_si128(c2[832],_mm_xor_si128(c2[652],_mm_xor_si128(c2[2291],_mm_xor_si128(c2[2288],_mm_xor_si128(c2[2108],_mm_xor_si128(c2[2847],_mm_xor_si128(c2[1770],_mm_xor_si128(c2[2131],_mm_xor_si128(c2[1951],_mm_xor_si128(c2[1789],_mm_xor_si128(c2[2324],_mm_xor_si128(c2[1966],c2[1786])))))))))))))))))))))))))))))))))));
+     d2[279]=simde_mm_xor_si128(c2[2348],simde_mm_xor_si128(c2[2340],simde_mm_xor_si128(c2[1625],simde_mm_xor_si128(c2[739],simde_mm_xor_si128(c2[744],simde_mm_xor_si128(c2[565],simde_mm_xor_si128(c2[385],simde_mm_xor_si128(c2[23],simde_mm_xor_si128(c2[2738],simde_mm_xor_si128(c2[763],simde_mm_xor_si128(c2[583],simde_mm_xor_si128(c2[2395],simde_mm_xor_si128(c2[1499],simde_mm_xor_si128(c2[1139],simde_mm_xor_si128(c2[438],simde_mm_xor_si128(c2[1518],simde_mm_xor_si128(c2[253],simde_mm_xor_si128(c2[73],simde_mm_xor_si128(c2[2253],simde_mm_xor_si128(c2[2798],simde_mm_xor_si128(c2[2618],simde_mm_xor_si128(c2[2454],simde_mm_xor_si128(c2[2632],simde_mm_xor_si128(c2[832],simde_mm_xor_si128(c2[652],simde_mm_xor_si128(c2[2291],simde_mm_xor_si128(c2[2288],simde_mm_xor_si128(c2[2108],simde_mm_xor_si128(c2[2847],simde_mm_xor_si128(c2[1770],simde_mm_xor_si128(c2[2131],simde_mm_xor_si128(c2[1951],simde_mm_xor_si128(c2[1789],simde_mm_xor_si128(c2[2324],simde_mm_xor_si128(c2[1966],c2[1786])))))))))))))))))))))))))))))))))));
 
 //row: 32
-     d2[288]=_mm_xor_si128(c2[1804],_mm_xor_si128(c2[1624],_mm_xor_si128(c2[1805],_mm_xor_si128(c2[1625],_mm_xor_si128(c2[901],_mm_xor_si128(c2[2],_mm_xor_si128(c2[204],_mm_xor_si128(c2[24],_mm_xor_si128(c2[20],_mm_xor_si128(c2[2540],_mm_xor_si128(c2[2203],_mm_xor_si128(c2[2023],_mm_xor_si128(c2[2738],_mm_xor_si128(c2[1860],_mm_xor_si128(c2[1680],_mm_xor_si128(c2[955],_mm_xor_si128(c2[775],_mm_xor_si128(c2[415],_mm_xor_si128(c2[2773],_mm_xor_si128(c2[2593],_mm_xor_si128(c2[794],_mm_xor_si128(c2[2237],_mm_xor_si128(c2[1718],_mm_xor_si128(c2[1538],_mm_xor_si128(c2[1894],_mm_xor_si128(c2[1715],_mm_xor_si128(c2[1910],_mm_xor_si128(c2[1730],_mm_xor_si128(c2[2088],_mm_xor_si128(c2[1908],_mm_xor_si128(c2[2816],_mm_xor_si128(c2[1747],_mm_xor_si128(c2[1567],_mm_xor_si128(c2[1393],_mm_xor_si128(c2[2312],_mm_xor_si128(c2[2132],_mm_xor_si128(c2[1046],_mm_xor_si128(c2[1227],_mm_xor_si128(c2[1245],_mm_xor_si128(c2[1065],_mm_xor_si128(c2[1789],_mm_xor_si128(c2[1609],c2[1062]))))))))))))))))))))))))))))))))))))))))));
+     d2[288]=simde_mm_xor_si128(c2[1804],simde_mm_xor_si128(c2[1624],simde_mm_xor_si128(c2[1805],simde_mm_xor_si128(c2[1625],simde_mm_xor_si128(c2[901],simde_mm_xor_si128(c2[2],simde_mm_xor_si128(c2[204],simde_mm_xor_si128(c2[24],simde_mm_xor_si128(c2[20],simde_mm_xor_si128(c2[2540],simde_mm_xor_si128(c2[2203],simde_mm_xor_si128(c2[2023],simde_mm_xor_si128(c2[2738],simde_mm_xor_si128(c2[1860],simde_mm_xor_si128(c2[1680],simde_mm_xor_si128(c2[955],simde_mm_xor_si128(c2[775],simde_mm_xor_si128(c2[415],simde_mm_xor_si128(c2[2773],simde_mm_xor_si128(c2[2593],simde_mm_xor_si128(c2[794],simde_mm_xor_si128(c2[2237],simde_mm_xor_si128(c2[1718],simde_mm_xor_si128(c2[1538],simde_mm_xor_si128(c2[1894],simde_mm_xor_si128(c2[1715],simde_mm_xor_si128(c2[1910],simde_mm_xor_si128(c2[1730],simde_mm_xor_si128(c2[2088],simde_mm_xor_si128(c2[1908],simde_mm_xor_si128(c2[2816],simde_mm_xor_si128(c2[1747],simde_mm_xor_si128(c2[1567],simde_mm_xor_si128(c2[1393],simde_mm_xor_si128(c2[2312],simde_mm_xor_si128(c2[2132],simde_mm_xor_si128(c2[1046],simde_mm_xor_si128(c2[1227],simde_mm_xor_si128(c2[1245],simde_mm_xor_si128(c2[1065],simde_mm_xor_si128(c2[1789],simde_mm_xor_si128(c2[1609],c2[1062]))))))))))))))))))))))))))))))))))))))))));
 
 //row: 33
-     d2[297]=_mm_xor_si128(c2[720],_mm_xor_si128(c2[721],_mm_xor_si128(c2[6],_mm_xor_si128(c2[1999],_mm_xor_si128(c2[2004],_mm_xor_si128(c2[1645],_mm_xor_si128(c2[1119],_mm_xor_si128(c2[1843],_mm_xor_si128(c2[1304],_mm_xor_si128(c2[776],_mm_xor_si128(c2[2759],_mm_xor_si128(c2[2399],_mm_xor_si128(c2[1698],_mm_xor_si128(c2[2778],_mm_xor_si128(c2[1333],_mm_xor_si128(c2[634],_mm_xor_si128(c2[990],_mm_xor_si128(c2[835],_mm_xor_si128(c2[1013],_mm_xor_si128(c2[1912],_mm_xor_si128(c2[672],_mm_xor_si128(c2[489],_mm_xor_si128(c2[1206],_mm_xor_si128(c2[1228],_mm_xor_si128(c2[151],_mm_xor_si128(c2[332],_mm_xor_si128(c2[170],_mm_xor_si128(c2[705],c2[167]))))))))))))))))))))))))))));
+     d2[297]=simde_mm_xor_si128(c2[720],simde_mm_xor_si128(c2[721],simde_mm_xor_si128(c2[6],simde_mm_xor_si128(c2[1999],simde_mm_xor_si128(c2[2004],simde_mm_xor_si128(c2[1645],simde_mm_xor_si128(c2[1119],simde_mm_xor_si128(c2[1843],simde_mm_xor_si128(c2[1304],simde_mm_xor_si128(c2[776],simde_mm_xor_si128(c2[2759],simde_mm_xor_si128(c2[2399],simde_mm_xor_si128(c2[1698],simde_mm_xor_si128(c2[2778],simde_mm_xor_si128(c2[1333],simde_mm_xor_si128(c2[634],simde_mm_xor_si128(c2[990],simde_mm_xor_si128(c2[835],simde_mm_xor_si128(c2[1013],simde_mm_xor_si128(c2[1912],simde_mm_xor_si128(c2[672],simde_mm_xor_si128(c2[489],simde_mm_xor_si128(c2[1206],simde_mm_xor_si128(c2[1228],simde_mm_xor_si128(c2[151],simde_mm_xor_si128(c2[332],simde_mm_xor_si128(c2[170],simde_mm_xor_si128(c2[705],c2[167]))))))))))))))))))))))))))));
 
 //row: 34
-     d2[306]=_mm_xor_si128(c2[2161],_mm_xor_si128(c2[1981],_mm_xor_si128(c2[727],_mm_xor_si128(c2[2162],_mm_xor_si128(c2[1982],_mm_xor_si128(c2[728],_mm_xor_si128(c2[1267],_mm_xor_si128(c2[4],_mm_xor_si128(c2[902],_mm_xor_si128(c2[561],_mm_xor_si128(c2[381],_mm_xor_si128(c2[2006],_mm_xor_si128(c2[386],_mm_xor_si128(c2[2002],_mm_xor_si128(c2[18],_mm_xor_si128(c2[1823],_mm_xor_si128(c2[1643],_mm_xor_si128(c2[2560],_mm_xor_si128(c2[2380],_mm_xor_si128(c2[1117],_mm_xor_si128(c2[216],_mm_xor_si128(c2[2021],_mm_xor_si128(c2[1841],_mm_xor_si128(c2[2217],_mm_xor_si128(c2[2037],_mm_xor_si128(c2[774],_mm_xor_si128(c2[1321],_mm_xor_si128(c2[1141],_mm_xor_si128(c2[2757],_mm_xor_si128(c2[781],_mm_xor_si128(c2[2397],_mm_xor_si128(c2[260],_mm_xor_si128(c2[80],_mm_xor_si128(c2[1696],_mm_xor_si128(c2[1160],_mm_xor_si128(c2[2776],_mm_xor_si128(c2[2594],_mm_xor_si128(c2[1520],_mm_xor_si128(c2[1340],_mm_xor_si128(c2[2075],_mm_xor_si128(c2[1895],_mm_xor_si128(c2[632],_mm_xor_si128(c2[2251],_mm_xor_si128(c2[1177],_mm_xor_si128(c2[997],_mm_xor_si128(c2[2276],_mm_xor_si128(c2[2096],_mm_xor_si128(c2[833],_mm_xor_si128(c2[2454],_mm_xor_si128(c2[2274],_mm_xor_si128(c2[1011],_mm_xor_si128(c2[294],_mm_xor_si128(c2[2090],_mm_xor_si128(c2[1910],_mm_xor_si128(c2[2113],_mm_xor_si128(c2[1933],_mm_xor_si128(c2[670],_mm_xor_si128(c2[1750],_mm_xor_si128(c2[667],_mm_xor_si128(c2[487],_mm_xor_si128(c2[2669],_mm_xor_si128(c2[2489],_mm_xor_si128(c2[1226],_mm_xor_si128(c2[1412],_mm_xor_si128(c2[149],_mm_xor_si128(c2[1584],_mm_xor_si128(c2[510],_mm_xor_si128(c2[330],_mm_xor_si128(c2[1602],_mm_xor_si128(c2[1422],_mm_xor_si128(c2[168],_mm_xor_si128(c2[2146],_mm_xor_si128(c2[1966],_mm_xor_si128(c2[703],_mm_xor_si128(c2[1428],_mm_xor_si128(c2[345],c2[165]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+     d2[306]=simde_mm_xor_si128(c2[2161],simde_mm_xor_si128(c2[1981],simde_mm_xor_si128(c2[727],simde_mm_xor_si128(c2[2162],simde_mm_xor_si128(c2[1982],simde_mm_xor_si128(c2[728],simde_mm_xor_si128(c2[1267],simde_mm_xor_si128(c2[4],simde_mm_xor_si128(c2[902],simde_mm_xor_si128(c2[561],simde_mm_xor_si128(c2[381],simde_mm_xor_si128(c2[2006],simde_mm_xor_si128(c2[386],simde_mm_xor_si128(c2[2002],simde_mm_xor_si128(c2[18],simde_mm_xor_si128(c2[1823],simde_mm_xor_si128(c2[1643],simde_mm_xor_si128(c2[2560],simde_mm_xor_si128(c2[2380],simde_mm_xor_si128(c2[1117],simde_mm_xor_si128(c2[216],simde_mm_xor_si128(c2[2021],simde_mm_xor_si128(c2[1841],simde_mm_xor_si128(c2[2217],simde_mm_xor_si128(c2[2037],simde_mm_xor_si128(c2[774],simde_mm_xor_si128(c2[1321],simde_mm_xor_si128(c2[1141],simde_mm_xor_si128(c2[2757],simde_mm_xor_si128(c2[781],simde_mm_xor_si128(c2[2397],simde_mm_xor_si128(c2[260],simde_mm_xor_si128(c2[80],simde_mm_xor_si128(c2[1696],simde_mm_xor_si128(c2[1160],simde_mm_xor_si128(c2[2776],simde_mm_xor_si128(c2[2594],simde_mm_xor_si128(c2[1520],simde_mm_xor_si128(c2[1340],simde_mm_xor_si128(c2[2075],simde_mm_xor_si128(c2[1895],simde_mm_xor_si128(c2[632],simde_mm_xor_si128(c2[2251],simde_mm_xor_si128(c2[1177],simde_mm_xor_si128(c2[997],simde_mm_xor_si128(c2[2276],simde_mm_xor_si128(c2[2096],simde_mm_xor_si128(c2[833],simde_mm_xor_si128(c2[2454],simde_mm_xor_si128(c2[2274],simde_mm_xor_si128(c2[1011],simde_mm_xor_si128(c2[294],simde_mm_xor_si128(c2[2090],simde_mm_xor_si128(c2[1910],simde_mm_xor_si128(c2[2113],simde_mm_xor_si128(c2[1933],simde_mm_xor_si128(c2[670],simde_mm_xor_si128(c2[1750],simde_mm_xor_si128(c2[667],simde_mm_xor_si128(c2[487],simde_mm_xor_si128(c2[2669],simde_mm_xor_si128(c2[2489],simde_mm_xor_si128(c2[1226],simde_mm_xor_si128(c2[1412],simde_mm_xor_si128(c2[149],simde_mm_xor_si128(c2[1584],simde_mm_xor_si128(c2[510],simde_mm_xor_si128(c2[330],simde_mm_xor_si128(c2[1602],simde_mm_xor_si128(c2[1422],simde_mm_xor_si128(c2[168],simde_mm_xor_si128(c2[2146],simde_mm_xor_si128(c2[1966],simde_mm_xor_si128(c2[703],simde_mm_xor_si128(c2[1428],simde_mm_xor_si128(c2[345],c2[165]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
 
 //row: 35
-     d2[315]=_mm_xor_si128(c2[6],_mm_xor_si128(c2[2705],_mm_xor_si128(c2[2706],_mm_xor_si128(c2[1982],_mm_xor_si128(c2[1285],_mm_xor_si128(c2[1105],_mm_xor_si128(c2[1101],_mm_xor_si128(c2[742],_mm_xor_si128(c2[2181],_mm_xor_si128(c2[396],_mm_xor_si128(c2[216],_mm_xor_si128(c2[940],_mm_xor_si128(c2[62],_mm_xor_si128(c2[2761],_mm_xor_si128(c2[1856],_mm_xor_si128(c2[1496],_mm_xor_si128(c2[795],_mm_xor_si128(c2[1875],_mm_xor_si128(c2[439],_mm_xor_si128(c2[2610],_mm_xor_si128(c2[96],_mm_xor_si128(c2[1718],_mm_xor_si128(c2[112],_mm_xor_si128(c2[2811],_mm_xor_si128(c2[110],_mm_xor_si128(c2[1009],_mm_xor_si128(c2[2648],_mm_xor_si128(c2[2474],_mm_xor_si128(c2[325],_mm_xor_si128(c2[2127],_mm_xor_si128(c2[2308],_mm_xor_si128(c2[2326],_mm_xor_si128(c2[2146],_mm_xor_si128(c2[2690],c2[2143]))))))))))))))))))))))))))))))))));
+     d2[315]=simde_mm_xor_si128(c2[6],simde_mm_xor_si128(c2[2705],simde_mm_xor_si128(c2[2706],simde_mm_xor_si128(c2[1982],simde_mm_xor_si128(c2[1285],simde_mm_xor_si128(c2[1105],simde_mm_xor_si128(c2[1101],simde_mm_xor_si128(c2[742],simde_mm_xor_si128(c2[2181],simde_mm_xor_si128(c2[396],simde_mm_xor_si128(c2[216],simde_mm_xor_si128(c2[940],simde_mm_xor_si128(c2[62],simde_mm_xor_si128(c2[2761],simde_mm_xor_si128(c2[1856],simde_mm_xor_si128(c2[1496],simde_mm_xor_si128(c2[795],simde_mm_xor_si128(c2[1875],simde_mm_xor_si128(c2[439],simde_mm_xor_si128(c2[2610],simde_mm_xor_si128(c2[96],simde_mm_xor_si128(c2[1718],simde_mm_xor_si128(c2[112],simde_mm_xor_si128(c2[2811],simde_mm_xor_si128(c2[110],simde_mm_xor_si128(c2[1009],simde_mm_xor_si128(c2[2648],simde_mm_xor_si128(c2[2474],simde_mm_xor_si128(c2[325],simde_mm_xor_si128(c2[2127],simde_mm_xor_si128(c2[2308],simde_mm_xor_si128(c2[2326],simde_mm_xor_si128(c2[2146],simde_mm_xor_si128(c2[2690],c2[2143]))))))))))))))))))))))))))))))))));
 
 //row: 36
-     d2[324]=_mm_xor_si128(c2[1627],_mm_xor_si128(c2[224],c2[1927]));
+     d2[324]=simde_mm_xor_si128(c2[1627],simde_mm_xor_si128(c2[224],c2[1927]));
 
 //row: 37
-     d2[333]=_mm_xor_si128(c2[546],_mm_xor_si128(c2[1803],_mm_xor_si128(c2[547],_mm_xor_si128(c2[1804],_mm_xor_si128(c2[2702],_mm_xor_si128(c2[1080],_mm_xor_si128(c2[1825],_mm_xor_si128(c2[203],_mm_xor_si128(c2[1821],_mm_xor_si128(c2[199],_mm_xor_si128(c2[1462],_mm_xor_si128(c2[20],_mm_xor_si128(c2[2719],_mm_xor_si128(c2[936],_mm_xor_si128(c2[2202],_mm_xor_si128(c2[1660],_mm_xor_si128(c2[218],_mm_xor_si128(c2[38],_mm_xor_si128(c2[602],_mm_xor_si128(c2[1859],_mm_xor_si128(c2[2576],_mm_xor_si128(c2[954],_mm_xor_si128(c2[2216],_mm_xor_si128(c2[594],_mm_xor_si128(c2[1515],_mm_xor_si128(c2[2772],_mm_xor_si128(c2[2595],_mm_xor_si128(c2[973],_mm_xor_si128(c2[1159],_mm_xor_si128(c2[2596],_mm_xor_si128(c2[2416],_mm_xor_si128(c2[451],_mm_xor_si128(c2[1717],_mm_xor_si128(c2[816],_mm_xor_si128(c2[2253],_mm_xor_si128(c2[2073],_mm_xor_si128(c2[652],_mm_xor_si128(c2[1909],_mm_xor_si128(c2[830],_mm_xor_si128(c2[2096],_mm_xor_si128(c2[1729],_mm_xor_si128(c2[296],_mm_xor_si128(c2[116],_mm_xor_si128(c2[489],_mm_xor_si128(c2[1746],_mm_xor_si128(c2[306],_mm_xor_si128(c2[1752],_mm_xor_si128(c2[1572],_mm_xor_si128(c2[1045],_mm_xor_si128(c2[2311],_mm_xor_si128(c2[2847],_mm_xor_si128(c2[1225],_mm_xor_si128(c2[149],_mm_xor_si128(c2[1586],_mm_xor_si128(c2[1406],_mm_xor_si128(c2[2866],_mm_xor_si128(c2[1244],_mm_xor_si128(c2[522],_mm_xor_si128(c2[1788],_mm_xor_si128(c2[2863],_mm_xor_si128(c2[1430],c2[1250])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+     d2[333]=simde_mm_xor_si128(c2[546],simde_mm_xor_si128(c2[1803],simde_mm_xor_si128(c2[547],simde_mm_xor_si128(c2[1804],simde_mm_xor_si128(c2[2702],simde_mm_xor_si128(c2[1080],simde_mm_xor_si128(c2[1825],simde_mm_xor_si128(c2[203],simde_mm_xor_si128(c2[1821],simde_mm_xor_si128(c2[199],simde_mm_xor_si128(c2[1462],simde_mm_xor_si128(c2[20],simde_mm_xor_si128(c2[2719],simde_mm_xor_si128(c2[936],simde_mm_xor_si128(c2[2202],simde_mm_xor_si128(c2[1660],simde_mm_xor_si128(c2[218],simde_mm_xor_si128(c2[38],simde_mm_xor_si128(c2[602],simde_mm_xor_si128(c2[1859],simde_mm_xor_si128(c2[2576],simde_mm_xor_si128(c2[954],simde_mm_xor_si128(c2[2216],simde_mm_xor_si128(c2[594],simde_mm_xor_si128(c2[1515],simde_mm_xor_si128(c2[2772],simde_mm_xor_si128(c2[2595],simde_mm_xor_si128(c2[973],simde_mm_xor_si128(c2[1159],simde_mm_xor_si128(c2[2596],simde_mm_xor_si128(c2[2416],simde_mm_xor_si128(c2[451],simde_mm_xor_si128(c2[1717],simde_mm_xor_si128(c2[816],simde_mm_xor_si128(c2[2253],simde_mm_xor_si128(c2[2073],simde_mm_xor_si128(c2[652],simde_mm_xor_si128(c2[1909],simde_mm_xor_si128(c2[830],simde_mm_xor_si128(c2[2096],simde_mm_xor_si128(c2[1729],simde_mm_xor_si128(c2[296],simde_mm_xor_si128(c2[116],simde_mm_xor_si128(c2[489],simde_mm_xor_si128(c2[1746],simde_mm_xor_si128(c2[306],simde_mm_xor_si128(c2[1752],simde_mm_xor_si128(c2[1572],simde_mm_xor_si128(c2[1045],simde_mm_xor_si128(c2[2311],simde_mm_xor_si128(c2[2847],simde_mm_xor_si128(c2[1225],simde_mm_xor_si128(c2[149],simde_mm_xor_si128(c2[1586],simde_mm_xor_si128(c2[1406],simde_mm_xor_si128(c2[2866],simde_mm_xor_si128(c2[1244],simde_mm_xor_si128(c2[522],simde_mm_xor_si128(c2[1788],simde_mm_xor_si128(c2[2863],simde_mm_xor_si128(c2[1430],c2[1250])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
 
 //row: 38
-     d2[342]=_mm_xor_si128(c2[720],_mm_xor_si128(c2[540],_mm_xor_si128(c2[541],_mm_xor_si128(c2[2705],_mm_xor_si128(c2[1999],_mm_xor_si128(c2[1819],_mm_xor_si128(c2[1824],_mm_xor_si128(c2[1465],_mm_xor_si128(c2[198],_mm_xor_si128(c2[1119],_mm_xor_si128(c2[939],_mm_xor_si128(c2[1663],_mm_xor_si128(c2[776],_mm_xor_si128(c2[596],_mm_xor_si128(c2[2579],_mm_xor_si128(c2[2219],_mm_xor_si128(c2[1518],_mm_xor_si128(c2[2598],_mm_xor_si128(c2[1153],_mm_xor_si128(c2[454],_mm_xor_si128(c2[810],_mm_xor_si128(c2[1174],_mm_xor_si128(c2[835],_mm_xor_si128(c2[655],_mm_xor_si128(c2[833],_mm_xor_si128(c2[1732],_mm_xor_si128(c2[492],_mm_xor_si128(c2[309],_mm_xor_si128(c2[1048],_mm_xor_si128(c2[2850],_mm_xor_si128(c2[152],_mm_xor_si128(c2[170],_mm_xor_si128(c2[2869],_mm_xor_si128(c2[525],c2[2866]))))))))))))))))))))))))))))))))));
+     d2[342]=simde_mm_xor_si128(c2[720],simde_mm_xor_si128(c2[540],simde_mm_xor_si128(c2[541],simde_mm_xor_si128(c2[2705],simde_mm_xor_si128(c2[1999],simde_mm_xor_si128(c2[1819],simde_mm_xor_si128(c2[1824],simde_mm_xor_si128(c2[1465],simde_mm_xor_si128(c2[198],simde_mm_xor_si128(c2[1119],simde_mm_xor_si128(c2[939],simde_mm_xor_si128(c2[1663],simde_mm_xor_si128(c2[776],simde_mm_xor_si128(c2[596],simde_mm_xor_si128(c2[2579],simde_mm_xor_si128(c2[2219],simde_mm_xor_si128(c2[1518],simde_mm_xor_si128(c2[2598],simde_mm_xor_si128(c2[1153],simde_mm_xor_si128(c2[454],simde_mm_xor_si128(c2[810],simde_mm_xor_si128(c2[1174],simde_mm_xor_si128(c2[835],simde_mm_xor_si128(c2[655],simde_mm_xor_si128(c2[833],simde_mm_xor_si128(c2[1732],simde_mm_xor_si128(c2[492],simde_mm_xor_si128(c2[309],simde_mm_xor_si128(c2[1048],simde_mm_xor_si128(c2[2850],simde_mm_xor_si128(c2[152],simde_mm_xor_si128(c2[170],simde_mm_xor_si128(c2[2869],simde_mm_xor_si128(c2[525],c2[2866]))))))))))))))))))))))))))))))))));
 
 //row: 39
-     d2[351]=_mm_xor_si128(c2[180],_mm_xor_si128(c2[0],_mm_xor_si128(c2[181],_mm_xor_si128(c2[1],_mm_xor_si128(c2[2165],_mm_xor_si128(c2[1267],_mm_xor_si128(c2[1459],_mm_xor_si128(c2[1279],_mm_xor_si128(c2[1284],_mm_xor_si128(c2[925],_mm_xor_si128(c2[579],_mm_xor_si128(c2[399],_mm_xor_si128(c2[1123],_mm_xor_si128(c2[236],_mm_xor_si128(c2[56],_mm_xor_si128(c2[2219],_mm_xor_si128(c2[2039],_mm_xor_si128(c2[1679],_mm_xor_si128(c2[1158],_mm_xor_si128(c2[978],_mm_xor_si128(c2[2058],_mm_xor_si128(c2[613],_mm_xor_si128(c2[94],_mm_xor_si128(c2[2793],_mm_xor_si128(c2[270],_mm_xor_si128(c2[295],_mm_xor_si128(c2[115],_mm_xor_si128(c2[473],_mm_xor_si128(c2[293],_mm_xor_si128(c2[1192],_mm_xor_si128(c2[132],_mm_xor_si128(c2[2831],_mm_xor_si128(c2[2648],_mm_xor_si128(c2[128],_mm_xor_si128(c2[688],_mm_xor_si128(c2[508],_mm_xor_si128(c2[2310],_mm_xor_si128(c2[2491],_mm_xor_si128(c2[2509],_mm_xor_si128(c2[2329],_mm_xor_si128(c2[165],_mm_xor_si128(c2[2864],c2[2326]))))))))))))))))))))))))))))))))))))))))));
+     d2[351]=simde_mm_xor_si128(c2[180],simde_mm_xor_si128(c2[0],simde_mm_xor_si128(c2[181],simde_mm_xor_si128(c2[1],simde_mm_xor_si128(c2[2165],simde_mm_xor_si128(c2[1267],simde_mm_xor_si128(c2[1459],simde_mm_xor_si128(c2[1279],simde_mm_xor_si128(c2[1284],simde_mm_xor_si128(c2[925],simde_mm_xor_si128(c2[579],simde_mm_xor_si128(c2[399],simde_mm_xor_si128(c2[1123],simde_mm_xor_si128(c2[236],simde_mm_xor_si128(c2[56],simde_mm_xor_si128(c2[2219],simde_mm_xor_si128(c2[2039],simde_mm_xor_si128(c2[1679],simde_mm_xor_si128(c2[1158],simde_mm_xor_si128(c2[978],simde_mm_xor_si128(c2[2058],simde_mm_xor_si128(c2[613],simde_mm_xor_si128(c2[94],simde_mm_xor_si128(c2[2793],simde_mm_xor_si128(c2[270],simde_mm_xor_si128(c2[295],simde_mm_xor_si128(c2[115],simde_mm_xor_si128(c2[473],simde_mm_xor_si128(c2[293],simde_mm_xor_si128(c2[1192],simde_mm_xor_si128(c2[132],simde_mm_xor_si128(c2[2831],simde_mm_xor_si128(c2[2648],simde_mm_xor_si128(c2[128],simde_mm_xor_si128(c2[688],simde_mm_xor_si128(c2[508],simde_mm_xor_si128(c2[2310],simde_mm_xor_si128(c2[2491],simde_mm_xor_si128(c2[2509],simde_mm_xor_si128(c2[2329],simde_mm_xor_si128(c2[165],simde_mm_xor_si128(c2[2864],c2[2326]))))))))))))))))))))))))))))))))))))))))));
 
 //row: 40
-     d2[360]=_mm_xor_si128(c2[1984],_mm_xor_si128(c2[1446],_mm_xor_si128(c2[1985],_mm_xor_si128(c2[1447],_mm_xor_si128(c2[1261],_mm_xor_si128(c2[723],_mm_xor_si128(c2[384],_mm_xor_si128(c2[2725],_mm_xor_si128(c2[380],_mm_xor_si128(c2[2721],_mm_xor_si128(c2[21],_mm_xor_si128(c2[2542],_mm_xor_si128(c2[2362],_mm_xor_si128(c2[2383],_mm_xor_si128(c2[1836],_mm_xor_si128(c2[219],_mm_xor_si128(c2[2740],_mm_xor_si128(c2[2560],_mm_xor_si128(c2[1116],_mm_xor_si128(c2[2040],_mm_xor_si128(c2[1502],_mm_xor_si128(c2[1135],_mm_xor_si128(c2[597],_mm_xor_si128(c2[775],_mm_xor_si128(c2[237],_mm_xor_si128(c2[74],_mm_xor_si128(c2[2415],_mm_xor_si128(c2[1154],_mm_xor_si128(c2[616],_mm_xor_si128(c2[2597],_mm_xor_si128(c2[2239],_mm_xor_si128(c2[2059],_mm_xor_si128(c2[1898],_mm_xor_si128(c2[1351],_mm_xor_si128(c2[2254],_mm_xor_si128(c2[1896],_mm_xor_si128(c2[1716],_mm_xor_si128(c2[2090],_mm_xor_si128(c2[1552],_mm_xor_si128(c2[2268],_mm_xor_si128(c2[1730],_mm_xor_si128(c2[288],_mm_xor_si128(c2[2809],_mm_xor_si128(c2[2629],_mm_xor_si128(c2[1927],_mm_xor_si128(c2[1389],_mm_xor_si128(c2[1753],_mm_xor_si128(c2[1386],_mm_xor_si128(c2[1206],_mm_xor_si128(c2[2492],_mm_xor_si128(c2[1945],_mm_xor_si128(c2[1406],_mm_xor_si128(c2[868],_mm_xor_si128(c2[1587],_mm_xor_si128(c2[1229],_mm_xor_si128(c2[1049],_mm_xor_si128(c2[1425],_mm_xor_si128(c2[887],_mm_xor_si128(c2[1969],_mm_xor_si128(c2[1422],_mm_xor_si128(c2[1422],_mm_xor_si128(c2[1064],c2[884]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+     d2[360]=simde_mm_xor_si128(c2[1984],simde_mm_xor_si128(c2[1446],simde_mm_xor_si128(c2[1985],simde_mm_xor_si128(c2[1447],simde_mm_xor_si128(c2[1261],simde_mm_xor_si128(c2[723],simde_mm_xor_si128(c2[384],simde_mm_xor_si128(c2[2725],simde_mm_xor_si128(c2[380],simde_mm_xor_si128(c2[2721],simde_mm_xor_si128(c2[21],simde_mm_xor_si128(c2[2542],simde_mm_xor_si128(c2[2362],simde_mm_xor_si128(c2[2383],simde_mm_xor_si128(c2[1836],simde_mm_xor_si128(c2[219],simde_mm_xor_si128(c2[2740],simde_mm_xor_si128(c2[2560],simde_mm_xor_si128(c2[1116],simde_mm_xor_si128(c2[2040],simde_mm_xor_si128(c2[1502],simde_mm_xor_si128(c2[1135],simde_mm_xor_si128(c2[597],simde_mm_xor_si128(c2[775],simde_mm_xor_si128(c2[237],simde_mm_xor_si128(c2[74],simde_mm_xor_si128(c2[2415],simde_mm_xor_si128(c2[1154],simde_mm_xor_si128(c2[616],simde_mm_xor_si128(c2[2597],simde_mm_xor_si128(c2[2239],simde_mm_xor_si128(c2[2059],simde_mm_xor_si128(c2[1898],simde_mm_xor_si128(c2[1351],simde_mm_xor_si128(c2[2254],simde_mm_xor_si128(c2[1896],simde_mm_xor_si128(c2[1716],simde_mm_xor_si128(c2[2090],simde_mm_xor_si128(c2[1552],simde_mm_xor_si128(c2[2268],simde_mm_xor_si128(c2[1730],simde_mm_xor_si128(c2[288],simde_mm_xor_si128(c2[2809],simde_mm_xor_si128(c2[2629],simde_mm_xor_si128(c2[1927],simde_mm_xor_si128(c2[1389],simde_mm_xor_si128(c2[1753],simde_mm_xor_si128(c2[1386],simde_mm_xor_si128(c2[1206],simde_mm_xor_si128(c2[2492],simde_mm_xor_si128(c2[1945],simde_mm_xor_si128(c2[1406],simde_mm_xor_si128(c2[868],simde_mm_xor_si128(c2[1587],simde_mm_xor_si128(c2[1229],simde_mm_xor_si128(c2[1049],simde_mm_xor_si128(c2[1425],simde_mm_xor_si128(c2[887],simde_mm_xor_si128(c2[1969],simde_mm_xor_si128(c2[1422],simde_mm_xor_si128(c2[1422],simde_mm_xor_si128(c2[1064],c2[884]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
 
 //row: 41
-     d2[369]=_mm_xor_si128(c2[1081],_mm_xor_si128(c2[901],_mm_xor_si128(c2[902],_mm_xor_si128(c2[187],_mm_xor_si128(c2[2360],_mm_xor_si128(c2[2180],_mm_xor_si128(c2[2185],_mm_xor_si128(c2[1826],_mm_xor_si128(c2[21],_mm_xor_si128(c2[1480],_mm_xor_si128(c2[1300],_mm_xor_si128(c2[2024],_mm_xor_si128(c2[1137],_mm_xor_si128(c2[957],_mm_xor_si128(c2[61],_mm_xor_si128(c2[2580],_mm_xor_si128(c2[1879],_mm_xor_si128(c2[80],_mm_xor_si128(c2[1514],_mm_xor_si128(c2[815],_mm_xor_si128(c2[1171],_mm_xor_si128(c2[2792],_mm_xor_si128(c2[1196],_mm_xor_si128(c2[1016],_mm_xor_si128(c2[1194],_mm_xor_si128(c2[2093],_mm_xor_si128(c2[853],_mm_xor_si128(c2[670],_mm_xor_si128(c2[1409],_mm_xor_si128(c2[332],_mm_xor_si128(c2[504],_mm_xor_si128(c2[522],_mm_xor_si128(c2[342],_mm_xor_si128(c2[886],c2[348]))))))))))))))))))))))))))))))))));
+     d2[369]=simde_mm_xor_si128(c2[1081],simde_mm_xor_si128(c2[901],simde_mm_xor_si128(c2[902],simde_mm_xor_si128(c2[187],simde_mm_xor_si128(c2[2360],simde_mm_xor_si128(c2[2180],simde_mm_xor_si128(c2[2185],simde_mm_xor_si128(c2[1826],simde_mm_xor_si128(c2[21],simde_mm_xor_si128(c2[1480],simde_mm_xor_si128(c2[1300],simde_mm_xor_si128(c2[2024],simde_mm_xor_si128(c2[1137],simde_mm_xor_si128(c2[957],simde_mm_xor_si128(c2[61],simde_mm_xor_si128(c2[2580],simde_mm_xor_si128(c2[1879],simde_mm_xor_si128(c2[80],simde_mm_xor_si128(c2[1514],simde_mm_xor_si128(c2[815],simde_mm_xor_si128(c2[1171],simde_mm_xor_si128(c2[2792],simde_mm_xor_si128(c2[1196],simde_mm_xor_si128(c2[1016],simde_mm_xor_si128(c2[1194],simde_mm_xor_si128(c2[2093],simde_mm_xor_si128(c2[853],simde_mm_xor_si128(c2[670],simde_mm_xor_si128(c2[1409],simde_mm_xor_si128(c2[332],simde_mm_xor_si128(c2[504],simde_mm_xor_si128(c2[522],simde_mm_xor_si128(c2[342],simde_mm_xor_si128(c2[886],c2[348]))))))))))))))))))))))))))))))))));
   }
 }
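The hunks above apply a purely mechanical rename to this generated LDPC encoder: every native `_mm_xor_si128` becomes `simde_mm_xor_si128`, so the same parity expressions compile on non-x86 hosts. As a rough sketch of why a rename is sufficient (simplified, with illustrative names — this is not the actual SIMDE implementation, and other fallbacks are omitted), the wrapper forwards to the native intrinsic on x86 and to the NEON equivalent elsewhere:

#if defined(__SSE2__)
  #include <emmintrin.h>
  typedef __m128i xor128_t;
  static inline xor128_t xor128(xor128_t a, xor128_t b) {
    return _mm_xor_si128(a, b);   /* native SSE2 128-bit XOR */
  }
#elif defined(__ARM_NEON)
  #include <arm_neon.h>
  typedef int64x2_t xor128_t;
  static inline xor128_t xor128(xor128_t a, xor128_t b) {
    return veorq_s64(a, b);       /* NEON 128-bit XOR */
  }
#endif

Since XOR behaves identically regardless of lane width, the emulated path is bit-exact with the native one, which is what makes these machine-generated parity chains portable by renaming alone.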
diff --git a/openair1/PHY/CODING/nrLDPC_encoder/ldpc_BG2_Zc160_byte.c b/openair1/PHY/CODING/nrLDPC_encoder/ldpc_BG2_Zc160_byte.c
index 1c95ccb31581489f223d0960618d37c45c8c2036..4951e3071e4d4b43d2d3fff7982f39ed8ee3808b 100644
--- a/openair1/PHY/CODING/nrLDPC_encoder/ldpc_BG2_Zc160_byte.c
+++ b/openair1/PHY/CODING/nrLDPC_encoder/ldpc_BG2_Zc160_byte.c
@@ -1,9 +1,10 @@
+#ifdef __AVX2__
 #include "PHY/sse_intrin.h"
 // generated code for Zc=160, byte encoding
 static inline __attribute__ ((no_sanitize("address", "undefined"))) void ldpc_BG2_Zc160_byte(uint8_t *c,uint8_t *d) {
-  __m256i *csimd=(__m256i *)c,*dsimd=(__m256i *)d;
+  simde__m256i *csimd=(simde__m256i *)c,*dsimd=(simde__m256i *)d;
 
-  __m256i *c2,*d2;
+  simde__m256i *c2,*d2;
 
   int i2;
   for (i2=0; i2<5; i2++) {
@@ -137,3 +138,4 @@ static inline __attribute__ ((no_sanitize("address", "undefined"))) void ldpc_BG
      d2[205]=simde_mm256_xor_si256(c2[2301],simde_mm256_xor_si256(c2[2201],simde_mm256_xor_si256(c2[3100],simde_mm256_xor_si256(c2[1002],simde_mm256_xor_si256(c2[2311],simde_mm256_xor_si256(c2[2211],simde_mm256_xor_si256(c2[2014],simde_mm256_xor_si256(c2[2812],simde_mm256_xor_si256(c2[10],simde_mm256_xor_si256(c2[2321],simde_mm256_xor_si256(c2[2221],simde_mm256_xor_si256(c2[522],simde_mm256_xor_si256(c2[2331],simde_mm256_xor_si256(c2[2231],simde_mm256_xor_si256(c2[1830],simde_mm256_xor_si256(c2[2534],simde_mm256_xor_si256(c2[2241],simde_mm256_xor_si256(c2[3141],simde_mm256_xor_si256(c2[2844],simde_mm256_xor_si256(c2[2251],simde_mm256_xor_si256(c2[851],simde_mm256_xor_si256(c2[250],simde_mm256_xor_si256(c2[2361],simde_mm256_xor_si256(c2[2261],simde_mm256_xor_si256(c2[1464],simde_mm256_xor_si256(c2[262],simde_mm256_xor_si256(c2[2271],simde_mm256_xor_si256(c2[1573],simde_mm256_xor_si256(c2[2281],simde_mm256_xor_si256(c2[280],simde_mm256_xor_si256(c2[1183],simde_mm256_xor_si256(c2[2391],simde_mm256_xor_si256(c2[2291],simde_mm256_xor_si256(c2[1393],c2[493]))))))))))))))))))))))))))))))))));
   }
 }
+#endif
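This `#ifdef __AVX2__` guard pairs with the `#ifndef __AVX2__` at the top of the new `ldpc_BG2_Zc160_byte_128.c` below, so exactly one definition of `ldpc_BG2_Zc160_byte()` is compiled into the encoder. The 128-bit fallback halves the vector width and doubles the loop trip count (`i2<10` instead of `i2<5`), so both variants cover the same Zc=160 bytes per block. A minimal standalone check of that arithmetic (assuming one vector of the block is processed per iteration, which is what the loop bounds suggest):

#include <assert.h>
int main(void) {
  const int zc_bytes = 160;      /* "generated code for Zc=160, byte encoding" */
  assert( 5 * 32 == zc_bytes);   /* AVX2 file:    5 iterations x 32-byte simde__m256i */
  assert(10 * 16 == zc_bytes);   /* 128-bit file: 10 iterations x 16-byte simde__m128i */
  return 0;
}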
diff --git a/openair1/PHY/CODING/nrLDPC_encoder/ldpc_BG2_Zc160_byte_128.c b/openair1/PHY/CODING/nrLDPC_encoder/ldpc_BG2_Zc160_byte_128.c
new file mode 100644
index 0000000000000000000000000000000000000000..beb8def97bbceae31bb0f575ab7e386d72a0f960
--- /dev/null
+++ b/openair1/PHY/CODING/nrLDPC_encoder/ldpc_BG2_Zc160_byte_128.c
@@ -0,0 +1,141 @@
+#ifndef __AVX2__
+#include "PHY/sse_intrin.h"
+// generated code for Zc=160, byte encoding
+static inline void ldpc_BG2_Zc160_byte(uint8_t *c,uint8_t *d) {
+  simde__m128i *csimd=(simde__m128i *)c,*dsimd=(simde__m128i *)d;
+
+  simde__m128i *c2,*d2;
+
+  int i2;
+  for (i2=0; i2<10; i2++) {
+     c2=&csimd[i2];
+     d2=&dsimd[i2];
+
+//row: 0
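+// Each row below computes one 128-bit output word as the XOR of a fixed
+// set of input words; d2 indices step by 10 because the Zc=160-byte block
+// spans ten 128-bit lanes, selected by i2.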
+     d2[0]=simde_mm_xor_si128(c2[3009],simde_mm_xor_si128(c2[1608],simde_mm_xor_si128(c2[601],simde_mm_xor_si128(c2[3029],simde_mm_xor_si128(c2[2625],simde_mm_xor_si128(c2[1022],simde_mm_xor_si128(c2[3049],simde_mm_xor_si128(c2[2840],simde_mm_xor_si128(c2[3069],simde_mm_xor_si128(c2[2267],simde_mm_xor_si128(c2[466],simde_mm_xor_si128(c2[3089],simde_mm_xor_si128(c2[1680],simde_mm_xor_si128(c2[1086],simde_mm_xor_si128(c2[3109],simde_mm_xor_si128(c2[309],simde_mm_xor_si128(c2[3129],simde_mm_xor_si128(c2[1525],simde_mm_xor_si128(c2[2320],simde_mm_xor_si128(c2[3149],simde_mm_xor_si128(c2[1743],simde_mm_xor_si128(c2[3169],simde_mm_xor_si128(c2[2366],simde_mm_xor_si128(c2[963],simde_mm_xor_si128(c2[3189],simde_mm_xor_si128(c2[1383],c2[2782]))))))))))))))))))))))))));
+
+//row: 1
+     d2[10]=simde_mm_xor_si128(c2[0],simde_mm_xor_si128(c2[3009],simde_mm_xor_si128(c2[1608],simde_mm_xor_si128(c2[601],simde_mm_xor_si128(c2[20],simde_mm_xor_si128(c2[3029],simde_mm_xor_si128(c2[2625],simde_mm_xor_si128(c2[1022],simde_mm_xor_si128(c2[40],simde_mm_xor_si128(c2[3049],simde_mm_xor_si128(c2[2840],simde_mm_xor_si128(c2[60],simde_mm_xor_si128(c2[3069],simde_mm_xor_si128(c2[2267],simde_mm_xor_si128(c2[466],simde_mm_xor_si128(c2[3089],simde_mm_xor_si128(c2[1680],simde_mm_xor_si128(c2[1086],simde_mm_xor_si128(c2[3109],simde_mm_xor_si128(c2[309],simde_mm_xor_si128(c2[120],simde_mm_xor_si128(c2[3129],simde_mm_xor_si128(c2[1525],simde_mm_xor_si128(c2[2320],simde_mm_xor_si128(c2[3149],simde_mm_xor_si128(c2[1743],simde_mm_xor_si128(c2[3169],simde_mm_xor_si128(c2[2366],simde_mm_xor_si128(c2[963],simde_mm_xor_si128(c2[180],simde_mm_xor_si128(c2[3189],simde_mm_xor_si128(c2[1383],c2[2782]))))))))))))))))))))))))))))))));
+
+//row: 2
+     d2[20]=simde_mm_xor_si128(c2[0],simde_mm_xor_si128(c2[3009],simde_mm_xor_si128(c2[1808],simde_mm_xor_si128(c2[1608],simde_mm_xor_si128(c2[601],simde_mm_xor_si128(c2[20],simde_mm_xor_si128(c2[3029],simde_mm_xor_si128(c2[2625],simde_mm_xor_si128(c2[1022],simde_mm_xor_si128(c2[40],simde_mm_xor_si128(c2[3049],simde_mm_xor_si128(c2[2840],simde_mm_xor_si128(c2[60],simde_mm_xor_si128(c2[3069],simde_mm_xor_si128(c2[2467],simde_mm_xor_si128(c2[2267],simde_mm_xor_si128(c2[466],simde_mm_xor_si128(c2[80],simde_mm_xor_si128(c2[3089],simde_mm_xor_si128(c2[1680],simde_mm_xor_si128(c2[1086],simde_mm_xor_si128(c2[100],simde_mm_xor_si128(c2[3109],simde_mm_xor_si128(c2[309],simde_mm_xor_si128(c2[120],simde_mm_xor_si128(c2[3129],simde_mm_xor_si128(c2[1725],simde_mm_xor_si128(c2[1525],simde_mm_xor_si128(c2[2320],simde_mm_xor_si128(c2[140],simde_mm_xor_si128(c2[3149],simde_mm_xor_si128(c2[1743],simde_mm_xor_si128(c2[160],simde_mm_xor_si128(c2[3169],simde_mm_xor_si128(c2[2366],simde_mm_xor_si128(c2[963],simde_mm_xor_si128(c2[180],simde_mm_xor_si128(c2[3189],simde_mm_xor_si128(c2[1583],simde_mm_xor_si128(c2[1383],c2[2782]))))))))))))))))))))))))))))))))))))))));
+
+//row: 3
+     d2[30]=simde_mm_xor_si128(c2[3009],simde_mm_xor_si128(c2[1608],simde_mm_xor_si128(c2[601],simde_mm_xor_si128(c2[3029],simde_mm_xor_si128(c2[2625],simde_mm_xor_si128(c2[1222],simde_mm_xor_si128(c2[1022],simde_mm_xor_si128(c2[3049],simde_mm_xor_si128(c2[3040],simde_mm_xor_si128(c2[2840],simde_mm_xor_si128(c2[3069],simde_mm_xor_si128(c2[2267],simde_mm_xor_si128(c2[466],simde_mm_xor_si128(c2[3089],simde_mm_xor_si128(c2[1680],simde_mm_xor_si128(c2[1286],simde_mm_xor_si128(c2[1086],simde_mm_xor_si128(c2[3109],simde_mm_xor_si128(c2[509],simde_mm_xor_si128(c2[309],simde_mm_xor_si128(c2[3129],simde_mm_xor_si128(c2[1525],simde_mm_xor_si128(c2[2520],simde_mm_xor_si128(c2[2320],simde_mm_xor_si128(c2[3149],simde_mm_xor_si128(c2[1943],simde_mm_xor_si128(c2[1743],simde_mm_xor_si128(c2[3169],simde_mm_xor_si128(c2[2366],simde_mm_xor_si128(c2[1163],simde_mm_xor_si128(c2[963],simde_mm_xor_si128(c2[3189],simde_mm_xor_si128(c2[1383],simde_mm_xor_si128(c2[2982],c2[2782]))))))))))))))))))))))))))))))))));
+
+//row: 4
+     d2[40]=simde_mm_xor_si128(c2[2609],simde_mm_xor_si128(c2[2409],simde_mm_xor_si128(c2[1008],simde_mm_xor_si128(c2[1],simde_mm_xor_si128(c2[0],simde_mm_xor_si128(c2[2629],simde_mm_xor_si128(c2[2429],simde_mm_xor_si128(c2[2025],simde_mm_xor_si128(c2[422],simde_mm_xor_si128(c2[1628],simde_mm_xor_si128(c2[2649],simde_mm_xor_si128(c2[2449],simde_mm_xor_si128(c2[2240],simde_mm_xor_si128(c2[2669],simde_mm_xor_si128(c2[2469],simde_mm_xor_si128(c2[1667],simde_mm_xor_si128(c2[3065],simde_mm_xor_si128(c2[2489],simde_mm_xor_si128(c2[1080],simde_mm_xor_si128(c2[486],simde_mm_xor_si128(c2[2509],simde_mm_xor_si128(c2[2908],simde_mm_xor_si128(c2[2729],simde_mm_xor_si128(c2[2529],simde_mm_xor_si128(c2[925],simde_mm_xor_si128(c2[1720],simde_mm_xor_si128(c2[2549],simde_mm_xor_si128(c2[1143],simde_mm_xor_si128(c2[2569],simde_mm_xor_si128(c2[1766],simde_mm_xor_si128(c2[363],simde_mm_xor_si128(c2[2789],simde_mm_xor_si128(c2[2589],simde_mm_xor_si128(c2[783],c2[2182]))))))))))))))))))))))))))))))))));
+
+//row: 5
+     d2[50]=simde_mm_xor_si128(c2[4],simde_mm_xor_si128(c2[3003],simde_mm_xor_si128(c2[1602],simde_mm_xor_si128(c2[605],simde_mm_xor_si128(c2[0],simde_mm_xor_si128(c2[24],simde_mm_xor_si128(c2[3023],simde_mm_xor_si128(c2[2629],simde_mm_xor_si128(c2[1026],simde_mm_xor_si128(c2[628],simde_mm_xor_si128(c2[44],simde_mm_xor_si128(c2[3043],simde_mm_xor_si128(c2[2844],simde_mm_xor_si128(c2[64],simde_mm_xor_si128(c2[3063],simde_mm_xor_si128(c2[2261],simde_mm_xor_si128(c2[460],simde_mm_xor_si128(c2[3083],simde_mm_xor_si128(c2[1684],simde_mm_xor_si128(c2[1080],simde_mm_xor_si128(c2[3103],simde_mm_xor_si128(c2[303],simde_mm_xor_si128(c2[2908],simde_mm_xor_si128(c2[124],simde_mm_xor_si128(c2[3123],simde_mm_xor_si128(c2[1529],simde_mm_xor_si128(c2[2324],simde_mm_xor_si128(c2[3143],simde_mm_xor_si128(c2[1747],simde_mm_xor_si128(c2[2748],simde_mm_xor_si128(c2[3163],simde_mm_xor_si128(c2[2360],simde_mm_xor_si128(c2[967],simde_mm_xor_si128(c2[184],simde_mm_xor_si128(c2[3183],simde_mm_xor_si128(c2[1387],c2[2786]))))))))))))))))))))))))))))))))))));
+
+//row: 6
+     d2[60]=simde_mm_xor_si128(c2[809],simde_mm_xor_si128(c2[609],simde_mm_xor_si128(c2[2407],simde_mm_xor_si128(c2[1400],simde_mm_xor_si128(c2[0],simde_mm_xor_si128(c2[829],simde_mm_xor_si128(c2[629],simde_mm_xor_si128(c2[225],simde_mm_xor_si128(c2[1821],simde_mm_xor_si128(c2[849],simde_mm_xor_si128(c2[649],simde_mm_xor_si128(c2[440],simde_mm_xor_si128(c2[869],simde_mm_xor_si128(c2[669],simde_mm_xor_si128(c2[3066],simde_mm_xor_si128(c2[1265],simde_mm_xor_si128(c2[689],simde_mm_xor_si128(c2[2489],simde_mm_xor_si128(c2[1885],simde_mm_xor_si128(c2[709],simde_mm_xor_si128(c2[1108],simde_mm_xor_si128(c2[2507],simde_mm_xor_si128(c2[929],simde_mm_xor_si128(c2[729],simde_mm_xor_si128(c2[2324],simde_mm_xor_si128(c2[3129],simde_mm_xor_si128(c2[749],simde_mm_xor_si128(c2[2542],simde_mm_xor_si128(c2[746],simde_mm_xor_si128(c2[769],simde_mm_xor_si128(c2[3165],simde_mm_xor_si128(c2[1762],simde_mm_xor_si128(c2[989],simde_mm_xor_si128(c2[789],simde_mm_xor_si128(c2[2182],simde_mm_xor_si128(c2[382],c2[2782]))))))))))))))))))))))))))))))))))));
+
+//row: 7
+     d2[70]=simde_mm_xor_si128(c2[6],simde_mm_xor_si128(c2[3005],simde_mm_xor_si128(c2[2604],simde_mm_xor_si128(c2[1604],simde_mm_xor_si128(c2[1203],simde_mm_xor_si128(c2[607],simde_mm_xor_si128(c2[206],simde_mm_xor_si128(c2[26],simde_mm_xor_si128(c2[3025],simde_mm_xor_si128(c2[2624],simde_mm_xor_si128(c2[2621],simde_mm_xor_si128(c2[2220],simde_mm_xor_si128(c2[1028],simde_mm_xor_si128(c2[827],simde_mm_xor_si128(c2[627],simde_mm_xor_si128(c2[20],simde_mm_xor_si128(c2[46],simde_mm_xor_si128(c2[3045],simde_mm_xor_si128(c2[2644],simde_mm_xor_si128(c2[2846],simde_mm_xor_si128(c2[2645],simde_mm_xor_si128(c2[2445],simde_mm_xor_si128(c2[66],simde_mm_xor_si128(c2[3065],simde_mm_xor_si128(c2[2664],simde_mm_xor_si128(c2[2263],simde_mm_xor_si128(c2[1862],simde_mm_xor_si128(c2[462],simde_mm_xor_si128(c2[61],simde_mm_xor_si128(c2[3085],simde_mm_xor_si128(c2[2684],simde_mm_xor_si128(c2[1686],simde_mm_xor_si128(c2[1285],simde_mm_xor_si128(c2[1082],simde_mm_xor_si128(c2[881],simde_mm_xor_si128(c2[681],simde_mm_xor_si128(c2[3105],simde_mm_xor_si128(c2[2704],simde_mm_xor_si128(c2[305],simde_mm_xor_si128(c2[104],simde_mm_xor_si128(c2[3103],simde_mm_xor_si128(c2[2702],simde_mm_xor_si128(c2[126],simde_mm_xor_si128(c2[3125],simde_mm_xor_si128(c2[2724],simde_mm_xor_si128(c2[1521],simde_mm_xor_si128(c2[1120],simde_mm_xor_si128(c2[2326],simde_mm_xor_si128(c2[2125],simde_mm_xor_si128(c2[1925],simde_mm_xor_si128(c2[3145],simde_mm_xor_si128(c2[2744],simde_mm_xor_si128(c2[1749],simde_mm_xor_si128(c2[1548],simde_mm_xor_si128(c2[1348],simde_mm_xor_si128(c2[949],simde_mm_xor_si128(c2[3165],simde_mm_xor_si128(c2[2764],simde_mm_xor_si128(c2[2362],simde_mm_xor_si128(c2[1961],simde_mm_xor_si128(c2[969],simde_mm_xor_si128(c2[768],simde_mm_xor_si128(c2[568],simde_mm_xor_si128(c2[186],simde_mm_xor_si128(c2[3185],simde_mm_xor_si128(c2[2784],simde_mm_xor_si128(c2[1389],simde_mm_xor_si128(c2[988],simde_mm_xor_si128(c2[2788],simde_mm_xor_si128(c2[2587],c2[2387]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 8
+     d2[80]=simde_mm_xor_si128(c2[1405],simde_mm_xor_si128(c2[1205],simde_mm_xor_si128(c2[4],simde_mm_xor_si128(c2[3003],simde_mm_xor_si128(c2[2006],simde_mm_xor_si128(c2[0],simde_mm_xor_si128(c2[1425],simde_mm_xor_si128(c2[1225],simde_mm_xor_si128(c2[821],simde_mm_xor_si128(c2[2427],simde_mm_xor_si128(c2[224],simde_mm_xor_si128(c2[1445],simde_mm_xor_si128(c2[1245],simde_mm_xor_si128(c2[1046],simde_mm_xor_si128(c2[1465],simde_mm_xor_si128(c2[1265],simde_mm_xor_si128(c2[663],simde_mm_xor_si128(c2[463],simde_mm_xor_si128(c2[1861],simde_mm_xor_si128(c2[1485],simde_mm_xor_si128(c2[1285],simde_mm_xor_si128(c2[3085],simde_mm_xor_si128(c2[2481],simde_mm_xor_si128(c2[1505],simde_mm_xor_si128(c2[1305],simde_mm_xor_si128(c2[1704],simde_mm_xor_si128(c2[1525],simde_mm_xor_si128(c2[1325],simde_mm_xor_si128(c2[3120],simde_mm_xor_si128(c2[2920],simde_mm_xor_si128(c2[526],simde_mm_xor_si128(c2[1545],simde_mm_xor_si128(c2[1345],simde_mm_xor_si128(c2[3148],simde_mm_xor_si128(c2[1565],simde_mm_xor_si128(c2[1365],simde_mm_xor_si128(c2[562],simde_mm_xor_si128(c2[2368],simde_mm_xor_si128(c2[1585],simde_mm_xor_si128(c2[1385],simde_mm_xor_si128(c2[2988],simde_mm_xor_si128(c2[2788],c2[988]))))))))))))))))))))))))))))))))))))))))));
+
+//row: 9
+     d2[90]=simde_mm_xor_si128(c2[403],simde_mm_xor_si128(c2[1005],simde_mm_xor_si128(c2[805],simde_mm_xor_si128(c2[2201],simde_mm_xor_si128(c2[2603],simde_mm_xor_si128(c2[1204],simde_mm_xor_si128(c2[1606],simde_mm_xor_si128(c2[423],simde_mm_xor_si128(c2[1025],simde_mm_xor_si128(c2[825],simde_mm_xor_si128(c2[29],simde_mm_xor_si128(c2[421],simde_mm_xor_si128(c2[1625],simde_mm_xor_si128(c2[2027],simde_mm_xor_si128(c2[20],simde_mm_xor_si128(c2[443],simde_mm_xor_si128(c2[1045],simde_mm_xor_si128(c2[845],simde_mm_xor_si128(c2[244],simde_mm_xor_si128(c2[646],simde_mm_xor_si128(c2[463],simde_mm_xor_si128(c2[1065],simde_mm_xor_si128(c2[865],simde_mm_xor_si128(c2[2860],simde_mm_xor_si128(c2[63],simde_mm_xor_si128(c2[1069],simde_mm_xor_si128(c2[1461],simde_mm_xor_si128(c2[483],simde_mm_xor_si128(c2[885],simde_mm_xor_si128(c2[2283],simde_mm_xor_si128(c2[2685],simde_mm_xor_si128(c2[1689],simde_mm_xor_si128(c2[2081],simde_mm_xor_si128(c2[503],simde_mm_xor_si128(c2[905],simde_mm_xor_si128(c2[902],simde_mm_xor_si128(c2[1304],simde_mm_xor_si128(c2[523],simde_mm_xor_si128(c2[1125],simde_mm_xor_si128(c2[925],simde_mm_xor_si128(c2[2128],simde_mm_xor_si128(c2[2520],simde_mm_xor_si128(c2[2923],simde_mm_xor_si128(c2[126],simde_mm_xor_si128(c2[543],simde_mm_xor_si128(c2[945],simde_mm_xor_si128(c2[2346],simde_mm_xor_si128(c2[2748],simde_mm_xor_si128(c2[563],simde_mm_xor_si128(c2[965],simde_mm_xor_si128(c2[2969],simde_mm_xor_si128(c2[162],simde_mm_xor_si128(c2[1566],simde_mm_xor_si128(c2[1968],simde_mm_xor_si128(c2[366],simde_mm_xor_si128(c2[583],simde_mm_xor_si128(c2[1185],simde_mm_xor_si128(c2[985],simde_mm_xor_si128(c2[1986],simde_mm_xor_si128(c2[2388],simde_mm_xor_si128(c2[186],c2[588])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 10
+     d2[100]=simde_mm_xor_si128(c2[0],simde_mm_xor_si128(c2[221],simde_mm_xor_si128(c2[2529],c2[941])));
+
+//row: 11
+     d2[110]=simde_mm_xor_si128(c2[200],simde_mm_xor_si128(c2[2008],simde_mm_xor_si128(c2[1001],simde_mm_xor_si128(c2[0],simde_mm_xor_si128(c2[220],simde_mm_xor_si128(c2[3025],simde_mm_xor_si128(c2[1622],simde_mm_xor_si128(c2[1422],simde_mm_xor_si128(c2[240],simde_mm_xor_si128(c2[241],simde_mm_xor_si128(c2[41],simde_mm_xor_si128(c2[260],simde_mm_xor_si128(c2[2667],simde_mm_xor_si128(c2[866],simde_mm_xor_si128(c2[280],simde_mm_xor_si128(c2[2080],simde_mm_xor_si128(c2[1686],simde_mm_xor_si128(c2[1486],simde_mm_xor_si128(c2[300],simde_mm_xor_si128(c2[909],simde_mm_xor_si128(c2[709],simde_mm_xor_si128(c2[320],simde_mm_xor_si128(c2[1925],simde_mm_xor_si128(c2[2920],simde_mm_xor_si128(c2[2720],simde_mm_xor_si128(c2[340],simde_mm_xor_si128(c2[2343],simde_mm_xor_si128(c2[2143],simde_mm_xor_si128(c2[1540],simde_mm_xor_si128(c2[360],simde_mm_xor_si128(c2[2766],simde_mm_xor_si128(c2[1563],simde_mm_xor_si128(c2[1363],simde_mm_xor_si128(c2[380],simde_mm_xor_si128(c2[1783],simde_mm_xor_si128(c2[183],simde_mm_xor_si128(c2[3182],c2[980])))))))))))))))))))))))))))))))))))));
+
+//row: 12
+     d2[120]=simde_mm_xor_si128(c2[3],simde_mm_xor_si128(c2[3002],simde_mm_xor_si128(c2[1601],simde_mm_xor_si128(c2[604],simde_mm_xor_si128(c2[23],simde_mm_xor_si128(c2[3022],simde_mm_xor_si128(c2[2628],simde_mm_xor_si128(c2[1025],simde_mm_xor_si128(c2[20],simde_mm_xor_si128(c2[43],simde_mm_xor_si128(c2[3042],simde_mm_xor_si128(c2[2843],simde_mm_xor_si128(c2[63],simde_mm_xor_si128(c2[3062],simde_mm_xor_si128(c2[2260],simde_mm_xor_si128(c2[469],simde_mm_xor_si128(c2[267],simde_mm_xor_si128(c2[3082],simde_mm_xor_si128(c2[1683],simde_mm_xor_si128(c2[1089],simde_mm_xor_si128(c2[3102],simde_mm_xor_si128(c2[302],simde_mm_xor_si128(c2[123],simde_mm_xor_si128(c2[3122],simde_mm_xor_si128(c2[1528],simde_mm_xor_si128(c2[2323],simde_mm_xor_si128(c2[3142],simde_mm_xor_si128(c2[1746],simde_mm_xor_si128(c2[3162],simde_mm_xor_si128(c2[2369],simde_mm_xor_si128(c2[966],simde_mm_xor_si128(c2[183],simde_mm_xor_si128(c2[3182],simde_mm_xor_si128(c2[1386],c2[2785]))))))))))))))))))))))))))))))))));
+
+//row: 13
+     d2[130]=simde_mm_xor_si128(c2[1801],simde_mm_xor_si128(c2[400],simde_mm_xor_si128(c2[2602],simde_mm_xor_si128(c2[0],simde_mm_xor_si128(c2[1821],simde_mm_xor_si128(c2[1427],simde_mm_xor_si128(c2[24],simde_mm_xor_si128(c2[3023],simde_mm_xor_si128(c2[27],simde_mm_xor_si128(c2[1841],simde_mm_xor_si128(c2[1842],simde_mm_xor_si128(c2[1642],simde_mm_xor_si128(c2[1861],simde_mm_xor_si128(c2[1069],simde_mm_xor_si128(c2[2467],simde_mm_xor_si128(c2[1881],simde_mm_xor_si128(c2[482],simde_mm_xor_si128(c2[88],simde_mm_xor_si128(c2[3087],simde_mm_xor_si128(c2[1901],simde_mm_xor_si128(c2[2500],simde_mm_xor_si128(c2[2300],simde_mm_xor_si128(c2[1921],simde_mm_xor_si128(c2[327],simde_mm_xor_si128(c2[1322],simde_mm_xor_si128(c2[1122],simde_mm_xor_si128(c2[1941],simde_mm_xor_si128(c2[745],simde_mm_xor_si128(c2[545],simde_mm_xor_si128(c2[1961],simde_mm_xor_si128(c2[1168],simde_mm_xor_si128(c2[3164],simde_mm_xor_si128(c2[2964],simde_mm_xor_si128(c2[1366],simde_mm_xor_si128(c2[1981],simde_mm_xor_si128(c2[185],simde_mm_xor_si128(c2[1784],c2[1584])))))))))))))))))))))))))))))))))))));
+
+//row: 14
+     d2[140]=simde_mm_xor_si128(c2[1803],simde_mm_xor_si128(c2[1603],simde_mm_xor_si128(c2[2001],simde_mm_xor_si128(c2[202],simde_mm_xor_si128(c2[600],simde_mm_xor_si128(c2[2404],simde_mm_xor_si128(c2[2802],simde_mm_xor_si128(c2[1823],simde_mm_xor_si128(c2[1623],simde_mm_xor_si128(c2[2021],simde_mm_xor_si128(c2[1229],simde_mm_xor_si128(c2[1627],simde_mm_xor_si128(c2[2825],simde_mm_xor_si128(c2[224],simde_mm_xor_si128(c2[24],simde_mm_xor_si128(c2[20],simde_mm_xor_si128(c2[1843],simde_mm_xor_si128(c2[1643],simde_mm_xor_si128(c2[2041],simde_mm_xor_si128(c2[1444],simde_mm_xor_si128(c2[2042],simde_mm_xor_si128(c2[1842],simde_mm_xor_si128(c2[1863],simde_mm_xor_si128(c2[1663],simde_mm_xor_si128(c2[2061],simde_mm_xor_si128(c2[861],simde_mm_xor_si128(c2[1269],simde_mm_xor_si128(c2[2269],simde_mm_xor_si128(c2[2667],simde_mm_xor_si128(c2[1683],simde_mm_xor_si128(c2[2081],simde_mm_xor_si128(c2[284],simde_mm_xor_si128(c2[682],simde_mm_xor_si128(c2[2889],simde_mm_xor_si128(c2[288],simde_mm_xor_si128(c2[88],simde_mm_xor_si128(c2[1703],simde_mm_xor_si128(c2[2101],simde_mm_xor_si128(c2[2102],simde_mm_xor_si128(c2[2700],simde_mm_xor_si128(c2[2500],simde_mm_xor_si128(c2[1923],simde_mm_xor_si128(c2[1723],simde_mm_xor_si128(c2[2121],simde_mm_xor_si128(c2[129],simde_mm_xor_si128(c2[527],simde_mm_xor_si128(c2[924],simde_mm_xor_si128(c2[1522],simde_mm_xor_si128(c2[1322],simde_mm_xor_si128(c2[2128],simde_mm_xor_si128(c2[1743],simde_mm_xor_si128(c2[2141],simde_mm_xor_si128(c2[347],simde_mm_xor_si128(c2[945],simde_mm_xor_si128(c2[745],simde_mm_xor_si128(c2[1763],simde_mm_xor_si128(c2[2161],simde_mm_xor_si128(c2[960],simde_mm_xor_si128(c2[1368],simde_mm_xor_si128(c2[2766],simde_mm_xor_si128(c2[165],simde_mm_xor_si128(c2[3164],simde_mm_xor_si128(c2[1983],simde_mm_xor_si128(c2[1783],simde_mm_xor_si128(c2[2181],simde_mm_xor_si128(c2[3186],simde_mm_xor_si128(c2[385],simde_mm_xor_si128(c2[1386],simde_mm_xor_si128(c2[1984],c2[1784])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 15
+     d2[150]=simde_mm_xor_si128(c2[1604],simde_mm_xor_si128(c2[606],simde_mm_xor_si128(c2[406],simde_mm_xor_si128(c2[203],simde_mm_xor_si128(c2[2204],simde_mm_xor_si128(c2[2405],simde_mm_xor_si128(c2[1207],simde_mm_xor_si128(c2[0],simde_mm_xor_si128(c2[1624],simde_mm_xor_si128(c2[626],simde_mm_xor_si128(c2[426],simde_mm_xor_si128(c2[1220],simde_mm_xor_si128(c2[22],simde_mm_xor_si128(c2[2826],simde_mm_xor_si128(c2[1628],simde_mm_xor_si128(c2[1644],simde_mm_xor_si128(c2[646],simde_mm_xor_si128(c2[446],simde_mm_xor_si128(c2[1445],simde_mm_xor_si128(c2[247],simde_mm_xor_si128(c2[1664],simde_mm_xor_si128(c2[666],simde_mm_xor_si128(c2[466],simde_mm_xor_si128(c2[862],simde_mm_xor_si128(c2[2863],simde_mm_xor_si128(c2[2260],simde_mm_xor_si128(c2[1062],simde_mm_xor_si128(c2[1684],simde_mm_xor_si128(c2[486],simde_mm_xor_si128(c2[285],simde_mm_xor_si128(c2[2286],simde_mm_xor_si128(c2[2880],simde_mm_xor_si128(c2[1682],simde_mm_xor_si128(c2[1704],simde_mm_xor_si128(c2[506],simde_mm_xor_si128(c2[2103],simde_mm_xor_si128(c2[905],simde_mm_xor_si128(c2[1724],simde_mm_xor_si128(c2[726],simde_mm_xor_si128(c2[526],simde_mm_xor_si128(c2[120],simde_mm_xor_si128(c2[2121],simde_mm_xor_si128(c2[925],simde_mm_xor_si128(c2[2926],simde_mm_xor_si128(c2[1744],simde_mm_xor_si128(c2[546],simde_mm_xor_si128(c2[348],simde_mm_xor_si128(c2[2349],simde_mm_xor_si128(c2[1764],simde_mm_xor_si128(c2[566],simde_mm_xor_si128(c2[961],simde_mm_xor_si128(c2[2962],simde_mm_xor_si128(c2[2767],simde_mm_xor_si128(c2[1569],simde_mm_xor_si128(c2[1784],simde_mm_xor_si128(c2[786],simde_mm_xor_si128(c2[586],simde_mm_xor_si128(c2[3187],simde_mm_xor_si128(c2[1989],simde_mm_xor_si128(c2[1387],c2[189]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 16
+     d2[160]=simde_mm_xor_si128(c2[3006],simde_mm_xor_si128(c2[2806],simde_mm_xor_si128(c2[3008],simde_mm_xor_si128(c2[2808],simde_mm_xor_si128(c2[1405],simde_mm_xor_si128(c2[1607],simde_mm_xor_si128(c2[1407],simde_mm_xor_si128(c2[408],simde_mm_xor_si128(c2[400],simde_mm_xor_si128(c2[3026],simde_mm_xor_si128(c2[2826],simde_mm_xor_si128(c2[3028],simde_mm_xor_si128(c2[2828],simde_mm_xor_si128(c2[2422],simde_mm_xor_si128(c2[2424],simde_mm_xor_si128(c2[829],simde_mm_xor_si128(c2[821],simde_mm_xor_si128(c2[20],simde_mm_xor_si128(c2[3046],simde_mm_xor_si128(c2[2846],simde_mm_xor_si128(c2[3048],simde_mm_xor_si128(c2[2848],simde_mm_xor_si128(c2[2647],simde_mm_xor_si128(c2[2649],simde_mm_xor_si128(c2[3066],simde_mm_xor_si128(c2[2866],simde_mm_xor_si128(c2[3068],simde_mm_xor_si128(c2[2868],simde_mm_xor_si128(c2[2064],simde_mm_xor_si128(c2[2266],simde_mm_xor_si128(c2[2066],simde_mm_xor_si128(c2[263],simde_mm_xor_si128(c2[265],simde_mm_xor_si128(c2[2886],simde_mm_xor_si128(c2[3088],simde_mm_xor_si128(c2[2888],simde_mm_xor_si128(c2[1487],simde_mm_xor_si128(c2[1489],simde_mm_xor_si128(c2[883],simde_mm_xor_si128(c2[885],simde_mm_xor_si128(c2[2906],simde_mm_xor_si128(c2[3108],simde_mm_xor_si128(c2[2908],simde_mm_xor_si128(c2[106],simde_mm_xor_si128(c2[108],simde_mm_xor_si128(c2[3126],simde_mm_xor_si128(c2[2926],simde_mm_xor_si128(c2[3128],simde_mm_xor_si128(c2[2928],simde_mm_xor_si128(c2[1322],simde_mm_xor_si128(c2[1524],simde_mm_xor_si128(c2[1324],simde_mm_xor_si128(c2[2127],simde_mm_xor_si128(c2[2129],simde_mm_xor_si128(c2[2946],simde_mm_xor_si128(c2[3148],simde_mm_xor_si128(c2[2948],simde_mm_xor_si128(c2[1540],simde_mm_xor_si128(c2[1542],simde_mm_xor_si128(c2[2966],simde_mm_xor_si128(c2[3168],simde_mm_xor_si128(c2[2968],simde_mm_xor_si128(c2[2163],simde_mm_xor_si128(c2[2165],simde_mm_xor_si128(c2[760],simde_mm_xor_si128(c2[762],simde_mm_xor_si128(c2[3186],simde_mm_xor_si128(c2[2986],simde_mm_xor_si128(c2[3188],simde_mm_xor_si128(c2[2988],simde_mm_xor_si128(c2[1180],simde_mm_xor_si128(c2[1382],simde_mm_xor_si128(c2[1182],simde_mm_xor_si128(c2[2589],simde_mm_xor_si128(c2[2581],c2[3184])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 17
+     d2[170]=simde_mm_xor_si128(c2[2606],simde_mm_xor_si128(c2[2406],simde_mm_xor_si128(c2[401],simde_mm_xor_si128(c2[201],simde_mm_xor_si128(c2[1005],simde_mm_xor_si128(c2[2209],simde_mm_xor_si128(c2[2009],simde_mm_xor_si128(c2[8],simde_mm_xor_si128(c2[1002],simde_mm_xor_si128(c2[2626],simde_mm_xor_si128(c2[2426],simde_mm_xor_si128(c2[421],simde_mm_xor_si128(c2[221],simde_mm_xor_si128(c2[2022],simde_mm_xor_si128(c2[3026],simde_mm_xor_si128(c2[429],simde_mm_xor_si128(c2[1423],simde_mm_xor_si128(c2[20],simde_mm_xor_si128(c2[2646],simde_mm_xor_si128(c2[2446],simde_mm_xor_si128(c2[441],simde_mm_xor_si128(c2[241],simde_mm_xor_si128(c2[2247],simde_mm_xor_si128(c2[42],simde_mm_xor_si128(c2[2666],simde_mm_xor_si128(c2[2466],simde_mm_xor_si128(c2[461],simde_mm_xor_si128(c2[261],simde_mm_xor_si128(c2[1664],simde_mm_xor_si128(c2[2868],simde_mm_xor_si128(c2[2668],simde_mm_xor_si128(c2[3062],simde_mm_xor_si128(c2[867],simde_mm_xor_si128(c2[2486],simde_mm_xor_si128(c2[481],simde_mm_xor_si128(c2[281],simde_mm_xor_si128(c2[1087],simde_mm_xor_si128(c2[2081],simde_mm_xor_si128(c2[483],simde_mm_xor_si128(c2[1487],simde_mm_xor_si128(c2[2506],simde_mm_xor_si128(c2[501],simde_mm_xor_si128(c2[301],simde_mm_xor_si128(c2[2905],simde_mm_xor_si128(c2[700],simde_mm_xor_si128(c2[1701],simde_mm_xor_si128(c2[2726],simde_mm_xor_si128(c2[2526],simde_mm_xor_si128(c2[521],simde_mm_xor_si128(c2[321],simde_mm_xor_si128(c2[922],simde_mm_xor_si128(c2[2126],simde_mm_xor_si128(c2[1926],simde_mm_xor_si128(c2[1727],simde_mm_xor_si128(c2[2721],simde_mm_xor_si128(c2[2546],simde_mm_xor_si128(c2[541],simde_mm_xor_si128(c2[341],simde_mm_xor_si128(c2[1140],simde_mm_xor_si128(c2[2144],simde_mm_xor_si128(c2[2566],simde_mm_xor_si128(c2[561],simde_mm_xor_si128(c2[361],simde_mm_xor_si128(c2[1763],simde_mm_xor_si128(c2[2767],simde_mm_xor_si128(c2[360],simde_mm_xor_si128(c2[1364],simde_mm_xor_si128(c2[2786],simde_mm_xor_si128(c2[2586],simde_mm_xor_si128(c2[581],simde_mm_xor_si128(c2[381],simde_mm_xor_si128(c2[780],simde_mm_xor_si128(c2[1984],simde_mm_xor_si128(c2[1784],simde_mm_xor_si128(c2[2189],c2[3183])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 18
+     d2[180]=simde_mm_xor_si128(c2[0],simde_mm_xor_si128(c2[521],c2[1345]));
+
+//row: 19
+     d2[190]=simde_mm_xor_si128(c2[1809],simde_mm_xor_si128(c2[408],simde_mm_xor_si128(c2[2600],simde_mm_xor_si128(c2[0],simde_mm_xor_si128(c2[1829],simde_mm_xor_si128(c2[1425],simde_mm_xor_si128(c2[3021],simde_mm_xor_si128(c2[2829],simde_mm_xor_si128(c2[1849],simde_mm_xor_si128(c2[1640],simde_mm_xor_si128(c2[1869],simde_mm_xor_si128(c2[1067],simde_mm_xor_si128(c2[2465],simde_mm_xor_si128(c2[1889],simde_mm_xor_si128(c2[480],simde_mm_xor_si128(c2[3085],simde_mm_xor_si128(c2[1909],simde_mm_xor_si128(c2[2308],simde_mm_xor_si128(c2[1929],simde_mm_xor_si128(c2[325],simde_mm_xor_si128(c2[1120],simde_mm_xor_si128(c2[1949],simde_mm_xor_si128(c2[543],simde_mm_xor_si128(c2[1969],simde_mm_xor_si128(c2[1166],simde_mm_xor_si128(c2[2962],simde_mm_xor_si128(c2[1989],simde_mm_xor_si128(c2[183],c2[1582]))))))))))))))))))))))))))));
+
+//row: 20
+     d2[200]=simde_mm_xor_si128(c2[1606],simde_mm_xor_si128(c2[1406],simde_mm_xor_si128(c2[5],simde_mm_xor_si128(c2[2207],simde_mm_xor_si128(c2[1626],simde_mm_xor_si128(c2[1426],simde_mm_xor_si128(c2[1022],simde_mm_xor_si128(c2[2628],simde_mm_xor_si128(c2[20],simde_mm_xor_si128(c2[1646],simde_mm_xor_si128(c2[1446],simde_mm_xor_si128(c2[1247],simde_mm_xor_si128(c2[1666],simde_mm_xor_si128(c2[1466],simde_mm_xor_si128(c2[664],simde_mm_xor_si128(c2[2062],simde_mm_xor_si128(c2[1486],simde_mm_xor_si128(c2[87],simde_mm_xor_si128(c2[2682],simde_mm_xor_si128(c2[889],simde_mm_xor_si128(c2[1506],simde_mm_xor_si128(c2[1905],simde_mm_xor_si128(c2[1726],simde_mm_xor_si128(c2[1526],simde_mm_xor_si128(c2[3121],simde_mm_xor_si128(c2[727],simde_mm_xor_si128(c2[1546],simde_mm_xor_si128(c2[140],simde_mm_xor_si128(c2[1566],simde_mm_xor_si128(c2[763],simde_mm_xor_si128(c2[2569],simde_mm_xor_si128(c2[1786],simde_mm_xor_si128(c2[1586],simde_mm_xor_si128(c2[2989],c2[1189]))))))))))))))))))))))))))))))))));
+
+//row: 21
+     d2[210]=simde_mm_xor_si128(c2[2],simde_mm_xor_si128(c2[1800],simde_mm_xor_si128(c2[803],simde_mm_xor_si128(c2[0],simde_mm_xor_si128(c2[22],simde_mm_xor_si128(c2[2827],simde_mm_xor_si128(c2[1424],simde_mm_xor_si128(c2[1224],simde_mm_xor_si128(c2[42],simde_mm_xor_si128(c2[43],simde_mm_xor_si128(c2[3042],simde_mm_xor_si128(c2[62],simde_mm_xor_si128(c2[2469],simde_mm_xor_si128(c2[668],simde_mm_xor_si128(c2[82],simde_mm_xor_si128(c2[1882],simde_mm_xor_si128(c2[1488],simde_mm_xor_si128(c2[1288],simde_mm_xor_si128(c2[102],simde_mm_xor_si128(c2[701],simde_mm_xor_si128(c2[501],simde_mm_xor_si128(c2[122],simde_mm_xor_si128(c2[1727],simde_mm_xor_si128(c2[2722],simde_mm_xor_si128(c2[2522],simde_mm_xor_si128(c2[142],simde_mm_xor_si128(c2[2145],simde_mm_xor_si128(c2[1945],simde_mm_xor_si128(c2[162],simde_mm_xor_si128(c2[2568],simde_mm_xor_si128(c2[1365],simde_mm_xor_si128(c2[1165],simde_mm_xor_si128(c2[361],simde_mm_xor_si128(c2[182],simde_mm_xor_si128(c2[1585],simde_mm_xor_si128(c2[3184],c2[2984]))))))))))))))))))))))))))))))))))));
+
+//row: 22
+     d2[220]=simde_mm_xor_si128(c2[20],c2[840]);
+
+//row: 23
+     d2[230]=simde_mm_xor_si128(c2[0],simde_mm_xor_si128(c2[2264],c2[2909]));
+
+//row: 24
+     d2[240]=simde_mm_xor_si128(c2[20],simde_mm_xor_si128(c2[1044],c2[1585]));
+
+//row: 25
+     d2[250]=simde_mm_xor_si128(c2[0],c2[304]);
+
+//row: 26
+     d2[260]=simde_mm_xor_si128(c2[2600],simde_mm_xor_si128(c2[2400],simde_mm_xor_si128(c2[1200],simde_mm_xor_si128(c2[1209],simde_mm_xor_si128(c2[1009],simde_mm_xor_si128(c2[3008],simde_mm_xor_si128(c2[2],simde_mm_xor_si128(c2[2001],simde_mm_xor_si128(c2[2620],simde_mm_xor_si128(c2[2420],simde_mm_xor_si128(c2[1220],simde_mm_xor_si128(c2[2026],simde_mm_xor_si128(c2[826],simde_mm_xor_si128(c2[423],simde_mm_xor_si128(c2[2622],simde_mm_xor_si128(c2[2422],simde_mm_xor_si128(c2[2640],simde_mm_xor_si128(c2[2440],simde_mm_xor_si128(c2[1240],simde_mm_xor_si128(c2[2241],simde_mm_xor_si128(c2[1241],simde_mm_xor_si128(c2[1041],simde_mm_xor_si128(c2[40],simde_mm_xor_si128(c2[2660],simde_mm_xor_si128(c2[2460],simde_mm_xor_si128(c2[1260],simde_mm_xor_si128(c2[1868],simde_mm_xor_si128(c2[1668],simde_mm_xor_si128(c2[468],simde_mm_xor_si128(c2[3066],simde_mm_xor_si128(c2[1866],simde_mm_xor_si128(c2[2680],simde_mm_xor_si128(c2[2480],simde_mm_xor_si128(c2[1280],simde_mm_xor_si128(c2[1081],simde_mm_xor_si128(c2[3080],simde_mm_xor_si128(c2[487],simde_mm_xor_si128(c2[2686],simde_mm_xor_si128(c2[2486],simde_mm_xor_si128(c2[2700],simde_mm_xor_si128(c2[2500],simde_mm_xor_si128(c2[1300],simde_mm_xor_si128(c2[2909],simde_mm_xor_si128(c2[1909],simde_mm_xor_si128(c2[1709],simde_mm_xor_si128(c2[2720],simde_mm_xor_si128(c2[2520],simde_mm_xor_si128(c2[1320],simde_mm_xor_si128(c2[1126],simde_mm_xor_si128(c2[926],simde_mm_xor_si128(c2[2925],simde_mm_xor_si128(c2[1721],simde_mm_xor_si128(c2[721],simde_mm_xor_si128(c2[521],simde_mm_xor_si128(c2[2740],simde_mm_xor_si128(c2[2540],simde_mm_xor_si128(c2[1340],simde_mm_xor_si128(c2[1144],simde_mm_xor_si128(c2[144],simde_mm_xor_si128(c2[3143],simde_mm_xor_si128(c2[946],simde_mm_xor_si128(c2[2760],simde_mm_xor_si128(c2[2560],simde_mm_xor_si128(c2[1360],simde_mm_xor_si128(c2[1767],simde_mm_xor_si128(c2[567],simde_mm_xor_si128(c2[364],simde_mm_xor_si128(c2[2563],simde_mm_xor_si128(c2[2363],simde_mm_xor_si128(c2[2780],simde_mm_xor_si128(c2[2580],simde_mm_xor_si128(c2[1380],simde_mm_xor_si128(c2[984],simde_mm_xor_si128(c2[784],simde_mm_xor_si128(c2[2783],simde_mm_xor_si128(c2[2183],simde_mm_xor_si128(c2[1183],c2[983])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 27
+     d2[270]=simde_mm_xor_si128(c2[0],c2[122]);
+
+//row: 28
+     d2[280]=simde_mm_xor_si128(c2[20],simde_mm_xor_si128(c2[2847],c2[2906]));
+
+//row: 29
+     d2[290]=simde_mm_xor_si128(c2[0],c2[2089]);
+
+//row: 30
+     d2[300]=simde_mm_xor_si128(c2[40],simde_mm_xor_si128(c2[702],simde_mm_xor_si128(c2[743],c2[1388])));
+
+//row: 31
+     d2[310]=simde_mm_xor_si128(c2[601],simde_mm_xor_si128(c2[2409],simde_mm_xor_si128(c2[1402],simde_mm_xor_si128(c2[621],simde_mm_xor_si128(c2[227],simde_mm_xor_si128(c2[2023],simde_mm_xor_si128(c2[1823],simde_mm_xor_si128(c2[20],simde_mm_xor_si128(c2[641],simde_mm_xor_si128(c2[642],simde_mm_xor_si128(c2[442],simde_mm_xor_si128(c2[661],simde_mm_xor_si128(c2[3068],simde_mm_xor_si128(c2[1267],simde_mm_xor_si128(c2[681],simde_mm_xor_si128(c2[2481],simde_mm_xor_si128(c2[2087],simde_mm_xor_si128(c2[1887],simde_mm_xor_si128(c2[701],simde_mm_xor_si128(c2[1300],simde_mm_xor_si128(c2[1100],simde_mm_xor_si128(c2[721],simde_mm_xor_si128(c2[2326],simde_mm_xor_si128(c2[122],simde_mm_xor_si128(c2[3121],simde_mm_xor_si128(c2[741],simde_mm_xor_si128(c2[2744],simde_mm_xor_si128(c2[2544],simde_mm_xor_si128(c2[761],simde_mm_xor_si128(c2[3167],simde_mm_xor_si128(c2[1964],simde_mm_xor_si128(c2[1764],simde_mm_xor_si128(c2[781],simde_mm_xor_si128(c2[2184],simde_mm_xor_si128(c2[584],c2[384])))))))))))))))))))))))))))))))))));
+
+//row: 32
+     d2[320]=simde_mm_xor_si128(c2[2007],simde_mm_xor_si128(c2[1807],simde_mm_xor_si128(c2[606],simde_mm_xor_si128(c2[406],simde_mm_xor_si128(c2[2608],simde_mm_xor_si128(c2[0],simde_mm_xor_si128(c2[2027],simde_mm_xor_si128(c2[1827],simde_mm_xor_si128(c2[1423],simde_mm_xor_si128(c2[3029],simde_mm_xor_si128(c2[2047],simde_mm_xor_si128(c2[1847],simde_mm_xor_si128(c2[1648],simde_mm_xor_si128(c2[2067],simde_mm_xor_si128(c2[1867],simde_mm_xor_si128(c2[1265],simde_mm_xor_si128(c2[1065],simde_mm_xor_si128(c2[2463],simde_mm_xor_si128(c2[2087],simde_mm_xor_si128(c2[1887],simde_mm_xor_si128(c2[488],simde_mm_xor_si128(c2[3083],simde_mm_xor_si128(c2[2107],simde_mm_xor_si128(c2[1907],simde_mm_xor_si128(c2[2306],simde_mm_xor_si128(c2[901],simde_mm_xor_si128(c2[2127],simde_mm_xor_si128(c2[1927],simde_mm_xor_si128(c2[523],simde_mm_xor_si128(c2[323],simde_mm_xor_si128(c2[1128],simde_mm_xor_si128(c2[2147],simde_mm_xor_si128(c2[1947],simde_mm_xor_si128(c2[541],simde_mm_xor_si128(c2[2167],simde_mm_xor_si128(c2[1967],simde_mm_xor_si128(c2[1164],simde_mm_xor_si128(c2[2960],simde_mm_xor_si128(c2[2187],simde_mm_xor_si128(c2[1987],simde_mm_xor_si128(c2[381],simde_mm_xor_si128(c2[181],c2[1580]))))))))))))))))))))))))))))))))))))))))));
+
+//row: 33
+     d2[330]=simde_mm_xor_si128(c2[2400],simde_mm_xor_si128(c2[1009],simde_mm_xor_si128(c2[2],simde_mm_xor_si128(c2[2420],simde_mm_xor_si128(c2[2026],simde_mm_xor_si128(c2[423],simde_mm_xor_si128(c2[2440],simde_mm_xor_si128(c2[2241],simde_mm_xor_si128(c2[40],simde_mm_xor_si128(c2[2460],simde_mm_xor_si128(c2[1668],simde_mm_xor_si128(c2[3066],simde_mm_xor_si128(c2[2480],simde_mm_xor_si128(c2[1081],simde_mm_xor_si128(c2[487],simde_mm_xor_si128(c2[2500],simde_mm_xor_si128(c2[2909],simde_mm_xor_si128(c2[2520],simde_mm_xor_si128(c2[926],simde_mm_xor_si128(c2[1721],simde_mm_xor_si128(c2[2540],simde_mm_xor_si128(c2[1144],simde_mm_xor_si128(c2[1745],simde_mm_xor_si128(c2[2560],simde_mm_xor_si128(c2[1767],simde_mm_xor_si128(c2[364],simde_mm_xor_si128(c2[2580],simde_mm_xor_si128(c2[784],c2[2183]))))))))))))))))))))))))))));
+
+//row: 34
+     d2[340]=simde_mm_xor_si128(c2[601],simde_mm_xor_si128(c2[401],simde_mm_xor_si128(c2[2604],simde_mm_xor_si128(c2[2409],simde_mm_xor_si128(c2[2209],simde_mm_xor_si128(c2[1203],simde_mm_xor_si128(c2[1202],simde_mm_xor_si128(c2[206],simde_mm_xor_si128(c2[0],simde_mm_xor_si128(c2[621],simde_mm_xor_si128(c2[421],simde_mm_xor_si128(c2[2624],simde_mm_xor_si128(c2[27],simde_mm_xor_si128(c2[2220],simde_mm_xor_si128(c2[1623],simde_mm_xor_si128(c2[827],simde_mm_xor_si128(c2[627],simde_mm_xor_si128(c2[641],simde_mm_xor_si128(c2[441],simde_mm_xor_si128(c2[2644],simde_mm_xor_si128(c2[242],simde_mm_xor_si128(c2[2645],simde_mm_xor_si128(c2[2445],simde_mm_xor_si128(c2[661],simde_mm_xor_si128(c2[461],simde_mm_xor_si128(c2[2664],simde_mm_xor_si128(c2[3068],simde_mm_xor_si128(c2[2868],simde_mm_xor_si128(c2[1862],simde_mm_xor_si128(c2[1067],simde_mm_xor_si128(c2[61],simde_mm_xor_si128(c2[681],simde_mm_xor_si128(c2[481],simde_mm_xor_si128(c2[2684],simde_mm_xor_si128(c2[2281],simde_mm_xor_si128(c2[1285],simde_mm_xor_si128(c2[1687],simde_mm_xor_si128(c2[881],simde_mm_xor_si128(c2[681],simde_mm_xor_si128(c2[701],simde_mm_xor_si128(c2[501],simde_mm_xor_si128(c2[2704],simde_mm_xor_si128(c2[900],simde_mm_xor_si128(c2[104],simde_mm_xor_si128(c2[3103],simde_mm_xor_si128(c2[721],simde_mm_xor_si128(c2[521],simde_mm_xor_si128(c2[2724],simde_mm_xor_si128(c2[2326],simde_mm_xor_si128(c2[2126],simde_mm_xor_si128(c2[1120],simde_mm_xor_si128(c2[2921],simde_mm_xor_si128(c2[2125],simde_mm_xor_si128(c2[1925],simde_mm_xor_si128(c2[741],simde_mm_xor_si128(c2[541],simde_mm_xor_si128(c2[2744],simde_mm_xor_si128(c2[2344],simde_mm_xor_si128(c2[1548],simde_mm_xor_si128(c2[1348],simde_mm_xor_si128(c2[761],simde_mm_xor_si128(c2[561],simde_mm_xor_si128(c2[2764],simde_mm_xor_si128(c2[2967],simde_mm_xor_si128(c2[1961],simde_mm_xor_si128(c2[1564],simde_mm_xor_si128(c2[768],simde_mm_xor_si128(c2[568],simde_mm_xor_si128(c2[781],simde_mm_xor_si128(c2[581],simde_mm_xor_si128(c2[2784],simde_mm_xor_si128(c2[2184],simde_mm_xor_si128(c2[1984],simde_mm_xor_si128(c2[988],simde_mm_xor_si128(c2[184],simde_mm_xor_si128(c2[2587],c2[2387]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 35
+     d2[350]=simde_mm_xor_si128(c2[1200],simde_mm_xor_si128(c2[1000],simde_mm_xor_si128(c2[2808],simde_mm_xor_si128(c2[1801],simde_mm_xor_si128(c2[1220],simde_mm_xor_si128(c2[1020],simde_mm_xor_si128(c2[626],simde_mm_xor_si128(c2[2222],simde_mm_xor_si128(c2[20],simde_mm_xor_si128(c2[1240],simde_mm_xor_si128(c2[1040],simde_mm_xor_si128(c2[841],simde_mm_xor_si128(c2[1260],simde_mm_xor_si128(c2[1060],simde_mm_xor_si128(c2[268],simde_mm_xor_si128(c2[1666],simde_mm_xor_si128(c2[1080],simde_mm_xor_si128(c2[2880],simde_mm_xor_si128(c2[2286],simde_mm_xor_si128(c2[1100],simde_mm_xor_si128(c2[1509],simde_mm_xor_si128(c2[2709],simde_mm_xor_si128(c2[1320],simde_mm_xor_si128(c2[1120],simde_mm_xor_si128(c2[2725],simde_mm_xor_si128(c2[321],simde_mm_xor_si128(c2[1140],simde_mm_xor_si128(c2[2943],simde_mm_xor_si128(c2[1160],simde_mm_xor_si128(c2[367],simde_mm_xor_si128(c2[2163],simde_mm_xor_si128(c2[1380],simde_mm_xor_si128(c2[1180],simde_mm_xor_si128(c2[2583],c2[783]))))))))))))))))))))))))))))))))));
+
+//row: 36
+     d2[360]=simde_mm_xor_si128(c2[0],simde_mm_xor_si128(c2[3043],c2[545]));
+
+//row: 37
+     d2[370]=simde_mm_xor_si128(c2[3009],simde_mm_xor_si128(c2[3008],simde_mm_xor_si128(c2[1608],simde_mm_xor_si128(c2[1607],simde_mm_xor_si128(c2[601],simde_mm_xor_si128(c2[600],simde_mm_xor_si128(c2[3029],simde_mm_xor_si128(c2[3028],simde_mm_xor_si128(c2[2625],simde_mm_xor_si128(c2[2624],simde_mm_xor_si128(c2[1022],simde_mm_xor_si128(c2[1221],simde_mm_xor_si128(c2[1021],simde_mm_xor_si128(c2[3049],simde_mm_xor_si128(c2[3048],simde_mm_xor_si128(c2[2840],simde_mm_xor_si128(c2[3049],simde_mm_xor_si128(c2[2849],simde_mm_xor_si128(c2[3069],simde_mm_xor_si128(c2[3068],simde_mm_xor_si128(c2[2267],simde_mm_xor_si128(c2[2266],simde_mm_xor_si128(c2[466],simde_mm_xor_si128(c2[465],simde_mm_xor_si128(c2[3089],simde_mm_xor_si128(c2[3088],simde_mm_xor_si128(c2[1680],simde_mm_xor_si128(c2[1689],simde_mm_xor_si128(c2[1086],simde_mm_xor_si128(c2[1285],simde_mm_xor_si128(c2[1085],simde_mm_xor_si128(c2[3109],simde_mm_xor_si128(c2[3108],simde_mm_xor_si128(c2[309],simde_mm_xor_si128(c2[508],simde_mm_xor_si128(c2[308],simde_mm_xor_si128(c2[3129],simde_mm_xor_si128(c2[3128],simde_mm_xor_si128(c2[1525],simde_mm_xor_si128(c2[1524],simde_mm_xor_si128(c2[2320],simde_mm_xor_si128(c2[2529],simde_mm_xor_si128(c2[2329],simde_mm_xor_si128(c2[3149],simde_mm_xor_si128(c2[3148],simde_mm_xor_si128(c2[1743],simde_mm_xor_si128(c2[1942],simde_mm_xor_si128(c2[1742],simde_mm_xor_si128(c2[3169],simde_mm_xor_si128(c2[3168],simde_mm_xor_si128(c2[2366],simde_mm_xor_si128(c2[2365],simde_mm_xor_si128(c2[963],simde_mm_xor_si128(c2[1162],simde_mm_xor_si128(c2[962],simde_mm_xor_si128(c2[3189],simde_mm_xor_si128(c2[3188],simde_mm_xor_si128(c2[1383],simde_mm_xor_si128(c2[1382],simde_mm_xor_si128(c2[2782],simde_mm_xor_si128(c2[2981],c2[2781])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 38
+     d2[380]=simde_mm_xor_si128(c2[601],simde_mm_xor_si128(c2[401],simde_mm_xor_si128(c2[2209],simde_mm_xor_si128(c2[1202],simde_mm_xor_si128(c2[621],simde_mm_xor_si128(c2[421],simde_mm_xor_si128(c2[27],simde_mm_xor_si128(c2[1623],simde_mm_xor_si128(c2[20],simde_mm_xor_si128(c2[641],simde_mm_xor_si128(c2[441],simde_mm_xor_si128(c2[242],simde_mm_xor_si128(c2[661],simde_mm_xor_si128(c2[461],simde_mm_xor_si128(c2[2868],simde_mm_xor_si128(c2[1067],simde_mm_xor_si128(c2[481],simde_mm_xor_si128(c2[2281],simde_mm_xor_si128(c2[1687],simde_mm_xor_si128(c2[501],simde_mm_xor_si128(c2[900],simde_mm_xor_si128(c2[2705],simde_mm_xor_si128(c2[721],simde_mm_xor_si128(c2[521],simde_mm_xor_si128(c2[2126],simde_mm_xor_si128(c2[2921],simde_mm_xor_si128(c2[541],simde_mm_xor_si128(c2[2344],simde_mm_xor_si128(c2[561],simde_mm_xor_si128(c2[2967],simde_mm_xor_si128(c2[1564],simde_mm_xor_si128(c2[781],simde_mm_xor_si128(c2[581],simde_mm_xor_si128(c2[1984],c2[184]))))))))))))))))))))))))))))))))));
+
+//row: 39
+     d2[390]=simde_mm_xor_si128(c2[2008],simde_mm_xor_si128(c2[1808],simde_mm_xor_si128(c2[607],simde_mm_xor_si128(c2[407],simde_mm_xor_si128(c2[2609],simde_mm_xor_si128(c2[0],simde_mm_xor_si128(c2[2028],simde_mm_xor_si128(c2[1828],simde_mm_xor_si128(c2[1424],simde_mm_xor_si128(c2[3020],simde_mm_xor_si128(c2[2048],simde_mm_xor_si128(c2[1848],simde_mm_xor_si128(c2[1649],simde_mm_xor_si128(c2[2068],simde_mm_xor_si128(c2[1868],simde_mm_xor_si128(c2[1266],simde_mm_xor_si128(c2[1066],simde_mm_xor_si128(c2[2464],simde_mm_xor_si128(c2[2088],simde_mm_xor_si128(c2[1888],simde_mm_xor_si128(c2[489],simde_mm_xor_si128(c2[3084],simde_mm_xor_si128(c2[2108],simde_mm_xor_si128(c2[1908],simde_mm_xor_si128(c2[2307],simde_mm_xor_si128(c2[2128],simde_mm_xor_si128(c2[1928],simde_mm_xor_si128(c2[524],simde_mm_xor_si128(c2[324],simde_mm_xor_si128(c2[1129],simde_mm_xor_si128(c2[2148],simde_mm_xor_si128(c2[1948],simde_mm_xor_si128(c2[542],simde_mm_xor_si128(c2[1741],simde_mm_xor_si128(c2[2168],simde_mm_xor_si128(c2[1968],simde_mm_xor_si128(c2[1165],simde_mm_xor_si128(c2[2961],simde_mm_xor_si128(c2[2188],simde_mm_xor_si128(c2[1988],simde_mm_xor_si128(c2[382],simde_mm_xor_si128(c2[182],c2[1581]))))))))))))))))))))))))))))))))))))))))));
+
+//row: 40
+     d2[400]=simde_mm_xor_si128(c2[602],simde_mm_xor_si128(c2[2808],simde_mm_xor_si128(c2[2400],simde_mm_xor_si128(c2[1407],simde_mm_xor_si128(c2[1403],simde_mm_xor_si128(c2[400],simde_mm_xor_si128(c2[622],simde_mm_xor_si128(c2[2828],simde_mm_xor_si128(c2[228],simde_mm_xor_si128(c2[2424],simde_mm_xor_si128(c2[1824],simde_mm_xor_si128(c2[1021],simde_mm_xor_si128(c2[821],simde_mm_xor_si128(c2[642],simde_mm_xor_si128(c2[2848],simde_mm_xor_si128(c2[443],simde_mm_xor_si128(c2[2849],simde_mm_xor_si128(c2[2649],simde_mm_xor_si128(c2[40],simde_mm_xor_si128(c2[662],simde_mm_xor_si128(c2[2868],simde_mm_xor_si128(c2[3069],simde_mm_xor_si128(c2[2066],simde_mm_xor_si128(c2[1268],simde_mm_xor_si128(c2[265],simde_mm_xor_si128(c2[682],simde_mm_xor_si128(c2[2888],simde_mm_xor_si128(c2[2482],simde_mm_xor_si128(c2[1489],simde_mm_xor_si128(c2[1888],simde_mm_xor_si128(c2[1085],simde_mm_xor_si128(c2[885],simde_mm_xor_si128(c2[702],simde_mm_xor_si128(c2[2908],simde_mm_xor_si128(c2[1101],simde_mm_xor_si128(c2[308],simde_mm_xor_si128(c2[108],simde_mm_xor_si128(c2[722],simde_mm_xor_si128(c2[2928],simde_mm_xor_si128(c2[2327],simde_mm_xor_si128(c2[1324],simde_mm_xor_si128(c2[3122],simde_mm_xor_si128(c2[2329],simde_mm_xor_si128(c2[2129],simde_mm_xor_si128(c2[742],simde_mm_xor_si128(c2[2948],simde_mm_xor_si128(c2[2545],simde_mm_xor_si128(c2[1742],simde_mm_xor_si128(c2[1542],simde_mm_xor_si128(c2[762],simde_mm_xor_si128(c2[2968],simde_mm_xor_si128(c2[3168],simde_mm_xor_si128(c2[2165],simde_mm_xor_si128(c2[1765],simde_mm_xor_si128(c2[962],simde_mm_xor_si128(c2[762],simde_mm_xor_si128(c2[782],simde_mm_xor_si128(c2[2988],simde_mm_xor_si128(c2[2185],simde_mm_xor_si128(c2[1182],simde_mm_xor_si128(c2[385],simde_mm_xor_si128(c2[2781],c2[2581]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 41
+     d2[410]=simde_mm_xor_si128(c2[1403],simde_mm_xor_si128(c2[1203],simde_mm_xor_si128(c2[3001],simde_mm_xor_si128(c2[2004],simde_mm_xor_si128(c2[1423],simde_mm_xor_si128(c2[1223],simde_mm_xor_si128(c2[829],simde_mm_xor_si128(c2[2425],simde_mm_xor_si128(c2[20],simde_mm_xor_si128(c2[1443],simde_mm_xor_si128(c2[1243],simde_mm_xor_si128(c2[1044],simde_mm_xor_si128(c2[1463],simde_mm_xor_si128(c2[1263],simde_mm_xor_si128(c2[461],simde_mm_xor_si128(c2[1869],simde_mm_xor_si128(c2[1283],simde_mm_xor_si128(c2[3083],simde_mm_xor_si128(c2[2489],simde_mm_xor_si128(c2[1303],simde_mm_xor_si128(c2[1702],simde_mm_xor_si128(c2[500],simde_mm_xor_si128(c2[1523],simde_mm_xor_si128(c2[1323],simde_mm_xor_si128(c2[2928],simde_mm_xor_si128(c2[524],simde_mm_xor_si128(c2[1343],simde_mm_xor_si128(c2[3146],simde_mm_xor_si128(c2[1363],simde_mm_xor_si128(c2[560],simde_mm_xor_si128(c2[2366],simde_mm_xor_si128(c2[1583],simde_mm_xor_si128(c2[1383],simde_mm_xor_si128(c2[2786],c2[986]))))))))))))))))))))))))))))))))));
+  }
+}
+#endif
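NOTE (editor, not part of the patch): the hunk above is representative of every regenerated BG1/BG2 encoder file in this series. The generated XOR networks themselves are unchanged; the only edit is the mechanical rename of the Intel intrinsic names (__m128i, _mm_xor_si128) to their SIMDE counterparts, so the same tables compile to native SSE2 on x86_64 and are lowered to NEON on aarch64. A minimal sketch of that pattern follows, assuming SIMDE's portable header is included directly rather than through OAI's PHY/sse_intrin.h wrapper; xor_parity_rows is a hypothetical helper used only to illustrate the rewrite, not a function from the patch.

#include <stdint.h>
#include <simde/x86/sse2.h> /* portable: native SSE2 on x86_64, NEON emulation on aarch64 */

/* Hypothetical illustration of the rewritten pattern (not in the patch):
 * reinterpret the byte buffers as 128-bit lanes and XOR pairs of input
 * blocks into the parity output, exactly as the generated d2[...] lines do
 * with their longer simde_mm_xor_si128 chains. */
static inline void xor_parity_rows(uint8_t *c, uint8_t *d, int rows)
{
  simde__m128i *c2 = (simde__m128i *)c;
  simde__m128i *d2 = (simde__m128i *)d;
  for (int i = 0; i < rows; i++)
    d2[i] = simde_mm_xor_si128(c2[2 * i], c2[2 * i + 1]);
}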
diff --git a/openair1/PHY/CODING/nrLDPC_encoder/ldpc_BG2_Zc16_byte.c b/openair1/PHY/CODING/nrLDPC_encoder/ldpc_BG2_Zc16_byte.c
index 7fb64cfc4cc96c35cf31361c4575f9663691e5c3..f56459b85703d40c9a21ef9cec6014230f0ee195 100644
--- a/openair1/PHY/CODING/nrLDPC_encoder/ldpc_BG2_Zc16_byte.c
+++ b/openair1/PHY/CODING/nrLDPC_encoder/ldpc_BG2_Zc16_byte.c
@@ -1,9 +1,8 @@
 #include "PHY/sse_intrin.h"
 // generated code for Zc=16, byte encoding
 static inline __attribute__ ((no_sanitize("address", "undefined"))) void ldpc_BG2_Zc16_byte(uint8_t *c,uint8_t *d) {
-  __m128i *csimd=(__m128i *)c,*dsimd=(__m128i *)d;
-
-  __m128i *c2,*d2;
+  simde__m128i *csimd=(simde__m128i *)c,*dsimd=(simde__m128i *)d;
+  simde__m128i *c2,*d2;
 
   int i2;
   for (i2=0; i2<1; i2++) {
@@ -11,129 +10,129 @@ static inline __attribute__ ((no_sanitize("address", "undefined"))) void ldpc_BG
      d2=&dsimd[i2];
 
 //row: 0
-     d2[0]=_mm_xor_si128(c2[96],_mm_xor_si128(c2[72],_mm_xor_si128(c2[0],_mm_xor_si128(c2[50],_mm_xor_si128(c2[14],_mm_xor_si128(c2[86],_mm_xor_si128(c2[136],_mm_xor_si128(c2[112],_mm_xor_si128(c2[114],_mm_xor_si128(c2[66],_mm_xor_si128(c2[138],_mm_xor_si128(c2[152],_mm_xor_si128(c2[44],_mm_xor_si128(c2[164],_mm_xor_si128(c2[154],_mm_xor_si128(c2[94],_mm_xor_si128(c2[156],_mm_xor_si128(c2[24],_mm_xor_si128(c2[12],_mm_xor_si128(c2[146],_mm_xor_si128(c2[74],_mm_xor_si128(c2[196],_mm_xor_si128(c2[196],_mm_xor_si128(c2[28],_mm_xor_si128(c2[162],_mm_xor_si128(c2[150],c2[198]))))))))))))))))))))))))));
+     d2[0]=simde_mm_xor_si128(c2[96],simde_mm_xor_si128(c2[72],simde_mm_xor_si128(c2[0],simde_mm_xor_si128(c2[50],simde_mm_xor_si128(c2[14],simde_mm_xor_si128(c2[86],simde_mm_xor_si128(c2[136],simde_mm_xor_si128(c2[112],simde_mm_xor_si128(c2[114],simde_mm_xor_si128(c2[66],simde_mm_xor_si128(c2[138],simde_mm_xor_si128(c2[152],simde_mm_xor_si128(c2[44],simde_mm_xor_si128(c2[164],simde_mm_xor_si128(c2[154],simde_mm_xor_si128(c2[94],simde_mm_xor_si128(c2[156],simde_mm_xor_si128(c2[24],simde_mm_xor_si128(c2[12],simde_mm_xor_si128(c2[146],simde_mm_xor_si128(c2[74],simde_mm_xor_si128(c2[196],simde_mm_xor_si128(c2[196],simde_mm_xor_si128(c2[28],simde_mm_xor_si128(c2[162],simde_mm_xor_si128(c2[150],c2[198]))))))))))))))))))))))))));
 
 //row: 1
-     d2[1]=_mm_xor_si128(c2[108],_mm_xor_si128(c2[96],_mm_xor_si128(c2[72],_mm_xor_si128(c2[0],_mm_xor_si128(c2[62],_mm_xor_si128(c2[50],_mm_xor_si128(c2[14],_mm_xor_si128(c2[86],_mm_xor_si128(c2[148],_mm_xor_si128(c2[136],_mm_xor_si128(c2[112],_mm_xor_si128(c2[126],_mm_xor_si128(c2[114],_mm_xor_si128(c2[66],_mm_xor_si128(c2[138],_mm_xor_si128(c2[152],_mm_xor_si128(c2[44],_mm_xor_si128(c2[164],_mm_xor_si128(c2[154],_mm_xor_si128(c2[94],_mm_xor_si128(c2[168],_mm_xor_si128(c2[156],_mm_xor_si128(c2[24],_mm_xor_si128(c2[12],_mm_xor_si128(c2[146],_mm_xor_si128(c2[74],_mm_xor_si128(c2[196],_mm_xor_si128(c2[196],_mm_xor_si128(c2[28],_mm_xor_si128(c2[174],_mm_xor_si128(c2[162],_mm_xor_si128(c2[150],c2[198]))))))))))))))))))))))))))))))));
+     d2[1]=simde_mm_xor_si128(c2[108],simde_mm_xor_si128(c2[96],simde_mm_xor_si128(c2[72],simde_mm_xor_si128(c2[0],simde_mm_xor_si128(c2[62],simde_mm_xor_si128(c2[50],simde_mm_xor_si128(c2[14],simde_mm_xor_si128(c2[86],simde_mm_xor_si128(c2[148],simde_mm_xor_si128(c2[136],simde_mm_xor_si128(c2[112],simde_mm_xor_si128(c2[126],simde_mm_xor_si128(c2[114],simde_mm_xor_si128(c2[66],simde_mm_xor_si128(c2[138],simde_mm_xor_si128(c2[152],simde_mm_xor_si128(c2[44],simde_mm_xor_si128(c2[164],simde_mm_xor_si128(c2[154],simde_mm_xor_si128(c2[94],simde_mm_xor_si128(c2[168],simde_mm_xor_si128(c2[156],simde_mm_xor_si128(c2[24],simde_mm_xor_si128(c2[12],simde_mm_xor_si128(c2[146],simde_mm_xor_si128(c2[74],simde_mm_xor_si128(c2[196],simde_mm_xor_si128(c2[196],simde_mm_xor_si128(c2[28],simde_mm_xor_si128(c2[174],simde_mm_xor_si128(c2[162],simde_mm_xor_si128(c2[150],c2[198]))))))))))))))))))))))))))))))));
 
 //row: 2
-     d2[2]=_mm_xor_si128(c2[108],_mm_xor_si128(c2[96],_mm_xor_si128(c2[84],_mm_xor_si128(c2[72],_mm_xor_si128(c2[0],_mm_xor_si128(c2[62],_mm_xor_si128(c2[50],_mm_xor_si128(c2[14],_mm_xor_si128(c2[86],_mm_xor_si128(c2[148],_mm_xor_si128(c2[136],_mm_xor_si128(c2[112],_mm_xor_si128(c2[126],_mm_xor_si128(c2[114],_mm_xor_si128(c2[78],_mm_xor_si128(c2[66],_mm_xor_si128(c2[138],_mm_xor_si128(c2[164],_mm_xor_si128(c2[152],_mm_xor_si128(c2[44],_mm_xor_si128(c2[164],_mm_xor_si128(c2[166],_mm_xor_si128(c2[154],_mm_xor_si128(c2[94],_mm_xor_si128(c2[168],_mm_xor_si128(c2[156],_mm_xor_si128(c2[36],_mm_xor_si128(c2[24],_mm_xor_si128(c2[12],_mm_xor_si128(c2[158],_mm_xor_si128(c2[146],_mm_xor_si128(c2[74],_mm_xor_si128(c2[16],_mm_xor_si128(c2[196],_mm_xor_si128(c2[196],_mm_xor_si128(c2[28],_mm_xor_si128(c2[174],_mm_xor_si128(c2[162],_mm_xor_si128(c2[162],_mm_xor_si128(c2[150],c2[198]))))))))))))))))))))))))))))))))))))))));
+     d2[2]=simde_mm_xor_si128(c2[108],simde_mm_xor_si128(c2[96],simde_mm_xor_si128(c2[84],simde_mm_xor_si128(c2[72],simde_mm_xor_si128(c2[0],simde_mm_xor_si128(c2[62],simde_mm_xor_si128(c2[50],simde_mm_xor_si128(c2[14],simde_mm_xor_si128(c2[86],simde_mm_xor_si128(c2[148],simde_mm_xor_si128(c2[136],simde_mm_xor_si128(c2[112],simde_mm_xor_si128(c2[126],simde_mm_xor_si128(c2[114],simde_mm_xor_si128(c2[78],simde_mm_xor_si128(c2[66],simde_mm_xor_si128(c2[138],simde_mm_xor_si128(c2[164],simde_mm_xor_si128(c2[152],simde_mm_xor_si128(c2[44],simde_mm_xor_si128(c2[164],simde_mm_xor_si128(c2[166],simde_mm_xor_si128(c2[154],simde_mm_xor_si128(c2[94],simde_mm_xor_si128(c2[168],simde_mm_xor_si128(c2[156],simde_mm_xor_si128(c2[36],simde_mm_xor_si128(c2[24],simde_mm_xor_si128(c2[12],simde_mm_xor_si128(c2[158],simde_mm_xor_si128(c2[146],simde_mm_xor_si128(c2[74],simde_mm_xor_si128(c2[16],simde_mm_xor_si128(c2[196],simde_mm_xor_si128(c2[196],simde_mm_xor_si128(c2[28],simde_mm_xor_si128(c2[174],simde_mm_xor_si128(c2[162],simde_mm_xor_si128(c2[162],simde_mm_xor_si128(c2[150],c2[198]))))))))))))))))))))))))))))))))))))))));
 
 //row: 3
-     d2[3]=_mm_xor_si128(c2[96],_mm_xor_si128(c2[72],_mm_xor_si128(c2[0],_mm_xor_si128(c2[50],_mm_xor_si128(c2[14],_mm_xor_si128(c2[98],_mm_xor_si128(c2[86],_mm_xor_si128(c2[136],_mm_xor_si128(c2[124],_mm_xor_si128(c2[112],_mm_xor_si128(c2[114],_mm_xor_si128(c2[66],_mm_xor_si128(c2[138],_mm_xor_si128(c2[152],_mm_xor_si128(c2[44],_mm_xor_si128(c2[176],_mm_xor_si128(c2[164],_mm_xor_si128(c2[154],_mm_xor_si128(c2[106],_mm_xor_si128(c2[94],_mm_xor_si128(c2[156],_mm_xor_si128(c2[24],_mm_xor_si128(c2[24],_mm_xor_si128(c2[12],_mm_xor_si128(c2[146],_mm_xor_si128(c2[86],_mm_xor_si128(c2[74],_mm_xor_si128(c2[196],_mm_xor_si128(c2[196],_mm_xor_si128(c2[40],_mm_xor_si128(c2[28],_mm_xor_si128(c2[162],_mm_xor_si128(c2[150],_mm_xor_si128(c2[18],c2[198]))))))))))))))))))))))))))))))))));
+     d2[3]=simde_mm_xor_si128(c2[96],simde_mm_xor_si128(c2[72],simde_mm_xor_si128(c2[0],simde_mm_xor_si128(c2[50],simde_mm_xor_si128(c2[14],simde_mm_xor_si128(c2[98],simde_mm_xor_si128(c2[86],simde_mm_xor_si128(c2[136],simde_mm_xor_si128(c2[124],simde_mm_xor_si128(c2[112],simde_mm_xor_si128(c2[114],simde_mm_xor_si128(c2[66],simde_mm_xor_si128(c2[138],simde_mm_xor_si128(c2[152],simde_mm_xor_si128(c2[44],simde_mm_xor_si128(c2[176],simde_mm_xor_si128(c2[164],simde_mm_xor_si128(c2[154],simde_mm_xor_si128(c2[106],simde_mm_xor_si128(c2[94],simde_mm_xor_si128(c2[156],simde_mm_xor_si128(c2[24],simde_mm_xor_si128(c2[24],simde_mm_xor_si128(c2[12],simde_mm_xor_si128(c2[146],simde_mm_xor_si128(c2[86],simde_mm_xor_si128(c2[74],simde_mm_xor_si128(c2[196],simde_mm_xor_si128(c2[196],simde_mm_xor_si128(c2[40],simde_mm_xor_si128(c2[28],simde_mm_xor_si128(c2[162],simde_mm_xor_si128(c2[150],simde_mm_xor_si128(c2[18],c2[198]))))))))))))))))))))))))))))))))));
 
 //row: 4
-     d2[4]=_mm_xor_si128(c2[0],_mm_xor_si128(c2[180],_mm_xor_si128(c2[156],_mm_xor_si128(c2[84],_mm_xor_si128(c2[36],_mm_xor_si128(c2[146],_mm_xor_si128(c2[134],_mm_xor_si128(c2[98],_mm_xor_si128(c2[170],_mm_xor_si128(c2[74],_mm_xor_si128(c2[40],_mm_xor_si128(c2[28],_mm_xor_si128(c2[4],_mm_xor_si128(c2[18],_mm_xor_si128(c2[6],_mm_xor_si128(c2[150],_mm_xor_si128(c2[30],_mm_xor_si128(c2[44],_mm_xor_si128(c2[128],_mm_xor_si128(c2[56],_mm_xor_si128(c2[46],_mm_xor_si128(c2[178],_mm_xor_si128(c2[60],_mm_xor_si128(c2[48],_mm_xor_si128(c2[108],_mm_xor_si128(c2[96],_mm_xor_si128(c2[38],_mm_xor_si128(c2[158],_mm_xor_si128(c2[88],_mm_xor_si128(c2[88],_mm_xor_si128(c2[112],_mm_xor_si128(c2[66],_mm_xor_si128(c2[54],_mm_xor_si128(c2[42],c2[90]))))))))))))))))))))))))))))))))));
+     d2[4]=simde_mm_xor_si128(c2[0],simde_mm_xor_si128(c2[180],simde_mm_xor_si128(c2[156],simde_mm_xor_si128(c2[84],simde_mm_xor_si128(c2[36],simde_mm_xor_si128(c2[146],simde_mm_xor_si128(c2[134],simde_mm_xor_si128(c2[98],simde_mm_xor_si128(c2[170],simde_mm_xor_si128(c2[74],simde_mm_xor_si128(c2[40],simde_mm_xor_si128(c2[28],simde_mm_xor_si128(c2[4],simde_mm_xor_si128(c2[18],simde_mm_xor_si128(c2[6],simde_mm_xor_si128(c2[150],simde_mm_xor_si128(c2[30],simde_mm_xor_si128(c2[44],simde_mm_xor_si128(c2[128],simde_mm_xor_si128(c2[56],simde_mm_xor_si128(c2[46],simde_mm_xor_si128(c2[178],simde_mm_xor_si128(c2[60],simde_mm_xor_si128(c2[48],simde_mm_xor_si128(c2[108],simde_mm_xor_si128(c2[96],simde_mm_xor_si128(c2[38],simde_mm_xor_si128(c2[158],simde_mm_xor_si128(c2[88],simde_mm_xor_si128(c2[88],simde_mm_xor_si128(c2[112],simde_mm_xor_si128(c2[66],simde_mm_xor_si128(c2[54],simde_mm_xor_si128(c2[42],c2[90]))))))))))))))))))))))))))))))))));
 
 //row: 5
-     d2[5]=_mm_xor_si128(c2[0],_mm_xor_si128(c2[180],_mm_xor_si128(c2[156],_mm_xor_si128(c2[84],_mm_xor_si128(c2[84],_mm_xor_si128(c2[146],_mm_xor_si128(c2[134],_mm_xor_si128(c2[98],_mm_xor_si128(c2[170],_mm_xor_si128(c2[110],_mm_xor_si128(c2[40],_mm_xor_si128(c2[28],_mm_xor_si128(c2[4],_mm_xor_si128(c2[18],_mm_xor_si128(c2[6],_mm_xor_si128(c2[150],_mm_xor_si128(c2[30],_mm_xor_si128(c2[44],_mm_xor_si128(c2[128],_mm_xor_si128(c2[56],_mm_xor_si128(c2[46],_mm_xor_si128(c2[178],_mm_xor_si128(c2[34],_mm_xor_si128(c2[60],_mm_xor_si128(c2[48],_mm_xor_si128(c2[108],_mm_xor_si128(c2[96],_mm_xor_si128(c2[38],_mm_xor_si128(c2[158],_mm_xor_si128(c2[194],_mm_xor_si128(c2[88],_mm_xor_si128(c2[88],_mm_xor_si128(c2[112],_mm_xor_si128(c2[66],_mm_xor_si128(c2[54],_mm_xor_si128(c2[42],c2[90]))))))))))))))))))))))))))))))))))));
+     d2[5]=simde_mm_xor_si128(c2[0],simde_mm_xor_si128(c2[180],simde_mm_xor_si128(c2[156],simde_mm_xor_si128(c2[84],simde_mm_xor_si128(c2[84],simde_mm_xor_si128(c2[146],simde_mm_xor_si128(c2[134],simde_mm_xor_si128(c2[98],simde_mm_xor_si128(c2[170],simde_mm_xor_si128(c2[110],simde_mm_xor_si128(c2[40],simde_mm_xor_si128(c2[28],simde_mm_xor_si128(c2[4],simde_mm_xor_si128(c2[18],simde_mm_xor_si128(c2[6],simde_mm_xor_si128(c2[150],simde_mm_xor_si128(c2[30],simde_mm_xor_si128(c2[44],simde_mm_xor_si128(c2[128],simde_mm_xor_si128(c2[56],simde_mm_xor_si128(c2[46],simde_mm_xor_si128(c2[178],simde_mm_xor_si128(c2[34],simde_mm_xor_si128(c2[60],simde_mm_xor_si128(c2[48],simde_mm_xor_si128(c2[108],simde_mm_xor_si128(c2[96],simde_mm_xor_si128(c2[38],simde_mm_xor_si128(c2[158],simde_mm_xor_si128(c2[194],simde_mm_xor_si128(c2[88],simde_mm_xor_si128(c2[88],simde_mm_xor_si128(c2[112],simde_mm_xor_si128(c2[66],simde_mm_xor_si128(c2[54],simde_mm_xor_si128(c2[42],c2[90]))))))))))))))))))))))))))))))))))));
 
 //row: 6
-     d2[6]=_mm_xor_si128(c2[84],_mm_xor_si128(c2[72],_mm_xor_si128(c2[48],_mm_xor_si128(c2[168],_mm_xor_si128(c2[132],_mm_xor_si128(c2[38],_mm_xor_si128(c2[26],_mm_xor_si128(c2[182],_mm_xor_si128(c2[62],_mm_xor_si128(c2[124],_mm_xor_si128(c2[112],_mm_xor_si128(c2[88],_mm_xor_si128(c2[102],_mm_xor_si128(c2[90],_mm_xor_si128(c2[42],_mm_xor_si128(c2[114],_mm_xor_si128(c2[128],_mm_xor_si128(c2[20],_mm_xor_si128(c2[140],_mm_xor_si128(c2[130],_mm_xor_si128(c2[70],_mm_xor_si128(c2[58],_mm_xor_si128(c2[144],_mm_xor_si128(c2[132],_mm_xor_si128(c2[192],_mm_xor_si128(c2[180],_mm_xor_si128(c2[122],_mm_xor_si128(c2[50],_mm_xor_si128(c2[170],_mm_xor_si128(c2[172],_mm_xor_si128(c2[172],_mm_xor_si128(c2[196],_mm_xor_si128(c2[150],_mm_xor_si128(c2[138],_mm_xor_si128(c2[126],_mm_xor_si128(c2[174],c2[162]))))))))))))))))))))))))))))))))))));
+     d2[6]=simde_mm_xor_si128(c2[84],simde_mm_xor_si128(c2[72],simde_mm_xor_si128(c2[48],simde_mm_xor_si128(c2[168],simde_mm_xor_si128(c2[132],simde_mm_xor_si128(c2[38],simde_mm_xor_si128(c2[26],simde_mm_xor_si128(c2[182],simde_mm_xor_si128(c2[62],simde_mm_xor_si128(c2[124],simde_mm_xor_si128(c2[112],simde_mm_xor_si128(c2[88],simde_mm_xor_si128(c2[102],simde_mm_xor_si128(c2[90],simde_mm_xor_si128(c2[42],simde_mm_xor_si128(c2[114],simde_mm_xor_si128(c2[128],simde_mm_xor_si128(c2[20],simde_mm_xor_si128(c2[140],simde_mm_xor_si128(c2[130],simde_mm_xor_si128(c2[70],simde_mm_xor_si128(c2[58],simde_mm_xor_si128(c2[144],simde_mm_xor_si128(c2[132],simde_mm_xor_si128(c2[192],simde_mm_xor_si128(c2[180],simde_mm_xor_si128(c2[122],simde_mm_xor_si128(c2[50],simde_mm_xor_si128(c2[170],simde_mm_xor_si128(c2[172],simde_mm_xor_si128(c2[172],simde_mm_xor_si128(c2[196],simde_mm_xor_si128(c2[150],simde_mm_xor_si128(c2[138],simde_mm_xor_si128(c2[126],simde_mm_xor_si128(c2[174],c2[162]))))))))))))))))))))))))))))))))))));
 
 //row: 7
-     d2[7]=_mm_xor_si128(c2[144],_mm_xor_si128(c2[132],_mm_xor_si128(c2[144],_mm_xor_si128(c2[108],_mm_xor_si128(c2[120],_mm_xor_si128(c2[36],_mm_xor_si128(c2[48],_mm_xor_si128(c2[98],_mm_xor_si128(c2[86],_mm_xor_si128(c2[98],_mm_xor_si128(c2[50],_mm_xor_si128(c2[62],_mm_xor_si128(c2[146],_mm_xor_si128(c2[122],_mm_xor_si128(c2[134],_mm_xor_si128(c2[14],_mm_xor_si128(c2[184],_mm_xor_si128(c2[172],_mm_xor_si128(c2[184],_mm_xor_si128(c2[172],_mm_xor_si128(c2[148],_mm_xor_si128(c2[160],_mm_xor_si128(c2[162],_mm_xor_si128(c2[150],_mm_xor_si128(c2[162],_mm_xor_si128(c2[102],_mm_xor_si128(c2[114],_mm_xor_si128(c2[174],_mm_xor_si128(c2[186],_mm_xor_si128(c2[188],_mm_xor_si128(c2[8],_mm_xor_si128(c2[80],_mm_xor_si128(c2[92],_mm_xor_si128(c2[32],_mm_xor_si128(c2[8],_mm_xor_si128(c2[20],_mm_xor_si128(c2[190],_mm_xor_si128(c2[10],_mm_xor_si128(c2[154],_mm_xor_si128(c2[130],_mm_xor_si128(c2[142],_mm_xor_si128(c2[46],_mm_xor_si128(c2[12],_mm_xor_si128(c2[192],_mm_xor_si128(c2[12],_mm_xor_si128(c2[60],_mm_xor_si128(c2[72],_mm_xor_si128(c2[72],_mm_xor_si128(c2[48],_mm_xor_si128(c2[60],_mm_xor_si128(c2[182],_mm_xor_si128(c2[194],_mm_xor_si128(c2[134],_mm_xor_si128(c2[110],_mm_xor_si128(c2[122],_mm_xor_si128(c2[158],_mm_xor_si128(c2[40],_mm_xor_si128(c2[52],_mm_xor_si128(c2[40],_mm_xor_si128(c2[52],_mm_xor_si128(c2[88],_mm_xor_si128(c2[64],_mm_xor_si128(c2[76],_mm_xor_si128(c2[18],_mm_xor_si128(c2[198],_mm_xor_si128(c2[18],_mm_xor_si128(c2[186],_mm_xor_si128(c2[198],_mm_xor_si128(c2[66],_mm_xor_si128(c2[42],c2[54]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+     d2[7]=simde_mm_xor_si128(c2[144],simde_mm_xor_si128(c2[132],simde_mm_xor_si128(c2[144],simde_mm_xor_si128(c2[108],simde_mm_xor_si128(c2[120],simde_mm_xor_si128(c2[36],simde_mm_xor_si128(c2[48],simde_mm_xor_si128(c2[98],simde_mm_xor_si128(c2[86],simde_mm_xor_si128(c2[98],simde_mm_xor_si128(c2[50],simde_mm_xor_si128(c2[62],simde_mm_xor_si128(c2[146],simde_mm_xor_si128(c2[122],simde_mm_xor_si128(c2[134],simde_mm_xor_si128(c2[14],simde_mm_xor_si128(c2[184],simde_mm_xor_si128(c2[172],simde_mm_xor_si128(c2[184],simde_mm_xor_si128(c2[172],simde_mm_xor_si128(c2[148],simde_mm_xor_si128(c2[160],simde_mm_xor_si128(c2[162],simde_mm_xor_si128(c2[150],simde_mm_xor_si128(c2[162],simde_mm_xor_si128(c2[102],simde_mm_xor_si128(c2[114],simde_mm_xor_si128(c2[174],simde_mm_xor_si128(c2[186],simde_mm_xor_si128(c2[188],simde_mm_xor_si128(c2[8],simde_mm_xor_si128(c2[80],simde_mm_xor_si128(c2[92],simde_mm_xor_si128(c2[32],simde_mm_xor_si128(c2[8],simde_mm_xor_si128(c2[20],simde_mm_xor_si128(c2[190],simde_mm_xor_si128(c2[10],simde_mm_xor_si128(c2[154],simde_mm_xor_si128(c2[130],simde_mm_xor_si128(c2[142],simde_mm_xor_si128(c2[46],simde_mm_xor_si128(c2[12],simde_mm_xor_si128(c2[192],simde_mm_xor_si128(c2[12],simde_mm_xor_si128(c2[60],simde_mm_xor_si128(c2[72],simde_mm_xor_si128(c2[72],simde_mm_xor_si128(c2[48],simde_mm_xor_si128(c2[60],simde_mm_xor_si128(c2[182],simde_mm_xor_si128(c2[194],simde_mm_xor_si128(c2[134],simde_mm_xor_si128(c2[110],simde_mm_xor_si128(c2[122],simde_mm_xor_si128(c2[158],simde_mm_xor_si128(c2[40],simde_mm_xor_si128(c2[52],simde_mm_xor_si128(c2[40],simde_mm_xor_si128(c2[52],simde_mm_xor_si128(c2[88],simde_mm_xor_si128(c2[64],simde_mm_xor_si128(c2[76],simde_mm_xor_si128(c2[18],simde_mm_xor_si128(c2[198],simde_mm_xor_si128(c2[18],simde_mm_xor_si128(c2[186],simde_mm_xor_si128(c2[198],simde_mm_xor_si128(c2[66],simde_mm_xor_si128(c2[42],c2[54]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
 
 //row: 8
-     d2[8]=_mm_xor_si128(c2[180],_mm_xor_si128(c2[168],_mm_xor_si128(c2[156],_mm_xor_si128(c2[144],_mm_xor_si128(c2[72],_mm_xor_si128(c2[168],_mm_xor_si128(c2[134],_mm_xor_si128(c2[122],_mm_xor_si128(c2[86],_mm_xor_si128(c2[158],_mm_xor_si128(c2[170],_mm_xor_si128(c2[28],_mm_xor_si128(c2[16],_mm_xor_si128(c2[184],_mm_xor_si128(c2[6],_mm_xor_si128(c2[186],_mm_xor_si128(c2[150],_mm_xor_si128(c2[138],_mm_xor_si128(c2[18],_mm_xor_si128(c2[44],_mm_xor_si128(c2[32],_mm_xor_si128(c2[116],_mm_xor_si128(c2[44],_mm_xor_si128(c2[46],_mm_xor_si128(c2[34],_mm_xor_si128(c2[166],_mm_xor_si128(c2[48],_mm_xor_si128(c2[36],_mm_xor_si128(c2[108],_mm_xor_si128(c2[96],_mm_xor_si128(c2[84],_mm_xor_si128(c2[38],_mm_xor_si128(c2[26],_mm_xor_si128(c2[146],_mm_xor_si128(c2[88],_mm_xor_si128(c2[76],_mm_xor_si128(c2[76],_mm_xor_si128(c2[100],_mm_xor_si128(c2[54],_mm_xor_si128(c2[42],_mm_xor_si128(c2[42],_mm_xor_si128(c2[30],c2[78]))))))))))))))))))))))))))))))))))))))))));
+     d2[8]=simde_mm_xor_si128(c2[180],simde_mm_xor_si128(c2[168],simde_mm_xor_si128(c2[156],simde_mm_xor_si128(c2[144],simde_mm_xor_si128(c2[72],simde_mm_xor_si128(c2[168],simde_mm_xor_si128(c2[134],simde_mm_xor_si128(c2[122],simde_mm_xor_si128(c2[86],simde_mm_xor_si128(c2[158],simde_mm_xor_si128(c2[170],simde_mm_xor_si128(c2[28],simde_mm_xor_si128(c2[16],simde_mm_xor_si128(c2[184],simde_mm_xor_si128(c2[6],simde_mm_xor_si128(c2[186],simde_mm_xor_si128(c2[150],simde_mm_xor_si128(c2[138],simde_mm_xor_si128(c2[18],simde_mm_xor_si128(c2[44],simde_mm_xor_si128(c2[32],simde_mm_xor_si128(c2[116],simde_mm_xor_si128(c2[44],simde_mm_xor_si128(c2[46],simde_mm_xor_si128(c2[34],simde_mm_xor_si128(c2[166],simde_mm_xor_si128(c2[48],simde_mm_xor_si128(c2[36],simde_mm_xor_si128(c2[108],simde_mm_xor_si128(c2[96],simde_mm_xor_si128(c2[84],simde_mm_xor_si128(c2[38],simde_mm_xor_si128(c2[26],simde_mm_xor_si128(c2[146],simde_mm_xor_si128(c2[88],simde_mm_xor_si128(c2[76],simde_mm_xor_si128(c2[76],simde_mm_xor_si128(c2[100],simde_mm_xor_si128(c2[54],simde_mm_xor_si128(c2[42],simde_mm_xor_si128(c2[42],simde_mm_xor_si128(c2[30],c2[78]))))))))))))))))))))))))))))))))))))))))));
 
 //row: 9
-     d2[9]=_mm_xor_si128(c2[0],_mm_xor_si128(c2[60],_mm_xor_si128(c2[180],_mm_xor_si128(c2[36],_mm_xor_si128(c2[156],_mm_xor_si128(c2[156],_mm_xor_si128(c2[84],_mm_xor_si128(c2[146],_mm_xor_si128(c2[14],_mm_xor_si128(c2[134],_mm_xor_si128(c2[170],_mm_xor_si128(c2[98],_mm_xor_si128(c2[50],_mm_xor_si128(c2[170],_mm_xor_si128(c2[134],_mm_xor_si128(c2[40],_mm_xor_si128(c2[100],_mm_xor_si128(c2[28],_mm_xor_si128(c2[76],_mm_xor_si128(c2[4],_mm_xor_si128(c2[18],_mm_xor_si128(c2[78],_mm_xor_si128(c2[6],_mm_xor_si128(c2[30],_mm_xor_si128(c2[150],_mm_xor_si128(c2[102],_mm_xor_si128(c2[30],_mm_xor_si128(c2[116],_mm_xor_si128(c2[44],_mm_xor_si128(c2[8],_mm_xor_si128(c2[128],_mm_xor_si128(c2[128],_mm_xor_si128(c2[56],_mm_xor_si128(c2[118],_mm_xor_si128(c2[46],_mm_xor_si128(c2[58],_mm_xor_si128(c2[178],_mm_xor_si128(c2[60],_mm_xor_si128(c2[120],_mm_xor_si128(c2[48],_mm_xor_si128(c2[180],_mm_xor_si128(c2[108],_mm_xor_si128(c2[168],_mm_xor_si128(c2[96],_mm_xor_si128(c2[110],_mm_xor_si128(c2[38],_mm_xor_si128(c2[38],_mm_xor_si128(c2[158],_mm_xor_si128(c2[160],_mm_xor_si128(c2[88],_mm_xor_si128(c2[160],_mm_xor_si128(c2[88],_mm_xor_si128(c2[184],_mm_xor_si128(c2[112],_mm_xor_si128(c2[172],_mm_xor_si128(c2[66],_mm_xor_si128(c2[126],_mm_xor_si128(c2[54],_mm_xor_si128(c2[114],_mm_xor_si128(c2[42],_mm_xor_si128(c2[162],c2[90])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+     d2[9]=simde_mm_xor_si128(c2[0],simde_mm_xor_si128(c2[60],simde_mm_xor_si128(c2[180],simde_mm_xor_si128(c2[36],simde_mm_xor_si128(c2[156],simde_mm_xor_si128(c2[156],simde_mm_xor_si128(c2[84],simde_mm_xor_si128(c2[146],simde_mm_xor_si128(c2[14],simde_mm_xor_si128(c2[134],simde_mm_xor_si128(c2[170],simde_mm_xor_si128(c2[98],simde_mm_xor_si128(c2[50],simde_mm_xor_si128(c2[170],simde_mm_xor_si128(c2[134],simde_mm_xor_si128(c2[40],simde_mm_xor_si128(c2[100],simde_mm_xor_si128(c2[28],simde_mm_xor_si128(c2[76],simde_mm_xor_si128(c2[4],simde_mm_xor_si128(c2[18],simde_mm_xor_si128(c2[78],simde_mm_xor_si128(c2[6],simde_mm_xor_si128(c2[30],simde_mm_xor_si128(c2[150],simde_mm_xor_si128(c2[102],simde_mm_xor_si128(c2[30],simde_mm_xor_si128(c2[116],simde_mm_xor_si128(c2[44],simde_mm_xor_si128(c2[8],simde_mm_xor_si128(c2[128],simde_mm_xor_si128(c2[128],simde_mm_xor_si128(c2[56],simde_mm_xor_si128(c2[118],simde_mm_xor_si128(c2[46],simde_mm_xor_si128(c2[58],simde_mm_xor_si128(c2[178],simde_mm_xor_si128(c2[60],simde_mm_xor_si128(c2[120],simde_mm_xor_si128(c2[48],simde_mm_xor_si128(c2[180],simde_mm_xor_si128(c2[108],simde_mm_xor_si128(c2[168],simde_mm_xor_si128(c2[96],simde_mm_xor_si128(c2[110],simde_mm_xor_si128(c2[38],simde_mm_xor_si128(c2[38],simde_mm_xor_si128(c2[158],simde_mm_xor_si128(c2[160],simde_mm_xor_si128(c2[88],simde_mm_xor_si128(c2[160],simde_mm_xor_si128(c2[88],simde_mm_xor_si128(c2[184],simde_mm_xor_si128(c2[112],simde_mm_xor_si128(c2[172],simde_mm_xor_si128(c2[66],simde_mm_xor_si128(c2[126],simde_mm_xor_si128(c2[54],simde_mm_xor_si128(c2[114],simde_mm_xor_si128(c2[42],simde_mm_xor_si128(c2[162],c2[90])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
 
 //row: 10
-     d2[10]=_mm_xor_si128(c2[132],_mm_xor_si128(c2[110],_mm_xor_si128(c2[12],c2[74])));
+     d2[10]=simde_mm_xor_si128(c2[132],simde_mm_xor_si128(c2[110],simde_mm_xor_si128(c2[12],c2[74])));
 
 //row: 11
-     d2[11]=_mm_xor_si128(c2[0],_mm_xor_si128(c2[168],_mm_xor_si128(c2[96],_mm_xor_si128(c2[132],_mm_xor_si128(c2[146],_mm_xor_si128(c2[110],_mm_xor_si128(c2[2],_mm_xor_si128(c2[182],_mm_xor_si128(c2[40],_mm_xor_si128(c2[28],_mm_xor_si128(c2[16],_mm_xor_si128(c2[18],_mm_xor_si128(c2[162],_mm_xor_si128(c2[42],_mm_xor_si128(c2[56],_mm_xor_si128(c2[140],_mm_xor_si128(c2[80],_mm_xor_si128(c2[68],_mm_xor_si128(c2[58],_mm_xor_si128(c2[10],_mm_xor_si128(c2[190],_mm_xor_si128(c2[60],_mm_xor_si128(c2[120],_mm_xor_si128(c2[120],_mm_xor_si128(c2[108],_mm_xor_si128(c2[50],_mm_xor_si128(c2[182],_mm_xor_si128(c2[170],_mm_xor_si128(c2[158],_mm_xor_si128(c2[100],_mm_xor_si128(c2[100],_mm_xor_si128(c2[136],_mm_xor_si128(c2[124],_mm_xor_si128(c2[66],_mm_xor_si128(c2[54],_mm_xor_si128(c2[114],_mm_xor_si128(c2[102],c2[42])))))))))))))))))))))))))))))))))))));
+     d2[11]=simde_mm_xor_si128(c2[0],simde_mm_xor_si128(c2[168],simde_mm_xor_si128(c2[96],simde_mm_xor_si128(c2[132],simde_mm_xor_si128(c2[146],simde_mm_xor_si128(c2[110],simde_mm_xor_si128(c2[2],simde_mm_xor_si128(c2[182],simde_mm_xor_si128(c2[40],simde_mm_xor_si128(c2[28],simde_mm_xor_si128(c2[16],simde_mm_xor_si128(c2[18],simde_mm_xor_si128(c2[162],simde_mm_xor_si128(c2[42],simde_mm_xor_si128(c2[56],simde_mm_xor_si128(c2[140],simde_mm_xor_si128(c2[80],simde_mm_xor_si128(c2[68],simde_mm_xor_si128(c2[58],simde_mm_xor_si128(c2[10],simde_mm_xor_si128(c2[190],simde_mm_xor_si128(c2[60],simde_mm_xor_si128(c2[120],simde_mm_xor_si128(c2[120],simde_mm_xor_si128(c2[108],simde_mm_xor_si128(c2[50],simde_mm_xor_si128(c2[182],simde_mm_xor_si128(c2[170],simde_mm_xor_si128(c2[158],simde_mm_xor_si128(c2[100],simde_mm_xor_si128(c2[100],simde_mm_xor_si128(c2[136],simde_mm_xor_si128(c2[124],simde_mm_xor_si128(c2[66],simde_mm_xor_si128(c2[54],simde_mm_xor_si128(c2[114],simde_mm_xor_si128(c2[102],c2[42])))))))))))))))))))))))))))))))))))));
 
 //row: 12
-     d2[12]=_mm_xor_si128(c2[84],_mm_xor_si128(c2[72],_mm_xor_si128(c2[48],_mm_xor_si128(c2[168],_mm_xor_si128(c2[38],_mm_xor_si128(c2[26],_mm_xor_si128(c2[182],_mm_xor_si128(c2[62],_mm_xor_si128(c2[182],_mm_xor_si128(c2[124],_mm_xor_si128(c2[112],_mm_xor_si128(c2[88],_mm_xor_si128(c2[102],_mm_xor_si128(c2[90],_mm_xor_si128(c2[42],_mm_xor_si128(c2[114],_mm_xor_si128(c2[186],_mm_xor_si128(c2[128],_mm_xor_si128(c2[20],_mm_xor_si128(c2[140],_mm_xor_si128(c2[130],_mm_xor_si128(c2[70],_mm_xor_si128(c2[144],_mm_xor_si128(c2[132],_mm_xor_si128(c2[192],_mm_xor_si128(c2[180],_mm_xor_si128(c2[122],_mm_xor_si128(c2[50],_mm_xor_si128(c2[172],_mm_xor_si128(c2[172],_mm_xor_si128(c2[196],_mm_xor_si128(c2[150],_mm_xor_si128(c2[138],_mm_xor_si128(c2[126],c2[174]))))))))))))))))))))))))))))))))));
+     d2[12]=simde_mm_xor_si128(c2[84],simde_mm_xor_si128(c2[72],simde_mm_xor_si128(c2[48],simde_mm_xor_si128(c2[168],simde_mm_xor_si128(c2[38],simde_mm_xor_si128(c2[26],simde_mm_xor_si128(c2[182],simde_mm_xor_si128(c2[62],simde_mm_xor_si128(c2[182],simde_mm_xor_si128(c2[124],simde_mm_xor_si128(c2[112],simde_mm_xor_si128(c2[88],simde_mm_xor_si128(c2[102],simde_mm_xor_si128(c2[90],simde_mm_xor_si128(c2[42],simde_mm_xor_si128(c2[114],simde_mm_xor_si128(c2[186],simde_mm_xor_si128(c2[128],simde_mm_xor_si128(c2[20],simde_mm_xor_si128(c2[140],simde_mm_xor_si128(c2[130],simde_mm_xor_si128(c2[70],simde_mm_xor_si128(c2[144],simde_mm_xor_si128(c2[132],simde_mm_xor_si128(c2[192],simde_mm_xor_si128(c2[180],simde_mm_xor_si128(c2[122],simde_mm_xor_si128(c2[50],simde_mm_xor_si128(c2[172],simde_mm_xor_si128(c2[172],simde_mm_xor_si128(c2[196],simde_mm_xor_si128(c2[150],simde_mm_xor_si128(c2[138],simde_mm_xor_si128(c2[126],c2[174]))))))))))))))))))))))))))))))))));
 
 //row: 13
-     d2[13]=_mm_xor_si128(c2[72],_mm_xor_si128(c2[48],_mm_xor_si128(c2[168],_mm_xor_si128(c2[36],_mm_xor_si128(c2[26],_mm_xor_si128(c2[182],_mm_xor_si128(c2[74],_mm_xor_si128(c2[62],_mm_xor_si128(c2[26],_mm_xor_si128(c2[112],_mm_xor_si128(c2[100],_mm_xor_si128(c2[88],_mm_xor_si128(c2[90],_mm_xor_si128(c2[42],_mm_xor_si128(c2[114],_mm_xor_si128(c2[128],_mm_xor_si128(c2[20],_mm_xor_si128(c2[152],_mm_xor_si128(c2[140],_mm_xor_si128(c2[130],_mm_xor_si128(c2[82],_mm_xor_si128(c2[70],_mm_xor_si128(c2[132],_mm_xor_si128(c2[192],_mm_xor_si128(c2[192],_mm_xor_si128(c2[180],_mm_xor_si128(c2[122],_mm_xor_si128(c2[62],_mm_xor_si128(c2[50],_mm_xor_si128(c2[172],_mm_xor_si128(c2[172],_mm_xor_si128(c2[16],_mm_xor_si128(c2[196],_mm_xor_si128(c2[88],_mm_xor_si128(c2[138],_mm_xor_si128(c2[126],_mm_xor_si128(c2[186],c2[174])))))))))))))))))))))))))))))))))))));
+     d2[13]=simde_mm_xor_si128(c2[72],simde_mm_xor_si128(c2[48],simde_mm_xor_si128(c2[168],simde_mm_xor_si128(c2[36],simde_mm_xor_si128(c2[26],simde_mm_xor_si128(c2[182],simde_mm_xor_si128(c2[74],simde_mm_xor_si128(c2[62],simde_mm_xor_si128(c2[26],simde_mm_xor_si128(c2[112],simde_mm_xor_si128(c2[100],simde_mm_xor_si128(c2[88],simde_mm_xor_si128(c2[90],simde_mm_xor_si128(c2[42],simde_mm_xor_si128(c2[114],simde_mm_xor_si128(c2[128],simde_mm_xor_si128(c2[20],simde_mm_xor_si128(c2[152],simde_mm_xor_si128(c2[140],simde_mm_xor_si128(c2[130],simde_mm_xor_si128(c2[82],simde_mm_xor_si128(c2[70],simde_mm_xor_si128(c2[132],simde_mm_xor_si128(c2[192],simde_mm_xor_si128(c2[192],simde_mm_xor_si128(c2[180],simde_mm_xor_si128(c2[122],simde_mm_xor_si128(c2[62],simde_mm_xor_si128(c2[50],simde_mm_xor_si128(c2[172],simde_mm_xor_si128(c2[172],simde_mm_xor_si128(c2[16],simde_mm_xor_si128(c2[196],simde_mm_xor_si128(c2[88],simde_mm_xor_si128(c2[138],simde_mm_xor_si128(c2[126],simde_mm_xor_si128(c2[186],c2[174])))))))))))))))))))))))))))))))))))));
 
 //row: 14
-     d2[14]=_mm_xor_si128(c2[144],_mm_xor_si128(c2[132],_mm_xor_si128(c2[0],_mm_xor_si128(c2[108],_mm_xor_si128(c2[168],_mm_xor_si128(c2[36],_mm_xor_si128(c2[96],_mm_xor_si128(c2[98],_mm_xor_si128(c2[86],_mm_xor_si128(c2[146],_mm_xor_si128(c2[50],_mm_xor_si128(c2[110],_mm_xor_si128(c2[2],_mm_xor_si128(c2[122],_mm_xor_si128(c2[182],_mm_xor_si128(c2[38],_mm_xor_si128(c2[184],_mm_xor_si128(c2[172],_mm_xor_si128(c2[40],_mm_xor_si128(c2[28],_mm_xor_si128(c2[148],_mm_xor_si128(c2[16],_mm_xor_si128(c2[162],_mm_xor_si128(c2[150],_mm_xor_si128(c2[18],_mm_xor_si128(c2[102],_mm_xor_si128(c2[162],_mm_xor_si128(c2[174],_mm_xor_si128(c2[42],_mm_xor_si128(c2[188],_mm_xor_si128(c2[56],_mm_xor_si128(c2[80],_mm_xor_si128(c2[140],_mm_xor_si128(c2[80],_mm_xor_si128(c2[8],_mm_xor_si128(c2[68],_mm_xor_si128(c2[190],_mm_xor_si128(c2[58],_mm_xor_si128(c2[10],_mm_xor_si128(c2[130],_mm_xor_si128(c2[190],_mm_xor_si128(c2[12],_mm_xor_si128(c2[192],_mm_xor_si128(c2[60],_mm_xor_si128(c2[60],_mm_xor_si128(c2[120],_mm_xor_si128(c2[120],_mm_xor_si128(c2[48],_mm_xor_si128(c2[108],_mm_xor_si128(c2[24],_mm_xor_si128(c2[182],_mm_xor_si128(c2[50],_mm_xor_si128(c2[182],_mm_xor_si128(c2[110],_mm_xor_si128(c2[170],_mm_xor_si128(c2[40],_mm_xor_si128(c2[100],_mm_xor_si128(c2[40],_mm_xor_si128(c2[100],_mm_xor_si128(c2[136],_mm_xor_si128(c2[64],_mm_xor_si128(c2[124],_mm_xor_si128(c2[18],_mm_xor_si128(c2[198],_mm_xor_si128(c2[66],_mm_xor_si128(c2[186],_mm_xor_si128(c2[54],_mm_xor_si128(c2[114],_mm_xor_si128(c2[42],c2[102])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+     d2[14]=simde_mm_xor_si128(c2[144],simde_mm_xor_si128(c2[132],simde_mm_xor_si128(c2[0],simde_mm_xor_si128(c2[108],simde_mm_xor_si128(c2[168],simde_mm_xor_si128(c2[36],simde_mm_xor_si128(c2[96],simde_mm_xor_si128(c2[98],simde_mm_xor_si128(c2[86],simde_mm_xor_si128(c2[146],simde_mm_xor_si128(c2[50],simde_mm_xor_si128(c2[110],simde_mm_xor_si128(c2[2],simde_mm_xor_si128(c2[122],simde_mm_xor_si128(c2[182],simde_mm_xor_si128(c2[38],simde_mm_xor_si128(c2[184],simde_mm_xor_si128(c2[172],simde_mm_xor_si128(c2[40],simde_mm_xor_si128(c2[28],simde_mm_xor_si128(c2[148],simde_mm_xor_si128(c2[16],simde_mm_xor_si128(c2[162],simde_mm_xor_si128(c2[150],simde_mm_xor_si128(c2[18],simde_mm_xor_si128(c2[102],simde_mm_xor_si128(c2[162],simde_mm_xor_si128(c2[174],simde_mm_xor_si128(c2[42],simde_mm_xor_si128(c2[188],simde_mm_xor_si128(c2[56],simde_mm_xor_si128(c2[80],simde_mm_xor_si128(c2[140],simde_mm_xor_si128(c2[80],simde_mm_xor_si128(c2[8],simde_mm_xor_si128(c2[68],simde_mm_xor_si128(c2[190],simde_mm_xor_si128(c2[58],simde_mm_xor_si128(c2[10],simde_mm_xor_si128(c2[130],simde_mm_xor_si128(c2[190],simde_mm_xor_si128(c2[12],simde_mm_xor_si128(c2[192],simde_mm_xor_si128(c2[60],simde_mm_xor_si128(c2[60],simde_mm_xor_si128(c2[120],simde_mm_xor_si128(c2[120],simde_mm_xor_si128(c2[48],simde_mm_xor_si128(c2[108],simde_mm_xor_si128(c2[24],simde_mm_xor_si128(c2[182],simde_mm_xor_si128(c2[50],simde_mm_xor_si128(c2[182],simde_mm_xor_si128(c2[110],simde_mm_xor_si128(c2[170],simde_mm_xor_si128(c2[40],simde_mm_xor_si128(c2[100],simde_mm_xor_si128(c2[40],simde_mm_xor_si128(c2[100],simde_mm_xor_si128(c2[136],simde_mm_xor_si128(c2[64],simde_mm_xor_si128(c2[124],simde_mm_xor_si128(c2[18],simde_mm_xor_si128(c2[198],simde_mm_xor_si128(c2[66],simde_mm_xor_si128(c2[186],simde_mm_xor_si128(c2[54],simde_mm_xor_si128(c2[114],simde_mm_xor_si128(c2[42],c2[102])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
 
 //row: 15
-     d2[15]=_mm_xor_si128(c2[168],_mm_xor_si128(c2[84],_mm_xor_si128(c2[156],_mm_xor_si128(c2[60],_mm_xor_si128(c2[132],_mm_xor_si128(c2[180],_mm_xor_si128(c2[60],_mm_xor_si128(c2[36],_mm_xor_si128(c2[122],_mm_xor_si128(c2[38],_mm_xor_si128(c2[110],_mm_xor_si128(c2[2],_mm_xor_si128(c2[74],_mm_xor_si128(c2[74],_mm_xor_si128(c2[146],_mm_xor_si128(c2[16],_mm_xor_si128(c2[124],_mm_xor_si128(c2[4],_mm_xor_si128(c2[100],_mm_xor_si128(c2[172],_mm_xor_si128(c2[186],_mm_xor_si128(c2[102],_mm_xor_si128(c2[174],_mm_xor_si128(c2[54],_mm_xor_si128(c2[126],_mm_xor_si128(c2[126],_mm_xor_si128(c2[6],_mm_xor_si128(c2[140],_mm_xor_si128(c2[20],_mm_xor_si128(c2[32],_mm_xor_si128(c2[104],_mm_xor_si128(c2[152],_mm_xor_si128(c2[32],_mm_xor_si128(c2[142],_mm_xor_si128(c2[22],_mm_xor_si128(c2[82],_mm_xor_si128(c2[154],_mm_xor_si128(c2[36],_mm_xor_si128(c2[144],_mm_xor_si128(c2[24],_mm_xor_si128(c2[12],_mm_xor_si128(c2[84],_mm_xor_si128(c2[192],_mm_xor_si128(c2[72],_mm_xor_si128(c2[134],_mm_xor_si128(c2[14],_mm_xor_si128(c2[62],_mm_xor_si128(c2[134],_mm_xor_si128(c2[184],_mm_xor_si128(c2[64],_mm_xor_si128(c2[184],_mm_xor_si128(c2[64],_mm_xor_si128(c2[16],_mm_xor_si128(c2[88],_mm_xor_si128(c2[42],_mm_xor_si128(c2[150],_mm_xor_si128(c2[30],_mm_xor_si128(c2[138],_mm_xor_si128(c2[18],_mm_xor_si128(c2[186],c2[66]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+     d2[15]=simde_mm_xor_si128(c2[168],simde_mm_xor_si128(c2[84],simde_mm_xor_si128(c2[156],simde_mm_xor_si128(c2[60],simde_mm_xor_si128(c2[132],simde_mm_xor_si128(c2[180],simde_mm_xor_si128(c2[60],simde_mm_xor_si128(c2[36],simde_mm_xor_si128(c2[122],simde_mm_xor_si128(c2[38],simde_mm_xor_si128(c2[110],simde_mm_xor_si128(c2[2],simde_mm_xor_si128(c2[74],simde_mm_xor_si128(c2[74],simde_mm_xor_si128(c2[146],simde_mm_xor_si128(c2[16],simde_mm_xor_si128(c2[124],simde_mm_xor_si128(c2[4],simde_mm_xor_si128(c2[100],simde_mm_xor_si128(c2[172],simde_mm_xor_si128(c2[186],simde_mm_xor_si128(c2[102],simde_mm_xor_si128(c2[174],simde_mm_xor_si128(c2[54],simde_mm_xor_si128(c2[126],simde_mm_xor_si128(c2[126],simde_mm_xor_si128(c2[6],simde_mm_xor_si128(c2[140],simde_mm_xor_si128(c2[20],simde_mm_xor_si128(c2[32],simde_mm_xor_si128(c2[104],simde_mm_xor_si128(c2[152],simde_mm_xor_si128(c2[32],simde_mm_xor_si128(c2[142],simde_mm_xor_si128(c2[22],simde_mm_xor_si128(c2[82],simde_mm_xor_si128(c2[154],simde_mm_xor_si128(c2[36],simde_mm_xor_si128(c2[144],simde_mm_xor_si128(c2[24],simde_mm_xor_si128(c2[12],simde_mm_xor_si128(c2[84],simde_mm_xor_si128(c2[192],simde_mm_xor_si128(c2[72],simde_mm_xor_si128(c2[134],simde_mm_xor_si128(c2[14],simde_mm_xor_si128(c2[62],simde_mm_xor_si128(c2[134],simde_mm_xor_si128(c2[184],simde_mm_xor_si128(c2[64],simde_mm_xor_si128(c2[184],simde_mm_xor_si128(c2[64],simde_mm_xor_si128(c2[16],simde_mm_xor_si128(c2[88],simde_mm_xor_si128(c2[42],simde_mm_xor_si128(c2[150],simde_mm_xor_si128(c2[30],simde_mm_xor_si128(c2[138],simde_mm_xor_si128(c2[18],simde_mm_xor_si128(c2[186],c2[66]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
 
 //row: 16
-     d2[16]=_mm_xor_si128(c2[12],_mm_xor_si128(c2[132],_mm_xor_si128(c2[0],_mm_xor_si128(c2[120],_mm_xor_si128(c2[108],_mm_xor_si128(c2[168],_mm_xor_si128(c2[96],_mm_xor_si128(c2[96],_mm_xor_si128(c2[24],_mm_xor_si128(c2[158],_mm_xor_si128(c2[86],_mm_xor_si128(c2[146],_mm_xor_si128(c2[74],_mm_xor_si128(c2[110],_mm_xor_si128(c2[38],_mm_xor_si128(c2[182],_mm_xor_si128(c2[110],_mm_xor_si128(c2[134],_mm_xor_si128(c2[52],_mm_xor_si128(c2[172],_mm_xor_si128(c2[40],_mm_xor_si128(c2[160],_mm_xor_si128(c2[16],_mm_xor_si128(c2[136],_mm_xor_si128(c2[30],_mm_xor_si128(c2[150],_mm_xor_si128(c2[18],_mm_xor_si128(c2[138],_mm_xor_si128(c2[102],_mm_xor_si128(c2[162],_mm_xor_si128(c2[90],_mm_xor_si128(c2[42],_mm_xor_si128(c2[162],_mm_xor_si128(c2[188],_mm_xor_si128(c2[56],_mm_xor_si128(c2[176],_mm_xor_si128(c2[140],_mm_xor_si128(c2[68],_mm_xor_si128(c2[68],_mm_xor_si128(c2[188],_mm_xor_si128(c2[190],_mm_xor_si128(c2[58],_mm_xor_si128(c2[178],_mm_xor_si128(c2[190],_mm_xor_si128(c2[118],_mm_xor_si128(c2[72],_mm_xor_si128(c2[192],_mm_xor_si128(c2[60],_mm_xor_si128(c2[180],_mm_xor_si128(c2[60],_mm_xor_si128(c2[120],_mm_xor_si128(c2[48],_mm_xor_si128(c2[108],_mm_xor_si128(c2[36],_mm_xor_si128(c2[182],_mm_xor_si128(c2[50],_mm_xor_si128(c2[170],_mm_xor_si128(c2[170],_mm_xor_si128(c2[98],_mm_xor_si128(c2[40],_mm_xor_si128(c2[100],_mm_xor_si128(c2[28],_mm_xor_si128(c2[100],_mm_xor_si128(c2[28],_mm_xor_si128(c2[124],_mm_xor_si128(c2[52],_mm_xor_si128(c2[78],_mm_xor_si128(c2[198],_mm_xor_si128(c2[66],_mm_xor_si128(c2[186],_mm_xor_si128(c2[186],_mm_xor_si128(c2[54],_mm_xor_si128(c2[174],_mm_xor_si128(c2[102],_mm_xor_si128(c2[30],c2[186])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+     d2[16]=simde_mm_xor_si128(c2[12],simde_mm_xor_si128(c2[132],simde_mm_xor_si128(c2[0],simde_mm_xor_si128(c2[120],simde_mm_xor_si128(c2[108],simde_mm_xor_si128(c2[168],simde_mm_xor_si128(c2[96],simde_mm_xor_si128(c2[96],simde_mm_xor_si128(c2[24],simde_mm_xor_si128(c2[158],simde_mm_xor_si128(c2[86],simde_mm_xor_si128(c2[146],simde_mm_xor_si128(c2[74],simde_mm_xor_si128(c2[110],simde_mm_xor_si128(c2[38],simde_mm_xor_si128(c2[182],simde_mm_xor_si128(c2[110],simde_mm_xor_si128(c2[134],simde_mm_xor_si128(c2[52],simde_mm_xor_si128(c2[172],simde_mm_xor_si128(c2[40],simde_mm_xor_si128(c2[160],simde_mm_xor_si128(c2[16],simde_mm_xor_si128(c2[136],simde_mm_xor_si128(c2[30],simde_mm_xor_si128(c2[150],simde_mm_xor_si128(c2[18],simde_mm_xor_si128(c2[138],simde_mm_xor_si128(c2[102],simde_mm_xor_si128(c2[162],simde_mm_xor_si128(c2[90],simde_mm_xor_si128(c2[42],simde_mm_xor_si128(c2[162],simde_mm_xor_si128(c2[188],simde_mm_xor_si128(c2[56],simde_mm_xor_si128(c2[176],simde_mm_xor_si128(c2[140],simde_mm_xor_si128(c2[68],simde_mm_xor_si128(c2[68],simde_mm_xor_si128(c2[188],simde_mm_xor_si128(c2[190],simde_mm_xor_si128(c2[58],simde_mm_xor_si128(c2[178],simde_mm_xor_si128(c2[190],simde_mm_xor_si128(c2[118],simde_mm_xor_si128(c2[72],simde_mm_xor_si128(c2[192],simde_mm_xor_si128(c2[60],simde_mm_xor_si128(c2[180],simde_mm_xor_si128(c2[60],simde_mm_xor_si128(c2[120],simde_mm_xor_si128(c2[48],simde_mm_xor_si128(c2[108],simde_mm_xor_si128(c2[36],simde_mm_xor_si128(c2[182],simde_mm_xor_si128(c2[50],simde_mm_xor_si128(c2[170],simde_mm_xor_si128(c2[170],simde_mm_xor_si128(c2[98],simde_mm_xor_si128(c2[40],simde_mm_xor_si128(c2[100],simde_mm_xor_si128(c2[28],simde_mm_xor_si128(c2[100],simde_mm_xor_si128(c2[28],simde_mm_xor_si128(c2[124],simde_mm_xor_si128(c2[52],simde_mm_xor_si128(c2[78],simde_mm_xor_si128(c2[198],simde_mm_xor_si128(c2[66],simde_mm_xor_si128(c2[186],simde_mm_xor_si128(c2[186],simde_mm_xor_si128(c2[54],simde_mm_xor_si128(c2[174],simde_mm_xor_si128(c2[102],simde_mm_xor_si128(c2[30],c2[186])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
 
 //row: 17
-     d2[17]=_mm_xor_si128(c2[132],_mm_xor_si128(c2[108],_mm_xor_si128(c2[120],_mm_xor_si128(c2[96],_mm_xor_si128(c2[84],_mm_xor_si128(c2[96],_mm_xor_si128(c2[72],_mm_xor_si128(c2[24],_mm_xor_si128(c2[0],_mm_xor_si128(c2[86],_mm_xor_si128(c2[62],_mm_xor_si128(c2[74],_mm_xor_si128(c2[50],_mm_xor_si128(c2[38],_mm_xor_si128(c2[14],_mm_xor_si128(c2[110],_mm_xor_si128(c2[86],_mm_xor_si128(c2[170],_mm_xor_si128(c2[172],_mm_xor_si128(c2[148],_mm_xor_si128(c2[160],_mm_xor_si128(c2[136],_mm_xor_si128(c2[136],_mm_xor_si128(c2[112],_mm_xor_si128(c2[150],_mm_xor_si128(c2[126],_mm_xor_si128(c2[138],_mm_xor_si128(c2[114],_mm_xor_si128(c2[78],_mm_xor_si128(c2[90],_mm_xor_si128(c2[66],_mm_xor_si128(c2[162],_mm_xor_si128(c2[138],_mm_xor_si128(c2[164],_mm_xor_si128(c2[176],_mm_xor_si128(c2[152],_mm_xor_si128(c2[68],_mm_xor_si128(c2[44],_mm_xor_si128(c2[188],_mm_xor_si128(c2[164],_mm_xor_si128(c2[166],_mm_xor_si128(c2[178],_mm_xor_si128(c2[154],_mm_xor_si128(c2[118],_mm_xor_si128(c2[94],_mm_xor_si128(c2[154],_mm_xor_si128(c2[192],_mm_xor_si128(c2[168],_mm_xor_si128(c2[180],_mm_xor_si128(c2[156],_mm_xor_si128(c2[36],_mm_xor_si128(c2[48],_mm_xor_si128(c2[24],_mm_xor_si128(c2[36],_mm_xor_si128(c2[12],_mm_xor_si128(c2[158],_mm_xor_si128(c2[170],_mm_xor_si128(c2[146],_mm_xor_si128(c2[98],_mm_xor_si128(c2[74],_mm_xor_si128(c2[16],_mm_xor_si128(c2[28],_mm_xor_si128(c2[196],_mm_xor_si128(c2[28],_mm_xor_si128(c2[196],_mm_xor_si128(c2[52],_mm_xor_si128(c2[28],_mm_xor_si128(c2[198],_mm_xor_si128(c2[174],_mm_xor_si128(c2[186],_mm_xor_si128(c2[162],_mm_xor_si128(c2[162],_mm_xor_si128(c2[174],_mm_xor_si128(c2[150],_mm_xor_si128(c2[30],c2[198])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+     d2[17]=simde_mm_xor_si128(c2[132],simde_mm_xor_si128(c2[108],simde_mm_xor_si128(c2[120],simde_mm_xor_si128(c2[96],simde_mm_xor_si128(c2[84],simde_mm_xor_si128(c2[96],simde_mm_xor_si128(c2[72],simde_mm_xor_si128(c2[24],simde_mm_xor_si128(c2[0],simde_mm_xor_si128(c2[86],simde_mm_xor_si128(c2[62],simde_mm_xor_si128(c2[74],simde_mm_xor_si128(c2[50],simde_mm_xor_si128(c2[38],simde_mm_xor_si128(c2[14],simde_mm_xor_si128(c2[110],simde_mm_xor_si128(c2[86],simde_mm_xor_si128(c2[170],simde_mm_xor_si128(c2[172],simde_mm_xor_si128(c2[148],simde_mm_xor_si128(c2[160],simde_mm_xor_si128(c2[136],simde_mm_xor_si128(c2[136],simde_mm_xor_si128(c2[112],simde_mm_xor_si128(c2[150],simde_mm_xor_si128(c2[126],simde_mm_xor_si128(c2[138],simde_mm_xor_si128(c2[114],simde_mm_xor_si128(c2[78],simde_mm_xor_si128(c2[90],simde_mm_xor_si128(c2[66],simde_mm_xor_si128(c2[162],simde_mm_xor_si128(c2[138],simde_mm_xor_si128(c2[164],simde_mm_xor_si128(c2[176],simde_mm_xor_si128(c2[152],simde_mm_xor_si128(c2[68],simde_mm_xor_si128(c2[44],simde_mm_xor_si128(c2[188],simde_mm_xor_si128(c2[164],simde_mm_xor_si128(c2[166],simde_mm_xor_si128(c2[178],simde_mm_xor_si128(c2[154],simde_mm_xor_si128(c2[118],simde_mm_xor_si128(c2[94],simde_mm_xor_si128(c2[154],simde_mm_xor_si128(c2[192],simde_mm_xor_si128(c2[168],simde_mm_xor_si128(c2[180],simde_mm_xor_si128(c2[156],simde_mm_xor_si128(c2[36],simde_mm_xor_si128(c2[48],simde_mm_xor_si128(c2[24],simde_mm_xor_si128(c2[36],simde_mm_xor_si128(c2[12],simde_mm_xor_si128(c2[158],simde_mm_xor_si128(c2[170],simde_mm_xor_si128(c2[146],simde_mm_xor_si128(c2[98],simde_mm_xor_si128(c2[74],simde_mm_xor_si128(c2[16],simde_mm_xor_si128(c2[28],simde_mm_xor_si128(c2[196],simde_mm_xor_si128(c2[28],simde_mm_xor_si128(c2[196],simde_mm_xor_si128(c2[52],simde_mm_xor_si128(c2[28],simde_mm_xor_si128(c2[198],simde_mm_xor_si128(c2[174],simde_mm_xor_si128(c2[186],simde_mm_xor_si128(c2[162],simde_mm_xor_si128(c2[162],simde_mm_xor_si128(c2[174],simde_mm_xor_si128(c2[150],simde_mm_xor_si128(c2[30],c2[198])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
 
 //row: 18
-     d2[18]=_mm_xor_si128(c2[144],_mm_xor_si128(c2[36],c2[38]));
+     d2[18]=simde_mm_xor_si128(c2[144],simde_mm_xor_si128(c2[36],c2[38]));
 
 //row: 19
-     d2[19]=_mm_xor_si128(c2[12],_mm_xor_si128(c2[180],_mm_xor_si128(c2[108],_mm_xor_si128(c2[84],_mm_xor_si128(c2[158],_mm_xor_si128(c2[122],_mm_xor_si128(c2[2],_mm_xor_si128(c2[50],_mm_xor_si128(c2[52],_mm_xor_si128(c2[28],_mm_xor_si128(c2[30],_mm_xor_si128(c2[174],_mm_xor_si128(c2[54],_mm_xor_si128(c2[68],_mm_xor_si128(c2[152],_mm_xor_si128(c2[80],_mm_xor_si128(c2[70],_mm_xor_si128(c2[10],_mm_xor_si128(c2[72],_mm_xor_si128(c2[132],_mm_xor_si128(c2[120],_mm_xor_si128(c2[62],_mm_xor_si128(c2[182],_mm_xor_si128(c2[112],_mm_xor_si128(c2[112],_mm_xor_si128(c2[136],_mm_xor_si128(c2[78],_mm_xor_si128(c2[66],c2[114]))))))))))))))))))))))))))));
+     d2[19]=simde_mm_xor_si128(c2[12],simde_mm_xor_si128(c2[180],simde_mm_xor_si128(c2[108],simde_mm_xor_si128(c2[84],simde_mm_xor_si128(c2[158],simde_mm_xor_si128(c2[122],simde_mm_xor_si128(c2[2],simde_mm_xor_si128(c2[50],simde_mm_xor_si128(c2[52],simde_mm_xor_si128(c2[28],simde_mm_xor_si128(c2[30],simde_mm_xor_si128(c2[174],simde_mm_xor_si128(c2[54],simde_mm_xor_si128(c2[68],simde_mm_xor_si128(c2[152],simde_mm_xor_si128(c2[80],simde_mm_xor_si128(c2[70],simde_mm_xor_si128(c2[10],simde_mm_xor_si128(c2[72],simde_mm_xor_si128(c2[132],simde_mm_xor_si128(c2[120],simde_mm_xor_si128(c2[62],simde_mm_xor_si128(c2[182],simde_mm_xor_si128(c2[112],simde_mm_xor_si128(c2[112],simde_mm_xor_si128(c2[136],simde_mm_xor_si128(c2[78],simde_mm_xor_si128(c2[66],c2[114]))))))))))))))))))))))))))));
 
 //row: 20
-     d2[20]=_mm_xor_si128(c2[72],_mm_xor_si128(c2[60],_mm_xor_si128(c2[36],_mm_xor_si128(c2[156],_mm_xor_si128(c2[26],_mm_xor_si128(c2[14],_mm_xor_si128(c2[170],_mm_xor_si128(c2[50],_mm_xor_si128(c2[122],_mm_xor_si128(c2[112],_mm_xor_si128(c2[100],_mm_xor_si128(c2[76],_mm_xor_si128(c2[90],_mm_xor_si128(c2[78],_mm_xor_si128(c2[30],_mm_xor_si128(c2[102],_mm_xor_si128(c2[116],_mm_xor_si128(c2[8],_mm_xor_si128(c2[128],_mm_xor_si128(c2[116],_mm_xor_si128(c2[118],_mm_xor_si128(c2[58],_mm_xor_si128(c2[132],_mm_xor_si128(c2[120],_mm_xor_si128(c2[180],_mm_xor_si128(c2[168],_mm_xor_si128(c2[110],_mm_xor_si128(c2[38],_mm_xor_si128(c2[160],_mm_xor_si128(c2[160],_mm_xor_si128(c2[184],_mm_xor_si128(c2[138],_mm_xor_si128(c2[126],_mm_xor_si128(c2[114],c2[162]))))))))))))))))))))))))))))))))));
+     d2[20]=simde_mm_xor_si128(c2[72],simde_mm_xor_si128(c2[60],simde_mm_xor_si128(c2[36],simde_mm_xor_si128(c2[156],simde_mm_xor_si128(c2[26],simde_mm_xor_si128(c2[14],simde_mm_xor_si128(c2[170],simde_mm_xor_si128(c2[50],simde_mm_xor_si128(c2[122],simde_mm_xor_si128(c2[112],simde_mm_xor_si128(c2[100],simde_mm_xor_si128(c2[76],simde_mm_xor_si128(c2[90],simde_mm_xor_si128(c2[78],simde_mm_xor_si128(c2[30],simde_mm_xor_si128(c2[102],simde_mm_xor_si128(c2[116],simde_mm_xor_si128(c2[8],simde_mm_xor_si128(c2[128],simde_mm_xor_si128(c2[116],simde_mm_xor_si128(c2[118],simde_mm_xor_si128(c2[58],simde_mm_xor_si128(c2[132],simde_mm_xor_si128(c2[120],simde_mm_xor_si128(c2[180],simde_mm_xor_si128(c2[168],simde_mm_xor_si128(c2[110],simde_mm_xor_si128(c2[38],simde_mm_xor_si128(c2[160],simde_mm_xor_si128(c2[160],simde_mm_xor_si128(c2[184],simde_mm_xor_si128(c2[138],simde_mm_xor_si128(c2[126],simde_mm_xor_si128(c2[114],c2[162]))))))))))))))))))))))))))))))))));
 
 //row: 21
-     d2[21]=_mm_xor_si128(c2[120],_mm_xor_si128(c2[96],_mm_xor_si128(c2[24],_mm_xor_si128(c2[144],_mm_xor_si128(c2[74],_mm_xor_si128(c2[38],_mm_xor_si128(c2[122],_mm_xor_si128(c2[110],_mm_xor_si128(c2[160],_mm_xor_si128(c2[148],_mm_xor_si128(c2[136],_mm_xor_si128(c2[138],_mm_xor_si128(c2[90],_mm_xor_si128(c2[162],_mm_xor_si128(c2[176],_mm_xor_si128(c2[68],_mm_xor_si128(c2[8],_mm_xor_si128(c2[188],_mm_xor_si128(c2[178],_mm_xor_si128(c2[130],_mm_xor_si128(c2[118],_mm_xor_si128(c2[180],_mm_xor_si128(c2[48],_mm_xor_si128(c2[48],_mm_xor_si128(c2[36],_mm_xor_si128(c2[170],_mm_xor_si128(c2[110],_mm_xor_si128(c2[98],_mm_xor_si128(c2[28],_mm_xor_si128(c2[28],_mm_xor_si128(c2[64],_mm_xor_si128(c2[52],_mm_xor_si128(c2[136],_mm_xor_si128(c2[186],_mm_xor_si128(c2[174],_mm_xor_si128(c2[42],c2[30]))))))))))))))))))))))))))))))))))));
+     d2[21]=simde_mm_xor_si128(c2[120],simde_mm_xor_si128(c2[96],simde_mm_xor_si128(c2[24],simde_mm_xor_si128(c2[144],simde_mm_xor_si128(c2[74],simde_mm_xor_si128(c2[38],simde_mm_xor_si128(c2[122],simde_mm_xor_si128(c2[110],simde_mm_xor_si128(c2[160],simde_mm_xor_si128(c2[148],simde_mm_xor_si128(c2[136],simde_mm_xor_si128(c2[138],simde_mm_xor_si128(c2[90],simde_mm_xor_si128(c2[162],simde_mm_xor_si128(c2[176],simde_mm_xor_si128(c2[68],simde_mm_xor_si128(c2[8],simde_mm_xor_si128(c2[188],simde_mm_xor_si128(c2[178],simde_mm_xor_si128(c2[130],simde_mm_xor_si128(c2[118],simde_mm_xor_si128(c2[180],simde_mm_xor_si128(c2[48],simde_mm_xor_si128(c2[48],simde_mm_xor_si128(c2[36],simde_mm_xor_si128(c2[170],simde_mm_xor_si128(c2[110],simde_mm_xor_si128(c2[98],simde_mm_xor_si128(c2[28],simde_mm_xor_si128(c2[28],simde_mm_xor_si128(c2[64],simde_mm_xor_si128(c2[52],simde_mm_xor_si128(c2[136],simde_mm_xor_si128(c2[186],simde_mm_xor_si128(c2[174],simde_mm_xor_si128(c2[42],c2[30]))))))))))))))))))))))))))))))))))));
 
 //row: 22
-     d2[22]=_mm_xor_si128(c2[170],c2[184]);
+     d2[22]=simde_mm_xor_si128(c2[170],c2[184]);
 
 //row: 23
-     d2[23]=_mm_xor_si128(c2[84],_mm_xor_si128(c2[138],c2[178]));
+     d2[23]=simde_mm_xor_si128(c2[84],simde_mm_xor_si128(c2[138],c2[178]));
 
 //row: 24
-     d2[24]=_mm_xor_si128(c2[170],_mm_xor_si128(c2[136],c2[114]));
+     d2[24]=simde_mm_xor_si128(c2[170],simde_mm_xor_si128(c2[136],c2[114]));
 
 //row: 25
-     d2[25]=_mm_xor_si128(c2[48],c2[154]);
+     d2[25]=simde_mm_xor_si128(c2[48],c2[154]);
 
 //row: 26
-     d2[26]=_mm_xor_si128(c2[108],_mm_xor_si128(c2[96],_mm_xor_si128(c2[24],_mm_xor_si128(c2[84],_mm_xor_si128(c2[72],_mm_xor_si128(c2[0],_mm_xor_si128(c2[0],_mm_xor_si128(c2[120],_mm_xor_si128(c2[62],_mm_xor_si128(c2[50],_mm_xor_si128(c2[170],_mm_xor_si128(c2[14],_mm_xor_si128(c2[134],_mm_xor_si128(c2[26],_mm_xor_si128(c2[86],_mm_xor_si128(c2[14],_mm_xor_si128(c2[148],_mm_xor_si128(c2[136],_mm_xor_si128(c2[64],_mm_xor_si128(c2[52],_mm_xor_si128(c2[112],_mm_xor_si128(c2[40],_mm_xor_si128(c2[160],_mm_xor_si128(c2[126],_mm_xor_si128(c2[114],_mm_xor_si128(c2[42],_mm_xor_si128(c2[78],_mm_xor_si128(c2[66],_mm_xor_si128(c2[186],_mm_xor_si128(c2[138],_mm_xor_si128(c2[66],_mm_xor_si128(c2[164],_mm_xor_si128(c2[152],_mm_xor_si128(c2[80],_mm_xor_si128(c2[44],_mm_xor_si128(c2[164],_mm_xor_si128(c2[104],_mm_xor_si128(c2[164],_mm_xor_si128(c2[92],_mm_xor_si128(c2[166],_mm_xor_si128(c2[154],_mm_xor_si128(c2[82],_mm_xor_si128(c2[34],_mm_xor_si128(c2[94],_mm_xor_si128(c2[22],_mm_xor_si128(c2[168],_mm_xor_si128(c2[156],_mm_xor_si128(c2[84],_mm_xor_si128(c2[36],_mm_xor_si128(c2[24],_mm_xor_si128(c2[144],_mm_xor_si128(c2[144],_mm_xor_si128(c2[12],_mm_xor_si128(c2[132],_mm_xor_si128(c2[158],_mm_xor_si128(c2[146],_mm_xor_si128(c2[74],_mm_xor_si128(c2[14],_mm_xor_si128(c2[74],_mm_xor_si128(c2[194],_mm_xor_si128(c2[194],_mm_xor_si128(c2[16],_mm_xor_si128(c2[196],_mm_xor_si128(c2[124],_mm_xor_si128(c2[196],_mm_xor_si128(c2[124],_mm_xor_si128(c2[160],_mm_xor_si128(c2[28],_mm_xor_si128(c2[148],_mm_xor_si128(c2[174],_mm_xor_si128(c2[162],_mm_xor_si128(c2[90],_mm_xor_si128(c2[162],_mm_xor_si128(c2[150],_mm_xor_si128(c2[78],_mm_xor_si128(c2[138],_mm_xor_si128(c2[198],c2[126])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+     d2[26]=simde_mm_xor_si128(c2[108],simde_mm_xor_si128(c2[96],simde_mm_xor_si128(c2[24],simde_mm_xor_si128(c2[84],simde_mm_xor_si128(c2[72],simde_mm_xor_si128(c2[0],simde_mm_xor_si128(c2[0],simde_mm_xor_si128(c2[120],simde_mm_xor_si128(c2[62],simde_mm_xor_si128(c2[50],simde_mm_xor_si128(c2[170],simde_mm_xor_si128(c2[14],simde_mm_xor_si128(c2[134],simde_mm_xor_si128(c2[26],simde_mm_xor_si128(c2[86],simde_mm_xor_si128(c2[14],simde_mm_xor_si128(c2[148],simde_mm_xor_si128(c2[136],simde_mm_xor_si128(c2[64],simde_mm_xor_si128(c2[52],simde_mm_xor_si128(c2[112],simde_mm_xor_si128(c2[40],simde_mm_xor_si128(c2[160],simde_mm_xor_si128(c2[126],simde_mm_xor_si128(c2[114],simde_mm_xor_si128(c2[42],simde_mm_xor_si128(c2[78],simde_mm_xor_si128(c2[66],simde_mm_xor_si128(c2[186],simde_mm_xor_si128(c2[138],simde_mm_xor_si128(c2[66],simde_mm_xor_si128(c2[164],simde_mm_xor_si128(c2[152],simde_mm_xor_si128(c2[80],simde_mm_xor_si128(c2[44],simde_mm_xor_si128(c2[164],simde_mm_xor_si128(c2[104],simde_mm_xor_si128(c2[164],simde_mm_xor_si128(c2[92],simde_mm_xor_si128(c2[166],simde_mm_xor_si128(c2[154],simde_mm_xor_si128(c2[82],simde_mm_xor_si128(c2[34],simde_mm_xor_si128(c2[94],simde_mm_xor_si128(c2[22],simde_mm_xor_si128(c2[168],simde_mm_xor_si128(c2[156],simde_mm_xor_si128(c2[84],simde_mm_xor_si128(c2[36],simde_mm_xor_si128(c2[24],simde_mm_xor_si128(c2[144],simde_mm_xor_si128(c2[144],simde_mm_xor_si128(c2[12],simde_mm_xor_si128(c2[132],simde_mm_xor_si128(c2[158],simde_mm_xor_si128(c2[146],simde_mm_xor_si128(c2[74],simde_mm_xor_si128(c2[14],simde_mm_xor_si128(c2[74],simde_mm_xor_si128(c2[194],simde_mm_xor_si128(c2[194],simde_mm_xor_si128(c2[16],simde_mm_xor_si128(c2[196],simde_mm_xor_si128(c2[124],simde_mm_xor_si128(c2[196],simde_mm_xor_si128(c2[124],simde_mm_xor_si128(c2[160],simde_mm_xor_si128(c2[28],simde_mm_xor_si128(c2[148],simde_mm_xor_si128(c2[174],simde_mm_xor_si128(c2[162],simde_mm_xor_si128(c2[90],simde_mm_xor_si128(c2[162],simde_mm_xor_si128(c2[150],simde_mm_xor_si128(c2[78],simde_mm_xor_si128(c2[138],simde_mm_xor_si128(c2[198],c2[126])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
 
 //row: 27
-     d2[27]=_mm_xor_si128(c2[96],c2[96]);
+     d2[27]=simde_mm_xor_si128(c2[96],c2[96]);
 
 //row: 28
-     d2[28]=_mm_xor_si128(c2[26],_mm_xor_si128(c2[64],c2[94]));
+     d2[28]=simde_mm_xor_si128(c2[26],simde_mm_xor_si128(c2[64],c2[94]));
 
 //row: 29
-     d2[29]=_mm_xor_si128(c2[24],c2[152]);
+     d2[29]=simde_mm_xor_si128(c2[24],c2[152]);
 
 //row: 30
-     d2[30]=_mm_xor_si128(c2[88],_mm_xor_si128(c2[10],_mm_xor_si128(c2[122],c2[66])));
+     d2[30]=simde_mm_xor_si128(c2[88],simde_mm_xor_si128(c2[10],simde_mm_xor_si128(c2[122],c2[66])));
 
 //row: 31
-     d2[31]=_mm_xor_si128(c2[108],_mm_xor_si128(c2[84],_mm_xor_si128(c2[12],_mm_xor_si128(c2[62],_mm_xor_si128(c2[26],_mm_xor_si128(c2[110],_mm_xor_si128(c2[98],_mm_xor_si128(c2[122],_mm_xor_si128(c2[148],_mm_xor_si128(c2[136],_mm_xor_si128(c2[124],_mm_xor_si128(c2[126],_mm_xor_si128(c2[78],_mm_xor_si128(c2[150],_mm_xor_si128(c2[164],_mm_xor_si128(c2[56],_mm_xor_si128(c2[188],_mm_xor_si128(c2[176],_mm_xor_si128(c2[166],_mm_xor_si128(c2[118],_mm_xor_si128(c2[106],_mm_xor_si128(c2[168],_mm_xor_si128(c2[36],_mm_xor_si128(c2[36],_mm_xor_si128(c2[24],_mm_xor_si128(c2[158],_mm_xor_si128(c2[98],_mm_xor_si128(c2[86],_mm_xor_si128(c2[16],_mm_xor_si128(c2[16],_mm_xor_si128(c2[52],_mm_xor_si128(c2[40],_mm_xor_si128(c2[174],_mm_xor_si128(c2[162],_mm_xor_si128(c2[30],c2[18])))))))))))))))))))))))))))))))))));
+     d2[31]=simde_mm_xor_si128(c2[108],simde_mm_xor_si128(c2[84],simde_mm_xor_si128(c2[12],simde_mm_xor_si128(c2[62],simde_mm_xor_si128(c2[26],simde_mm_xor_si128(c2[110],simde_mm_xor_si128(c2[98],simde_mm_xor_si128(c2[122],simde_mm_xor_si128(c2[148],simde_mm_xor_si128(c2[136],simde_mm_xor_si128(c2[124],simde_mm_xor_si128(c2[126],simde_mm_xor_si128(c2[78],simde_mm_xor_si128(c2[150],simde_mm_xor_si128(c2[164],simde_mm_xor_si128(c2[56],simde_mm_xor_si128(c2[188],simde_mm_xor_si128(c2[176],simde_mm_xor_si128(c2[166],simde_mm_xor_si128(c2[118],simde_mm_xor_si128(c2[106],simde_mm_xor_si128(c2[168],simde_mm_xor_si128(c2[36],simde_mm_xor_si128(c2[36],simde_mm_xor_si128(c2[24],simde_mm_xor_si128(c2[158],simde_mm_xor_si128(c2[98],simde_mm_xor_si128(c2[86],simde_mm_xor_si128(c2[16],simde_mm_xor_si128(c2[16],simde_mm_xor_si128(c2[52],simde_mm_xor_si128(c2[40],simde_mm_xor_si128(c2[174],simde_mm_xor_si128(c2[162],simde_mm_xor_si128(c2[30],c2[18])))))))))))))))))))))))))))))))))));
 
 //row: 32
-     d2[32]=_mm_xor_si128(c2[180],_mm_xor_si128(c2[168],_mm_xor_si128(c2[156],_mm_xor_si128(c2[144],_mm_xor_si128(c2[72],_mm_xor_si128(c2[24],_mm_xor_si128(c2[134],_mm_xor_si128(c2[122],_mm_xor_si128(c2[86],_mm_xor_si128(c2[158],_mm_xor_si128(c2[28],_mm_xor_si128(c2[16],_mm_xor_si128(c2[184],_mm_xor_si128(c2[6],_mm_xor_si128(c2[186],_mm_xor_si128(c2[150],_mm_xor_si128(c2[138],_mm_xor_si128(c2[18],_mm_xor_si128(c2[44],_mm_xor_si128(c2[32],_mm_xor_si128(c2[116],_mm_xor_si128(c2[44],_mm_xor_si128(c2[46],_mm_xor_si128(c2[34],_mm_xor_si128(c2[166],_mm_xor_si128(c2[154],_mm_xor_si128(c2[48],_mm_xor_si128(c2[36],_mm_xor_si128(c2[108],_mm_xor_si128(c2[96],_mm_xor_si128(c2[84],_mm_xor_si128(c2[38],_mm_xor_si128(c2[26],_mm_xor_si128(c2[146],_mm_xor_si128(c2[88],_mm_xor_si128(c2[76],_mm_xor_si128(c2[76],_mm_xor_si128(c2[100],_mm_xor_si128(c2[54],_mm_xor_si128(c2[42],_mm_xor_si128(c2[42],_mm_xor_si128(c2[30],c2[78]))))))))))))))))))))))))))))))))))))))))));
+     d2[32]=simde_mm_xor_si128(c2[180],simde_mm_xor_si128(c2[168],simde_mm_xor_si128(c2[156],simde_mm_xor_si128(c2[144],simde_mm_xor_si128(c2[72],simde_mm_xor_si128(c2[24],simde_mm_xor_si128(c2[134],simde_mm_xor_si128(c2[122],simde_mm_xor_si128(c2[86],simde_mm_xor_si128(c2[158],simde_mm_xor_si128(c2[28],simde_mm_xor_si128(c2[16],simde_mm_xor_si128(c2[184],simde_mm_xor_si128(c2[6],simde_mm_xor_si128(c2[186],simde_mm_xor_si128(c2[150],simde_mm_xor_si128(c2[138],simde_mm_xor_si128(c2[18],simde_mm_xor_si128(c2[44],simde_mm_xor_si128(c2[32],simde_mm_xor_si128(c2[116],simde_mm_xor_si128(c2[44],simde_mm_xor_si128(c2[46],simde_mm_xor_si128(c2[34],simde_mm_xor_si128(c2[166],simde_mm_xor_si128(c2[154],simde_mm_xor_si128(c2[48],simde_mm_xor_si128(c2[36],simde_mm_xor_si128(c2[108],simde_mm_xor_si128(c2[96],simde_mm_xor_si128(c2[84],simde_mm_xor_si128(c2[38],simde_mm_xor_si128(c2[26],simde_mm_xor_si128(c2[146],simde_mm_xor_si128(c2[88],simde_mm_xor_si128(c2[76],simde_mm_xor_si128(c2[76],simde_mm_xor_si128(c2[100],simde_mm_xor_si128(c2[54],simde_mm_xor_si128(c2[42],simde_mm_xor_si128(c2[42],simde_mm_xor_si128(c2[30],c2[78]))))))))))))))))))))))))))))))))))))))))));
 
 //row: 33
-     d2[33]=_mm_xor_si128(c2[36],_mm_xor_si128(c2[12],_mm_xor_si128(c2[132],_mm_xor_si128(c2[182],_mm_xor_si128(c2[146],_mm_xor_si128(c2[26],_mm_xor_si128(c2[76],_mm_xor_si128(c2[52],_mm_xor_si128(c2[52],_mm_xor_si128(c2[54],_mm_xor_si128(c2[6],_mm_xor_si128(c2[78],_mm_xor_si128(c2[92],_mm_xor_si128(c2[176],_mm_xor_si128(c2[104],_mm_xor_si128(c2[94],_mm_xor_si128(c2[34],_mm_xor_si128(c2[96],_mm_xor_si128(c2[156],_mm_xor_si128(c2[144],_mm_xor_si128(c2[86],_mm_xor_si128(c2[14],_mm_xor_si128(c2[62],_mm_xor_si128(c2[136],_mm_xor_si128(c2[136],_mm_xor_si128(c2[160],_mm_xor_si128(c2[102],_mm_xor_si128(c2[90],c2[138]))))))))))))))))))))))))))));
+     d2[33]=simde_mm_xor_si128(c2[36],simde_mm_xor_si128(c2[12],simde_mm_xor_si128(c2[132],simde_mm_xor_si128(c2[182],simde_mm_xor_si128(c2[146],simde_mm_xor_si128(c2[26],simde_mm_xor_si128(c2[76],simde_mm_xor_si128(c2[52],simde_mm_xor_si128(c2[52],simde_mm_xor_si128(c2[54],simde_mm_xor_si128(c2[6],simde_mm_xor_si128(c2[78],simde_mm_xor_si128(c2[92],simde_mm_xor_si128(c2[176],simde_mm_xor_si128(c2[104],simde_mm_xor_si128(c2[94],simde_mm_xor_si128(c2[34],simde_mm_xor_si128(c2[96],simde_mm_xor_si128(c2[156],simde_mm_xor_si128(c2[144],simde_mm_xor_si128(c2[86],simde_mm_xor_si128(c2[14],simde_mm_xor_si128(c2[62],simde_mm_xor_si128(c2[136],simde_mm_xor_si128(c2[136],simde_mm_xor_si128(c2[160],simde_mm_xor_si128(c2[102],simde_mm_xor_si128(c2[90],c2[138]))))))))))))))))))))))))))));
 
 //row: 34
-     d2[34]=_mm_xor_si128(c2[168],_mm_xor_si128(c2[156],_mm_xor_si128(c2[144],_mm_xor_si128(c2[144],_mm_xor_si128(c2[132],_mm_xor_si128(c2[120],_mm_xor_si128(c2[60],_mm_xor_si128(c2[48],_mm_xor_si128(c2[36],_mm_xor_si128(c2[122],_mm_xor_si128(c2[110],_mm_xor_si128(c2[98],_mm_xor_si128(c2[74],_mm_xor_si128(c2[62],_mm_xor_si128(c2[146],_mm_xor_si128(c2[146],_mm_xor_si128(c2[134],_mm_xor_si128(c2[16],_mm_xor_si128(c2[4],_mm_xor_si128(c2[184],_mm_xor_si128(c2[172],_mm_xor_si128(c2[172],_mm_xor_si128(c2[160],_mm_xor_si128(c2[186],_mm_xor_si128(c2[174],_mm_xor_si128(c2[162],_mm_xor_si128(c2[138],_mm_xor_si128(c2[126],_mm_xor_si128(c2[114],_mm_xor_si128(c2[6],_mm_xor_si128(c2[186],_mm_xor_si128(c2[32],_mm_xor_si128(c2[20],_mm_xor_si128(c2[8],_mm_xor_si128(c2[104],_mm_xor_si128(c2[92],_mm_xor_si128(c2[32],_mm_xor_si128(c2[32],_mm_xor_si128(c2[20],_mm_xor_si128(c2[34],_mm_xor_si128(c2[22],_mm_xor_si128(c2[10],_mm_xor_si128(c2[154],_mm_xor_si128(c2[154],_mm_xor_si128(c2[142],_mm_xor_si128(c2[36],_mm_xor_si128(c2[24],_mm_xor_si128(c2[12],_mm_xor_si128(c2[96],_mm_xor_si128(c2[84],_mm_xor_si128(c2[72],_mm_xor_si128(c2[72],_mm_xor_si128(c2[72],_mm_xor_si128(c2[60],_mm_xor_si128(c2[26],_mm_xor_si128(c2[14],_mm_xor_si128(c2[194],_mm_xor_si128(c2[134],_mm_xor_si128(c2[134],_mm_xor_si128(c2[122],_mm_xor_si128(c2[76],_mm_xor_si128(c2[64],_mm_xor_si128(c2[52],_mm_xor_si128(c2[64],_mm_xor_si128(c2[52],_mm_xor_si128(c2[88],_mm_xor_si128(c2[88],_mm_xor_si128(c2[76],_mm_xor_si128(c2[42],_mm_xor_si128(c2[30],_mm_xor_si128(c2[18],_mm_xor_si128(c2[30],_mm_xor_si128(c2[18],_mm_xor_si128(c2[198],_mm_xor_si128(c2[66],_mm_xor_si128(c2[66],c2[54]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+     d2[34]=simde_mm_xor_si128(c2[168],simde_mm_xor_si128(c2[156],simde_mm_xor_si128(c2[144],simde_mm_xor_si128(c2[144],simde_mm_xor_si128(c2[132],simde_mm_xor_si128(c2[120],simde_mm_xor_si128(c2[60],simde_mm_xor_si128(c2[48],simde_mm_xor_si128(c2[36],simde_mm_xor_si128(c2[122],simde_mm_xor_si128(c2[110],simde_mm_xor_si128(c2[98],simde_mm_xor_si128(c2[74],simde_mm_xor_si128(c2[62],simde_mm_xor_si128(c2[146],simde_mm_xor_si128(c2[146],simde_mm_xor_si128(c2[134],simde_mm_xor_si128(c2[16],simde_mm_xor_si128(c2[4],simde_mm_xor_si128(c2[184],simde_mm_xor_si128(c2[172],simde_mm_xor_si128(c2[172],simde_mm_xor_si128(c2[160],simde_mm_xor_si128(c2[186],simde_mm_xor_si128(c2[174],simde_mm_xor_si128(c2[162],simde_mm_xor_si128(c2[138],simde_mm_xor_si128(c2[126],simde_mm_xor_si128(c2[114],simde_mm_xor_si128(c2[6],simde_mm_xor_si128(c2[186],simde_mm_xor_si128(c2[32],simde_mm_xor_si128(c2[20],simde_mm_xor_si128(c2[8],simde_mm_xor_si128(c2[104],simde_mm_xor_si128(c2[92],simde_mm_xor_si128(c2[32],simde_mm_xor_si128(c2[32],simde_mm_xor_si128(c2[20],simde_mm_xor_si128(c2[34],simde_mm_xor_si128(c2[22],simde_mm_xor_si128(c2[10],simde_mm_xor_si128(c2[154],simde_mm_xor_si128(c2[154],simde_mm_xor_si128(c2[142],simde_mm_xor_si128(c2[36],simde_mm_xor_si128(c2[24],simde_mm_xor_si128(c2[12],simde_mm_xor_si128(c2[96],simde_mm_xor_si128(c2[84],simde_mm_xor_si128(c2[72],simde_mm_xor_si128(c2[72],simde_mm_xor_si128(c2[72],simde_mm_xor_si128(c2[60],simde_mm_xor_si128(c2[26],simde_mm_xor_si128(c2[14],simde_mm_xor_si128(c2[194],simde_mm_xor_si128(c2[134],simde_mm_xor_si128(c2[134],simde_mm_xor_si128(c2[122],simde_mm_xor_si128(c2[76],simde_mm_xor_si128(c2[64],simde_mm_xor_si128(c2[52],simde_mm_xor_si128(c2[64],simde_mm_xor_si128(c2[52],simde_mm_xor_si128(c2[88],simde_mm_xor_si128(c2[88],simde_mm_xor_si128(c2[76],simde_mm_xor_si128(c2[42],simde_mm_xor_si128(c2[30],simde_mm_xor_si128(c2[18],simde_mm_xor_si128(c2[30],simde_mm_xor_si128(c2[18],simde_mm_xor_si128(c2[198],simde_mm_xor_si128(c2[66],simde_mm_xor_si128(c2[66],c2[54]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
 
 //row: 35
-     d2[35]=_mm_xor_si128(c2[96],_mm_xor_si128(c2[84],_mm_xor_si128(c2[60],_mm_xor_si128(c2[180],_mm_xor_si128(c2[50],_mm_xor_si128(c2[38],_mm_xor_si128(c2[2],_mm_xor_si128(c2[74],_mm_xor_si128(c2[110],_mm_xor_si128(c2[136],_mm_xor_si128(c2[124],_mm_xor_si128(c2[100],_mm_xor_si128(c2[114],_mm_xor_si128(c2[102],_mm_xor_si128(c2[54],_mm_xor_si128(c2[126],_mm_xor_si128(c2[140],_mm_xor_si128(c2[32],_mm_xor_si128(c2[152],_mm_xor_si128(c2[142],_mm_xor_si128(c2[82],_mm_xor_si128(c2[106],_mm_xor_si128(c2[156],_mm_xor_si128(c2[144],_mm_xor_si128(c2[12],_mm_xor_si128(c2[192],_mm_xor_si128(c2[134],_mm_xor_si128(c2[62],_mm_xor_si128(c2[184],_mm_xor_si128(c2[184],_mm_xor_si128(c2[16],_mm_xor_si128(c2[162],_mm_xor_si128(c2[150],_mm_xor_si128(c2[138],c2[186]))))))))))))))))))))))))))))))))));
+     d2[35]=simde_mm_xor_si128(c2[96],simde_mm_xor_si128(c2[84],simde_mm_xor_si128(c2[60],simde_mm_xor_si128(c2[180],simde_mm_xor_si128(c2[50],simde_mm_xor_si128(c2[38],simde_mm_xor_si128(c2[2],simde_mm_xor_si128(c2[74],simde_mm_xor_si128(c2[110],simde_mm_xor_si128(c2[136],simde_mm_xor_si128(c2[124],simde_mm_xor_si128(c2[100],simde_mm_xor_si128(c2[114],simde_mm_xor_si128(c2[102],simde_mm_xor_si128(c2[54],simde_mm_xor_si128(c2[126],simde_mm_xor_si128(c2[140],simde_mm_xor_si128(c2[32],simde_mm_xor_si128(c2[152],simde_mm_xor_si128(c2[142],simde_mm_xor_si128(c2[82],simde_mm_xor_si128(c2[106],simde_mm_xor_si128(c2[156],simde_mm_xor_si128(c2[144],simde_mm_xor_si128(c2[12],simde_mm_xor_si128(c2[192],simde_mm_xor_si128(c2[134],simde_mm_xor_si128(c2[62],simde_mm_xor_si128(c2[184],simde_mm_xor_si128(c2[184],simde_mm_xor_si128(c2[16],simde_mm_xor_si128(c2[162],simde_mm_xor_si128(c2[150],simde_mm_xor_si128(c2[138],c2[186]))))))))))))))))))))))))))))))))));
 
 //row: 36
-     d2[36]=_mm_xor_si128(c2[144],_mm_xor_si128(c2[76],c2[134]));
+     d2[36]=simde_mm_xor_si128(c2[144],simde_mm_xor_si128(c2[76],c2[134]));
 
 //row: 37
-     d2[37]=_mm_xor_si128(c2[36],_mm_xor_si128(c2[180],_mm_xor_si128(c2[12],_mm_xor_si128(c2[156],_mm_xor_si128(c2[132],_mm_xor_si128(c2[84],_mm_xor_si128(c2[182],_mm_xor_si128(c2[134],_mm_xor_si128(c2[146],_mm_xor_si128(c2[98],_mm_xor_si128(c2[182],_mm_xor_si128(c2[26],_mm_xor_si128(c2[170],_mm_xor_si128(c2[76],_mm_xor_si128(c2[28],_mm_xor_si128(c2[16],_mm_xor_si128(c2[52],_mm_xor_si128(c2[4],_mm_xor_si128(c2[54],_mm_xor_si128(c2[6],_mm_xor_si128(c2[6],_mm_xor_si128(c2[150],_mm_xor_si128(c2[78],_mm_xor_si128(c2[30],_mm_xor_si128(c2[92],_mm_xor_si128(c2[44],_mm_xor_si128(c2[176],_mm_xor_si128(c2[128],_mm_xor_si128(c2[68],_mm_xor_si128(c2[104],_mm_xor_si128(c2[56],_mm_xor_si128(c2[94],_mm_xor_si128(c2[46],_mm_xor_si128(c2[190],_mm_xor_si128(c2[34],_mm_xor_si128(c2[178],_mm_xor_si128(c2[96],_mm_xor_si128(c2[48],_mm_xor_si128(c2[156],_mm_xor_si128(c2[108],_mm_xor_si128(c2[108],_mm_xor_si128(c2[144],_mm_xor_si128(c2[96],_mm_xor_si128(c2[86],_mm_xor_si128(c2[38],_mm_xor_si128(c2[170],_mm_xor_si128(c2[14],_mm_xor_si128(c2[158],_mm_xor_si128(c2[136],_mm_xor_si128(c2[88],_mm_xor_si128(c2[136],_mm_xor_si128(c2[88],_mm_xor_si128(c2[124],_mm_xor_si128(c2[160],_mm_xor_si128(c2[112],_mm_xor_si128(c2[102],_mm_xor_si128(c2[54],_mm_xor_si128(c2[90],_mm_xor_si128(c2[42],_mm_xor_si128(c2[102],_mm_xor_si128(c2[138],c2[90])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+     d2[37]=simde_mm_xor_si128(c2[36],simde_mm_xor_si128(c2[180],simde_mm_xor_si128(c2[12],simde_mm_xor_si128(c2[156],simde_mm_xor_si128(c2[132],simde_mm_xor_si128(c2[84],simde_mm_xor_si128(c2[182],simde_mm_xor_si128(c2[134],simde_mm_xor_si128(c2[146],simde_mm_xor_si128(c2[98],simde_mm_xor_si128(c2[182],simde_mm_xor_si128(c2[26],simde_mm_xor_si128(c2[170],simde_mm_xor_si128(c2[76],simde_mm_xor_si128(c2[28],simde_mm_xor_si128(c2[16],simde_mm_xor_si128(c2[52],simde_mm_xor_si128(c2[4],simde_mm_xor_si128(c2[54],simde_mm_xor_si128(c2[6],simde_mm_xor_si128(c2[6],simde_mm_xor_si128(c2[150],simde_mm_xor_si128(c2[78],simde_mm_xor_si128(c2[30],simde_mm_xor_si128(c2[92],simde_mm_xor_si128(c2[44],simde_mm_xor_si128(c2[176],simde_mm_xor_si128(c2[128],simde_mm_xor_si128(c2[68],simde_mm_xor_si128(c2[104],simde_mm_xor_si128(c2[56],simde_mm_xor_si128(c2[94],simde_mm_xor_si128(c2[46],simde_mm_xor_si128(c2[190],simde_mm_xor_si128(c2[34],simde_mm_xor_si128(c2[178],simde_mm_xor_si128(c2[96],simde_mm_xor_si128(c2[48],simde_mm_xor_si128(c2[156],simde_mm_xor_si128(c2[108],simde_mm_xor_si128(c2[108],simde_mm_xor_si128(c2[144],simde_mm_xor_si128(c2[96],simde_mm_xor_si128(c2[86],simde_mm_xor_si128(c2[38],simde_mm_xor_si128(c2[170],simde_mm_xor_si128(c2[14],simde_mm_xor_si128(c2[158],simde_mm_xor_si128(c2[136],simde_mm_xor_si128(c2[88],simde_mm_xor_si128(c2[136],simde_mm_xor_si128(c2[88],simde_mm_xor_si128(c2[124],simde_mm_xor_si128(c2[160],simde_mm_xor_si128(c2[112],simde_mm_xor_si128(c2[102],simde_mm_xor_si128(c2[54],simde_mm_xor_si128(c2[90],simde_mm_xor_si128(c2[42],simde_mm_xor_si128(c2[102],simde_mm_xor_si128(c2[138],c2[90])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
 
 //row: 38
-     d2[38]=_mm_xor_si128(c2[180],_mm_xor_si128(c2[168],_mm_xor_si128(c2[144],_mm_xor_si128(c2[72],_mm_xor_si128(c2[134],_mm_xor_si128(c2[122],_mm_xor_si128(c2[86],_mm_xor_si128(c2[158],_mm_xor_si128(c2[182],_mm_xor_si128(c2[28],_mm_xor_si128(c2[16],_mm_xor_si128(c2[184],_mm_xor_si128(c2[6],_mm_xor_si128(c2[186],_mm_xor_si128(c2[138],_mm_xor_si128(c2[18],_mm_xor_si128(c2[32],_mm_xor_si128(c2[116],_mm_xor_si128(c2[44],_mm_xor_si128(c2[34],_mm_xor_si128(c2[166],_mm_xor_si128(c2[34],_mm_xor_si128(c2[48],_mm_xor_si128(c2[36],_mm_xor_si128(c2[96],_mm_xor_si128(c2[84],_mm_xor_si128(c2[26],_mm_xor_si128(c2[146],_mm_xor_si128(c2[76],_mm_xor_si128(c2[76],_mm_xor_si128(c2[100],_mm_xor_si128(c2[54],_mm_xor_si128(c2[42],_mm_xor_si128(c2[30],c2[78]))))))))))))))))))))))))))))))))));
+     d2[38]=simde_mm_xor_si128(c2[180],simde_mm_xor_si128(c2[168],simde_mm_xor_si128(c2[144],simde_mm_xor_si128(c2[72],simde_mm_xor_si128(c2[134],simde_mm_xor_si128(c2[122],simde_mm_xor_si128(c2[86],simde_mm_xor_si128(c2[158],simde_mm_xor_si128(c2[182],simde_mm_xor_si128(c2[28],simde_mm_xor_si128(c2[16],simde_mm_xor_si128(c2[184],simde_mm_xor_si128(c2[6],simde_mm_xor_si128(c2[186],simde_mm_xor_si128(c2[138],simde_mm_xor_si128(c2[18],simde_mm_xor_si128(c2[32],simde_mm_xor_si128(c2[116],simde_mm_xor_si128(c2[44],simde_mm_xor_si128(c2[34],simde_mm_xor_si128(c2[166],simde_mm_xor_si128(c2[34],simde_mm_xor_si128(c2[48],simde_mm_xor_si128(c2[36],simde_mm_xor_si128(c2[96],simde_mm_xor_si128(c2[84],simde_mm_xor_si128(c2[26],simde_mm_xor_si128(c2[146],simde_mm_xor_si128(c2[76],simde_mm_xor_si128(c2[76],simde_mm_xor_si128(c2[100],simde_mm_xor_si128(c2[54],simde_mm_xor_si128(c2[42],simde_mm_xor_si128(c2[30],c2[78]))))))))))))))))))))))))))))))))));
 
 //row: 39
-     d2[39]=_mm_xor_si128(c2[132],_mm_xor_si128(c2[120],_mm_xor_si128(c2[108],_mm_xor_si128(c2[96],_mm_xor_si128(c2[24],_mm_xor_si128(c2[180],_mm_xor_si128(c2[86],_mm_xor_si128(c2[74],_mm_xor_si128(c2[38],_mm_xor_si128(c2[110],_mm_xor_si128(c2[172],_mm_xor_si128(c2[160],_mm_xor_si128(c2[136],_mm_xor_si128(c2[150],_mm_xor_si128(c2[138],_mm_xor_si128(c2[102],_mm_xor_si128(c2[90],_mm_xor_si128(c2[162],_mm_xor_si128(c2[188],_mm_xor_si128(c2[176],_mm_xor_si128(c2[68],_mm_xor_si128(c2[188],_mm_xor_si128(c2[190],_mm_xor_si128(c2[178],_mm_xor_si128(c2[118],_mm_xor_si128(c2[192],_mm_xor_si128(c2[180],_mm_xor_si128(c2[60],_mm_xor_si128(c2[48],_mm_xor_si128(c2[36],_mm_xor_si128(c2[182],_mm_xor_si128(c2[170],_mm_xor_si128(c2[98],_mm_xor_si128(c2[158],_mm_xor_si128(c2[40],_mm_xor_si128(c2[28],_mm_xor_si128(c2[28],_mm_xor_si128(c2[52],_mm_xor_si128(c2[198],_mm_xor_si128(c2[186],_mm_xor_si128(c2[186],_mm_xor_si128(c2[174],c2[30]))))))))))))))))))))))))))))))))))))))))));
+     d2[39]=simde_mm_xor_si128(c2[132],simde_mm_xor_si128(c2[120],simde_mm_xor_si128(c2[108],simde_mm_xor_si128(c2[96],simde_mm_xor_si128(c2[24],simde_mm_xor_si128(c2[180],simde_mm_xor_si128(c2[86],simde_mm_xor_si128(c2[74],simde_mm_xor_si128(c2[38],simde_mm_xor_si128(c2[110],simde_mm_xor_si128(c2[172],simde_mm_xor_si128(c2[160],simde_mm_xor_si128(c2[136],simde_mm_xor_si128(c2[150],simde_mm_xor_si128(c2[138],simde_mm_xor_si128(c2[102],simde_mm_xor_si128(c2[90],simde_mm_xor_si128(c2[162],simde_mm_xor_si128(c2[188],simde_mm_xor_si128(c2[176],simde_mm_xor_si128(c2[68],simde_mm_xor_si128(c2[188],simde_mm_xor_si128(c2[190],simde_mm_xor_si128(c2[178],simde_mm_xor_si128(c2[118],simde_mm_xor_si128(c2[192],simde_mm_xor_si128(c2[180],simde_mm_xor_si128(c2[60],simde_mm_xor_si128(c2[48],simde_mm_xor_si128(c2[36],simde_mm_xor_si128(c2[182],simde_mm_xor_si128(c2[170],simde_mm_xor_si128(c2[98],simde_mm_xor_si128(c2[158],simde_mm_xor_si128(c2[40],simde_mm_xor_si128(c2[28],simde_mm_xor_si128(c2[28],simde_mm_xor_si128(c2[52],simde_mm_xor_si128(c2[198],simde_mm_xor_si128(c2[186],simde_mm_xor_si128(c2[186],simde_mm_xor_si128(c2[174],c2[30]))))))))))))))))))))))))))))))))))))))))));
 
 //row: 40
-     d2[40]=_mm_xor_si128(c2[36],_mm_xor_si128(c2[0],_mm_xor_si128(c2[12],_mm_xor_si128(c2[168],_mm_xor_si128(c2[132],_mm_xor_si128(c2[96],_mm_xor_si128(c2[182],_mm_xor_si128(c2[146],_mm_xor_si128(c2[146],_mm_xor_si128(c2[110],_mm_xor_si128(c2[2],_mm_xor_si128(c2[26],_mm_xor_si128(c2[182],_mm_xor_si128(c2[76],_mm_xor_si128(c2[40],_mm_xor_si128(c2[28],_mm_xor_si128(c2[52],_mm_xor_si128(c2[16],_mm_xor_si128(c2[4],_mm_xor_si128(c2[54],_mm_xor_si128(c2[18],_mm_xor_si128(c2[6],_mm_xor_si128(c2[162],_mm_xor_si128(c2[78],_mm_xor_si128(c2[42],_mm_xor_si128(c2[92],_mm_xor_si128(c2[56],_mm_xor_si128(c2[176],_mm_xor_si128(c2[140],_mm_xor_si128(c2[80],_mm_xor_si128(c2[104],_mm_xor_si128(c2[68],_mm_xor_si128(c2[94],_mm_xor_si128(c2[58],_mm_xor_si128(c2[10],_mm_xor_si128(c2[34],_mm_xor_si128(c2[190],_mm_xor_si128(c2[96],_mm_xor_si128(c2[60],_mm_xor_si128(c2[156],_mm_xor_si128(c2[120],_mm_xor_si128(c2[120],_mm_xor_si128(c2[144],_mm_xor_si128(c2[108],_mm_xor_si128(c2[86],_mm_xor_si128(c2[50],_mm_xor_si128(c2[182],_mm_xor_si128(c2[14],_mm_xor_si128(c2[170],_mm_xor_si128(c2[136],_mm_xor_si128(c2[100],_mm_xor_si128(c2[136],_mm_xor_si128(c2[100],_mm_xor_si128(c2[136],_mm_xor_si128(c2[160],_mm_xor_si128(c2[124],_mm_xor_si128(c2[102],_mm_xor_si128(c2[66],_mm_xor_si128(c2[90],_mm_xor_si128(c2[54],_mm_xor_si128(c2[114],_mm_xor_si128(c2[138],c2[102]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+     d2[40]=simde_mm_xor_si128(c2[36],simde_mm_xor_si128(c2[0],simde_mm_xor_si128(c2[12],simde_mm_xor_si128(c2[168],simde_mm_xor_si128(c2[132],simde_mm_xor_si128(c2[96],simde_mm_xor_si128(c2[182],simde_mm_xor_si128(c2[146],simde_mm_xor_si128(c2[146],simde_mm_xor_si128(c2[110],simde_mm_xor_si128(c2[2],simde_mm_xor_si128(c2[26],simde_mm_xor_si128(c2[182],simde_mm_xor_si128(c2[76],simde_mm_xor_si128(c2[40],simde_mm_xor_si128(c2[28],simde_mm_xor_si128(c2[52],simde_mm_xor_si128(c2[16],simde_mm_xor_si128(c2[4],simde_mm_xor_si128(c2[54],simde_mm_xor_si128(c2[18],simde_mm_xor_si128(c2[6],simde_mm_xor_si128(c2[162],simde_mm_xor_si128(c2[78],simde_mm_xor_si128(c2[42],simde_mm_xor_si128(c2[92],simde_mm_xor_si128(c2[56],simde_mm_xor_si128(c2[176],simde_mm_xor_si128(c2[140],simde_mm_xor_si128(c2[80],simde_mm_xor_si128(c2[104],simde_mm_xor_si128(c2[68],simde_mm_xor_si128(c2[94],simde_mm_xor_si128(c2[58],simde_mm_xor_si128(c2[10],simde_mm_xor_si128(c2[34],simde_mm_xor_si128(c2[190],simde_mm_xor_si128(c2[96],simde_mm_xor_si128(c2[60],simde_mm_xor_si128(c2[156],simde_mm_xor_si128(c2[120],simde_mm_xor_si128(c2[120],simde_mm_xor_si128(c2[144],simde_mm_xor_si128(c2[108],simde_mm_xor_si128(c2[86],simde_mm_xor_si128(c2[50],simde_mm_xor_si128(c2[182],simde_mm_xor_si128(c2[14],simde_mm_xor_si128(c2[170],simde_mm_xor_si128(c2[136],simde_mm_xor_si128(c2[100],simde_mm_xor_si128(c2[136],simde_mm_xor_si128(c2[100],simde_mm_xor_si128(c2[136],simde_mm_xor_si128(c2[160],simde_mm_xor_si128(c2[124],simde_mm_xor_si128(c2[102],simde_mm_xor_si128(c2[66],simde_mm_xor_si128(c2[90],simde_mm_xor_si128(c2[54],simde_mm_xor_si128(c2[114],simde_mm_xor_si128(c2[138],c2[102]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
 
 //row: 41
-     d2[41]=_mm_xor_si128(c2[180],_mm_xor_si128(c2[168],_mm_xor_si128(c2[144],_mm_xor_si128(c2[72],_mm_xor_si128(c2[134],_mm_xor_si128(c2[122],_mm_xor_si128(c2[86],_mm_xor_si128(c2[158],_mm_xor_si128(c2[14],_mm_xor_si128(c2[28],_mm_xor_si128(c2[16],_mm_xor_si128(c2[184],_mm_xor_si128(c2[6],_mm_xor_si128(c2[186],_mm_xor_si128(c2[138],_mm_xor_si128(c2[18],_mm_xor_si128(c2[32],_mm_xor_si128(c2[116],_mm_xor_si128(c2[44],_mm_xor_si128(c2[34],_mm_xor_si128(c2[166],_mm_xor_si128(c2[70],_mm_xor_si128(c2[48],_mm_xor_si128(c2[36],_mm_xor_si128(c2[96],_mm_xor_si128(c2[84],_mm_xor_si128(c2[26],_mm_xor_si128(c2[146],_mm_xor_si128(c2[76],_mm_xor_si128(c2[76],_mm_xor_si128(c2[100],_mm_xor_si128(c2[54],_mm_xor_si128(c2[42],_mm_xor_si128(c2[30],c2[78]))))))))))))))))))))))))))))))))));
+     d2[41]=simde_mm_xor_si128(c2[180],simde_mm_xor_si128(c2[168],simde_mm_xor_si128(c2[144],simde_mm_xor_si128(c2[72],simde_mm_xor_si128(c2[134],simde_mm_xor_si128(c2[122],simde_mm_xor_si128(c2[86],simde_mm_xor_si128(c2[158],simde_mm_xor_si128(c2[14],simde_mm_xor_si128(c2[28],simde_mm_xor_si128(c2[16],simde_mm_xor_si128(c2[184],simde_mm_xor_si128(c2[6],simde_mm_xor_si128(c2[186],simde_mm_xor_si128(c2[138],simde_mm_xor_si128(c2[18],simde_mm_xor_si128(c2[32],simde_mm_xor_si128(c2[116],simde_mm_xor_si128(c2[44],simde_mm_xor_si128(c2[34],simde_mm_xor_si128(c2[166],simde_mm_xor_si128(c2[70],simde_mm_xor_si128(c2[48],simde_mm_xor_si128(c2[36],simde_mm_xor_si128(c2[96],simde_mm_xor_si128(c2[84],simde_mm_xor_si128(c2[26],simde_mm_xor_si128(c2[146],simde_mm_xor_si128(c2[76],simde_mm_xor_si128(c2[76],simde_mm_xor_si128(c2[100],simde_mm_xor_si128(c2[54],simde_mm_xor_si128(c2[42],simde_mm_xor_si128(c2[30],c2[78]))))))))))))))))))))))))))))))))));
   }
 }
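
Note on the generated-encoder hunks above and below: the change is purely mechanical. Every `__m128i` becomes `simde__m128i` and every `_mm_xor_si128` becomes `simde_mm_xor_si128`, so the same XOR chains compile unchanged on non-x86 targets, where SIMDE lowers the call to NEON (or portable scalar code) instead of SSE2. A minimal standalone sketch of the equivalence follows; it assumes SIMDE's `simde/x86/sse2.h` is directly on the include path (inside the tree the definitions arrive via the `PHY/sse_intrin.h` include seen in these files), and the array names are illustrative only:

#include <stdio.h>
#include <stdint.h>
#include "simde/x86/sse2.h"

int main(void) {
  /* Two 16-byte operands; the generated encoders XOR long chains of
     such 128-bit vectors to accumulate each parity row. */
  uint8_t a[16], b[16], r[16];
  for (int i = 0; i < 16; i++) { a[i] = (uint8_t)i; b[i] = 0xFF; }

  simde__m128i va = simde_mm_loadu_si128((const simde__m128i *)a);
  simde__m128i vb = simde_mm_loadu_si128((const simde__m128i *)b);

  /* Bitwise XOR of all 128 bits at once: the native pxor instruction
     where SSE2 is available, a NEON veor on ARM, or a scalar fallback
     elsewhere -- same result on every target. */
  simde_mm_storeu_si128((simde__m128i *)r, simde_mm_xor_si128(va, vb));

  for (int i = 0; i < 16; i++)
    printf("%02x ", r[i]);   /* prints 0xFF ^ i for each lane */
  printf("\n");
  return 0;
}

On x86 builds that define SIMDE_X86_SSE2_NATIVE, SIMDE forwards `simde_mm_xor_si128` to the native intrinsic, so the renamed encoders cost nothing there; the rename only buys portability.
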
diff --git a/openair1/PHY/CODING/nrLDPC_encoder/ldpc_BG2_Zc176_byte.c b/openair1/PHY/CODING/nrLDPC_encoder/ldpc_BG2_Zc176_byte.c
index 090a394d60dc1e414166cd66b52127f0bb2ee00d..b7837aa6dc48ee82df9fa7b9d3caa5b01d2cf150 100644
--- a/openair1/PHY/CODING/nrLDPC_encoder/ldpc_BG2_Zc176_byte.c
+++ b/openair1/PHY/CODING/nrLDPC_encoder/ldpc_BG2_Zc176_byte.c
@@ -1,9 +1,9 @@
 #include "PHY/sse_intrin.h"
 // generated code for Zc=176, byte encoding
 static inline __attribute__ ((no_sanitize("address", "undefined"))) void ldpc_BG2_Zc176_byte(uint8_t *c,uint8_t *d) {
-  __m128i *csimd=(__m128i *)c,*dsimd=(__m128i *)d;
+  simde__m128i *csimd=(simde__m128i *)c,*dsimd=(simde__m128i *)d;
 
-  __m128i *c2,*d2;
+  simde__m128i *c2,*d2;
 
   int i2;
   for (i2=0; i2<11; i2++) {
@@ -11,129 +11,129 @@ static inline __attribute__ ((no_sanitize("address", "undefined"))) void ldpc_BG
      d2=&dsimd[i2];
 
 //row: 0
-     d2[0]=_mm_xor_si128(c2[2429],_mm_xor_si128(c2[1],_mm_xor_si128(c2[226],_mm_xor_si128(c2[3110],_mm_xor_si128(c2[1572],_mm_xor_si128(c2[905],_mm_xor_si128(c2[2904],_mm_xor_si128(c2[2914],_mm_xor_si128(c2[506],_mm_xor_si128(c2[70],_mm_xor_si128(c2[2272],_mm_xor_si128(c2[3171],_mm_xor_si128(c2[313],_mm_xor_si128(c2[2958],_mm_xor_si128(c2[110],_mm_xor_si128(c2[3197],_mm_xor_si128(c2[1674],_mm_xor_si128(c2[1455],_mm_xor_si128(c2[133],_mm_xor_si128(c2[1036],_mm_xor_si128(c2[1919],_mm_xor_si128(c2[2386],_mm_xor_si128(c2[3044],_mm_xor_si128(c2[177],_mm_xor_si128(c2[2405],_mm_xor_si128(c2[1086],c2[1964]))))))))))))))))))))))))));
+     d2[0]=simde_mm_xor_si128(c2[2429],simde_mm_xor_si128(c2[1],simde_mm_xor_si128(c2[226],simde_mm_xor_si128(c2[3110],simde_mm_xor_si128(c2[1572],simde_mm_xor_si128(c2[905],simde_mm_xor_si128(c2[2904],simde_mm_xor_si128(c2[2914],simde_mm_xor_si128(c2[506],simde_mm_xor_si128(c2[70],simde_mm_xor_si128(c2[2272],simde_mm_xor_si128(c2[3171],simde_mm_xor_si128(c2[313],simde_mm_xor_si128(c2[2958],simde_mm_xor_si128(c2[110],simde_mm_xor_si128(c2[3197],simde_mm_xor_si128(c2[1674],simde_mm_xor_si128(c2[1455],simde_mm_xor_si128(c2[133],simde_mm_xor_si128(c2[1036],simde_mm_xor_si128(c2[1919],simde_mm_xor_si128(c2[2386],simde_mm_xor_si128(c2[3044],simde_mm_xor_si128(c2[177],simde_mm_xor_si128(c2[2405],simde_mm_xor_si128(c2[1086],c2[1964]))))))))))))))))))))))))));
 
 //row: 1
-     d2[11]=_mm_xor_si128(c2[2649],_mm_xor_si128(c2[2429],_mm_xor_si128(c2[1],_mm_xor_si128(c2[226],_mm_xor_si128(c2[3330],_mm_xor_si128(c2[3110],_mm_xor_si128(c2[1572],_mm_xor_si128(c2[905],_mm_xor_si128(c2[3124],_mm_xor_si128(c2[2904],_mm_xor_si128(c2[2914],_mm_xor_si128(c2[726],_mm_xor_si128(c2[506],_mm_xor_si128(c2[70],_mm_xor_si128(c2[2272],_mm_xor_si128(c2[3171],_mm_xor_si128(c2[313],_mm_xor_si128(c2[2958],_mm_xor_si128(c2[110],_mm_xor_si128(c2[3197],_mm_xor_si128(c2[1894],_mm_xor_si128(c2[1674],_mm_xor_si128(c2[1455],_mm_xor_si128(c2[133],_mm_xor_si128(c2[1036],_mm_xor_si128(c2[1919],_mm_xor_si128(c2[2386],_mm_xor_si128(c2[3044],_mm_xor_si128(c2[177],_mm_xor_si128(c2[2625],_mm_xor_si128(c2[2405],_mm_xor_si128(c2[1086],c2[1964]))))))))))))))))))))))))))))))));
+     d2[11]=simde_mm_xor_si128(c2[2649],simde_mm_xor_si128(c2[2429],simde_mm_xor_si128(c2[1],simde_mm_xor_si128(c2[226],simde_mm_xor_si128(c2[3330],simde_mm_xor_si128(c2[3110],simde_mm_xor_si128(c2[1572],simde_mm_xor_si128(c2[905],simde_mm_xor_si128(c2[3124],simde_mm_xor_si128(c2[2904],simde_mm_xor_si128(c2[2914],simde_mm_xor_si128(c2[726],simde_mm_xor_si128(c2[506],simde_mm_xor_si128(c2[70],simde_mm_xor_si128(c2[2272],simde_mm_xor_si128(c2[3171],simde_mm_xor_si128(c2[313],simde_mm_xor_si128(c2[2958],simde_mm_xor_si128(c2[110],simde_mm_xor_si128(c2[3197],simde_mm_xor_si128(c2[1894],simde_mm_xor_si128(c2[1674],simde_mm_xor_si128(c2[1455],simde_mm_xor_si128(c2[133],simde_mm_xor_si128(c2[1036],simde_mm_xor_si128(c2[1919],simde_mm_xor_si128(c2[2386],simde_mm_xor_si128(c2[3044],simde_mm_xor_si128(c2[177],simde_mm_xor_si128(c2[2625],simde_mm_xor_si128(c2[2405],simde_mm_xor_si128(c2[1086],c2[1964]))))))))))))))))))))))))))))))));
 
 //row: 2
-     d2[22]=_mm_xor_si128(c2[2649],_mm_xor_si128(c2[2429],_mm_xor_si128(c2[221],_mm_xor_si128(c2[1],_mm_xor_si128(c2[226],_mm_xor_si128(c2[3330],_mm_xor_si128(c2[3110],_mm_xor_si128(c2[1572],_mm_xor_si128(c2[905],_mm_xor_si128(c2[3124],_mm_xor_si128(c2[2904],_mm_xor_si128(c2[2914],_mm_xor_si128(c2[726],_mm_xor_si128(c2[506],_mm_xor_si128(c2[290],_mm_xor_si128(c2[70],_mm_xor_si128(c2[2272],_mm_xor_si128(c2[3391],_mm_xor_si128(c2[3171],_mm_xor_si128(c2[313],_mm_xor_si128(c2[2958],_mm_xor_si128(c2[330],_mm_xor_si128(c2[110],_mm_xor_si128(c2[3197],_mm_xor_si128(c2[1894],_mm_xor_si128(c2[1674],_mm_xor_si128(c2[1675],_mm_xor_si128(c2[1455],_mm_xor_si128(c2[133],_mm_xor_si128(c2[1256],_mm_xor_si128(c2[1036],_mm_xor_si128(c2[1919],_mm_xor_si128(c2[2606],_mm_xor_si128(c2[2386],_mm_xor_si128(c2[3044],_mm_xor_si128(c2[177],_mm_xor_si128(c2[2625],_mm_xor_si128(c2[2405],_mm_xor_si128(c2[1306],_mm_xor_si128(c2[1086],c2[1964]))))))))))))))))))))))))))))))))))))))));
+     d2[22]=simde_mm_xor_si128(c2[2649],simde_mm_xor_si128(c2[2429],simde_mm_xor_si128(c2[221],simde_mm_xor_si128(c2[1],simde_mm_xor_si128(c2[226],simde_mm_xor_si128(c2[3330],simde_mm_xor_si128(c2[3110],simde_mm_xor_si128(c2[1572],simde_mm_xor_si128(c2[905],simde_mm_xor_si128(c2[3124],simde_mm_xor_si128(c2[2904],simde_mm_xor_si128(c2[2914],simde_mm_xor_si128(c2[726],simde_mm_xor_si128(c2[506],simde_mm_xor_si128(c2[290],simde_mm_xor_si128(c2[70],simde_mm_xor_si128(c2[2272],simde_mm_xor_si128(c2[3391],simde_mm_xor_si128(c2[3171],simde_mm_xor_si128(c2[313],simde_mm_xor_si128(c2[2958],simde_mm_xor_si128(c2[330],simde_mm_xor_si128(c2[110],simde_mm_xor_si128(c2[3197],simde_mm_xor_si128(c2[1894],simde_mm_xor_si128(c2[1674],simde_mm_xor_si128(c2[1675],simde_mm_xor_si128(c2[1455],simde_mm_xor_si128(c2[133],simde_mm_xor_si128(c2[1256],simde_mm_xor_si128(c2[1036],simde_mm_xor_si128(c2[1919],simde_mm_xor_si128(c2[2606],simde_mm_xor_si128(c2[2386],simde_mm_xor_si128(c2[3044],simde_mm_xor_si128(c2[177],simde_mm_xor_si128(c2[2625],simde_mm_xor_si128(c2[2405],simde_mm_xor_si128(c2[1306],simde_mm_xor_si128(c2[1086],c2[1964]))))))))))))))))))))))))))))))))))))))));
 
 //row: 3
-     d2[33]=_mm_xor_si128(c2[2429],_mm_xor_si128(c2[1],_mm_xor_si128(c2[226],_mm_xor_si128(c2[3110],_mm_xor_si128(c2[1572],_mm_xor_si128(c2[1125],_mm_xor_si128(c2[905],_mm_xor_si128(c2[2904],_mm_xor_si128(c2[3134],_mm_xor_si128(c2[2914],_mm_xor_si128(c2[506],_mm_xor_si128(c2[70],_mm_xor_si128(c2[2272],_mm_xor_si128(c2[3171],_mm_xor_si128(c2[313],_mm_xor_si128(c2[3178],_mm_xor_si128(c2[2958],_mm_xor_si128(c2[110],_mm_xor_si128(c2[3417],_mm_xor_si128(c2[3197],_mm_xor_si128(c2[1674],_mm_xor_si128(c2[1455],_mm_xor_si128(c2[353],_mm_xor_si128(c2[133],_mm_xor_si128(c2[1036],_mm_xor_si128(c2[2139],_mm_xor_si128(c2[1919],_mm_xor_si128(c2[2386],_mm_xor_si128(c2[3044],_mm_xor_si128(c2[397],_mm_xor_si128(c2[177],_mm_xor_si128(c2[2405],_mm_xor_si128(c2[1086],_mm_xor_si128(c2[2184],c2[1964]))))))))))))))))))))))))))))))))));
+     d2[33]=simde_mm_xor_si128(c2[2429],simde_mm_xor_si128(c2[1],simde_mm_xor_si128(c2[226],simde_mm_xor_si128(c2[3110],simde_mm_xor_si128(c2[1572],simde_mm_xor_si128(c2[1125],simde_mm_xor_si128(c2[905],simde_mm_xor_si128(c2[2904],simde_mm_xor_si128(c2[3134],simde_mm_xor_si128(c2[2914],simde_mm_xor_si128(c2[506],simde_mm_xor_si128(c2[70],simde_mm_xor_si128(c2[2272],simde_mm_xor_si128(c2[3171],simde_mm_xor_si128(c2[313],simde_mm_xor_si128(c2[3178],simde_mm_xor_si128(c2[2958],simde_mm_xor_si128(c2[110],simde_mm_xor_si128(c2[3417],simde_mm_xor_si128(c2[3197],simde_mm_xor_si128(c2[1674],simde_mm_xor_si128(c2[1455],simde_mm_xor_si128(c2[353],simde_mm_xor_si128(c2[133],simde_mm_xor_si128(c2[1036],simde_mm_xor_si128(c2[2139],simde_mm_xor_si128(c2[1919],simde_mm_xor_si128(c2[2386],simde_mm_xor_si128(c2[3044],simde_mm_xor_si128(c2[397],simde_mm_xor_si128(c2[177],simde_mm_xor_si128(c2[2405],simde_mm_xor_si128(c2[1086],simde_mm_xor_si128(c2[2184],c2[1964]))))))))))))))))))))))))))))))))));
 
 //row: 4
-     d2[44]=_mm_xor_si128(c2[3303],_mm_xor_si128(c2[3083],_mm_xor_si128(c2[666],_mm_xor_si128(c2[880],_mm_xor_si128(c2[1325],_mm_xor_si128(c2[465],_mm_xor_si128(c2[245],_mm_xor_si128(c2[2226],_mm_xor_si128(c2[1570],_mm_xor_si128(c2[686],_mm_xor_si128(c2[270],_mm_xor_si128(c2[50],_mm_xor_si128(c2[49],_mm_xor_si128(c2[1391],_mm_xor_si128(c2[1171],_mm_xor_si128(c2[735],_mm_xor_si128(c2[2926],_mm_xor_si128(c2[317],_mm_xor_si128(c2[978],_mm_xor_si128(c2[93],_mm_xor_si128(c2[775],_mm_xor_si128(c2[332],_mm_xor_si128(c2[2559],_mm_xor_si128(c2[2339],_mm_xor_si128(c2[2120],_mm_xor_si128(c2[798],_mm_xor_si128(c2[1701],_mm_xor_si128(c2[2584],_mm_xor_si128(c2[3040],_mm_xor_si128(c2[179],_mm_xor_si128(c2[842],_mm_xor_si128(c2[3279],_mm_xor_si128(c2[3059],_mm_xor_si128(c2[1740],c2[2618]))))))))))))))))))))))))))))))))));
+     d2[44]=simde_mm_xor_si128(c2[3303],simde_mm_xor_si128(c2[3083],simde_mm_xor_si128(c2[666],simde_mm_xor_si128(c2[880],simde_mm_xor_si128(c2[1325],simde_mm_xor_si128(c2[465],simde_mm_xor_si128(c2[245],simde_mm_xor_si128(c2[2226],simde_mm_xor_si128(c2[1570],simde_mm_xor_si128(c2[686],simde_mm_xor_si128(c2[270],simde_mm_xor_si128(c2[50],simde_mm_xor_si128(c2[49],simde_mm_xor_si128(c2[1391],simde_mm_xor_si128(c2[1171],simde_mm_xor_si128(c2[735],simde_mm_xor_si128(c2[2926],simde_mm_xor_si128(c2[317],simde_mm_xor_si128(c2[978],simde_mm_xor_si128(c2[93],simde_mm_xor_si128(c2[775],simde_mm_xor_si128(c2[332],simde_mm_xor_si128(c2[2559],simde_mm_xor_si128(c2[2339],simde_mm_xor_si128(c2[2120],simde_mm_xor_si128(c2[798],simde_mm_xor_si128(c2[1701],simde_mm_xor_si128(c2[2584],simde_mm_xor_si128(c2[3040],simde_mm_xor_si128(c2[179],simde_mm_xor_si128(c2[842],simde_mm_xor_si128(c2[3279],simde_mm_xor_si128(c2[3059],simde_mm_xor_si128(c2[1740],c2[2618]))))))))))))))))))))))))))))))))));
 
 //row: 5
-     d2[55]=_mm_xor_si128(c2[1762],_mm_xor_si128(c2[1542],_mm_xor_si128(c2[2644],_mm_xor_si128(c2[2869],_mm_xor_si128(c2[3304],_mm_xor_si128(c2[2443],_mm_xor_si128(c2[2223],_mm_xor_si128(c2[685],_mm_xor_si128(c2[29],_mm_xor_si128(c2[907],_mm_xor_si128(c2[2248],_mm_xor_si128(c2[2028],_mm_xor_si128(c2[2027],_mm_xor_si128(c2[3369],_mm_xor_si128(c2[3149],_mm_xor_si128(c2[2713],_mm_xor_si128(c2[1396],_mm_xor_si128(c2[2295],_mm_xor_si128(c2[2956],_mm_xor_si128(c2[2071],_mm_xor_si128(c2[2753],_mm_xor_si128(c2[2310],_mm_xor_si128(c2[772],_mm_xor_si128(c2[1018],_mm_xor_si128(c2[798],_mm_xor_si128(c2[579],_mm_xor_si128(c2[2776],_mm_xor_si128(c2[160],_mm_xor_si128(c2[1043],_mm_xor_si128(c2[1700],_mm_xor_si128(c2[1499],_mm_xor_si128(c2[2157],_mm_xor_si128(c2[2820],_mm_xor_si128(c2[1738],_mm_xor_si128(c2[1518],_mm_xor_si128(c2[199],c2[1088]))))))))))))))))))))))))))))))))))));
+     d2[55]=simde_mm_xor_si128(c2[1762],simde_mm_xor_si128(c2[1542],simde_mm_xor_si128(c2[2644],simde_mm_xor_si128(c2[2869],simde_mm_xor_si128(c2[3304],simde_mm_xor_si128(c2[2443],simde_mm_xor_si128(c2[2223],simde_mm_xor_si128(c2[685],simde_mm_xor_si128(c2[29],simde_mm_xor_si128(c2[907],simde_mm_xor_si128(c2[2248],simde_mm_xor_si128(c2[2028],simde_mm_xor_si128(c2[2027],simde_mm_xor_si128(c2[3369],simde_mm_xor_si128(c2[3149],simde_mm_xor_si128(c2[2713],simde_mm_xor_si128(c2[1396],simde_mm_xor_si128(c2[2295],simde_mm_xor_si128(c2[2956],simde_mm_xor_si128(c2[2071],simde_mm_xor_si128(c2[2753],simde_mm_xor_si128(c2[2310],simde_mm_xor_si128(c2[772],simde_mm_xor_si128(c2[1018],simde_mm_xor_si128(c2[798],simde_mm_xor_si128(c2[579],simde_mm_xor_si128(c2[2776],simde_mm_xor_si128(c2[160],simde_mm_xor_si128(c2[1043],simde_mm_xor_si128(c2[1700],simde_mm_xor_si128(c2[1499],simde_mm_xor_si128(c2[2157],simde_mm_xor_si128(c2[2820],simde_mm_xor_si128(c2[1738],simde_mm_xor_si128(c2[1518],simde_mm_xor_si128(c2[199],c2[1088]))))))))))))))))))))))))))))))))))));
 
 //row: 6
-     d2[66]=_mm_xor_si128(c2[1980],_mm_xor_si128(c2[1760],_mm_xor_si128(c2[2862],_mm_xor_si128(c2[3087],_mm_xor_si128(c2[3302],_mm_xor_si128(c2[2672],_mm_xor_si128(c2[2452],_mm_xor_si128(c2[903],_mm_xor_si128(c2[247],_mm_xor_si128(c2[2466],_mm_xor_si128(c2[2246],_mm_xor_si128(c2[2245],_mm_xor_si128(c2[68],_mm_xor_si128(c2[3367],_mm_xor_si128(c2[2931],_mm_xor_si128(c2[1614],_mm_xor_si128(c2[2513],_mm_xor_si128(c2[3174],_mm_xor_si128(c2[2289],_mm_xor_si128(c2[2971],_mm_xor_si128(c2[2539],_mm_xor_si128(c2[2319],_mm_xor_si128(c2[1236],_mm_xor_si128(c2[1016],_mm_xor_si128(c2[797],_mm_xor_si128(c2[2994],_mm_xor_si128(c2[378],_mm_xor_si128(c2[1261],_mm_xor_si128(c2[2354],_mm_xor_si128(c2[1717],_mm_xor_si128(c2[2386],_mm_xor_si128(c2[3038],_mm_xor_si128(c2[1967],_mm_xor_si128(c2[1747],_mm_xor_si128(c2[428],_mm_xor_si128(c2[1306],c2[2627]))))))))))))))))))))))))))))))))))));
+     d2[66]=simde_mm_xor_si128(c2[1980],simde_mm_xor_si128(c2[1760],simde_mm_xor_si128(c2[2862],simde_mm_xor_si128(c2[3087],simde_mm_xor_si128(c2[3302],simde_mm_xor_si128(c2[2672],simde_mm_xor_si128(c2[2452],simde_mm_xor_si128(c2[903],simde_mm_xor_si128(c2[247],simde_mm_xor_si128(c2[2466],simde_mm_xor_si128(c2[2246],simde_mm_xor_si128(c2[2245],simde_mm_xor_si128(c2[68],simde_mm_xor_si128(c2[3367],simde_mm_xor_si128(c2[2931],simde_mm_xor_si128(c2[1614],simde_mm_xor_si128(c2[2513],simde_mm_xor_si128(c2[3174],simde_mm_xor_si128(c2[2289],simde_mm_xor_si128(c2[2971],simde_mm_xor_si128(c2[2539],simde_mm_xor_si128(c2[2319],simde_mm_xor_si128(c2[1236],simde_mm_xor_si128(c2[1016],simde_mm_xor_si128(c2[797],simde_mm_xor_si128(c2[2994],simde_mm_xor_si128(c2[378],simde_mm_xor_si128(c2[1261],simde_mm_xor_si128(c2[2354],simde_mm_xor_si128(c2[1717],simde_mm_xor_si128(c2[2386],simde_mm_xor_si128(c2[3038],simde_mm_xor_si128(c2[1967],simde_mm_xor_si128(c2[1747],simde_mm_xor_si128(c2[428],simde_mm_xor_si128(c2[1306],c2[2627]))))))))))))))))))))))))))))))))))));
 
 //row: 7
-     d2[77]=_mm_xor_si128(c2[2421],_mm_xor_si128(c2[2201],_mm_xor_si128(c2[442],_mm_xor_si128(c2[3303],_mm_xor_si128(c2[1544],_mm_xor_si128(c2[9],_mm_xor_si128(c2[1769],_mm_xor_si128(c2[3102],_mm_xor_si128(c2[2882],_mm_xor_si128(c2[1123],_mm_xor_si128(c2[1344],_mm_xor_si128(c2[3104],_mm_xor_si128(c2[688],_mm_xor_si128(c2[2668],_mm_xor_si128(c2[2448],_mm_xor_si128(c2[25],_mm_xor_si128(c2[2907],_mm_xor_si128(c2[2687],_mm_xor_si128(c2[928],_mm_xor_si128(c2[2686],_mm_xor_si128(c2[1147],_mm_xor_si128(c2[927],_mm_xor_si128(c2[509],_mm_xor_si128(c2[289],_mm_xor_si128(c2[2049],_mm_xor_si128(c2[3372],_mm_xor_si128(c2[1613],_mm_xor_si128(c2[2055],_mm_xor_si128(c2[296],_mm_xor_si128(c2[2954],_mm_xor_si128(c2[1195],_mm_xor_si128(c2[96],_mm_xor_si128(c2[1856],_mm_xor_si128(c2[2730],_mm_xor_si128(c2[1191],_mm_xor_si128(c2[971],_mm_xor_si128(c2[3412],_mm_xor_si128(c2[1653],_mm_xor_si128(c2[2980],_mm_xor_si128(c2[1430],_mm_xor_si128(c2[1210],_mm_xor_si128(c2[2977],_mm_xor_si128(c2[1677],_mm_xor_si128(c2[1457],_mm_xor_si128(c2[3217],_mm_xor_si128(c2[1238],_mm_xor_si128(c2[2998],_mm_xor_si128(c2[3435],_mm_xor_si128(c2[1896],_mm_xor_si128(c2[1676],_mm_xor_si128(c2[819],_mm_xor_si128(c2[2579],_mm_xor_si128(c2[1702],_mm_xor_si128(c2[163],_mm_xor_si128(c2[3462],_mm_xor_si128(c2[1915],_mm_xor_si128(c2[2158],_mm_xor_si128(c2[399],_mm_xor_si128(c2[2816],_mm_xor_si128(c2[1057],_mm_xor_si128(c2[3479],_mm_xor_si128(c2[1940],_mm_xor_si128(c2[1720],_mm_xor_si128(c2[2408],_mm_xor_si128(c2[2188],_mm_xor_si128(c2[418],_mm_xor_si128(c2[858],_mm_xor_si128(c2[2618],_mm_xor_si128(c2[1747],_mm_xor_si128(c2[208],c2[3507]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+     d2[77]=simde_mm_xor_si128(c2[2421],simde_mm_xor_si128(c2[2201],simde_mm_xor_si128(c2[442],simde_mm_xor_si128(c2[3303],simde_mm_xor_si128(c2[1544],simde_mm_xor_si128(c2[9],simde_mm_xor_si128(c2[1769],simde_mm_xor_si128(c2[3102],simde_mm_xor_si128(c2[2882],simde_mm_xor_si128(c2[1123],simde_mm_xor_si128(c2[1344],simde_mm_xor_si128(c2[3104],simde_mm_xor_si128(c2[688],simde_mm_xor_si128(c2[2668],simde_mm_xor_si128(c2[2448],simde_mm_xor_si128(c2[25],simde_mm_xor_si128(c2[2907],simde_mm_xor_si128(c2[2687],simde_mm_xor_si128(c2[928],simde_mm_xor_si128(c2[2686],simde_mm_xor_si128(c2[1147],simde_mm_xor_si128(c2[927],simde_mm_xor_si128(c2[509],simde_mm_xor_si128(c2[289],simde_mm_xor_si128(c2[2049],simde_mm_xor_si128(c2[3372],simde_mm_xor_si128(c2[1613],simde_mm_xor_si128(c2[2055],simde_mm_xor_si128(c2[296],simde_mm_xor_si128(c2[2954],simde_mm_xor_si128(c2[1195],simde_mm_xor_si128(c2[96],simde_mm_xor_si128(c2[1856],simde_mm_xor_si128(c2[2730],simde_mm_xor_si128(c2[1191],simde_mm_xor_si128(c2[971],simde_mm_xor_si128(c2[3412],simde_mm_xor_si128(c2[1653],simde_mm_xor_si128(c2[2980],simde_mm_xor_si128(c2[1430],simde_mm_xor_si128(c2[1210],simde_mm_xor_si128(c2[2977],simde_mm_xor_si128(c2[1677],simde_mm_xor_si128(c2[1457],simde_mm_xor_si128(c2[3217],simde_mm_xor_si128(c2[1238],simde_mm_xor_si128(c2[2998],simde_mm_xor_si128(c2[3435],simde_mm_xor_si128(c2[1896],simde_mm_xor_si128(c2[1676],simde_mm_xor_si128(c2[819],simde_mm_xor_si128(c2[2579],simde_mm_xor_si128(c2[1702],simde_mm_xor_si128(c2[163],simde_mm_xor_si128(c2[3462],simde_mm_xor_si128(c2[1915],simde_mm_xor_si128(c2[2158],simde_mm_xor_si128(c2[399],simde_mm_xor_si128(c2[2816],simde_mm_xor_si128(c2[1057],simde_mm_xor_si128(c2[3479],simde_mm_xor_si128(c2[1940],simde_mm_xor_si128(c2[1720],simde_mm_xor_si128(c2[2408],simde_mm_xor_si128(c2[2188],simde_mm_xor_si128(c2[418],simde_mm_xor_si128(c2[858],simde_mm_xor_si128(c2[2618],simde_mm_xor_si128(c2[1747],simde_mm_xor_si128(c2[208],c2[3507]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
 
 //row: 8
-     d2[88]=_mm_xor_si128(c2[2868],_mm_xor_si128(c2[2648],_mm_xor_si128(c2[440],_mm_xor_si128(c2[220],_mm_xor_si128(c2[445],_mm_xor_si128(c2[1103],_mm_xor_si128(c2[30],_mm_xor_si128(c2[3329],_mm_xor_si128(c2[1791],_mm_xor_si128(c2[1124],_mm_xor_si128(c2[3323],_mm_xor_si128(c2[3354],_mm_xor_si128(c2[3134],_mm_xor_si128(c2[3133],_mm_xor_si128(c2[956],_mm_xor_si128(c2[736],_mm_xor_si128(c2[509],_mm_xor_si128(c2[289],_mm_xor_si128(c2[2491],_mm_xor_si128(c2[91],_mm_xor_si128(c2[3390],_mm_xor_si128(c2[532],_mm_xor_si128(c2[3177],_mm_xor_si128(c2[560],_mm_xor_si128(c2[340],_mm_xor_si128(c2[3416],_mm_xor_si128(c2[2113],_mm_xor_si128(c2[1893],_mm_xor_si128(c2[1894],_mm_xor_si128(c2[1674],_mm_xor_si128(c2[352],_mm_xor_si128(c2[1475],_mm_xor_si128(c2[1255],_mm_xor_si128(c2[2138],_mm_xor_si128(c2[2825],_mm_xor_si128(c2[2605],_mm_xor_si128(c2[3263],_mm_xor_si128(c2[396],_mm_xor_si128(c2[2844],_mm_xor_si128(c2[2624],_mm_xor_si128(c2[1525],_mm_xor_si128(c2[1305],c2[2183]))))))))))))))))))))))))))))))))))))))))));
+     d2[88]=simde_mm_xor_si128(c2[2868],simde_mm_xor_si128(c2[2648],simde_mm_xor_si128(c2[440],simde_mm_xor_si128(c2[220],simde_mm_xor_si128(c2[445],simde_mm_xor_si128(c2[1103],simde_mm_xor_si128(c2[30],simde_mm_xor_si128(c2[3329],simde_mm_xor_si128(c2[1791],simde_mm_xor_si128(c2[1124],simde_mm_xor_si128(c2[3323],simde_mm_xor_si128(c2[3354],simde_mm_xor_si128(c2[3134],simde_mm_xor_si128(c2[3133],simde_mm_xor_si128(c2[956],simde_mm_xor_si128(c2[736],simde_mm_xor_si128(c2[509],simde_mm_xor_si128(c2[289],simde_mm_xor_si128(c2[2491],simde_mm_xor_si128(c2[91],simde_mm_xor_si128(c2[3390],simde_mm_xor_si128(c2[532],simde_mm_xor_si128(c2[3177],simde_mm_xor_si128(c2[560],simde_mm_xor_si128(c2[340],simde_mm_xor_si128(c2[3416],simde_mm_xor_si128(c2[2113],simde_mm_xor_si128(c2[1893],simde_mm_xor_si128(c2[1894],simde_mm_xor_si128(c2[1674],simde_mm_xor_si128(c2[352],simde_mm_xor_si128(c2[1475],simde_mm_xor_si128(c2[1255],simde_mm_xor_si128(c2[2138],simde_mm_xor_si128(c2[2825],simde_mm_xor_si128(c2[2605],simde_mm_xor_si128(c2[3263],simde_mm_xor_si128(c2[396],simde_mm_xor_si128(c2[2844],simde_mm_xor_si128(c2[2624],simde_mm_xor_si128(c2[1525],simde_mm_xor_si128(c2[1305],c2[2183]))))))))))))))))))))))))))))))))))))))))));
 
 //row: 9
-     d2[99]=_mm_xor_si128(c2[3084],_mm_xor_si128(c2[2642],_mm_xor_si128(c2[2422],_mm_xor_si128(c2[667],_mm_xor_si128(c2[5],_mm_xor_si128(c2[881],_mm_xor_si128(c2[230],_mm_xor_si128(c2[246],_mm_xor_si128(c2[3323],_mm_xor_si128(c2[3103],_mm_xor_si128(c2[2227],_mm_xor_si128(c2[1565],_mm_xor_si128(c2[1571],_mm_xor_si128(c2[909],_mm_xor_si128(c2[1788],_mm_xor_si128(c2[51],_mm_xor_si128(c2[3128],_mm_xor_si128(c2[2908],_mm_xor_si128(c2[50],_mm_xor_si128(c2[2907],_mm_xor_si128(c2[1172],_mm_xor_si128(c2[730],_mm_xor_si128(c2[510],_mm_xor_si128(c2[736],_mm_xor_si128(c2[74],_mm_xor_si128(c2[2927],_mm_xor_si128(c2[2276],_mm_xor_si128(c2[318],_mm_xor_si128(c2[3175],_mm_xor_si128(c2[968],_mm_xor_si128(c2[317],_mm_xor_si128(c2[94],_mm_xor_si128(c2[2951],_mm_xor_si128(c2[776],_mm_xor_si128(c2[114],_mm_xor_si128(c2[333],_mm_xor_si128(c2[3190],_mm_xor_si128(c2[2340],_mm_xor_si128(c2[1898],_mm_xor_si128(c2[1678],_mm_xor_si128(c2[2121],_mm_xor_si128(c2[1459],_mm_xor_si128(c2[799],_mm_xor_si128(c2[137],_mm_xor_si128(c2[1702],_mm_xor_si128(c2[1040],_mm_xor_si128(c2[2574],_mm_xor_si128(c2[1923],_mm_xor_si128(c2[3041],_mm_xor_si128(c2[2379],_mm_xor_si128(c2[180],_mm_xor_si128(c2[3037],_mm_xor_si128(c2[843],_mm_xor_si128(c2[181],_mm_xor_si128(c2[3264],_mm_xor_si128(c2[3060],_mm_xor_si128(c2[2618],_mm_xor_si128(c2[2398],_mm_xor_si128(c2[1741],_mm_xor_si128(c2[1079],_mm_xor_si128(c2[2619],c2[1968])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+     d2[99]=simde_mm_xor_si128(c2[3084],simde_mm_xor_si128(c2[2642],simde_mm_xor_si128(c2[2422],simde_mm_xor_si128(c2[667],simde_mm_xor_si128(c2[5],simde_mm_xor_si128(c2[881],simde_mm_xor_si128(c2[230],simde_mm_xor_si128(c2[246],simde_mm_xor_si128(c2[3323],simde_mm_xor_si128(c2[3103],simde_mm_xor_si128(c2[2227],simde_mm_xor_si128(c2[1565],simde_mm_xor_si128(c2[1571],simde_mm_xor_si128(c2[909],simde_mm_xor_si128(c2[1788],simde_mm_xor_si128(c2[51],simde_mm_xor_si128(c2[3128],simde_mm_xor_si128(c2[2908],simde_mm_xor_si128(c2[50],simde_mm_xor_si128(c2[2907],simde_mm_xor_si128(c2[1172],simde_mm_xor_si128(c2[730],simde_mm_xor_si128(c2[510],simde_mm_xor_si128(c2[736],simde_mm_xor_si128(c2[74],simde_mm_xor_si128(c2[2927],simde_mm_xor_si128(c2[2276],simde_mm_xor_si128(c2[318],simde_mm_xor_si128(c2[3175],simde_mm_xor_si128(c2[968],simde_mm_xor_si128(c2[317],simde_mm_xor_si128(c2[94],simde_mm_xor_si128(c2[2951],simde_mm_xor_si128(c2[776],simde_mm_xor_si128(c2[114],simde_mm_xor_si128(c2[333],simde_mm_xor_si128(c2[3190],simde_mm_xor_si128(c2[2340],simde_mm_xor_si128(c2[1898],simde_mm_xor_si128(c2[1678],simde_mm_xor_si128(c2[2121],simde_mm_xor_si128(c2[1459],simde_mm_xor_si128(c2[799],simde_mm_xor_si128(c2[137],simde_mm_xor_si128(c2[1702],simde_mm_xor_si128(c2[1040],simde_mm_xor_si128(c2[2574],simde_mm_xor_si128(c2[1923],simde_mm_xor_si128(c2[3041],simde_mm_xor_si128(c2[2379],simde_mm_xor_si128(c2[180],simde_mm_xor_si128(c2[3037],simde_mm_xor_si128(c2[843],simde_mm_xor_si128(c2[181],simde_mm_xor_si128(c2[3264],simde_mm_xor_si128(c2[3060],simde_mm_xor_si128(c2[2618],simde_mm_xor_si128(c2[2398],simde_mm_xor_si128(c2[1741],simde_mm_xor_si128(c2[1079],simde_mm_xor_si128(c2[2619],c2[1968])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
 
 //row: 10
-     d2[110]=_mm_xor_si128(c2[3306],_mm_xor_si128(c2[2003],_mm_xor_si128(c2[3222],c2[1695])));
+     d2[110]=simde_mm_xor_si128(c2[3306],simde_mm_xor_si128(c2[2003],simde_mm_xor_si128(c2[3222],c2[1695])));
 
 //row: 11
-     d2[121]=_mm_xor_si128(c2[1767],_mm_xor_si128(c2[2869],_mm_xor_si128(c2[3083],_mm_xor_si128(c2[2425],_mm_xor_si128(c2[2448],_mm_xor_si128(c2[910],_mm_xor_si128(c2[463],_mm_xor_si128(c2[243],_mm_xor_si128(c2[2253],_mm_xor_si128(c2[2472],_mm_xor_si128(c2[2252],_mm_xor_si128(c2[3374],_mm_xor_si128(c2[2927],_mm_xor_si128(c2[1610],_mm_xor_si128(c2[2509],_mm_xor_si128(c2[3170],_mm_xor_si128(c2[2516],_mm_xor_si128(c2[2296],_mm_xor_si128(c2[2978],_mm_xor_si128(c2[2755],_mm_xor_si128(c2[2535],_mm_xor_si128(c2[1012],_mm_xor_si128(c2[793],_mm_xor_si128(c2[3221],_mm_xor_si128(c2[3001],_mm_xor_si128(c2[374],_mm_xor_si128(c2[1477],_mm_xor_si128(c2[1257],_mm_xor_si128(c2[3464],_mm_xor_si128(c2[1724],_mm_xor_si128(c2[2382],_mm_xor_si128(c2[3265],_mm_xor_si128(c2[3045],_mm_xor_si128(c2[1743],_mm_xor_si128(c2[424],_mm_xor_si128(c2[1522],_mm_xor_si128(c2[1302],c2[1959])))))))))))))))))))))))))))))))))))));
+     d2[121]=simde_mm_xor_si128(c2[1767],simde_mm_xor_si128(c2[2869],simde_mm_xor_si128(c2[3083],simde_mm_xor_si128(c2[2425],simde_mm_xor_si128(c2[2448],simde_mm_xor_si128(c2[910],simde_mm_xor_si128(c2[463],simde_mm_xor_si128(c2[243],simde_mm_xor_si128(c2[2253],simde_mm_xor_si128(c2[2472],simde_mm_xor_si128(c2[2252],simde_mm_xor_si128(c2[3374],simde_mm_xor_si128(c2[2927],simde_mm_xor_si128(c2[1610],simde_mm_xor_si128(c2[2509],simde_mm_xor_si128(c2[3170],simde_mm_xor_si128(c2[2516],simde_mm_xor_si128(c2[2296],simde_mm_xor_si128(c2[2978],simde_mm_xor_si128(c2[2755],simde_mm_xor_si128(c2[2535],simde_mm_xor_si128(c2[1012],simde_mm_xor_si128(c2[793],simde_mm_xor_si128(c2[3221],simde_mm_xor_si128(c2[3001],simde_mm_xor_si128(c2[374],simde_mm_xor_si128(c2[1477],simde_mm_xor_si128(c2[1257],simde_mm_xor_si128(c2[3464],simde_mm_xor_si128(c2[1724],simde_mm_xor_si128(c2[2382],simde_mm_xor_si128(c2[3265],simde_mm_xor_si128(c2[3045],simde_mm_xor_si128(c2[1743],simde_mm_xor_si128(c2[424],simde_mm_xor_si128(c2[1522],simde_mm_xor_si128(c2[1302],c2[1959])))))))))))))))))))))))))))))))))))));
 
 //row: 12
-     d2[132]=_mm_xor_si128(c2[10],_mm_xor_si128(c2[3309],_mm_xor_si128(c2[881],_mm_xor_si128(c2[1106],_mm_xor_si128(c2[691],_mm_xor_si128(c2[471],_mm_xor_si128(c2[2452],_mm_xor_si128(c2[1785],_mm_xor_si128(c2[2229],_mm_xor_si128(c2[485],_mm_xor_si128(c2[265],_mm_xor_si128(c2[264],_mm_xor_si128(c2[1606],_mm_xor_si128(c2[1386],_mm_xor_si128(c2[950],_mm_xor_si128(c2[3152],_mm_xor_si128(c2[2486],_mm_xor_si128(c2[532],_mm_xor_si128(c2[1193],_mm_xor_si128(c2[308],_mm_xor_si128(c2[990],_mm_xor_si128(c2[558],_mm_xor_si128(c2[2774],_mm_xor_si128(c2[2554],_mm_xor_si128(c2[2335],_mm_xor_si128(c2[1013],_mm_xor_si128(c2[1916],_mm_xor_si128(c2[2799],_mm_xor_si128(c2[3266],_mm_xor_si128(c2[405],_mm_xor_si128(c2[1057],_mm_xor_si128(c2[3505],_mm_xor_si128(c2[3285],_mm_xor_si128(c2[1966],c2[2844]))))))))))))))))))))))))))))))))));
+     d2[132]=simde_mm_xor_si128(c2[10],simde_mm_xor_si128(c2[3309],simde_mm_xor_si128(c2[881],simde_mm_xor_si128(c2[1106],simde_mm_xor_si128(c2[691],simde_mm_xor_si128(c2[471],simde_mm_xor_si128(c2[2452],simde_mm_xor_si128(c2[1785],simde_mm_xor_si128(c2[2229],simde_mm_xor_si128(c2[485],simde_mm_xor_si128(c2[265],simde_mm_xor_si128(c2[264],simde_mm_xor_si128(c2[1606],simde_mm_xor_si128(c2[1386],simde_mm_xor_si128(c2[950],simde_mm_xor_si128(c2[3152],simde_mm_xor_si128(c2[2486],simde_mm_xor_si128(c2[532],simde_mm_xor_si128(c2[1193],simde_mm_xor_si128(c2[308],simde_mm_xor_si128(c2[990],simde_mm_xor_si128(c2[558],simde_mm_xor_si128(c2[2774],simde_mm_xor_si128(c2[2554],simde_mm_xor_si128(c2[2335],simde_mm_xor_si128(c2[1013],simde_mm_xor_si128(c2[1916],simde_mm_xor_si128(c2[2799],simde_mm_xor_si128(c2[3266],simde_mm_xor_si128(c2[405],simde_mm_xor_si128(c2[1057],simde_mm_xor_si128(c2[3505],simde_mm_xor_si128(c2[3285],simde_mm_xor_si128(c2[1966],c2[2844]))))))))))))))))))))))))))))))))));
 
 //row: 13
-     d2[143]=_mm_xor_si128(c2[2206],_mm_xor_si128(c2[3308],_mm_xor_si128(c2[3],_mm_xor_si128(c2[2861],_mm_xor_si128(c2[2887],_mm_xor_si128(c2[1349],_mm_xor_si128(c2[902],_mm_xor_si128(c2[682],_mm_xor_si128(c2[2447],_mm_xor_si128(c2[2692],_mm_xor_si128(c2[2911],_mm_xor_si128(c2[2691],_mm_xor_si128(c2[294],_mm_xor_si128(c2[3366],_mm_xor_si128(c2[2049],_mm_xor_si128(c2[2948],_mm_xor_si128(c2[90],_mm_xor_si128(c2[2955],_mm_xor_si128(c2[2735],_mm_xor_si128(c2[3417],_mm_xor_si128(c2[3194],_mm_xor_si128(c2[2974],_mm_xor_si128(c2[1462],_mm_xor_si128(c2[1232],_mm_xor_si128(c2[141],_mm_xor_si128(c2[3440],_mm_xor_si128(c2[824],_mm_xor_si128(c2[1916],_mm_xor_si128(c2[1696],_mm_xor_si128(c2[2163],_mm_xor_si128(c2[2821],_mm_xor_si128(c2[185],_mm_xor_si128(c2[3484],_mm_xor_si128(c2[2597],_mm_xor_si128(c2[2182],_mm_xor_si128(c2[863],_mm_xor_si128(c2[1961],c2[1741])))))))))))))))))))))))))))))))))))));
+     d2[143]=simde_mm_xor_si128(c2[2206],simde_mm_xor_si128(c2[3308],simde_mm_xor_si128(c2[3],simde_mm_xor_si128(c2[2861],simde_mm_xor_si128(c2[2887],simde_mm_xor_si128(c2[1349],simde_mm_xor_si128(c2[902],simde_mm_xor_si128(c2[682],simde_mm_xor_si128(c2[2447],simde_mm_xor_si128(c2[2692],simde_mm_xor_si128(c2[2911],simde_mm_xor_si128(c2[2691],simde_mm_xor_si128(c2[294],simde_mm_xor_si128(c2[3366],simde_mm_xor_si128(c2[2049],simde_mm_xor_si128(c2[2948],simde_mm_xor_si128(c2[90],simde_mm_xor_si128(c2[2955],simde_mm_xor_si128(c2[2735],simde_mm_xor_si128(c2[3417],simde_mm_xor_si128(c2[3194],simde_mm_xor_si128(c2[2974],simde_mm_xor_si128(c2[1462],simde_mm_xor_si128(c2[1232],simde_mm_xor_si128(c2[141],simde_mm_xor_si128(c2[3440],simde_mm_xor_si128(c2[824],simde_mm_xor_si128(c2[1916],simde_mm_xor_si128(c2[1696],simde_mm_xor_si128(c2[2163],simde_mm_xor_si128(c2[2821],simde_mm_xor_si128(c2[185],simde_mm_xor_si128(c2[3484],simde_mm_xor_si128(c2[2597],simde_mm_xor_si128(c2[2182],simde_mm_xor_si128(c2[863],simde_mm_xor_si128(c2[1961],c2[1741])))))))))))))))))))))))))))))))))))));
 
 //row: 14
-     d2[154]=_mm_xor_si128(c2[890],_mm_xor_si128(c2[670],_mm_xor_si128(c2[229],_mm_xor_si128(c2[1761],_mm_xor_si128(c2[1320],_mm_xor_si128(c2[1986],_mm_xor_si128(c2[1545],_mm_xor_si128(c2[1571],_mm_xor_si128(c2[1351],_mm_xor_si128(c2[910],_mm_xor_si128(c2[3332],_mm_xor_si128(c2[2891],_mm_xor_si128(c2[2665],_mm_xor_si128(c2[2444],_mm_xor_si128(c2[2224],_mm_xor_si128(c2[2442],_mm_xor_si128(c2[1365],_mm_xor_si128(c2[1145],_mm_xor_si128(c2[704],_mm_xor_si128(c2[1144],_mm_xor_si128(c2[934],_mm_xor_si128(c2[714],_mm_xor_si128(c2[2486],_mm_xor_si128(c2[2266],_mm_xor_si128(c2[1836],_mm_xor_si128(c2[1830],_mm_xor_si128(c2[1389],_mm_xor_si128(c2[513],_mm_xor_si128(c2[72],_mm_xor_si128(c2[1412],_mm_xor_si128(c2[971],_mm_xor_si128(c2[2073],_mm_xor_si128(c2[1632],_mm_xor_si128(c2[1188],_mm_xor_si128(c2[978],_mm_xor_si128(c2[758],_mm_xor_si128(c2[1870],_mm_xor_si128(c2[1440],_mm_xor_si128(c2[1438],_mm_xor_si128(c2[1217],_mm_xor_si128(c2[997],_mm_xor_si128(c2[135],_mm_xor_si128(c2[3434],_mm_xor_si128(c2[2993],_mm_xor_si128(c2[3215],_mm_xor_si128(c2[2774],_mm_xor_si128(c2[1893],_mm_xor_si128(c2[1672],_mm_xor_si128(c2[1452],_mm_xor_si128(c2[361],_mm_xor_si128(c2[2796],_mm_xor_si128(c2[2355],_mm_xor_si128(c2[160],_mm_xor_si128(c2[3458],_mm_xor_si128(c2[3238],_mm_xor_si128(c2[616],_mm_xor_si128(c2[186],_mm_xor_si128(c2[1285],_mm_xor_si128(c2[844],_mm_xor_si128(c2[1937],_mm_xor_si128(c2[1716],_mm_xor_si128(c2[1496],_mm_xor_si128(c2[866],_mm_xor_si128(c2[646],_mm_xor_si128(c2[205],_mm_xor_si128(c2[2846],_mm_xor_si128(c2[2405],_mm_xor_si128(c2[205],_mm_xor_si128(c2[3503],c2[3283])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+     d2[154]=simde_mm_xor_si128(c2[890],simde_mm_xor_si128(c2[670],simde_mm_xor_si128(c2[229],simde_mm_xor_si128(c2[1761],simde_mm_xor_si128(c2[1320],simde_mm_xor_si128(c2[1986],simde_mm_xor_si128(c2[1545],simde_mm_xor_si128(c2[1571],simde_mm_xor_si128(c2[1351],simde_mm_xor_si128(c2[910],simde_mm_xor_si128(c2[3332],simde_mm_xor_si128(c2[2891],simde_mm_xor_si128(c2[2665],simde_mm_xor_si128(c2[2444],simde_mm_xor_si128(c2[2224],simde_mm_xor_si128(c2[2442],simde_mm_xor_si128(c2[1365],simde_mm_xor_si128(c2[1145],simde_mm_xor_si128(c2[704],simde_mm_xor_si128(c2[1144],simde_mm_xor_si128(c2[934],simde_mm_xor_si128(c2[714],simde_mm_xor_si128(c2[2486],simde_mm_xor_si128(c2[2266],simde_mm_xor_si128(c2[1836],simde_mm_xor_si128(c2[1830],simde_mm_xor_si128(c2[1389],simde_mm_xor_si128(c2[513],simde_mm_xor_si128(c2[72],simde_mm_xor_si128(c2[1412],simde_mm_xor_si128(c2[971],simde_mm_xor_si128(c2[2073],simde_mm_xor_si128(c2[1632],simde_mm_xor_si128(c2[1188],simde_mm_xor_si128(c2[978],simde_mm_xor_si128(c2[758],simde_mm_xor_si128(c2[1870],simde_mm_xor_si128(c2[1440],simde_mm_xor_si128(c2[1438],simde_mm_xor_si128(c2[1217],simde_mm_xor_si128(c2[997],simde_mm_xor_si128(c2[135],simde_mm_xor_si128(c2[3434],simde_mm_xor_si128(c2[2993],simde_mm_xor_si128(c2[3215],simde_mm_xor_si128(c2[2774],simde_mm_xor_si128(c2[1893],simde_mm_xor_si128(c2[1672],simde_mm_xor_si128(c2[1452],simde_mm_xor_si128(c2[361],simde_mm_xor_si128(c2[2796],simde_mm_xor_si128(c2[2355],simde_mm_xor_si128(c2[160],simde_mm_xor_si128(c2[3458],simde_mm_xor_si128(c2[3238],simde_mm_xor_si128(c2[616],simde_mm_xor_si128(c2[186],simde_mm_xor_si128(c2[1285],simde_mm_xor_si128(c2[844],simde_mm_xor_si128(c2[1937],simde_mm_xor_si128(c2[1716],simde_mm_xor_si128(c2[1496],simde_mm_xor_si128(c2[866],simde_mm_xor_si128(c2[646],simde_mm_xor_si128(c2[205],simde_mm_xor_si128(c2[2846],simde_mm_xor_si128(c2[2405],simde_mm_xor_si128(c2[205],simde_mm_xor_si128(c2[3503],c2[3283])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
 
 //row: 15
-     d2[165]=_mm_xor_si128(c2[445],_mm_xor_si128(c2[881],_mm_xor_si128(c2[661],_mm_xor_si128(c2[1547],_mm_xor_si128(c2[1763],_mm_xor_si128(c2[1761],_mm_xor_si128(c2[1988],_mm_xor_si128(c2[1988],_mm_xor_si128(c2[1126],_mm_xor_si128(c2[1562],_mm_xor_si128(c2[1342],_mm_xor_si128(c2[3107],_mm_xor_si128(c2[3323],_mm_xor_si128(c2[2451],_mm_xor_si128(c2[2667],_mm_xor_si128(c2[931],_mm_xor_si128(c2[1367],_mm_xor_si128(c2[1147],_mm_xor_si128(c2[930],_mm_xor_si128(c2[1146],_mm_xor_si128(c2[2052],_mm_xor_si128(c2[2488],_mm_xor_si128(c2[2268],_mm_xor_si128(c2[1616],_mm_xor_si128(c2[1832],_mm_xor_si128(c2[288],_mm_xor_si128(c2[515],_mm_xor_si128(c2[1198],_mm_xor_si128(c2[1414],_mm_xor_si128(c2[1848],_mm_xor_si128(c2[2075],_mm_xor_si128(c2[974],_mm_xor_si128(c2[1190],_mm_xor_si128(c2[1656],_mm_xor_si128(c2[1872],_mm_xor_si128(c2[1213],_mm_xor_si128(c2[1440],_mm_xor_si128(c2[3220],_mm_xor_si128(c2[137],_mm_xor_si128(c2[3436],_mm_xor_si128(c2[3001],_mm_xor_si128(c2[3217],_mm_xor_si128(c2[1679],_mm_xor_si128(c2[1895],_mm_xor_si128(c2[2582],_mm_xor_si128(c2[2798],_mm_xor_si128(c2[3454],_mm_xor_si128(c2[162],_mm_xor_si128(c2[402],_mm_xor_si128(c2[618],_mm_xor_si128(c2[1060],_mm_xor_si128(c2[1276],_mm_xor_si128(c2[1723],_mm_xor_si128(c2[1939],_mm_xor_si128(c2[421],_mm_xor_si128(c2[868],_mm_xor_si128(c2[648],_mm_xor_si128(c2[2621],_mm_xor_si128(c2[2848],_mm_xor_si128(c2[3499],c2[207]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+     d2[165]=simde_mm_xor_si128(c2[445],simde_mm_xor_si128(c2[881],simde_mm_xor_si128(c2[661],simde_mm_xor_si128(c2[1547],simde_mm_xor_si128(c2[1763],simde_mm_xor_si128(c2[1761],simde_mm_xor_si128(c2[1988],simde_mm_xor_si128(c2[1988],simde_mm_xor_si128(c2[1126],simde_mm_xor_si128(c2[1562],simde_mm_xor_si128(c2[1342],simde_mm_xor_si128(c2[3107],simde_mm_xor_si128(c2[3323],simde_mm_xor_si128(c2[2451],simde_mm_xor_si128(c2[2667],simde_mm_xor_si128(c2[931],simde_mm_xor_si128(c2[1367],simde_mm_xor_si128(c2[1147],simde_mm_xor_si128(c2[930],simde_mm_xor_si128(c2[1146],simde_mm_xor_si128(c2[2052],simde_mm_xor_si128(c2[2488],simde_mm_xor_si128(c2[2268],simde_mm_xor_si128(c2[1616],simde_mm_xor_si128(c2[1832],simde_mm_xor_si128(c2[288],simde_mm_xor_si128(c2[515],simde_mm_xor_si128(c2[1198],simde_mm_xor_si128(c2[1414],simde_mm_xor_si128(c2[1848],simde_mm_xor_si128(c2[2075],simde_mm_xor_si128(c2[974],simde_mm_xor_si128(c2[1190],simde_mm_xor_si128(c2[1656],simde_mm_xor_si128(c2[1872],simde_mm_xor_si128(c2[1213],simde_mm_xor_si128(c2[1440],simde_mm_xor_si128(c2[3220],simde_mm_xor_si128(c2[137],simde_mm_xor_si128(c2[3436],simde_mm_xor_si128(c2[3001],simde_mm_xor_si128(c2[3217],simde_mm_xor_si128(c2[1679],simde_mm_xor_si128(c2[1895],simde_mm_xor_si128(c2[2582],simde_mm_xor_si128(c2[2798],simde_mm_xor_si128(c2[3454],simde_mm_xor_si128(c2[162],simde_mm_xor_si128(c2[402],simde_mm_xor_si128(c2[618],simde_mm_xor_si128(c2[1060],simde_mm_xor_si128(c2[1276],simde_mm_xor_si128(c2[1723],simde_mm_xor_si128(c2[1939],simde_mm_xor_si128(c2[421],simde_mm_xor_si128(c2[868],simde_mm_xor_si128(c2[648],simde_mm_xor_si128(c2[2621],simde_mm_xor_si128(c2[2848],simde_mm_xor_si128(c2[3499],c2[207]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
 
 //row: 16
-     d2[176]=_mm_xor_si128(c2[2870],_mm_xor_si128(c2[2650],_mm_xor_si128(c2[229],_mm_xor_si128(c2[9],_mm_xor_si128(c2[222],_mm_xor_si128(c2[1320],_mm_xor_si128(c2[1100],_mm_xor_si128(c2[447],_mm_xor_si128(c2[1325],_mm_xor_si128(c2[32],_mm_xor_si128(c2[3331],_mm_xor_si128(c2[910],_mm_xor_si128(c2[690],_mm_xor_si128(c2[1782],_mm_xor_si128(c2[2671],_mm_xor_si128(c2[1126],_mm_xor_si128(c2[2004],_mm_xor_si128(c2[3106],_mm_xor_si128(c2[3345],_mm_xor_si128(c2[3125],_mm_xor_si128(c2[704],_mm_xor_si128(c2[484],_mm_xor_si128(c2[3124],_mm_xor_si128(c2[494],_mm_xor_si128(c2[947],_mm_xor_si128(c2[727],_mm_xor_si128(c2[1836],_mm_xor_si128(c2[1616],_mm_xor_si128(c2[291],_mm_xor_si128(c2[1389],_mm_xor_si128(c2[1169],_mm_xor_si128(c2[2493],_mm_xor_si128(c2[3371],_mm_xor_si128(c2[3392],_mm_xor_si128(c2[971],_mm_xor_si128(c2[751],_mm_xor_si128(c2[534],_mm_xor_si128(c2[1412],_mm_xor_si128(c2[3168],_mm_xor_si128(c2[538],_mm_xor_si128(c2[331],_mm_xor_si128(c2[1440],_mm_xor_si128(c2[1220],_mm_xor_si128(c2[3418],_mm_xor_si128(c2[777],_mm_xor_si128(c2[2115],_mm_xor_si128(c2[1895],_mm_xor_si128(c2[2993],_mm_xor_si128(c2[2773],_mm_xor_si128(c2[1676],_mm_xor_si128(c2[2774],_mm_xor_si128(c2[2554],_mm_xor_si128(c2[354],_mm_xor_si128(c2[1232],_mm_xor_si128(c2[1257],_mm_xor_si128(c2[2355],_mm_xor_si128(c2[2135],_mm_xor_si128(c2[2140],_mm_xor_si128(c2[3018],_mm_xor_si128(c2[2596],_mm_xor_si128(c2[186],_mm_xor_si128(c2[3485],_mm_xor_si128(c2[3265],_mm_xor_si128(c2[624],_mm_xor_si128(c2[398],_mm_xor_si128(c2[1276],_mm_xor_si128(c2[2846],_mm_xor_si128(c2[2626],_mm_xor_si128(c2[205],_mm_xor_si128(c2[3504],_mm_xor_si128(c2[1307],_mm_xor_si128(c2[2405],_mm_xor_si128(c2[2185],_mm_xor_si128(c2[2185],_mm_xor_si128(c2[3063],c2[3287])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+     d2[176]=simde_mm_xor_si128(c2[2870],simde_mm_xor_si128(c2[2650],simde_mm_xor_si128(c2[229],simde_mm_xor_si128(c2[9],simde_mm_xor_si128(c2[222],simde_mm_xor_si128(c2[1320],simde_mm_xor_si128(c2[1100],simde_mm_xor_si128(c2[447],simde_mm_xor_si128(c2[1325],simde_mm_xor_si128(c2[32],simde_mm_xor_si128(c2[3331],simde_mm_xor_si128(c2[910],simde_mm_xor_si128(c2[690],simde_mm_xor_si128(c2[1782],simde_mm_xor_si128(c2[2671],simde_mm_xor_si128(c2[1126],simde_mm_xor_si128(c2[2004],simde_mm_xor_si128(c2[3106],simde_mm_xor_si128(c2[3345],simde_mm_xor_si128(c2[3125],simde_mm_xor_si128(c2[704],simde_mm_xor_si128(c2[484],simde_mm_xor_si128(c2[3124],simde_mm_xor_si128(c2[494],simde_mm_xor_si128(c2[947],simde_mm_xor_si128(c2[727],simde_mm_xor_si128(c2[1836],simde_mm_xor_si128(c2[1616],simde_mm_xor_si128(c2[291],simde_mm_xor_si128(c2[1389],simde_mm_xor_si128(c2[1169],simde_mm_xor_si128(c2[2493],simde_mm_xor_si128(c2[3371],simde_mm_xor_si128(c2[3392],simde_mm_xor_si128(c2[971],simde_mm_xor_si128(c2[751],simde_mm_xor_si128(c2[534],simde_mm_xor_si128(c2[1412],simde_mm_xor_si128(c2[3168],simde_mm_xor_si128(c2[538],simde_mm_xor_si128(c2[331],simde_mm_xor_si128(c2[1440],simde_mm_xor_si128(c2[1220],simde_mm_xor_si128(c2[3418],simde_mm_xor_si128(c2[777],simde_mm_xor_si128(c2[2115],simde_mm_xor_si128(c2[1895],simde_mm_xor_si128(c2[2993],simde_mm_xor_si128(c2[2773],simde_mm_xor_si128(c2[1676],simde_mm_xor_si128(c2[2774],simde_mm_xor_si128(c2[2554],simde_mm_xor_si128(c2[354],simde_mm_xor_si128(c2[1232],simde_mm_xor_si128(c2[1257],simde_mm_xor_si128(c2[2355],simde_mm_xor_si128(c2[2135],simde_mm_xor_si128(c2[2140],simde_mm_xor_si128(c2[3018],simde_mm_xor_si128(c2[2596],simde_mm_xor_si128(c2[186],simde_mm_xor_si128(c2[3485],simde_mm_xor_si128(c2[3265],simde_mm_xor_si128(c2[624],simde_mm_xor_si128(c2[398],simde_mm_xor_si128(c2[1276],simde_mm_xor_si128(c2[2846],simde_mm_xor_si128(c2[2626],simde_mm_xor_si128(c2[205],simde_mm_xor_si128(c2[3504],simde_mm_xor_si128(c2[1307],simde_mm_xor_si128(c2[2405],simde_mm_xor_si128(c2[2185],simde_mm_xor_si128(c2[2185],simde_mm_xor_si128(c2[3063],c2[3287])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
 
 //row: 17
-     d2[187]=_mm_xor_si128(c2[2202],_mm_xor_si128(c2[1982],_mm_xor_si128(c2[3308],_mm_xor_si128(c2[3088],_mm_xor_si128(c2[3084],_mm_xor_si128(c2[880],_mm_xor_si128(c2[660],_mm_xor_si128(c2[3309],_mm_xor_si128(c2[885],_mm_xor_si128(c2[2883],_mm_xor_si128(c2[2663],_mm_xor_si128(c2[470],_mm_xor_si128(c2[250],_mm_xor_si128(c2[1125],_mm_xor_si128(c2[2231],_mm_xor_si128(c2[469],_mm_xor_si128(c2[1564],_mm_xor_si128(c2[1350],_mm_xor_si128(c2[2688],_mm_xor_si128(c2[2468],_mm_xor_si128(c2[264],_mm_xor_si128(c2[44],_mm_xor_si128(c2[2467],_mm_xor_si128(c2[54],_mm_xor_si128(c2[290],_mm_xor_si128(c2[70],_mm_xor_si128(c2[1396],_mm_xor_si128(c2[1176],_mm_xor_si128(c2[3153],_mm_xor_si128(c2[949],_mm_xor_si128(c2[729],_mm_xor_si128(c2[1836],_mm_xor_si128(c2[2931],_mm_xor_si128(c2[2735],_mm_xor_si128(c2[531],_mm_xor_si128(c2[311],_mm_xor_si128(c2[3396],_mm_xor_si128(c2[972],_mm_xor_si128(c2[2511],_mm_xor_si128(c2[98],_mm_xor_si128(c2[3193],_mm_xor_si128(c2[1000],_mm_xor_si128(c2[780],_mm_xor_si128(c2[2750],_mm_xor_si128(c2[337],_mm_xor_si128(c2[1651],_mm_xor_si128(c2[1458],_mm_xor_si128(c2[1238],_mm_xor_si128(c2[2553],_mm_xor_si128(c2[2333],_mm_xor_si128(c2[1019],_mm_xor_si128(c2[2334],_mm_xor_si128(c2[2114],_mm_xor_si128(c2[3216],_mm_xor_si128(c2[792],_mm_xor_si128(c2[600],_mm_xor_si128(c2[1915],_mm_xor_si128(c2[1695],_mm_xor_si128(c2[1483],_mm_xor_si128(c2[2578],_mm_xor_si128(c2[1939],_mm_xor_si128(c2[3265],_mm_xor_si128(c2[3045],_mm_xor_si128(c2[2597],_mm_xor_si128(c2[184],_mm_xor_si128(c2[3260],_mm_xor_si128(c2[836],_mm_xor_si128(c2[2178],_mm_xor_si128(c2[1958],_mm_xor_si128(c2[3284],_mm_xor_si128(c2[3064],_mm_xor_si128(c2[639],_mm_xor_si128(c2[1965],_mm_xor_si128(c2[1745],_mm_xor_si128(c2[1528],c2[2623])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+     d2[187]=simde_mm_xor_si128(c2[2202],simde_mm_xor_si128(c2[1982],simde_mm_xor_si128(c2[3308],simde_mm_xor_si128(c2[3088],simde_mm_xor_si128(c2[3084],simde_mm_xor_si128(c2[880],simde_mm_xor_si128(c2[660],simde_mm_xor_si128(c2[3309],simde_mm_xor_si128(c2[885],simde_mm_xor_si128(c2[2883],simde_mm_xor_si128(c2[2663],simde_mm_xor_si128(c2[470],simde_mm_xor_si128(c2[250],simde_mm_xor_si128(c2[1125],simde_mm_xor_si128(c2[2231],simde_mm_xor_si128(c2[469],simde_mm_xor_si128(c2[1564],simde_mm_xor_si128(c2[1350],simde_mm_xor_si128(c2[2688],simde_mm_xor_si128(c2[2468],simde_mm_xor_si128(c2[264],simde_mm_xor_si128(c2[44],simde_mm_xor_si128(c2[2467],simde_mm_xor_si128(c2[54],simde_mm_xor_si128(c2[290],simde_mm_xor_si128(c2[70],simde_mm_xor_si128(c2[1396],simde_mm_xor_si128(c2[1176],simde_mm_xor_si128(c2[3153],simde_mm_xor_si128(c2[949],simde_mm_xor_si128(c2[729],simde_mm_xor_si128(c2[1836],simde_mm_xor_si128(c2[2931],simde_mm_xor_si128(c2[2735],simde_mm_xor_si128(c2[531],simde_mm_xor_si128(c2[311],simde_mm_xor_si128(c2[3396],simde_mm_xor_si128(c2[972],simde_mm_xor_si128(c2[2511],simde_mm_xor_si128(c2[98],simde_mm_xor_si128(c2[3193],simde_mm_xor_si128(c2[1000],simde_mm_xor_si128(c2[780],simde_mm_xor_si128(c2[2750],simde_mm_xor_si128(c2[337],simde_mm_xor_si128(c2[1651],simde_mm_xor_si128(c2[1458],simde_mm_xor_si128(c2[1238],simde_mm_xor_si128(c2[2553],simde_mm_xor_si128(c2[2333],simde_mm_xor_si128(c2[1019],simde_mm_xor_si128(c2[2334],simde_mm_xor_si128(c2[2114],simde_mm_xor_si128(c2[3216],simde_mm_xor_si128(c2[792],simde_mm_xor_si128(c2[600],simde_mm_xor_si128(c2[1915],simde_mm_xor_si128(c2[1695],simde_mm_xor_si128(c2[1483],simde_mm_xor_si128(c2[2578],simde_mm_xor_si128(c2[1939],simde_mm_xor_si128(c2[3265],simde_mm_xor_si128(c2[3045],simde_mm_xor_si128(c2[2597],simde_mm_xor_si128(c2[184],simde_mm_xor_si128(c2[3260],simde_mm_xor_si128(c2[836],simde_mm_xor_si128(c2[2178],simde_mm_xor_si128(c2[1958],simde_mm_xor_si128(c2[3284],simde_mm_xor_si128(c2[3064],simde_mm_xor_si128(c2[639],simde_mm_xor_si128(c2[1965],simde_mm_xor_si128(c2[1745],simde_mm_xor_si128(c2[1528],c2[2623])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
 
 //row: 18
-     d2[198]=_mm_xor_si128(c2[2870],_mm_xor_si128(c2[3433],c2[1475]));
+     d2[198]=simde_mm_xor_si128(c2[2870],simde_mm_xor_si128(c2[3433],c2[1475]));
 
 //row: 19
-     d2[209]=_mm_xor_si128(c2[2647],_mm_xor_si128(c2[230],_mm_xor_si128(c2[444],_mm_xor_si128(c2[2860],_mm_xor_si128(c2[3328],_mm_xor_si128(c2[1790],_mm_xor_si128(c2[1123],_mm_xor_si128(c2[1570],_mm_xor_si128(c2[3133],_mm_xor_si128(c2[3132],_mm_xor_si128(c2[735],_mm_xor_si128(c2[288],_mm_xor_si128(c2[2490],_mm_xor_si128(c2[3389],_mm_xor_si128(c2[531],_mm_xor_si128(c2[3176],_mm_xor_si128(c2[339],_mm_xor_si128(c2[3415],_mm_xor_si128(c2[1892],_mm_xor_si128(c2[1673],_mm_xor_si128(c2[362],_mm_xor_si128(c2[1254],_mm_xor_si128(c2[2137],_mm_xor_si128(c2[2604],_mm_xor_si128(c2[3262],_mm_xor_si128(c2[406],_mm_xor_si128(c2[2623],_mm_xor_si128(c2[1304],c2[2182]))))))))))))))))))))))))))));
+     d2[209]=simde_mm_xor_si128(c2[2647],simde_mm_xor_si128(c2[230],simde_mm_xor_si128(c2[444],simde_mm_xor_si128(c2[2860],simde_mm_xor_si128(c2[3328],simde_mm_xor_si128(c2[1790],simde_mm_xor_si128(c2[1123],simde_mm_xor_si128(c2[1570],simde_mm_xor_si128(c2[3133],simde_mm_xor_si128(c2[3132],simde_mm_xor_si128(c2[735],simde_mm_xor_si128(c2[288],simde_mm_xor_si128(c2[2490],simde_mm_xor_si128(c2[3389],simde_mm_xor_si128(c2[531],simde_mm_xor_si128(c2[3176],simde_mm_xor_si128(c2[339],simde_mm_xor_si128(c2[3415],simde_mm_xor_si128(c2[1892],simde_mm_xor_si128(c2[1673],simde_mm_xor_si128(c2[362],simde_mm_xor_si128(c2[1254],simde_mm_xor_si128(c2[2137],simde_mm_xor_si128(c2[2604],simde_mm_xor_si128(c2[3262],simde_mm_xor_si128(c2[406],simde_mm_xor_si128(c2[2623],simde_mm_xor_si128(c2[1304],c2[2182]))))))))))))))))))))))))))));
 
 //row: 20
-     d2[220]=_mm_xor_si128(c2[1989],_mm_xor_si128(c2[1769],_mm_xor_si128(c2[2860],_mm_xor_si128(c2[3085],_mm_xor_si128(c2[2670],_mm_xor_si128(c2[2450],_mm_xor_si128(c2[912],_mm_xor_si128(c2[245],_mm_xor_si128(c2[30],_mm_xor_si128(c2[2464],_mm_xor_si128(c2[2244],_mm_xor_si128(c2[2254],_mm_xor_si128(c2[66],_mm_xor_si128(c2[3376],_mm_xor_si128(c2[2929],_mm_xor_si128(c2[1612],_mm_xor_si128(c2[2511],_mm_xor_si128(c2[3172],_mm_xor_si128(c2[2298],_mm_xor_si128(c2[971],_mm_xor_si128(c2[2980],_mm_xor_si128(c2[2537],_mm_xor_si128(c2[1234],_mm_xor_si128(c2[1014],_mm_xor_si128(c2[795],_mm_xor_si128(c2[2992],_mm_xor_si128(c2[376],_mm_xor_si128(c2[1259],_mm_xor_si128(c2[1726],_mm_xor_si128(c2[2384],_mm_xor_si128(c2[3036],_mm_xor_si128(c2[1965],_mm_xor_si128(c2[1745],_mm_xor_si128(c2[426],c2[1304]))))))))))))))))))))))))))))))))));
+     d2[220]=simde_mm_xor_si128(c2[1989],simde_mm_xor_si128(c2[1769],simde_mm_xor_si128(c2[2860],simde_mm_xor_si128(c2[3085],simde_mm_xor_si128(c2[2670],simde_mm_xor_si128(c2[2450],simde_mm_xor_si128(c2[912],simde_mm_xor_si128(c2[245],simde_mm_xor_si128(c2[30],simde_mm_xor_si128(c2[2464],simde_mm_xor_si128(c2[2244],simde_mm_xor_si128(c2[2254],simde_mm_xor_si128(c2[66],simde_mm_xor_si128(c2[3376],simde_mm_xor_si128(c2[2929],simde_mm_xor_si128(c2[1612],simde_mm_xor_si128(c2[2511],simde_mm_xor_si128(c2[3172],simde_mm_xor_si128(c2[2298],simde_mm_xor_si128(c2[971],simde_mm_xor_si128(c2[2980],simde_mm_xor_si128(c2[2537],simde_mm_xor_si128(c2[1234],simde_mm_xor_si128(c2[1014],simde_mm_xor_si128(c2[795],simde_mm_xor_si128(c2[2992],simde_mm_xor_si128(c2[376],simde_mm_xor_si128(c2[1259],simde_mm_xor_si128(c2[1726],simde_mm_xor_si128(c2[2384],simde_mm_xor_si128(c2[3036],simde_mm_xor_si128(c2[1965],simde_mm_xor_si128(c2[1745],simde_mm_xor_si128(c2[426],c2[1304]))))))))))))))))))))))))))))))))));
 
 //row: 21
-     d2[231]=_mm_xor_si128(c2[661],_mm_xor_si128(c2[1763],_mm_xor_si128(c2[1988],_mm_xor_si128(c2[2649],_mm_xor_si128(c2[1342],_mm_xor_si128(c2[3323],_mm_xor_si128(c2[2887],_mm_xor_si128(c2[2667],_mm_xor_si128(c2[1147],_mm_xor_si128(c2[1366],_mm_xor_si128(c2[1146],_mm_xor_si128(c2[2268],_mm_xor_si128(c2[1832],_mm_xor_si128(c2[515],_mm_xor_si128(c2[1414],_mm_xor_si128(c2[2075],_mm_xor_si128(c2[1410],_mm_xor_si128(c2[1190],_mm_xor_si128(c2[1872],_mm_xor_si128(c2[1660],_mm_xor_si128(c2[1440],_mm_xor_si128(c2[3436],_mm_xor_si128(c2[3217],_mm_xor_si128(c2[2115],_mm_xor_si128(c2[1895],_mm_xor_si128(c2[2798],_mm_xor_si128(c2[382],_mm_xor_si128(c2[162],_mm_xor_si128(c2[618],_mm_xor_si128(c2[1276],_mm_xor_si128(c2[2159],_mm_xor_si128(c2[1939],_mm_xor_si128(c2[1506],_mm_xor_si128(c2[648],_mm_xor_si128(c2[2848],_mm_xor_si128(c2[427],c2[207]))))))))))))))))))))))))))))))))))));
+     d2[231]=simde_mm_xor_si128(c2[661],simde_mm_xor_si128(c2[1763],simde_mm_xor_si128(c2[1988],simde_mm_xor_si128(c2[2649],simde_mm_xor_si128(c2[1342],simde_mm_xor_si128(c2[3323],simde_mm_xor_si128(c2[2887],simde_mm_xor_si128(c2[2667],simde_mm_xor_si128(c2[1147],simde_mm_xor_si128(c2[1366],simde_mm_xor_si128(c2[1146],simde_mm_xor_si128(c2[2268],simde_mm_xor_si128(c2[1832],simde_mm_xor_si128(c2[515],simde_mm_xor_si128(c2[1414],simde_mm_xor_si128(c2[2075],simde_mm_xor_si128(c2[1410],simde_mm_xor_si128(c2[1190],simde_mm_xor_si128(c2[1872],simde_mm_xor_si128(c2[1660],simde_mm_xor_si128(c2[1440],simde_mm_xor_si128(c2[3436],simde_mm_xor_si128(c2[3217],simde_mm_xor_si128(c2[2115],simde_mm_xor_si128(c2[1895],simde_mm_xor_si128(c2[2798],simde_mm_xor_si128(c2[382],simde_mm_xor_si128(c2[162],simde_mm_xor_si128(c2[618],simde_mm_xor_si128(c2[1276],simde_mm_xor_si128(c2[2159],simde_mm_xor_si128(c2[1939],simde_mm_xor_si128(c2[1506],simde_mm_xor_si128(c2[648],simde_mm_xor_si128(c2[2848],simde_mm_xor_si128(c2[427],c2[207]))))))))))))))))))))))))))))))))))));
 
 //row: 22
-     d2[242]=_mm_xor_si128(c2[463],c2[714]);
+     d2[242]=simde_mm_xor_si128(c2[463],c2[714]);
 
 //row: 23
-     d2[253]=_mm_xor_si128(c2[3086],_mm_xor_si128(c2[954],c2[1439]));
+     d2[253]=simde_mm_xor_si128(c2[3086],simde_mm_xor_si128(c2[954],c2[1439]));
 
 //row: 24
-     d2[264]=_mm_xor_si128(c2[249],_mm_xor_si128(c2[2690],c2[3061]));
+     d2[264]=simde_mm_xor_si128(c2[249],simde_mm_xor_si128(c2[2690],c2[3061]));
 
 //row: 25
-     d2[275]=_mm_xor_si128(c2[1764],c2[1878]);
+     d2[275]=simde_mm_xor_si128(c2[1764],c2[1878]);
 
 //row: 26
-     d2[286]=_mm_xor_si128(c2[222],_mm_xor_si128(c2[2],_mm_xor_si128(c2[2647],_mm_xor_si128(c2[1324],_mm_xor_si128(c2[1104],_mm_xor_si128(c2[230],_mm_xor_si128(c2[1329],_mm_xor_si128(c2[444],_mm_xor_si128(c2[903],_mm_xor_si128(c2[683],_mm_xor_si128(c2[3328],_mm_xor_si128(c2[2664],_mm_xor_si128(c2[1790],_mm_xor_si128(c2[2008],_mm_xor_si128(c2[1343],_mm_xor_si128(c2[1123],_mm_xor_si128(c2[708],_mm_xor_si128(c2[488],_mm_xor_si128(c2[3133],_mm_xor_si128(c2[487],_mm_xor_si128(c2[3352],_mm_xor_si128(c2[3132],_mm_xor_si128(c2[926],_mm_xor_si128(c2[1829],_mm_xor_si128(c2[1609],_mm_xor_si128(c2[735],_mm_xor_si128(c2[1393],_mm_xor_si128(c2[1173],_mm_xor_si128(c2[288],_mm_xor_si128(c2[3375],_mm_xor_si128(c2[2490],_mm_xor_si128(c2[975],_mm_xor_si128(c2[755],_mm_xor_si128(c2[3389],_mm_xor_si128(c2[1416],_mm_xor_si128(c2[531],_mm_xor_si128(c2[531],_mm_xor_si128(c2[3396],_mm_xor_si128(c2[3176],_mm_xor_si128(c2[1433],_mm_xor_si128(c2[1213],_mm_xor_si128(c2[339],_mm_xor_si128(c2[770],_mm_xor_si128(c2[116],_mm_xor_si128(c2[3415],_mm_xor_si128(c2[2997],_mm_xor_si128(c2[2777],_mm_xor_si128(c2[1892],_mm_xor_si128(c2[2778],_mm_xor_si128(c2[2558],_mm_xor_si128(c2[1673],_mm_xor_si128(c2[1236],_mm_xor_si128(c2[582],_mm_xor_si128(c2[362],_mm_xor_si128(c2[2359],_mm_xor_si128(c2[2139],_mm_xor_si128(c2[1254],_mm_xor_si128(c2[3022],_mm_xor_si128(c2[2357],_mm_xor_si128(c2[2137],_mm_xor_si128(c2[1476],_mm_xor_si128(c2[179],_mm_xor_si128(c2[3478],_mm_xor_si128(c2[2604],_mm_xor_si128(c2[617],_mm_xor_si128(c2[3262],_mm_xor_si128(c2[1280],_mm_xor_si128(c2[626],_mm_xor_si128(c2[406],_mm_xor_si128(c2[198],_mm_xor_si128(c2[3508],_mm_xor_si128(c2[2623],_mm_xor_si128(c2[2398],_mm_xor_si128(c2[2178],_mm_xor_si128(c2[1304],_mm_xor_si128(c2[3067],_mm_xor_si128(c2[2402],c2[2182])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+     d2[286]=simde_mm_xor_si128(c2[222],simde_mm_xor_si128(c2[2],simde_mm_xor_si128(c2[2647],simde_mm_xor_si128(c2[1324],simde_mm_xor_si128(c2[1104],simde_mm_xor_si128(c2[230],simde_mm_xor_si128(c2[1329],simde_mm_xor_si128(c2[444],simde_mm_xor_si128(c2[903],simde_mm_xor_si128(c2[683],simde_mm_xor_si128(c2[3328],simde_mm_xor_si128(c2[2664],simde_mm_xor_si128(c2[1790],simde_mm_xor_si128(c2[2008],simde_mm_xor_si128(c2[1343],simde_mm_xor_si128(c2[1123],simde_mm_xor_si128(c2[708],simde_mm_xor_si128(c2[488],simde_mm_xor_si128(c2[3133],simde_mm_xor_si128(c2[487],simde_mm_xor_si128(c2[3352],simde_mm_xor_si128(c2[3132],simde_mm_xor_si128(c2[926],simde_mm_xor_si128(c2[1829],simde_mm_xor_si128(c2[1609],simde_mm_xor_si128(c2[735],simde_mm_xor_si128(c2[1393],simde_mm_xor_si128(c2[1173],simde_mm_xor_si128(c2[288],simde_mm_xor_si128(c2[3375],simde_mm_xor_si128(c2[2490],simde_mm_xor_si128(c2[975],simde_mm_xor_si128(c2[755],simde_mm_xor_si128(c2[3389],simde_mm_xor_si128(c2[1416],simde_mm_xor_si128(c2[531],simde_mm_xor_si128(c2[531],simde_mm_xor_si128(c2[3396],simde_mm_xor_si128(c2[3176],simde_mm_xor_si128(c2[1433],simde_mm_xor_si128(c2[1213],simde_mm_xor_si128(c2[339],simde_mm_xor_si128(c2[770],simde_mm_xor_si128(c2[116],simde_mm_xor_si128(c2[3415],simde_mm_xor_si128(c2[2997],simde_mm_xor_si128(c2[2777],simde_mm_xor_si128(c2[1892],simde_mm_xor_si128(c2[2778],simde_mm_xor_si128(c2[2558],simde_mm_xor_si128(c2[1673],simde_mm_xor_si128(c2[1236],simde_mm_xor_si128(c2[582],simde_mm_xor_si128(c2[362],simde_mm_xor_si128(c2[2359],simde_mm_xor_si128(c2[2139],simde_mm_xor_si128(c2[1254],simde_mm_xor_si128(c2[3022],simde_mm_xor_si128(c2[2357],simde_mm_xor_si128(c2[2137],simde_mm_xor_si128(c2[1476],simde_mm_xor_si128(c2[179],simde_mm_xor_si128(c2[3478],simde_mm_xor_si128(c2[2604],simde_mm_xor_si128(c2[617],simde_mm_xor_si128(c2[3262],simde_mm_xor_si128(c2[1280],simde_mm_xor_si128(c2[626],simde_mm_xor_si128(c2[406],simde_mm_xor_si128(c2[198],simde_mm_xor_si128(c2[3508],simde_mm_xor_si128(c2[2623],simde_mm_xor_si128(c2[2398],simde_mm_xor_si128(c2[2178],simde_mm_xor_si128(c2[1304],simde_mm_xor_si128(c2[3067],simde_mm_xor_si128(c2[2402],c2[2182])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
 
 //row: 27
-     d2[297]=_mm_xor_si128(c2[2202],c2[1898]);
+     d2[297]=simde_mm_xor_si128(c2[2202],c2[1898]);
 
 //row: 28
-     d2[308]=_mm_xor_si128(c2[26],_mm_xor_si128(c2[1805],c2[1219]));
+     d2[308]=simde_mm_xor_si128(c2[26],simde_mm_xor_si128(c2[1805],c2[1219]));
 
 //row: 29
-     d2[319]=_mm_xor_si128(c2[2428],c2[318]);
+     d2[319]=simde_mm_xor_si128(c2[2428],c2[318]);
 
 //row: 30
-     d2[330]=_mm_xor_si128(c2[929],_mm_xor_si128(c2[2980],_mm_xor_si128(c2[3019],c2[3059])));
+     d2[330]=simde_mm_xor_si128(c2[929],simde_mm_xor_si128(c2[2980],simde_mm_xor_si128(c2[3019],c2[3059])));
 
 //row: 31
-     d2[341]=_mm_xor_si128(c2[3307],_mm_xor_si128(c2[890],_mm_xor_si128(c2[1104],_mm_xor_si128(c2[469],_mm_xor_si128(c2[2450],_mm_xor_si128(c2[2003],_mm_xor_si128(c2[1783],_mm_xor_si128(c2[1129],_mm_xor_si128(c2[274],_mm_xor_si128(c2[493],_mm_xor_si128(c2[273],_mm_xor_si128(c2[1395],_mm_xor_si128(c2[948],_mm_xor_si128(c2[3150],_mm_xor_si128(c2[530],_mm_xor_si128(c2[1191],_mm_xor_si128(c2[537],_mm_xor_si128(c2[317],_mm_xor_si128(c2[999],_mm_xor_si128(c2[776],_mm_xor_si128(c2[556],_mm_xor_si128(c2[2552],_mm_xor_si128(c2[2333],_mm_xor_si128(c2[1242],_mm_xor_si128(c2[1022],_mm_xor_si128(c2[1914],_mm_xor_si128(c2[3017],_mm_xor_si128(c2[2797],_mm_xor_si128(c2[3264],_mm_xor_si128(c2[403],_mm_xor_si128(c2[1286],_mm_xor_si128(c2[1066],_mm_xor_si128(c2[3283],_mm_xor_si128(c2[1964],_mm_xor_si128(c2[3062],c2[2842])))))))))))))))))))))))))))))))))));
+     d2[341]=simde_mm_xor_si128(c2[3307],simde_mm_xor_si128(c2[890],simde_mm_xor_si128(c2[1104],simde_mm_xor_si128(c2[469],simde_mm_xor_si128(c2[2450],simde_mm_xor_si128(c2[2003],simde_mm_xor_si128(c2[1783],simde_mm_xor_si128(c2[1129],simde_mm_xor_si128(c2[274],simde_mm_xor_si128(c2[493],simde_mm_xor_si128(c2[273],simde_mm_xor_si128(c2[1395],simde_mm_xor_si128(c2[948],simde_mm_xor_si128(c2[3150],simde_mm_xor_si128(c2[530],simde_mm_xor_si128(c2[1191],simde_mm_xor_si128(c2[537],simde_mm_xor_si128(c2[317],simde_mm_xor_si128(c2[999],simde_mm_xor_si128(c2[776],simde_mm_xor_si128(c2[556],simde_mm_xor_si128(c2[2552],simde_mm_xor_si128(c2[2333],simde_mm_xor_si128(c2[1242],simde_mm_xor_si128(c2[1022],simde_mm_xor_si128(c2[1914],simde_mm_xor_si128(c2[3017],simde_mm_xor_si128(c2[2797],simde_mm_xor_si128(c2[3264],simde_mm_xor_si128(c2[403],simde_mm_xor_si128(c2[1286],simde_mm_xor_si128(c2[1066],simde_mm_xor_si128(c2[3283],simde_mm_xor_si128(c2[1964],simde_mm_xor_si128(c2[3062],c2[2842])))))))))))))))))))))))))))))))))));
 
 //row: 32
-     d2[352]=_mm_xor_si128(c2[2207],_mm_xor_si128(c2[1987],_mm_xor_si128(c2[3309],_mm_xor_si128(c2[3089],_mm_xor_si128(c2[3303],_mm_xor_si128(c2[887],_mm_xor_si128(c2[2888],_mm_xor_si128(c2[2668],_mm_xor_si128(c2[1130],_mm_xor_si128(c2[463],_mm_xor_si128(c2[2693],_mm_xor_si128(c2[2473],_mm_xor_si128(c2[2472],_mm_xor_si128(c2[295],_mm_xor_si128(c2[75],_mm_xor_si128(c2[3367],_mm_xor_si128(c2[3147],_mm_xor_si128(c2[1830],_mm_xor_si128(c2[2949],_mm_xor_si128(c2[2729],_mm_xor_si128(c2[3390],_mm_xor_si128(c2[2516],_mm_xor_si128(c2[3418],_mm_xor_si128(c2[3198],_mm_xor_si128(c2[2755],_mm_xor_si128(c2[2094],_mm_xor_si128(c2[1452],_mm_xor_si128(c2[1232],_mm_xor_si128(c2[1233],_mm_xor_si128(c2[1013],_mm_xor_si128(c2[3221],_mm_xor_si128(c2[814],_mm_xor_si128(c2[594],_mm_xor_si128(c2[1477],_mm_xor_si128(c2[2164],_mm_xor_si128(c2[1944],_mm_xor_si128(c2[2602],_mm_xor_si128(c2[3265],_mm_xor_si128(c2[2183],_mm_xor_si128(c2[1963],_mm_xor_si128(c2[864],_mm_xor_si128(c2[644],c2[1522]))))))))))))))))))))))))))))))))))))))))));
+     d2[352]=simde_mm_xor_si128(c2[2207],simde_mm_xor_si128(c2[1987],simde_mm_xor_si128(c2[3309],simde_mm_xor_si128(c2[3089],simde_mm_xor_si128(c2[3303],simde_mm_xor_si128(c2[887],simde_mm_xor_si128(c2[2888],simde_mm_xor_si128(c2[2668],simde_mm_xor_si128(c2[1130],simde_mm_xor_si128(c2[463],simde_mm_xor_si128(c2[2693],simde_mm_xor_si128(c2[2473],simde_mm_xor_si128(c2[2472],simde_mm_xor_si128(c2[295],simde_mm_xor_si128(c2[75],simde_mm_xor_si128(c2[3367],simde_mm_xor_si128(c2[3147],simde_mm_xor_si128(c2[1830],simde_mm_xor_si128(c2[2949],simde_mm_xor_si128(c2[2729],simde_mm_xor_si128(c2[3390],simde_mm_xor_si128(c2[2516],simde_mm_xor_si128(c2[3418],simde_mm_xor_si128(c2[3198],simde_mm_xor_si128(c2[2755],simde_mm_xor_si128(c2[2094],simde_mm_xor_si128(c2[1452],simde_mm_xor_si128(c2[1232],simde_mm_xor_si128(c2[1233],simde_mm_xor_si128(c2[1013],simde_mm_xor_si128(c2[3221],simde_mm_xor_si128(c2[814],simde_mm_xor_si128(c2[594],simde_mm_xor_si128(c2[1477],simde_mm_xor_si128(c2[2164],simde_mm_xor_si128(c2[1944],simde_mm_xor_si128(c2[2602],simde_mm_xor_si128(c2[3265],simde_mm_xor_si128(c2[2183],simde_mm_xor_si128(c2[1963],simde_mm_xor_si128(c2[864],simde_mm_xor_si128(c2[644],c2[1522]))))))))))))))))))))))))))))))))))))))))));
 
 //row: 33
-     d2[363]=_mm_xor_si128(c2[1760],_mm_xor_si128(c2[2862],_mm_xor_si128(c2[3087],_mm_xor_si128(c2[2452],_mm_xor_si128(c2[903],_mm_xor_si128(c2[247],_mm_xor_si128(c2[2246],_mm_xor_si128(c2[2245],_mm_xor_si128(c2[2030],_mm_xor_si128(c2[3367],_mm_xor_si128(c2[2931],_mm_xor_si128(c2[1614],_mm_xor_si128(c2[2513],_mm_xor_si128(c2[3174],_mm_xor_si128(c2[2289],_mm_xor_si128(c2[2971],_mm_xor_si128(c2[2539],_mm_xor_si128(c2[1016],_mm_xor_si128(c2[797],_mm_xor_si128(c2[2994],_mm_xor_si128(c2[378],_mm_xor_si128(c2[1261],_mm_xor_si128(c2[2142],_mm_xor_si128(c2[1717],_mm_xor_si128(c2[2386],_mm_xor_si128(c2[3038],_mm_xor_si128(c2[1747],_mm_xor_si128(c2[428],c2[1306]))))))))))))))))))))))))))));
+     d2[363]=simde_mm_xor_si128(c2[1760],simde_mm_xor_si128(c2[2862],simde_mm_xor_si128(c2[3087],simde_mm_xor_si128(c2[2452],simde_mm_xor_si128(c2[903],simde_mm_xor_si128(c2[247],simde_mm_xor_si128(c2[2246],simde_mm_xor_si128(c2[2245],simde_mm_xor_si128(c2[2030],simde_mm_xor_si128(c2[3367],simde_mm_xor_si128(c2[2931],simde_mm_xor_si128(c2[1614],simde_mm_xor_si128(c2[2513],simde_mm_xor_si128(c2[3174],simde_mm_xor_si128(c2[2289],simde_mm_xor_si128(c2[2971],simde_mm_xor_si128(c2[2539],simde_mm_xor_si128(c2[1016],simde_mm_xor_si128(c2[797],simde_mm_xor_si128(c2[2994],simde_mm_xor_si128(c2[378],simde_mm_xor_si128(c2[1261],simde_mm_xor_si128(c2[2142],simde_mm_xor_si128(c2[1717],simde_mm_xor_si128(c2[2386],simde_mm_xor_si128(c2[3038],simde_mm_xor_si128(c2[1747],simde_mm_xor_si128(c2[428],c2[1306]))))))))))))))))))))))))))));
 
 //row: 34
-     d2[374]=_mm_xor_si128(c2[1101],_mm_xor_si128(c2[881],_mm_xor_si128(c2[2868],_mm_xor_si128(c2[2203],_mm_xor_si128(c2[1983],_mm_xor_si128(c2[440],_mm_xor_si128(c2[2208],_mm_xor_si128(c2[665],_mm_xor_si128(c2[2420],_mm_xor_si128(c2[1782],_mm_xor_si128(c2[1562],_mm_xor_si128(c2[30],_mm_xor_si128(c2[24],_mm_xor_si128(c2[2011],_mm_xor_si128(c2[2887],_mm_xor_si128(c2[1564],_mm_xor_si128(c2[1344],_mm_xor_si128(c2[1587],_mm_xor_si128(c2[1367],_mm_xor_si128(c2[3354],_mm_xor_si128(c2[1366],_mm_xor_si128(c2[54],_mm_xor_si128(c2[3353],_mm_xor_si128(c2[2708],_mm_xor_si128(c2[2488],_mm_xor_si128(c2[956],_mm_xor_si128(c2[2272],_mm_xor_si128(c2[2052],_mm_xor_si128(c2[509],_mm_xor_si128(c2[735],_mm_xor_si128(c2[2711],_mm_xor_si128(c2[1854],_mm_xor_si128(c2[1634],_mm_xor_si128(c2[91],_mm_xor_si128(c2[2295],_mm_xor_si128(c2[752],_mm_xor_si128(c2[1410],_mm_xor_si128(c2[98],_mm_xor_si128(c2[3397],_mm_xor_si128(c2[2312],_mm_xor_si128(c2[2092],_mm_xor_si128(c2[560],_mm_xor_si128(c2[1660],_mm_xor_si128(c2[337],_mm_xor_si128(c2[117],_mm_xor_si128(c2[357],_mm_xor_si128(c2[137],_mm_xor_si128(c2[2113],_mm_xor_si128(c2[138],_mm_xor_si128(c2[3437],_mm_xor_si128(c2[1894],_mm_xor_si128(c2[2115],_mm_xor_si128(c2[792],_mm_xor_si128(c2[572],_mm_xor_si128(c2[3238],_mm_xor_si128(c2[3018],_mm_xor_si128(c2[1475],_mm_xor_si128(c2[382],_mm_xor_si128(c2[2578],_mm_xor_si128(c2[2358],_mm_xor_si128(c2[1058],_mm_xor_si128(c2[838],_mm_xor_si128(c2[2825],_mm_xor_si128(c2[1496],_mm_xor_si128(c2[3483],_mm_xor_si128(c2[2159],_mm_xor_si128(c2[836],_mm_xor_si128(c2[616],_mm_xor_si128(c2[1088],_mm_xor_si128(c2[868],_mm_xor_si128(c2[2844],_mm_xor_si128(c2[3288],_mm_xor_si128(c2[3068],_mm_xor_si128(c2[1525],_mm_xor_si128(c2[427],_mm_xor_si128(c2[2623],c2[2403]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+     d2[374]=simde_mm_xor_si128(c2[1101],simde_mm_xor_si128(c2[881],simde_mm_xor_si128(c2[2868],simde_mm_xor_si128(c2[2203],simde_mm_xor_si128(c2[1983],simde_mm_xor_si128(c2[440],simde_mm_xor_si128(c2[2208],simde_mm_xor_si128(c2[665],simde_mm_xor_si128(c2[2420],simde_mm_xor_si128(c2[1782],simde_mm_xor_si128(c2[1562],simde_mm_xor_si128(c2[30],simde_mm_xor_si128(c2[24],simde_mm_xor_si128(c2[2011],simde_mm_xor_si128(c2[2887],simde_mm_xor_si128(c2[1564],simde_mm_xor_si128(c2[1344],simde_mm_xor_si128(c2[1587],simde_mm_xor_si128(c2[1367],simde_mm_xor_si128(c2[3354],simde_mm_xor_si128(c2[1366],simde_mm_xor_si128(c2[54],simde_mm_xor_si128(c2[3353],simde_mm_xor_si128(c2[2708],simde_mm_xor_si128(c2[2488],simde_mm_xor_si128(c2[956],simde_mm_xor_si128(c2[2272],simde_mm_xor_si128(c2[2052],simde_mm_xor_si128(c2[509],simde_mm_xor_si128(c2[735],simde_mm_xor_si128(c2[2711],simde_mm_xor_si128(c2[1854],simde_mm_xor_si128(c2[1634],simde_mm_xor_si128(c2[91],simde_mm_xor_si128(c2[2295],simde_mm_xor_si128(c2[752],simde_mm_xor_si128(c2[1410],simde_mm_xor_si128(c2[98],simde_mm_xor_si128(c2[3397],simde_mm_xor_si128(c2[2312],simde_mm_xor_si128(c2[2092],simde_mm_xor_si128(c2[560],simde_mm_xor_si128(c2[1660],simde_mm_xor_si128(c2[337],simde_mm_xor_si128(c2[117],simde_mm_xor_si128(c2[357],simde_mm_xor_si128(c2[137],simde_mm_xor_si128(c2[2113],simde_mm_xor_si128(c2[138],simde_mm_xor_si128(c2[3437],simde_mm_xor_si128(c2[1894],simde_mm_xor_si128(c2[2115],simde_mm_xor_si128(c2[792],simde_mm_xor_si128(c2[572],simde_mm_xor_si128(c2[3238],simde_mm_xor_si128(c2[3018],simde_mm_xor_si128(c2[1475],simde_mm_xor_si128(c2[382],simde_mm_xor_si128(c2[2578],simde_mm_xor_si128(c2[2358],simde_mm_xor_si128(c2[1058],simde_mm_xor_si128(c2[838],simde_mm_xor_si128(c2[2825],simde_mm_xor_si128(c2[1496],simde_mm_xor_si128(c2[3483],simde_mm_xor_si128(c2[2159],simde_mm_xor_si128(c2[836],simde_mm_xor_si128(c2[616],simde_mm_xor_si128(c2[1088],simde_mm_xor_si128(c2[868],simde_mm_xor_si128(c2[2844],simde_mm_xor_si128(c2[3288],simde_mm_xor_si128(c2[3068],simde_mm_xor_si128(c2[1525],simde_mm_xor_si128(c2[427],simde_mm_xor_si128(c2[2623],c2[2403]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
 
 //row: 35
-     d2[385]=_mm_xor_si128(c2[1769],_mm_xor_si128(c2[1549],_mm_xor_si128(c2[2640],_mm_xor_si128(c2[2865],_mm_xor_si128(c2[2450],_mm_xor_si128(c2[2230],_mm_xor_si128(c2[692],_mm_xor_si128(c2[25],_mm_xor_si128(c2[3109],_mm_xor_si128(c2[2244],_mm_xor_si128(c2[2024],_mm_xor_si128(c2[2034],_mm_xor_si128(c2[3376],_mm_xor_si128(c2[3156],_mm_xor_si128(c2[2709],_mm_xor_si128(c2[1392],_mm_xor_si128(c2[2291],_mm_xor_si128(c2[2952],_mm_xor_si128(c2[2078],_mm_xor_si128(c2[2760],_mm_xor_si128(c2[2317],_mm_xor_si128(c2[1879],_mm_xor_si128(c2[1014],_mm_xor_si128(c2[794],_mm_xor_si128(c2[575],_mm_xor_si128(c2[2772],_mm_xor_si128(c2[156],_mm_xor_si128(c2[1039],_mm_xor_si128(c2[1506],_mm_xor_si128(c2[2164],_mm_xor_si128(c2[2816],_mm_xor_si128(c2[1745],_mm_xor_si128(c2[1525],_mm_xor_si128(c2[206],c2[1084]))))))))))))))))))))))))))))))))));
+     d2[385]=simde_mm_xor_si128(c2[1769],simde_mm_xor_si128(c2[1549],simde_mm_xor_si128(c2[2640],simde_mm_xor_si128(c2[2865],simde_mm_xor_si128(c2[2450],simde_mm_xor_si128(c2[2230],simde_mm_xor_si128(c2[692],simde_mm_xor_si128(c2[25],simde_mm_xor_si128(c2[3109],simde_mm_xor_si128(c2[2244],simde_mm_xor_si128(c2[2024],simde_mm_xor_si128(c2[2034],simde_mm_xor_si128(c2[3376],simde_mm_xor_si128(c2[3156],simde_mm_xor_si128(c2[2709],simde_mm_xor_si128(c2[1392],simde_mm_xor_si128(c2[2291],simde_mm_xor_si128(c2[2952],simde_mm_xor_si128(c2[2078],simde_mm_xor_si128(c2[2760],simde_mm_xor_si128(c2[2317],simde_mm_xor_si128(c2[1879],simde_mm_xor_si128(c2[1014],simde_mm_xor_si128(c2[794],simde_mm_xor_si128(c2[575],simde_mm_xor_si128(c2[2772],simde_mm_xor_si128(c2[156],simde_mm_xor_si128(c2[1039],simde_mm_xor_si128(c2[1506],simde_mm_xor_si128(c2[2164],simde_mm_xor_si128(c2[2816],simde_mm_xor_si128(c2[1745],simde_mm_xor_si128(c2[1525],simde_mm_xor_si128(c2[206],c2[1084]))))))))))))))))))))))))))))))))));
 
 //row: 36
-     d2[396]=_mm_xor_si128(c2[1984],_mm_xor_si128(c2[2253],c2[382]));
+     d2[396]=simde_mm_xor_si128(c2[1984],simde_mm_xor_si128(c2[2253],c2[382]));
 
 //row: 37
-     d2[407]=_mm_xor_si128(c2[449],_mm_xor_si128(c2[221],_mm_xor_si128(c2[1540],_mm_xor_si128(c2[1323],_mm_xor_si128(c2[1765],_mm_xor_si128(c2[1548],_mm_xor_si128(c2[1130],_mm_xor_si128(c2[902],_mm_xor_si128(c2[3111],_mm_xor_si128(c2[2883],_mm_xor_si128(c2[2444],_mm_xor_si128(c2[2447],_mm_xor_si128(c2[2227],_mm_xor_si128(c2[924],_mm_xor_si128(c2[707],_mm_xor_si128(c2[934],_mm_xor_si128(c2[926],_mm_xor_si128(c2[706],_mm_xor_si128(c2[2056],_mm_xor_si128(c2[1828],_mm_xor_si128(c2[1609],_mm_xor_si128(c2[1392],_mm_xor_si128(c2[292],_mm_xor_si128(c2[75],_mm_xor_si128(c2[1191],_mm_xor_si128(c2[974],_mm_xor_si128(c2[1852],_mm_xor_si128(c2[1635],_mm_xor_si128(c2[978],_mm_xor_si128(c2[970],_mm_xor_si128(c2[750],_mm_xor_si128(c2[1660],_mm_xor_si128(c2[1432],_mm_xor_si128(c2[1217],_mm_xor_si128(c2[1220],_mm_xor_si128(c2[1000],_mm_xor_si128(c2[3213],_mm_xor_si128(c2[2996],_mm_xor_si128(c2[2994],_mm_xor_si128(c2[2777],_mm_xor_si128(c2[1672],_mm_xor_si128(c2[1675],_mm_xor_si128(c2[1455],_mm_xor_si128(c2[2575],_mm_xor_si128(c2[2358],_mm_xor_si128(c2[3458],_mm_xor_si128(c2[3461],_mm_xor_si128(c2[3241],_mm_xor_si128(c2[406],_mm_xor_si128(c2[178],_mm_xor_si128(c2[1064],_mm_xor_si128(c2[836],_mm_xor_si128(c2[1716],_mm_xor_si128(c2[1719],_mm_xor_si128(c2[1499],_mm_xor_si128(c2[425],_mm_xor_si128(c2[208],_mm_xor_si128(c2[2625],_mm_xor_si128(c2[2408],_mm_xor_si128(c2[3503],_mm_xor_si128(c2[3506],c2[3286])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+     d2[407]=simde_mm_xor_si128(c2[449],simde_mm_xor_si128(c2[221],simde_mm_xor_si128(c2[1540],simde_mm_xor_si128(c2[1323],simde_mm_xor_si128(c2[1765],simde_mm_xor_si128(c2[1548],simde_mm_xor_si128(c2[1130],simde_mm_xor_si128(c2[902],simde_mm_xor_si128(c2[3111],simde_mm_xor_si128(c2[2883],simde_mm_xor_si128(c2[2444],simde_mm_xor_si128(c2[2447],simde_mm_xor_si128(c2[2227],simde_mm_xor_si128(c2[924],simde_mm_xor_si128(c2[707],simde_mm_xor_si128(c2[934],simde_mm_xor_si128(c2[926],simde_mm_xor_si128(c2[706],simde_mm_xor_si128(c2[2056],simde_mm_xor_si128(c2[1828],simde_mm_xor_si128(c2[1609],simde_mm_xor_si128(c2[1392],simde_mm_xor_si128(c2[292],simde_mm_xor_si128(c2[75],simde_mm_xor_si128(c2[1191],simde_mm_xor_si128(c2[974],simde_mm_xor_si128(c2[1852],simde_mm_xor_si128(c2[1635],simde_mm_xor_si128(c2[978],simde_mm_xor_si128(c2[970],simde_mm_xor_si128(c2[750],simde_mm_xor_si128(c2[1660],simde_mm_xor_si128(c2[1432],simde_mm_xor_si128(c2[1217],simde_mm_xor_si128(c2[1220],simde_mm_xor_si128(c2[1000],simde_mm_xor_si128(c2[3213],simde_mm_xor_si128(c2[2996],simde_mm_xor_si128(c2[2994],simde_mm_xor_si128(c2[2777],simde_mm_xor_si128(c2[1672],simde_mm_xor_si128(c2[1675],simde_mm_xor_si128(c2[1455],simde_mm_xor_si128(c2[2575],simde_mm_xor_si128(c2[2358],simde_mm_xor_si128(c2[3458],simde_mm_xor_si128(c2[3461],simde_mm_xor_si128(c2[3241],simde_mm_xor_si128(c2[406],simde_mm_xor_si128(c2[178],simde_mm_xor_si128(c2[1064],simde_mm_xor_si128(c2[836],simde_mm_xor_si128(c2[1716],simde_mm_xor_si128(c2[1719],simde_mm_xor_si128(c2[1499],simde_mm_xor_si128(c2[425],simde_mm_xor_si128(c2[208],simde_mm_xor_si128(c2[2625],simde_mm_xor_si128(c2[2408],simde_mm_xor_si128(c2[3503],simde_mm_xor_si128(c2[3506],c2[3286])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
 
 //row: 38
-     d2[418]=_mm_xor_si128(c2[3310],_mm_xor_si128(c2[3090],_mm_xor_si128(c2[662],_mm_xor_si128(c2[887],_mm_xor_si128(c2[472],_mm_xor_si128(c2[252],_mm_xor_si128(c2[2222],_mm_xor_si128(c2[1566],_mm_xor_si128(c2[29],_mm_xor_si128(c2[266],_mm_xor_si128(c2[46],_mm_xor_si128(c2[45],_mm_xor_si128(c2[1387],_mm_xor_si128(c2[1167],_mm_xor_si128(c2[731],_mm_xor_si128(c2[2933],_mm_xor_si128(c2[313],_mm_xor_si128(c2[974],_mm_xor_si128(c2[89],_mm_xor_si128(c2[771],_mm_xor_si128(c2[339],_mm_xor_si128(c2[1650],_mm_xor_si128(c2[2555],_mm_xor_si128(c2[2335],_mm_xor_si128(c2[2116],_mm_xor_si128(c2[794],_mm_xor_si128(c2[1697],_mm_xor_si128(c2[2580],_mm_xor_si128(c2[3036],_mm_xor_si128(c2[186],_mm_xor_si128(c2[838],_mm_xor_si128(c2[3286],_mm_xor_si128(c2[3066],_mm_xor_si128(c2[1747],c2[2625]))))))))))))))))))))))))))))))))));
+     d2[418]=simde_mm_xor_si128(c2[3310],simde_mm_xor_si128(c2[3090],simde_mm_xor_si128(c2[662],simde_mm_xor_si128(c2[887],simde_mm_xor_si128(c2[472],simde_mm_xor_si128(c2[252],simde_mm_xor_si128(c2[2222],simde_mm_xor_si128(c2[1566],simde_mm_xor_si128(c2[29],simde_mm_xor_si128(c2[266],simde_mm_xor_si128(c2[46],simde_mm_xor_si128(c2[45],simde_mm_xor_si128(c2[1387],simde_mm_xor_si128(c2[1167],simde_mm_xor_si128(c2[731],simde_mm_xor_si128(c2[2933],simde_mm_xor_si128(c2[313],simde_mm_xor_si128(c2[974],simde_mm_xor_si128(c2[89],simde_mm_xor_si128(c2[771],simde_mm_xor_si128(c2[339],simde_mm_xor_si128(c2[1650],simde_mm_xor_si128(c2[2555],simde_mm_xor_si128(c2[2335],simde_mm_xor_si128(c2[2116],simde_mm_xor_si128(c2[794],simde_mm_xor_si128(c2[1697],simde_mm_xor_si128(c2[2580],simde_mm_xor_si128(c2[3036],simde_mm_xor_si128(c2[186],simde_mm_xor_si128(c2[838],simde_mm_xor_si128(c2[3286],simde_mm_xor_si128(c2[3066],simde_mm_xor_si128(c2[1747],c2[2625]))))))))))))))))))))))))))))))))));
 
 //row: 39
-     d2[429]=_mm_xor_si128(c2[1105],_mm_xor_si128(c2[885],_mm_xor_si128(c2[2207],_mm_xor_si128(c2[1987],_mm_xor_si128(c2[2201],_mm_xor_si128(c2[2866],_mm_xor_si128(c2[1786],_mm_xor_si128(c2[1566],_mm_xor_si128(c2[28],_mm_xor_si128(c2[2891],_mm_xor_si128(c2[1591],_mm_xor_si128(c2[1371],_mm_xor_si128(c2[1370],_mm_xor_si128(c2[2712],_mm_xor_si128(c2[2492],_mm_xor_si128(c2[2276],_mm_xor_si128(c2[2056],_mm_xor_si128(c2[728],_mm_xor_si128(c2[1858],_mm_xor_si128(c2[1638],_mm_xor_si128(c2[2288],_mm_xor_si128(c2[1414],_mm_xor_si128(c2[2316],_mm_xor_si128(c2[2096],_mm_xor_si128(c2[1653],_mm_xor_si128(c2[361],_mm_xor_si128(c2[141],_mm_xor_si128(c2[142],_mm_xor_si128(c2[3441],_mm_xor_si128(c2[2119],_mm_xor_si128(c2[3242],_mm_xor_si128(c2[3022],_mm_xor_si128(c2[375],_mm_xor_si128(c2[1474],_mm_xor_si128(c2[1062],_mm_xor_si128(c2[842],_mm_xor_si128(c2[1500],_mm_xor_si128(c2[2163],_mm_xor_si128(c2[1081],_mm_xor_si128(c2[861],_mm_xor_si128(c2[3281],_mm_xor_si128(c2[3061],c2[420]))))))))))))))))))))))))))))))))))))))))));
+     d2[429]=simde_mm_xor_si128(c2[1105],simde_mm_xor_si128(c2[885],simde_mm_xor_si128(c2[2207],simde_mm_xor_si128(c2[1987],simde_mm_xor_si128(c2[2201],simde_mm_xor_si128(c2[2866],simde_mm_xor_si128(c2[1786],simde_mm_xor_si128(c2[1566],simde_mm_xor_si128(c2[28],simde_mm_xor_si128(c2[2891],simde_mm_xor_si128(c2[1591],simde_mm_xor_si128(c2[1371],simde_mm_xor_si128(c2[1370],simde_mm_xor_si128(c2[2712],simde_mm_xor_si128(c2[2492],simde_mm_xor_si128(c2[2276],simde_mm_xor_si128(c2[2056],simde_mm_xor_si128(c2[728],simde_mm_xor_si128(c2[1858],simde_mm_xor_si128(c2[1638],simde_mm_xor_si128(c2[2288],simde_mm_xor_si128(c2[1414],simde_mm_xor_si128(c2[2316],simde_mm_xor_si128(c2[2096],simde_mm_xor_si128(c2[1653],simde_mm_xor_si128(c2[361],simde_mm_xor_si128(c2[141],simde_mm_xor_si128(c2[142],simde_mm_xor_si128(c2[3441],simde_mm_xor_si128(c2[2119],simde_mm_xor_si128(c2[3242],simde_mm_xor_si128(c2[3022],simde_mm_xor_si128(c2[375],simde_mm_xor_si128(c2[1474],simde_mm_xor_si128(c2[1062],simde_mm_xor_si128(c2[842],simde_mm_xor_si128(c2[1500],simde_mm_xor_si128(c2[2163],simde_mm_xor_si128(c2[1081],simde_mm_xor_si128(c2[861],simde_mm_xor_si128(c2[3281],simde_mm_xor_si128(c2[3061],c2[420]))))))))))))))))))))))))))))))))))))))))));
 
 //row: 40
-     d2[440]=_mm_xor_si128(c2[1548],_mm_xor_si128(c2[2863],_mm_xor_si128(c2[2650],_mm_xor_si128(c2[446],_mm_xor_si128(c2[2864],_mm_xor_si128(c2[660],_mm_xor_si128(c2[2229],_mm_xor_si128(c2[25],_mm_xor_si128(c2[691],_mm_xor_si128(c2[2006],_mm_xor_si128(c2[24],_mm_xor_si128(c2[1570],_mm_xor_si128(c2[1350],_mm_xor_si128(c2[2034],_mm_xor_si128(c2[3349],_mm_xor_si128(c2[2033],_mm_xor_si128(c2[49],_mm_xor_si128(c2[3348],_mm_xor_si128(c2[54],_mm_xor_si128(c2[3155],_mm_xor_si128(c2[951],_mm_xor_si128(c2[2708],_mm_xor_si128(c2[515],_mm_xor_si128(c2[1391],_mm_xor_si128(c2[2706],_mm_xor_si128(c2[2290],_mm_xor_si128(c2[97],_mm_xor_si128(c2[2951],_mm_xor_si128(c2[758],_mm_xor_si128(c2[2077],_mm_xor_si128(c2[93],_mm_xor_si128(c2[3392],_mm_xor_si128(c2[2759],_mm_xor_si128(c2[555],_mm_xor_si128(c2[2316],_mm_xor_si128(c2[332],_mm_xor_si128(c2[112],_mm_xor_si128(c2[793],_mm_xor_si128(c2[2119],_mm_xor_si128(c2[574],_mm_xor_si128(c2[1900],_mm_xor_si128(c2[2782],_mm_xor_si128(c2[798],_mm_xor_si128(c2[578],_mm_xor_si128(c2[155],_mm_xor_si128(c2[1481],_mm_xor_si128(c2[1038],_mm_xor_si128(c2[2584],_mm_xor_si128(c2[2364],_mm_xor_si128(c2[1505],_mm_xor_si128(c2[2820],_mm_xor_si128(c2[2163],_mm_xor_si128(c2[3478],_mm_xor_si128(c2[2826],_mm_xor_si128(c2[842],_mm_xor_si128(c2[622],_mm_xor_si128(c2[1524],_mm_xor_si128(c2[2839],_mm_xor_si128(c2[205],_mm_xor_si128(c2[1520],_mm_xor_si128(c2[1083],_mm_xor_si128(c2[2618],c2[2398]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+     d2[440]=simde_mm_xor_si128(c2[1548],simde_mm_xor_si128(c2[2863],simde_mm_xor_si128(c2[2650],simde_mm_xor_si128(c2[446],simde_mm_xor_si128(c2[2864],simde_mm_xor_si128(c2[660],simde_mm_xor_si128(c2[2229],simde_mm_xor_si128(c2[25],simde_mm_xor_si128(c2[691],simde_mm_xor_si128(c2[2006],simde_mm_xor_si128(c2[24],simde_mm_xor_si128(c2[1570],simde_mm_xor_si128(c2[1350],simde_mm_xor_si128(c2[2034],simde_mm_xor_si128(c2[3349],simde_mm_xor_si128(c2[2033],simde_mm_xor_si128(c2[49],simde_mm_xor_si128(c2[3348],simde_mm_xor_si128(c2[54],simde_mm_xor_si128(c2[3155],simde_mm_xor_si128(c2[951],simde_mm_xor_si128(c2[2708],simde_mm_xor_si128(c2[515],simde_mm_xor_si128(c2[1391],simde_mm_xor_si128(c2[2706],simde_mm_xor_si128(c2[2290],simde_mm_xor_si128(c2[97],simde_mm_xor_si128(c2[2951],simde_mm_xor_si128(c2[758],simde_mm_xor_si128(c2[2077],simde_mm_xor_si128(c2[93],simde_mm_xor_si128(c2[3392],simde_mm_xor_si128(c2[2759],simde_mm_xor_si128(c2[555],simde_mm_xor_si128(c2[2316],simde_mm_xor_si128(c2[332],simde_mm_xor_si128(c2[112],simde_mm_xor_si128(c2[793],simde_mm_xor_si128(c2[2119],simde_mm_xor_si128(c2[574],simde_mm_xor_si128(c2[1900],simde_mm_xor_si128(c2[2782],simde_mm_xor_si128(c2[798],simde_mm_xor_si128(c2[578],simde_mm_xor_si128(c2[155],simde_mm_xor_si128(c2[1481],simde_mm_xor_si128(c2[1038],simde_mm_xor_si128(c2[2584],simde_mm_xor_si128(c2[2364],simde_mm_xor_si128(c2[1505],simde_mm_xor_si128(c2[2820],simde_mm_xor_si128(c2[2163],simde_mm_xor_si128(c2[3478],simde_mm_xor_si128(c2[2826],simde_mm_xor_si128(c2[842],simde_mm_xor_si128(c2[622],simde_mm_xor_si128(c2[1524],simde_mm_xor_si128(c2[2839],simde_mm_xor_si128(c2[205],simde_mm_xor_si128(c2[1520],simde_mm_xor_si128(c2[1083],simde_mm_xor_si128(c2[2618],c2[2398]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
 
 //row: 41
-     d2[451]=_mm_xor_si128(c2[890],_mm_xor_si128(c2[670],_mm_xor_si128(c2[1761],_mm_xor_si128(c2[1986],_mm_xor_si128(c2[1571],_mm_xor_si128(c2[1351],_mm_xor_si128(c2[3332],_mm_xor_si128(c2[2665],_mm_xor_si128(c2[910],_mm_xor_si128(c2[1365],_mm_xor_si128(c2[1145],_mm_xor_si128(c2[1144],_mm_xor_si128(c2[2486],_mm_xor_si128(c2[2266],_mm_xor_si128(c2[1830],_mm_xor_si128(c2[513],_mm_xor_si128(c2[1412],_mm_xor_si128(c2[2073],_mm_xor_si128(c2[1188],_mm_xor_si128(c2[1870],_mm_xor_si128(c2[1438],_mm_xor_si128(c2[1430],_mm_xor_si128(c2[135],_mm_xor_si128(c2[3434],_mm_xor_si128(c2[3215],_mm_xor_si128(c2[1893],_mm_xor_si128(c2[2796],_mm_xor_si128(c2[160],_mm_xor_si128(c2[616],_mm_xor_si128(c2[1285],_mm_xor_si128(c2[1937],_mm_xor_si128(c2[866],_mm_xor_si128(c2[646],_mm_xor_si128(c2[2846],c2[205]))))))))))))))))))))))))))))))))));
+     d2[451]=simde_mm_xor_si128(c2[890],simde_mm_xor_si128(c2[670],simde_mm_xor_si128(c2[1761],simde_mm_xor_si128(c2[1986],simde_mm_xor_si128(c2[1571],simde_mm_xor_si128(c2[1351],simde_mm_xor_si128(c2[3332],simde_mm_xor_si128(c2[2665],simde_mm_xor_si128(c2[910],simde_mm_xor_si128(c2[1365],simde_mm_xor_si128(c2[1145],simde_mm_xor_si128(c2[1144],simde_mm_xor_si128(c2[2486],simde_mm_xor_si128(c2[2266],simde_mm_xor_si128(c2[1830],simde_mm_xor_si128(c2[513],simde_mm_xor_si128(c2[1412],simde_mm_xor_si128(c2[2073],simde_mm_xor_si128(c2[1188],simde_mm_xor_si128(c2[1870],simde_mm_xor_si128(c2[1438],simde_mm_xor_si128(c2[1430],simde_mm_xor_si128(c2[135],simde_mm_xor_si128(c2[3434],simde_mm_xor_si128(c2[3215],simde_mm_xor_si128(c2[1893],simde_mm_xor_si128(c2[2796],simde_mm_xor_si128(c2[160],simde_mm_xor_si128(c2[616],simde_mm_xor_si128(c2[1285],simde_mm_xor_si128(c2[1937],simde_mm_xor_si128(c2[866],simde_mm_xor_si128(c2[646],simde_mm_xor_si128(c2[2846],c2[205]))))))))))))))))))))))))))))))))));
   }
 }
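// The _mm_xor_si128 -> simde_mm_xor_si128 rewrite above is mechanical:
// SIMDE keeps the SSE2 semantics and lowers to native PXOR on x86 or to
// a NEON EOR on the armv7l/aarch64 targets this series enables. A
// minimal, self-contained sketch of the portable call (illustrative
// only, not part of the generated encoders; assumes the SIMDE headers
// are on the include path rather than going through PHY/sse_intrin.h):
#include <stdint.h>
#include <stdio.h>
#include <simde/x86/sse2.h>

int main(void) {
  uint8_t a[16], b[16], out[16];
  for (int i = 0; i < 16; i++) { a[i] = (uint8_t)i; b[i] = 0xFF; }
  simde__m128i va = simde_mm_loadu_si128((simde__m128i const *)a);
  simde__m128i vb = simde_mm_loadu_si128((simde__m128i const *)b);
  simde_mm_storeu_si128((simde__m128i *)out, simde_mm_xor_si128(va, vb));
  printf("%u\n", out[3]); /* 3 ^ 0xFF = 252 on every target */
  return 0;
}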
diff --git a/openair1/PHY/CODING/nrLDPC_encoder/ldpc_BG2_Zc192_byte.c b/openair1/PHY/CODING/nrLDPC_encoder/ldpc_BG2_Zc192_byte.c
index 94ac2a3220e04c14b7604802973c95a56b3d7476..95f188e192381e3dc1ae457d7b6682f00dc4b0dd 100644
--- a/openair1/PHY/CODING/nrLDPC_encoder/ldpc_BG2_Zc192_byte.c
+++ b/openair1/PHY/CODING/nrLDPC_encoder/ldpc_BG2_Zc192_byte.c
@@ -1,9 +1,10 @@
+#ifdef __AVX2__
 #include "PHY/sse_intrin.h"
 // generated code for Zc=192, byte encoding
 static inline __attribute__ ((no_sanitize("address", "undefined"))) void ldpc_BG2_Zc192_byte(uint8_t *c,uint8_t *d) {
-  __m256i *csimd=(__m256i *)c,*dsimd=(__m256i *)d;
+  simde__m256i *csimd=(simde__m256i *)c,*dsimd=(simde__m256i *)d;
 
-  __m256i *c2,*d2;
+  simde__m256i *c2,*d2;
 
   int i2;
   for (i2=0; i2<6; i2++) {
@@ -137,3 +138,4 @@ static inline __attribute__ ((no_sanitize("address", "undefined"))) void ldpc_BG
      d2[246]=simde_mm256_xor_si256(c2[1201],simde_mm256_xor_si256(c2[1081],simde_mm256_xor_si256(c2[2642],simde_mm256_xor_si256(c2[2402],simde_mm256_xor_si256(c2[3496],simde_mm256_xor_si256(c2[3376],simde_mm256_xor_si256(c2[1577],simde_mm256_xor_si256(c2[372],simde_mm256_xor_si256(c2[2296],simde_mm256_xor_si256(c2[265],simde_mm256_xor_si256(c2[145],simde_mm256_xor_si256(c2[1225],simde_mm256_xor_si256(c2[3639],simde_mm256_xor_si256(c2[3519],simde_mm256_xor_si256(c2[3758],simde_mm256_xor_si256(c2[1961],simde_mm256_xor_si256(c2[1371],simde_mm256_xor_si256(c2[1133],simde_mm256_xor_si256(c2[1493],simde_mm256_xor_si256(c2[2824],simde_mm256_xor_si256(c2[424],simde_mm256_xor_si256(c2[900],simde_mm256_xor_si256(c2[436],simde_mm256_xor_si256(c2[316],simde_mm256_xor_si256(c2[3194],simde_mm256_xor_si256(c2[2717],simde_mm256_xor_si256(c2[2725],simde_mm256_xor_si256(c2[2129],simde_mm256_xor_si256(c2[2497],simde_mm256_xor_si256(c2[1661],simde_mm256_xor_si256(c2[2858],simde_mm256_xor_si256(c2[1069],simde_mm256_xor_si256(c2[949],simde_mm256_xor_si256(c2[3709],c2[2629]))))))))))))))))))))))))))))))))));
   }
 }
+#endif
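// The #ifdef __AVX2__ guard just added, together with the #ifndef
// __AVX2__ guard in the new *_byte_128.c file below, selects exactly one
// definition of ldpc_BG2_Zc192_byte() per build: the 256-bit file when
// the compiler predefines __AVX2__ (e.g. x86 with -march=native on an
// AVX2-capable host), the 128-bit file otherwise (e.g. aarch64). A
// runnable sketch of the same selection (the file names in the strings
// are the real ones; the program itself is illustrative):
#include <stdio.h>

int main(void) {
#ifdef __AVX2__
  puts("256-bit path compiled in: ldpc_BG2_Zc192_byte.c");
#else
  puts("128-bit path compiled in: ldpc_BG2_Zc192_byte_128.c");
#endif
  return 0;
}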
diff --git a/openair1/PHY/CODING/nrLDPC_encoder/ldpc_BG2_Zc192_byte_128.c b/openair1/PHY/CODING/nrLDPC_encoder/ldpc_BG2_Zc192_byte_128.c
new file mode 100644
index 0000000000000000000000000000000000000000..76f2e36ce0976e289d99c7f079e5518ef9e56c49
--- /dev/null
+++ b/openair1/PHY/CODING/nrLDPC_encoder/ldpc_BG2_Zc192_byte_128.c
@@ -0,0 +1,141 @@
+#ifndef __AVX2__
+#include "PHY/sse_intrin.h"
+// generated code for Zc=192, byte encoding
+static inline __attribute__ ((no_sanitize("address", "undefined"))) void ldpc_BG2_Zc192_byte(uint8_t *c,uint8_t *d) {
+  simde__m128i *csimd=(simde__m128i *)c,*dsimd=(simde__m128i *)d;
+
+  simde__m128i *c2,*d2;
+
+  int i2;
+  for (i2=0; i2<12; i2++) { // 12 x 16-byte simde__m128i lanes = 192 bytes = Zc, vs. 6 x 32-byte lanes in the AVX2 file
+     c2=&csimd[i2];
+     d2=&dsimd[i2];
+
+//row: 0
+     d2[0]=simde_mm_xor_si128(c2[3130],simde_mm_xor_si128(c2[2401],simde_mm_xor_si128(c2[1921],simde_mm_xor_si128(c2[30],simde_mm_xor_si128(c2[271],simde_mm_xor_si128(c2[1712],simde_mm_xor_si128(c2[1258],simde_mm_xor_si128(c2[3418],simde_mm_xor_si128(c2[316],simde_mm_xor_si128(c2[794],simde_mm_xor_si128(c2[1039],simde_mm_xor_si128(c2[3698],simde_mm_xor_si128(c2[3222],simde_mm_xor_si128(c2[103],simde_mm_xor_si128(c2[2765],simde_mm_xor_si128(c2[1804],simde_mm_xor_si128(c2[1588],simde_mm_xor_si128(c2[3505],simde_mm_xor_si128(c2[2551],simde_mm_xor_si128(c2[2579],simde_mm_xor_si128(c2[1375],simde_mm_xor_si128(c2[2123],simde_mm_xor_si128(c2[439],simde_mm_xor_si128(c2[2833],simde_mm_xor_si128(c2[2866],simde_mm_xor_si128(c2[696],c2[2387]))))))))))))))))))))))))));
+
+//row: 1
+     d2[12]=simde_mm_xor_si128(c2[3370],simde_mm_xor_si128(c2[3130],simde_mm_xor_si128(c2[2401],simde_mm_xor_si128(c2[1921],simde_mm_xor_si128(c2[270],simde_mm_xor_si128(c2[30],simde_mm_xor_si128(c2[271],simde_mm_xor_si128(c2[1712],simde_mm_xor_si128(c2[1498],simde_mm_xor_si128(c2[1258],simde_mm_xor_si128(c2[3418],simde_mm_xor_si128(c2[556],simde_mm_xor_si128(c2[316],simde_mm_xor_si128(c2[794],simde_mm_xor_si128(c2[1039],simde_mm_xor_si128(c2[3698],simde_mm_xor_si128(c2[3222],simde_mm_xor_si128(c2[103],simde_mm_xor_si128(c2[2765],simde_mm_xor_si128(c2[1804],simde_mm_xor_si128(c2[1828],simde_mm_xor_si128(c2[1588],simde_mm_xor_si128(c2[3505],simde_mm_xor_si128(c2[2551],simde_mm_xor_si128(c2[2579],simde_mm_xor_si128(c2[1375],simde_mm_xor_si128(c2[2123],simde_mm_xor_si128(c2[439],simde_mm_xor_si128(c2[2833],simde_mm_xor_si128(c2[3106],simde_mm_xor_si128(c2[2866],simde_mm_xor_si128(c2[696],c2[2387]))))))))))))))))))))))))))))))));
+
+//row: 2
+     d2[24]=simde_mm_xor_si128(c2[3370],simde_mm_xor_si128(c2[3130],simde_mm_xor_si128(c2[2641],simde_mm_xor_si128(c2[2401],simde_mm_xor_si128(c2[1921],simde_mm_xor_si128(c2[270],simde_mm_xor_si128(c2[30],simde_mm_xor_si128(c2[271],simde_mm_xor_si128(c2[1712],simde_mm_xor_si128(c2[1498],simde_mm_xor_si128(c2[1258],simde_mm_xor_si128(c2[3418],simde_mm_xor_si128(c2[556],simde_mm_xor_si128(c2[316],simde_mm_xor_si128(c2[1034],simde_mm_xor_si128(c2[794],simde_mm_xor_si128(c2[1039],simde_mm_xor_si128(c2[99],simde_mm_xor_si128(c2[3698],simde_mm_xor_si128(c2[3222],simde_mm_xor_si128(c2[103],simde_mm_xor_si128(c2[3005],simde_mm_xor_si128(c2[2765],simde_mm_xor_si128(c2[1804],simde_mm_xor_si128(c2[1828],simde_mm_xor_si128(c2[1588],simde_mm_xor_si128(c2[3745],simde_mm_xor_si128(c2[3505],simde_mm_xor_si128(c2[2551],simde_mm_xor_si128(c2[2819],simde_mm_xor_si128(c2[2579],simde_mm_xor_si128(c2[1375],simde_mm_xor_si128(c2[2363],simde_mm_xor_si128(c2[2123],simde_mm_xor_si128(c2[439],simde_mm_xor_si128(c2[2833],simde_mm_xor_si128(c2[3106],simde_mm_xor_si128(c2[2866],simde_mm_xor_si128(c2[936],simde_mm_xor_si128(c2[696],c2[2387]))))))))))))))))))))))))))))))))))))))));
+
+//row: 3
+     d2[36]=simde_mm_xor_si128(c2[3130],simde_mm_xor_si128(c2[2401],simde_mm_xor_si128(c2[1921],simde_mm_xor_si128(c2[30],simde_mm_xor_si128(c2[271],simde_mm_xor_si128(c2[1952],simde_mm_xor_si128(c2[1712],simde_mm_xor_si128(c2[1258],simde_mm_xor_si128(c2[3658],simde_mm_xor_si128(c2[3418],simde_mm_xor_si128(c2[316],simde_mm_xor_si128(c2[794],simde_mm_xor_si128(c2[1039],simde_mm_xor_si128(c2[3698],simde_mm_xor_si128(c2[3222],simde_mm_xor_si128(c2[343],simde_mm_xor_si128(c2[103],simde_mm_xor_si128(c2[2765],simde_mm_xor_si128(c2[2044],simde_mm_xor_si128(c2[1804],simde_mm_xor_si128(c2[1588],simde_mm_xor_si128(c2[3505],simde_mm_xor_si128(c2[2791],simde_mm_xor_si128(c2[2551],simde_mm_xor_si128(c2[2579],simde_mm_xor_si128(c2[1615],simde_mm_xor_si128(c2[1375],simde_mm_xor_si128(c2[2123],simde_mm_xor_si128(c2[439],simde_mm_xor_si128(c2[3073],simde_mm_xor_si128(c2[2833],simde_mm_xor_si128(c2[2866],simde_mm_xor_si128(c2[696],simde_mm_xor_si128(c2[2627],c2[2387]))))))))))))))))))))))))))))))))));
+
+//row: 4
+     d2[48]=simde_mm_xor_si128(c2[2640],simde_mm_xor_si128(c2[2400],simde_mm_xor_si128(c2[1683],simde_mm_xor_si128(c2[1203],simde_mm_xor_si128(c2[1924],simde_mm_xor_si128(c2[3391],simde_mm_xor_si128(c2[3151],simde_mm_xor_si128(c2[3392],simde_mm_xor_si128(c2[994],simde_mm_xor_si128(c2[2428],simde_mm_xor_si128(c2[768],simde_mm_xor_si128(c2[528],simde_mm_xor_si128(c2[2688],simde_mm_xor_si128(c2[3677],simde_mm_xor_si128(c2[3437],simde_mm_xor_si128(c2[76],simde_mm_xor_si128(c2[321],simde_mm_xor_si128(c2[2980],simde_mm_xor_si128(c2[2504],simde_mm_xor_si128(c2[3224],simde_mm_xor_si128(c2[2047],simde_mm_xor_si128(c2[1086],simde_mm_xor_si128(c2[1110],simde_mm_xor_si128(c2[870],simde_mm_xor_si128(c2[2787],simde_mm_xor_si128(c2[1833],simde_mm_xor_si128(c2[1849],simde_mm_xor_si128(c2[657],simde_mm_xor_si128(c2[1393],simde_mm_xor_si128(c2[3560],simde_mm_xor_si128(c2[2115],simde_mm_xor_si128(c2[2376],simde_mm_xor_si128(c2[2136],simde_mm_xor_si128(c2[3817],c2[1657]))))))))))))))))))))))))))))))))));
+
+//row: 5
+     d2[60]=simde_mm_xor_si128(c2[3361],simde_mm_xor_si128(c2[3121],simde_mm_xor_si128(c2[2404],simde_mm_xor_si128(c2[1924],simde_mm_xor_si128(c2[2400],simde_mm_xor_si128(c2[273],simde_mm_xor_si128(c2[33],simde_mm_xor_si128(c2[274],simde_mm_xor_si128(c2[1715],simde_mm_xor_si128(c2[2906],simde_mm_xor_si128(c2[1489],simde_mm_xor_si128(c2[1249],simde_mm_xor_si128(c2[3409],simde_mm_xor_si128(c2[559],simde_mm_xor_si128(c2[319],simde_mm_xor_si128(c2[797],simde_mm_xor_si128(c2[1042],simde_mm_xor_si128(c2[3701],simde_mm_xor_si128(c2[3225],simde_mm_xor_si128(c2[106],simde_mm_xor_si128(c2[2768],simde_mm_xor_si128(c2[1807],simde_mm_xor_si128(c2[2287],simde_mm_xor_si128(c2[1831],simde_mm_xor_si128(c2[1591],simde_mm_xor_si128(c2[3508],simde_mm_xor_si128(c2[2554],simde_mm_xor_si128(c2[2570],simde_mm_xor_si128(c2[1378],simde_mm_xor_si128(c2[173],simde_mm_xor_si128(c2[2114],simde_mm_xor_si128(c2[442],simde_mm_xor_si128(c2[2836],simde_mm_xor_si128(c2[3097],simde_mm_xor_si128(c2[2857],simde_mm_xor_si128(c2[699],c2[2378]))))))))))))))))))))))))))))))))))));
+
+//row: 6
+     d2[72]=simde_mm_xor_si128(c2[1450],simde_mm_xor_si128(c2[1210],simde_mm_xor_si128(c2[481],simde_mm_xor_si128(c2[1],simde_mm_xor_si128(c2[248],simde_mm_xor_si128(c2[2189],simde_mm_xor_si128(c2[1949],simde_mm_xor_si128(c2[2190],simde_mm_xor_si128(c2[3631],simde_mm_xor_si128(c2[3417],simde_mm_xor_si128(c2[3177],simde_mm_xor_si128(c2[1498],simde_mm_xor_si128(c2[2475],simde_mm_xor_si128(c2[2235],simde_mm_xor_si128(c2[2713],simde_mm_xor_si128(c2[2958],simde_mm_xor_si128(c2[1778],simde_mm_xor_si128(c2[1302],simde_mm_xor_si128(c2[2022],simde_mm_xor_si128(c2[845],simde_mm_xor_si128(c2[3723],simde_mm_xor_si128(c2[3005],simde_mm_xor_si128(c2[3747],simde_mm_xor_si128(c2[3507],simde_mm_xor_si128(c2[1585],simde_mm_xor_si128(c2[631],simde_mm_xor_si128(c2[659],simde_mm_xor_si128(c2[3294],simde_mm_xor_si128(c2[1134],simde_mm_xor_si128(c2[203],simde_mm_xor_si128(c2[2358],simde_mm_xor_si128(c2[913],simde_mm_xor_si128(c2[1186],simde_mm_xor_si128(c2[946],simde_mm_xor_si128(c2[2627],simde_mm_xor_si128(c2[467],c2[459]))))))))))))))))))))))))))))))))))));
+
+//row: 7
+     d2[84]=simde_mm_xor_si128(c2[965],simde_mm_xor_si128(c2[725],simde_mm_xor_si128(c2[2887],simde_mm_xor_si128(c2[8],simde_mm_xor_si128(c2[2170],simde_mm_xor_si128(c2[3367],simde_mm_xor_si128(c2[1690],simde_mm_xor_si128(c2[1704],simde_mm_xor_si128(c2[1464],simde_mm_xor_si128(c2[3626],simde_mm_xor_si128(c2[1705],simde_mm_xor_si128(c2[28],simde_mm_xor_si128(c2[3146],simde_mm_xor_si128(c2[1709],simde_mm_xor_si128(c2[1469],simde_mm_xor_si128(c2[29],simde_mm_xor_si128(c2[2932],simde_mm_xor_si128(c2[2692],simde_mm_xor_si128(c2[1015],simde_mm_xor_si128(c2[1013],simde_mm_xor_si128(c2[3415],simde_mm_xor_si128(c2[3175],simde_mm_xor_si128(c2[2002],simde_mm_xor_si128(c2[1762],simde_mm_xor_si128(c2[73],simde_mm_xor_si128(c2[2240],simde_mm_xor_si128(c2[563],simde_mm_xor_si128(c2[2473],simde_mm_xor_si128(c2[796],simde_mm_xor_si128(c2[1305],simde_mm_xor_si128(c2[3467],simde_mm_xor_si128(c2[817],simde_mm_xor_si128(c2[2979],simde_mm_xor_si128(c2[1537],simde_mm_xor_si128(c2[100],simde_mm_xor_si128(c2[3699],simde_mm_xor_si128(c2[360],simde_mm_xor_si128(c2[2522],simde_mm_xor_si128(c2[3250],simde_mm_xor_si128(c2[1801],simde_mm_xor_si128(c2[1561],simde_mm_xor_si128(c2[2531],simde_mm_xor_si128(c2[3274],simde_mm_xor_si128(c2[3034],simde_mm_xor_si128(c2[1345],simde_mm_xor_si128(c2[1112],simde_mm_xor_si128(c2[3274],simde_mm_xor_si128(c2[146],simde_mm_xor_si128(c2[2548],simde_mm_xor_si128(c2[2308],simde_mm_xor_si128(c2[174],simde_mm_xor_si128(c2[2336],simde_mm_xor_si128(c2[2809],simde_mm_xor_si128(c2[1372],simde_mm_xor_si128(c2[1132],simde_mm_xor_si128(c2[169],simde_mm_xor_si128(c2[3557],simde_mm_xor_si128(c2[1880],simde_mm_xor_si128(c2[1873],simde_mm_xor_si128(c2[196],simde_mm_xor_si128(c2[440],simde_mm_xor_si128(c2[2842],simde_mm_xor_si128(c2[2602],simde_mm_xor_si128(c2[701],simde_mm_xor_si128(c2[461],simde_mm_xor_si128(c2[2623],simde_mm_xor_si128(c2[2142],simde_mm_xor_si128(c2[465],simde_mm_xor_si128(c2[3821],simde_mm_xor_si128(c2[2384],c2[2144]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 8
+     d2[96]=simde_mm_xor_si128(c2[1448],simde_mm_xor_si128(c2[1208],simde_mm_xor_si128(c2[731],simde_mm_xor_si128(c2[491],simde_mm_xor_si128(c2[11],simde_mm_xor_si128(c2[1447],simde_mm_xor_si128(c2[2187],simde_mm_xor_si128(c2[1947],simde_mm_xor_si128(c2[2188],simde_mm_xor_si128(c2[3629],simde_mm_xor_si128(c2[1468],simde_mm_xor_si128(c2[3415],simde_mm_xor_si128(c2[3175],simde_mm_xor_si128(c2[1496],simde_mm_xor_si128(c2[2473],simde_mm_xor_si128(c2[2233],simde_mm_xor_si128(c2[2963],simde_mm_xor_si128(c2[2723],simde_mm_xor_si128(c2[2956],simde_mm_xor_si128(c2[2016],simde_mm_xor_si128(c2[1776],simde_mm_xor_si128(c2[1300],simde_mm_xor_si128(c2[2020],simde_mm_xor_si128(c2[1083],simde_mm_xor_si128(c2[843],simde_mm_xor_si128(c2[3721],simde_mm_xor_si128(c2[3745],simde_mm_xor_si128(c2[3505],simde_mm_xor_si128(c2[1835],simde_mm_xor_si128(c2[1595],simde_mm_xor_si128(c2[629],simde_mm_xor_si128(c2[897],simde_mm_xor_si128(c2[657],simde_mm_xor_si128(c2[3292],simde_mm_xor_si128(c2[441],simde_mm_xor_si128(c2[201],simde_mm_xor_si128(c2[2356],simde_mm_xor_si128(c2[923],simde_mm_xor_si128(c2[1184],simde_mm_xor_si128(c2[944],simde_mm_xor_si128(c2[2865],simde_mm_xor_si128(c2[2625],c2[465]))))))))))))))))))))))))))))))))))))))))));
+
+//row: 9
+     d2[108]=simde_mm_xor_si128(c2[1450],simde_mm_xor_si128(c2[10],simde_mm_xor_si128(c2[3609],simde_mm_xor_si128(c2[721],simde_mm_xor_si128(c2[2880],simde_mm_xor_si128(c2[241],simde_mm_xor_si128(c2[2400],simde_mm_xor_si128(c2[2189],simde_mm_xor_si128(c2[749],simde_mm_xor_si128(c2[509],simde_mm_xor_si128(c2[2430],simde_mm_xor_si128(c2[750],simde_mm_xor_si128(c2[32],simde_mm_xor_si128(c2[2191],simde_mm_xor_si128(c2[2905],simde_mm_xor_si128(c2[3417],simde_mm_xor_si128(c2[1977],simde_mm_xor_si128(c2[1737],simde_mm_xor_si128(c2[1738],simde_mm_xor_si128(c2[58],simde_mm_xor_si128(c2[2475],simde_mm_xor_si128(c2[1035],simde_mm_xor_si128(c2[795],simde_mm_xor_si128(c2[2953],simde_mm_xor_si128(c2[1273],simde_mm_xor_si128(c2[3198],simde_mm_xor_si128(c2[1518],simde_mm_xor_si128(c2[2018],simde_mm_xor_si128(c2[338],simde_mm_xor_si128(c2[1542],simde_mm_xor_si128(c2[3701],simde_mm_xor_si128(c2[2262],simde_mm_xor_si128(c2[582],simde_mm_xor_si128(c2[1085],simde_mm_xor_si128(c2[3244],simde_mm_xor_si128(c2[124],simde_mm_xor_si128(c2[2283],simde_mm_xor_si128(c2[3747],simde_mm_xor_si128(c2[2307],simde_mm_xor_si128(c2[2067],simde_mm_xor_si128(c2[1825],simde_mm_xor_si128(c2[145],simde_mm_xor_si128(c2[871],simde_mm_xor_si128(c2[3030],simde_mm_xor_si128(c2[899],simde_mm_xor_si128(c2[3058],simde_mm_xor_si128(c2[3534],simde_mm_xor_si128(c2[1854],simde_mm_xor_si128(c2[443],simde_mm_xor_si128(c2[2602],simde_mm_xor_si128(c2[2598],simde_mm_xor_si128(c2[918],simde_mm_xor_si128(c2[1153],simde_mm_xor_si128(c2[3312],simde_mm_xor_si128(c2[1160],simde_mm_xor_si128(c2[1186],simde_mm_xor_si128(c2[3585],simde_mm_xor_si128(c2[3345],simde_mm_xor_si128(c2[2867],simde_mm_xor_si128(c2[1187],simde_mm_xor_si128(c2[707],c2[2866])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 10
+     d2[120]=simde_mm_xor_si128(c2[2643],simde_mm_xor_si128(c2[1950],simde_mm_xor_si128(c2[1585],c2[1131])));
+
+//row: 11
+     d2[132]=simde_mm_xor_si128(c2[1688],simde_mm_xor_si128(c2[971],simde_mm_xor_si128(c2[491],simde_mm_xor_si128(c2[2],simde_mm_xor_si128(c2[2427],simde_mm_xor_si128(c2[2668],simde_mm_xor_si128(c2[510],simde_mm_xor_si128(c2[270],simde_mm_xor_si128(c2[3655],simde_mm_xor_si128(c2[2216],simde_mm_xor_si128(c2[1976],simde_mm_xor_si128(c2[2713],simde_mm_xor_si128(c2[3203],simde_mm_xor_si128(c2[3436],simde_mm_xor_si128(c2[2256],simde_mm_xor_si128(c2[1780],simde_mm_xor_si128(c2[2740],simde_mm_xor_si128(c2[2500],simde_mm_xor_si128(c2[1323],simde_mm_xor_si128(c2[602],simde_mm_xor_si128(c2[362],simde_mm_xor_si128(c2[146],simde_mm_xor_si128(c2[2075],simde_mm_xor_si128(c2[1349],simde_mm_xor_si128(c2[1109],simde_mm_xor_si128(c2[1137],simde_mm_xor_si128(c2[173],simde_mm_xor_si128(c2[3772],simde_mm_xor_si128(c2[3053],simde_mm_xor_si128(c2[681],simde_mm_xor_si128(c2[2836],simde_mm_xor_si128(c2[1643],simde_mm_xor_si128(c2[1403],simde_mm_xor_si128(c2[1424],simde_mm_xor_si128(c2[3105],simde_mm_xor_si128(c2[1185],simde_mm_xor_si128(c2[945],c2[3586])))))))))))))))))))))))))))))))))))));
+
+//row: 12
+     d2[144]=simde_mm_xor_si128(c2[2171],simde_mm_xor_si128(c2[1931],simde_mm_xor_si128(c2[1202],simde_mm_xor_si128(c2[722],simde_mm_xor_si128(c2[2910],simde_mm_xor_si128(c2[2670],simde_mm_xor_si128(c2[2911],simde_mm_xor_si128(c2[513],simde_mm_xor_si128(c2[1706],simde_mm_xor_si128(c2[299],simde_mm_xor_si128(c2[59],simde_mm_xor_si128(c2[2219],simde_mm_xor_si128(c2[3196],simde_mm_xor_si128(c2[2956],simde_mm_xor_si128(c2[3434],simde_mm_xor_si128(c2[3679],simde_mm_xor_si128(c2[3197],simde_mm_xor_si128(c2[2499],simde_mm_xor_si128(c2[2023],simde_mm_xor_si128(c2[2743],simde_mm_xor_si128(c2[1566],simde_mm_xor_si128(c2[605],simde_mm_xor_si128(c2[629],simde_mm_xor_si128(c2[389],simde_mm_xor_si128(c2[2306],simde_mm_xor_si128(c2[1352],simde_mm_xor_si128(c2[1368],simde_mm_xor_si128(c2[176],simde_mm_xor_si128(c2[912],simde_mm_xor_si128(c2[3079],simde_mm_xor_si128(c2[1634],simde_mm_xor_si128(c2[1907],simde_mm_xor_si128(c2[1667],simde_mm_xor_si128(c2[3336],c2[1176]))))))))))))))))))))))))))))))))));
+
+//row: 13
+     d2[156]=simde_mm_xor_si128(c2[729],simde_mm_xor_si128(c2[0],simde_mm_xor_si128(c2[3371],simde_mm_xor_si128(c2[243],simde_mm_xor_si128(c2[1468],simde_mm_xor_si128(c2[1709],simde_mm_xor_si128(c2[3390],simde_mm_xor_si128(c2[3150],simde_mm_xor_si128(c2[3151],simde_mm_xor_si128(c2[2696],simde_mm_xor_si128(c2[1257],simde_mm_xor_si128(c2[1017],simde_mm_xor_si128(c2[1754],simde_mm_xor_si128(c2[2232],simde_mm_xor_si128(c2[2477],simde_mm_xor_si128(c2[1297],simde_mm_xor_si128(c2[821],simde_mm_xor_si128(c2[1781],simde_mm_xor_si128(c2[1541],simde_mm_xor_si128(c2[364],simde_mm_xor_si128(c2[3482],simde_mm_xor_si128(c2[3242],simde_mm_xor_si128(c2[3026],simde_mm_xor_si128(c2[1104],simde_mm_xor_si128(c2[390],simde_mm_xor_si128(c2[150],simde_mm_xor_si128(c2[178],simde_mm_xor_si128(c2[3053],simde_mm_xor_si128(c2[2813],simde_mm_xor_si128(c2[3561],simde_mm_xor_si128(c2[1877],simde_mm_xor_si128(c2[672],simde_mm_xor_si128(c2[432],simde_mm_xor_si128(c2[914],simde_mm_xor_si128(c2[465],simde_mm_xor_si128(c2[2146],simde_mm_xor_si128(c2[226],c2[3825])))))))))))))))))))))))))))))))))))));
+
+//row: 14
+     d2[168]=simde_mm_xor_si128(c2[720],simde_mm_xor_si128(c2[480],simde_mm_xor_si128(c2[9],simde_mm_xor_si128(c2[3602],simde_mm_xor_si128(c2[3131],simde_mm_xor_si128(c2[3122],simde_mm_xor_si128(c2[2651],simde_mm_xor_si128(c2[1471],simde_mm_xor_si128(c2[1231],simde_mm_xor_si128(c2[748],simde_mm_xor_si128(c2[1472],simde_mm_xor_si128(c2[989],simde_mm_xor_si128(c2[2913],simde_mm_xor_si128(c2[2670],simde_mm_xor_si128(c2[2430],simde_mm_xor_si128(c2[745],simde_mm_xor_si128(c2[2699],simde_mm_xor_si128(c2[2459],simde_mm_xor_si128(c2[1976],simde_mm_xor_si128(c2[768],simde_mm_xor_si128(c2[537],simde_mm_xor_si128(c2[297],simde_mm_xor_si128(c2[1757],simde_mm_xor_si128(c2[1517],simde_mm_xor_si128(c2[1034],simde_mm_xor_si128(c2[1995],simde_mm_xor_si128(c2[1512],simde_mm_xor_si128(c2[2240],simde_mm_xor_si128(c2[1757],simde_mm_xor_si128(c2[1060],simde_mm_xor_si128(c2[577],simde_mm_xor_si128(c2[584],simde_mm_xor_si128(c2[101],simde_mm_xor_si128(c2[1304],simde_mm_xor_si128(c2[1061],simde_mm_xor_si128(c2[821],simde_mm_xor_si128(c2[127],simde_mm_xor_si128(c2[3483],simde_mm_xor_si128(c2[3005],simde_mm_xor_si128(c2[2762],simde_mm_xor_si128(c2[2522],simde_mm_xor_si128(c2[3029],simde_mm_xor_si128(c2[2789],simde_mm_xor_si128(c2[2306],simde_mm_xor_si128(c2[867],simde_mm_xor_si128(c2[384],simde_mm_xor_si128(c2[3752],simde_mm_xor_si128(c2[3509],simde_mm_xor_si128(c2[3269],simde_mm_xor_si128(c2[1591],simde_mm_xor_si128(c2[3768],simde_mm_xor_si128(c2[3297],simde_mm_xor_si128(c2[2576],simde_mm_xor_si128(c2[2333],simde_mm_xor_si128(c2[2093],simde_mm_xor_si128(c2[3312],simde_mm_xor_si128(c2[2841],simde_mm_xor_si128(c2[1640],simde_mm_xor_si128(c2[1157],simde_mm_xor_si128(c2[195],simde_mm_xor_si128(c2[3803],simde_mm_xor_si128(c2[3563],simde_mm_xor_si128(c2[456],simde_mm_xor_si128(c2[216],simde_mm_xor_si128(c2[3584],simde_mm_xor_si128(c2[1897],simde_mm_xor_si128(c2[1426],simde_mm_xor_si128(c2[3576],simde_mm_xor_si128(c2[3345],c2[3105])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 15
+     d2[180]=simde_mm_xor_si128(c2[2882],simde_mm_xor_si128(c2[3603],simde_mm_xor_si128(c2[3363],simde_mm_xor_si128(c2[2165],simde_mm_xor_si128(c2[2646],simde_mm_xor_si128(c2[1685],simde_mm_xor_si128(c2[2166],simde_mm_xor_si128(c2[964],simde_mm_xor_si128(c2[3633],simde_mm_xor_si128(c2[515],simde_mm_xor_si128(c2[275],simde_mm_xor_si128(c2[35],simde_mm_xor_si128(c2[504],simde_mm_xor_si128(c2[1464],simde_mm_xor_si128(c2[1945],simde_mm_xor_si128(c2[1010],simde_mm_xor_si128(c2[1731],simde_mm_xor_si128(c2[1491],simde_mm_xor_si128(c2[3170],simde_mm_xor_si128(c2[3651],simde_mm_xor_si128(c2[80],simde_mm_xor_si128(c2[801],simde_mm_xor_si128(c2[561],simde_mm_xor_si128(c2[558],simde_mm_xor_si128(c2[1039],simde_mm_xor_si128(c2[803],simde_mm_xor_si128(c2[1272],simde_mm_xor_si128(c2[3462],simde_mm_xor_si128(c2[104],simde_mm_xor_si128(c2[2986],simde_mm_xor_si128(c2[3467],simde_mm_xor_si128(c2[3706],simde_mm_xor_si128(c2[336],simde_mm_xor_si128(c2[2529],simde_mm_xor_si128(c2[3010],simde_mm_xor_si128(c2[1568],simde_mm_xor_si128(c2[2049],simde_mm_xor_si128(c2[1352],simde_mm_xor_si128(c2[2073],simde_mm_xor_si128(c2[1833],simde_mm_xor_si128(c2[3269],simde_mm_xor_si128(c2[3750],simde_mm_xor_si128(c2[2315],simde_mm_xor_si128(c2[2784],simde_mm_xor_si128(c2[2331],simde_mm_xor_si128(c2[2812],simde_mm_xor_si128(c2[1139],simde_mm_xor_si128(c2[1608],simde_mm_xor_si128(c2[1875],simde_mm_xor_si128(c2[2356],simde_mm_xor_si128(c2[203],simde_mm_xor_si128(c2[672],simde_mm_xor_si128(c2[2597],simde_mm_xor_si128(c2[3078],simde_mm_xor_si128(c2[2618],simde_mm_xor_si128(c2[3339],simde_mm_xor_si128(c2[3099],simde_mm_xor_si128(c2[460],simde_mm_xor_si128(c2[941],simde_mm_xor_si128(c2[2139],c2[2620]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 16
+     d2[192]=simde_mm_xor_si128(c2[1207],simde_mm_xor_si128(c2[967],simde_mm_xor_si128(c2[3362],simde_mm_xor_si128(c2[3122],simde_mm_xor_si128(c2[250],simde_mm_xor_si128(c2[2645],simde_mm_xor_si128(c2[2405],simde_mm_xor_si128(c2[3609],simde_mm_xor_si128(c2[1925],simde_mm_xor_si128(c2[1946],simde_mm_xor_si128(c2[1706],simde_mm_xor_si128(c2[274],simde_mm_xor_si128(c2[34],simde_mm_xor_si128(c2[1947],simde_mm_xor_si128(c2[275],simde_mm_xor_si128(c2[3388],simde_mm_xor_si128(c2[1704],simde_mm_xor_si128(c2[1709],simde_mm_xor_si128(c2[3174],simde_mm_xor_si128(c2[2934],simde_mm_xor_si128(c2[1490],simde_mm_xor_si128(c2[1250],simde_mm_xor_si128(c2[1255],simde_mm_xor_si128(c2[3410],simde_mm_xor_si128(c2[2232],simde_mm_xor_si128(c2[1992],simde_mm_xor_si128(c2[560],simde_mm_xor_si128(c2[320],simde_mm_xor_si128(c2[2482],simde_mm_xor_si128(c2[1038],simde_mm_xor_si128(c2[798],simde_mm_xor_si128(c2[2715],simde_mm_xor_si128(c2[1043],simde_mm_xor_si128(c2[1547],simde_mm_xor_si128(c2[103],simde_mm_xor_si128(c2[3702],simde_mm_xor_si128(c2[1059],simde_mm_xor_si128(c2[3226],simde_mm_xor_si128(c2[1779],simde_mm_xor_si128(c2[107],simde_mm_xor_si128(c2[602],simde_mm_xor_si128(c2[3009],simde_mm_xor_si128(c2[2769],simde_mm_xor_si128(c2[3480],simde_mm_xor_si128(c2[1808],simde_mm_xor_si128(c2[3504],simde_mm_xor_si128(c2[3264],simde_mm_xor_si128(c2[1832],simde_mm_xor_si128(c2[1592],simde_mm_xor_si128(c2[1354],simde_mm_xor_si128(c2[3749],simde_mm_xor_si128(c2[3509],simde_mm_xor_si128(c2[388],simde_mm_xor_si128(c2[2555],simde_mm_xor_si128(c2[416],simde_mm_xor_si128(c2[2811],simde_mm_xor_si128(c2[2571],simde_mm_xor_si128(c2[3051],simde_mm_xor_si128(c2[1379],simde_mm_xor_si128(c2[3799],simde_mm_xor_si128(c2[2355],simde_mm_xor_si128(c2[2115],simde_mm_xor_si128(c2[2115],simde_mm_xor_si128(c2[443],simde_mm_xor_si128(c2[682],simde_mm_xor_si128(c2[2837],simde_mm_xor_si128(c2[943],simde_mm_xor_si128(c2[703],simde_mm_xor_si128(c2[3098],simde_mm_xor_si128(c2[2858],simde_mm_xor_si128(c2[2384],simde_mm_xor_si128(c2[940],simde_mm_xor_si128(c2[700],simde_mm_xor_si128(c2[224],simde_mm_xor_si128(c2[2379],c2[467])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 17
+     d2[204]=simde_mm_xor_si128(c2[1691],simde_mm_xor_si128(c2[1451],simde_mm_xor_si128(c2[971],simde_mm_xor_si128(c2[731],simde_mm_xor_si128(c2[722],simde_mm_xor_si128(c2[242],simde_mm_xor_si128(c2[2],simde_mm_xor_si128(c2[242],simde_mm_xor_si128(c2[3361],simde_mm_xor_si128(c2[2430],simde_mm_xor_si128(c2[2190],simde_mm_xor_si128(c2[1710],simde_mm_xor_si128(c2[1470],simde_mm_xor_si128(c2[2431],simde_mm_xor_si128(c2[1711],simde_mm_xor_si128(c2[33],simde_mm_xor_si128(c2[3152],simde_mm_xor_si128(c2[3393],simde_mm_xor_si128(c2[3658],simde_mm_xor_si128(c2[3418],simde_mm_xor_si128(c2[2938],simde_mm_xor_si128(c2[2698],simde_mm_xor_si128(c2[1739],simde_mm_xor_si128(c2[1019],simde_mm_xor_si128(c2[2716],simde_mm_xor_si128(c2[2476],simde_mm_xor_si128(c2[1996],simde_mm_xor_si128(c2[1756],simde_mm_xor_si128(c2[2954],simde_mm_xor_si128(c2[2474],simde_mm_xor_si128(c2[2234],simde_mm_xor_si128(c2[3199],simde_mm_xor_si128(c2[2479],simde_mm_xor_si128(c2[2019],simde_mm_xor_si128(c2[1539],simde_mm_xor_si128(c2[1299],simde_mm_xor_si128(c2[1543],simde_mm_xor_si128(c2[823],simde_mm_xor_si128(c2[2263],simde_mm_xor_si128(c2[1543],simde_mm_xor_si128(c2[1086],simde_mm_xor_si128(c2[606],simde_mm_xor_si128(c2[366],simde_mm_xor_si128(c2[125],simde_mm_xor_si128(c2[3244],simde_mm_xor_si128(c2[1801],simde_mm_xor_si128(c2[149],simde_mm_xor_si128(c2[3748],simde_mm_xor_si128(c2[3268],simde_mm_xor_si128(c2[3028],simde_mm_xor_si128(c2[1826],simde_mm_xor_si128(c2[1346],simde_mm_xor_si128(c2[1106],simde_mm_xor_si128(c2[872],simde_mm_xor_si128(c2[152],simde_mm_xor_si128(c2[888],simde_mm_xor_si128(c2[408],simde_mm_xor_si128(c2[168],simde_mm_xor_si128(c2[3535],simde_mm_xor_si128(c2[2815],simde_mm_xor_si128(c2[432],simde_mm_xor_si128(c2[3803],simde_mm_xor_si128(c2[3563],simde_mm_xor_si128(c2[2599],simde_mm_xor_si128(c2[1879],simde_mm_xor_si128(c2[1154],simde_mm_xor_si128(c2[434],simde_mm_xor_si128(c2[1427],simde_mm_xor_si128(c2[1187],simde_mm_xor_si128(c2[707],simde_mm_xor_si128(c2[467],simde_mm_xor_si128(c2[2856],simde_mm_xor_si128(c2[2376],simde_mm_xor_si128(c2[2136],simde_mm_xor_si128(c2[696],c2[3827])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 18
+     d2[216]=simde_mm_xor_si128(c2[2411],simde_mm_xor_si128(c2[1584],c2[3530]));
+
+//row: 19
+     d2[228]=simde_mm_xor_si128(c2[2168],simde_mm_xor_si128(c2[1451],simde_mm_xor_si128(c2[971],simde_mm_xor_si128(c2[2403],simde_mm_xor_si128(c2[2907],simde_mm_xor_si128(c2[3148],simde_mm_xor_si128(c2[750],simde_mm_xor_si128(c2[2426],simde_mm_xor_si128(c2[296],simde_mm_xor_si128(c2[2456],simde_mm_xor_si128(c2[3193],simde_mm_xor_si128(c2[3683],simde_mm_xor_si128(c2[77],simde_mm_xor_si128(c2[2736],simde_mm_xor_si128(c2[2260],simde_mm_xor_si128(c2[2980],simde_mm_xor_si128(c2[1803],simde_mm_xor_si128(c2[842],simde_mm_xor_si128(c2[626],simde_mm_xor_si128(c2[2555],simde_mm_xor_si128(c2[1589],simde_mm_xor_si128(c2[1617],simde_mm_xor_si128(c2[413],simde_mm_xor_si128(c2[1161],simde_mm_xor_si128(c2[3316],simde_mm_xor_si128(c2[1883],simde_mm_xor_si128(c2[1904],simde_mm_xor_si128(c2[3585],c2[1425]))))))))))))))))))))))))))));
+
+//row: 20
+     d2[240]=simde_mm_xor_si128(c2[1688],simde_mm_xor_si128(c2[1448],simde_mm_xor_si128(c2[731],simde_mm_xor_si128(c2[251],simde_mm_xor_si128(c2[2427],simde_mm_xor_si128(c2[2187],simde_mm_xor_si128(c2[2428],simde_mm_xor_si128(c2[30],simde_mm_xor_si128(c2[2908],simde_mm_xor_si128(c2[3655],simde_mm_xor_si128(c2[3415],simde_mm_xor_si128(c2[1736],simde_mm_xor_si128(c2[2713],simde_mm_xor_si128(c2[2473],simde_mm_xor_si128(c2[2963],simde_mm_xor_si128(c2[3196],simde_mm_xor_si128(c2[2016],simde_mm_xor_si128(c2[1540],simde_mm_xor_si128(c2[2260],simde_mm_xor_si128(c2[3219],simde_mm_xor_si128(c2[1083],simde_mm_xor_si128(c2[122],simde_mm_xor_si128(c2[146],simde_mm_xor_si128(c2[3745],simde_mm_xor_si128(c2[1835],simde_mm_xor_si128(c2[869],simde_mm_xor_si128(c2[897],simde_mm_xor_si128(c2[3532],simde_mm_xor_si128(c2[441],simde_mm_xor_si128(c2[2596],simde_mm_xor_si128(c2[1163],simde_mm_xor_si128(c2[1424],simde_mm_xor_si128(c2[1184],simde_mm_xor_si128(c2[2865],c2[705]))))))))))))))))))))))))))))))))));
+
+//row: 21
+     d2[252]=simde_mm_xor_si128(c2[3],simde_mm_xor_si128(c2[3125],simde_mm_xor_si128(c2[2645],simde_mm_xor_si128(c2[3129],simde_mm_xor_si128(c2[754],simde_mm_xor_si128(c2[995],simde_mm_xor_si128(c2[2664],simde_mm_xor_si128(c2[2424],simde_mm_xor_si128(c2[1970],simde_mm_xor_si128(c2[531],simde_mm_xor_si128(c2[291],simde_mm_xor_si128(c2[1040],simde_mm_xor_si128(c2[1518],simde_mm_xor_si128(c2[1763],simde_mm_xor_si128(c2[583],simde_mm_xor_si128(c2[107],simde_mm_xor_si128(c2[1067],simde_mm_xor_si128(c2[827],simde_mm_xor_si128(c2[3489],simde_mm_xor_si128(c2[2768],simde_mm_xor_si128(c2[2528],simde_mm_xor_si128(c2[2312],simde_mm_xor_si128(c2[390],simde_mm_xor_si128(c2[3515],simde_mm_xor_si128(c2[3275],simde_mm_xor_si128(c2[3291],simde_mm_xor_si128(c2[2339],simde_mm_xor_si128(c2[2099],simde_mm_xor_si128(c2[2835],simde_mm_xor_si128(c2[1163],simde_mm_xor_si128(c2[3797],simde_mm_xor_si128(c2[3557],simde_mm_xor_si128(c2[3802],simde_mm_xor_si128(c2[3578],simde_mm_xor_si128(c2[1420],simde_mm_xor_si128(c2[3339],c2[3099]))))))))))))))))))))))))))))))))))));
+
+//row: 22
+     d2[264]=simde_mm_xor_si128(c2[985],c2[1011]);
+
+//row: 23
+     d2[276]=simde_mm_xor_si128(c2[2406],simde_mm_xor_si128(c2[1517],c2[3725]));
+
+//row: 24
+     d2[288]=simde_mm_xor_si128(c2[1475],simde_mm_xor_si128(c2[2217],c2[220]));
+
+//row: 25
+     d2[300]=simde_mm_xor_si128(c2[3122],c2[1321]);
+
+//row: 26
+     d2[312]=simde_mm_xor_si128(c2[1202],simde_mm_xor_si128(c2[962],simde_mm_xor_si128(c2[484],simde_mm_xor_si128(c2[485],simde_mm_xor_si128(c2[245],simde_mm_xor_si128(c2[3606],simde_mm_xor_si128(c2[3604],simde_mm_xor_si128(c2[3126],simde_mm_xor_si128(c2[1953],simde_mm_xor_si128(c2[1713],simde_mm_xor_si128(c2[1235],simde_mm_xor_si128(c2[1954],simde_mm_xor_si128(c2[1464],simde_mm_xor_si128(c2[3395],simde_mm_xor_si128(c2[3145],simde_mm_xor_si128(c2[2905],simde_mm_xor_si128(c2[3169],simde_mm_xor_si128(c2[2929],simde_mm_xor_si128(c2[2451],simde_mm_xor_si128(c2[1250],simde_mm_xor_si128(c2[1012],simde_mm_xor_si128(c2[772],simde_mm_xor_si128(c2[772],simde_mm_xor_si128(c2[2239],simde_mm_xor_si128(c2[1999],simde_mm_xor_si128(c2[1521],simde_mm_xor_si128(c2[2717],simde_mm_xor_si128(c2[2477],simde_mm_xor_si128(c2[1999],simde_mm_xor_si128(c2[2722],simde_mm_xor_si128(c2[2232],simde_mm_xor_si128(c2[1782],simde_mm_xor_si128(c2[1542],simde_mm_xor_si128(c2[1064],simde_mm_xor_si128(c2[1066],simde_mm_xor_si128(c2[576],simde_mm_xor_si128(c2[1786],simde_mm_xor_si128(c2[1536],simde_mm_xor_si128(c2[1296],simde_mm_xor_si128(c2[849],simde_mm_xor_si128(c2[609],simde_mm_xor_si128(c2[131],simde_mm_xor_si128(c2[3487],simde_mm_xor_si128(c2[3249],simde_mm_xor_si128(c2[3009],simde_mm_xor_si128(c2[3511],simde_mm_xor_si128(c2[3271],simde_mm_xor_si128(c2[2793],simde_mm_xor_si128(c2[1589],simde_mm_xor_si128(c2[1349],simde_mm_xor_si128(c2[871],simde_mm_xor_si128(c2[395],simde_mm_xor_si128(c2[145],simde_mm_xor_si128(c2[3744],simde_mm_xor_si128(c2[651],simde_mm_xor_si128(c2[411],simde_mm_xor_si128(c2[3772],simde_mm_xor_si128(c2[3058],simde_mm_xor_si128(c2[2808],simde_mm_xor_si128(c2[2568],simde_mm_xor_si128(c2[2336],simde_mm_xor_si128(c2[195],simde_mm_xor_si128(c2[3794],simde_mm_xor_si128(c2[3316],simde_mm_xor_si128(c2[2122],simde_mm_xor_si128(c2[1632],simde_mm_xor_si128(c2[677],simde_mm_xor_si128(c2[439],simde_mm_xor_si128(c2[199],simde_mm_xor_si128(c2[938],simde_mm_xor_si128(c2[698],simde_mm_xor_si128(c2[220],simde_mm_xor_si128(c2[2619],simde_mm_xor_si128(c2[2379],simde_mm_xor_si128(c2[1901],simde_mm_xor_si128(c2[219],simde_mm_xor_si128(c2[3820],c2[3580])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 27
+     d2[324]=simde_mm_xor_si128(c2[1686],c2[627]);
+
+//row: 28
+     d2[336]=simde_mm_xor_si128(c2[1468],simde_mm_xor_si128(c2[3654],c2[2050]));
+
+//row: 29
+     d2[348]=simde_mm_xor_si128(c2[3366],c2[337]);
+
+//row: 30
+     d2[360]=simde_mm_xor_si128(c2[1975],simde_mm_xor_si128(c2[2529],simde_mm_xor_si128(c2[1131],c2[2139])));
+
+//row: 31
+     d2[372]=simde_mm_xor_si128(c2[1689],simde_mm_xor_si128(c2[960],simde_mm_xor_si128(c2[480],simde_mm_xor_si128(c2[2428],simde_mm_xor_si128(c2[2669],simde_mm_xor_si128(c2[511],simde_mm_xor_si128(c2[271],simde_mm_xor_si128(c2[744],simde_mm_xor_si128(c2[3656],simde_mm_xor_si128(c2[2217],simde_mm_xor_si128(c2[1977],simde_mm_xor_si128(c2[2714],simde_mm_xor_si128(c2[3192],simde_mm_xor_si128(c2[3437],simde_mm_xor_si128(c2[2257],simde_mm_xor_si128(c2[1781],simde_mm_xor_si128(c2[2741],simde_mm_xor_si128(c2[2501],simde_mm_xor_si128(c2[1324],simde_mm_xor_si128(c2[603],simde_mm_xor_si128(c2[363],simde_mm_xor_si128(c2[147],simde_mm_xor_si128(c2[2064],simde_mm_xor_si128(c2[1350],simde_mm_xor_si128(c2[1110],simde_mm_xor_si128(c2[1138],simde_mm_xor_si128(c2[174],simde_mm_xor_si128(c2[3773],simde_mm_xor_si128(c2[682],simde_mm_xor_si128(c2[2837],simde_mm_xor_si128(c2[1632],simde_mm_xor_si128(c2[1392],simde_mm_xor_si128(c2[1425],simde_mm_xor_si128(c2[3106],simde_mm_xor_si128(c2[1186],c2[946])))))))))))))))))))))))))))))))))));
+
+//row: 32
+     d2[384]=simde_mm_xor_si128(c2[3611],simde_mm_xor_si128(c2[3371],simde_mm_xor_si128(c2[2882],simde_mm_xor_si128(c2[2642],simde_mm_xor_si128(c2[2162],simde_mm_xor_si128(c2[965],simde_mm_xor_si128(c2[511],simde_mm_xor_si128(c2[271],simde_mm_xor_si128(c2[512],simde_mm_xor_si128(c2[1953],simde_mm_xor_si128(c2[1739],simde_mm_xor_si128(c2[1499],simde_mm_xor_si128(c2[3659],simde_mm_xor_si128(c2[797],simde_mm_xor_si128(c2[557],simde_mm_xor_si128(c2[1275],simde_mm_xor_si128(c2[1035],simde_mm_xor_si128(c2[1280],simde_mm_xor_si128(c2[340],simde_mm_xor_si128(c2[100],simde_mm_xor_si128(c2[3463],simde_mm_xor_si128(c2[344],simde_mm_xor_si128(c2[3246],simde_mm_xor_si128(c2[3006],simde_mm_xor_si128(c2[2045],simde_mm_xor_si128(c2[2040],simde_mm_xor_si128(c2[2069],simde_mm_xor_si128(c2[1829],simde_mm_xor_si128(c2[147],simde_mm_xor_si128(c2[3746],simde_mm_xor_si128(c2[2792],simde_mm_xor_si128(c2[3048],simde_mm_xor_si128(c2[2808],simde_mm_xor_si128(c2[1616],simde_mm_xor_si128(c2[2592],simde_mm_xor_si128(c2[2352],simde_mm_xor_si128(c2[680],simde_mm_xor_si128(c2[3074],simde_mm_xor_si128(c2[3347],simde_mm_xor_si128(c2[3107],simde_mm_xor_si128(c2[1177],simde_mm_xor_si128(c2[937],c2[2616]))))))))))))))))))))))))))))))))))))))))));
+
+//row: 33
+     d2[396]=simde_mm_xor_si128(c2[2166],simde_mm_xor_si128(c2[1449],simde_mm_xor_si128(c2[969],simde_mm_xor_si128(c2[2905],simde_mm_xor_si128(c2[3146],simde_mm_xor_si128(c2[748],simde_mm_xor_si128(c2[294],simde_mm_xor_si128(c2[2454],simde_mm_xor_si128(c2[1258],simde_mm_xor_si128(c2[3203],simde_mm_xor_si128(c2[3681],simde_mm_xor_si128(c2[75],simde_mm_xor_si128(c2[2746],simde_mm_xor_si128(c2[2258],simde_mm_xor_si128(c2[2978],simde_mm_xor_si128(c2[1801],simde_mm_xor_si128(c2[840],simde_mm_xor_si128(c2[624],simde_mm_xor_si128(c2[2553],simde_mm_xor_si128(c2[1587],simde_mm_xor_si128(c2[1615],simde_mm_xor_si128(c2[411],simde_mm_xor_si128(c2[899],simde_mm_xor_si128(c2[1159],simde_mm_xor_si128(c2[3314],simde_mm_xor_si128(c2[1881],simde_mm_xor_si128(c2[1902],simde_mm_xor_si128(c2[3583],c2[1423]))))))))))))))))))))))))))));
+
+//row: 34
+     d2[408]=simde_mm_xor_si128(c2[3609],simde_mm_xor_si128(c2[3369],simde_mm_xor_si128(c2[2171],simde_mm_xor_si128(c2[2880],simde_mm_xor_si128(c2[2640],simde_mm_xor_si128(c2[1442],simde_mm_xor_si128(c2[2160],simde_mm_xor_si128(c2[962],simde_mm_xor_si128(c2[3130],simde_mm_xor_si128(c2[509],simde_mm_xor_si128(c2[269],simde_mm_xor_si128(c2[2910],simde_mm_xor_si128(c2[510],simde_mm_xor_si128(c2[3151],simde_mm_xor_si128(c2[1951],simde_mm_xor_si128(c2[993],simde_mm_xor_si128(c2[753],simde_mm_xor_si128(c2[1737],simde_mm_xor_si128(c2[1497],simde_mm_xor_si128(c2[299],simde_mm_xor_si128(c2[3657],simde_mm_xor_si128(c2[2699],simde_mm_xor_si128(c2[2459],simde_mm_xor_si128(c2[795],simde_mm_xor_si128(c2[555],simde_mm_xor_si128(c2[3196],simde_mm_xor_si128(c2[1273],simde_mm_xor_si128(c2[1033],simde_mm_xor_si128(c2[3674],simde_mm_xor_si128(c2[1278],simde_mm_xor_si128(c2[80],simde_mm_xor_si128(c2[338],simde_mm_xor_si128(c2[98],simde_mm_xor_si128(c2[2739],simde_mm_xor_si128(c2[3461],simde_mm_xor_si128(c2[2263],simde_mm_xor_si128(c2[342],simde_mm_xor_si128(c2[3223],simde_mm_xor_si128(c2[2983],simde_mm_xor_si128(c2[3244],simde_mm_xor_si128(c2[3004],simde_mm_xor_si128(c2[1806],simde_mm_xor_si128(c2[2043],simde_mm_xor_si128(c2[1085],simde_mm_xor_si128(c2[845],simde_mm_xor_si128(c2[2067],simde_mm_xor_si128(c2[1827],simde_mm_xor_si128(c2[629],simde_mm_xor_si128(c2[145],simde_mm_xor_si128(c2[3744],simde_mm_xor_si128(c2[2546],simde_mm_xor_si128(c2[2790],simde_mm_xor_si128(c2[1832],simde_mm_xor_si128(c2[1592],simde_mm_xor_si128(c2[3058],simde_mm_xor_si128(c2[2818],simde_mm_xor_si128(c2[1608],simde_mm_xor_si128(c2[1614],simde_mm_xor_si128(c2[656],simde_mm_xor_si128(c2[416],simde_mm_xor_si128(c2[2602],simde_mm_xor_si128(c2[2362],simde_mm_xor_si128(c2[1152],simde_mm_xor_si128(c2[678],simde_mm_xor_si128(c2[3319],simde_mm_xor_si128(c2[3072],simde_mm_xor_si128(c2[2114],simde_mm_xor_si128(c2[1874],simde_mm_xor_si128(c2[3345],simde_mm_xor_si128(c2[3105],simde_mm_xor_si128(c2[1907],simde_mm_xor_si128(c2[1187],simde_mm_xor_si128(c2[947],simde_mm_xor_si128(c2[3576],simde_mm_xor_si128(c2[2626],simde_mm_xor_si128(c2[1656],c2[1416]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 35
+     d2[420]=simde_mm_xor_si128(c2[0],simde_mm_xor_si128(c2[3611],simde_mm_xor_si128(c2[2882],simde_mm_xor_si128(c2[2402],simde_mm_xor_si128(c2[751],simde_mm_xor_si128(c2[511],simde_mm_xor_si128(c2[752],simde_mm_xor_si128(c2[2193],simde_mm_xor_si128(c2[3148],simde_mm_xor_si128(c2[1979],simde_mm_xor_si128(c2[1739],simde_mm_xor_si128(c2[48],simde_mm_xor_si128(c2[1037],simde_mm_xor_si128(c2[797],simde_mm_xor_si128(c2[1275],simde_mm_xor_si128(c2[1520],simde_mm_xor_si128(c2[340],simde_mm_xor_si128(c2[3703],simde_mm_xor_si128(c2[584],simde_mm_xor_si128(c2[3246],simde_mm_xor_si128(c2[2285],simde_mm_xor_si128(c2[2051],simde_mm_xor_si128(c2[2309],simde_mm_xor_si128(c2[2069],simde_mm_xor_si128(c2[147],simde_mm_xor_si128(c2[3032],simde_mm_xor_si128(c2[3048],simde_mm_xor_si128(c2[1856],simde_mm_xor_si128(c2[2592],simde_mm_xor_si128(c2[920],simde_mm_xor_si128(c2[3314],simde_mm_xor_si128(c2[3587],simde_mm_xor_si128(c2[3347],simde_mm_xor_si128(c2[1177],c2[2856]))))))))))))))))))))))))))))))))));
+
+//row: 36
+     d2[432]=simde_mm_xor_si128(c2[2161],simde_mm_xor_si128(c2[1737],c2[2578]));
+
+//row: 37
+     d2[444]=simde_mm_xor_si128(c2[481],simde_mm_xor_si128(c2[2880],simde_mm_xor_si128(c2[3603],simde_mm_xor_si128(c2[2163],simde_mm_xor_si128(c2[3123],simde_mm_xor_si128(c2[1683],simde_mm_xor_si128(c2[1232],simde_mm_xor_si128(c2[3631],simde_mm_xor_si128(c2[1473],simde_mm_xor_si128(c2[33],simde_mm_xor_si128(c2[2914],simde_mm_xor_si128(c2[1714],simde_mm_xor_si128(c2[1474],simde_mm_xor_si128(c2[2448],simde_mm_xor_si128(c2[1008],simde_mm_xor_si128(c2[769],simde_mm_xor_si128(c2[3408],simde_mm_xor_si128(c2[3168],simde_mm_xor_si128(c2[1518],simde_mm_xor_si128(c2[78],simde_mm_xor_si128(c2[1996],simde_mm_xor_si128(c2[556],simde_mm_xor_si128(c2[2241],simde_mm_xor_si128(c2[801],simde_mm_xor_si128(c2[1061],simde_mm_xor_si128(c2[3460],simde_mm_xor_si128(c2[585],simde_mm_xor_si128(c2[2984],simde_mm_xor_si128(c2[1305],simde_mm_xor_si128(c2[105],simde_mm_xor_si128(c2[3704],simde_mm_xor_si128(c2[128],simde_mm_xor_si128(c2[2527],simde_mm_xor_si128(c2[3006],simde_mm_xor_si128(c2[1806],simde_mm_xor_si128(c2[1566],simde_mm_xor_si128(c2[2790],simde_mm_xor_si128(c2[1350],simde_mm_xor_si128(c2[868],simde_mm_xor_si128(c2[3267],simde_mm_xor_si128(c2[3753],simde_mm_xor_si128(c2[2553],simde_mm_xor_si128(c2[2313],simde_mm_xor_si128(c2[3769],simde_mm_xor_si128(c2[2329],simde_mm_xor_si128(c2[2577],simde_mm_xor_si128(c2[1377],simde_mm_xor_si128(c2[1137],simde_mm_xor_si128(c2[3313],simde_mm_xor_si128(c2[1873],simde_mm_xor_si128(c2[1641],simde_mm_xor_si128(c2[201],simde_mm_xor_si128(c2[196],simde_mm_xor_si128(c2[2835],simde_mm_xor_si128(c2[2595],simde_mm_xor_si128(c2[217],simde_mm_xor_si128(c2[2616],simde_mm_xor_si128(c2[1898],simde_mm_xor_si128(c2[458],simde_mm_xor_si128(c2[3577],simde_mm_xor_si128(c2[2377],c2[2137])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 38
+     d2[456]=simde_mm_xor_si128(c2[2890],simde_mm_xor_si128(c2[2650],simde_mm_xor_si128(c2[1921],simde_mm_xor_si128(c2[1441],simde_mm_xor_si128(c2[3629],simde_mm_xor_si128(c2[3389],simde_mm_xor_si128(c2[3630],simde_mm_xor_si128(c2[1232],simde_mm_xor_si128(c2[989],simde_mm_xor_si128(c2[1018],simde_mm_xor_si128(c2[778],simde_mm_xor_si128(c2[2938],simde_mm_xor_si128(c2[76],simde_mm_xor_si128(c2[3675],simde_mm_xor_si128(c2[314],simde_mm_xor_si128(c2[559],simde_mm_xor_si128(c2[3218],simde_mm_xor_si128(c2[2742],simde_mm_xor_si128(c2[3462],simde_mm_xor_si128(c2[2285],simde_mm_xor_si128(c2[1324],simde_mm_xor_si128(c2[1809],simde_mm_xor_si128(c2[1348],simde_mm_xor_si128(c2[1108],simde_mm_xor_si128(c2[3025],simde_mm_xor_si128(c2[2071],simde_mm_xor_si128(c2[2099],simde_mm_xor_si128(c2[895],simde_mm_xor_si128(c2[1643],simde_mm_xor_si128(c2[3798],simde_mm_xor_si128(c2[2353],simde_mm_xor_si128(c2[2626],simde_mm_xor_si128(c2[2386],simde_mm_xor_si128(c2[216],c2[1907]))))))))))))))))))))))))))))))))));
+
+//row: 39
+     d2[468]=simde_mm_xor_si128(c2[1682],simde_mm_xor_si128(c2[1442],simde_mm_xor_si128(c2[965],simde_mm_xor_si128(c2[725],simde_mm_xor_si128(c2[245],simde_mm_xor_si128(c2[3125],simde_mm_xor_si128(c2[2433],simde_mm_xor_si128(c2[2193],simde_mm_xor_si128(c2[2434],simde_mm_xor_si128(c2[24],simde_mm_xor_si128(c2[3649],simde_mm_xor_si128(c2[3409],simde_mm_xor_si128(c2[1730],simde_mm_xor_si128(c2[2719],simde_mm_xor_si128(c2[2479],simde_mm_xor_si128(c2[3197],simde_mm_xor_si128(c2[2957],simde_mm_xor_si128(c2[3202],simde_mm_xor_si128(c2[2262],simde_mm_xor_si128(c2[2022],simde_mm_xor_si128(c2[1546],simde_mm_xor_si128(c2[2266],simde_mm_xor_si128(c2[1329],simde_mm_xor_si128(c2[1089],simde_mm_xor_si128(c2[128],simde_mm_xor_si128(c2[152],simde_mm_xor_si128(c2[3751],simde_mm_xor_si128(c2[2069],simde_mm_xor_si128(c2[1829],simde_mm_xor_si128(c2[875],simde_mm_xor_si128(c2[1131],simde_mm_xor_si128(c2[891],simde_mm_xor_si128(c2[3538],simde_mm_xor_si128(c2[1136],simde_mm_xor_si128(c2[675],simde_mm_xor_si128(c2[435],simde_mm_xor_si128(c2[2602],simde_mm_xor_si128(c2[1157],simde_mm_xor_si128(c2[1418],simde_mm_xor_si128(c2[1178],simde_mm_xor_si128(c2[3099],simde_mm_xor_si128(c2[2859],c2[699]))))))))))))))))))))))))))))))))))))))))));
+
+//row: 40
+     d2[480]=simde_mm_xor_si128(c2[1925],simde_mm_xor_si128(c2[9],simde_mm_xor_si128(c2[1208],simde_mm_xor_si128(c2[3131],simde_mm_xor_si128(c2[728],simde_mm_xor_si128(c2[2651],simde_mm_xor_si128(c2[2664],simde_mm_xor_si128(c2[748],simde_mm_xor_si128(c2[2905],simde_mm_xor_si128(c2[989],simde_mm_xor_si128(c2[507],simde_mm_xor_si128(c2[2670],simde_mm_xor_si128(c2[2430],simde_mm_xor_si128(c2[53],simde_mm_xor_si128(c2[1976],simde_mm_xor_si128(c2[2213],simde_mm_xor_si128(c2[537],simde_mm_xor_si128(c2[297],simde_mm_xor_si128(c2[1734],simde_mm_xor_si128(c2[2962],simde_mm_xor_si128(c2[1034],simde_mm_xor_si128(c2[3440],simde_mm_xor_si128(c2[1512],simde_mm_xor_si128(c2[3673],simde_mm_xor_si128(c2[1757],simde_mm_xor_si128(c2[2505],simde_mm_xor_si128(c2[577],simde_mm_xor_si128(c2[2017],simde_mm_xor_si128(c2[101],simde_mm_xor_si128(c2[2737],simde_mm_xor_si128(c2[1061],simde_mm_xor_si128(c2[821],simde_mm_xor_si128(c2[1560],simde_mm_xor_si128(c2[3483],simde_mm_xor_si128(c2[611],simde_mm_xor_si128(c2[2762],simde_mm_xor_si128(c2[2522],simde_mm_xor_si128(c2[395],simde_mm_xor_si128(c2[2306],simde_mm_xor_si128(c2[2312],simde_mm_xor_si128(c2[384],simde_mm_xor_si128(c2[1346],simde_mm_xor_si128(c2[3509],simde_mm_xor_si128(c2[3269],simde_mm_xor_si128(c2[1374],simde_mm_xor_si128(c2[3297],simde_mm_xor_si128(c2[170],simde_mm_xor_si128(c2[2333],simde_mm_xor_si128(c2[2093],simde_mm_xor_si128(c2[918],simde_mm_xor_si128(c2[2841],simde_mm_xor_si128(c2[3073],simde_mm_xor_si128(c2[1157],simde_mm_xor_si128(c2[1640],simde_mm_xor_si128(c2[3803],simde_mm_xor_si128(c2[3563],simde_mm_xor_si128(c2[1661],simde_mm_xor_si128(c2[3584],simde_mm_xor_si128(c2[3342],simde_mm_xor_si128(c2[1426],simde_mm_xor_si128(c2[1182],simde_mm_xor_si128(c2[3345],c2[3105]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 41
+     d2[492]=simde_mm_xor_si128(c2[2402],simde_mm_xor_si128(c2[2162],simde_mm_xor_si128(c2[1445],simde_mm_xor_si128(c2[965],simde_mm_xor_si128(c2[3153],simde_mm_xor_si128(c2[2913],simde_mm_xor_si128(c2[3154],simde_mm_xor_si128(c2[744],simde_mm_xor_si128(c2[753],simde_mm_xor_si128(c2[530],simde_mm_xor_si128(c2[290],simde_mm_xor_si128(c2[2450],simde_mm_xor_si128(c2[3439],simde_mm_xor_si128(c2[3199],simde_mm_xor_si128(c2[3677],simde_mm_xor_si128(c2[83],simde_mm_xor_si128(c2[2742],simde_mm_xor_si128(c2[2266],simde_mm_xor_si128(c2[2986],simde_mm_xor_si128(c2[1809],simde_mm_xor_si128(c2[848],simde_mm_xor_si128(c2[1800],simde_mm_xor_si128(c2[872],simde_mm_xor_si128(c2[632],simde_mm_xor_si128(c2[2549],simde_mm_xor_si128(c2[1595],simde_mm_xor_si128(c2[1611],simde_mm_xor_si128(c2[419],simde_mm_xor_si128(c2[1155],simde_mm_xor_si128(c2[3322],simde_mm_xor_si128(c2[1877],simde_mm_xor_si128(c2[2138],simde_mm_xor_si128(c2[1898],simde_mm_xor_si128(c2[3579],c2[1419]))))))))))))))))))))))))))))))))));
+  }
+}
+#endif
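// Each "row: N" statement above is one fully unrolled parity row: the
// output vector d2[...] is the XOR of a fixed, Zc-dependent list of
// input vectors c2[...] whose indices encode the circulant shifts. A
// hedged sketch of the same reduction written as a loop (xor_row and
// the index list are illustrative, not taken from a real base-graph row):
#include <simde/x86/sse2.h>

static simde__m128i xor_row(const simde__m128i *c2, const int *idx, int n) {
  simde__m128i acc = c2[idx[0]];
  for (int k = 1; k < n; k++)
    acc = simde_mm_xor_si128(acc, c2[idx[k]]); /* the fold the nested calls unroll */
  return acc;
}

int main(void) {
  simde__m128i c2[4];
  for (int i = 0; i < 4; i++) c2[i] = simde_mm_setzero_si128();
  const int idx[3] = {0, 2, 3};         /* illustrative offsets */
  simde__m128i d = xor_row(c2, idx, 3); /* all-zero inputs -> zero row */
  (void)d;
  return 0;
}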
diff --git a/openair1/PHY/CODING/nrLDPC_encoder/ldpc_BG2_Zc208_byte.c b/openair1/PHY/CODING/nrLDPC_encoder/ldpc_BG2_Zc208_byte.c
index 3453a2f3d15da3b53d820ab9eacdc0b3aa019029..82393340c1edb32e9a910efc73a95266e14de63d 100644
--- a/openair1/PHY/CODING/nrLDPC_encoder/ldpc_BG2_Zc208_byte.c
+++ b/openair1/PHY/CODING/nrLDPC_encoder/ldpc_BG2_Zc208_byte.c
@@ -1,9 +1,8 @@
 #include "PHY/sse_intrin.h"
 // generated code for Zc=208, byte encoding
 static inline __attribute__ ((no_sanitize("address", "undefined"))) void ldpc_BG2_Zc208_byte(uint8_t *c,uint8_t *d) {
-  __m128i *csimd=(__m128i *)c,*dsimd=(__m128i *)d;
-
-  __m128i *c2,*d2;
+  simde__m128i *csimd=(simde__m128i *)c,*dsimd=(simde__m128i *)d;
+  simde__m128i *c2,*d2;
 
   int i2;
   for (i2=0; i2<13; i2++) {
@@ -11,129 +10,129 @@ static inline __attribute__ ((no_sanitize("address", "undefined"))) void ldpc_BG
      d2=&dsimd[i2];
 
 //row: 0
-     d2[0]=_mm_xor_si128(c2[3648],_mm_xor_si128(c2[261],_mm_xor_si128(c2[3387],_mm_xor_si128(c2[547],_mm_xor_si128(c2[556],_mm_xor_si128(c2[808],_mm_xor_si128(c2[3962],_mm_xor_si128(c2[3954],_mm_xor_si128(c2[1128],_mm_xor_si128(c2[2679],_mm_xor_si128(c2[3720],_mm_xor_si128(c2[624],_mm_xor_si128(c2[1675],_mm_xor_si128(c2[365],_mm_xor_si128(c2[1436],_mm_xor_si128(c2[3776],_mm_xor_si128(c2[948],_mm_xor_si128(c2[2247],_mm_xor_si128(c2[2768],_mm_xor_si128(c2[183],_mm_xor_si128(c2[702],_mm_xor_si128(c2[3588],_mm_xor_si128(c2[996],_mm_xor_si128(c2[3859],_mm_xor_si128(c2[3354],_mm_xor_si128(c2[1025],c2[4143]))))))))))))))))))))))))));
+     d2[0]=simde_mm_xor_si128(c2[3648],simde_mm_xor_si128(c2[261],simde_mm_xor_si128(c2[3387],simde_mm_xor_si128(c2[547],simde_mm_xor_si128(c2[556],simde_mm_xor_si128(c2[808],simde_mm_xor_si128(c2[3962],simde_mm_xor_si128(c2[3954],simde_mm_xor_si128(c2[1128],simde_mm_xor_si128(c2[2679],simde_mm_xor_si128(c2[3720],simde_mm_xor_si128(c2[624],simde_mm_xor_si128(c2[1675],simde_mm_xor_si128(c2[365],simde_mm_xor_si128(c2[1436],simde_mm_xor_si128(c2[3776],simde_mm_xor_si128(c2[948],simde_mm_xor_si128(c2[2247],simde_mm_xor_si128(c2[2768],simde_mm_xor_si128(c2[183],simde_mm_xor_si128(c2[702],simde_mm_xor_si128(c2[3588],simde_mm_xor_si128(c2[996],simde_mm_xor_si128(c2[3859],simde_mm_xor_si128(c2[3354],simde_mm_xor_si128(c2[1025],c2[4143]))))))))))))))))))))))))));
 
 //row: 1
-     d2[13]=_mm_xor_si128(c2[3908],_mm_xor_si128(c2[3648],_mm_xor_si128(c2[261],_mm_xor_si128(c2[3387],_mm_xor_si128(c2[807],_mm_xor_si128(c2[547],_mm_xor_si128(c2[556],_mm_xor_si128(c2[808],_mm_xor_si128(c2[63],_mm_xor_si128(c2[3962],_mm_xor_si128(c2[3954],_mm_xor_si128(c2[1388],_mm_xor_si128(c2[1128],_mm_xor_si128(c2[2679],_mm_xor_si128(c2[3720],_mm_xor_si128(c2[624],_mm_xor_si128(c2[1675],_mm_xor_si128(c2[365],_mm_xor_si128(c2[1436],_mm_xor_si128(c2[3776],_mm_xor_si128(c2[1208],_mm_xor_si128(c2[948],_mm_xor_si128(c2[2247],_mm_xor_si128(c2[2768],_mm_xor_si128(c2[183],_mm_xor_si128(c2[702],_mm_xor_si128(c2[3588],_mm_xor_si128(c2[996],_mm_xor_si128(c2[3859],_mm_xor_si128(c2[3614],_mm_xor_si128(c2[3354],_mm_xor_si128(c2[1025],c2[4143]))))))))))))))))))))))))))))))));
+     d2[13]=simde_mm_xor_si128(c2[3908],simde_mm_xor_si128(c2[3648],simde_mm_xor_si128(c2[261],simde_mm_xor_si128(c2[3387],simde_mm_xor_si128(c2[807],simde_mm_xor_si128(c2[547],simde_mm_xor_si128(c2[556],simde_mm_xor_si128(c2[808],simde_mm_xor_si128(c2[63],simde_mm_xor_si128(c2[3962],simde_mm_xor_si128(c2[3954],simde_mm_xor_si128(c2[1388],simde_mm_xor_si128(c2[1128],simde_mm_xor_si128(c2[2679],simde_mm_xor_si128(c2[3720],simde_mm_xor_si128(c2[624],simde_mm_xor_si128(c2[1675],simde_mm_xor_si128(c2[365],simde_mm_xor_si128(c2[1436],simde_mm_xor_si128(c2[3776],simde_mm_xor_si128(c2[1208],simde_mm_xor_si128(c2[948],simde_mm_xor_si128(c2[2247],simde_mm_xor_si128(c2[2768],simde_mm_xor_si128(c2[183],simde_mm_xor_si128(c2[702],simde_mm_xor_si128(c2[3588],simde_mm_xor_si128(c2[996],simde_mm_xor_si128(c2[3859],simde_mm_xor_si128(c2[3614],simde_mm_xor_si128(c2[3354],simde_mm_xor_si128(c2[1025],c2[4143]))))))))))))))))))))))))))))))));
 
 //row: 2
-     d2[26]=_mm_xor_si128(c2[3908],_mm_xor_si128(c2[3648],_mm_xor_si128(c2[521],_mm_xor_si128(c2[261],_mm_xor_si128(c2[3387],_mm_xor_si128(c2[807],_mm_xor_si128(c2[547],_mm_xor_si128(c2[556],_mm_xor_si128(c2[808],_mm_xor_si128(c2[63],_mm_xor_si128(c2[3962],_mm_xor_si128(c2[3954],_mm_xor_si128(c2[1388],_mm_xor_si128(c2[1128],_mm_xor_si128(c2[2939],_mm_xor_si128(c2[2679],_mm_xor_si128(c2[3720],_mm_xor_si128(c2[884],_mm_xor_si128(c2[624],_mm_xor_si128(c2[1675],_mm_xor_si128(c2[365],_mm_xor_si128(c2[1696],_mm_xor_si128(c2[1436],_mm_xor_si128(c2[3776],_mm_xor_si128(c2[1208],_mm_xor_si128(c2[948],_mm_xor_si128(c2[2507],_mm_xor_si128(c2[2247],_mm_xor_si128(c2[2768],_mm_xor_si128(c2[443],_mm_xor_si128(c2[183],_mm_xor_si128(c2[702],_mm_xor_si128(c2[3848],_mm_xor_si128(c2[3588],_mm_xor_si128(c2[996],_mm_xor_si128(c2[3859],_mm_xor_si128(c2[3614],_mm_xor_si128(c2[3354],_mm_xor_si128(c2[1285],_mm_xor_si128(c2[1025],c2[4143]))))))))))))))))))))))))))))))))))))))));
+     d2[26]=simde_mm_xor_si128(c2[3908],simde_mm_xor_si128(c2[3648],simde_mm_xor_si128(c2[521],simde_mm_xor_si128(c2[261],simde_mm_xor_si128(c2[3387],simde_mm_xor_si128(c2[807],simde_mm_xor_si128(c2[547],simde_mm_xor_si128(c2[556],simde_mm_xor_si128(c2[808],simde_mm_xor_si128(c2[63],simde_mm_xor_si128(c2[3962],simde_mm_xor_si128(c2[3954],simde_mm_xor_si128(c2[1388],simde_mm_xor_si128(c2[1128],simde_mm_xor_si128(c2[2939],simde_mm_xor_si128(c2[2679],simde_mm_xor_si128(c2[3720],simde_mm_xor_si128(c2[884],simde_mm_xor_si128(c2[624],simde_mm_xor_si128(c2[1675],simde_mm_xor_si128(c2[365],simde_mm_xor_si128(c2[1696],simde_mm_xor_si128(c2[1436],simde_mm_xor_si128(c2[3776],simde_mm_xor_si128(c2[1208],simde_mm_xor_si128(c2[948],simde_mm_xor_si128(c2[2507],simde_mm_xor_si128(c2[2247],simde_mm_xor_si128(c2[2768],simde_mm_xor_si128(c2[443],simde_mm_xor_si128(c2[183],simde_mm_xor_si128(c2[702],simde_mm_xor_si128(c2[3848],simde_mm_xor_si128(c2[3588],simde_mm_xor_si128(c2[996],simde_mm_xor_si128(c2[3859],simde_mm_xor_si128(c2[3614],simde_mm_xor_si128(c2[3354],simde_mm_xor_si128(c2[1285],simde_mm_xor_si128(c2[1025],c2[4143]))))))))))))))))))))))))))))))))))))))));
 
 //row: 3
-     d2[39]=_mm_xor_si128(c2[3648],_mm_xor_si128(c2[261],_mm_xor_si128(c2[3387],_mm_xor_si128(c2[547],_mm_xor_si128(c2[556],_mm_xor_si128(c2[1068],_mm_xor_si128(c2[808],_mm_xor_si128(c2[3962],_mm_xor_si128(c2[55],_mm_xor_si128(c2[3954],_mm_xor_si128(c2[1128],_mm_xor_si128(c2[2679],_mm_xor_si128(c2[3720],_mm_xor_si128(c2[624],_mm_xor_si128(c2[1675],_mm_xor_si128(c2[625],_mm_xor_si128(c2[365],_mm_xor_si128(c2[1436],_mm_xor_si128(c2[4036],_mm_xor_si128(c2[3776],_mm_xor_si128(c2[948],_mm_xor_si128(c2[2247],_mm_xor_si128(c2[3028],_mm_xor_si128(c2[2768],_mm_xor_si128(c2[183],_mm_xor_si128(c2[962],_mm_xor_si128(c2[702],_mm_xor_si128(c2[3588],_mm_xor_si128(c2[996],_mm_xor_si128(c2[4119],_mm_xor_si128(c2[3859],_mm_xor_si128(c2[3354],_mm_xor_si128(c2[1025],_mm_xor_si128(c2[244],c2[4143]))))))))))))))))))))))))))))))))));
+     d2[39]=simde_mm_xor_si128(c2[3648],simde_mm_xor_si128(c2[261],simde_mm_xor_si128(c2[3387],simde_mm_xor_si128(c2[547],simde_mm_xor_si128(c2[556],simde_mm_xor_si128(c2[1068],simde_mm_xor_si128(c2[808],simde_mm_xor_si128(c2[3962],simde_mm_xor_si128(c2[55],simde_mm_xor_si128(c2[3954],simde_mm_xor_si128(c2[1128],simde_mm_xor_si128(c2[2679],simde_mm_xor_si128(c2[3720],simde_mm_xor_si128(c2[624],simde_mm_xor_si128(c2[1675],simde_mm_xor_si128(c2[625],simde_mm_xor_si128(c2[365],simde_mm_xor_si128(c2[1436],simde_mm_xor_si128(c2[4036],simde_mm_xor_si128(c2[3776],simde_mm_xor_si128(c2[948],simde_mm_xor_si128(c2[2247],simde_mm_xor_si128(c2[3028],simde_mm_xor_si128(c2[2768],simde_mm_xor_si128(c2[183],simde_mm_xor_si128(c2[962],simde_mm_xor_si128(c2[702],simde_mm_xor_si128(c2[3588],simde_mm_xor_si128(c2[996],simde_mm_xor_si128(c2[4119],simde_mm_xor_si128(c2[3859],simde_mm_xor_si128(c2[3354],simde_mm_xor_si128(c2[1025],simde_mm_xor_si128(c2[244],c2[4143]))))))))))))))))))))))))))))))))));
 
 //row: 4
-     d2[52]=_mm_xor_si128(c2[1043],_mm_xor_si128(c2[783],_mm_xor_si128(c2[1568],_mm_xor_si128(c2[522],_mm_xor_si128(c2[2862],_mm_xor_si128(c2[2114],_mm_xor_si128(c2[1854],_mm_xor_si128(c2[1850],_mm_xor_si128(c2[2115],_mm_xor_si128(c2[2887],_mm_xor_si128(c2[1357],_mm_xor_si128(c2[1097],_mm_xor_si128(c2[1102],_mm_xor_si128(c2[2682],_mm_xor_si128(c2[2422],_mm_xor_si128(c2[3986],_mm_xor_si128(c2[868],_mm_xor_si128(c2[1931],_mm_xor_si128(c2[2969],_mm_xor_si128(c2[1672],_mm_xor_si128(c2[2730],_mm_xor_si128(c2[911],_mm_xor_si128(c2[2502],_mm_xor_si128(c2[2242],_mm_xor_si128(c2[3541],_mm_xor_si128(c2[4062],_mm_xor_si128(c2[1490],_mm_xor_si128(c2[2009],_mm_xor_si128(c2[736],_mm_xor_si128(c2[2290],_mm_xor_si128(c2[994],_mm_xor_si128(c2[762],_mm_xor_si128(c2[502],_mm_xor_si128(c2[2319],c2[1278]))))))))))))))))))))))))))))))))));
+     d2[52]=simde_mm_xor_si128(c2[1043],simde_mm_xor_si128(c2[783],simde_mm_xor_si128(c2[1568],simde_mm_xor_si128(c2[522],simde_mm_xor_si128(c2[2862],simde_mm_xor_si128(c2[2114],simde_mm_xor_si128(c2[1854],simde_mm_xor_si128(c2[1850],simde_mm_xor_si128(c2[2115],simde_mm_xor_si128(c2[2887],simde_mm_xor_si128(c2[1357],simde_mm_xor_si128(c2[1097],simde_mm_xor_si128(c2[1102],simde_mm_xor_si128(c2[2682],simde_mm_xor_si128(c2[2422],simde_mm_xor_si128(c2[3986],simde_mm_xor_si128(c2[868],simde_mm_xor_si128(c2[1931],simde_mm_xor_si128(c2[2969],simde_mm_xor_si128(c2[1672],simde_mm_xor_si128(c2[2730],simde_mm_xor_si128(c2[911],simde_mm_xor_si128(c2[2502],simde_mm_xor_si128(c2[2242],simde_mm_xor_si128(c2[3541],simde_mm_xor_si128(c2[4062],simde_mm_xor_si128(c2[1490],simde_mm_xor_si128(c2[2009],simde_mm_xor_si128(c2[736],simde_mm_xor_si128(c2[2290],simde_mm_xor_si128(c2[994],simde_mm_xor_si128(c2[762],simde_mm_xor_si128(c2[502],simde_mm_xor_si128(c2[2319],c2[1278]))))))))))))))))))))))))))))))))));
 
 //row: 5
-     d2[65]=_mm_xor_si128(c2[3392],_mm_xor_si128(c2[3132],_mm_xor_si128(c2[3904],_mm_xor_si128(c2[2871],_mm_xor_si128(c2[2088],_mm_xor_si128(c2[291],_mm_xor_si128(c2[31],_mm_xor_si128(c2[27],_mm_xor_si128(c2[292],_mm_xor_si128(c2[289],_mm_xor_si128(c2[3693],_mm_xor_si128(c2[3433],_mm_xor_si128(c2[3438],_mm_xor_si128(c2[859],_mm_xor_si128(c2[599],_mm_xor_si128(c2[2163],_mm_xor_si128(c2[3204],_mm_xor_si128(c2[108],_mm_xor_si128(c2[1146],_mm_xor_si128(c2[4008],_mm_xor_si128(c2[920],_mm_xor_si128(c2[3260],_mm_xor_si128(c2[1172],_mm_xor_si128(c2[679],_mm_xor_si128(c2[419],_mm_xor_si128(c2[1718],_mm_xor_si128(c2[2239],_mm_xor_si128(c2[3826],_mm_xor_si128(c2[186],_mm_xor_si128(c2[1230],_mm_xor_si128(c2[3072],_mm_xor_si128(c2[480],_mm_xor_si128(c2[3330],_mm_xor_si128(c2[3098],_mm_xor_si128(c2[2838],_mm_xor_si128(c2[496],c2[3614]))))))))))))))))))))))))))))))))))));
+     d2[65]=simde_mm_xor_si128(c2[3392],simde_mm_xor_si128(c2[3132],simde_mm_xor_si128(c2[3904],simde_mm_xor_si128(c2[2871],simde_mm_xor_si128(c2[2088],simde_mm_xor_si128(c2[291],simde_mm_xor_si128(c2[31],simde_mm_xor_si128(c2[27],simde_mm_xor_si128(c2[292],simde_mm_xor_si128(c2[289],simde_mm_xor_si128(c2[3693],simde_mm_xor_si128(c2[3433],simde_mm_xor_si128(c2[3438],simde_mm_xor_si128(c2[859],simde_mm_xor_si128(c2[599],simde_mm_xor_si128(c2[2163],simde_mm_xor_si128(c2[3204],simde_mm_xor_si128(c2[108],simde_mm_xor_si128(c2[1146],simde_mm_xor_si128(c2[4008],simde_mm_xor_si128(c2[920],simde_mm_xor_si128(c2[3260],simde_mm_xor_si128(c2[1172],simde_mm_xor_si128(c2[679],simde_mm_xor_si128(c2[419],simde_mm_xor_si128(c2[1718],simde_mm_xor_si128(c2[2239],simde_mm_xor_si128(c2[3826],simde_mm_xor_si128(c2[186],simde_mm_xor_si128(c2[1230],simde_mm_xor_si128(c2[3072],simde_mm_xor_si128(c2[480],simde_mm_xor_si128(c2[3330],simde_mm_xor_si128(c2[3098],simde_mm_xor_si128(c2[2838],simde_mm_xor_si128(c2[496],c2[3614]))))))))))))))))))))))))))))))))))));
 
 //row: 6
-     d2[78]=_mm_xor_si128(c2[2869],_mm_xor_si128(c2[2609],_mm_xor_si128(c2[3381],_mm_xor_si128(c2[2348],_mm_xor_si128(c2[1820],_mm_xor_si128(c2[3927],_mm_xor_si128(c2[3667],_mm_xor_si128(c2[3676],_mm_xor_si128(c2[3928],_mm_xor_si128(c2[3183],_mm_xor_si128(c2[2923],_mm_xor_si128(c2[2915],_mm_xor_si128(c2[349],_mm_xor_si128(c2[89],_mm_xor_si128(c2[1640],_mm_xor_si128(c2[2681],_mm_xor_si128(c2[3744],_mm_xor_si128(c2[636],_mm_xor_si128(c2[3485],_mm_xor_si128(c2[397],_mm_xor_si128(c2[2737],_mm_xor_si128(c2[652],_mm_xor_si128(c2[156],_mm_xor_si128(c2[4068],_mm_xor_si128(c2[1208],_mm_xor_si128(c2[1716],_mm_xor_si128(c2[3303],_mm_xor_si128(c2[3822],_mm_xor_si128(c2[1754],_mm_xor_si128(c2[2549],_mm_xor_si128(c2[4116],_mm_xor_si128(c2[2820],_mm_xor_si128(c2[2575],_mm_xor_si128(c2[2315],_mm_xor_si128(c2[4145],_mm_xor_si128(c2[3104],c2[2324]))))))))))))))))))))))))))))))))))));
+     d2[78]=simde_mm_xor_si128(c2[2869],simde_mm_xor_si128(c2[2609],simde_mm_xor_si128(c2[3381],simde_mm_xor_si128(c2[2348],simde_mm_xor_si128(c2[1820],simde_mm_xor_si128(c2[3927],simde_mm_xor_si128(c2[3667],simde_mm_xor_si128(c2[3676],simde_mm_xor_si128(c2[3928],simde_mm_xor_si128(c2[3183],simde_mm_xor_si128(c2[2923],simde_mm_xor_si128(c2[2915],simde_mm_xor_si128(c2[349],simde_mm_xor_si128(c2[89],simde_mm_xor_si128(c2[1640],simde_mm_xor_si128(c2[2681],simde_mm_xor_si128(c2[3744],simde_mm_xor_si128(c2[636],simde_mm_xor_si128(c2[3485],simde_mm_xor_si128(c2[397],simde_mm_xor_si128(c2[2737],simde_mm_xor_si128(c2[652],simde_mm_xor_si128(c2[156],simde_mm_xor_si128(c2[4068],simde_mm_xor_si128(c2[1208],simde_mm_xor_si128(c2[1716],simde_mm_xor_si128(c2[3303],simde_mm_xor_si128(c2[3822],simde_mm_xor_si128(c2[1754],simde_mm_xor_si128(c2[2549],simde_mm_xor_si128(c2[4116],simde_mm_xor_si128(c2[2820],simde_mm_xor_si128(c2[2575],simde_mm_xor_si128(c2[2315],simde_mm_xor_si128(c2[4145],simde_mm_xor_si128(c2[3104],c2[2324]))))))))))))))))))))))))))))))))))));
 
 //row: 7
-     d2[91]=_mm_xor_si128(c2[2602],_mm_xor_si128(c2[2342],_mm_xor_si128(c2[2092],_mm_xor_si128(c2[3127],_mm_xor_si128(c2[2864],_mm_xor_si128(c2[2081],_mm_xor_si128(c2[1831],_mm_xor_si128(c2[3673],_mm_xor_si128(c2[3413],_mm_xor_si128(c2[3150],_mm_xor_si128(c2[3409],_mm_xor_si128(c2[3146],_mm_xor_si128(c2[3674],_mm_xor_si128(c2[3671],_mm_xor_si128(c2[3411],_mm_xor_si128(c2[816],_mm_xor_si128(c2[2916],_mm_xor_si128(c2[2656],_mm_xor_si128(c2[2393],_mm_xor_si128(c2[2661],_mm_xor_si128(c2[2658],_mm_xor_si128(c2[2398],_mm_xor_si128(c2[82],_mm_xor_si128(c2[3981],_mm_xor_si128(c2[3718],_mm_xor_si128(c2[1386],_mm_xor_si128(c2[1123],_mm_xor_si128(c2[2427],_mm_xor_si128(c2[2164],_mm_xor_si128(c2[3490],_mm_xor_si128(c2[3227],_mm_xor_si128(c2[369],_mm_xor_si128(c2[106],_mm_xor_si128(c2[3231],_mm_xor_si128(c2[3228],_mm_xor_si128(c2[2968],_mm_xor_si128(c2[130],_mm_xor_si128(c2[4039],_mm_xor_si128(c2[2470],_mm_xor_si128(c2[2480],_mm_xor_si128(c2[2220],_mm_xor_si128(c2[3774],_mm_xor_si128(c2[4061],_mm_xor_si128(c2[3801],_mm_xor_si128(c2[3538],_mm_xor_si128(c2[941],_mm_xor_si128(c2[678],_mm_xor_si128(c2[1462],_mm_xor_si128(c2[1459],_mm_xor_si128(c2[1199],_mm_xor_si128(c2[3049],_mm_xor_si128(c2[2786],_mm_xor_si128(c2[3568],_mm_xor_si128(c2[3565],_mm_xor_si128(c2[3305],_mm_xor_si128(c2[4090],_mm_xor_si128(c2[2295],_mm_xor_si128(c2[2032],_mm_xor_si128(c2[3849],_mm_xor_si128(c2[3599],_mm_xor_si128(c2[2553],_mm_xor_si128(c2[2550],_mm_xor_si128(c2[2290],_mm_xor_si128(c2[2321],_mm_xor_si128(c2[2061],_mm_xor_si128(c2[1798],_mm_xor_si128(c2[3878],_mm_xor_si128(c2[3615],_mm_xor_si128(c2[2837],_mm_xor_si128(c2[2834],c2[2574]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+     d2[91]=simde_mm_xor_si128(c2[2602],simde_mm_xor_si128(c2[2342],simde_mm_xor_si128(c2[2092],simde_mm_xor_si128(c2[3127],simde_mm_xor_si128(c2[2864],simde_mm_xor_si128(c2[2081],simde_mm_xor_si128(c2[1831],simde_mm_xor_si128(c2[3673],simde_mm_xor_si128(c2[3413],simde_mm_xor_si128(c2[3150],simde_mm_xor_si128(c2[3409],simde_mm_xor_si128(c2[3146],simde_mm_xor_si128(c2[3674],simde_mm_xor_si128(c2[3671],simde_mm_xor_si128(c2[3411],simde_mm_xor_si128(c2[816],simde_mm_xor_si128(c2[2916],simde_mm_xor_si128(c2[2656],simde_mm_xor_si128(c2[2393],simde_mm_xor_si128(c2[2661],simde_mm_xor_si128(c2[2658],simde_mm_xor_si128(c2[2398],simde_mm_xor_si128(c2[82],simde_mm_xor_si128(c2[3981],simde_mm_xor_si128(c2[3718],simde_mm_xor_si128(c2[1386],simde_mm_xor_si128(c2[1123],simde_mm_xor_si128(c2[2427],simde_mm_xor_si128(c2[2164],simde_mm_xor_si128(c2[3490],simde_mm_xor_si128(c2[3227],simde_mm_xor_si128(c2[369],simde_mm_xor_si128(c2[106],simde_mm_xor_si128(c2[3231],simde_mm_xor_si128(c2[3228],simde_mm_xor_si128(c2[2968],simde_mm_xor_si128(c2[130],simde_mm_xor_si128(c2[4039],simde_mm_xor_si128(c2[2470],simde_mm_xor_si128(c2[2480],simde_mm_xor_si128(c2[2220],simde_mm_xor_si128(c2[3774],simde_mm_xor_si128(c2[4061],simde_mm_xor_si128(c2[3801],simde_mm_xor_si128(c2[3538],simde_mm_xor_si128(c2[941],simde_mm_xor_si128(c2[678],simde_mm_xor_si128(c2[1462],simde_mm_xor_si128(c2[1459],simde_mm_xor_si128(c2[1199],simde_mm_xor_si128(c2[3049],simde_mm_xor_si128(c2[2786],simde_mm_xor_si128(c2[3568],simde_mm_xor_si128(c2[3565],simde_mm_xor_si128(c2[3305],simde_mm_xor_si128(c2[4090],simde_mm_xor_si128(c2[2295],simde_mm_xor_si128(c2[2032],simde_mm_xor_si128(c2[3849],simde_mm_xor_si128(c2[3599],simde_mm_xor_si128(c2[2553],simde_mm_xor_si128(c2[2550],simde_mm_xor_si128(c2[2290],simde_mm_xor_si128(c2[2321],simde_mm_xor_si128(c2[2061],simde_mm_xor_si128(c2[1798],simde_mm_xor_si128(c2[3878],simde_mm_xor_si128(c2[3615],simde_mm_xor_si128(c2[2837],simde_mm_xor_si128(c2[2834],c2[2574]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
 
 //row: 8
-     d2[104]=_mm_xor_si128(c2[1310],_mm_xor_si128(c2[1050],_mm_xor_si128(c2[2082],_mm_xor_si128(c2[1822],_mm_xor_si128(c2[789],_mm_xor_si128(c2[1306],_mm_xor_si128(c2[2368],_mm_xor_si128(c2[2108],_mm_xor_si128(c2[2117],_mm_xor_si128(c2[2369],_mm_xor_si128(c2[297],_mm_xor_si128(c2[1624],_mm_xor_si128(c2[1364],_mm_xor_si128(c2[1356],_mm_xor_si128(c2[2949],_mm_xor_si128(c2[2689],_mm_xor_si128(c2[341],_mm_xor_si128(c2[81],_mm_xor_si128(c2[1122],_mm_xor_si128(c2[2445],_mm_xor_si128(c2[2185],_mm_xor_si128(c2[3236],_mm_xor_si128(c2[1926],_mm_xor_si128(c2[3257],_mm_xor_si128(c2[2997],_mm_xor_si128(c2[1178],_mm_xor_si128(c2[2756],_mm_xor_si128(c2[2496],_mm_xor_si128(c2[4068],_mm_xor_si128(c2[3808],_mm_xor_si128(c2[157],_mm_xor_si128(c2[2004],_mm_xor_si128(c2[1744],_mm_xor_si128(c2[2263],_mm_xor_si128(c2[1250],_mm_xor_si128(c2[990],_mm_xor_si128(c2[2557],_mm_xor_si128(c2[1248],_mm_xor_si128(c2[1016],_mm_xor_si128(c2[756],_mm_xor_si128(c2[2846],_mm_xor_si128(c2[2586],c2[1545]))))))))))))))))))))))))))))))))))))))))));
+     d2[104]=simde_mm_xor_si128(c2[1310],simde_mm_xor_si128(c2[1050],simde_mm_xor_si128(c2[2082],simde_mm_xor_si128(c2[1822],simde_mm_xor_si128(c2[789],simde_mm_xor_si128(c2[1306],simde_mm_xor_si128(c2[2368],simde_mm_xor_si128(c2[2108],simde_mm_xor_si128(c2[2117],simde_mm_xor_si128(c2[2369],simde_mm_xor_si128(c2[297],simde_mm_xor_si128(c2[1624],simde_mm_xor_si128(c2[1364],simde_mm_xor_si128(c2[1356],simde_mm_xor_si128(c2[2949],simde_mm_xor_si128(c2[2689],simde_mm_xor_si128(c2[341],simde_mm_xor_si128(c2[81],simde_mm_xor_si128(c2[1122],simde_mm_xor_si128(c2[2445],simde_mm_xor_si128(c2[2185],simde_mm_xor_si128(c2[3236],simde_mm_xor_si128(c2[1926],simde_mm_xor_si128(c2[3257],simde_mm_xor_si128(c2[2997],simde_mm_xor_si128(c2[1178],simde_mm_xor_si128(c2[2756],simde_mm_xor_si128(c2[2496],simde_mm_xor_si128(c2[4068],simde_mm_xor_si128(c2[3808],simde_mm_xor_si128(c2[157],simde_mm_xor_si128(c2[2004],simde_mm_xor_si128(c2[1744],simde_mm_xor_si128(c2[2263],simde_mm_xor_si128(c2[1250],simde_mm_xor_si128(c2[990],simde_mm_xor_si128(c2[2557],simde_mm_xor_si128(c2[1248],simde_mm_xor_si128(c2[1016],simde_mm_xor_si128(c2[756],simde_mm_xor_si128(c2[2846],simde_mm_xor_si128(c2[2586],c2[1545]))))))))))))))))))))))))))))))))))))))))));
 
 //row: 9
-     d2[117]=_mm_xor_si128(c2[2868],_mm_xor_si128(c2[1],_mm_xor_si128(c2[3900],_mm_xor_si128(c2[3640],_mm_xor_si128(c2[526],_mm_xor_si128(c2[2607],_mm_xor_si128(c2[3652],_mm_xor_si128(c2[3926],_mm_xor_si128(c2[1072],_mm_xor_si128(c2[812],_mm_xor_si128(c2[3935],_mm_xor_si128(c2[808],_mm_xor_si128(c2[28],_mm_xor_si128(c2[1073],_mm_xor_si128(c2[2637],_mm_xor_si128(c2[3182],_mm_xor_si128(c2[315],_mm_xor_si128(c2[55],_mm_xor_si128(c2[3174],_mm_xor_si128(c2[60],_mm_xor_si128(c2[348],_mm_xor_si128(c2[1640],_mm_xor_si128(c2[1380],_mm_xor_si128(c2[1899],_mm_xor_si128(c2[2944],_mm_xor_si128(c2[2940],_mm_xor_si128(c2[3985],_mm_xor_si128(c2[4016],_mm_xor_si128(c2[889],_mm_xor_si128(c2[895],_mm_xor_si128(c2[1927],_mm_xor_si128(c2[3744],_mm_xor_si128(c2[630],_mm_xor_si128(c2[656],_mm_xor_si128(c2[1701],_mm_xor_si128(c2[2996],_mm_xor_si128(c2[4041],_mm_xor_si128(c2[168],_mm_xor_si128(c2[1460],_mm_xor_si128(c2[1200],_mm_xor_si128(c2[1467],_mm_xor_si128(c2[2499],_mm_xor_si128(c2[1988],_mm_xor_si128(c2[3020],_mm_xor_si128(c2[3562],_mm_xor_si128(c2[448],_mm_xor_si128(c2[4094],_mm_xor_si128(c2[967],_mm_xor_si128(c2[2808],_mm_xor_si128(c2[3853],_mm_xor_si128(c2[216],_mm_xor_si128(c2[1248],_mm_xor_si128(c2[3079],_mm_xor_si128(c2[4111],_mm_xor_si128(c2[3069],_mm_xor_si128(c2[2574],_mm_xor_si128(c2[3879],_mm_xor_si128(c2[3619],_mm_xor_si128(c2[245],_mm_xor_si128(c2[1277],_mm_xor_si128(c2[3363],c2[236])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+     d2[117]=simde_mm_xor_si128(c2[2868],simde_mm_xor_si128(c2[1],simde_mm_xor_si128(c2[3900],simde_mm_xor_si128(c2[3640],simde_mm_xor_si128(c2[526],simde_mm_xor_si128(c2[2607],simde_mm_xor_si128(c2[3652],simde_mm_xor_si128(c2[3926],simde_mm_xor_si128(c2[1072],simde_mm_xor_si128(c2[812],simde_mm_xor_si128(c2[3935],simde_mm_xor_si128(c2[808],simde_mm_xor_si128(c2[28],simde_mm_xor_si128(c2[1073],simde_mm_xor_si128(c2[2637],simde_mm_xor_si128(c2[3182],simde_mm_xor_si128(c2[315],simde_mm_xor_si128(c2[55],simde_mm_xor_si128(c2[3174],simde_mm_xor_si128(c2[60],simde_mm_xor_si128(c2[348],simde_mm_xor_si128(c2[1640],simde_mm_xor_si128(c2[1380],simde_mm_xor_si128(c2[1899],simde_mm_xor_si128(c2[2944],simde_mm_xor_si128(c2[2940],simde_mm_xor_si128(c2[3985],simde_mm_xor_si128(c2[4016],simde_mm_xor_si128(c2[889],simde_mm_xor_si128(c2[895],simde_mm_xor_si128(c2[1927],simde_mm_xor_si128(c2[3744],simde_mm_xor_si128(c2[630],simde_mm_xor_si128(c2[656],simde_mm_xor_si128(c2[1701],simde_mm_xor_si128(c2[2996],simde_mm_xor_si128(c2[4041],simde_mm_xor_si128(c2[168],simde_mm_xor_si128(c2[1460],simde_mm_xor_si128(c2[1200],simde_mm_xor_si128(c2[1467],simde_mm_xor_si128(c2[2499],simde_mm_xor_si128(c2[1988],simde_mm_xor_si128(c2[3020],simde_mm_xor_si128(c2[3562],simde_mm_xor_si128(c2[448],simde_mm_xor_si128(c2[4094],simde_mm_xor_si128(c2[967],simde_mm_xor_si128(c2[2808],simde_mm_xor_si128(c2[3853],simde_mm_xor_si128(c2[216],simde_mm_xor_si128(c2[1248],simde_mm_xor_si128(c2[3079],simde_mm_xor_si128(c2[4111],simde_mm_xor_si128(c2[3069],simde_mm_xor_si128(c2[2574],simde_mm_xor_si128(c2[3879],simde_mm_xor_si128(c2[3619],simde_mm_xor_si128(c2[245],simde_mm_xor_si128(c2[1277],simde_mm_xor_si128(c2[3363],c2[236])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
 
 //row: 10
-     d2[130]=_mm_xor_si128(c2[3387],_mm_xor_si128(c2[3149],_mm_xor_si128(c2[427],c2[965])));
+     d2[130]=simde_mm_xor_si128(c2[3387],simde_mm_xor_si128(c2[3149],simde_mm_xor_si128(c2[427],c2[965])));
 
 //row: 11
-     d2[143]=_mm_xor_si128(c2[1569],_mm_xor_si128(c2[2341],_mm_xor_si128(c2[1308],_mm_xor_si128(c2[1822],_mm_xor_si128(c2[2627],_mm_xor_si128(c2[2636],_mm_xor_si128(c2[3148],_mm_xor_si128(c2[2888],_mm_xor_si128(c2[1883],_mm_xor_si128(c2[2135],_mm_xor_si128(c2[1875],_mm_xor_si128(c2[3208],_mm_xor_si128(c2[600],_mm_xor_si128(c2[1641],_mm_xor_si128(c2[2704],_mm_xor_si128(c2[3755],_mm_xor_si128(c2[2705],_mm_xor_si128(c2[2445],_mm_xor_si128(c2[3516],_mm_xor_si128(c2[1957],_mm_xor_si128(c2[1697],_mm_xor_si128(c2[3028],_mm_xor_si128(c2[168],_mm_xor_si128(c2[936],_mm_xor_si128(c2[676],_mm_xor_si128(c2[2263],_mm_xor_si128(c2[3042],_mm_xor_si128(c2[2782],_mm_xor_si128(c2[3563],_mm_xor_si128(c2[1509],_mm_xor_si128(c2[3076],_mm_xor_si128(c2[2040],_mm_xor_si128(c2[1780],_mm_xor_si128(c2[1275],_mm_xor_si128(c2[3105],_mm_xor_si128(c2[2324],_mm_xor_si128(c2[2064],c2[1016])))))))))))))))))))))))))))))))))))));
+     d2[143]=simde_mm_xor_si128(c2[1569],simde_mm_xor_si128(c2[2341],simde_mm_xor_si128(c2[1308],simde_mm_xor_si128(c2[1822],simde_mm_xor_si128(c2[2627],simde_mm_xor_si128(c2[2636],simde_mm_xor_si128(c2[3148],simde_mm_xor_si128(c2[2888],simde_mm_xor_si128(c2[1883],simde_mm_xor_si128(c2[2135],simde_mm_xor_si128(c2[1875],simde_mm_xor_si128(c2[3208],simde_mm_xor_si128(c2[600],simde_mm_xor_si128(c2[1641],simde_mm_xor_si128(c2[2704],simde_mm_xor_si128(c2[3755],simde_mm_xor_si128(c2[2705],simde_mm_xor_si128(c2[2445],simde_mm_xor_si128(c2[3516],simde_mm_xor_si128(c2[1957],simde_mm_xor_si128(c2[1697],simde_mm_xor_si128(c2[3028],simde_mm_xor_si128(c2[168],simde_mm_xor_si128(c2[936],simde_mm_xor_si128(c2[676],simde_mm_xor_si128(c2[2263],simde_mm_xor_si128(c2[3042],simde_mm_xor_si128(c2[2782],simde_mm_xor_si128(c2[3563],simde_mm_xor_si128(c2[1509],simde_mm_xor_si128(c2[3076],simde_mm_xor_si128(c2[2040],simde_mm_xor_si128(c2[1780],simde_mm_xor_si128(c2[1275],simde_mm_xor_si128(c2[3105],simde_mm_xor_si128(c2[2324],simde_mm_xor_si128(c2[2064],c2[1016])))))))))))))))))))))))))))))))))))));
 
 //row: 12
-     d2[156]=_mm_xor_si128(c2[12],_mm_xor_si128(c2[3911],_mm_xor_si128(c2[524],_mm_xor_si128(c2[3650],_mm_xor_si128(c2[1070],_mm_xor_si128(c2[810],_mm_xor_si128(c2[806],_mm_xor_si128(c2[1071],_mm_xor_si128(c2[547],_mm_xor_si128(c2[313],_mm_xor_si128(c2[53],_mm_xor_si128(c2[58],_mm_xor_si128(c2[1638],_mm_xor_si128(c2[1378],_mm_xor_si128(c2[2942],_mm_xor_si128(c2[3983],_mm_xor_si128(c2[2947],_mm_xor_si128(c2[887],_mm_xor_si128(c2[1925],_mm_xor_si128(c2[628],_mm_xor_si128(c2[1699],_mm_xor_si128(c2[4039],_mm_xor_si128(c2[1458],_mm_xor_si128(c2[1198],_mm_xor_si128(c2[2497],_mm_xor_si128(c2[3018],_mm_xor_si128(c2[446],_mm_xor_si128(c2[965],_mm_xor_si128(c2[3851],_mm_xor_si128(c2[1259],_mm_xor_si128(c2[4109],_mm_xor_si128(c2[3877],_mm_xor_si128(c2[3617],_mm_xor_si128(c2[1275],c2[234]))))))))))))))))))))))))))))))))));
+     d2[156]=simde_mm_xor_si128(c2[12],simde_mm_xor_si128(c2[3911],simde_mm_xor_si128(c2[524],simde_mm_xor_si128(c2[3650],simde_mm_xor_si128(c2[1070],simde_mm_xor_si128(c2[810],simde_mm_xor_si128(c2[806],simde_mm_xor_si128(c2[1071],simde_mm_xor_si128(c2[547],simde_mm_xor_si128(c2[313],simde_mm_xor_si128(c2[53],simde_mm_xor_si128(c2[58],simde_mm_xor_si128(c2[1638],simde_mm_xor_si128(c2[1378],simde_mm_xor_si128(c2[2942],simde_mm_xor_si128(c2[3983],simde_mm_xor_si128(c2[2947],simde_mm_xor_si128(c2[887],simde_mm_xor_si128(c2[1925],simde_mm_xor_si128(c2[628],simde_mm_xor_si128(c2[1699],simde_mm_xor_si128(c2[4039],simde_mm_xor_si128(c2[1458],simde_mm_xor_si128(c2[1198],simde_mm_xor_si128(c2[2497],simde_mm_xor_si128(c2[3018],simde_mm_xor_si128(c2[446],simde_mm_xor_si128(c2[965],simde_mm_xor_si128(c2[3851],simde_mm_xor_si128(c2[1259],simde_mm_xor_si128(c2[4109],simde_mm_xor_si128(c2[3877],simde_mm_xor_si128(c2[3617],simde_mm_xor_si128(c2[1275],c2[234]))))))))))))))))))))))))))))))))));
 
 //row: 13
-     d2[169]=_mm_xor_si128(c2[2087],_mm_xor_si128(c2[2872],_mm_xor_si128(c2[1826],_mm_xor_si128(c2[2],_mm_xor_si128(c2[3158],_mm_xor_si128(c2[3154],_mm_xor_si128(c2[3666],_mm_xor_si128(c2[3406],_mm_xor_si128(c2[1329],_mm_xor_si128(c2[2401],_mm_xor_si128(c2[2653],_mm_xor_si128(c2[2393],_mm_xor_si128(c2[3726],_mm_xor_si128(c2[1118],_mm_xor_si128(c2[2159],_mm_xor_si128(c2[3235],_mm_xor_si128(c2[114],_mm_xor_si128(c2[3236],_mm_xor_si128(c2[2976],_mm_xor_si128(c2[4034],_mm_xor_si128(c2[2475],_mm_xor_si128(c2[2215],_mm_xor_si128(c2[3546],_mm_xor_si128(c2[686],_mm_xor_si128(c2[1467],_mm_xor_si128(c2[1207],_mm_xor_si128(c2[2794],_mm_xor_si128(c2[3573],_mm_xor_si128(c2[3313],_mm_xor_si128(c2[2040],_mm_xor_si128(c2[3594],_mm_xor_si128(c2[2558],_mm_xor_si128(c2[2298],_mm_xor_si128(c2[4113],_mm_xor_si128(c2[1806],_mm_xor_si128(c2[3623],_mm_xor_si128(c2[2842],c2[2582])))))))))))))))))))))))))))))))))))));
+     d2[169]=simde_mm_xor_si128(c2[2087],simde_mm_xor_si128(c2[2872],simde_mm_xor_si128(c2[1826],simde_mm_xor_si128(c2[2],simde_mm_xor_si128(c2[3158],simde_mm_xor_si128(c2[3154],simde_mm_xor_si128(c2[3666],simde_mm_xor_si128(c2[3406],simde_mm_xor_si128(c2[1329],simde_mm_xor_si128(c2[2401],simde_mm_xor_si128(c2[2653],simde_mm_xor_si128(c2[2393],simde_mm_xor_si128(c2[3726],simde_mm_xor_si128(c2[1118],simde_mm_xor_si128(c2[2159],simde_mm_xor_si128(c2[3235],simde_mm_xor_si128(c2[114],simde_mm_xor_si128(c2[3236],simde_mm_xor_si128(c2[2976],simde_mm_xor_si128(c2[4034],simde_mm_xor_si128(c2[2475],simde_mm_xor_si128(c2[2215],simde_mm_xor_si128(c2[3546],simde_mm_xor_si128(c2[686],simde_mm_xor_si128(c2[1467],simde_mm_xor_si128(c2[1207],simde_mm_xor_si128(c2[2794],simde_mm_xor_si128(c2[3573],simde_mm_xor_si128(c2[3313],simde_mm_xor_si128(c2[2040],simde_mm_xor_si128(c2[3594],simde_mm_xor_si128(c2[2558],simde_mm_xor_si128(c2[2298],simde_mm_xor_si128(c2[4113],simde_mm_xor_si128(c2[1806],simde_mm_xor_si128(c2[3623],simde_mm_xor_si128(c2[2842],c2[2582])))))))))))))))))))))))))))))))))))));
 
 //row: 14
-     d2[182]=_mm_xor_si128(c2[792],_mm_xor_si128(c2[532],_mm_xor_si128(c2[2862],_mm_xor_si128(c2[1304],_mm_xor_si128(c2[3647],_mm_xor_si128(c2[271],_mm_xor_si128(c2[2601],_mm_xor_si128(c2[1850],_mm_xor_si128(c2[1590],_mm_xor_si128(c2[3933],_mm_xor_si128(c2[1586],_mm_xor_si128(c2[3929],_mm_xor_si128(c2[1851],_mm_xor_si128(c2[295],_mm_xor_si128(c2[35],_mm_xor_si128(c2[2891],_mm_xor_si128(c2[1093],_mm_xor_si128(c2[833],_mm_xor_si128(c2[3176],_mm_xor_si128(c2[838],_mm_xor_si128(c2[3441],_mm_xor_si128(c2[3181],_mm_xor_si128(c2[2418],_mm_xor_si128(c2[2158],_mm_xor_si128(c2[342],_mm_xor_si128(c2[3722],_mm_xor_si128(c2[1906],_mm_xor_si128(c2[604],_mm_xor_si128(c2[2947],_mm_xor_si128(c2[1667],_mm_xor_si128(c2[4010],_mm_xor_si128(c2[2705],_mm_xor_si128(c2[889],_mm_xor_si128(c2[1408],_mm_xor_si128(c2[4011],_mm_xor_si128(c2[3751],_mm_xor_si128(c2[2479],_mm_xor_si128(c2[650],_mm_xor_si128(c2[660],_mm_xor_si128(c2[3250],_mm_xor_si128(c2[2990],_mm_xor_si128(c2[2238],_mm_xor_si128(c2[1978],_mm_xor_si128(c2[162],_mm_xor_si128(c2[3277],_mm_xor_si128(c2[1461],_mm_xor_si128(c2[3798],_mm_xor_si128(c2[2242],_mm_xor_si128(c2[1982],_mm_xor_si128(c2[1197],_mm_xor_si128(c2[1226],_mm_xor_si128(c2[3569],_mm_xor_si128(c2[1745],_mm_xor_si128(c2[189],_mm_xor_si128(c2[4088],_mm_xor_si128(c2[472],_mm_xor_si128(c2[2815],_mm_xor_si128(c2[2039],_mm_xor_si128(c2[210],_mm_xor_si128(c2[730],_mm_xor_si128(c2[3333],_mm_xor_si128(c2[3073],_mm_xor_si128(c2[498],_mm_xor_si128(c2[238],_mm_xor_si128(c2[2581],_mm_xor_si128(c2[2055],_mm_xor_si128(c2[239],_mm_xor_si128(c2[1014],_mm_xor_si128(c2[3617],c2[3357])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+     d2[182]=simde_mm_xor_si128(c2[792],simde_mm_xor_si128(c2[532],simde_mm_xor_si128(c2[2862],simde_mm_xor_si128(c2[1304],simde_mm_xor_si128(c2[3647],simde_mm_xor_si128(c2[271],simde_mm_xor_si128(c2[2601],simde_mm_xor_si128(c2[1850],simde_mm_xor_si128(c2[1590],simde_mm_xor_si128(c2[3933],simde_mm_xor_si128(c2[1586],simde_mm_xor_si128(c2[3929],simde_mm_xor_si128(c2[1851],simde_mm_xor_si128(c2[295],simde_mm_xor_si128(c2[35],simde_mm_xor_si128(c2[2891],simde_mm_xor_si128(c2[1093],simde_mm_xor_si128(c2[833],simde_mm_xor_si128(c2[3176],simde_mm_xor_si128(c2[838],simde_mm_xor_si128(c2[3441],simde_mm_xor_si128(c2[3181],simde_mm_xor_si128(c2[2418],simde_mm_xor_si128(c2[2158],simde_mm_xor_si128(c2[342],simde_mm_xor_si128(c2[3722],simde_mm_xor_si128(c2[1906],simde_mm_xor_si128(c2[604],simde_mm_xor_si128(c2[2947],simde_mm_xor_si128(c2[1667],simde_mm_xor_si128(c2[4010],simde_mm_xor_si128(c2[2705],simde_mm_xor_si128(c2[889],simde_mm_xor_si128(c2[1408],simde_mm_xor_si128(c2[4011],simde_mm_xor_si128(c2[3751],simde_mm_xor_si128(c2[2479],simde_mm_xor_si128(c2[650],simde_mm_xor_si128(c2[660],simde_mm_xor_si128(c2[3250],simde_mm_xor_si128(c2[2990],simde_mm_xor_si128(c2[2238],simde_mm_xor_si128(c2[1978],simde_mm_xor_si128(c2[162],simde_mm_xor_si128(c2[3277],simde_mm_xor_si128(c2[1461],simde_mm_xor_si128(c2[3798],simde_mm_xor_si128(c2[2242],simde_mm_xor_si128(c2[1982],simde_mm_xor_si128(c2[1197],simde_mm_xor_si128(c2[1226],simde_mm_xor_si128(c2[3569],simde_mm_xor_si128(c2[1745],simde_mm_xor_si128(c2[189],simde_mm_xor_si128(c2[4088],simde_mm_xor_si128(c2[472],simde_mm_xor_si128(c2[2815],simde_mm_xor_si128(c2[2039],simde_mm_xor_si128(c2[210],simde_mm_xor_si128(c2[730],simde_mm_xor_si128(c2[3333],simde_mm_xor_si128(c2[3073],simde_mm_xor_si128(c2[498],simde_mm_xor_si128(c2[238],simde_mm_xor_si128(c2[2581],simde_mm_xor_si128(c2[2055],simde_mm_xor_si128(c2[239],simde_mm_xor_si128(c2[1014],simde_mm_xor_si128(c2[3617],c2[3357])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
 
 //row: 15
-     d2[195]=_mm_xor_si128(c2[2602],_mm_xor_si128(c2[1302],_mm_xor_si128(c2[1042],_mm_xor_si128(c2[3387],_mm_xor_si128(c2[1827],_mm_xor_si128(c2[2341],_mm_xor_si128(c2[781],_mm_xor_si128(c2[3650],_mm_xor_si128(c2[3673],_mm_xor_si128(c2[2373],_mm_xor_si128(c2[2113],_mm_xor_si128(c2[3669],_mm_xor_si128(c2[2109],_mm_xor_si128(c2[3934],_mm_xor_si128(c2[2374],_mm_xor_si128(c2[2916],_mm_xor_si128(c2[1616],_mm_xor_si128(c2[1356],_mm_xor_si128(c2[2921],_mm_xor_si128(c2[1361],_mm_xor_si128(c2[82],_mm_xor_si128(c2[2941],_mm_xor_si128(c2[2681],_mm_xor_si128(c2[1646],_mm_xor_si128(c2[86],_mm_xor_si128(c2[2687],_mm_xor_si128(c2[1127],_mm_xor_si128(c2[3750],_mm_xor_si128(c2[2190],_mm_xor_si128(c2[629],_mm_xor_si128(c2[3228],_mm_xor_si128(c2[3491],_mm_xor_si128(c2[1931],_mm_xor_si128(c2[390],_mm_xor_si128(c2[3002],_mm_xor_si128(c2[2730],_mm_xor_si128(c2[1170],_mm_xor_si128(c2[4061],_mm_xor_si128(c2[2761],_mm_xor_si128(c2[2501],_mm_xor_si128(c2[1201],_mm_xor_si128(c2[3800],_mm_xor_si128(c2[1722],_mm_xor_si128(c2[162],_mm_xor_si128(c2[3309],_mm_xor_si128(c2[1749],_mm_xor_si128(c2[3828],_mm_xor_si128(c2[2268],_mm_xor_si128(c2[2555],_mm_xor_si128(c2[995],_mm_xor_si128(c2[4109],_mm_xor_si128(c2[2549],_mm_xor_si128(c2[2813],_mm_xor_si128(c2[1253],_mm_xor_si128(c2[2321],_mm_xor_si128(c2[1021],_mm_xor_si128(c2[761],_mm_xor_si128(c2[4138],_mm_xor_si128(c2[2578],_mm_xor_si128(c2[3097],c2[1537]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+     d2[195]=simde_mm_xor_si128(c2[2602],simde_mm_xor_si128(c2[1302],simde_mm_xor_si128(c2[1042],simde_mm_xor_si128(c2[3387],simde_mm_xor_si128(c2[1827],simde_mm_xor_si128(c2[2341],simde_mm_xor_si128(c2[781],simde_mm_xor_si128(c2[3650],simde_mm_xor_si128(c2[3673],simde_mm_xor_si128(c2[2373],simde_mm_xor_si128(c2[2113],simde_mm_xor_si128(c2[3669],simde_mm_xor_si128(c2[2109],simde_mm_xor_si128(c2[3934],simde_mm_xor_si128(c2[2374],simde_mm_xor_si128(c2[2916],simde_mm_xor_si128(c2[1616],simde_mm_xor_si128(c2[1356],simde_mm_xor_si128(c2[2921],simde_mm_xor_si128(c2[1361],simde_mm_xor_si128(c2[82],simde_mm_xor_si128(c2[2941],simde_mm_xor_si128(c2[2681],simde_mm_xor_si128(c2[1646],simde_mm_xor_si128(c2[86],simde_mm_xor_si128(c2[2687],simde_mm_xor_si128(c2[1127],simde_mm_xor_si128(c2[3750],simde_mm_xor_si128(c2[2190],simde_mm_xor_si128(c2[629],simde_mm_xor_si128(c2[3228],simde_mm_xor_si128(c2[3491],simde_mm_xor_si128(c2[1931],simde_mm_xor_si128(c2[390],simde_mm_xor_si128(c2[3002],simde_mm_xor_si128(c2[2730],simde_mm_xor_si128(c2[1170],simde_mm_xor_si128(c2[4061],simde_mm_xor_si128(c2[2761],simde_mm_xor_si128(c2[2501],simde_mm_xor_si128(c2[1201],simde_mm_xor_si128(c2[3800],simde_mm_xor_si128(c2[1722],simde_mm_xor_si128(c2[162],simde_mm_xor_si128(c2[3309],simde_mm_xor_si128(c2[1749],simde_mm_xor_si128(c2[3828],simde_mm_xor_si128(c2[2268],simde_mm_xor_si128(c2[2555],simde_mm_xor_si128(c2[995],simde_mm_xor_si128(c2[4109],simde_mm_xor_si128(c2[2549],simde_mm_xor_si128(c2[2813],simde_mm_xor_si128(c2[1253],simde_mm_xor_si128(c2[2321],simde_mm_xor_si128(c2[1021],simde_mm_xor_si128(c2[761],simde_mm_xor_si128(c2[4138],simde_mm_xor_si128(c2[2578],simde_mm_xor_si128(c2[3097],c2[1537]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
 
 //row: 16
-     d2[208]=_mm_xor_si128(c2[1312],_mm_xor_si128(c2[1052],_mm_xor_si128(c2[3906],_mm_xor_si128(c2[3646],_mm_xor_si128(c2[1824],_mm_xor_si128(c2[532],_mm_xor_si128(c2[272],_mm_xor_si128(c2[791],_mm_xor_si128(c2[3385],_mm_xor_si128(c2[2370],_mm_xor_si128(c2[2110],_mm_xor_si128(c2[818],_mm_xor_si128(c2[558],_mm_xor_si128(c2[2106],_mm_xor_si128(c2[554],_mm_xor_si128(c2[2371],_mm_xor_si128(c2[806],_mm_xor_si128(c2[3413],_mm_xor_si128(c2[1613],_mm_xor_si128(c2[1353],_mm_xor_si128(c2[61],_mm_xor_si128(c2[3960],_mm_xor_si128(c2[1358],_mm_xor_si128(c2[3952],_mm_xor_si128(c2[2938],_mm_xor_si128(c2[2678],_mm_xor_si128(c2[1386],_mm_xor_si128(c2[1126],_mm_xor_si128(c2[83],_mm_xor_si128(c2[2950],_mm_xor_si128(c2[2690],_mm_xor_si128(c2[1124],_mm_xor_si128(c2[3718],_mm_xor_si128(c2[2187],_mm_xor_si128(c2[895],_mm_xor_si128(c2[635],_mm_xor_si128(c2[3225],_mm_xor_si128(c2[1673],_mm_xor_si128(c2[1928],_mm_xor_si128(c2[376],_mm_xor_si128(c2[2999],_mm_xor_si128(c2[1694],_mm_xor_si128(c2[1434],_mm_xor_si128(c2[1180],_mm_xor_si128(c2[3774],_mm_xor_si128(c2[2758],_mm_xor_si128(c2[2498],_mm_xor_si128(c2[1206],_mm_xor_si128(c2[946],_mm_xor_si128(c2[3797],_mm_xor_si128(c2[2505],_mm_xor_si128(c2[2245],_mm_xor_si128(c2[159],_mm_xor_si128(c2[2766],_mm_xor_si128(c2[1746],_mm_xor_si128(c2[454],_mm_xor_si128(c2[194],_mm_xor_si128(c2[2265],_mm_xor_si128(c2[713],_mm_xor_si128(c2[992],_mm_xor_si128(c2[3859],_mm_xor_si128(c2[3599],_mm_xor_si128(c2[2559],_mm_xor_si128(c2[994],_mm_xor_si128(c2[1250],_mm_xor_si128(c2[3857],_mm_xor_si128(c2[1018],_mm_xor_si128(c2[758],_mm_xor_si128(c2[3625],_mm_xor_si128(c2[3365],_mm_xor_si128(c2[2575],_mm_xor_si128(c2[1283],_mm_xor_si128(c2[1023],_mm_xor_si128(c2[1534],_mm_xor_si128(c2[4141],c2[4135])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+     d2[208]=simde_mm_xor_si128(c2[1312],simde_mm_xor_si128(c2[1052],simde_mm_xor_si128(c2[3906],simde_mm_xor_si128(c2[3646],simde_mm_xor_si128(c2[1824],simde_mm_xor_si128(c2[532],simde_mm_xor_si128(c2[272],simde_mm_xor_si128(c2[791],simde_mm_xor_si128(c2[3385],simde_mm_xor_si128(c2[2370],simde_mm_xor_si128(c2[2110],simde_mm_xor_si128(c2[818],simde_mm_xor_si128(c2[558],simde_mm_xor_si128(c2[2106],simde_mm_xor_si128(c2[554],simde_mm_xor_si128(c2[2371],simde_mm_xor_si128(c2[806],simde_mm_xor_si128(c2[3413],simde_mm_xor_si128(c2[1613],simde_mm_xor_si128(c2[1353],simde_mm_xor_si128(c2[61],simde_mm_xor_si128(c2[3960],simde_mm_xor_si128(c2[1358],simde_mm_xor_si128(c2[3952],simde_mm_xor_si128(c2[2938],simde_mm_xor_si128(c2[2678],simde_mm_xor_si128(c2[1386],simde_mm_xor_si128(c2[1126],simde_mm_xor_si128(c2[83],simde_mm_xor_si128(c2[2950],simde_mm_xor_si128(c2[2690],simde_mm_xor_si128(c2[1124],simde_mm_xor_si128(c2[3718],simde_mm_xor_si128(c2[2187],simde_mm_xor_si128(c2[895],simde_mm_xor_si128(c2[635],simde_mm_xor_si128(c2[3225],simde_mm_xor_si128(c2[1673],simde_mm_xor_si128(c2[1928],simde_mm_xor_si128(c2[376],simde_mm_xor_si128(c2[2999],simde_mm_xor_si128(c2[1694],simde_mm_xor_si128(c2[1434],simde_mm_xor_si128(c2[1180],simde_mm_xor_si128(c2[3774],simde_mm_xor_si128(c2[2758],simde_mm_xor_si128(c2[2498],simde_mm_xor_si128(c2[1206],simde_mm_xor_si128(c2[946],simde_mm_xor_si128(c2[3797],simde_mm_xor_si128(c2[2505],simde_mm_xor_si128(c2[2245],simde_mm_xor_si128(c2[159],simde_mm_xor_si128(c2[2766],simde_mm_xor_si128(c2[1746],simde_mm_xor_si128(c2[454],simde_mm_xor_si128(c2[194],simde_mm_xor_si128(c2[2265],simde_mm_xor_si128(c2[713],simde_mm_xor_si128(c2[992],simde_mm_xor_si128(c2[3859],simde_mm_xor_si128(c2[3599],simde_mm_xor_si128(c2[2559],simde_mm_xor_si128(c2[994],simde_mm_xor_si128(c2[1250],simde_mm_xor_si128(c2[3857],simde_mm_xor_si128(c2[1018],simde_mm_xor_si128(c2[758],simde_mm_xor_si128(c2[3625],simde_mm_xor_si128(c2[3365],simde_mm_xor_si128(c2[2575],simde_mm_xor_si128(c2[1283],simde_mm_xor_si128(c2[1023],simde_mm_xor_si128(c2[1534],simde_mm_xor_si128(c2[4141],c2[4135])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
 
 //row: 17
-     d2[221]=_mm_xor_si128(c2[3384],_mm_xor_si128(c2[3124],_mm_xor_si128(c2[531],_mm_xor_si128(c2[271],_mm_xor_si128(c2[3909],_mm_xor_si128(c2[1303],_mm_xor_si128(c2[1043],_mm_xor_si128(c2[2863],_mm_xor_si128(c2[10],_mm_xor_si128(c2[296],_mm_xor_si128(c2[36],_mm_xor_si128(c2[1589],_mm_xor_si128(c2[1329],_mm_xor_si128(c2[32],_mm_xor_si128(c2[1338],_mm_xor_si128(c2[297],_mm_xor_si128(c2[1590],_mm_xor_si128(c2[2369],_mm_xor_si128(c2[3698],_mm_xor_si128(c2[3438],_mm_xor_si128(c2[832],_mm_xor_si128(c2[572],_mm_xor_si128(c2[3443],_mm_xor_si128(c2[577],_mm_xor_si128(c2[864],_mm_xor_si128(c2[604],_mm_xor_si128(c2[2170],_mm_xor_si128(c2[1910],_mm_xor_si128(c2[2168],_mm_xor_si128(c2[3721],_mm_xor_si128(c2[3461],_mm_xor_si128(c2[3209],_mm_xor_si128(c2[343],_mm_xor_si128(c2[113],_mm_xor_si128(c2[1666],_mm_xor_si128(c2[1406],_mm_xor_si128(c2[1151],_mm_xor_si128(c2[2444],_mm_xor_si128(c2[4013],_mm_xor_si128(c2[1147],_mm_xor_si128(c2[912],_mm_xor_si128(c2[2478],_mm_xor_si128(c2[2218],_mm_xor_si128(c2[3252],_mm_xor_si128(c2[399],_mm_xor_si128(c2[2482],_mm_xor_si128(c2[684],_mm_xor_si128(c2[424],_mm_xor_si128(c2[1977],_mm_xor_si128(c2[1717],_mm_xor_si128(c2[1723],_mm_xor_si128(c2[3276],_mm_xor_si128(c2[3016],_mm_xor_si128(c2[2244],_mm_xor_si128(c2[3537],_mm_xor_si128(c2[3831],_mm_xor_si128(c2[1225],_mm_xor_si128(c2[965],_mm_xor_si128(c2[191],_mm_xor_si128(c2[1484],_mm_xor_si128(c2[3077],_mm_xor_si128(c2[471],_mm_xor_si128(c2[211],_mm_xor_si128(c2[472],_mm_xor_si128(c2[1778],_mm_xor_si128(c2[3335],_mm_xor_si128(c2[469],_mm_xor_si128(c2[3103],_mm_xor_si128(c2[2843],_mm_xor_si128(c2[237],_mm_xor_si128(c2[4136],_mm_xor_si128(c2[501],_mm_xor_si128(c2[2054],_mm_xor_si128(c2[1794],_mm_xor_si128(c2[3619],c2[766])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+     d2[221]=simde_mm_xor_si128(c2[3384],simde_mm_xor_si128(c2[3124],simde_mm_xor_si128(c2[531],simde_mm_xor_si128(c2[271],simde_mm_xor_si128(c2[3909],simde_mm_xor_si128(c2[1303],simde_mm_xor_si128(c2[1043],simde_mm_xor_si128(c2[2863],simde_mm_xor_si128(c2[10],simde_mm_xor_si128(c2[296],simde_mm_xor_si128(c2[36],simde_mm_xor_si128(c2[1589],simde_mm_xor_si128(c2[1329],simde_mm_xor_si128(c2[32],simde_mm_xor_si128(c2[1338],simde_mm_xor_si128(c2[297],simde_mm_xor_si128(c2[1590],simde_mm_xor_si128(c2[2369],simde_mm_xor_si128(c2[3698],simde_mm_xor_si128(c2[3438],simde_mm_xor_si128(c2[832],simde_mm_xor_si128(c2[572],simde_mm_xor_si128(c2[3443],simde_mm_xor_si128(c2[577],simde_mm_xor_si128(c2[864],simde_mm_xor_si128(c2[604],simde_mm_xor_si128(c2[2170],simde_mm_xor_si128(c2[1910],simde_mm_xor_si128(c2[2168],simde_mm_xor_si128(c2[3721],simde_mm_xor_si128(c2[3461],simde_mm_xor_si128(c2[3209],simde_mm_xor_si128(c2[343],simde_mm_xor_si128(c2[113],simde_mm_xor_si128(c2[1666],simde_mm_xor_si128(c2[1406],simde_mm_xor_si128(c2[1151],simde_mm_xor_si128(c2[2444],simde_mm_xor_si128(c2[4013],simde_mm_xor_si128(c2[1147],simde_mm_xor_si128(c2[912],simde_mm_xor_si128(c2[2478],simde_mm_xor_si128(c2[2218],simde_mm_xor_si128(c2[3252],simde_mm_xor_si128(c2[399],simde_mm_xor_si128(c2[2482],simde_mm_xor_si128(c2[684],simde_mm_xor_si128(c2[424],simde_mm_xor_si128(c2[1977],simde_mm_xor_si128(c2[1717],simde_mm_xor_si128(c2[1723],simde_mm_xor_si128(c2[3276],simde_mm_xor_si128(c2[3016],simde_mm_xor_si128(c2[2244],simde_mm_xor_si128(c2[3537],simde_mm_xor_si128(c2[3831],simde_mm_xor_si128(c2[1225],simde_mm_xor_si128(c2[965],simde_mm_xor_si128(c2[191],simde_mm_xor_si128(c2[1484],simde_mm_xor_si128(c2[3077],simde_mm_xor_si128(c2[471],simde_mm_xor_si128(c2[211],simde_mm_xor_si128(c2[472],simde_mm_xor_si128(c2[1778],simde_mm_xor_si128(c2[3335],simde_mm_xor_si128(c2[469],simde_mm_xor_si128(c2[3103],simde_mm_xor_si128(c2[2843],simde_mm_xor_si128(c2[237],simde_mm_xor_si128(c2[4136],simde_mm_xor_si128(c2[501],simde_mm_xor_si128(c2[2054],simde_mm_xor_si128(c2[1794],simde_mm_xor_si128(c2[3619],c2[766])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
 
 //row: 18
-     d2[234]=_mm_xor_si128(c2[268],_mm_xor_si128(c2[3028],c2[3310]));
+     d2[234]=simde_mm_xor_si128(c2[268],simde_mm_xor_si128(c2[3028],c2[3310]));
 
 //row: 19
-     d2[247]=_mm_xor_si128(c2[532],_mm_xor_si128(c2[1304],_mm_xor_si128(c2[271],_mm_xor_si128(c2[3646],_mm_xor_si128(c2[1590],_mm_xor_si128(c2[1586],_mm_xor_si128(c2[1851],_mm_xor_si128(c2[3153],_mm_xor_si128(c2[833],_mm_xor_si128(c2[838],_mm_xor_si128(c2[2158],_mm_xor_si128(c2[3722],_mm_xor_si128(c2[604],_mm_xor_si128(c2[1667],_mm_xor_si128(c2[2705],_mm_xor_si128(c2[1408],_mm_xor_si128(c2[2479],_mm_xor_si128(c2[660],_mm_xor_si128(c2[1978],_mm_xor_si128(c2[3277],_mm_xor_si128(c2[3798],_mm_xor_si128(c2[1226],_mm_xor_si128(c2[1745],_mm_xor_si128(c2[472],_mm_xor_si128(c2[2039],_mm_xor_si128(c2[730],_mm_xor_si128(c2[238],_mm_xor_si128(c2[2055],c2[1014]))))))))))))))))))))))))))));
+     d2[247]=simde_mm_xor_si128(c2[532],simde_mm_xor_si128(c2[1304],simde_mm_xor_si128(c2[271],simde_mm_xor_si128(c2[3646],simde_mm_xor_si128(c2[1590],simde_mm_xor_si128(c2[1586],simde_mm_xor_si128(c2[1851],simde_mm_xor_si128(c2[3153],simde_mm_xor_si128(c2[833],simde_mm_xor_si128(c2[838],simde_mm_xor_si128(c2[2158],simde_mm_xor_si128(c2[3722],simde_mm_xor_si128(c2[604],simde_mm_xor_si128(c2[1667],simde_mm_xor_si128(c2[2705],simde_mm_xor_si128(c2[1408],simde_mm_xor_si128(c2[2479],simde_mm_xor_si128(c2[660],simde_mm_xor_si128(c2[1978],simde_mm_xor_si128(c2[3277],simde_mm_xor_si128(c2[3798],simde_mm_xor_si128(c2[1226],simde_mm_xor_si128(c2[1745],simde_mm_xor_si128(c2[472],simde_mm_xor_si128(c2[2039],simde_mm_xor_si128(c2[730],simde_mm_xor_si128(c2[238],simde_mm_xor_si128(c2[2055],c2[1014]))))))))))))))))))))))))))));
 
 //row: 20
-     d2[260]=_mm_xor_si128(c2[263],_mm_xor_si128(c2[3],_mm_xor_si128(c2[788],_mm_xor_si128(c2[3901],_mm_xor_si128(c2[1334],_mm_xor_si128(c2[1074],_mm_xor_si128(c2[1070],_mm_xor_si128(c2[1335],_mm_xor_si128(c2[1078],_mm_xor_si128(c2[577],_mm_xor_si128(c2[317],_mm_xor_si128(c2[322],_mm_xor_si128(c2[1902],_mm_xor_si128(c2[1642],_mm_xor_si128(c2[3206],_mm_xor_si128(c2[88],_mm_xor_si128(c2[1151],_mm_xor_si128(c2[2189],_mm_xor_si128(c2[892],_mm_xor_si128(c2[886],_mm_xor_si128(c2[1950],_mm_xor_si128(c2[131],_mm_xor_si128(c2[1722],_mm_xor_si128(c2[1462],_mm_xor_si128(c2[2761],_mm_xor_si128(c2[3282],_mm_xor_si128(c2[710],_mm_xor_si128(c2[1229],_mm_xor_si128(c2[4115],_mm_xor_si128(c2[1510],_mm_xor_si128(c2[214],_mm_xor_si128(c2[4141],_mm_xor_si128(c2[3881],_mm_xor_si128(c2[1539],c2[498]))))))))))))))))))))))))))))))))));
+     d2[260]=simde_mm_xor_si128(c2[263],simde_mm_xor_si128(c2[3],simde_mm_xor_si128(c2[788],simde_mm_xor_si128(c2[3901],simde_mm_xor_si128(c2[1334],simde_mm_xor_si128(c2[1074],simde_mm_xor_si128(c2[1070],simde_mm_xor_si128(c2[1335],simde_mm_xor_si128(c2[1078],simde_mm_xor_si128(c2[577],simde_mm_xor_si128(c2[317],simde_mm_xor_si128(c2[322],simde_mm_xor_si128(c2[1902],simde_mm_xor_si128(c2[1642],simde_mm_xor_si128(c2[3206],simde_mm_xor_si128(c2[88],simde_mm_xor_si128(c2[1151],simde_mm_xor_si128(c2[2189],simde_mm_xor_si128(c2[892],simde_mm_xor_si128(c2[886],simde_mm_xor_si128(c2[1950],simde_mm_xor_si128(c2[131],simde_mm_xor_si128(c2[1722],simde_mm_xor_si128(c2[1462],simde_mm_xor_si128(c2[2761],simde_mm_xor_si128(c2[3282],simde_mm_xor_si128(c2[710],simde_mm_xor_si128(c2[1229],simde_mm_xor_si128(c2[4115],simde_mm_xor_si128(c2[1510],simde_mm_xor_si128(c2[214],simde_mm_xor_si128(c2[4141],simde_mm_xor_si128(c2[3881],simde_mm_xor_si128(c2[1539],c2[498]))))))))))))))))))))))))))))))))));
 
 //row: 21
-     d2[273]=_mm_xor_si128(c2[1310],_mm_xor_si128(c2[2082],_mm_xor_si128(c2[1049],_mm_xor_si128(c2[2600],_mm_xor_si128(c2[2368],_mm_xor_si128(c2[2377],_mm_xor_si128(c2[2889],_mm_xor_si128(c2[2629],_mm_xor_si128(c2[1624],_mm_xor_si128(c2[1876],_mm_xor_si128(c2[1616],_mm_xor_si128(c2[2949],_mm_xor_si128(c2[341],_mm_xor_si128(c2[1382],_mm_xor_si128(c2[2445],_mm_xor_si128(c2[3496],_mm_xor_si128(c2[2446],_mm_xor_si128(c2[2186],_mm_xor_si128(c2[3257],_mm_xor_si128(c2[1698],_mm_xor_si128(c2[1438],_mm_xor_si128(c2[2756],_mm_xor_si128(c2[4068],_mm_xor_si128(c2[677],_mm_xor_si128(c2[417],_mm_xor_si128(c2[2004],_mm_xor_si128(c2[2783],_mm_xor_si128(c2[2523],_mm_xor_si128(c2[1250],_mm_xor_si128(c2[2817],_mm_xor_si128(c2[1768],_mm_xor_si128(c2[1508],_mm_xor_si128(c2[2815],_mm_xor_si128(c2[1016],_mm_xor_si128(c2[2846],_mm_xor_si128(c2[2065],c2[1805]))))))))))))))))))))))))))))))))))));
+     d2[273]=simde_mm_xor_si128(c2[1310],simde_mm_xor_si128(c2[2082],simde_mm_xor_si128(c2[1049],simde_mm_xor_si128(c2[2600],simde_mm_xor_si128(c2[2368],simde_mm_xor_si128(c2[2377],simde_mm_xor_si128(c2[2889],simde_mm_xor_si128(c2[2629],simde_mm_xor_si128(c2[1624],simde_mm_xor_si128(c2[1876],simde_mm_xor_si128(c2[1616],simde_mm_xor_si128(c2[2949],simde_mm_xor_si128(c2[341],simde_mm_xor_si128(c2[1382],simde_mm_xor_si128(c2[2445],simde_mm_xor_si128(c2[3496],simde_mm_xor_si128(c2[2446],simde_mm_xor_si128(c2[2186],simde_mm_xor_si128(c2[3257],simde_mm_xor_si128(c2[1698],simde_mm_xor_si128(c2[1438],simde_mm_xor_si128(c2[2756],simde_mm_xor_si128(c2[4068],simde_mm_xor_si128(c2[677],simde_mm_xor_si128(c2[417],simde_mm_xor_si128(c2[2004],simde_mm_xor_si128(c2[2783],simde_mm_xor_si128(c2[2523],simde_mm_xor_si128(c2[1250],simde_mm_xor_si128(c2[2817],simde_mm_xor_si128(c2[1768],simde_mm_xor_si128(c2[1508],simde_mm_xor_si128(c2[2815],simde_mm_xor_si128(c2[1016],simde_mm_xor_si128(c2[2846],simde_mm_xor_si128(c2[2065],c2[1805]))))))))))))))))))))))))))))))))))));
 
 //row: 22
-     d2[286]=_mm_xor_si128(c2[2638],c2[3699]);
+     d2[286]=simde_mm_xor_si128(c2[2638],c2[3699]);
 
 //row: 23
-     d2[299]=_mm_xor_si128(c2[1043],_mm_xor_si128(c2[2688],c2[3510]));
+     d2[299]=simde_mm_xor_si128(c2[1043],simde_mm_xor_si128(c2[2688],c2[3510]));
 
 //row: 24
-     d2[312]=_mm_xor_si128(c2[293],_mm_xor_si128(c2[322],c2[2319]));
+     d2[312]=simde_mm_xor_si128(c2[293],simde_mm_xor_si128(c2[322],c2[2319]));
 
 //row: 25
-     d2[325]=_mm_xor_si128(c2[1312],c2[662]);
+     d2[325]=simde_mm_xor_si128(c2[1312],c2[662]);
 
 //row: 26
-     d2[338]=_mm_xor_si128(c2[12],_mm_xor_si128(c2[3911],_mm_xor_si128(c2[3905],_mm_xor_si128(c2[784],_mm_xor_si128(c2[524],_mm_xor_si128(c2[531],_mm_xor_si128(c2[3650],_mm_xor_si128(c2[3644],_mm_xor_si128(c2[1070],_mm_xor_si128(c2[810],_mm_xor_si128(c2[817],_mm_xor_si128(c2[806],_mm_xor_si128(c2[813],_mm_xor_si128(c2[1071],_mm_xor_si128(c2[1338],_mm_xor_si128(c2[1078],_mm_xor_si128(c2[313],_mm_xor_si128(c2[53],_mm_xor_si128(c2[60],_mm_xor_si128(c2[58],_mm_xor_si128(c2[312],_mm_xor_si128(c2[52],_mm_xor_si128(c2[1102],_mm_xor_si128(c2[1638],_mm_xor_si128(c2[1378],_mm_xor_si128(c2[1385],_mm_xor_si128(c2[3202],_mm_xor_si128(c2[2942],_mm_xor_si128(c2[2949],_mm_xor_si128(c2[3983],_mm_xor_si128(c2[3990],_mm_xor_si128(c2[1147],_mm_xor_si128(c2[887],_mm_xor_si128(c2[894],_mm_xor_si128(c2[1925],_mm_xor_si128(c2[1932],_mm_xor_si128(c2[628],_mm_xor_si128(c2[895],_mm_xor_si128(c2[635],_mm_xor_si128(c2[1959],_mm_xor_si128(c2[1699],_mm_xor_si128(c2[1693],_mm_xor_si128(c2[4039],_mm_xor_si128(c2[134],_mm_xor_si128(c2[4033],_mm_xor_si128(c2[1458],_mm_xor_si128(c2[1198],_mm_xor_si128(c2[1205],_mm_xor_si128(c2[2757],_mm_xor_si128(c2[2497],_mm_xor_si128(c2[2504],_mm_xor_si128(c2[3018],_mm_xor_si128(c2[3285],_mm_xor_si128(c2[3025],_mm_xor_si128(c2[706],_mm_xor_si128(c2[446],_mm_xor_si128(c2[453],_mm_xor_si128(c2[965],_mm_xor_si128(c2[1232],_mm_xor_si128(c2[972],_mm_xor_si128(c2[3312],_mm_xor_si128(c2[4111],_mm_xor_si128(c2[3851],_mm_xor_si128(c2[3858],_mm_xor_si128(c2[1259],_mm_xor_si128(c2[1253],_mm_xor_si128(c2[4109],_mm_xor_si128(c2[217],_mm_xor_si128(c2[4116],_mm_xor_si128(c2[3877],_mm_xor_si128(c2[3617],_mm_xor_si128(c2[3624],_mm_xor_si128(c2[1535],_mm_xor_si128(c2[1275],_mm_xor_si128(c2[1282],_mm_xor_si128(c2[234],_mm_xor_si128(c2[501],c2[241])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+     d2[338]=simde_mm_xor_si128(c2[12],simde_mm_xor_si128(c2[3911],simde_mm_xor_si128(c2[3905],simde_mm_xor_si128(c2[784],simde_mm_xor_si128(c2[524],simde_mm_xor_si128(c2[531],simde_mm_xor_si128(c2[3650],simde_mm_xor_si128(c2[3644],simde_mm_xor_si128(c2[1070],simde_mm_xor_si128(c2[810],simde_mm_xor_si128(c2[817],simde_mm_xor_si128(c2[806],simde_mm_xor_si128(c2[813],simde_mm_xor_si128(c2[1071],simde_mm_xor_si128(c2[1338],simde_mm_xor_si128(c2[1078],simde_mm_xor_si128(c2[313],simde_mm_xor_si128(c2[53],simde_mm_xor_si128(c2[60],simde_mm_xor_si128(c2[58],simde_mm_xor_si128(c2[312],simde_mm_xor_si128(c2[52],simde_mm_xor_si128(c2[1102],simde_mm_xor_si128(c2[1638],simde_mm_xor_si128(c2[1378],simde_mm_xor_si128(c2[1385],simde_mm_xor_si128(c2[3202],simde_mm_xor_si128(c2[2942],simde_mm_xor_si128(c2[2949],simde_mm_xor_si128(c2[3983],simde_mm_xor_si128(c2[3990],simde_mm_xor_si128(c2[1147],simde_mm_xor_si128(c2[887],simde_mm_xor_si128(c2[894],simde_mm_xor_si128(c2[1925],simde_mm_xor_si128(c2[1932],simde_mm_xor_si128(c2[628],simde_mm_xor_si128(c2[895],simde_mm_xor_si128(c2[635],simde_mm_xor_si128(c2[1959],simde_mm_xor_si128(c2[1699],simde_mm_xor_si128(c2[1693],simde_mm_xor_si128(c2[4039],simde_mm_xor_si128(c2[134],simde_mm_xor_si128(c2[4033],simde_mm_xor_si128(c2[1458],simde_mm_xor_si128(c2[1198],simde_mm_xor_si128(c2[1205],simde_mm_xor_si128(c2[2757],simde_mm_xor_si128(c2[2497],simde_mm_xor_si128(c2[2504],simde_mm_xor_si128(c2[3018],simde_mm_xor_si128(c2[3285],simde_mm_xor_si128(c2[3025],simde_mm_xor_si128(c2[706],simde_mm_xor_si128(c2[446],simde_mm_xor_si128(c2[453],simde_mm_xor_si128(c2[965],simde_mm_xor_si128(c2[1232],simde_mm_xor_si128(c2[972],simde_mm_xor_si128(c2[3312],simde_mm_xor_si128(c2[4111],simde_mm_xor_si128(c2[3851],simde_mm_xor_si128(c2[3858],simde_mm_xor_si128(c2[1259],simde_mm_xor_si128(c2[1253],simde_mm_xor_si128(c2[4109],simde_mm_xor_si128(c2[217],simde_mm_xor_si128(c2[4116],simde_mm_xor_si128(c2[3877],simde_mm_xor_si128(c2[3617],simde_mm_xor_si128(c2[3624],simde_mm_xor_si128(c2[1535],simde_mm_xor_si128(c2[1275],simde_mm_xor_si128(c2[1282],simde_mm_xor_si128(c2[234],simde_mm_xor_si128(c2[501],c2[241])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
 
 //row: 27
-     d2[351]=_mm_xor_si128(c2[2090],c2[428]);
+     d2[351]=simde_mm_xor_si128(c2[2090],c2[428]);
 
 //row: 28
-     d2[364]=_mm_xor_si128(c2[3666],_mm_xor_si128(c2[2663],c2[3772]));
+     d2[364]=simde_mm_xor_si128(c2[3666],simde_mm_xor_si128(c2[2663],c2[3772]));
 
 //row: 29
-     d2[377]=_mm_xor_si128(c2[523],c2[2965]);
+     d2[377]=simde_mm_xor_si128(c2[523],c2[2965]);
 
 //row: 30
-     d2[390]=_mm_xor_si128(c2[1616],_mm_xor_si128(c2[391],_mm_xor_si128(c2[705],c2[1794])));
+     d2[390]=simde_mm_xor_si128(c2[1616],simde_mm_xor_si128(c2[391],simde_mm_xor_si128(c2[705],c2[1794])));
 
 //row: 31
-     d2[403]=_mm_xor_si128(c2[2867],_mm_xor_si128(c2[3652],_mm_xor_si128(c2[2606],_mm_xor_si128(c2[3938],_mm_xor_si128(c2[3934],_mm_xor_si128(c2[287],_mm_xor_si128(c2[27],_mm_xor_si128(c2[813],_mm_xor_si128(c2[3181],_mm_xor_si128(c2[3433],_mm_xor_si128(c2[3173],_mm_xor_si128(c2[347],_mm_xor_si128(c2[1898],_mm_xor_si128(c2[2939],_mm_xor_si128(c2[4015],_mm_xor_si128(c2[894],_mm_xor_si128(c2[4016],_mm_xor_si128(c2[3756],_mm_xor_si128(c2[655],_mm_xor_si128(c2[3255],_mm_xor_si128(c2[2995],_mm_xor_si128(c2[167],_mm_xor_si128(c2[1466],_mm_xor_si128(c2[2247],_mm_xor_si128(c2[1987],_mm_xor_si128(c2[3574],_mm_xor_si128(c2[194],_mm_xor_si128(c2[4093],_mm_xor_si128(c2[2820],_mm_xor_si128(c2[215],_mm_xor_si128(c2[3338],_mm_xor_si128(c2[3078],_mm_xor_si128(c2[2586],_mm_xor_si128(c2[244],_mm_xor_si128(c2[3622],c2[3362])))))))))))))))))))))))))))))))))));
+     d2[403]=simde_mm_xor_si128(c2[2867],simde_mm_xor_si128(c2[3652],simde_mm_xor_si128(c2[2606],simde_mm_xor_si128(c2[3938],simde_mm_xor_si128(c2[3934],simde_mm_xor_si128(c2[287],simde_mm_xor_si128(c2[27],simde_mm_xor_si128(c2[813],simde_mm_xor_si128(c2[3181],simde_mm_xor_si128(c2[3433],simde_mm_xor_si128(c2[3173],simde_mm_xor_si128(c2[347],simde_mm_xor_si128(c2[1898],simde_mm_xor_si128(c2[2939],simde_mm_xor_si128(c2[4015],simde_mm_xor_si128(c2[894],simde_mm_xor_si128(c2[4016],simde_mm_xor_si128(c2[3756],simde_mm_xor_si128(c2[655],simde_mm_xor_si128(c2[3255],simde_mm_xor_si128(c2[2995],simde_mm_xor_si128(c2[167],simde_mm_xor_si128(c2[1466],simde_mm_xor_si128(c2[2247],simde_mm_xor_si128(c2[1987],simde_mm_xor_si128(c2[3574],simde_mm_xor_si128(c2[194],simde_mm_xor_si128(c2[4093],simde_mm_xor_si128(c2[2820],simde_mm_xor_si128(c2[215],simde_mm_xor_si128(c2[3338],simde_mm_xor_si128(c2[3078],simde_mm_xor_si128(c2[2586],simde_mm_xor_si128(c2[244],simde_mm_xor_si128(c2[3622],c2[3362])))))))))))))))))))))))))))))))))));
 
 //row: 32
-     d2[416]=_mm_xor_si128(c2[526],_mm_xor_si128(c2[266],_mm_xor_si128(c2[1311],_mm_xor_si128(c2[1051],_mm_xor_si128(c2[5],_mm_xor_si128(c2[3646],_mm_xor_si128(c2[1597],_mm_xor_si128(c2[1337],_mm_xor_si128(c2[1333],_mm_xor_si128(c2[1598],_mm_xor_si128(c2[840],_mm_xor_si128(c2[580],_mm_xor_si128(c2[572],_mm_xor_si128(c2[2165],_mm_xor_si128(c2[1905],_mm_xor_si128(c2[3729],_mm_xor_si128(c2[3469],_mm_xor_si128(c2[338],_mm_xor_si128(c2[1674],_mm_xor_si128(c2[1414],_mm_xor_si128(c2[2452],_mm_xor_si128(c2[1155],_mm_xor_si128(c2[2473],_mm_xor_si128(c2[2213],_mm_xor_si128(c2[394],_mm_xor_si128(c2[130],_mm_xor_si128(c2[1985],_mm_xor_si128(c2[1725],_mm_xor_si128(c2[3284],_mm_xor_si128(c2[3024],_mm_xor_si128(c2[3545],_mm_xor_si128(c2[1233],_mm_xor_si128(c2[973],_mm_xor_si128(c2[1492],_mm_xor_si128(c2[479],_mm_xor_si128(c2[219],_mm_xor_si128(c2[1773],_mm_xor_si128(c2[477],_mm_xor_si128(c2[245],_mm_xor_si128(c2[4144],_mm_xor_si128(c2[2062],_mm_xor_si128(c2[1802],c2[761]))))))))))))))))))))))))))))))))))))))))));
+     d2[416]=simde_mm_xor_si128(c2[526],simde_mm_xor_si128(c2[266],simde_mm_xor_si128(c2[1311],simde_mm_xor_si128(c2[1051],simde_mm_xor_si128(c2[5],simde_mm_xor_si128(c2[3646],simde_mm_xor_si128(c2[1597],simde_mm_xor_si128(c2[1337],simde_mm_xor_si128(c2[1333],simde_mm_xor_si128(c2[1598],simde_mm_xor_si128(c2[840],simde_mm_xor_si128(c2[580],simde_mm_xor_si128(c2[572],simde_mm_xor_si128(c2[2165],simde_mm_xor_si128(c2[1905],simde_mm_xor_si128(c2[3729],simde_mm_xor_si128(c2[3469],simde_mm_xor_si128(c2[338],simde_mm_xor_si128(c2[1674],simde_mm_xor_si128(c2[1414],simde_mm_xor_si128(c2[2452],simde_mm_xor_si128(c2[1155],simde_mm_xor_si128(c2[2473],simde_mm_xor_si128(c2[2213],simde_mm_xor_si128(c2[394],simde_mm_xor_si128(c2[130],simde_mm_xor_si128(c2[1985],simde_mm_xor_si128(c2[1725],simde_mm_xor_si128(c2[3284],simde_mm_xor_si128(c2[3024],simde_mm_xor_si128(c2[3545],simde_mm_xor_si128(c2[1233],simde_mm_xor_si128(c2[973],simde_mm_xor_si128(c2[1492],simde_mm_xor_si128(c2[479],simde_mm_xor_si128(c2[219],simde_mm_xor_si128(c2[1773],simde_mm_xor_si128(c2[477],simde_mm_xor_si128(c2[245],simde_mm_xor_si128(c2[4144],simde_mm_xor_si128(c2[2062],simde_mm_xor_si128(c2[1802],c2[761]))))))))))))))))))))))))))))))))))))))))));
 
 //row: 33
-     d2[429]=_mm_xor_si128(c2[267],_mm_xor_si128(c2[1052],_mm_xor_si128(c2[6],_mm_xor_si128(c2[1338],_mm_xor_si128(c2[1334],_mm_xor_si128(c2[1586],_mm_xor_si128(c2[581],_mm_xor_si128(c2[573],_mm_xor_si128(c2[842],_mm_xor_si128(c2[1906],_mm_xor_si128(c2[3470],_mm_xor_si128(c2[339],_mm_xor_si128(c2[1415],_mm_xor_si128(c2[2453],_mm_xor_si128(c2[1156],_mm_xor_si128(c2[2214],_mm_xor_si128(c2[395],_mm_xor_si128(c2[1726],_mm_xor_si128(c2[3025],_mm_xor_si128(c2[3546],_mm_xor_si128(c2[974],_mm_xor_si128(c2[1493],_mm_xor_si128(c2[3572],_mm_xor_si128(c2[220],_mm_xor_si128(c2[1774],_mm_xor_si128(c2[478],_mm_xor_si128(c2[4145],_mm_xor_si128(c2[1803],c2[762]))))))))))))))))))))))))))));
+     d2[429]=simde_mm_xor_si128(c2[267],simde_mm_xor_si128(c2[1052],simde_mm_xor_si128(c2[6],simde_mm_xor_si128(c2[1338],simde_mm_xor_si128(c2[1334],simde_mm_xor_si128(c2[1586],simde_mm_xor_si128(c2[581],simde_mm_xor_si128(c2[573],simde_mm_xor_si128(c2[842],simde_mm_xor_si128(c2[1906],simde_mm_xor_si128(c2[3470],simde_mm_xor_si128(c2[339],simde_mm_xor_si128(c2[1415],simde_mm_xor_si128(c2[2453],simde_mm_xor_si128(c2[1156],simde_mm_xor_si128(c2[2214],simde_mm_xor_si128(c2[395],simde_mm_xor_si128(c2[1726],simde_mm_xor_si128(c2[3025],simde_mm_xor_si128(c2[3546],simde_mm_xor_si128(c2[974],simde_mm_xor_si128(c2[1493],simde_mm_xor_si128(c2[3572],simde_mm_xor_si128(c2[220],simde_mm_xor_si128(c2[1774],simde_mm_xor_si128(c2[478],simde_mm_xor_si128(c2[4145],simde_mm_xor_si128(c2[1803],c2[762]))))))))))))))))))))))))))));
 
 //row: 34
-     d2[442]=_mm_xor_si128(c2[3647],_mm_xor_si128(c2[3387],_mm_xor_si128(c2[3907],_mm_xor_si128(c2[260],_mm_xor_si128(c2[0],_mm_xor_si128(c2[520],_mm_xor_si128(c2[3126],_mm_xor_si128(c2[3646],_mm_xor_si128(c2[1312],_mm_xor_si128(c2[546],_mm_xor_si128(c2[286],_mm_xor_si128(c2[806],_mm_xor_si128(c2[295],_mm_xor_si128(c2[815],_mm_xor_si128(c2[547],_mm_xor_si128(c2[1327],_mm_xor_si128(c2[1067],_mm_xor_si128(c2[3961],_mm_xor_si128(c2[3701],_mm_xor_si128(c2[62],_mm_xor_si128(c2[3693],_mm_xor_si128(c2[314],_mm_xor_si128(c2[54],_mm_xor_si128(c2[1127],_mm_xor_si128(c2[867],_mm_xor_si128(c2[1387],_mm_xor_si128(c2[2678],_mm_xor_si128(c2[2418],_mm_xor_si128(c2[2938],_mm_xor_si128(c2[3459],_mm_xor_si128(c2[3979],_mm_xor_si128(c2[636],_mm_xor_si128(c2[376],_mm_xor_si128(c2[896],_mm_xor_si128(c2[1414],_mm_xor_si128(c2[1934],_mm_xor_si128(c2[104],_mm_xor_si128(c2[884],_mm_xor_si128(c2[624],_mm_xor_si128(c2[1435],_mm_xor_si128(c2[1175],_mm_xor_si128(c2[1695],_mm_xor_si128(c2[3515],_mm_xor_si128(c2[136],_mm_xor_si128(c2[4035],_mm_xor_si128(c2[947],_mm_xor_si128(c2[687],_mm_xor_si128(c2[1207],_mm_xor_si128(c2[2246],_mm_xor_si128(c2[1986],_mm_xor_si128(c2[2506],_mm_xor_si128(c2[2507],_mm_xor_si128(c2[3287],_mm_xor_si128(c2[3027],_mm_xor_si128(c2[182],_mm_xor_si128(c2[4094],_mm_xor_si128(c2[442],_mm_xor_si128(c2[454],_mm_xor_si128(c2[1234],_mm_xor_si128(c2[974],_mm_xor_si128(c2[3600],_mm_xor_si128(c2[3340],_mm_xor_si128(c2[3860],_mm_xor_si128(c2[735],_mm_xor_si128(c2[1255],_mm_xor_si128(c2[3598],_mm_xor_si128(c2[219],_mm_xor_si128(c2[4118],_mm_xor_si128(c2[3366],_mm_xor_si128(c2[3106],_mm_xor_si128(c2[3626],_mm_xor_si128(c2[1024],_mm_xor_si128(c2[764],_mm_xor_si128(c2[1284],_mm_xor_si128(c2[3882],_mm_xor_si128(c2[503],c2[243]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+     d2[442]=simde_mm_xor_si128(c2[3647],simde_mm_xor_si128(c2[3387],simde_mm_xor_si128(c2[3907],simde_mm_xor_si128(c2[260],simde_mm_xor_si128(c2[0],simde_mm_xor_si128(c2[520],simde_mm_xor_si128(c2[3126],simde_mm_xor_si128(c2[3646],simde_mm_xor_si128(c2[1312],simde_mm_xor_si128(c2[546],simde_mm_xor_si128(c2[286],simde_mm_xor_si128(c2[806],simde_mm_xor_si128(c2[295],simde_mm_xor_si128(c2[815],simde_mm_xor_si128(c2[547],simde_mm_xor_si128(c2[1327],simde_mm_xor_si128(c2[1067],simde_mm_xor_si128(c2[3961],simde_mm_xor_si128(c2[3701],simde_mm_xor_si128(c2[62],simde_mm_xor_si128(c2[3693],simde_mm_xor_si128(c2[314],simde_mm_xor_si128(c2[54],simde_mm_xor_si128(c2[1127],simde_mm_xor_si128(c2[867],simde_mm_xor_si128(c2[1387],simde_mm_xor_si128(c2[2678],simde_mm_xor_si128(c2[2418],simde_mm_xor_si128(c2[2938],simde_mm_xor_si128(c2[3459],simde_mm_xor_si128(c2[3979],simde_mm_xor_si128(c2[636],simde_mm_xor_si128(c2[376],simde_mm_xor_si128(c2[896],simde_mm_xor_si128(c2[1414],simde_mm_xor_si128(c2[1934],simde_mm_xor_si128(c2[104],simde_mm_xor_si128(c2[884],simde_mm_xor_si128(c2[624],simde_mm_xor_si128(c2[1435],simde_mm_xor_si128(c2[1175],simde_mm_xor_si128(c2[1695],simde_mm_xor_si128(c2[3515],simde_mm_xor_si128(c2[136],simde_mm_xor_si128(c2[4035],simde_mm_xor_si128(c2[947],simde_mm_xor_si128(c2[687],simde_mm_xor_si128(c2[1207],simde_mm_xor_si128(c2[2246],simde_mm_xor_si128(c2[1986],simde_mm_xor_si128(c2[2506],simde_mm_xor_si128(c2[2507],simde_mm_xor_si128(c2[3287],simde_mm_xor_si128(c2[3027],simde_mm_xor_si128(c2[182],simde_mm_xor_si128(c2[4094],simde_mm_xor_si128(c2[442],simde_mm_xor_si128(c2[454],simde_mm_xor_si128(c2[1234],simde_mm_xor_si128(c2[974],simde_mm_xor_si128(c2[3600],simde_mm_xor_si128(c2[3340],simde_mm_xor_si128(c2[3860],simde_mm_xor_si128(c2[735],simde_mm_xor_si128(c2[1255],simde_mm_xor_si128(c2[3598],simde_mm_xor_si128(c2[219],simde_mm_xor_si128(c2[4118],simde_mm_xor_si128(c2[3366],simde_mm_xor_si128(c2[3106],simde_mm_xor_si128(c2[3626],simde_mm_xor_si128(c2[1024],simde_mm_xor_si128(c2[764],simde_mm_xor_si128(c2[1284],simde_mm_xor_si128(c2[3882],simde_mm_xor_si128(c2[503],c2[243]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
 
 //row: 35
-     d2[455]=_mm_xor_si128(c2[1047],_mm_xor_si128(c2[787],_mm_xor_si128(c2[1572],_mm_xor_si128(c2[526],_mm_xor_si128(c2[2118],_mm_xor_si128(c2[1858],_mm_xor_si128(c2[1854],_mm_xor_si128(c2[2106],_mm_xor_si128(c2[3415],_mm_xor_si128(c2[1361],_mm_xor_si128(c2[1101],_mm_xor_si128(c2[1093],_mm_xor_si128(c2[2686],_mm_xor_si128(c2[2426],_mm_xor_si128(c2[3990],_mm_xor_si128(c2[859],_mm_xor_si128(c2[1935],_mm_xor_si128(c2[2973],_mm_xor_si128(c2[1676],_mm_xor_si128(c2[2734],_mm_xor_si128(c2[915],_mm_xor_si128(c2[1960],_mm_xor_si128(c2[2506],_mm_xor_si128(c2[2246],_mm_xor_si128(c2[3545],_mm_xor_si128(c2[4066],_mm_xor_si128(c2[1494],_mm_xor_si128(c2[2013],_mm_xor_si128(c2[740],_mm_xor_si128(c2[2294],_mm_xor_si128(c2[998],_mm_xor_si128(c2[766],_mm_xor_si128(c2[506],_mm_xor_si128(c2[2323],c2[1282]))))))))))))))))))))))))))))))))));
+     d2[455]=simde_mm_xor_si128(c2[1047],simde_mm_xor_si128(c2[787],simde_mm_xor_si128(c2[1572],simde_mm_xor_si128(c2[526],simde_mm_xor_si128(c2[2118],simde_mm_xor_si128(c2[1858],simde_mm_xor_si128(c2[1854],simde_mm_xor_si128(c2[2106],simde_mm_xor_si128(c2[3415],simde_mm_xor_si128(c2[1361],simde_mm_xor_si128(c2[1101],simde_mm_xor_si128(c2[1093],simde_mm_xor_si128(c2[2686],simde_mm_xor_si128(c2[2426],simde_mm_xor_si128(c2[3990],simde_mm_xor_si128(c2[859],simde_mm_xor_si128(c2[1935],simde_mm_xor_si128(c2[2973],simde_mm_xor_si128(c2[1676],simde_mm_xor_si128(c2[2734],simde_mm_xor_si128(c2[915],simde_mm_xor_si128(c2[1960],simde_mm_xor_si128(c2[2506],simde_mm_xor_si128(c2[2246],simde_mm_xor_si128(c2[3545],simde_mm_xor_si128(c2[4066],simde_mm_xor_si128(c2[1494],simde_mm_xor_si128(c2[2013],simde_mm_xor_si128(c2[740],simde_mm_xor_si128(c2[2294],simde_mm_xor_si128(c2[998],simde_mm_xor_si128(c2[766],simde_mm_xor_si128(c2[506],simde_mm_xor_si128(c2[2323],c2[1282]))))))))))))))))))))))))))))))))));
 
 //row: 36
-     d2[468]=_mm_xor_si128(c2[1312],_mm_xor_si128(c2[1882],c2[973]));
+     d2[468]=simde_mm_xor_si128(c2[1312],simde_mm_xor_si128(c2[1882],c2[973]));
 
 //row: 37
-     d2[481]=_mm_xor_si128(c2[787],_mm_xor_si128(c2[3907],_mm_xor_si128(c2[1572],_mm_xor_si128(c2[520],_mm_xor_si128(c2[526],_mm_xor_si128(c2[3646],_mm_xor_si128(c2[1858],_mm_xor_si128(c2[806],_mm_xor_si128(c2[1854],_mm_xor_si128(c2[815],_mm_xor_si128(c2[2106],_mm_xor_si128(c2[1327],_mm_xor_si128(c2[1067],_mm_xor_si128(c2[1101],_mm_xor_si128(c2[62],_mm_xor_si128(c2[1093],_mm_xor_si128(c2[314],_mm_xor_si128(c2[54],_mm_xor_si128(c2[2426],_mm_xor_si128(c2[1387],_mm_xor_si128(c2[3990],_mm_xor_si128(c2[2938],_mm_xor_si128(c2[859],_mm_xor_si128(c2[3979],_mm_xor_si128(c2[1935],_mm_xor_si128(c2[896],_mm_xor_si128(c2[2973],_mm_xor_si128(c2[1934],_mm_xor_si128(c2[1676],_mm_xor_si128(c2[884],_mm_xor_si128(c2[624],_mm_xor_si128(c2[2734],_mm_xor_si128(c2[1695],_mm_xor_si128(c2[915],_mm_xor_si128(c2[136],_mm_xor_si128(c2[4035],_mm_xor_si128(c2[2246],_mm_xor_si128(c2[1207],_mm_xor_si128(c2[3545],_mm_xor_si128(c2[2506],_mm_xor_si128(c2[4066],_mm_xor_si128(c2[3287],_mm_xor_si128(c2[3027],_mm_xor_si128(c2[1494],_mm_xor_si128(c2[442],_mm_xor_si128(c2[2013],_mm_xor_si128(c2[1234],_mm_xor_si128(c2[974],_mm_xor_si128(c2[740],_mm_xor_si128(c2[3860],_mm_xor_si128(c2[2294],_mm_xor_si128(c2[1255],_mm_xor_si128(c2[998],_mm_xor_si128(c2[219],_mm_xor_si128(c2[4118],_mm_xor_si128(c2[506],_mm_xor_si128(c2[3626],_mm_xor_si128(c2[2323],_mm_xor_si128(c2[1284],_mm_xor_si128(c2[1282],_mm_xor_si128(c2[503],c2[243])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+     d2[481]=simde_mm_xor_si128(c2[787],simde_mm_xor_si128(c2[3907],simde_mm_xor_si128(c2[1572],simde_mm_xor_si128(c2[520],simde_mm_xor_si128(c2[526],simde_mm_xor_si128(c2[3646],simde_mm_xor_si128(c2[1858],simde_mm_xor_si128(c2[806],simde_mm_xor_si128(c2[1854],simde_mm_xor_si128(c2[815],simde_mm_xor_si128(c2[2106],simde_mm_xor_si128(c2[1327],simde_mm_xor_si128(c2[1067],simde_mm_xor_si128(c2[1101],simde_mm_xor_si128(c2[62],simde_mm_xor_si128(c2[1093],simde_mm_xor_si128(c2[314],simde_mm_xor_si128(c2[54],simde_mm_xor_si128(c2[2426],simde_mm_xor_si128(c2[1387],simde_mm_xor_si128(c2[3990],simde_mm_xor_si128(c2[2938],simde_mm_xor_si128(c2[859],simde_mm_xor_si128(c2[3979],simde_mm_xor_si128(c2[1935],simde_mm_xor_si128(c2[896],simde_mm_xor_si128(c2[2973],simde_mm_xor_si128(c2[1934],simde_mm_xor_si128(c2[1676],simde_mm_xor_si128(c2[884],simde_mm_xor_si128(c2[624],simde_mm_xor_si128(c2[2734],simde_mm_xor_si128(c2[1695],simde_mm_xor_si128(c2[915],simde_mm_xor_si128(c2[136],simde_mm_xor_si128(c2[4035],simde_mm_xor_si128(c2[2246],simde_mm_xor_si128(c2[1207],simde_mm_xor_si128(c2[3545],simde_mm_xor_si128(c2[2506],simde_mm_xor_si128(c2[4066],simde_mm_xor_si128(c2[3287],simde_mm_xor_si128(c2[3027],simde_mm_xor_si128(c2[1494],simde_mm_xor_si128(c2[442],simde_mm_xor_si128(c2[2013],simde_mm_xor_si128(c2[1234],simde_mm_xor_si128(c2[974],simde_mm_xor_si128(c2[740],simde_mm_xor_si128(c2[3860],simde_mm_xor_si128(c2[2294],simde_mm_xor_si128(c2[1255],simde_mm_xor_si128(c2[998],simde_mm_xor_si128(c2[219],simde_mm_xor_si128(c2[4118],simde_mm_xor_si128(c2[506],simde_mm_xor_si128(c2[3626],simde_mm_xor_si128(c2[2323],simde_mm_xor_si128(c2[1284],simde_mm_xor_si128(c2[1282],simde_mm_xor_si128(c2[503],c2[243])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
 
 //row: 38
-     d2[494]=_mm_xor_si128(c2[3647],_mm_xor_si128(c2[3387],_mm_xor_si128(c2[0],_mm_xor_si128(c2[3126],_mm_xor_si128(c2[546],_mm_xor_si128(c2[286],_mm_xor_si128(c2[295],_mm_xor_si128(c2[547],_mm_xor_si128(c2[3415],_mm_xor_si128(c2[3961],_mm_xor_si128(c2[3701],_mm_xor_si128(c2[3693],_mm_xor_si128(c2[1127],_mm_xor_si128(c2[867],_mm_xor_si128(c2[2418],_mm_xor_si128(c2[3459],_mm_xor_si128(c2[376],_mm_xor_si128(c2[1414],_mm_xor_si128(c2[104],_mm_xor_si128(c2[1175],_mm_xor_si128(c2[3515],_mm_xor_si128(c2[3520],_mm_xor_si128(c2[947],_mm_xor_si128(c2[687],_mm_xor_si128(c2[1986],_mm_xor_si128(c2[2507],_mm_xor_si128(c2[4094],_mm_xor_si128(c2[454],_mm_xor_si128(c2[3340],_mm_xor_si128(c2[735],_mm_xor_si128(c2[3598],_mm_xor_si128(c2[3366],_mm_xor_si128(c2[3106],_mm_xor_si128(c2[764],c2[3882]))))))))))))))))))))))))))))))))));
+     d2[494]=simde_mm_xor_si128(c2[3647],simde_mm_xor_si128(c2[3387],simde_mm_xor_si128(c2[0],simde_mm_xor_si128(c2[3126],simde_mm_xor_si128(c2[546],simde_mm_xor_si128(c2[286],simde_mm_xor_si128(c2[295],simde_mm_xor_si128(c2[547],simde_mm_xor_si128(c2[3415],simde_mm_xor_si128(c2[3961],simde_mm_xor_si128(c2[3701],simde_mm_xor_si128(c2[3693],simde_mm_xor_si128(c2[1127],simde_mm_xor_si128(c2[867],simde_mm_xor_si128(c2[2418],simde_mm_xor_si128(c2[3459],simde_mm_xor_si128(c2[376],simde_mm_xor_si128(c2[1414],simde_mm_xor_si128(c2[104],simde_mm_xor_si128(c2[1175],simde_mm_xor_si128(c2[3515],simde_mm_xor_si128(c2[3520],simde_mm_xor_si128(c2[947],simde_mm_xor_si128(c2[687],simde_mm_xor_si128(c2[1986],simde_mm_xor_si128(c2[2507],simde_mm_xor_si128(c2[4094],simde_mm_xor_si128(c2[454],simde_mm_xor_si128(c2[3340],simde_mm_xor_si128(c2[735],simde_mm_xor_si128(c2[3598],simde_mm_xor_si128(c2[3366],simde_mm_xor_si128(c2[3106],simde_mm_xor_si128(c2[764],c2[3882]))))))))))))))))))))))))))))))))));
 
 //row: 39
-     d2[507]=_mm_xor_si128(c2[3126],_mm_xor_si128(c2[2866],_mm_xor_si128(c2[3911],_mm_xor_si128(c2[3651],_mm_xor_si128(c2[2605],_mm_xor_si128(c2[1311],_mm_xor_si128(c2[38],_mm_xor_si128(c2[3937],_mm_xor_si128(c2[3933],_mm_xor_si128(c2[26],_mm_xor_si128(c2[3440],_mm_xor_si128(c2[3180],_mm_xor_si128(c2[3172],_mm_xor_si128(c2[606],_mm_xor_si128(c2[346],_mm_xor_si128(c2[2170],_mm_xor_si128(c2[1910],_mm_xor_si128(c2[2938],_mm_xor_si128(c2[115],_mm_xor_si128(c2[4014],_mm_xor_si128(c2[893],_mm_xor_si128(c2[3755],_mm_xor_si128(c2[914],_mm_xor_si128(c2[654],_mm_xor_si128(c2[2994],_mm_xor_si128(c2[426],_mm_xor_si128(c2[166],_mm_xor_si128(c2[1725],_mm_xor_si128(c2[1465],_mm_xor_si128(c2[1986],_mm_xor_si128(c2[3833],_mm_xor_si128(c2[3573],_mm_xor_si128(c2[4092],_mm_xor_si128(c2[3571],_mm_xor_si128(c2[3079],_mm_xor_si128(c2[2819],_mm_xor_si128(c2[214],_mm_xor_si128(c2[3077],_mm_xor_si128(c2[2845],_mm_xor_si128(c2[2585],_mm_xor_si128(c2[503],_mm_xor_si128(c2[243],c2[3361]))))))))))))))))))))))))))))))))))))))))));
+     d2[507]=simde_mm_xor_si128(c2[3126],simde_mm_xor_si128(c2[2866],simde_mm_xor_si128(c2[3911],simde_mm_xor_si128(c2[3651],simde_mm_xor_si128(c2[2605],simde_mm_xor_si128(c2[1311],simde_mm_xor_si128(c2[38],simde_mm_xor_si128(c2[3937],simde_mm_xor_si128(c2[3933],simde_mm_xor_si128(c2[26],simde_mm_xor_si128(c2[3440],simde_mm_xor_si128(c2[3180],simde_mm_xor_si128(c2[3172],simde_mm_xor_si128(c2[606],simde_mm_xor_si128(c2[346],simde_mm_xor_si128(c2[2170],simde_mm_xor_si128(c2[1910],simde_mm_xor_si128(c2[2938],simde_mm_xor_si128(c2[115],simde_mm_xor_si128(c2[4014],simde_mm_xor_si128(c2[893],simde_mm_xor_si128(c2[3755],simde_mm_xor_si128(c2[914],simde_mm_xor_si128(c2[654],simde_mm_xor_si128(c2[2994],simde_mm_xor_si128(c2[426],simde_mm_xor_si128(c2[166],simde_mm_xor_si128(c2[1725],simde_mm_xor_si128(c2[1465],simde_mm_xor_si128(c2[1986],simde_mm_xor_si128(c2[3833],simde_mm_xor_si128(c2[3573],simde_mm_xor_si128(c2[4092],simde_mm_xor_si128(c2[3571],simde_mm_xor_si128(c2[3079],simde_mm_xor_si128(c2[2819],simde_mm_xor_si128(c2[214],simde_mm_xor_si128(c2[3077],simde_mm_xor_si128(c2[2845],simde_mm_xor_si128(c2[2585],simde_mm_xor_si128(c2[503],simde_mm_xor_si128(c2[243],c2[3361]))))))))))))))))))))))))))))))))))))))))));
 
 //row: 40
-     d2[520]=_mm_xor_si128(c2[266],_mm_xor_si128(c2[267],_mm_xor_si128(c2[1051],_mm_xor_si128(c2[1052],_mm_xor_si128(c2[5],_mm_xor_si128(c2[6],_mm_xor_si128(c2[1337],_mm_xor_si128(c2[1338],_mm_xor_si128(c2[1333],_mm_xor_si128(c2[1334],_mm_xor_si128(c2[1598],_mm_xor_si128(c2[1846],_mm_xor_si128(c2[1586],_mm_xor_si128(c2[580],_mm_xor_si128(c2[581],_mm_xor_si128(c2[572],_mm_xor_si128(c2[833],_mm_xor_si128(c2[573],_mm_xor_si128(c2[324],_mm_xor_si128(c2[1905],_mm_xor_si128(c2[1906],_mm_xor_si128(c2[3469],_mm_xor_si128(c2[3470],_mm_xor_si128(c2[338],_mm_xor_si128(c2[339],_mm_xor_si128(c2[1414],_mm_xor_si128(c2[1415],_mm_xor_si128(c2[2452],_mm_xor_si128(c2[2453],_mm_xor_si128(c2[1155],_mm_xor_si128(c2[1416],_mm_xor_si128(c2[1156],_mm_xor_si128(c2[2213],_mm_xor_si128(c2[2214],_mm_xor_si128(c2[394],_mm_xor_si128(c2[655],_mm_xor_si128(c2[395],_mm_xor_si128(c2[1725],_mm_xor_si128(c2[1726],_mm_xor_si128(c2[3024],_mm_xor_si128(c2[3025],_mm_xor_si128(c2[3545],_mm_xor_si128(c2[3806],_mm_xor_si128(c2[3546],_mm_xor_si128(c2[973],_mm_xor_si128(c2[974],_mm_xor_si128(c2[1492],_mm_xor_si128(c2[1753],_mm_xor_si128(c2[1493],_mm_xor_si128(c2[219],_mm_xor_si128(c2[220],_mm_xor_si128(c2[1773],_mm_xor_si128(c2[1774],_mm_xor_si128(c2[477],_mm_xor_si128(c2[738],_mm_xor_si128(c2[478],_mm_xor_si128(c2[4144],_mm_xor_si128(c2[4145],_mm_xor_si128(c2[1802],_mm_xor_si128(c2[1803],_mm_xor_si128(c2[761],_mm_xor_si128(c2[1022],c2[762]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+     d2[520]=simde_mm_xor_si128(c2[266],simde_mm_xor_si128(c2[267],simde_mm_xor_si128(c2[1051],simde_mm_xor_si128(c2[1052],simde_mm_xor_si128(c2[5],simde_mm_xor_si128(c2[6],simde_mm_xor_si128(c2[1337],simde_mm_xor_si128(c2[1338],simde_mm_xor_si128(c2[1333],simde_mm_xor_si128(c2[1334],simde_mm_xor_si128(c2[1598],simde_mm_xor_si128(c2[1846],simde_mm_xor_si128(c2[1586],simde_mm_xor_si128(c2[580],simde_mm_xor_si128(c2[581],simde_mm_xor_si128(c2[572],simde_mm_xor_si128(c2[833],simde_mm_xor_si128(c2[573],simde_mm_xor_si128(c2[324],simde_mm_xor_si128(c2[1905],simde_mm_xor_si128(c2[1906],simde_mm_xor_si128(c2[3469],simde_mm_xor_si128(c2[3470],simde_mm_xor_si128(c2[338],simde_mm_xor_si128(c2[339],simde_mm_xor_si128(c2[1414],simde_mm_xor_si128(c2[1415],simde_mm_xor_si128(c2[2452],simde_mm_xor_si128(c2[2453],simde_mm_xor_si128(c2[1155],simde_mm_xor_si128(c2[1416],simde_mm_xor_si128(c2[1156],simde_mm_xor_si128(c2[2213],simde_mm_xor_si128(c2[2214],simde_mm_xor_si128(c2[394],simde_mm_xor_si128(c2[655],simde_mm_xor_si128(c2[395],simde_mm_xor_si128(c2[1725],simde_mm_xor_si128(c2[1726],simde_mm_xor_si128(c2[3024],simde_mm_xor_si128(c2[3025],simde_mm_xor_si128(c2[3545],simde_mm_xor_si128(c2[3806],simde_mm_xor_si128(c2[3546],simde_mm_xor_si128(c2[973],simde_mm_xor_si128(c2[974],simde_mm_xor_si128(c2[1492],simde_mm_xor_si128(c2[1753],simde_mm_xor_si128(c2[1493],simde_mm_xor_si128(c2[219],simde_mm_xor_si128(c2[220],simde_mm_xor_si128(c2[1773],simde_mm_xor_si128(c2[1774],simde_mm_xor_si128(c2[477],simde_mm_xor_si128(c2[738],simde_mm_xor_si128(c2[478],simde_mm_xor_si128(c2[4144],simde_mm_xor_si128(c2[4145],simde_mm_xor_si128(c2[1802],simde_mm_xor_si128(c2[1803],simde_mm_xor_si128(c2[761],simde_mm_xor_si128(c2[1022],c2[762]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
 
 //row: 41
-     d2[533]=_mm_xor_si128(c2[1566],_mm_xor_si128(c2[1306],_mm_xor_si128(c2[2091],_mm_xor_si128(c2[1045],_mm_xor_si128(c2[2637],_mm_xor_si128(c2[2377],_mm_xor_si128(c2[2373],_mm_xor_si128(c2[2638],_mm_xor_si128(c2[3937],_mm_xor_si128(c2[1880],_mm_xor_si128(c2[1620],_mm_xor_si128(c2[1612],_mm_xor_si128(c2[3205],_mm_xor_si128(c2[2945],_mm_xor_si128(c2[350],_mm_xor_si128(c2[1378],_mm_xor_si128(c2[2454],_mm_xor_si128(c2[3492],_mm_xor_si128(c2[2195],_mm_xor_si128(c2[3253],_mm_xor_si128(c2[1434],_mm_xor_si128(c2[1442],_mm_xor_si128(c2[3025],_mm_xor_si128(c2[2765],_mm_xor_si128(c2[4064],_mm_xor_si128(c2[426],_mm_xor_si128(c2[2013],_mm_xor_si128(c2[2532],_mm_xor_si128(c2[1259],_mm_xor_si128(c2[2813],_mm_xor_si128(c2[1517],_mm_xor_si128(c2[1285],_mm_xor_si128(c2[1025],_mm_xor_si128(c2[2842],c2[1801]))))))))))))))))))))))))))))))))));
+     d2[533]=simde_mm_xor_si128(c2[1566],simde_mm_xor_si128(c2[1306],simde_mm_xor_si128(c2[2091],simde_mm_xor_si128(c2[1045],simde_mm_xor_si128(c2[2637],simde_mm_xor_si128(c2[2377],simde_mm_xor_si128(c2[2373],simde_mm_xor_si128(c2[2638],simde_mm_xor_si128(c2[3937],simde_mm_xor_si128(c2[1880],simde_mm_xor_si128(c2[1620],simde_mm_xor_si128(c2[1612],simde_mm_xor_si128(c2[3205],simde_mm_xor_si128(c2[2945],simde_mm_xor_si128(c2[350],simde_mm_xor_si128(c2[1378],simde_mm_xor_si128(c2[2454],simde_mm_xor_si128(c2[3492],simde_mm_xor_si128(c2[2195],simde_mm_xor_si128(c2[3253],simde_mm_xor_si128(c2[1434],simde_mm_xor_si128(c2[1442],simde_mm_xor_si128(c2[3025],simde_mm_xor_si128(c2[2765],simde_mm_xor_si128(c2[4064],simde_mm_xor_si128(c2[426],simde_mm_xor_si128(c2[2013],simde_mm_xor_si128(c2[2532],simde_mm_xor_si128(c2[1259],simde_mm_xor_si128(c2[2813],simde_mm_xor_si128(c2[1517],simde_mm_xor_si128(c2[1285],simde_mm_xor_si128(c2[1025],simde_mm_xor_si128(c2[2842],c2[1801]))))))))))))))))))))))))))))))))));
   }
 }
diff --git a/openair1/PHY/CODING/nrLDPC_encoder/ldpc_BG2_Zc224_byte.c b/openair1/PHY/CODING/nrLDPC_encoder/ldpc_BG2_Zc224_byte.c
index 4938de2b2a4763dac8d2c3df3bf529be318a8495..66438cb893769606e34490f3e04345fe836f8db3 100644
--- a/openair1/PHY/CODING/nrLDPC_encoder/ldpc_BG2_Zc224_byte.c
+++ b/openair1/PHY/CODING/nrLDPC_encoder/ldpc_BG2_Zc224_byte.c
@@ -1,9 +1,10 @@
+#ifdef __AVX2__
 #include "PHY/sse_intrin.h"
 // generated code for Zc=224, byte encoding
 static inline __attribute__ ((no_sanitize("address", "undefined"))) void ldpc_BG2_Zc224_byte(uint8_t *c,uint8_t *d) {
-  __m256i *csimd=(__m256i *)c,*dsimd=(__m256i *)d;
+  simde__m256i *csimd=(simde__m256i *)c,*dsimd=(simde__m256i *)d;
 
-  __m256i *c2,*d2;
+  simde__m256i *c2,*d2;
 
   int i2;
   for (i2=0; i2<7; i2++) {
@@ -137,3 +138,4 @@ static inline __attribute__ ((no_sanitize("address", "undefined"))) void ldpc_BG
      d2[287]=simde_mm256_xor_si256(c2[3504],simde_mm256_xor_si256(c2[3644],simde_mm256_xor_si256(c2[984],simde_mm256_xor_si256(c2[1400],simde_mm256_xor_si256(c2[4359],simde_mm256_xor_si256(c2[20],simde_mm256_xor_si256(c2[2960],simde_mm256_xor_si256(c2[1555],simde_mm256_xor_si256(c2[3377],simde_mm256_xor_si256(c2[1151],simde_mm256_xor_si256(c2[1291],simde_mm256_xor_si256(c2[3390],simde_mm256_xor_si256(c2[883],simde_mm256_xor_si256(c2[1023],simde_mm256_xor_si256(c2[2002],simde_mm256_xor_si256(c2[46],simde_mm256_xor_si256(c2[762],simde_mm256_xor_si256(c2[2437],simde_mm256_xor_si256(c2[3139],simde_mm256_xor_si256(c2[2030],simde_mm256_xor_si256(c2[2036],simde_mm256_xor_si256(c2[773],simde_mm256_xor_si256(c2[2329],simde_mm256_xor_si256(c2[2469],simde_mm256_xor_si256(c2[790],simde_mm256_xor_si256(c2[2050],simde_mm256_xor_si256(c2[3739],simde_mm256_xor_si256(c2[104],simde_mm256_xor_si256(c2[2215],simde_mm256_xor_si256(c2[1937],simde_mm256_xor_si256(c2[2072],simde_mm256_xor_si256(c2[3628],simde_mm256_xor_si256(c2[3768],simde_mm256_xor_si256(c2[2509],c2[3352]))))))))))))))))))))))))))))))))));
   }
 }
+#endif // __AVX2__
diff --git a/openair1/PHY/CODING/nrLDPC_encoder/ldpc_BG2_Zc224_byte_128.c b/openair1/PHY/CODING/nrLDPC_encoder/ldpc_BG2_Zc224_byte_128.c
new file mode 100644
index 0000000000000000000000000000000000000000..f006013fd357970a6652debead7d3cccd0a1bdb7
--- /dev/null
+++ b/openair1/PHY/CODING/nrLDPC_encoder/ldpc_BG2_Zc224_byte_128.c
@@ -0,0 +1,141 @@
+#ifndef __AVX2__
+#include "PHY/sse_intrin.h"
+// generated code for Zc=224, byte encoding
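+// Note on structure (added commentary, not generator output): this file is
+// the 128-bit fallback for the AVX2 encoder in ldpc_BG2_Zc224_byte.c and is
+// compiled only when __AVX2__ is undefined -- e.g. on ARM, where SIMDE maps
+// simde_mm_xor_si128() onto NEON. Each d2[k] assignment below accumulates,
+// over GF(2) (byte-wise XOR), the input words selected by one row of the
+// lifted BG2 matrix. The loop runs 14 times because 14 x 16-byte vectors
+// cover the Zc=224 bytes of one lifted column; the AVX2 variant covers the
+// same span with 7 x 32-byte vectors.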
+static inline void ldpc_BG2_Zc224_byte(uint8_t *c,uint8_t *d) {
+  simde__m128i *csimd=(simde__m128i *)c,*dsimd=(simde__m128i *)d;
+
+  simde__m128i *c2,*d2;
+
+  int i2;
+  for (i2=0; i2<14; i2++) {
+     c2=&csimd[i2];
+     d2=&dsimd[i2];
+
+//row: 0
+     d2[0]=simde_mm_xor_si128(c2[2244],simde_mm_xor_si128(c2[1403],simde_mm_xor_si128(c2[2249],simde_mm_xor_si128(c2[3954],simde_mm_xor_si128(c2[876],simde_mm_xor_si128(c2[2559],simde_mm_xor_si128(c2[2017],simde_mm_xor_si128(c2[1736],simde_mm_xor_si128(c2[1495],simde_mm_xor_si128(c2[3453],simde_mm_xor_si128(c2[4006],simde_mm_xor_si128(c2[959],simde_mm_xor_si128(c2[4323],simde_mm_xor_si128(c2[1234],simde_mm_xor_si128(c2[3509],simde_mm_xor_si128(c2[3507],simde_mm_xor_si128(c2[4373],simde_mm_xor_si128(c2[1015],simde_mm_xor_si128(c2[3535],simde_mm_xor_si128(c2[2448],simde_mm_xor_si128(c2[4122],simde_mm_xor_si128(c2[3865],simde_mm_xor_si128(c2[3309],simde_mm_xor_si128(c2[3593],simde_mm_xor_si128(c2[2492],simde_mm_xor_si128(c2[4453],c2[1660]))))))))))))))))))))))))));
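+// (Same pattern for every row below: since c2=&csimd[i2], c2[j] reads
+// csimd[i2+j], so the constant indices pick out the input words that the
+// given row combines into this parity word.)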
+
+//row: 1
+     d2[14]=simde_mm_xor_si128(c2[2244],simde_mm_xor_si128(c2[2524],simde_mm_xor_si128(c2[1683],simde_mm_xor_si128(c2[2529],simde_mm_xor_si128(c2[3954],simde_mm_xor_si128(c2[4234],simde_mm_xor_si128(c2[1156],simde_mm_xor_si128(c2[2839],simde_mm_xor_si128(c2[2017],simde_mm_xor_si128(c2[2297],simde_mm_xor_si128(c2[2016],simde_mm_xor_si128(c2[1495],simde_mm_xor_si128(c2[1775],simde_mm_xor_si128(c2[3733],simde_mm_xor_si128(c2[4286],simde_mm_xor_si128(c2[1239],simde_mm_xor_si128(c2[124],simde_mm_xor_si128(c2[1514],simde_mm_xor_si128(c2[3789],simde_mm_xor_si128(c2[3787],simde_mm_xor_si128(c2[4373],simde_mm_xor_si128(c2[174],simde_mm_xor_si128(c2[1295],simde_mm_xor_si128(c2[3815],simde_mm_xor_si128(c2[2728],simde_mm_xor_si128(c2[4402],simde_mm_xor_si128(c2[4145],simde_mm_xor_si128(c2[3589],simde_mm_xor_si128(c2[3873],simde_mm_xor_si128(c2[2492],simde_mm_xor_si128(c2[2772],simde_mm_xor_si128(c2[254],c2[1940]))))))))))))))))))))))))))))))));
+
+//row: 2
+     d2[28]=simde_mm_xor_si128(c2[2244],simde_mm_xor_si128(c2[2524],simde_mm_xor_si128(c2[1403],simde_mm_xor_si128(c2[1683],simde_mm_xor_si128(c2[2529],simde_mm_xor_si128(c2[3954],simde_mm_xor_si128(c2[4234],simde_mm_xor_si128(c2[1156],simde_mm_xor_si128(c2[2839],simde_mm_xor_si128(c2[2017],simde_mm_xor_si128(c2[2297],simde_mm_xor_si128(c2[2016],simde_mm_xor_si128(c2[1495],simde_mm_xor_si128(c2[1775],simde_mm_xor_si128(c2[3453],simde_mm_xor_si128(c2[3733],simde_mm_xor_si128(c2[4286],simde_mm_xor_si128(c2[959],simde_mm_xor_si128(c2[1239],simde_mm_xor_si128(c2[124],simde_mm_xor_si128(c2[1514],simde_mm_xor_si128(c2[3509],simde_mm_xor_si128(c2[3789],simde_mm_xor_si128(c2[3787],simde_mm_xor_si128(c2[4373],simde_mm_xor_si128(c2[174],simde_mm_xor_si128(c2[1015],simde_mm_xor_si128(c2[1295],simde_mm_xor_si128(c2[3815],simde_mm_xor_si128(c2[2448],simde_mm_xor_si128(c2[2728],simde_mm_xor_si128(c2[4402],simde_mm_xor_si128(c2[3865],simde_mm_xor_si128(c2[4145],simde_mm_xor_si128(c2[3589],simde_mm_xor_si128(c2[3873],simde_mm_xor_si128(c2[2492],simde_mm_xor_si128(c2[2772],simde_mm_xor_si128(c2[4453],simde_mm_xor_si128(c2[254],c2[1940]))))))))))))))))))))))))))))))))))))))));
+
+//row: 3
+     d2[42]=simde_mm_xor_si128(c2[2524],simde_mm_xor_si128(c2[1683],simde_mm_xor_si128(c2[2529],simde_mm_xor_si128(c2[4234],simde_mm_xor_si128(c2[1156],simde_mm_xor_si128(c2[2559],simde_mm_xor_si128(c2[2839],simde_mm_xor_si128(c2[2297],simde_mm_xor_si128(c2[1736],simde_mm_xor_si128(c2[2016],simde_mm_xor_si128(c2[1775],simde_mm_xor_si128(c2[3733],simde_mm_xor_si128(c2[4286],simde_mm_xor_si128(c2[1239],simde_mm_xor_si128(c2[124],simde_mm_xor_si128(c2[1234],simde_mm_xor_si128(c2[1514],simde_mm_xor_si128(c2[3789],simde_mm_xor_si128(c2[3507],simde_mm_xor_si128(c2[3787],simde_mm_xor_si128(c2[174],simde_mm_xor_si128(c2[1295],simde_mm_xor_si128(c2[3535],simde_mm_xor_si128(c2[3815],simde_mm_xor_si128(c2[2728],simde_mm_xor_si128(c2[4122],simde_mm_xor_si128(c2[4402],simde_mm_xor_si128(c2[4145],simde_mm_xor_si128(c2[3589],simde_mm_xor_si128(c2[3593],simde_mm_xor_si128(c2[3873],simde_mm_xor_si128(c2[2772],simde_mm_xor_si128(c2[254],simde_mm_xor_si128(c2[1660],c2[1940]))))))))))))))))))))))))))))))))));
+
+//row: 4
+     d2[56]=simde_mm_xor_si128(c2[3650],simde_mm_xor_si128(c2[3930],simde_mm_xor_si128(c2[3089],simde_mm_xor_si128(c2[3921],simde_mm_xor_si128(c2[2252],simde_mm_xor_si128(c2[881],simde_mm_xor_si128(c2[1161],simde_mm_xor_si128(c2[2548],simde_mm_xor_si128(c2[4231],simde_mm_xor_si128(c2[29],simde_mm_xor_si128(c2[3423],simde_mm_xor_si128(c2[3703],simde_mm_xor_si128(c2[3422],simde_mm_xor_si128(c2[2887],simde_mm_xor_si128(c2[3167],simde_mm_xor_si128(c2[646],simde_mm_xor_si128(c2[1213],simde_mm_xor_si128(c2[2645],simde_mm_xor_si128(c2[1516],simde_mm_xor_si128(c2[2920],simde_mm_xor_si128(c2[702],simde_mm_xor_si128(c2[700],simde_mm_xor_si128(c2[1300],simde_mm_xor_si128(c2[1580],simde_mm_xor_si128(c2[2701],simde_mm_xor_si128(c2[728],simde_mm_xor_si128(c2[4120],simde_mm_xor_si128(c2[1329],simde_mm_xor_si128(c2[1072],simde_mm_xor_si128(c2[516],simde_mm_xor_si128(c2[786],simde_mm_xor_si128(c2[3898],simde_mm_xor_si128(c2[4178],simde_mm_xor_si128(c2[1660],c2[3332]))))))))))))))))))))))))))))))))));
+
+//row: 5
+     d2[70]=simde_mm_xor_si128(c2[2522],simde_mm_xor_si128(c2[2802],simde_mm_xor_si128(c2[1961],simde_mm_xor_si128(c2[2807],simde_mm_xor_si128(c2[2531],simde_mm_xor_si128(c2[4232],simde_mm_xor_si128(c2[33],simde_mm_xor_si128(c2[1434],simde_mm_xor_si128(c2[3117],simde_mm_xor_si128(c2[2836],simde_mm_xor_si128(c2[2309],simde_mm_xor_si128(c2[2589],simde_mm_xor_si128(c2[2308],simde_mm_xor_si128(c2[1773],simde_mm_xor_si128(c2[2053],simde_mm_xor_si128(c2[4011],simde_mm_xor_si128(c2[85],simde_mm_xor_si128(c2[1517],simde_mm_xor_si128(c2[402],simde_mm_xor_si128(c2[1792],simde_mm_xor_si128(c2[4067],simde_mm_xor_si128(c2[4065],simde_mm_xor_si128(c2[2950],simde_mm_xor_si128(c2[172],simde_mm_xor_si128(c2[452],simde_mm_xor_si128(c2[1573],simde_mm_xor_si128(c2[4093],simde_mm_xor_si128(c2[3006],simde_mm_xor_si128(c2[201],simde_mm_xor_si128(c2[3289],simde_mm_xor_si128(c2[4437],simde_mm_xor_si128(c2[3867],simde_mm_xor_si128(c2[4151],simde_mm_xor_si128(c2[2784],simde_mm_xor_si128(c2[3064],simde_mm_xor_si128(c2[532],c2[2218]))))))))))))))))))))))))))))))))))));
+
+//row: 6
+     d2[84]=simde_mm_xor_si128(c2[2523],simde_mm_xor_si128(c2[2803],simde_mm_xor_si128(c2[1962],simde_mm_xor_si128(c2[2808],simde_mm_xor_si128(c2[3087],simde_mm_xor_si128(c2[4233],simde_mm_xor_si128(c2[34],simde_mm_xor_si128(c2[1435],simde_mm_xor_si128(c2[3118],simde_mm_xor_si128(c2[2296],simde_mm_xor_si128(c2[2576],simde_mm_xor_si128(c2[2309],simde_mm_xor_si128(c2[1774],simde_mm_xor_si128(c2[2054],simde_mm_xor_si128(c2[4012],simde_mm_xor_si128(c2[86],simde_mm_xor_si128(c2[1518],simde_mm_xor_si128(c2[403],simde_mm_xor_si128(c2[1793],simde_mm_xor_si128(c2[4068],simde_mm_xor_si128(c2[4066],simde_mm_xor_si128(c2[2103],simde_mm_xor_si128(c2[173],simde_mm_xor_si128(c2[453],simde_mm_xor_si128(c2[1574],simde_mm_xor_si128(c2[4094],simde_mm_xor_si128(c2[3007],simde_mm_xor_si128(c2[202],simde_mm_xor_si128(c2[4397],simde_mm_xor_si128(c2[4424],simde_mm_xor_si128(c2[3868],simde_mm_xor_si128(c2[4152],simde_mm_xor_si128(c2[2785],simde_mm_xor_si128(c2[3065],simde_mm_xor_si128(c2[533],simde_mm_xor_si128(c2[2219],c2[4185]))))))))))))))))))))))))))))))))))));
+
+//row: 7
+     d2[98]=simde_mm_xor_si128(c2[3933],simde_mm_xor_si128(c2[4213],simde_mm_xor_si128(c2[3921],simde_mm_xor_si128(c2[3372],simde_mm_xor_si128(c2[3080],simde_mm_xor_si128(c2[4204],simde_mm_xor_si128(c2[3926],simde_mm_xor_si128(c2[1150],simde_mm_xor_si128(c2[1430],simde_mm_xor_si128(c2[1152],simde_mm_xor_si128(c2[2831],simde_mm_xor_si128(c2[2553],simde_mm_xor_si128(c2[35],simde_mm_xor_si128(c2[3956],simde_mm_xor_si128(c2[4236],simde_mm_xor_si128(c2[1994],simde_mm_xor_si128(c2[3706],simde_mm_xor_si128(c2[3986],simde_mm_xor_si128(c2[3708],simde_mm_xor_si128(c2[3705],simde_mm_xor_si128(c2[3147],simde_mm_xor_si128(c2[3427],simde_mm_xor_si128(c2[3170],simde_mm_xor_si128(c2[3450],simde_mm_xor_si128(c2[3172],simde_mm_xor_si128(c2[929],simde_mm_xor_si128(c2[651],simde_mm_xor_si128(c2[1496],simde_mm_xor_si128(c2[1204],simde_mm_xor_si128(c2[2914],simde_mm_xor_si128(c2[2636],simde_mm_xor_si128(c2[1799],simde_mm_xor_si128(c2[1521],simde_mm_xor_si128(c2[3203],simde_mm_xor_si128(c2[2645],simde_mm_xor_si128(c2[2925],simde_mm_xor_si128(c2[985],simde_mm_xor_si128(c2[707],simde_mm_xor_si128(c2[983],simde_mm_xor_si128(c2[425],simde_mm_xor_si128(c2[705],simde_mm_xor_si128(c2[3780],simde_mm_xor_si128(c2[1569],simde_mm_xor_si128(c2[1849],simde_mm_xor_si128(c2[1571],simde_mm_xor_si128(c2[2970],simde_mm_xor_si128(c2[2692],simde_mm_xor_si128(c2[1011],simde_mm_xor_si128(c2[453],simde_mm_xor_si128(c2[733],simde_mm_xor_si128(c2[4403],simde_mm_xor_si128(c2[4125],simde_mm_xor_si128(c2[1598],simde_mm_xor_si128(c2[1040],simde_mm_xor_si128(c2[1320],simde_mm_xor_si128(c2[2722],simde_mm_xor_si128(c2[1355],simde_mm_xor_si128(c2[1077],simde_mm_xor_si128(c2[785],simde_mm_xor_si128(c2[507],simde_mm_xor_si128(c2[1069],simde_mm_xor_si128(c2[511],simde_mm_xor_si128(c2[791],simde_mm_xor_si128(c2[4181],simde_mm_xor_si128(c2[4461],simde_mm_xor_si128(c2[4183],simde_mm_xor_si128(c2[1943],simde_mm_xor_si128(c2[1665],simde_mm_xor_si128(c2[3615],simde_mm_xor_si128(c2[3057],c2[3337]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 8
+     d2[112]=simde_mm_xor_si128(c2[0],simde_mm_xor_si128(c2[280],simde_mm_xor_si128(c2[3652],simde_mm_xor_si128(c2[3932],simde_mm_xor_si128(c2[285],simde_mm_xor_si128(c2[849],simde_mm_xor_si128(c2[1710],simde_mm_xor_si128(c2[1990],simde_mm_xor_si128(c2[3391],simde_mm_xor_si128(c2[595],simde_mm_xor_si128(c2[3110],simde_mm_xor_si128(c2[4266],simde_mm_xor_si128(c2[67],simde_mm_xor_si128(c2[4265],simde_mm_xor_si128(c2[3730],simde_mm_xor_si128(c2[4010],simde_mm_xor_si128(c2[1209],simde_mm_xor_si128(c2[1489],simde_mm_xor_si128(c2[2056],simde_mm_xor_si128(c2[3194],simde_mm_xor_si128(c2[3474],simde_mm_xor_si128(c2[2359],simde_mm_xor_si128(c2[3763],simde_mm_xor_si128(c2[1265],simde_mm_xor_si128(c2[1545],simde_mm_xor_si128(c2[1543],simde_mm_xor_si128(c2[2129],simde_mm_xor_si128(c2[2409],simde_mm_xor_si128(c2[3250],simde_mm_xor_si128(c2[3530],simde_mm_xor_si128(c2[1571],simde_mm_xor_si128(c2[204],simde_mm_xor_si128(c2[484],simde_mm_xor_si128(c2[2158],simde_mm_xor_si128(c2[1635],simde_mm_xor_si128(c2[1915],simde_mm_xor_si128(c2[1345],simde_mm_xor_si128(c2[1629],simde_mm_xor_si128(c2[262],simde_mm_xor_si128(c2[542],simde_mm_xor_si128(c2[2223],simde_mm_xor_si128(c2[2503],c2[4175]))))))))))))))))))))))))))))))))))))))))));
+
+//row: 9
+     d2[126]=simde_mm_xor_si128(c2[2],simde_mm_xor_si128(c2[3089],simde_mm_xor_si128(c2[3369],simde_mm_xor_si128(c2[3640],simde_mm_xor_si128(c2[2528],simde_mm_xor_si128(c2[7],simde_mm_xor_si128(c2[3360],simde_mm_xor_si128(c2[1712],simde_mm_xor_si128(c2[320],simde_mm_xor_si128(c2[600],simde_mm_xor_si128(c2[3113],simde_mm_xor_si128(c2[2001],simde_mm_xor_si128(c2[317],simde_mm_xor_si128(c2[3670],simde_mm_xor_si128(c2[588],simde_mm_xor_si128(c2[4268],simde_mm_xor_si128(c2[2862],simde_mm_xor_si128(c2[3142],simde_mm_xor_si128(c2[3987],simde_mm_xor_si128(c2[2861],simde_mm_xor_si128(c2[3732],simde_mm_xor_si128(c2[2326],simde_mm_xor_si128(c2[2606],simde_mm_xor_si128(c2[1211],simde_mm_xor_si128(c2[85],simde_mm_xor_si128(c2[1764],simde_mm_xor_si128(c2[652],simde_mm_xor_si128(c2[3196],simde_mm_xor_si128(c2[2084],simde_mm_xor_si128(c2[2081],simde_mm_xor_si128(c2[955],simde_mm_xor_si128(c2[3485],simde_mm_xor_si128(c2[2359],simde_mm_xor_si128(c2[1267],simde_mm_xor_si128(c2[141],simde_mm_xor_si128(c2[1265],simde_mm_xor_si128(c2[153],simde_mm_xor_si128(c2[2131],simde_mm_xor_si128(c2[739],simde_mm_xor_si128(c2[1019],simde_mm_xor_si128(c2[3252],simde_mm_xor_si128(c2[2140],simde_mm_xor_si128(c2[1293],simde_mm_xor_si128(c2[181],simde_mm_xor_si128(c2[206],simde_mm_xor_si128(c2[3559],simde_mm_xor_si128(c2[1880],simde_mm_xor_si128(c2[768],simde_mm_xor_si128(c2[1637],simde_mm_xor_si128(c2[511],simde_mm_xor_si128(c2[1067],simde_mm_xor_si128(c2[4434],simde_mm_xor_si128(c2[1351],simde_mm_xor_si128(c2[225],simde_mm_xor_si128(c2[4145],simde_mm_xor_si128(c2[264],simde_mm_xor_si128(c2[3337],simde_mm_xor_si128(c2[3617],simde_mm_xor_si128(c2[2225],simde_mm_xor_si128(c2[1099],simde_mm_xor_si128(c2[3897],c2[2785])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 10
+     d2[140]=simde_mm_xor_si128(c2[3930],simde_mm_xor_si128(c2[1717],simde_mm_xor_si128(c2[2408],c2[2439])));
+
+//row: 11
+     d2[154]=simde_mm_xor_si128(c2[3370],simde_mm_xor_si128(c2[2529],simde_mm_xor_si128(c2[3361],simde_mm_xor_si128(c2[846],simde_mm_xor_si128(c2[601],simde_mm_xor_si128(c2[1988],simde_mm_xor_si128(c2[3391],simde_mm_xor_si128(c2[3671],simde_mm_xor_si128(c2[3143],simde_mm_xor_si128(c2[2582],simde_mm_xor_si128(c2[2862],simde_mm_xor_si128(c2[2607],simde_mm_xor_si128(c2[86],simde_mm_xor_si128(c2[653],simde_mm_xor_si128(c2[2085],simde_mm_xor_si128(c2[956],simde_mm_xor_si128(c2[2080],simde_mm_xor_si128(c2[2360],simde_mm_xor_si128(c2[142],simde_mm_xor_si128(c2[4353],simde_mm_xor_si128(c2[140],simde_mm_xor_si128(c2[1020],simde_mm_xor_si128(c2[2141],simde_mm_xor_si128(c2[4381],simde_mm_xor_si128(c2[168],simde_mm_xor_si128(c2[3560],simde_mm_xor_si128(c2[489],simde_mm_xor_si128(c2[769],simde_mm_xor_si128(c2[3004],simde_mm_xor_si128(c2[512],simde_mm_xor_si128(c2[4435],simde_mm_xor_si128(c2[4425],simde_mm_xor_si128(c2[226],simde_mm_xor_si128(c2[3618],simde_mm_xor_si128(c2[1100],simde_mm_xor_si128(c2[2492],simde_mm_xor_si128(c2[2772],c2[4178])))))))))))))))))))))))))))))))))))));
+
+//row: 12
+     d2[168]=simde_mm_xor_si128(c2[1411],simde_mm_xor_si128(c2[1691],simde_mm_xor_si128(c2[850],simde_mm_xor_si128(c2[1682],simde_mm_xor_si128(c2[3121],simde_mm_xor_si128(c2[3401],simde_mm_xor_si128(c2[309],simde_mm_xor_si128(c2[1992],simde_mm_xor_si128(c2[3950],simde_mm_xor_si128(c2[1184],simde_mm_xor_si128(c2[1464],simde_mm_xor_si128(c2[1183],simde_mm_xor_si128(c2[648],simde_mm_xor_si128(c2[928],simde_mm_xor_si128(c2[2886],simde_mm_xor_si128(c2[3453],simde_mm_xor_si128(c2[2617],simde_mm_xor_si128(c2[392],simde_mm_xor_si128(c2[3756],simde_mm_xor_si128(c2[681],simde_mm_xor_si128(c2[2942],simde_mm_xor_si128(c2[2940],simde_mm_xor_si128(c2[3540],simde_mm_xor_si128(c2[3820],simde_mm_xor_si128(c2[448],simde_mm_xor_si128(c2[2968],simde_mm_xor_si128(c2[1881],simde_mm_xor_si128(c2[3569],simde_mm_xor_si128(c2[3312],simde_mm_xor_si128(c2[2756],simde_mm_xor_si128(c2[3026],simde_mm_xor_si128(c2[1659],simde_mm_xor_si128(c2[1939],simde_mm_xor_si128(c2[3900],c2[1093]))))))))))))))))))))))))))))))))));
+
+//row: 13
+     d2[182]=simde_mm_xor_si128(c2[1413],simde_mm_xor_si128(c2[572],simde_mm_xor_si128(c2[1404],simde_mm_xor_si128(c2[1402],simde_mm_xor_si128(c2[3109],simde_mm_xor_si128(c2[31],simde_mm_xor_si128(c2[1434],simde_mm_xor_si128(c2[1714],simde_mm_xor_si128(c2[315],simde_mm_xor_si128(c2[1186],simde_mm_xor_si128(c2[625],simde_mm_xor_si128(c2[905],simde_mm_xor_si128(c2[650],simde_mm_xor_si128(c2[2608],simde_mm_xor_si128(c2[3175],simde_mm_xor_si128(c2[114],simde_mm_xor_si128(c2[3478],simde_mm_xor_si128(c2[123],simde_mm_xor_si128(c2[403],simde_mm_xor_si128(c2[2664],simde_mm_xor_si128(c2[2382],simde_mm_xor_si128(c2[2662],simde_mm_xor_si128(c2[3528],simde_mm_xor_si128(c2[170],simde_mm_xor_si128(c2[2410],simde_mm_xor_si128(c2[2690],simde_mm_xor_si128(c2[1603],simde_mm_xor_si128(c2[2997],simde_mm_xor_si128(c2[3277],simde_mm_xor_si128(c2[3034],simde_mm_xor_si128(c2[2464],simde_mm_xor_si128(c2[2468],simde_mm_xor_si128(c2[2748],simde_mm_xor_si128(c2[4432],simde_mm_xor_si128(c2[1661],simde_mm_xor_si128(c2[3622],simde_mm_xor_si128(c2[535],c2[815])))))))))))))))))))))))))))))))))))));
+
+//row: 14
+     d2[196]=simde_mm_xor_si128(c2[7],simde_mm_xor_si128(c2[287],simde_mm_xor_si128(c2[3651],simde_mm_xor_si128(c2[3925],simde_mm_xor_si128(c2[2810],simde_mm_xor_si128(c2[292],simde_mm_xor_si128(c2[3642],simde_mm_xor_si128(c2[1717],simde_mm_xor_si128(c2[1997],simde_mm_xor_si128(c2[868],simde_mm_xor_si128(c2[3398],simde_mm_xor_si128(c2[2269],simde_mm_xor_si128(c2[588],simde_mm_xor_si128(c2[3672],simde_mm_xor_si128(c2[3952],simde_mm_xor_si128(c2[1150],simde_mm_xor_si128(c2[4259],simde_mm_xor_si128(c2[60],simde_mm_xor_si128(c2[3424],simde_mm_xor_si128(c2[4258],simde_mm_xor_si128(c2[2863],simde_mm_xor_si128(c2[3143],simde_mm_xor_si128(c2[3737],simde_mm_xor_si128(c2[4017],simde_mm_xor_si128(c2[2888],simde_mm_xor_si128(c2[1496],simde_mm_xor_si128(c2[367],simde_mm_xor_si128(c2[2049],simde_mm_xor_si128(c2[934],simde_mm_xor_si128(c2[3481],simde_mm_xor_si128(c2[2352],simde_mm_xor_si128(c2[2352],simde_mm_xor_si128(c2[1237],simde_mm_xor_si128(c2[3756],simde_mm_xor_si128(c2[2361],simde_mm_xor_si128(c2[2641],simde_mm_xor_si128(c2[1552],simde_mm_xor_si128(c2[423],simde_mm_xor_si128(c2[1550],simde_mm_xor_si128(c2[141],simde_mm_xor_si128(c2[421],simde_mm_xor_si128(c2[2136],simde_mm_xor_si128(c2[2416],simde_mm_xor_si128(c2[1301],simde_mm_xor_si128(c2[3537],simde_mm_xor_si128(c2[2408],simde_mm_xor_si128(c2[1578],simde_mm_xor_si128(c2[169],simde_mm_xor_si128(c2[449],simde_mm_xor_si128(c2[4373],simde_mm_xor_si128(c2[477],simde_mm_xor_si128(c2[3841],simde_mm_xor_si128(c2[2165],simde_mm_xor_si128(c2[756],simde_mm_xor_si128(c2[1036],simde_mm_xor_si128(c2[1908],simde_mm_xor_si128(c2[793],simde_mm_xor_si128(c2[1352],simde_mm_xor_si128(c2[237],simde_mm_xor_si128(c2[1636],simde_mm_xor_si128(c2[227],simde_mm_xor_si128(c2[507],simde_mm_xor_si128(c2[255],simde_mm_xor_si128(c2[535],simde_mm_xor_si128(c2[3899],simde_mm_xor_si128(c2[2496],simde_mm_xor_si128(c2[1381],simde_mm_xor_si128(c2[4182],simde_mm_xor_si128(c2[2773],c2[3053])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 15
+     d2[210]=simde_mm_xor_si128(c2[3],simde_mm_xor_si128(c2[1691],simde_mm_xor_si128(c2[1971],simde_mm_xor_si128(c2[3641],simde_mm_xor_si128(c2[1130],simde_mm_xor_si128(c2[8],simde_mm_xor_si128(c2[1962],simde_mm_xor_si128(c2[1127],simde_mm_xor_si128(c2[1713],simde_mm_xor_si128(c2[3401],simde_mm_xor_si128(c2[3681],simde_mm_xor_si128(c2[3114],simde_mm_xor_si128(c2[589],simde_mm_xor_si128(c2[318],simde_mm_xor_si128(c2[2272],simde_mm_xor_si128(c2[4269],simde_mm_xor_si128(c2[1464],simde_mm_xor_si128(c2[1744],simde_mm_xor_si128(c2[3988],simde_mm_xor_si128(c2[1463],simde_mm_xor_si128(c2[3733],simde_mm_xor_si128(c2[928],simde_mm_xor_si128(c2[1208],simde_mm_xor_si128(c2[1212],simde_mm_xor_si128(c2[3166],simde_mm_xor_si128(c2[1765],simde_mm_xor_si128(c2[3733],simde_mm_xor_si128(c2[3197],simde_mm_xor_si128(c2[672],simde_mm_xor_si128(c2[2082],simde_mm_xor_si128(c2[4036],simde_mm_xor_si128(c2[3472],simde_mm_xor_si128(c2[961],simde_mm_xor_si128(c2[1268],simde_mm_xor_si128(c2[3222],simde_mm_xor_si128(c2[1266],simde_mm_xor_si128(c2[3220],simde_mm_xor_si128(c2[2132],simde_mm_xor_si128(c2[3820],simde_mm_xor_si128(c2[4100],simde_mm_xor_si128(c2[3253],simde_mm_xor_si128(c2[728],simde_mm_xor_si128(c2[1294],simde_mm_xor_si128(c2[3248],simde_mm_xor_si128(c2[207],simde_mm_xor_si128(c2[2161],simde_mm_xor_si128(c2[1881],simde_mm_xor_si128(c2[3849],simde_mm_xor_si128(c2[1624],simde_mm_xor_si128(c2[3592],simde_mm_xor_si128(c2[1068],simde_mm_xor_si128(c2[3036],simde_mm_xor_si128(c2[1352],simde_mm_xor_si128(c2[3306],simde_mm_xor_si128(c2[265],simde_mm_xor_si128(c2[1939],simde_mm_xor_si128(c2[2219],simde_mm_xor_si128(c2[2212],simde_mm_xor_si128(c2[4180],simde_mm_xor_si128(c2[3898],c2[1373]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 16
+     d2[224]=simde_mm_xor_si128(c2[3932],simde_mm_xor_si128(c2[4212],simde_mm_xor_si128(c2[2530],simde_mm_xor_si128(c2[2810],simde_mm_xor_si128(c2[3371],simde_mm_xor_si128(c2[1689],simde_mm_xor_si128(c2[1969],simde_mm_xor_si128(c2[4203],simde_mm_xor_si128(c2[2801],simde_mm_xor_si128(c2[1149],simde_mm_xor_si128(c2[1429],simde_mm_xor_si128(c2[4240],simde_mm_xor_si128(c2[41],simde_mm_xor_si128(c2[2830],simde_mm_xor_si128(c2[1428],simde_mm_xor_si128(c2[34],simde_mm_xor_si128(c2[3111],simde_mm_xor_si128(c2[3112],simde_mm_xor_si128(c2[3705],simde_mm_xor_si128(c2[3985],simde_mm_xor_si128(c2[2303],simde_mm_xor_si128(c2[2583],simde_mm_xor_si128(c2[3704],simde_mm_xor_si128(c2[2302],simde_mm_xor_si128(c2[3169],simde_mm_xor_si128(c2[3449],simde_mm_xor_si128(c2[1767],simde_mm_xor_si128(c2[2047],simde_mm_xor_si128(c2[928],simde_mm_xor_si128(c2[3725],simde_mm_xor_si128(c2[4005],simde_mm_xor_si128(c2[1495],simde_mm_xor_si128(c2[93],simde_mm_xor_si128(c2[2913],simde_mm_xor_si128(c2[1245],simde_mm_xor_si128(c2[1525],simde_mm_xor_si128(c2[1798],simde_mm_xor_si128(c2[396],simde_mm_xor_si128(c2[3202],simde_mm_xor_si128(c2[1800],simde_mm_xor_si128(c2[984],simde_mm_xor_si128(c2[3781],simde_mm_xor_si128(c2[4061],simde_mm_xor_si128(c2[982],simde_mm_xor_si128(c2[4073],simde_mm_xor_si128(c2[1568],simde_mm_xor_si128(c2[1848],simde_mm_xor_si128(c2[180],simde_mm_xor_si128(c2[460],simde_mm_xor_si128(c2[2969],simde_mm_xor_si128(c2[1301],simde_mm_xor_si128(c2[1581],simde_mm_xor_si128(c2[1010],simde_mm_xor_si128(c2[4101],simde_mm_xor_si128(c2[4402],simde_mm_xor_si128(c2[2720],simde_mm_xor_si128(c2[3000],simde_mm_xor_si128(c2[1597],simde_mm_xor_si128(c2[209],simde_mm_xor_si128(c2[1354],simde_mm_xor_si128(c2[4151],simde_mm_xor_si128(c2[4431],simde_mm_xor_si128(c2[784],simde_mm_xor_si128(c2[3875],simde_mm_xor_si128(c2[1068],simde_mm_xor_si128(c2[4145],simde_mm_xor_si128(c2[4180],simde_mm_xor_si128(c2[4460],simde_mm_xor_si128(c2[2778],simde_mm_xor_si128(c2[3058],simde_mm_xor_si128(c2[1942],simde_mm_xor_si128(c2[260],simde_mm_xor_si128(c2[540],simde_mm_xor_si128(c2[3614],simde_mm_xor_si128(c2[2212],c2[4181])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 17
+     d2[238]=simde_mm_xor_si128(c2[1683],simde_mm_xor_si128(c2[1963],simde_mm_xor_si128(c2[2804],simde_mm_xor_si128(c2[3084],simde_mm_xor_si128(c2[1122],simde_mm_xor_si128(c2[1963],simde_mm_xor_si128(c2[2243],simde_mm_xor_si128(c2[1968],simde_mm_xor_si128(c2[3089],simde_mm_xor_si128(c2[3393],simde_mm_xor_si128(c2[3673],simde_mm_xor_si128(c2[35],simde_mm_xor_si128(c2[315],simde_mm_xor_si128(c2[595],simde_mm_xor_si128(c2[1716],simde_mm_xor_si128(c2[2278],simde_mm_xor_si128(c2[3399],simde_mm_xor_si128(c2[31],simde_mm_xor_si128(c2[1456],simde_mm_xor_si128(c2[1736],simde_mm_xor_si128(c2[2577],simde_mm_xor_si128(c2[2857],simde_mm_xor_si128(c2[1469],simde_mm_xor_si128(c2[2576],simde_mm_xor_si128(c2[934],simde_mm_xor_si128(c2[1214],simde_mm_xor_si128(c2[2055],simde_mm_xor_si128(c2[2335],simde_mm_xor_si128(c2[3172],simde_mm_xor_si128(c2[4013],simde_mm_xor_si128(c2[4293],simde_mm_xor_si128(c2[3725],simde_mm_xor_si128(c2[367],simde_mm_xor_si128(c2[678],simde_mm_xor_si128(c2[1519],simde_mm_xor_si128(c2[1799],simde_mm_xor_si128(c2[4042],simde_mm_xor_si128(c2[684],simde_mm_xor_si128(c2[953],simde_mm_xor_si128(c2[2074],simde_mm_xor_si128(c2[3228],simde_mm_xor_si128(c2[4069],simde_mm_xor_si128(c2[4349],simde_mm_xor_si128(c2[3226],simde_mm_xor_si128(c2[4347],simde_mm_xor_si128(c2[1268],simde_mm_xor_si128(c2[3812],simde_mm_xor_si128(c2[4092],simde_mm_xor_si128(c2[454],simde_mm_xor_si128(c2[734],simde_mm_xor_si128(c2[734],simde_mm_xor_si128(c2[1575],simde_mm_xor_si128(c2[1855],simde_mm_xor_si128(c2[3254],simde_mm_xor_si128(c2[4375],simde_mm_xor_si128(c2[2167],simde_mm_xor_si128(c2[3008],simde_mm_xor_si128(c2[3288],simde_mm_xor_si128(c2[3841],simde_mm_xor_si128(c2[483],simde_mm_xor_si128(c2[3584],simde_mm_xor_si128(c2[4425],simde_mm_xor_si128(c2[226],simde_mm_xor_si128(c2[3028],simde_mm_xor_si128(c2[4149],simde_mm_xor_si128(c2[3312],simde_mm_xor_si128(c2[4433],simde_mm_xor_si128(c2[1945],simde_mm_xor_si128(c2[2225],simde_mm_xor_si128(c2[3052],simde_mm_xor_si128(c2[3332],simde_mm_xor_si128(c2[4172],simde_mm_xor_si128(c2[534],simde_mm_xor_si128(c2[814],simde_mm_xor_si128(c2[1379],c2[2500])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 18
+     d2[252]=simde_mm_xor_si128(c2[1124],simde_mm_xor_si128(c2[169],c2[3565]));
+
+//row: 19
+     d2[266]=simde_mm_xor_si128(c2[3929],simde_mm_xor_si128(c2[3088],simde_mm_xor_si128(c2[3920],simde_mm_xor_si128(c2[842],simde_mm_xor_si128(c2[1160],simde_mm_xor_si128(c2[2561],simde_mm_xor_si128(c2[4230],simde_mm_xor_si128(c2[2836],simde_mm_xor_si128(c2[3702],simde_mm_xor_si128(c2[3421],simde_mm_xor_si128(c2[3166],simde_mm_xor_si128(c2[645],simde_mm_xor_si128(c2[1212],simde_mm_xor_si128(c2[2644],simde_mm_xor_si128(c2[1515],simde_mm_xor_si128(c2[2919],simde_mm_xor_si128(c2[701],simde_mm_xor_si128(c2[713],simde_mm_xor_si128(c2[1579],simde_mm_xor_si128(c2[2700],simde_mm_xor_si128(c2[741],simde_mm_xor_si128(c2[4119],simde_mm_xor_si128(c2[1328],simde_mm_xor_si128(c2[1071],simde_mm_xor_si128(c2[515],simde_mm_xor_si128(c2[785],simde_mm_xor_si128(c2[4177],simde_mm_xor_si128(c2[1659],c2[3345]))))))))))))))))))))))))))));
+
+//row: 20
+     d2[280]=simde_mm_xor_si128(c2[1413],simde_mm_xor_si128(c2[1693],simde_mm_xor_si128(c2[852],simde_mm_xor_si128(c2[1684],simde_mm_xor_si128(c2[3109],simde_mm_xor_si128(c2[3389],simde_mm_xor_si128(c2[311],simde_mm_xor_si128(c2[1994],simde_mm_xor_si128(c2[1708],simde_mm_xor_si128(c2[1186],simde_mm_xor_si128(c2[1466],simde_mm_xor_si128(c2[1185],simde_mm_xor_si128(c2[650],simde_mm_xor_si128(c2[930],simde_mm_xor_si128(c2[2888],simde_mm_xor_si128(c2[3455],simde_mm_xor_si128(c2[394],simde_mm_xor_si128(c2[3758],simde_mm_xor_si128(c2[683],simde_mm_xor_si128(c2[1233],simde_mm_xor_si128(c2[2944],simde_mm_xor_si128(c2[2942],simde_mm_xor_si128(c2[3528],simde_mm_xor_si128(c2[3808],simde_mm_xor_si128(c2[450],simde_mm_xor_si128(c2[2970],simde_mm_xor_si128(c2[1883],simde_mm_xor_si128(c2[3557],simde_mm_xor_si128(c2[3314],simde_mm_xor_si128(c2[2744],simde_mm_xor_si128(c2[3028],simde_mm_xor_si128(c2[1661],simde_mm_xor_si128(c2[1941],simde_mm_xor_si128(c2[3902],c2[1095]))))))))))))))))))))))))))))))))));
+
+//row: 21
+     d2[294]=simde_mm_xor_si128(c2[2809],simde_mm_xor_si128(c2[1968],simde_mm_xor_si128(c2[2800],simde_mm_xor_si128(c2[5],simde_mm_xor_si128(c2[40],simde_mm_xor_si128(c2[1441],simde_mm_xor_si128(c2[2830],simde_mm_xor_si128(c2[3110],simde_mm_xor_si128(c2[2582],simde_mm_xor_si128(c2[2021],simde_mm_xor_si128(c2[2301],simde_mm_xor_si128(c2[2046],simde_mm_xor_si128(c2[4004],simde_mm_xor_si128(c2[92],simde_mm_xor_si128(c2[1524],simde_mm_xor_si128(c2[395],simde_mm_xor_si128(c2[1519],simde_mm_xor_si128(c2[1799],simde_mm_xor_si128(c2[4060],simde_mm_xor_si128(c2[3792],simde_mm_xor_si128(c2[4072],simde_mm_xor_si128(c2[459],simde_mm_xor_si128(c2[1580],simde_mm_xor_si128(c2[3820],simde_mm_xor_si128(c2[4100],simde_mm_xor_si128(c2[2999],simde_mm_xor_si128(c2[4407],simde_mm_xor_si128(c2[208],simde_mm_xor_si128(c2[4430],simde_mm_xor_si128(c2[3874],simde_mm_xor_si128(c2[3864],simde_mm_xor_si128(c2[4144],simde_mm_xor_si128(c2[3306],simde_mm_xor_si128(c2[3057],simde_mm_xor_si128(c2[539],simde_mm_xor_si128(c2[1945],c2[2225]))))))))))))))))))))))))))))))))))));
+
+//row: 22
+     d2[308]=simde_mm_xor_si128(c2[311],c2[336]);
+
+//row: 23
+     d2[322]=simde_mm_xor_si128(c2[3369],simde_mm_xor_si128(c2[1767],c2[1828]));
+
+//row: 24
+     d2[336]=simde_mm_xor_si128(c2[2557],simde_mm_xor_si128(c2[2301],c2[4455]));
+
+//row: 25
+     d2[350]=simde_mm_xor_si128(c2[853],c2[4065]);
+
+//row: 26
+     d2[364]=simde_mm_xor_si128(c2[1404],simde_mm_xor_si128(c2[1684],simde_mm_xor_si128(c2[4204],simde_mm_xor_si128(c2[563],simde_mm_xor_si128(c2[843],simde_mm_xor_si128(c2[3363],simde_mm_xor_si128(c2[1689],simde_mm_xor_si128(c2[4209],simde_mm_xor_si128(c2[3114],simde_mm_xor_si128(c2[3394],simde_mm_xor_si128(c2[1435],simde_mm_xor_si128(c2[316],simde_mm_xor_si128(c2[2836],simde_mm_xor_si128(c2[1999],simde_mm_xor_si128(c2[4239],simde_mm_xor_si128(c2[40],simde_mm_xor_si128(c2[1177],simde_mm_xor_si128(c2[1457],simde_mm_xor_si128(c2[3977],simde_mm_xor_si128(c2[1176],simde_mm_xor_si128(c2[3416],simde_mm_xor_si128(c2[3696],simde_mm_xor_si128(c2[2861],simde_mm_xor_si128(c2[655],simde_mm_xor_si128(c2[935],simde_mm_xor_si128(c2[3455],simde_mm_xor_si128(c2[2613],simde_mm_xor_si128(c2[2893],simde_mm_xor_si128(c2[934],simde_mm_xor_si128(c2[3446],simde_mm_xor_si128(c2[1487],simde_mm_xor_si128(c2[119],simde_mm_xor_si128(c2[399],simde_mm_xor_si128(c2[2919],simde_mm_xor_si128(c2[3763],simde_mm_xor_si128(c2[1804],simde_mm_xor_si128(c2[674],simde_mm_xor_si128(c2[2914],simde_mm_xor_si128(c2[3194],simde_mm_xor_si128(c2[2669],simde_mm_xor_si128(c2[2949],simde_mm_xor_si128(c2[990],simde_mm_xor_si128(c2[2947],simde_mm_xor_si128(c2[708],simde_mm_xor_si128(c2[988],simde_mm_xor_si128(c2[3533],simde_mm_xor_si128(c2[3813],simde_mm_xor_si128(c2[1854],simde_mm_xor_si128(c2[175],simde_mm_xor_si128(c2[455],simde_mm_xor_si128(c2[2975],simde_mm_xor_si128(c2[2975],simde_mm_xor_si128(c2[736],simde_mm_xor_si128(c2[1016],simde_mm_xor_si128(c2[1608],simde_mm_xor_si128(c2[1888],simde_mm_xor_si128(c2[4408],simde_mm_xor_si128(c2[3562],simde_mm_xor_si128(c2[1323],simde_mm_xor_si128(c2[1603],simde_mm_xor_si128(c2[1876],simde_mm_xor_si128(c2[3025],simde_mm_xor_si128(c2[3305],simde_mm_xor_si128(c2[1346],simde_mm_xor_si128(c2[2749],simde_mm_xor_si128(c2[790],simde_mm_xor_si128(c2[3033],simde_mm_xor_si128(c2[794],simde_mm_xor_si128(c2[1074],simde_mm_xor_si128(c2[1652],simde_mm_xor_si128(c2[1932],simde_mm_xor_si128(c2[4452],simde_mm_xor_si128(c2[3613],simde_mm_xor_si128(c2[3893],simde_mm_xor_si128(c2[1934],simde_mm_xor_si128(c2[1100],simde_mm_xor_si128(c2[3340],c2[3620])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 27
+     d2[378]=simde_mm_xor_si128(c2[3081],c2[1855]);
+
+//row: 28
+     d2[392]=simde_mm_xor_si128(c2[2281],simde_mm_xor_si128(c2[1189],c2[432]));
+
+//row: 29
+     d2[406]=simde_mm_xor_si128(c2[3366],c2[3755]);
+
+//row: 30
+     d2[420]=simde_mm_xor_si128(c2[2862],simde_mm_xor_si128(c2[3502],simde_mm_xor_si128(c2[2727],c2[263])));
+
+//row: 31
+     d2[434]=simde_mm_xor_si128(c2[4201],simde_mm_xor_si128(c2[3360],simde_mm_xor_si128(c2[4206],simde_mm_xor_si128(c2[1432],simde_mm_xor_si128(c2[2833],simde_mm_xor_si128(c2[4236],simde_mm_xor_si128(c2[37],simde_mm_xor_si128(c2[877],simde_mm_xor_si128(c2[3988],simde_mm_xor_si128(c2[3427],simde_mm_xor_si128(c2[3707],simde_mm_xor_si128(c2[3452],simde_mm_xor_si128(c2[931],simde_mm_xor_si128(c2[1484],simde_mm_xor_si128(c2[2916],simde_mm_xor_si128(c2[1801],simde_mm_xor_si128(c2[2925],simde_mm_xor_si128(c2[3205],simde_mm_xor_si128(c2[987],simde_mm_xor_si128(c2[705],simde_mm_xor_si128(c2[985],simde_mm_xor_si128(c2[1851],simde_mm_xor_si128(c2[2972],simde_mm_xor_si128(c2[733],simde_mm_xor_si128(c2[1013],simde_mm_xor_si128(c2[4405],simde_mm_xor_si128(c2[1320],simde_mm_xor_si128(c2[1600],simde_mm_xor_si128(c2[1357],simde_mm_xor_si128(c2[787],simde_mm_xor_si128(c2[791],simde_mm_xor_si128(c2[1071],simde_mm_xor_si128(c2[4463],simde_mm_xor_si128(c2[1945],simde_mm_xor_si128(c2[3337],c2[3617])))))))))))))))))))))))))))))))))));
+
+//row: 32
+     d2[448]=simde_mm_xor_si128(c2[1691],simde_mm_xor_si128(c2[1971],simde_mm_xor_si128(c2[850],simde_mm_xor_si128(c2[1130],simde_mm_xor_si128(c2[1962],simde_mm_xor_si128(c2[3366],simde_mm_xor_si128(c2[3401],simde_mm_xor_si128(c2[3681],simde_mm_xor_si128(c2[589],simde_mm_xor_si128(c2[2272],simde_mm_xor_si128(c2[1464],simde_mm_xor_si128(c2[1744],simde_mm_xor_si128(c2[1463],simde_mm_xor_si128(c2[928],simde_mm_xor_si128(c2[1208],simde_mm_xor_si128(c2[2886],simde_mm_xor_si128(c2[3166],simde_mm_xor_si128(c2[3733],simde_mm_xor_si128(c2[392],simde_mm_xor_si128(c2[672],simde_mm_xor_si128(c2[4036],simde_mm_xor_si128(c2[961],simde_mm_xor_si128(c2[2942],simde_mm_xor_si128(c2[3222],simde_mm_xor_si128(c2[3220],simde_mm_xor_si128(c2[1541],simde_mm_xor_si128(c2[3820],simde_mm_xor_si128(c2[4100],simde_mm_xor_si128(c2[448],simde_mm_xor_si128(c2[728],simde_mm_xor_si128(c2[3248],simde_mm_xor_si128(c2[1881],simde_mm_xor_si128(c2[2161],simde_mm_xor_si128(c2[3849],simde_mm_xor_si128(c2[3312],simde_mm_xor_si128(c2[3592],simde_mm_xor_si128(c2[3036],simde_mm_xor_si128(c2[3306],simde_mm_xor_si128(c2[1939],simde_mm_xor_si128(c2[2219],simde_mm_xor_si128(c2[3900],simde_mm_xor_si128(c2[4180],c2[1373]))))))))))))))))))))))))))))))))))))))))));
+
+//row: 33
+     d2[462]=simde_mm_xor_si128(c2[1411],simde_mm_xor_si128(c2[570],simde_mm_xor_si128(c2[1402],simde_mm_xor_si128(c2[3121],simde_mm_xor_si128(c2[29],simde_mm_xor_si128(c2[1712],simde_mm_xor_si128(c2[1184],simde_mm_xor_si128(c2[903],simde_mm_xor_si128(c2[2020],simde_mm_xor_si128(c2[648],simde_mm_xor_si128(c2[2606],simde_mm_xor_si128(c2[3173],simde_mm_xor_si128(c2[112],simde_mm_xor_si128(c2[3476],simde_mm_xor_si128(c2[401],simde_mm_xor_si128(c2[2662],simde_mm_xor_si128(c2[2660],simde_mm_xor_si128(c2[3540],simde_mm_xor_si128(c2[168],simde_mm_xor_si128(c2[2688],simde_mm_xor_si128(c2[1601],simde_mm_xor_si128(c2[3289],simde_mm_xor_si128(c2[3556],simde_mm_xor_si128(c2[3032],simde_mm_xor_si128(c2[2476],simde_mm_xor_si128(c2[2746],simde_mm_xor_si128(c2[1659],simde_mm_xor_si128(c2[3620],c2[813]))))))))))))))))))))))))))));
+
+//row: 34
+     d2[476]=simde_mm_xor_si128(c2[283],simde_mm_xor_si128(c2[563],simde_mm_xor_si128(c2[3928],simde_mm_xor_si128(c2[3921],simde_mm_xor_si128(c2[4201],simde_mm_xor_si128(c2[3087],simde_mm_xor_si128(c2[568],simde_mm_xor_si128(c2[3933],simde_mm_xor_si128(c2[3641],simde_mm_xor_si128(c2[1993],simde_mm_xor_si128(c2[2273],simde_mm_xor_si128(c2[1159],simde_mm_xor_si128(c2[3674],simde_mm_xor_si128(c2[2560],simde_mm_xor_si128(c2[878],simde_mm_xor_si128(c2[3949],simde_mm_xor_si128(c2[4229],simde_mm_xor_si128(c2[56],simde_mm_xor_si128(c2[336],simde_mm_xor_si128(c2[3701],simde_mm_xor_si128(c2[69],simde_mm_xor_si128(c2[3140],simde_mm_xor_si128(c2[3420],simde_mm_xor_si128(c2[4013],simde_mm_xor_si128(c2[4293],simde_mm_xor_si128(c2[3165],simde_mm_xor_si128(c2[1492],simde_mm_xor_si128(c2[1772],simde_mm_xor_si128(c2[644],simde_mm_xor_si128(c2[2325],simde_mm_xor_si128(c2[1211],simde_mm_xor_si128(c2[3477],simde_mm_xor_si128(c2[3757],simde_mm_xor_si128(c2[2643],simde_mm_xor_si128(c2[2642],simde_mm_xor_si128(c2[1514],simde_mm_xor_si128(c2[4032],simde_mm_xor_si128(c2[2638],simde_mm_xor_si128(c2[2918],simde_mm_xor_si128(c2[1548],simde_mm_xor_si128(c2[1828],simde_mm_xor_si128(c2[700],simde_mm_xor_si128(c2[1826],simde_mm_xor_si128(c2[432],simde_mm_xor_si128(c2[712],simde_mm_xor_si128(c2[2412],simde_mm_xor_si128(c2[2692],simde_mm_xor_si128(c2[1578],simde_mm_xor_si128(c2[3533],simde_mm_xor_si128(c2[3813],simde_mm_xor_si128(c2[2699],simde_mm_xor_si128(c2[1854],simde_mm_xor_si128(c2[460],simde_mm_xor_si128(c2[740],simde_mm_xor_si128(c2[487],simde_mm_xor_si128(c2[767],simde_mm_xor_si128(c2[4118],simde_mm_xor_si128(c2[2441],simde_mm_xor_si128(c2[1047],simde_mm_xor_si128(c2[1327],simde_mm_xor_si128(c2[1904],simde_mm_xor_si128(c2[2184],simde_mm_xor_si128(c2[1070],simde_mm_xor_si128(c2[1628],simde_mm_xor_si128(c2[514],simde_mm_xor_si128(c2[1912],simde_mm_xor_si128(c2[504],simde_mm_xor_si128(c2[784],simde_mm_xor_si128(c2[545],simde_mm_xor_si128(c2[825],simde_mm_xor_si128(c2[4176],simde_mm_xor_si128(c2[2492],simde_mm_xor_si128(c2[2772],simde_mm_xor_si128(c2[1658],simde_mm_xor_si128(c2[4458],simde_mm_xor_si128(c2[3064],c2[3344]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 35
+     d2[490]=simde_mm_xor_si128(c2[4207],simde_mm_xor_si128(c2[8],simde_mm_xor_si128(c2[3646],simde_mm_xor_si128(c2[13],simde_mm_xor_si128(c2[1438],simde_mm_xor_si128(c2[1718],simde_mm_xor_si128(c2[3119],simde_mm_xor_si128(c2[309],simde_mm_xor_si128(c2[3113],simde_mm_xor_si128(c2[3980],simde_mm_xor_si128(c2[4260],simde_mm_xor_si128(c2[3979],simde_mm_xor_si128(c2[3444],simde_mm_xor_si128(c2[3724],simde_mm_xor_si128(c2[1217],simde_mm_xor_si128(c2[1770],simde_mm_xor_si128(c2[3202],simde_mm_xor_si128(c2[2073],simde_mm_xor_si128(c2[3477],simde_mm_xor_si128(c2[1273],simde_mm_xor_si128(c2[1271],simde_mm_xor_si128(c2[1550],simde_mm_xor_si128(c2[1857],simde_mm_xor_si128(c2[2137],simde_mm_xor_si128(c2[3258],simde_mm_xor_si128(c2[1299],simde_mm_xor_si128(c2[198],simde_mm_xor_si128(c2[1886],simde_mm_xor_si128(c2[1629],simde_mm_xor_si128(c2[1073],simde_mm_xor_si128(c2[1357],simde_mm_xor_si128(c2[4455],simde_mm_xor_si128(c2[256],simde_mm_xor_si128(c2[2217],c2[3903]))))))))))))))))))))))))))))))))));
+
+//row: 36
+     d2[504]=simde_mm_xor_si128(c2[280],simde_mm_xor_si128(c2[4266],c2[1041]));
+
+//row: 37
+     d2[518]=simde_mm_xor_si128(c2[7],simde_mm_xor_si128(c2[1405],simde_mm_xor_si128(c2[3645],simde_mm_xor_si128(c2[564],simde_mm_xor_si128(c2[12],simde_mm_xor_si128(c2[1410],simde_mm_xor_si128(c2[1717],simde_mm_xor_si128(c2[3115],simde_mm_xor_si128(c2[3118],simde_mm_xor_si128(c2[37],simde_mm_xor_si128(c2[308],simde_mm_xor_si128(c2[1440],simde_mm_xor_si128(c2[1720],simde_mm_xor_si128(c2[4259],simde_mm_xor_si128(c2[1178],simde_mm_xor_si128(c2[3978],simde_mm_xor_si128(c2[617],simde_mm_xor_si128(c2[897],simde_mm_xor_si128(c2[3737],simde_mm_xor_si128(c2[656],simde_mm_xor_si128(c2[1216],simde_mm_xor_si128(c2[2614],simde_mm_xor_si128(c2[1769],simde_mm_xor_si128(c2[3167],simde_mm_xor_si128(c2[3201],simde_mm_xor_si128(c2[120],simde_mm_xor_si128(c2[2072],simde_mm_xor_si128(c2[3484],simde_mm_xor_si128(c2[3476],simde_mm_xor_si128(c2[115],simde_mm_xor_si128(c2[395],simde_mm_xor_si128(c2[1272],simde_mm_xor_si128(c2[2670],simde_mm_xor_si128(c2[1270],simde_mm_xor_si128(c2[2388],simde_mm_xor_si128(c2[2668],simde_mm_xor_si128(c2[2136],simde_mm_xor_si128(c2[3534],simde_mm_xor_si128(c2[3257],simde_mm_xor_si128(c2[176],simde_mm_xor_si128(c2[1298],simde_mm_xor_si128(c2[2416],simde_mm_xor_si128(c2[2696],simde_mm_xor_si128(c2[197],simde_mm_xor_si128(c2[1609],simde_mm_xor_si128(c2[1885],simde_mm_xor_si128(c2[3003],simde_mm_xor_si128(c2[3283],simde_mm_xor_si128(c2[1628],simde_mm_xor_si128(c2[3026],simde_mm_xor_si128(c2[1072],simde_mm_xor_si128(c2[2470],simde_mm_xor_si128(c2[1356],simde_mm_xor_si128(c2[2474],simde_mm_xor_si128(c2[2754],simde_mm_xor_si128(c2[255],simde_mm_xor_si128(c2[1653],simde_mm_xor_si128(c2[2216],simde_mm_xor_si128(c2[3614],simde_mm_xor_si128(c2[3902],simde_mm_xor_si128(c2[541],c2[821])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 38
+     d2[532]=simde_mm_xor_si128(c2[1687],simde_mm_xor_si128(c2[1967],simde_mm_xor_si128(c2[1126],simde_mm_xor_si128(c2[1972],simde_mm_xor_si128(c2[3397],simde_mm_xor_si128(c2[3677],simde_mm_xor_si128(c2[599],simde_mm_xor_si128(c2[2268],simde_mm_xor_si128(c2[1430],simde_mm_xor_si128(c2[1460],simde_mm_xor_si128(c2[1740],simde_mm_xor_si128(c2[1459],simde_mm_xor_si128(c2[924],simde_mm_xor_si128(c2[1204],simde_mm_xor_si128(c2[3176],simde_mm_xor_si128(c2[3729],simde_mm_xor_si128(c2[682],simde_mm_xor_si128(c2[4032],simde_mm_xor_si128(c2[957],simde_mm_xor_si128(c2[3232],simde_mm_xor_si128(c2[3230],simde_mm_xor_si128(c2[426],simde_mm_xor_si128(c2[3816],simde_mm_xor_si128(c2[4096],simde_mm_xor_si128(c2[738],simde_mm_xor_si128(c2[3258],simde_mm_xor_si128(c2[2157],simde_mm_xor_si128(c2[3845],simde_mm_xor_si128(c2[3588],simde_mm_xor_si128(c2[3032],simde_mm_xor_si128(c2[3316],simde_mm_xor_si128(c2[1935],simde_mm_xor_si128(c2[2215],simde_mm_xor_si128(c2[4176],c2[1383]))))))))))))))))))))))))))))))))));
+
+//row: 39
+     d2[546]=simde_mm_xor_si128(c2[560],simde_mm_xor_si128(c2[840],simde_mm_xor_si128(c2[4212],simde_mm_xor_si128(c2[13],simde_mm_xor_si128(c2[845],simde_mm_xor_si128(c2[2806],simde_mm_xor_si128(c2[2270],simde_mm_xor_si128(c2[2550],simde_mm_xor_si128(c2[3951],simde_mm_xor_si128(c2[1155],simde_mm_xor_si128(c2[347],simde_mm_xor_si128(c2[627],simde_mm_xor_si128(c2[346],simde_mm_xor_si128(c2[4290],simde_mm_xor_si128(c2[91],simde_mm_xor_si128(c2[1769],simde_mm_xor_si128(c2[2049],simde_mm_xor_si128(c2[2616],simde_mm_xor_si128(c2[3754],simde_mm_xor_si128(c2[4034],simde_mm_xor_si128(c2[2919],simde_mm_xor_si128(c2[4323],simde_mm_xor_si128(c2[1825],simde_mm_xor_si128(c2[2105],simde_mm_xor_si128(c2[2103],simde_mm_xor_si128(c2[2689],simde_mm_xor_si128(c2[2969],simde_mm_xor_si128(c2[3810],simde_mm_xor_si128(c2[4090],simde_mm_xor_si128(c2[2131],simde_mm_xor_si128(c2[764],simde_mm_xor_si128(c2[1044],simde_mm_xor_si128(c2[2718],simde_mm_xor_si128(c2[1607],simde_mm_xor_si128(c2[2195],simde_mm_xor_si128(c2[2475],simde_mm_xor_si128(c2[1905],simde_mm_xor_si128(c2[2189],simde_mm_xor_si128(c2[822],simde_mm_xor_si128(c2[1102],simde_mm_xor_si128(c2[2783],simde_mm_xor_si128(c2[3063],c2[256]))))))))))))))))))))))))))))))))))))))))));
+
+//row: 40
+     d2[560]=simde_mm_xor_si128(c2[3086],simde_mm_xor_si128(c2[3646],simde_mm_xor_si128(c2[2245],simde_mm_xor_si128(c2[2805],simde_mm_xor_si128(c2[3091],simde_mm_xor_si128(c2[3651],simde_mm_xor_si128(c2[317],simde_mm_xor_si128(c2[877],simde_mm_xor_si128(c2[1718],simde_mm_xor_si128(c2[2278],simde_mm_xor_si128(c2[3401],simde_mm_xor_si128(c2[3681],simde_mm_xor_si128(c2[3961],simde_mm_xor_si128(c2[2859],simde_mm_xor_si128(c2[3419],simde_mm_xor_si128(c2[2578],simde_mm_xor_si128(c2[2858],simde_mm_xor_si128(c2[3138],simde_mm_xor_si128(c2[622],simde_mm_xor_si128(c2[2337],simde_mm_xor_si128(c2[2897],simde_mm_xor_si128(c2[4295],simde_mm_xor_si128(c2[376],simde_mm_xor_si128(c2[369],simde_mm_xor_si128(c2[929],simde_mm_xor_si128(c2[1801],simde_mm_xor_si128(c2[2361],simde_mm_xor_si128(c2[672],simde_mm_xor_si128(c2[1232],simde_mm_xor_si128(c2[2076],simde_mm_xor_si128(c2[2356],simde_mm_xor_si128(c2[2636],simde_mm_xor_si128(c2[4351],simde_mm_xor_si128(c2[432],simde_mm_xor_si128(c2[4349],simde_mm_xor_si128(c2[150],simde_mm_xor_si128(c2[430],simde_mm_xor_si128(c2[736],simde_mm_xor_si128(c2[1296],simde_mm_xor_si128(c2[1857],simde_mm_xor_si128(c2[2417],simde_mm_xor_si128(c2[4377],simde_mm_xor_si128(c2[178],simde_mm_xor_si128(c2[458],simde_mm_xor_si128(c2[3276],simde_mm_xor_si128(c2[3836],simde_mm_xor_si128(c2[485],simde_mm_xor_si128(c2[765],simde_mm_xor_si128(c2[1045],simde_mm_xor_si128(c2[228],simde_mm_xor_si128(c2[788],simde_mm_xor_si128(c2[4151],simde_mm_xor_si128(c2[232],simde_mm_xor_si128(c2[4435],simde_mm_xor_si128(c2[236],simde_mm_xor_si128(c2[516],simde_mm_xor_si128(c2[3334],simde_mm_xor_si128(c2[3894],simde_mm_xor_si128(c2[816],simde_mm_xor_si128(c2[1376],simde_mm_xor_si128(c2[2502],simde_mm_xor_si128(c2[2782],c2[3062]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 41
+     d2[574]=simde_mm_xor_si128(c2[2529],simde_mm_xor_si128(c2[2809],simde_mm_xor_si128(c2[1968],simde_mm_xor_si128(c2[2800],simde_mm_xor_si128(c2[4239],simde_mm_xor_si128(c2[40],simde_mm_xor_si128(c2[1441],simde_mm_xor_si128(c2[3110],simde_mm_xor_si128(c2[2275],simde_mm_xor_si128(c2[2302],simde_mm_xor_si128(c2[2582],simde_mm_xor_si128(c2[2301],simde_mm_xor_si128(c2[1766],simde_mm_xor_si128(c2[2046],simde_mm_xor_si128(c2[4004],simde_mm_xor_si128(c2[92],simde_mm_xor_si128(c2[1524],simde_mm_xor_si128(c2[395],simde_mm_xor_si128(c2[1799],simde_mm_xor_si128(c2[4060],simde_mm_xor_si128(c2[4072],simde_mm_xor_si128(c2[1546],simde_mm_xor_si128(c2[179],simde_mm_xor_si128(c2[459],simde_mm_xor_si128(c2[1580],simde_mm_xor_si128(c2[4100],simde_mm_xor_si128(c2[2999],simde_mm_xor_si128(c2[208],simde_mm_xor_si128(c2[4430],simde_mm_xor_si128(c2[3874],simde_mm_xor_si128(c2[4144],simde_mm_xor_si128(c2[2777],simde_mm_xor_si128(c2[3057],simde_mm_xor_si128(c2[539],c2[2225]))))))))))))))))))))))))))))))))));
+  }
+}
+#endif
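
The change above, like every generated LDPC encoder file in this series, is a purely mechanical rewrite: __m128i becomes simde__m128i and every _mm_xor_si128 becomes simde_mm_xor_si128, so the unrolled parity equations build through SIMDE's emulation on armv7l/aarch64 while still mapping to native SSE2 on x86_64. Below is a minimal standalone sketch of the pattern, assuming SIMDE is included directly as <simde/x86/sse2.h> (the generated files pick up the same definitions through their existing "PHY/sse_intrin.h" include); the helper name xor_blocks is hypothetical, for illustration only.

#include <stdint.h>
#include <simde/x86/sse2.h>  /* simde__m128i, simde_mm_xor_si128 */

/* Illustrative sketch, not part of the patch: XOR n 16-byte blocks of
   src into dst, one 128-bit lane at a time. When SIMDE_X86_SSE2_NATIVE
   is defined this forwards to the native SSE2 intrinsic; otherwise
   SIMDE emulates it (e.g. via NEON), so the same source compiles on
   x86_64, armv7l and aarch64 alike. */
static void xor_blocks(uint8_t *dst, const uint8_t *src, int n)
{
  simde__m128i *d2 = (simde__m128i *)dst;
  const simde__m128i *c2 = (const simde__m128i *)src;

  for (int i = 0; i < n; i++)
    d2[i] = simde_mm_xor_si128(d2[i], c2[i]);
}

Each "//row: N" statement in the generated encoders is this same accumulation with the loop fully unrolled into one fixed chain of simde_mm_xor_si128 calls per parity row, which is why the diff below is long but entirely uniform.
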
diff --git a/openair1/PHY/CODING/nrLDPC_encoder/ldpc_BG2_Zc240_byte.c b/openair1/PHY/CODING/nrLDPC_encoder/ldpc_BG2_Zc240_byte.c
index 997a34f19872f8e0afadf043869cf9f0424ea473..17e4f72d303313b17414a83e4c65e991410abcac 100644
--- a/openair1/PHY/CODING/nrLDPC_encoder/ldpc_BG2_Zc240_byte.c
+++ b/openair1/PHY/CODING/nrLDPC_encoder/ldpc_BG2_Zc240_byte.c
@@ -1,9 +1,9 @@
 #include "PHY/sse_intrin.h"
 // generated code for Zc=240, byte encoding
 static inline __attribute__ ((no_sanitize("address", "undefined"))) void ldpc_BG2_Zc240_byte(uint8_t *c,uint8_t *d) {
-  __m128i *csimd=(__m128i *)c,*dsimd=(__m128i *)d;
+  simde__m128i *csimd=(simde__m128i *)c,*dsimd=(simde__m128i *)d;
 
-  __m128i *c2,*d2;
+  simde__m128i *c2,*d2;
 
   int i2;
   for (i2=0; i2<15; i2++) {
@@ -11,129 +11,129 @@ static inline __attribute__ ((no_sanitize("address", "undefined"))) void ldpc_BG
      d2=&dsimd[i2];
 
 //row: 0
-     d2[0]=_mm_xor_si128(c2[309],_mm_xor_si128(c2[4208],_mm_xor_si128(c2[3004],_mm_xor_si128(c2[938],_mm_xor_si128(c2[4531],_mm_xor_si128(c2[4544],_mm_xor_si128(c2[2164],_mm_xor_si128(c2[3370],_mm_xor_si128(c2[1591],_mm_xor_si128(c2[4300],_mm_xor_si128(c2[990],_mm_xor_si128(c2[2231],_mm_xor_si128(c2[1623],_mm_xor_si128(c2[4625],_mm_xor_si128(c2[3451],_mm_xor_si128(c2[4356],_mm_xor_si128(c2[2281],_mm_xor_si128(c2[186],_mm_xor_si128(c2[4689],_mm_xor_si128(c2[2311],_mm_xor_si128(c2[2322],_mm_xor_si128(c2[2940],_mm_xor_si128(c2[3549],_mm_xor_si128(c2[3542],_mm_xor_si128(c2[277],_mm_xor_si128(c2[2380],c2[3574]))))))))))))))))))))))))));
+     d2[0]=simde_mm_xor_si128(c2[309],simde_mm_xor_si128(c2[4208],simde_mm_xor_si128(c2[3004],simde_mm_xor_si128(c2[938],simde_mm_xor_si128(c2[4531],simde_mm_xor_si128(c2[4544],simde_mm_xor_si128(c2[2164],simde_mm_xor_si128(c2[3370],simde_mm_xor_si128(c2[1591],simde_mm_xor_si128(c2[4300],simde_mm_xor_si128(c2[990],simde_mm_xor_si128(c2[2231],simde_mm_xor_si128(c2[1623],simde_mm_xor_si128(c2[4625],simde_mm_xor_si128(c2[3451],simde_mm_xor_si128(c2[4356],simde_mm_xor_si128(c2[2281],simde_mm_xor_si128(c2[186],simde_mm_xor_si128(c2[4689],simde_mm_xor_si128(c2[2311],simde_mm_xor_si128(c2[2322],simde_mm_xor_si128(c2[2940],simde_mm_xor_si128(c2[3549],simde_mm_xor_si128(c2[3542],simde_mm_xor_si128(c2[277],simde_mm_xor_si128(c2[2380],c2[3574]))))))))))))))))))))))))));
 
 //row: 1
-     d2[15]=_mm_xor_si128(c2[309],_mm_xor_si128(c2[609],_mm_xor_si128(c2[4508],_mm_xor_si128(c2[3304],_mm_xor_si128(c2[938],_mm_xor_si128(c2[1238],_mm_xor_si128(c2[32],_mm_xor_si128(c2[30],_mm_xor_si128(c2[2164],_mm_xor_si128(c2[2464],_mm_xor_si128(c2[3670],_mm_xor_si128(c2[1591],_mm_xor_si128(c2[1891],_mm_xor_si128(c2[4600],_mm_xor_si128(c2[1290],_mm_xor_si128(c2[2531],_mm_xor_si128(c2[1923],_mm_xor_si128(c2[126],_mm_xor_si128(c2[3751],_mm_xor_si128(c2[4656],_mm_xor_si128(c2[2281],_mm_xor_si128(c2[2581],_mm_xor_si128(c2[486],_mm_xor_si128(c2[190],_mm_xor_si128(c2[2611],_mm_xor_si128(c2[2622],_mm_xor_si128(c2[3240],_mm_xor_si128(c2[3849],_mm_xor_si128(c2[3842],_mm_xor_si128(c2[277],_mm_xor_si128(c2[577],_mm_xor_si128(c2[2680],c2[3874]))))))))))))))))))))))))))))))));
+     d2[15]=simde_mm_xor_si128(c2[309],simde_mm_xor_si128(c2[609],simde_mm_xor_si128(c2[4508],simde_mm_xor_si128(c2[3304],simde_mm_xor_si128(c2[938],simde_mm_xor_si128(c2[1238],simde_mm_xor_si128(c2[32],simde_mm_xor_si128(c2[30],simde_mm_xor_si128(c2[2164],simde_mm_xor_si128(c2[2464],simde_mm_xor_si128(c2[3670],simde_mm_xor_si128(c2[1591],simde_mm_xor_si128(c2[1891],simde_mm_xor_si128(c2[4600],simde_mm_xor_si128(c2[1290],simde_mm_xor_si128(c2[2531],simde_mm_xor_si128(c2[1923],simde_mm_xor_si128(c2[126],simde_mm_xor_si128(c2[3751],simde_mm_xor_si128(c2[4656],simde_mm_xor_si128(c2[2281],simde_mm_xor_si128(c2[2581],simde_mm_xor_si128(c2[486],simde_mm_xor_si128(c2[190],simde_mm_xor_si128(c2[2611],simde_mm_xor_si128(c2[2622],simde_mm_xor_si128(c2[3240],simde_mm_xor_si128(c2[3849],simde_mm_xor_si128(c2[3842],simde_mm_xor_si128(c2[277],simde_mm_xor_si128(c2[577],simde_mm_xor_si128(c2[2680],c2[3874]))))))))))))))))))))))))))))))));
 
 //row: 2
-     d2[30]=_mm_xor_si128(c2[309],_mm_xor_si128(c2[609],_mm_xor_si128(c2[4208],_mm_xor_si128(c2[4508],_mm_xor_si128(c2[3304],_mm_xor_si128(c2[938],_mm_xor_si128(c2[1238],_mm_xor_si128(c2[32],_mm_xor_si128(c2[30],_mm_xor_si128(c2[2164],_mm_xor_si128(c2[2464],_mm_xor_si128(c2[3670],_mm_xor_si128(c2[1591],_mm_xor_si128(c2[1891],_mm_xor_si128(c2[4300],_mm_xor_si128(c2[4600],_mm_xor_si128(c2[1290],_mm_xor_si128(c2[2231],_mm_xor_si128(c2[2531],_mm_xor_si128(c2[1923],_mm_xor_si128(c2[126],_mm_xor_si128(c2[3451],_mm_xor_si128(c2[3751],_mm_xor_si128(c2[4656],_mm_xor_si128(c2[2281],_mm_xor_si128(c2[2581],_mm_xor_si128(c2[186],_mm_xor_si128(c2[486],_mm_xor_si128(c2[190],_mm_xor_si128(c2[2311],_mm_xor_si128(c2[2611],_mm_xor_si128(c2[2622],_mm_xor_si128(c2[2940],_mm_xor_si128(c2[3240],_mm_xor_si128(c2[3849],_mm_xor_si128(c2[3842],_mm_xor_si128(c2[277],_mm_xor_si128(c2[577],_mm_xor_si128(c2[2380],_mm_xor_si128(c2[2680],c2[3874]))))))))))))))))))))))))))))))))))))))));
+     d2[30]=simde_mm_xor_si128(c2[309],simde_mm_xor_si128(c2[609],simde_mm_xor_si128(c2[4208],simde_mm_xor_si128(c2[4508],simde_mm_xor_si128(c2[3304],simde_mm_xor_si128(c2[938],simde_mm_xor_si128(c2[1238],simde_mm_xor_si128(c2[32],simde_mm_xor_si128(c2[30],simde_mm_xor_si128(c2[2164],simde_mm_xor_si128(c2[2464],simde_mm_xor_si128(c2[3670],simde_mm_xor_si128(c2[1591],simde_mm_xor_si128(c2[1891],simde_mm_xor_si128(c2[4300],simde_mm_xor_si128(c2[4600],simde_mm_xor_si128(c2[1290],simde_mm_xor_si128(c2[2231],simde_mm_xor_si128(c2[2531],simde_mm_xor_si128(c2[1923],simde_mm_xor_si128(c2[126],simde_mm_xor_si128(c2[3451],simde_mm_xor_si128(c2[3751],simde_mm_xor_si128(c2[4656],simde_mm_xor_si128(c2[2281],simde_mm_xor_si128(c2[2581],simde_mm_xor_si128(c2[186],simde_mm_xor_si128(c2[486],simde_mm_xor_si128(c2[190],simde_mm_xor_si128(c2[2311],simde_mm_xor_si128(c2[2611],simde_mm_xor_si128(c2[2622],simde_mm_xor_si128(c2[2940],simde_mm_xor_si128(c2[3240],simde_mm_xor_si128(c2[3849],simde_mm_xor_si128(c2[3842],simde_mm_xor_si128(c2[277],simde_mm_xor_si128(c2[577],simde_mm_xor_si128(c2[2380],simde_mm_xor_si128(c2[2680],c2[3874]))))))))))))))))))))))))))))))))))))))));
 
 //row: 3
-     d2[45]=_mm_xor_si128(c2[609],_mm_xor_si128(c2[4508],_mm_xor_si128(c2[3304],_mm_xor_si128(c2[1238],_mm_xor_si128(c2[32],_mm_xor_si128(c2[4544],_mm_xor_si128(c2[30],_mm_xor_si128(c2[2464],_mm_xor_si128(c2[3370],_mm_xor_si128(c2[3670],_mm_xor_si128(c2[1891],_mm_xor_si128(c2[4600],_mm_xor_si128(c2[1290],_mm_xor_si128(c2[2531],_mm_xor_si128(c2[1923],_mm_xor_si128(c2[4625],_mm_xor_si128(c2[126],_mm_xor_si128(c2[3751],_mm_xor_si128(c2[4356],_mm_xor_si128(c2[4656],_mm_xor_si128(c2[2581],_mm_xor_si128(c2[486],_mm_xor_si128(c2[4689],_mm_xor_si128(c2[190],_mm_xor_si128(c2[2611],_mm_xor_si128(c2[2322],_mm_xor_si128(c2[2622],_mm_xor_si128(c2[3240],_mm_xor_si128(c2[3849],_mm_xor_si128(c2[3542],_mm_xor_si128(c2[3842],_mm_xor_si128(c2[577],_mm_xor_si128(c2[2680],_mm_xor_si128(c2[3574],c2[3874]))))))))))))))))))))))))))))))))));
+     d2[45]=simde_mm_xor_si128(c2[609],simde_mm_xor_si128(c2[4508],simde_mm_xor_si128(c2[3304],simde_mm_xor_si128(c2[1238],simde_mm_xor_si128(c2[32],simde_mm_xor_si128(c2[4544],simde_mm_xor_si128(c2[30],simde_mm_xor_si128(c2[2464],simde_mm_xor_si128(c2[3370],simde_mm_xor_si128(c2[3670],simde_mm_xor_si128(c2[1891],simde_mm_xor_si128(c2[4600],simde_mm_xor_si128(c2[1290],simde_mm_xor_si128(c2[2531],simde_mm_xor_si128(c2[1923],simde_mm_xor_si128(c2[4625],simde_mm_xor_si128(c2[126],simde_mm_xor_si128(c2[3751],simde_mm_xor_si128(c2[4356],simde_mm_xor_si128(c2[4656],simde_mm_xor_si128(c2[2581],simde_mm_xor_si128(c2[486],simde_mm_xor_si128(c2[4689],simde_mm_xor_si128(c2[190],simde_mm_xor_si128(c2[2611],simde_mm_xor_si128(c2[2322],simde_mm_xor_si128(c2[2622],simde_mm_xor_si128(c2[3240],simde_mm_xor_si128(c2[3849],simde_mm_xor_si128(c2[3542],simde_mm_xor_si128(c2[3842],simde_mm_xor_si128(c2[577],simde_mm_xor_si128(c2[2680],simde_mm_xor_si128(c2[3574],c2[3874]))))))))))))))))))))))))))))))))));
 
 //row: 4
-     d2[60]=_mm_xor_si128(c2[1505],_mm_xor_si128(c2[1805],_mm_xor_si128(c2[905],_mm_xor_si128(c2[4500],_mm_xor_si128(c2[3901],_mm_xor_si128(c2[2134],_mm_xor_si128(c2[2434],_mm_xor_si128(c2[1243],_mm_xor_si128(c2[1241],_mm_xor_si128(c2[3638],_mm_xor_si128(c2[3360],_mm_xor_si128(c2[3660],_mm_xor_si128(c2[67],_mm_xor_si128(c2[2802],_mm_xor_si128(c2[3102],_mm_xor_si128(c2[997],_mm_xor_si128(c2[2501],_mm_xor_si128(c2[3727],_mm_xor_si128(c2[3134],_mm_xor_si128(c2[1322],_mm_xor_si128(c2[163],_mm_xor_si128(c2[1053],_mm_xor_si128(c2[3492],_mm_xor_si128(c2[3792],_mm_xor_si128(c2[1682],_mm_xor_si128(c2[1386],_mm_xor_si128(c2[3822],_mm_xor_si128(c2[3818],_mm_xor_si128(c2[4451],_mm_xor_si128(c2[246],_mm_xor_si128(c2[254],_mm_xor_si128(c2[1473],_mm_xor_si128(c2[1773],_mm_xor_si128(c2[3876],c2[271]))))))))))))))))))))))))))))))))));
+     d2[60]=simde_mm_xor_si128(c2[1505],simde_mm_xor_si128(c2[1805],simde_mm_xor_si128(c2[905],simde_mm_xor_si128(c2[4500],simde_mm_xor_si128(c2[3901],simde_mm_xor_si128(c2[2134],simde_mm_xor_si128(c2[2434],simde_mm_xor_si128(c2[1243],simde_mm_xor_si128(c2[1241],simde_mm_xor_si128(c2[3638],simde_mm_xor_si128(c2[3360],simde_mm_xor_si128(c2[3660],simde_mm_xor_si128(c2[67],simde_mm_xor_si128(c2[2802],simde_mm_xor_si128(c2[3102],simde_mm_xor_si128(c2[997],simde_mm_xor_si128(c2[2501],simde_mm_xor_si128(c2[3727],simde_mm_xor_si128(c2[3134],simde_mm_xor_si128(c2[1322],simde_mm_xor_si128(c2[163],simde_mm_xor_si128(c2[1053],simde_mm_xor_si128(c2[3492],simde_mm_xor_si128(c2[3792],simde_mm_xor_si128(c2[1682],simde_mm_xor_si128(c2[1386],simde_mm_xor_si128(c2[3822],simde_mm_xor_si128(c2[3818],simde_mm_xor_si128(c2[4451],simde_mm_xor_si128(c2[246],simde_mm_xor_si128(c2[254],simde_mm_xor_si128(c2[1473],simde_mm_xor_si128(c2[1773],simde_mm_xor_si128(c2[3876],c2[271]))))))))))))))))))))))))))))))))));
 
 //row: 5
-     d2[75]=_mm_xor_si128(c2[7],_mm_xor_si128(c2[307],_mm_xor_si128(c2[4206],_mm_xor_si128(c2[3002],_mm_xor_si128(c2[2707],_mm_xor_si128(c2[636],_mm_xor_si128(c2[936],_mm_xor_si128(c2[4544],_mm_xor_si128(c2[4542],_mm_xor_si128(c2[2732],_mm_xor_si128(c2[1862],_mm_xor_si128(c2[2162],_mm_xor_si128(c2[3368],_mm_xor_si128(c2[1304],_mm_xor_si128(c2[1604],_mm_xor_si128(c2[4298],_mm_xor_si128(c2[1003],_mm_xor_si128(c2[2229],_mm_xor_si128(c2[1621],_mm_xor_si128(c2[4623],_mm_xor_si128(c2[3464],_mm_xor_si128(c2[4354],_mm_xor_si128(c2[2860],_mm_xor_si128(c2[1994],_mm_xor_si128(c2[2294],_mm_xor_si128(c2[184],_mm_xor_si128(c2[4687],_mm_xor_si128(c2[2324],_mm_xor_si128(c2[2320],_mm_xor_si128(c2[2615],_mm_xor_si128(c2[2953],_mm_xor_si128(c2[3547],_mm_xor_si128(c2[3540],_mm_xor_si128(c2[4774],_mm_xor_si128(c2[275],_mm_xor_si128(c2[2378],c2[3572]))))))))))))))))))))))))))))))))))));
+     d2[75]=simde_mm_xor_si128(c2[7],simde_mm_xor_si128(c2[307],simde_mm_xor_si128(c2[4206],simde_mm_xor_si128(c2[3002],simde_mm_xor_si128(c2[2707],simde_mm_xor_si128(c2[636],simde_mm_xor_si128(c2[936],simde_mm_xor_si128(c2[4544],simde_mm_xor_si128(c2[4542],simde_mm_xor_si128(c2[2732],simde_mm_xor_si128(c2[1862],simde_mm_xor_si128(c2[2162],simde_mm_xor_si128(c2[3368],simde_mm_xor_si128(c2[1304],simde_mm_xor_si128(c2[1604],simde_mm_xor_si128(c2[4298],simde_mm_xor_si128(c2[1003],simde_mm_xor_si128(c2[2229],simde_mm_xor_si128(c2[1621],simde_mm_xor_si128(c2[4623],simde_mm_xor_si128(c2[3464],simde_mm_xor_si128(c2[4354],simde_mm_xor_si128(c2[2860],simde_mm_xor_si128(c2[1994],simde_mm_xor_si128(c2[2294],simde_mm_xor_si128(c2[184],simde_mm_xor_si128(c2[4687],simde_mm_xor_si128(c2[2324],simde_mm_xor_si128(c2[2320],simde_mm_xor_si128(c2[2615],simde_mm_xor_si128(c2[2953],simde_mm_xor_si128(c2[3547],simde_mm_xor_si128(c2[3540],simde_mm_xor_si128(c2[4774],simde_mm_xor_si128(c2[275],simde_mm_xor_si128(c2[2378],c2[3572]))))))))))))))))))))))))))))))))))));
 
 //row: 6
-     d2[90]=_mm_xor_si128(c2[2712],_mm_xor_si128(c2[3012],_mm_xor_si128(c2[2112],_mm_xor_si128(c2[908],_mm_xor_si128(c2[2708],_mm_xor_si128(c2[3341],_mm_xor_si128(c2[3641],_mm_xor_si128(c2[2435],_mm_xor_si128(c2[2433],_mm_xor_si128(c2[4567],_mm_xor_si128(c2[68],_mm_xor_si128(c2[1274],_mm_xor_si128(c2[3994],_mm_xor_si128(c2[4294],_mm_xor_si128(c2[2204],_mm_xor_si128(c2[3693],_mm_xor_si128(c2[120],_mm_xor_si128(c2[4326],_mm_xor_si128(c2[2529],_mm_xor_si128(c2[1355],_mm_xor_si128(c2[2260],_mm_xor_si128(c2[2554],_mm_xor_si128(c2[4684],_mm_xor_si128(c2[185],_mm_xor_si128(c2[2889],_mm_xor_si128(c2[2593],_mm_xor_si128(c2[215],_mm_xor_si128(c2[211],_mm_xor_si128(c2[3820],_mm_xor_si128(c2[844],_mm_xor_si128(c2[1453],_mm_xor_si128(c2[1446],_mm_xor_si128(c2[2680],_mm_xor_si128(c2[2980],_mm_xor_si128(c2[284],_mm_xor_si128(c2[1478],c2[3877]))))))))))))))))))))))))))))))))))));
+     d2[90]=simde_mm_xor_si128(c2[2712],simde_mm_xor_si128(c2[3012],simde_mm_xor_si128(c2[2112],simde_mm_xor_si128(c2[908],simde_mm_xor_si128(c2[2708],simde_mm_xor_si128(c2[3341],simde_mm_xor_si128(c2[3641],simde_mm_xor_si128(c2[2435],simde_mm_xor_si128(c2[2433],simde_mm_xor_si128(c2[4567],simde_mm_xor_si128(c2[68],simde_mm_xor_si128(c2[1274],simde_mm_xor_si128(c2[3994],simde_mm_xor_si128(c2[4294],simde_mm_xor_si128(c2[2204],simde_mm_xor_si128(c2[3693],simde_mm_xor_si128(c2[120],simde_mm_xor_si128(c2[4326],simde_mm_xor_si128(c2[2529],simde_mm_xor_si128(c2[1355],simde_mm_xor_si128(c2[2260],simde_mm_xor_si128(c2[2554],simde_mm_xor_si128(c2[4684],simde_mm_xor_si128(c2[185],simde_mm_xor_si128(c2[2889],simde_mm_xor_si128(c2[2593],simde_mm_xor_si128(c2[215],simde_mm_xor_si128(c2[211],simde_mm_xor_si128(c2[3820],simde_mm_xor_si128(c2[844],simde_mm_xor_si128(c2[1453],simde_mm_xor_si128(c2[1446],simde_mm_xor_si128(c2[2680],simde_mm_xor_si128(c2[2980],simde_mm_xor_si128(c2[284],simde_mm_xor_si128(c2[1478],c2[3877]))))))))))))))))))))))))))))))))))));
 
 //row: 7
-     d2[105]=_mm_xor_si128(c2[3904],_mm_xor_si128(c2[4204],_mm_xor_si128(c2[3603],_mm_xor_si128(c2[3304],_mm_xor_si128(c2[2703],_mm_xor_si128(c2[2100],_mm_xor_si128(c2[1514],_mm_xor_si128(c2[4533],_mm_xor_si128(c2[34],_mm_xor_si128(c2[4232],_mm_xor_si128(c2[3642],_mm_xor_si128(c2[3041],_mm_xor_si128(c2[3640],_mm_xor_si128(c2[2739],_mm_xor_si128(c2[3039],_mm_xor_si128(c2[1835],_mm_xor_si128(c2[960],_mm_xor_si128(c2[1260],_mm_xor_si128(c2[674],_mm_xor_si128(c2[2466],_mm_xor_si128(c2[1565],_mm_xor_si128(c2[1865],_mm_xor_si128(c2[402],_mm_xor_si128(c2[702],_mm_xor_si128(c2[101],_mm_xor_si128(c2[3396],_mm_xor_si128(c2[2795],_mm_xor_si128(c2[101],_mm_xor_si128(c2[4299],_mm_xor_si128(c2[1327],_mm_xor_si128(c2[726],_mm_xor_si128(c2[734],_mm_xor_si128(c2[133],_mm_xor_si128(c2[3721],_mm_xor_si128(c2[2820],_mm_xor_si128(c2[3120],_mm_xor_si128(c2[2562],_mm_xor_si128(c2[1961],_mm_xor_si128(c2[3452],_mm_xor_si128(c2[2551],_mm_xor_si128(c2[2851],_mm_xor_si128(c2[3161],_mm_xor_si128(c2[1092],_mm_xor_si128(c2[1392],_mm_xor_si128(c2[791],_mm_xor_si128(c2[4081],_mm_xor_si128(c2[3480],_mm_xor_si128(c2[3785],_mm_xor_si128(c2[2884],_mm_xor_si128(c2[3184],_mm_xor_si128(c2[1422],_mm_xor_si128(c2[821],_mm_xor_si128(c2[1418],_mm_xor_si128(c2[517],_mm_xor_si128(c2[817],_mm_xor_si128(c2[2315],_mm_xor_si128(c2[2051],_mm_xor_si128(c2[1450],_mm_xor_si128(c2[2645],_mm_xor_si128(c2[2044],_mm_xor_si128(c2[2653],_mm_xor_si128(c2[1752],_mm_xor_si128(c2[2052],_mm_xor_si128(c2[3872],_mm_xor_si128(c2[4172],_mm_xor_si128(c2[3571],_mm_xor_si128(c2[1476],_mm_xor_si128(c2[875],_mm_xor_si128(c2[2670],_mm_xor_si128(c2[1784],c2[2084]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+     d2[105]=simde_mm_xor_si128(c2[3904],simde_mm_xor_si128(c2[4204],simde_mm_xor_si128(c2[3603],simde_mm_xor_si128(c2[3304],simde_mm_xor_si128(c2[2703],simde_mm_xor_si128(c2[2100],simde_mm_xor_si128(c2[1514],simde_mm_xor_si128(c2[4533],simde_mm_xor_si128(c2[34],simde_mm_xor_si128(c2[4232],simde_mm_xor_si128(c2[3642],simde_mm_xor_si128(c2[3041],simde_mm_xor_si128(c2[3640],simde_mm_xor_si128(c2[2739],simde_mm_xor_si128(c2[3039],simde_mm_xor_si128(c2[1835],simde_mm_xor_si128(c2[960],simde_mm_xor_si128(c2[1260],simde_mm_xor_si128(c2[674],simde_mm_xor_si128(c2[2466],simde_mm_xor_si128(c2[1565],simde_mm_xor_si128(c2[1865],simde_mm_xor_si128(c2[402],simde_mm_xor_si128(c2[702],simde_mm_xor_si128(c2[101],simde_mm_xor_si128(c2[3396],simde_mm_xor_si128(c2[2795],simde_mm_xor_si128(c2[101],simde_mm_xor_si128(c2[4299],simde_mm_xor_si128(c2[1327],simde_mm_xor_si128(c2[726],simde_mm_xor_si128(c2[734],simde_mm_xor_si128(c2[133],simde_mm_xor_si128(c2[3721],simde_mm_xor_si128(c2[2820],simde_mm_xor_si128(c2[3120],simde_mm_xor_si128(c2[2562],simde_mm_xor_si128(c2[1961],simde_mm_xor_si128(c2[3452],simde_mm_xor_si128(c2[2551],simde_mm_xor_si128(c2[2851],simde_mm_xor_si128(c2[3161],simde_mm_xor_si128(c2[1092],simde_mm_xor_si128(c2[1392],simde_mm_xor_si128(c2[791],simde_mm_xor_si128(c2[4081],simde_mm_xor_si128(c2[3480],simde_mm_xor_si128(c2[3785],simde_mm_xor_si128(c2[2884],simde_mm_xor_si128(c2[3184],simde_mm_xor_si128(c2[1422],simde_mm_xor_si128(c2[821],simde_mm_xor_si128(c2[1418],simde_mm_xor_si128(c2[517],simde_mm_xor_si128(c2[817],simde_mm_xor_si128(c2[2315],simde_mm_xor_si128(c2[2051],simde_mm_xor_si128(c2[1450],simde_mm_xor_si128(c2[2645],simde_mm_xor_si128(c2[2044],simde_mm_xor_si128(c2[2653],simde_mm_xor_si128(c2[1752],simde_mm_xor_si128(c2[2052],simde_mm_xor_si128(c2[3872],simde_mm_xor_si128(c2[4172],simde_mm_xor_si128(c2[3571],simde_mm_xor_si128(c2[1476],simde_mm_xor_si128(c2[875],simde_mm_xor_si128(c2[2670],simde_mm_xor_si128(c2[1784],c2[2084]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
 
 //row: 8
-     d2[120]=_mm_xor_si128(c2[608],_mm_xor_si128(c2[908],_mm_xor_si128(c2[4507],_mm_xor_si128(c2[8],_mm_xor_si128(c2[3603],_mm_xor_si128(c2[11],_mm_xor_si128(c2[1237],_mm_xor_si128(c2[1537],_mm_xor_si128(c2[331],_mm_xor_si128(c2[344],_mm_xor_si128(c2[2740],_mm_xor_si128(c2[2463],_mm_xor_si128(c2[2763],_mm_xor_si128(c2[3969],_mm_xor_si128(c2[1890],_mm_xor_si128(c2[2190],_mm_xor_si128(c2[4599],_mm_xor_si128(c2[100],_mm_xor_si128(c2[1604],_mm_xor_si128(c2[2530],_mm_xor_si128(c2[2830],_mm_xor_si128(c2[2222],_mm_xor_si128(c2[425],_mm_xor_si128(c2[3750],_mm_xor_si128(c2[4050],_mm_xor_si128(c2[156],_mm_xor_si128(c2[2580],_mm_xor_si128(c2[2880],_mm_xor_si128(c2[485],_mm_xor_si128(c2[785],_mm_xor_si128(c2[489],_mm_xor_si128(c2[2610],_mm_xor_si128(c2[2910],_mm_xor_si128(c2[2921],_mm_xor_si128(c2[3254],_mm_xor_si128(c2[3554],_mm_xor_si128(c2[4148],_mm_xor_si128(c2[4141],_mm_xor_si128(c2[576],_mm_xor_si128(c2[876],_mm_xor_si128(c2[2679],_mm_xor_si128(c2[2979],c2[4173]))))))))))))))))))))))))))))))))))))))))));
+     d2[120]=simde_mm_xor_si128(c2[608],simde_mm_xor_si128(c2[908],simde_mm_xor_si128(c2[4507],simde_mm_xor_si128(c2[8],simde_mm_xor_si128(c2[3603],simde_mm_xor_si128(c2[11],simde_mm_xor_si128(c2[1237],simde_mm_xor_si128(c2[1537],simde_mm_xor_si128(c2[331],simde_mm_xor_si128(c2[344],simde_mm_xor_si128(c2[2740],simde_mm_xor_si128(c2[2463],simde_mm_xor_si128(c2[2763],simde_mm_xor_si128(c2[3969],simde_mm_xor_si128(c2[1890],simde_mm_xor_si128(c2[2190],simde_mm_xor_si128(c2[4599],simde_mm_xor_si128(c2[100],simde_mm_xor_si128(c2[1604],simde_mm_xor_si128(c2[2530],simde_mm_xor_si128(c2[2830],simde_mm_xor_si128(c2[2222],simde_mm_xor_si128(c2[425],simde_mm_xor_si128(c2[3750],simde_mm_xor_si128(c2[4050],simde_mm_xor_si128(c2[156],simde_mm_xor_si128(c2[2580],simde_mm_xor_si128(c2[2880],simde_mm_xor_si128(c2[485],simde_mm_xor_si128(c2[785],simde_mm_xor_si128(c2[489],simde_mm_xor_si128(c2[2610],simde_mm_xor_si128(c2[2910],simde_mm_xor_si128(c2[2921],simde_mm_xor_si128(c2[3254],simde_mm_xor_si128(c2[3554],simde_mm_xor_si128(c2[4148],simde_mm_xor_si128(c2[4141],simde_mm_xor_si128(c2[576],simde_mm_xor_si128(c2[876],simde_mm_xor_si128(c2[2679],simde_mm_xor_si128(c2[2979],c2[4173]))))))))))))))))))))))))))))))))))))))))));
 
 //row: 9
-     d2[135]=_mm_xor_si128(c2[312],_mm_xor_si128(c2[1513],_mm_xor_si128(c2[1813],_mm_xor_si128(c2[4211],_mm_xor_si128(c2[913],_mm_xor_si128(c2[3007],_mm_xor_si128(c2[4508],_mm_xor_si128(c2[941],_mm_xor_si128(c2[2142],_mm_xor_si128(c2[2442],_mm_xor_si128(c2[4534],_mm_xor_si128(c2[1236],_mm_xor_si128(c2[4532],_mm_xor_si128(c2[1234],_mm_xor_si128(c2[2140],_mm_xor_si128(c2[2167],_mm_xor_si128(c2[3368],_mm_xor_si128(c2[3668],_mm_xor_si128(c2[3373],_mm_xor_si128(c2[60],_mm_xor_si128(c2[1594],_mm_xor_si128(c2[2795],_mm_xor_si128(c2[3095],_mm_xor_si128(c2[4303],_mm_xor_si128(c2[990],_mm_xor_si128(c2[993],_mm_xor_si128(c2[2494],_mm_xor_si128(c2[2234],_mm_xor_si128(c2[3720],_mm_xor_si128(c2[1626],_mm_xor_si128(c2[3127],_mm_xor_si128(c2[4628],_mm_xor_si128(c2[1330],_mm_xor_si128(c2[3454],_mm_xor_si128(c2[156],_mm_xor_si128(c2[4359],_mm_xor_si128(c2[1061],_mm_xor_si128(c2[2284],_mm_xor_si128(c2[3485],_mm_xor_si128(c2[3785],_mm_xor_si128(c2[189],_mm_xor_si128(c2[1690],_mm_xor_si128(c2[4692],_mm_xor_si128(c2[1394],_mm_xor_si128(c2[2314],_mm_xor_si128(c2[3815],_mm_xor_si128(c2[2310],_mm_xor_si128(c2[3811],_mm_xor_si128(c2[2943],_mm_xor_si128(c2[4444],_mm_xor_si128(c2[3552],_mm_xor_si128(c2[254],_mm_xor_si128(c2[3545],_mm_xor_si128(c2[247],_mm_xor_si128(c2[4454],_mm_xor_si128(c2[280],_mm_xor_si128(c2[1481],_mm_xor_si128(c2[1781],_mm_xor_si128(c2[2383],_mm_xor_si128(c2[3884],_mm_xor_si128(c2[3577],c2[279])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+     d2[135]=simde_mm_xor_si128(c2[312],simde_mm_xor_si128(c2[1513],simde_mm_xor_si128(c2[1813],simde_mm_xor_si128(c2[4211],simde_mm_xor_si128(c2[913],simde_mm_xor_si128(c2[3007],simde_mm_xor_si128(c2[4508],simde_mm_xor_si128(c2[941],simde_mm_xor_si128(c2[2142],simde_mm_xor_si128(c2[2442],simde_mm_xor_si128(c2[4534],simde_mm_xor_si128(c2[1236],simde_mm_xor_si128(c2[4532],simde_mm_xor_si128(c2[1234],simde_mm_xor_si128(c2[2140],simde_mm_xor_si128(c2[2167],simde_mm_xor_si128(c2[3368],simde_mm_xor_si128(c2[3668],simde_mm_xor_si128(c2[3373],simde_mm_xor_si128(c2[60],simde_mm_xor_si128(c2[1594],simde_mm_xor_si128(c2[2795],simde_mm_xor_si128(c2[3095],simde_mm_xor_si128(c2[4303],simde_mm_xor_si128(c2[990],simde_mm_xor_si128(c2[993],simde_mm_xor_si128(c2[2494],simde_mm_xor_si128(c2[2234],simde_mm_xor_si128(c2[3720],simde_mm_xor_si128(c2[1626],simde_mm_xor_si128(c2[3127],simde_mm_xor_si128(c2[4628],simde_mm_xor_si128(c2[1330],simde_mm_xor_si128(c2[3454],simde_mm_xor_si128(c2[156],simde_mm_xor_si128(c2[4359],simde_mm_xor_si128(c2[1061],simde_mm_xor_si128(c2[2284],simde_mm_xor_si128(c2[3485],simde_mm_xor_si128(c2[3785],simde_mm_xor_si128(c2[189],simde_mm_xor_si128(c2[1690],simde_mm_xor_si128(c2[4692],simde_mm_xor_si128(c2[1394],simde_mm_xor_si128(c2[2314],simde_mm_xor_si128(c2[3815],simde_mm_xor_si128(c2[2310],simde_mm_xor_si128(c2[3811],simde_mm_xor_si128(c2[2943],simde_mm_xor_si128(c2[4444],simde_mm_xor_si128(c2[3552],simde_mm_xor_si128(c2[254],simde_mm_xor_si128(c2[3545],simde_mm_xor_si128(c2[247],simde_mm_xor_si128(c2[4454],simde_mm_xor_si128(c2[280],simde_mm_xor_si128(c2[1481],simde_mm_xor_si128(c2[1781],simde_mm_xor_si128(c2[2383],simde_mm_xor_si128(c2[3884],simde_mm_xor_si128(c2[3577],c2[279])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
 
 //row: 10
-     d2[150]=_mm_xor_si128(c2[1802],_mm_xor_si128(c2[2743],_mm_xor_si128(c2[193],c2[2624])));
+     d2[150]=simde_mm_xor_si128(c2[1802],simde_mm_xor_si128(c2[2743],simde_mm_xor_si128(c2[193],c2[2624])));
 
 //row: 11
-     d2[165]=_mm_xor_si128(c2[1512],_mm_xor_si128(c2[612],_mm_xor_si128(c2[4207],_mm_xor_si128(c2[611],_mm_xor_si128(c2[2141],_mm_xor_si128(c2[935],_mm_xor_si128(c2[633],_mm_xor_si128(c2[933],_mm_xor_si128(c2[3367],_mm_xor_si128(c2[4273],_mm_xor_si128(c2[4573],_mm_xor_si128(c2[2794],_mm_xor_si128(c2[704],_mm_xor_si128(c2[2193],_mm_xor_si128(c2[3434],_mm_xor_si128(c2[2826],_mm_xor_si128(c2[729],_mm_xor_si128(c2[1029],_mm_xor_si128(c2[4654],_mm_xor_si128(c2[460],_mm_xor_si128(c2[760],_mm_xor_si128(c2[3484],_mm_xor_si128(c2[1389],_mm_xor_si128(c2[793],_mm_xor_si128(c2[1093],_mm_xor_si128(c2[3514],_mm_xor_si128(c2[3210],_mm_xor_si128(c2[3510],_mm_xor_si128(c2[2023],_mm_xor_si128(c2[4143],_mm_xor_si128(c2[4752],_mm_xor_si128(c2[4445],_mm_xor_si128(c2[4745],_mm_xor_si128(c2[1480],_mm_xor_si128(c2[3583],_mm_xor_si128(c2[4477],_mm_xor_si128(c2[4777],c2[2680])))))))))))))))))))))))))))))))))))));
+     d2[165]=simde_mm_xor_si128(c2[1512],simde_mm_xor_si128(c2[612],simde_mm_xor_si128(c2[4207],simde_mm_xor_si128(c2[611],simde_mm_xor_si128(c2[2141],simde_mm_xor_si128(c2[935],simde_mm_xor_si128(c2[633],simde_mm_xor_si128(c2[933],simde_mm_xor_si128(c2[3367],simde_mm_xor_si128(c2[4273],simde_mm_xor_si128(c2[4573],simde_mm_xor_si128(c2[2794],simde_mm_xor_si128(c2[704],simde_mm_xor_si128(c2[2193],simde_mm_xor_si128(c2[3434],simde_mm_xor_si128(c2[2826],simde_mm_xor_si128(c2[729],simde_mm_xor_si128(c2[1029],simde_mm_xor_si128(c2[4654],simde_mm_xor_si128(c2[460],simde_mm_xor_si128(c2[760],simde_mm_xor_si128(c2[3484],simde_mm_xor_si128(c2[1389],simde_mm_xor_si128(c2[793],simde_mm_xor_si128(c2[1093],simde_mm_xor_si128(c2[3514],simde_mm_xor_si128(c2[3210],simde_mm_xor_si128(c2[3510],simde_mm_xor_si128(c2[2023],simde_mm_xor_si128(c2[4143],simde_mm_xor_si128(c2[4752],simde_mm_xor_si128(c2[4445],simde_mm_xor_si128(c2[4745],simde_mm_xor_si128(c2[1480],simde_mm_xor_si128(c2[3583],simde_mm_xor_si128(c2[4477],simde_mm_xor_si128(c2[4777],c2[2680])))))))))))))))))))))))))))))))))))));
 
 //row: 12
-     d2[180]=_mm_xor_si128(c2[2713],_mm_xor_si128(c2[3013],_mm_xor_si128(c2[2113],_mm_xor_si128(c2[909],_mm_xor_si128(c2[3342],_mm_xor_si128(c2[3642],_mm_xor_si128(c2[2436],_mm_xor_si128(c2[2434],_mm_xor_si128(c2[3637],_mm_xor_si128(c2[4568],_mm_xor_si128(c2[69],_mm_xor_si128(c2[1260],_mm_xor_si128(c2[3995],_mm_xor_si128(c2[4295],_mm_xor_si128(c2[2190],_mm_xor_si128(c2[3694],_mm_xor_si128(c2[3097],_mm_xor_si128(c2[121],_mm_xor_si128(c2[4327],_mm_xor_si128(c2[2530],_mm_xor_si128(c2[1356],_mm_xor_si128(c2[2261],_mm_xor_si128(c2[4685],_mm_xor_si128(c2[186],_mm_xor_si128(c2[2890],_mm_xor_si128(c2[2594],_mm_xor_si128(c2[216],_mm_xor_si128(c2[212],_mm_xor_si128(c2[845],_mm_xor_si128(c2[1454],_mm_xor_si128(c2[1447],_mm_xor_si128(c2[2681],_mm_xor_si128(c2[2981],_mm_xor_si128(c2[270],c2[1479]))))))))))))))))))))))))))))))))));
+     d2[180]=simde_mm_xor_si128(c2[2713],simde_mm_xor_si128(c2[3013],simde_mm_xor_si128(c2[2113],simde_mm_xor_si128(c2[909],simde_mm_xor_si128(c2[3342],simde_mm_xor_si128(c2[3642],simde_mm_xor_si128(c2[2436],simde_mm_xor_si128(c2[2434],simde_mm_xor_si128(c2[3637],simde_mm_xor_si128(c2[4568],simde_mm_xor_si128(c2[69],simde_mm_xor_si128(c2[1260],simde_mm_xor_si128(c2[3995],simde_mm_xor_si128(c2[4295],simde_mm_xor_si128(c2[2190],simde_mm_xor_si128(c2[3694],simde_mm_xor_si128(c2[3097],simde_mm_xor_si128(c2[121],simde_mm_xor_si128(c2[4327],simde_mm_xor_si128(c2[2530],simde_mm_xor_si128(c2[1356],simde_mm_xor_si128(c2[2261],simde_mm_xor_si128(c2[4685],simde_mm_xor_si128(c2[186],simde_mm_xor_si128(c2[2890],simde_mm_xor_si128(c2[2594],simde_mm_xor_si128(c2[216],simde_mm_xor_si128(c2[212],simde_mm_xor_si128(c2[845],simde_mm_xor_si128(c2[1454],simde_mm_xor_si128(c2[1447],simde_mm_xor_si128(c2[2681],simde_mm_xor_si128(c2[2981],simde_mm_xor_si128(c2[270],c2[1479]))))))))))))))))))))))))))))))))));
 
 //row: 13
-     d2[195]=_mm_xor_si128(c2[3907],_mm_xor_si128(c2[3007],_mm_xor_si128(c2[1803],_mm_xor_si128(c2[3],_mm_xor_si128(c2[4536],_mm_xor_si128(c2[3330],_mm_xor_si128(c2[3043],_mm_xor_si128(c2[3343],_mm_xor_si128(c2[2733],_mm_xor_si128(c2[963],_mm_xor_si128(c2[1869],_mm_xor_si128(c2[2169],_mm_xor_si128(c2[390],_mm_xor_si128(c2[3099],_mm_xor_si128(c2[4603],_mm_xor_si128(c2[1030],_mm_xor_si128(c2[422],_mm_xor_si128(c2[3124],_mm_xor_si128(c2[3424],_mm_xor_si128(c2[2250],_mm_xor_si128(c2[2855],_mm_xor_si128(c2[3155],_mm_xor_si128(c2[1080],_mm_xor_si128(c2[3784],_mm_xor_si128(c2[3188],_mm_xor_si128(c2[3488],_mm_xor_si128(c2[1110],_mm_xor_si128(c2[821],_mm_xor_si128(c2[1121],_mm_xor_si128(c2[1754],_mm_xor_si128(c2[2348],_mm_xor_si128(c2[2041],_mm_xor_si128(c2[2341],_mm_xor_si128(c2[2350],_mm_xor_si128(c2[3875],_mm_xor_si128(c2[1179],_mm_xor_si128(c2[2073],c2[2373])))))))))))))))))))))))))))))))))))));
+     d2[195]=simde_mm_xor_si128(c2[3907],simde_mm_xor_si128(c2[3007],simde_mm_xor_si128(c2[1803],simde_mm_xor_si128(c2[3],simde_mm_xor_si128(c2[4536],simde_mm_xor_si128(c2[3330],simde_mm_xor_si128(c2[3043],simde_mm_xor_si128(c2[3343],simde_mm_xor_si128(c2[2733],simde_mm_xor_si128(c2[963],simde_mm_xor_si128(c2[1869],simde_mm_xor_si128(c2[2169],simde_mm_xor_si128(c2[390],simde_mm_xor_si128(c2[3099],simde_mm_xor_si128(c2[4603],simde_mm_xor_si128(c2[1030],simde_mm_xor_si128(c2[422],simde_mm_xor_si128(c2[3124],simde_mm_xor_si128(c2[3424],simde_mm_xor_si128(c2[2250],simde_mm_xor_si128(c2[2855],simde_mm_xor_si128(c2[3155],simde_mm_xor_si128(c2[1080],simde_mm_xor_si128(c2[3784],simde_mm_xor_si128(c2[3188],simde_mm_xor_si128(c2[3488],simde_mm_xor_si128(c2[1110],simde_mm_xor_si128(c2[821],simde_mm_xor_si128(c2[1121],simde_mm_xor_si128(c2[1754],simde_mm_xor_si128(c2[2348],simde_mm_xor_si128(c2[2041],simde_mm_xor_si128(c2[2341],simde_mm_xor_si128(c2[2350],simde_mm_xor_si128(c2[3875],simde_mm_xor_si128(c2[1179],simde_mm_xor_si128(c2[2073],c2[2373])))))))))))))))))))))))))))))))))))));
 
 //row: 14
-     d2[210]=_mm_xor_si128(c2[3906],_mm_xor_si128(c2[4206],_mm_xor_si128(c2[1204],_mm_xor_si128(c2[3306],_mm_xor_si128(c2[304],_mm_xor_si128(c2[2102],_mm_xor_si128(c2[3914],_mm_xor_si128(c2[4535],_mm_xor_si128(c2[36],_mm_xor_si128(c2[1833],_mm_xor_si128(c2[3644],_mm_xor_si128(c2[642],_mm_xor_si128(c2[3642],_mm_xor_si128(c2[340],_mm_xor_si128(c2[640],_mm_xor_si128(c2[635],_mm_xor_si128(c2[962],_mm_xor_si128(c2[1262],_mm_xor_si128(c2[3074],_mm_xor_si128(c2[2468],_mm_xor_si128(c2[3965],_mm_xor_si128(c2[4265],_mm_xor_si128(c2[404],_mm_xor_si128(c2[704],_mm_xor_si128(c2[2501],_mm_xor_si128(c2[3398],_mm_xor_si128(c2[396],_mm_xor_si128(c2[103],_mm_xor_si128(c2[1900],_mm_xor_si128(c2[1329],_mm_xor_si128(c2[3126],_mm_xor_si128(c2[721],_mm_xor_si128(c2[2533],_mm_xor_si128(c2[3723],_mm_xor_si128(c2[421],_mm_xor_si128(c2[721],_mm_xor_si128(c2[2564],_mm_xor_si128(c2[4361],_mm_xor_si128(c2[3454],_mm_xor_si128(c2[152],_mm_xor_si128(c2[452],_mm_xor_si128(c2[1094],_mm_xor_si128(c2[1394],_mm_xor_si128(c2[3191],_mm_xor_si128(c2[4083],_mm_xor_si128(c2[1081],_mm_xor_si128(c2[3787],_mm_xor_si128(c2[485],_mm_xor_si128(c2[785],_mm_xor_si128(c2[2594],_mm_xor_si128(c2[1424],_mm_xor_si128(c2[3221],_mm_xor_si128(c2[1420],_mm_xor_si128(c2[2917],_mm_xor_si128(c2[3217],_mm_xor_si128(c2[2053],_mm_xor_si128(c2[3850],_mm_xor_si128(c2[2647],_mm_xor_si128(c2[4444],_mm_xor_si128(c2[2640],_mm_xor_si128(c2[4152],_mm_xor_si128(c2[4452],_mm_xor_si128(c2[3874],_mm_xor_si128(c2[4174],_mm_xor_si128(c2[1172],_mm_xor_si128(c2[1478],_mm_xor_si128(c2[3275],_mm_xor_si128(c2[2672],_mm_xor_si128(c2[4184],c2[4484])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+     d2[210]=simde_mm_xor_si128(c2[3906],simde_mm_xor_si128(c2[4206],simde_mm_xor_si128(c2[1204],simde_mm_xor_si128(c2[3306],simde_mm_xor_si128(c2[304],simde_mm_xor_si128(c2[2102],simde_mm_xor_si128(c2[3914],simde_mm_xor_si128(c2[4535],simde_mm_xor_si128(c2[36],simde_mm_xor_si128(c2[1833],simde_mm_xor_si128(c2[3644],simde_mm_xor_si128(c2[642],simde_mm_xor_si128(c2[3642],simde_mm_xor_si128(c2[340],simde_mm_xor_si128(c2[640],simde_mm_xor_si128(c2[635],simde_mm_xor_si128(c2[962],simde_mm_xor_si128(c2[1262],simde_mm_xor_si128(c2[3074],simde_mm_xor_si128(c2[2468],simde_mm_xor_si128(c2[3965],simde_mm_xor_si128(c2[4265],simde_mm_xor_si128(c2[404],simde_mm_xor_si128(c2[704],simde_mm_xor_si128(c2[2501],simde_mm_xor_si128(c2[3398],simde_mm_xor_si128(c2[396],simde_mm_xor_si128(c2[103],simde_mm_xor_si128(c2[1900],simde_mm_xor_si128(c2[1329],simde_mm_xor_si128(c2[3126],simde_mm_xor_si128(c2[721],simde_mm_xor_si128(c2[2533],simde_mm_xor_si128(c2[3723],simde_mm_xor_si128(c2[421],simde_mm_xor_si128(c2[721],simde_mm_xor_si128(c2[2564],simde_mm_xor_si128(c2[4361],simde_mm_xor_si128(c2[3454],simde_mm_xor_si128(c2[152],simde_mm_xor_si128(c2[452],simde_mm_xor_si128(c2[1094],simde_mm_xor_si128(c2[1394],simde_mm_xor_si128(c2[3191],simde_mm_xor_si128(c2[4083],simde_mm_xor_si128(c2[1081],simde_mm_xor_si128(c2[3787],simde_mm_xor_si128(c2[485],simde_mm_xor_si128(c2[785],simde_mm_xor_si128(c2[2594],simde_mm_xor_si128(c2[1424],simde_mm_xor_si128(c2[3221],simde_mm_xor_si128(c2[1420],simde_mm_xor_si128(c2[2917],simde_mm_xor_si128(c2[3217],simde_mm_xor_si128(c2[2053],simde_mm_xor_si128(c2[3850],simde_mm_xor_si128(c2[2647],simde_mm_xor_si128(c2[4444],simde_mm_xor_si128(c2[2640],simde_mm_xor_si128(c2[4152],simde_mm_xor_si128(c2[4452],simde_mm_xor_si128(c2[3874],simde_mm_xor_si128(c2[4174],simde_mm_xor_si128(c2[1172],simde_mm_xor_si128(c2[1478],simde_mm_xor_si128(c2[3275],simde_mm_xor_si128(c2[2672],simde_mm_xor_si128(c2[4184],c2[4484])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
 
 //row: 15
-     d2[225]=_mm_xor_si128(c2[3007],_mm_xor_si128(c2[4203],_mm_xor_si128(c2[4503],_mm_xor_si128(c2[2107],_mm_xor_si128(c2[3603],_mm_xor_si128(c2[903],_mm_xor_si128(c2[2414],_mm_xor_si128(c2[1802],_mm_xor_si128(c2[3636],_mm_xor_si128(c2[33],_mm_xor_si128(c2[333],_mm_xor_si128(c2[2430],_mm_xor_si128(c2[3941],_mm_xor_si128(c2[2443],_mm_xor_si128(c2[3939],_mm_xor_si128(c2[63],_mm_xor_si128(c2[1274],_mm_xor_si128(c2[1574],_mm_xor_si128(c2[1269],_mm_xor_si128(c2[2765],_mm_xor_si128(c2[4304],_mm_xor_si128(c2[701],_mm_xor_si128(c2[1001],_mm_xor_si128(c2[2199],_mm_xor_si128(c2[3695],_mm_xor_si128(c2[3703],_mm_xor_si128(c2[400],_mm_xor_si128(c2[130],_mm_xor_si128(c2[1626],_mm_xor_si128(c2[4321],_mm_xor_si128(c2[1033],_mm_xor_si128(c2[2524],_mm_xor_si128(c2[4020],_mm_xor_si128(c2[1350],_mm_xor_si128(c2[2861],_mm_xor_si128(c2[2255],_mm_xor_si128(c2[3751],_mm_xor_si128(c2[180],_mm_xor_si128(c2[1391],_mm_xor_si128(c2[1691],_mm_xor_si128(c2[2884],_mm_xor_si128(c2[4380],_mm_xor_si128(c2[2588],_mm_xor_si128(c2[4084],_mm_xor_si128(c2[210],_mm_xor_si128(c2[1721],_mm_xor_si128(c2[221],_mm_xor_si128(c2[1717],_mm_xor_si128(c2[854],_mm_xor_si128(c2[2350],_mm_xor_si128(c2[1448],_mm_xor_si128(c2[2944],_mm_xor_si128(c2[1441],_mm_xor_si128(c2[2952],_mm_xor_si128(c2[2975],_mm_xor_si128(c2[4171],_mm_xor_si128(c2[4471],_mm_xor_si128(c2[279],_mm_xor_si128(c2[1775],_mm_xor_si128(c2[1473],c2[2984]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+     d2[225]=simde_mm_xor_si128(c2[3007],simde_mm_xor_si128(c2[4203],simde_mm_xor_si128(c2[4503],simde_mm_xor_si128(c2[2107],simde_mm_xor_si128(c2[3603],simde_mm_xor_si128(c2[903],simde_mm_xor_si128(c2[2414],simde_mm_xor_si128(c2[1802],simde_mm_xor_si128(c2[3636],simde_mm_xor_si128(c2[33],simde_mm_xor_si128(c2[333],simde_mm_xor_si128(c2[2430],simde_mm_xor_si128(c2[3941],simde_mm_xor_si128(c2[2443],simde_mm_xor_si128(c2[3939],simde_mm_xor_si128(c2[63],simde_mm_xor_si128(c2[1274],simde_mm_xor_si128(c2[1574],simde_mm_xor_si128(c2[1269],simde_mm_xor_si128(c2[2765],simde_mm_xor_si128(c2[4304],simde_mm_xor_si128(c2[701],simde_mm_xor_si128(c2[1001],simde_mm_xor_si128(c2[2199],simde_mm_xor_si128(c2[3695],simde_mm_xor_si128(c2[3703],simde_mm_xor_si128(c2[400],simde_mm_xor_si128(c2[130],simde_mm_xor_si128(c2[1626],simde_mm_xor_si128(c2[4321],simde_mm_xor_si128(c2[1033],simde_mm_xor_si128(c2[2524],simde_mm_xor_si128(c2[4020],simde_mm_xor_si128(c2[1350],simde_mm_xor_si128(c2[2861],simde_mm_xor_si128(c2[2255],simde_mm_xor_si128(c2[3751],simde_mm_xor_si128(c2[180],simde_mm_xor_si128(c2[1391],simde_mm_xor_si128(c2[1691],simde_mm_xor_si128(c2[2884],simde_mm_xor_si128(c2[4380],simde_mm_xor_si128(c2[2588],simde_mm_xor_si128(c2[4084],simde_mm_xor_si128(c2[210],simde_mm_xor_si128(c2[1721],simde_mm_xor_si128(c2[221],simde_mm_xor_si128(c2[1717],simde_mm_xor_si128(c2[854],simde_mm_xor_si128(c2[2350],simde_mm_xor_si128(c2[1448],simde_mm_xor_si128(c2[2944],simde_mm_xor_si128(c2[1441],simde_mm_xor_si128(c2[2952],simde_mm_xor_si128(c2[2975],simde_mm_xor_si128(c2[4171],simde_mm_xor_si128(c2[4471],simde_mm_xor_si128(c2[279],simde_mm_xor_si128(c2[1775],simde_mm_xor_si128(c2[1473],c2[2984]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
 
 //row: 16
-     d2[240]=_mm_xor_si128(c2[5],_mm_xor_si128(c2[305],_mm_xor_si128(c2[3306],_mm_xor_si128(c2[3606],_mm_xor_si128(c2[4204],_mm_xor_si128(c2[2406],_mm_xor_si128(c2[2706],_mm_xor_si128(c2[3000],_mm_xor_si128(c2[1502],_mm_xor_si128(c2[634],_mm_xor_si128(c2[934],_mm_xor_si128(c2[3935],_mm_xor_si128(c2[4235],_mm_xor_si128(c2[4542],_mm_xor_si128(c2[3044],_mm_xor_si128(c2[4540],_mm_xor_si128(c2[3042],_mm_xor_si128(c2[3040],_mm_xor_si128(c2[1860],_mm_xor_si128(c2[2160],_mm_xor_si128(c2[362],_mm_xor_si128(c2[662],_mm_xor_si128(c2[3366],_mm_xor_si128(c2[1868],_mm_xor_si128(c2[1302],_mm_xor_si128(c2[1602],_mm_xor_si128(c2[4603],_mm_xor_si128(c2[104],_mm_xor_si128(c2[4296],_mm_xor_si128(c2[2498],_mm_xor_si128(c2[2798],_mm_xor_si128(c2[1001],_mm_xor_si128(c2[4302],_mm_xor_si128(c2[2227],_mm_xor_si128(c2[429],_mm_xor_si128(c2[729],_mm_xor_si128(c2[1634],_mm_xor_si128(c2[121],_mm_xor_si128(c2[4621],_mm_xor_si128(c2[3123],_mm_xor_si128(c2[3462],_mm_xor_si128(c2[1664],_mm_xor_si128(c2[1964],_mm_xor_si128(c2[4352],_mm_xor_si128(c2[2854],_mm_xor_si128(c2[1992],_mm_xor_si128(c2[2292],_mm_xor_si128(c2[494],_mm_xor_si128(c2[794],_mm_xor_si128(c2[182],_mm_xor_si128(c2[3183],_mm_xor_si128(c2[3483],_mm_xor_si128(c2[4685],_mm_xor_si128(c2[3187],_mm_xor_si128(c2[2322],_mm_xor_si128(c2[524],_mm_xor_si128(c2[824],_mm_xor_si128(c2[2318],_mm_xor_si128(c2[820],_mm_xor_si128(c2[2951],_mm_xor_si128(c2[1153],_mm_xor_si128(c2[1453],_mm_xor_si128(c2[3545],_mm_xor_si128(c2[2047],_mm_xor_si128(c2[3553],_mm_xor_si128(c2[2040],_mm_xor_si128(c2[4772],_mm_xor_si128(c2[273],_mm_xor_si128(c2[3274],_mm_xor_si128(c2[3574],_mm_xor_si128(c2[2376],_mm_xor_si128(c2[578],_mm_xor_si128(c2[878],_mm_xor_si128(c2[3570],_mm_xor_si128(c2[2072],c2[2371])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+     d2[240]=simde_mm_xor_si128(c2[5],simde_mm_xor_si128(c2[305],simde_mm_xor_si128(c2[3306],simde_mm_xor_si128(c2[3606],simde_mm_xor_si128(c2[4204],simde_mm_xor_si128(c2[2406],simde_mm_xor_si128(c2[2706],simde_mm_xor_si128(c2[3000],simde_mm_xor_si128(c2[1502],simde_mm_xor_si128(c2[634],simde_mm_xor_si128(c2[934],simde_mm_xor_si128(c2[3935],simde_mm_xor_si128(c2[4235],simde_mm_xor_si128(c2[4542],simde_mm_xor_si128(c2[3044],simde_mm_xor_si128(c2[4540],simde_mm_xor_si128(c2[3042],simde_mm_xor_si128(c2[3040],simde_mm_xor_si128(c2[1860],simde_mm_xor_si128(c2[2160],simde_mm_xor_si128(c2[362],simde_mm_xor_si128(c2[662],simde_mm_xor_si128(c2[3366],simde_mm_xor_si128(c2[1868],simde_mm_xor_si128(c2[1302],simde_mm_xor_si128(c2[1602],simde_mm_xor_si128(c2[4603],simde_mm_xor_si128(c2[104],simde_mm_xor_si128(c2[4296],simde_mm_xor_si128(c2[2498],simde_mm_xor_si128(c2[2798],simde_mm_xor_si128(c2[1001],simde_mm_xor_si128(c2[4302],simde_mm_xor_si128(c2[2227],simde_mm_xor_si128(c2[429],simde_mm_xor_si128(c2[729],simde_mm_xor_si128(c2[1634],simde_mm_xor_si128(c2[121],simde_mm_xor_si128(c2[4621],simde_mm_xor_si128(c2[3123],simde_mm_xor_si128(c2[3462],simde_mm_xor_si128(c2[1664],simde_mm_xor_si128(c2[1964],simde_mm_xor_si128(c2[4352],simde_mm_xor_si128(c2[2854],simde_mm_xor_si128(c2[1992],simde_mm_xor_si128(c2[2292],simde_mm_xor_si128(c2[494],simde_mm_xor_si128(c2[794],simde_mm_xor_si128(c2[182],simde_mm_xor_si128(c2[3183],simde_mm_xor_si128(c2[3483],simde_mm_xor_si128(c2[4685],simde_mm_xor_si128(c2[3187],simde_mm_xor_si128(c2[2322],simde_mm_xor_si128(c2[524],simde_mm_xor_si128(c2[824],simde_mm_xor_si128(c2[2318],simde_mm_xor_si128(c2[820],simde_mm_xor_si128(c2[2951],simde_mm_xor_si128(c2[1153],simde_mm_xor_si128(c2[1453],simde_mm_xor_si128(c2[3545],simde_mm_xor_si128(c2[2047],simde_mm_xor_si128(c2[3553],simde_mm_xor_si128(c2[2040],simde_mm_xor_si128(c2[4772],simde_mm_xor_si128(c2[273],simde_mm_xor_si128(c2[3274],simde_mm_xor_si128(c2[3574],simde_mm_xor_si128(c2[2376],simde_mm_xor_si128(c2[578],simde_mm_xor_si128(c2[878],simde_mm_xor_si128(c2[3570],simde_mm_xor_si128(c2[2072],c2[2371])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
 
 //row: 17
-     d2[255]=_mm_xor_si128(c2[1206],_mm_xor_si128(c2[1506],_mm_xor_si128(c2[3307],_mm_xor_si128(c2[3607],_mm_xor_si128(c2[606],_mm_xor_si128(c2[2407],_mm_xor_si128(c2[2707],_mm_xor_si128(c2[4201],_mm_xor_si128(c2[1503],_mm_xor_si128(c2[1835],_mm_xor_si128(c2[2135],_mm_xor_si128(c2[3936],_mm_xor_si128(c2[4236],_mm_xor_si128(c2[944],_mm_xor_si128(c2[3030],_mm_xor_si128(c2[942],_mm_xor_si128(c2[3043],_mm_xor_si128(c2[1242],_mm_xor_si128(c2[3061],_mm_xor_si128(c2[3361],_mm_xor_si128(c2[363],_mm_xor_si128(c2[663],_mm_xor_si128(c2[4567],_mm_xor_si128(c2[1869],_mm_xor_si128(c2[2503],_mm_xor_si128(c2[2803],_mm_xor_si128(c2[4604],_mm_xor_si128(c2[90],_mm_xor_si128(c2[698],_mm_xor_si128(c2[2499],_mm_xor_si128(c2[2799],_mm_xor_si128(c2[2202],_mm_xor_si128(c2[4303],_mm_xor_si128(c2[3428],_mm_xor_si128(c2[430],_mm_xor_si128(c2[730],_mm_xor_si128(c2[2820],_mm_xor_si128(c2[122],_mm_xor_si128(c2[1023],_mm_xor_si128(c2[3124],_mm_xor_si128(c2[4663],_mm_xor_si128(c2[1650],_mm_xor_si128(c2[1950],_mm_xor_si128(c2[754],_mm_xor_si128(c2[2855],_mm_xor_si128(c2[4060],_mm_xor_si128(c2[3193],_mm_xor_si128(c2[3493],_mm_xor_si128(c2[480],_mm_xor_si128(c2[780],_mm_xor_si128(c2[1383],_mm_xor_si128(c2[3184],_mm_xor_si128(c2[3484],_mm_xor_si128(c2[1087],_mm_xor_si128(c2[3188],_mm_xor_si128(c2[3523],_mm_xor_si128(c2[510],_mm_xor_si128(c2[810],_mm_xor_si128(c2[3519],_mm_xor_si128(c2[821],_mm_xor_si128(c2[4152],_mm_xor_si128(c2[1154],_mm_xor_si128(c2[1454],_mm_xor_si128(c2[4746],_mm_xor_si128(c2[2048],_mm_xor_si128(c2[4754],_mm_xor_si128(c2[2041],_mm_xor_si128(c2[1174],_mm_xor_si128(c2[1474],_mm_xor_si128(c2[3275],_mm_xor_si128(c2[3575],_mm_xor_si128(c2[3577],_mm_xor_si128(c2[579],_mm_xor_si128(c2[879],_mm_xor_si128(c2[4771],c2[2073])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+     d2[255]=simde_mm_xor_si128(c2[1206],simde_mm_xor_si128(c2[1506],simde_mm_xor_si128(c2[3307],simde_mm_xor_si128(c2[3607],simde_mm_xor_si128(c2[606],simde_mm_xor_si128(c2[2407],simde_mm_xor_si128(c2[2707],simde_mm_xor_si128(c2[4201],simde_mm_xor_si128(c2[1503],simde_mm_xor_si128(c2[1835],simde_mm_xor_si128(c2[2135],simde_mm_xor_si128(c2[3936],simde_mm_xor_si128(c2[4236],simde_mm_xor_si128(c2[944],simde_mm_xor_si128(c2[3030],simde_mm_xor_si128(c2[942],simde_mm_xor_si128(c2[3043],simde_mm_xor_si128(c2[1242],simde_mm_xor_si128(c2[3061],simde_mm_xor_si128(c2[3361],simde_mm_xor_si128(c2[363],simde_mm_xor_si128(c2[663],simde_mm_xor_si128(c2[4567],simde_mm_xor_si128(c2[1869],simde_mm_xor_si128(c2[2503],simde_mm_xor_si128(c2[2803],simde_mm_xor_si128(c2[4604],simde_mm_xor_si128(c2[90],simde_mm_xor_si128(c2[698],simde_mm_xor_si128(c2[2499],simde_mm_xor_si128(c2[2799],simde_mm_xor_si128(c2[2202],simde_mm_xor_si128(c2[4303],simde_mm_xor_si128(c2[3428],simde_mm_xor_si128(c2[430],simde_mm_xor_si128(c2[730],simde_mm_xor_si128(c2[2820],simde_mm_xor_si128(c2[122],simde_mm_xor_si128(c2[1023],simde_mm_xor_si128(c2[3124],simde_mm_xor_si128(c2[4663],simde_mm_xor_si128(c2[1650],simde_mm_xor_si128(c2[1950],simde_mm_xor_si128(c2[754],simde_mm_xor_si128(c2[2855],simde_mm_xor_si128(c2[4060],simde_mm_xor_si128(c2[3193],simde_mm_xor_si128(c2[3493],simde_mm_xor_si128(c2[480],simde_mm_xor_si128(c2[780],simde_mm_xor_si128(c2[1383],simde_mm_xor_si128(c2[3184],simde_mm_xor_si128(c2[3484],simde_mm_xor_si128(c2[1087],simde_mm_xor_si128(c2[3188],simde_mm_xor_si128(c2[3523],simde_mm_xor_si128(c2[510],simde_mm_xor_si128(c2[810],simde_mm_xor_si128(c2[3519],simde_mm_xor_si128(c2[821],simde_mm_xor_si128(c2[4152],simde_mm_xor_si128(c2[1154],simde_mm_xor_si128(c2[1454],simde_mm_xor_si128(c2[4746],simde_mm_xor_si128(c2[2048],simde_mm_xor_si128(c2[4754],simde_mm_xor_si128(c2[2041],simde_mm_xor_si128(c2[1174],simde_mm_xor_si128(c2[1474],simde_mm_xor_si128(c2[3275],simde_mm_xor_si128(c2[3575],simde_mm_xor_si128(c2[3577],simde_mm_xor_si128(c2[579],simde_mm_xor_si128(c2[879],simde_mm_xor_si128(c2[4771],c2[2073])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
 
 //row: 18
-     d2[270]=_mm_xor_si128(c2[8],_mm_xor_si128(c2[1093],c2[823]));
+     d2[270]=simde_mm_xor_si128(c2[8],simde_mm_xor_si128(c2[1093],c2[823]));
 
 //row: 19
-     d2[285]=_mm_xor_si128(c2[2714],_mm_xor_si128(c2[1814],_mm_xor_si128(c2[610],_mm_xor_si128(c2[2102],_mm_xor_si128(c2[3343],_mm_xor_si128(c2[2137],_mm_xor_si128(c2[2135],_mm_xor_si128(c2[1235],_mm_xor_si128(c2[4569],_mm_xor_si128(c2[961],_mm_xor_si128(c2[3996],_mm_xor_si128(c2[1891],_mm_xor_si128(c2[3395],_mm_xor_si128(c2[4621],_mm_xor_si128(c2[4028],_mm_xor_si128(c2[2231],_mm_xor_si128(c2[1057],_mm_xor_si128(c2[1962],_mm_xor_si128(c2[4686],_mm_xor_si128(c2[2591],_mm_xor_si128(c2[2280],_mm_xor_si128(c2[4716],_mm_xor_si128(c2[4712],_mm_xor_si128(c2[546],_mm_xor_si128(c2[1140],_mm_xor_si128(c2[1148],_mm_xor_si128(c2[2682],_mm_xor_si128(c2[4770],c2[1180]))))))))))))))))))))))))))));
+     d2[285]=simde_mm_xor_si128(c2[2714],simde_mm_xor_si128(c2[1814],simde_mm_xor_si128(c2[610],simde_mm_xor_si128(c2[2102],simde_mm_xor_si128(c2[3343],simde_mm_xor_si128(c2[2137],simde_mm_xor_si128(c2[2135],simde_mm_xor_si128(c2[1235],simde_mm_xor_si128(c2[4569],simde_mm_xor_si128(c2[961],simde_mm_xor_si128(c2[3996],simde_mm_xor_si128(c2[1891],simde_mm_xor_si128(c2[3395],simde_mm_xor_si128(c2[4621],simde_mm_xor_si128(c2[4028],simde_mm_xor_si128(c2[2231],simde_mm_xor_si128(c2[1057],simde_mm_xor_si128(c2[1962],simde_mm_xor_si128(c2[4686],simde_mm_xor_si128(c2[2591],simde_mm_xor_si128(c2[2280],simde_mm_xor_si128(c2[4716],simde_mm_xor_si128(c2[4712],simde_mm_xor_si128(c2[546],simde_mm_xor_si128(c2[1140],simde_mm_xor_si128(c2[1148],simde_mm_xor_si128(c2[2682],simde_mm_xor_si128(c2[4770],c2[1180]))))))))))))))))))))))))))));
 
 //row: 20
-     d2[300]=_mm_xor_si128(c2[2109],_mm_xor_si128(c2[2409],_mm_xor_si128(c2[1509],_mm_xor_si128(c2[305],_mm_xor_si128(c2[2738],_mm_xor_si128(c2[3038],_mm_xor_si128(c2[1832],_mm_xor_si128(c2[1830],_mm_xor_si128(c2[1537],_mm_xor_si128(c2[3964],_mm_xor_si128(c2[4264],_mm_xor_si128(c2[671],_mm_xor_si128(c2[3391],_mm_xor_si128(c2[3691],_mm_xor_si128(c2[1601],_mm_xor_si128(c2[3090],_mm_xor_si128(c2[4331],_mm_xor_si128(c2[3723],_mm_xor_si128(c2[1926],_mm_xor_si128(c2[1034],_mm_xor_si128(c2[752],_mm_xor_si128(c2[1657],_mm_xor_si128(c2[4081],_mm_xor_si128(c2[4381],_mm_xor_si128(c2[2286],_mm_xor_si128(c2[1990],_mm_xor_si128(c2[4411],_mm_xor_si128(c2[4422],_mm_xor_si128(c2[241],_mm_xor_si128(c2[850],_mm_xor_si128(c2[843],_mm_xor_si128(c2[2077],_mm_xor_si128(c2[2377],_mm_xor_si128(c2[4480],c2[875]))))))))))))))))))))))))))))))))));
+     d2[300]=simde_mm_xor_si128(c2[2109],simde_mm_xor_si128(c2[2409],simde_mm_xor_si128(c2[1509],simde_mm_xor_si128(c2[305],simde_mm_xor_si128(c2[2738],simde_mm_xor_si128(c2[3038],simde_mm_xor_si128(c2[1832],simde_mm_xor_si128(c2[1830],simde_mm_xor_si128(c2[1537],simde_mm_xor_si128(c2[3964],simde_mm_xor_si128(c2[4264],simde_mm_xor_si128(c2[671],simde_mm_xor_si128(c2[3391],simde_mm_xor_si128(c2[3691],simde_mm_xor_si128(c2[1601],simde_mm_xor_si128(c2[3090],simde_mm_xor_si128(c2[4331],simde_mm_xor_si128(c2[3723],simde_mm_xor_si128(c2[1926],simde_mm_xor_si128(c2[1034],simde_mm_xor_si128(c2[752],simde_mm_xor_si128(c2[1657],simde_mm_xor_si128(c2[4081],simde_mm_xor_si128(c2[4381],simde_mm_xor_si128(c2[2286],simde_mm_xor_si128(c2[1990],simde_mm_xor_si128(c2[4411],simde_mm_xor_si128(c2[4422],simde_mm_xor_si128(c2[241],simde_mm_xor_si128(c2[850],simde_mm_xor_si128(c2[843],simde_mm_xor_si128(c2[2077],simde_mm_xor_si128(c2[2377],simde_mm_xor_si128(c2[4480],c2[875]))))))))))))))))))))))))))))))))));
 
 //row: 21
-     d2[315]=_mm_xor_si128(c2[3909],_mm_xor_si128(c2[3009],_mm_xor_si128(c2[1805],_mm_xor_si128(c2[4214],_mm_xor_si128(c2[4538],_mm_xor_si128(c2[3332],_mm_xor_si128(c2[3030],_mm_xor_si128(c2[3330],_mm_xor_si128(c2[965],_mm_xor_si128(c2[1871],_mm_xor_si128(c2[2171],_mm_xor_si128(c2[392],_mm_xor_si128(c2[3101],_mm_xor_si128(c2[4590],_mm_xor_si128(c2[1032],_mm_xor_si128(c2[424],_mm_xor_si128(c2[3126],_mm_xor_si128(c2[3426],_mm_xor_si128(c2[2252],_mm_xor_si128(c2[2857],_mm_xor_si128(c2[3157],_mm_xor_si128(c2[1082],_mm_xor_si128(c2[3786],_mm_xor_si128(c2[3190],_mm_xor_si128(c2[3490],_mm_xor_si128(c2[1112],_mm_xor_si128(c2[823],_mm_xor_si128(c2[1123],_mm_xor_si128(c2[1741],_mm_xor_si128(c2[2350],_mm_xor_si128(c2[2043],_mm_xor_si128(c2[2343],_mm_xor_si128(c2[4140],_mm_xor_si128(c2[3877],_mm_xor_si128(c2[1181],_mm_xor_si128(c2[2075],c2[2375]))))))))))))))))))))))))))))))))))));
+     d2[315]=simde_mm_xor_si128(c2[3909],simde_mm_xor_si128(c2[3009],simde_mm_xor_si128(c2[1805],simde_mm_xor_si128(c2[4214],simde_mm_xor_si128(c2[4538],simde_mm_xor_si128(c2[3332],simde_mm_xor_si128(c2[3030],simde_mm_xor_si128(c2[3330],simde_mm_xor_si128(c2[965],simde_mm_xor_si128(c2[1871],simde_mm_xor_si128(c2[2171],simde_mm_xor_si128(c2[392],simde_mm_xor_si128(c2[3101],simde_mm_xor_si128(c2[4590],simde_mm_xor_si128(c2[1032],simde_mm_xor_si128(c2[424],simde_mm_xor_si128(c2[3126],simde_mm_xor_si128(c2[3426],simde_mm_xor_si128(c2[2252],simde_mm_xor_si128(c2[2857],simde_mm_xor_si128(c2[3157],simde_mm_xor_si128(c2[1082],simde_mm_xor_si128(c2[3786],simde_mm_xor_si128(c2[3190],simde_mm_xor_si128(c2[3490],simde_mm_xor_si128(c2[1112],simde_mm_xor_si128(c2[823],simde_mm_xor_si128(c2[1123],simde_mm_xor_si128(c2[1741],simde_mm_xor_si128(c2[2350],simde_mm_xor_si128(c2[2043],simde_mm_xor_si128(c2[2343],simde_mm_xor_si128(c2[4140],simde_mm_xor_si128(c2[3877],simde_mm_xor_si128(c2[1181],simde_mm_xor_si128(c2[2075],c2[2375]))))))))))))))))))))))))))))))))))));
 
 //row: 22
-     d2[330]=_mm_xor_si128(c2[942],c2[3662]);
+     d2[330]=simde_mm_xor_si128(c2[942],c2[3662]);
 
 //row: 23
-     d2[345]=_mm_xor_si128(c2[1500],_mm_xor_si128(c2[4295],c2[4656]));
+     d2[345]=simde_mm_xor_si128(c2[1500],simde_mm_xor_si128(c2[4295],c2[4656]));
 
 //row: 24
-     d2[360]=_mm_xor_si128(c2[335],_mm_xor_si128(c2[961],c2[878]));
+     d2[360]=simde_mm_xor_si128(c2[335],simde_mm_xor_si128(c2[961],c2[878]));
 
 //row: 25
-     d2[375]=_mm_xor_si128(c2[604],c2[4655]);
+     d2[375]=simde_mm_xor_si128(c2[604],c2[4655]);
 
 //row: 26
-     d2[390]=_mm_xor_si128(c2[4505],_mm_xor_si128(c2[6],_mm_xor_si128(c2[2414],_mm_xor_si128(c2[3605],_mm_xor_si128(c2[3905],_mm_xor_si128(c2[1514],_mm_xor_si128(c2[2701],_mm_xor_si128(c2[310],_mm_xor_si128(c2[335],_mm_xor_si128(c2[635],_mm_xor_si128(c2[3043],_mm_xor_si128(c2[4243],_mm_xor_si128(c2[1837],_mm_xor_si128(c2[4241],_mm_xor_si128(c2[1535],_mm_xor_si128(c2[1835],_mm_xor_si128(c2[1561],_mm_xor_si128(c2[1861],_mm_xor_si128(c2[4269],_mm_xor_si128(c2[3067],_mm_xor_si128(c2[361],_mm_xor_si128(c2[661],_mm_xor_si128(c2[669],_mm_xor_si128(c2[1003],_mm_xor_si128(c2[1303],_mm_xor_si128(c2[3696],_mm_xor_si128(c2[3697],_mm_xor_si128(c2[3997],_mm_xor_si128(c2[1591],_mm_xor_si128(c2[702],_mm_xor_si128(c2[3095],_mm_xor_si128(c2[1628],_mm_xor_si128(c2[1928],_mm_xor_si128(c2[4321],_mm_xor_si128(c2[1320],_mm_xor_si128(c2[3728],_mm_xor_si128(c2[4322],_mm_xor_si128(c2[1631],_mm_xor_si128(c2[1931],_mm_xor_si128(c2[2863],_mm_xor_si128(c2[3163],_mm_xor_si128(c2[757],_mm_xor_si128(c2[4053],_mm_xor_si128(c2[1362],_mm_xor_si128(c2[1662],_mm_xor_si128(c2[1693],_mm_xor_si128(c2[1993],_mm_xor_si128(c2[4386],_mm_xor_si128(c2[4382],_mm_xor_si128(c2[4682],_mm_xor_si128(c2[2291],_mm_xor_si128(c2[4386],_mm_xor_si128(c2[1680],_mm_xor_si128(c2[1980],_mm_xor_si128(c2[1723],_mm_xor_si128(c2[2023],_mm_xor_si128(c2[4416],_mm_xor_si128(c2[2019],_mm_xor_si128(c2[4112],_mm_xor_si128(c2[4412],_mm_xor_si128(c2[814],_mm_xor_si128(c2[2352],_mm_xor_si128(c2[2652],_mm_xor_si128(c2[246],_mm_xor_si128(c2[3246],_mm_xor_si128(c2[840],_mm_xor_si128(c2[3254],_mm_xor_si128(c2[548],_mm_xor_si128(c2[848],_mm_xor_si128(c2[4473],_mm_xor_si128(c2[4773],_mm_xor_si128(c2[2382],_mm_xor_si128(c2[1777],_mm_xor_si128(c2[2077],_mm_xor_si128(c2[4470],_mm_xor_si128(c2[3271],_mm_xor_si128(c2[580],c2[880])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+     d2[390]=simde_mm_xor_si128(c2[4505],simde_mm_xor_si128(c2[6],simde_mm_xor_si128(c2[2414],simde_mm_xor_si128(c2[3605],simde_mm_xor_si128(c2[3905],simde_mm_xor_si128(c2[1514],simde_mm_xor_si128(c2[2701],simde_mm_xor_si128(c2[310],simde_mm_xor_si128(c2[335],simde_mm_xor_si128(c2[635],simde_mm_xor_si128(c2[3043],simde_mm_xor_si128(c2[4243],simde_mm_xor_si128(c2[1837],simde_mm_xor_si128(c2[4241],simde_mm_xor_si128(c2[1535],simde_mm_xor_si128(c2[1835],simde_mm_xor_si128(c2[1561],simde_mm_xor_si128(c2[1861],simde_mm_xor_si128(c2[4269],simde_mm_xor_si128(c2[3067],simde_mm_xor_si128(c2[361],simde_mm_xor_si128(c2[661],simde_mm_xor_si128(c2[669],simde_mm_xor_si128(c2[1003],simde_mm_xor_si128(c2[1303],simde_mm_xor_si128(c2[3696],simde_mm_xor_si128(c2[3697],simde_mm_xor_si128(c2[3997],simde_mm_xor_si128(c2[1591],simde_mm_xor_si128(c2[702],simde_mm_xor_si128(c2[3095],simde_mm_xor_si128(c2[1628],simde_mm_xor_si128(c2[1928],simde_mm_xor_si128(c2[4321],simde_mm_xor_si128(c2[1320],simde_mm_xor_si128(c2[3728],simde_mm_xor_si128(c2[4322],simde_mm_xor_si128(c2[1631],simde_mm_xor_si128(c2[1931],simde_mm_xor_si128(c2[2863],simde_mm_xor_si128(c2[3163],simde_mm_xor_si128(c2[757],simde_mm_xor_si128(c2[4053],simde_mm_xor_si128(c2[1362],simde_mm_xor_si128(c2[1662],simde_mm_xor_si128(c2[1693],simde_mm_xor_si128(c2[1993],simde_mm_xor_si128(c2[4386],simde_mm_xor_si128(c2[4382],simde_mm_xor_si128(c2[4682],simde_mm_xor_si128(c2[2291],simde_mm_xor_si128(c2[4386],simde_mm_xor_si128(c2[1680],simde_mm_xor_si128(c2[1980],simde_mm_xor_si128(c2[1723],simde_mm_xor_si128(c2[2023],simde_mm_xor_si128(c2[4416],simde_mm_xor_si128(c2[2019],simde_mm_xor_si128(c2[4112],simde_mm_xor_si128(c2[4412],simde_mm_xor_si128(c2[814],simde_mm_xor_si128(c2[2352],simde_mm_xor_si128(c2[2652],simde_mm_xor_si128(c2[246],simde_mm_xor_si128(c2[3246],simde_mm_xor_si128(c2[840],simde_mm_xor_si128(c2[3254],simde_mm_xor_si128(c2[548],simde_mm_xor_si128(c2[848],simde_mm_xor_si128(c2[4473],simde_mm_xor_si128(c2[4773],simde_mm_xor_si128(c2[2382],simde_mm_xor_si128(c2[1777],simde_mm_xor_si128(c2[2077],simde_mm_xor_si128(c2[4470],simde_mm_xor_si128(c2[3271],simde_mm_xor_si128(c2[580],c2[880])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
 
 //row: 27
-     d2[405]=_mm_xor_si128(c2[4],c2[1691]);
+     d2[405]=simde_mm_xor_si128(c2[4],c2[1691]);
 
 //row: 28
-     d2[420]=_mm_xor_si128(c2[2130],_mm_xor_si128(c2[69],c2[151]));
+     d2[420]=simde_mm_xor_si128(c2[2130],simde_mm_xor_si128(c2[69],c2[151]));
 
 //row: 29
-     d2[435]=_mm_xor_si128(c2[2701],c2[2823]);
+     d2[435]=simde_mm_xor_si128(c2[2701],c2[2823]);
 
 //row: 30
-     d2[450]=_mm_xor_si128(c2[1562],_mm_xor_si128(c2[3458],_mm_xor_si128(c2[4123],c2[571])));
+     d2[450]=simde_mm_xor_si128(c2[1562],simde_mm_xor_si128(c2[3458],simde_mm_xor_si128(c2[4123],c2[571])));
 
 //row: 31
-     d2[465]=_mm_xor_si128(c2[12],_mm_xor_si128(c2[3911],_mm_xor_si128(c2[2707],_mm_xor_si128(c2[641],_mm_xor_si128(c2[4234],_mm_xor_si128(c2[3932],_mm_xor_si128(c2[4232],_mm_xor_si128(c2[2742],_mm_xor_si128(c2[1867],_mm_xor_si128(c2[2773],_mm_xor_si128(c2[3073],_mm_xor_si128(c2[1294],_mm_xor_si128(c2[4003],_mm_xor_si128(c2[693],_mm_xor_si128(c2[1934],_mm_xor_si128(c2[1326],_mm_xor_si128(c2[4028],_mm_xor_si128(c2[4328],_mm_xor_si128(c2[3154],_mm_xor_si128(c2[3759],_mm_xor_si128(c2[4059],_mm_xor_si128(c2[1984],_mm_xor_si128(c2[4688],_mm_xor_si128(c2[4092],_mm_xor_si128(c2[4392],_mm_xor_si128(c2[2014],_mm_xor_si128(c2[1710],_mm_xor_si128(c2[2010],_mm_xor_si128(c2[2643],_mm_xor_si128(c2[3252],_mm_xor_si128(c2[2945],_mm_xor_si128(c2[3245],_mm_xor_si128(c2[4779],_mm_xor_si128(c2[2083],_mm_xor_si128(c2[2977],c2[3277])))))))))))))))))))))))))))))))))));
+     d2[465]=simde_mm_xor_si128(c2[12],simde_mm_xor_si128(c2[3911],simde_mm_xor_si128(c2[2707],simde_mm_xor_si128(c2[641],simde_mm_xor_si128(c2[4234],simde_mm_xor_si128(c2[3932],simde_mm_xor_si128(c2[4232],simde_mm_xor_si128(c2[2742],simde_mm_xor_si128(c2[1867],simde_mm_xor_si128(c2[2773],simde_mm_xor_si128(c2[3073],simde_mm_xor_si128(c2[1294],simde_mm_xor_si128(c2[4003],simde_mm_xor_si128(c2[693],simde_mm_xor_si128(c2[1934],simde_mm_xor_si128(c2[1326],simde_mm_xor_si128(c2[4028],simde_mm_xor_si128(c2[4328],simde_mm_xor_si128(c2[3154],simde_mm_xor_si128(c2[3759],simde_mm_xor_si128(c2[4059],simde_mm_xor_si128(c2[1984],simde_mm_xor_si128(c2[4688],simde_mm_xor_si128(c2[4092],simde_mm_xor_si128(c2[4392],simde_mm_xor_si128(c2[2014],simde_mm_xor_si128(c2[1710],simde_mm_xor_si128(c2[2010],simde_mm_xor_si128(c2[2643],simde_mm_xor_si128(c2[3252],simde_mm_xor_si128(c2[2945],simde_mm_xor_si128(c2[3245],simde_mm_xor_si128(c2[4779],simde_mm_xor_si128(c2[2083],simde_mm_xor_si128(c2[2977],c2[3277])))))))))))))))))))))))))))))))))));
 
 //row: 32
-     d2[480]=_mm_xor_si128(c2[1501],_mm_xor_si128(c2[1801],_mm_xor_si128(c2[601],_mm_xor_si128(c2[901],_mm_xor_si128(c2[4511],_mm_xor_si128(c2[911],_mm_xor_si128(c2[2130],_mm_xor_si128(c2[2430],_mm_xor_si128(c2[1239],_mm_xor_si128(c2[1237],_mm_xor_si128(c2[3371],_mm_xor_si128(c2[3671],_mm_xor_si128(c2[63],_mm_xor_si128(c2[2798],_mm_xor_si128(c2[3098],_mm_xor_si128(c2[693],_mm_xor_si128(c2[993],_mm_xor_si128(c2[2497],_mm_xor_si128(c2[3423],_mm_xor_si128(c2[3723],_mm_xor_si128(c2[3130],_mm_xor_si128(c2[1333],_mm_xor_si128(c2[4658],_mm_xor_si128(c2[159],_mm_xor_si128(c2[1064],_mm_xor_si128(c2[4350],_mm_xor_si128(c2[3488],_mm_xor_si128(c2[3788],_mm_xor_si128(c2[1393],_mm_xor_si128(c2[1693],_mm_xor_si128(c2[1382],_mm_xor_si128(c2[3518],_mm_xor_si128(c2[3818],_mm_xor_si128(c2[3814],_mm_xor_si128(c2[4147],_mm_xor_si128(c2[4447],_mm_xor_si128(c2[242],_mm_xor_si128(c2[250],_mm_xor_si128(c2[1484],_mm_xor_si128(c2[1784],_mm_xor_si128(c2[3572],_mm_xor_si128(c2[3872],c2[282]))))))))))))))))))))))))))))))))))))))))));
+     d2[480]=simde_mm_xor_si128(c2[1501],simde_mm_xor_si128(c2[1801],simde_mm_xor_si128(c2[601],simde_mm_xor_si128(c2[901],simde_mm_xor_si128(c2[4511],simde_mm_xor_si128(c2[911],simde_mm_xor_si128(c2[2130],simde_mm_xor_si128(c2[2430],simde_mm_xor_si128(c2[1239],simde_mm_xor_si128(c2[1237],simde_mm_xor_si128(c2[3371],simde_mm_xor_si128(c2[3671],simde_mm_xor_si128(c2[63],simde_mm_xor_si128(c2[2798],simde_mm_xor_si128(c2[3098],simde_mm_xor_si128(c2[693],simde_mm_xor_si128(c2[993],simde_mm_xor_si128(c2[2497],simde_mm_xor_si128(c2[3423],simde_mm_xor_si128(c2[3723],simde_mm_xor_si128(c2[3130],simde_mm_xor_si128(c2[1333],simde_mm_xor_si128(c2[4658],simde_mm_xor_si128(c2[159],simde_mm_xor_si128(c2[1064],simde_mm_xor_si128(c2[4350],simde_mm_xor_si128(c2[3488],simde_mm_xor_si128(c2[3788],simde_mm_xor_si128(c2[1393],simde_mm_xor_si128(c2[1693],simde_mm_xor_si128(c2[1382],simde_mm_xor_si128(c2[3518],simde_mm_xor_si128(c2[3818],simde_mm_xor_si128(c2[3814],simde_mm_xor_si128(c2[4147],simde_mm_xor_si128(c2[4447],simde_mm_xor_si128(c2[242],simde_mm_xor_si128(c2[250],simde_mm_xor_si128(c2[1484],simde_mm_xor_si128(c2[1784],simde_mm_xor_si128(c2[3572],simde_mm_xor_si128(c2[3872],c2[282]))))))))))))))))))))))))))))))))))))))))));
 
 //row: 33
-     d2[495]=_mm_xor_si128(c2[3300],_mm_xor_si128(c2[2400],_mm_xor_si128(c2[1211],_mm_xor_si128(c2[3944],_mm_xor_si128(c2[2738],_mm_xor_si128(c2[2736],_mm_xor_si128(c2[371],_mm_xor_si128(c2[1562],_mm_xor_si128(c2[4262],_mm_xor_si128(c2[4597],_mm_xor_si128(c2[2492],_mm_xor_si128(c2[3996],_mm_xor_si128(c2[423],_mm_xor_si128(c2[4629],_mm_xor_si128(c2[2832],_mm_xor_si128(c2[1658],_mm_xor_si128(c2[2563],_mm_xor_si128(c2[488],_mm_xor_si128(c2[3192],_mm_xor_si128(c2[2881],_mm_xor_si128(c2[518],_mm_xor_si128(c2[514],_mm_xor_si128(c2[810],_mm_xor_si128(c2[1147],_mm_xor_si128(c2[1741],_mm_xor_si128(c2[1749],_mm_xor_si128(c2[3283],_mm_xor_si128(c2[572],c2[1781]))))))))))))))))))))))))))));
+     d2[495]=simde_mm_xor_si128(c2[3300],simde_mm_xor_si128(c2[2400],simde_mm_xor_si128(c2[1211],simde_mm_xor_si128(c2[3944],simde_mm_xor_si128(c2[2738],simde_mm_xor_si128(c2[2736],simde_mm_xor_si128(c2[371],simde_mm_xor_si128(c2[1562],simde_mm_xor_si128(c2[4262],simde_mm_xor_si128(c2[4597],simde_mm_xor_si128(c2[2492],simde_mm_xor_si128(c2[3996],simde_mm_xor_si128(c2[423],simde_mm_xor_si128(c2[4629],simde_mm_xor_si128(c2[2832],simde_mm_xor_si128(c2[1658],simde_mm_xor_si128(c2[2563],simde_mm_xor_si128(c2[488],simde_mm_xor_si128(c2[3192],simde_mm_xor_si128(c2[2881],simde_mm_xor_si128(c2[518],simde_mm_xor_si128(c2[514],simde_mm_xor_si128(c2[810],simde_mm_xor_si128(c2[1147],simde_mm_xor_si128(c2[1741],simde_mm_xor_si128(c2[1749],simde_mm_xor_si128(c2[3283],simde_mm_xor_si128(c2[572],c2[1781]))))))))))))))))))))))))))));
 
 //row: 34
-     d2[510]=_mm_xor_si128(c2[2402],_mm_xor_si128(c2[2702],_mm_xor_si128(c2[4502],_mm_xor_si128(c2[1502],_mm_xor_si128(c2[1802],_mm_xor_si128(c2[3602],_mm_xor_si128(c2[613],_mm_xor_si128(c2[2413],_mm_xor_si128(c2[2411],_mm_xor_si128(c2[3031],_mm_xor_si128(c2[3331],_mm_xor_si128(c2[332],_mm_xor_si128(c2[2140],_mm_xor_si128(c2[3940],_mm_xor_si128(c2[2138],_mm_xor_si128(c2[3638],_mm_xor_si128(c2[3938],_mm_xor_si128(c2[4272],_mm_xor_si128(c2[4572],_mm_xor_si128(c2[1573],_mm_xor_si128(c2[964],_mm_xor_si128(c2[2464],_mm_xor_si128(c2[2764],_mm_xor_si128(c2[3699],_mm_xor_si128(c2[3999],_mm_xor_si128(c2[1000],_mm_xor_si128(c2[1594],_mm_xor_si128(c2[1894],_mm_xor_si128(c2[3694],_mm_xor_si128(c2[3398],_mm_xor_si128(c2[399],_mm_xor_si128(c2[4324],_mm_xor_si128(c2[4624],_mm_xor_si128(c2[1625],_mm_xor_si128(c2[4031],_mm_xor_si128(c2[1032],_mm_xor_si128(c2[2234],_mm_xor_si128(c2[3734],_mm_xor_si128(c2[4034],_mm_xor_si128(c2[760],_mm_xor_si128(c2[1060],_mm_xor_si128(c2[2860],_mm_xor_si128(c2[1950],_mm_xor_si128(c2[3450],_mm_xor_si128(c2[3750],_mm_xor_si128(c2[4389],_mm_xor_si128(c2[4689],_mm_xor_si128(c2[1690],_mm_xor_si128(c2[2294],_mm_xor_si128(c2[2594],_mm_xor_si128(c2[4394],_mm_xor_si128(c2[2283],_mm_xor_si128(c2[3783],_mm_xor_si128(c2[4083],_mm_xor_si128(c2[4419],_mm_xor_si128(c2[4719],_mm_xor_si128(c2[1720],_mm_xor_si128(c2[4715],_mm_xor_si128(c2[1416],_mm_xor_si128(c2[1716],_mm_xor_si128(c2[249],_mm_xor_si128(c2[549],_mm_xor_si128(c2[2349],_mm_xor_si128(c2[1143],_mm_xor_si128(c2[2943],_mm_xor_si128(c2[1151],_mm_xor_si128(c2[2651],_mm_xor_si128(c2[2951],_mm_xor_si128(c2[2370],_mm_xor_si128(c2[2670],_mm_xor_si128(c2[4470],_mm_xor_si128(c2[4473],_mm_xor_si128(c2[4773],_mm_xor_si128(c2[1774],_mm_xor_si128(c2[1183],_mm_xor_si128(c2[2683],c2[2983]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+     d2[510]=simde_mm_xor_si128(c2[2402],simde_mm_xor_si128(c2[2702],simde_mm_xor_si128(c2[4502],simde_mm_xor_si128(c2[1502],simde_mm_xor_si128(c2[1802],simde_mm_xor_si128(c2[3602],simde_mm_xor_si128(c2[613],simde_mm_xor_si128(c2[2413],simde_mm_xor_si128(c2[2411],simde_mm_xor_si128(c2[3031],simde_mm_xor_si128(c2[3331],simde_mm_xor_si128(c2[332],simde_mm_xor_si128(c2[2140],simde_mm_xor_si128(c2[3940],simde_mm_xor_si128(c2[2138],simde_mm_xor_si128(c2[3638],simde_mm_xor_si128(c2[3938],simde_mm_xor_si128(c2[4272],simde_mm_xor_si128(c2[4572],simde_mm_xor_si128(c2[1573],simde_mm_xor_si128(c2[964],simde_mm_xor_si128(c2[2464],simde_mm_xor_si128(c2[2764],simde_mm_xor_si128(c2[3699],simde_mm_xor_si128(c2[3999],simde_mm_xor_si128(c2[1000],simde_mm_xor_si128(c2[1594],simde_mm_xor_si128(c2[1894],simde_mm_xor_si128(c2[3694],simde_mm_xor_si128(c2[3398],simde_mm_xor_si128(c2[399],simde_mm_xor_si128(c2[4324],simde_mm_xor_si128(c2[4624],simde_mm_xor_si128(c2[1625],simde_mm_xor_si128(c2[4031],simde_mm_xor_si128(c2[1032],simde_mm_xor_si128(c2[2234],simde_mm_xor_si128(c2[3734],simde_mm_xor_si128(c2[4034],simde_mm_xor_si128(c2[760],simde_mm_xor_si128(c2[1060],simde_mm_xor_si128(c2[2860],simde_mm_xor_si128(c2[1950],simde_mm_xor_si128(c2[3450],simde_mm_xor_si128(c2[3750],simde_mm_xor_si128(c2[4389],simde_mm_xor_si128(c2[4689],simde_mm_xor_si128(c2[1690],simde_mm_xor_si128(c2[2294],simde_mm_xor_si128(c2[2594],simde_mm_xor_si128(c2[4394],simde_mm_xor_si128(c2[2283],simde_mm_xor_si128(c2[3783],simde_mm_xor_si128(c2[4083],simde_mm_xor_si128(c2[4419],simde_mm_xor_si128(c2[4719],simde_mm_xor_si128(c2[1720],simde_mm_xor_si128(c2[4715],simde_mm_xor_si128(c2[1416],simde_mm_xor_si128(c2[1716],simde_mm_xor_si128(c2[249],simde_mm_xor_si128(c2[549],simde_mm_xor_si128(c2[2349],simde_mm_xor_si128(c2[1143],simde_mm_xor_si128(c2[2943],simde_mm_xor_si128(c2[1151],simde_mm_xor_si128(c2[2651],simde_mm_xor_si128(c2[2951],simde_mm_xor_si128(c2[2370],simde_mm_xor_si128(c2[2670],simde_mm_xor_si128(c2[4470],simde_mm_xor_si128(c2[4473],simde_mm_xor_si128(c2[4773],simde_mm_xor_si128(c2[1774],simde_mm_xor_si128(c2[1183],simde_mm_xor_si128(c2[2683],c2[2983]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
 
 //row: 35
-     d2[525]=_mm_xor_si128(c2[5],_mm_xor_si128(c2[305],_mm_xor_si128(c2[4204],_mm_xor_si128(c2[3000],_mm_xor_si128(c2[634],_mm_xor_si128(c2[934],_mm_xor_si128(c2[4542],_mm_xor_si128(c2[4540],_mm_xor_si128(c2[1535],_mm_xor_si128(c2[1860],_mm_xor_si128(c2[2160],_mm_xor_si128(c2[3366],_mm_xor_si128(c2[1302],_mm_xor_si128(c2[1602],_mm_xor_si128(c2[4296],_mm_xor_si128(c2[1001],_mm_xor_si128(c2[2227],_mm_xor_si128(c2[1634],_mm_xor_si128(c2[4621],_mm_xor_si128(c2[3462],_mm_xor_si128(c2[4352],_mm_xor_si128(c2[464],_mm_xor_si128(c2[1992],_mm_xor_si128(c2[2292],_mm_xor_si128(c2[182],_mm_xor_si128(c2[4685],_mm_xor_si128(c2[2322],_mm_xor_si128(c2[2318],_mm_xor_si128(c2[2951],_mm_xor_si128(c2[3545],_mm_xor_si128(c2[3553],_mm_xor_si128(c2[4772],_mm_xor_si128(c2[273],_mm_xor_si128(c2[2376],c2[3570]))))))))))))))))))))))))))))))))));
+     d2[525]=simde_mm_xor_si128(c2[5],simde_mm_xor_si128(c2[305],simde_mm_xor_si128(c2[4204],simde_mm_xor_si128(c2[3000],simde_mm_xor_si128(c2[634],simde_mm_xor_si128(c2[934],simde_mm_xor_si128(c2[4542],simde_mm_xor_si128(c2[4540],simde_mm_xor_si128(c2[1535],simde_mm_xor_si128(c2[1860],simde_mm_xor_si128(c2[2160],simde_mm_xor_si128(c2[3366],simde_mm_xor_si128(c2[1302],simde_mm_xor_si128(c2[1602],simde_mm_xor_si128(c2[4296],simde_mm_xor_si128(c2[1001],simde_mm_xor_si128(c2[2227],simde_mm_xor_si128(c2[1634],simde_mm_xor_si128(c2[4621],simde_mm_xor_si128(c2[3462],simde_mm_xor_si128(c2[4352],simde_mm_xor_si128(c2[464],simde_mm_xor_si128(c2[1992],simde_mm_xor_si128(c2[2292],simde_mm_xor_si128(c2[182],simde_mm_xor_si128(c2[4685],simde_mm_xor_si128(c2[2322],simde_mm_xor_si128(c2[2318],simde_mm_xor_si128(c2[2951],simde_mm_xor_si128(c2[3545],simde_mm_xor_si128(c2[3553],simde_mm_xor_si128(c2[4772],simde_mm_xor_si128(c2[273],simde_mm_xor_si128(c2[2376],c2[3570]))))))))))))))))))))))))))))))))));
 
 //row: 36
-     d2[540]=_mm_xor_si128(c2[611],_mm_xor_si128(c2[67],c2[3216]));
+     d2[540]=simde_mm_xor_si128(c2[611],simde_mm_xor_si128(c2[67],c2[3216]));
 
 //row: 37
-     d2[555]=_mm_xor_si128(c2[3303],_mm_xor_si128(c2[1201],_mm_xor_si128(c2[2403],_mm_xor_si128(c2[301],_mm_xor_si128(c2[1214],_mm_xor_si128(c2[3911],_mm_xor_si128(c2[3932],_mm_xor_si128(c2[1830],_mm_xor_si128(c2[2741],_mm_xor_si128(c2[639],_mm_xor_si128(c2[2739],_mm_xor_si128(c2[337],_mm_xor_si128(c2[637],_mm_xor_si128(c2[374],_mm_xor_si128(c2[3071],_mm_xor_si128(c2[1565],_mm_xor_si128(c2[3962],_mm_xor_si128(c2[4262],_mm_xor_si128(c2[4600],_mm_xor_si128(c2[2498],_mm_xor_si128(c2[2495],_mm_xor_si128(c2[393],_mm_xor_si128(c2[3999],_mm_xor_si128(c2[1897],_mm_xor_si128(c2[426],_mm_xor_si128(c2[3123],_mm_xor_si128(c2[4632],_mm_xor_si128(c2[2530],_mm_xor_si128(c2[2820],_mm_xor_si128(c2[433],_mm_xor_si128(c2[733],_mm_xor_si128(c2[1661],_mm_xor_si128(c2[4358],_mm_xor_si128(c2[2551],_mm_xor_si128(c2[164],_mm_xor_si128(c2[464],_mm_xor_si128(c2[491],_mm_xor_si128(c2[3188],_mm_xor_si128(c2[3180],_mm_xor_si128(c2[1093],_mm_xor_si128(c2[2884],_mm_xor_si128(c2[482],_mm_xor_si128(c2[782],_mm_xor_si128(c2[521],_mm_xor_si128(c2[3218],_mm_xor_si128(c2[517],_mm_xor_si128(c2[2914],_mm_xor_si128(c2[3214],_mm_xor_si128(c2[1150],_mm_xor_si128(c2[3847],_mm_xor_si128(c2[1744],_mm_xor_si128(c2[4441],_mm_xor_si128(c2[1752],_mm_xor_si128(c2[4149],_mm_xor_si128(c2[4449],_mm_xor_si128(c2[3271],_mm_xor_si128(c2[1184],_mm_xor_si128(c2[575],_mm_xor_si128(c2[3272],_mm_xor_si128(c2[1784],_mm_xor_si128(c2[4181],c2[4481])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+     d2[555]=simde_mm_xor_si128(c2[3303],simde_mm_xor_si128(c2[1201],simde_mm_xor_si128(c2[2403],simde_mm_xor_si128(c2[301],simde_mm_xor_si128(c2[1214],simde_mm_xor_si128(c2[3911],simde_mm_xor_si128(c2[3932],simde_mm_xor_si128(c2[1830],simde_mm_xor_si128(c2[2741],simde_mm_xor_si128(c2[639],simde_mm_xor_si128(c2[2739],simde_mm_xor_si128(c2[337],simde_mm_xor_si128(c2[637],simde_mm_xor_si128(c2[374],simde_mm_xor_si128(c2[3071],simde_mm_xor_si128(c2[1565],simde_mm_xor_si128(c2[3962],simde_mm_xor_si128(c2[4262],simde_mm_xor_si128(c2[4600],simde_mm_xor_si128(c2[2498],simde_mm_xor_si128(c2[2495],simde_mm_xor_si128(c2[393],simde_mm_xor_si128(c2[3999],simde_mm_xor_si128(c2[1897],simde_mm_xor_si128(c2[426],simde_mm_xor_si128(c2[3123],simde_mm_xor_si128(c2[4632],simde_mm_xor_si128(c2[2530],simde_mm_xor_si128(c2[2820],simde_mm_xor_si128(c2[433],simde_mm_xor_si128(c2[733],simde_mm_xor_si128(c2[1661],simde_mm_xor_si128(c2[4358],simde_mm_xor_si128(c2[2551],simde_mm_xor_si128(c2[164],simde_mm_xor_si128(c2[464],simde_mm_xor_si128(c2[491],simde_mm_xor_si128(c2[3188],simde_mm_xor_si128(c2[3180],simde_mm_xor_si128(c2[1093],simde_mm_xor_si128(c2[2884],simde_mm_xor_si128(c2[482],simde_mm_xor_si128(c2[782],simde_mm_xor_si128(c2[521],simde_mm_xor_si128(c2[3218],simde_mm_xor_si128(c2[517],simde_mm_xor_si128(c2[2914],simde_mm_xor_si128(c2[3214],simde_mm_xor_si128(c2[1150],simde_mm_xor_si128(c2[3847],simde_mm_xor_si128(c2[1744],simde_mm_xor_si128(c2[4441],simde_mm_xor_si128(c2[1752],simde_mm_xor_si128(c2[4149],simde_mm_xor_si128(c2[4449],simde_mm_xor_si128(c2[3271],simde_mm_xor_si128(c2[1184],simde_mm_xor_si128(c2[575],simde_mm_xor_si128(c2[3272],simde_mm_xor_si128(c2[1784],simde_mm_xor_si128(c2[4181],c2[4481])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
 
 //row: 38
-     d2[570]=_mm_xor_si128(c2[3000],_mm_xor_si128(c2[3300],_mm_xor_si128(c2[2400],_mm_xor_si128(c2[1211],_mm_xor_si128(c2[3644],_mm_xor_si128(c2[3944],_mm_xor_si128(c2[2738],_mm_xor_si128(c2[2736],_mm_xor_si128(c2[3032],_mm_xor_si128(c2[71],_mm_xor_si128(c2[371],_mm_xor_si128(c2[1562],_mm_xor_si128(c2[4297],_mm_xor_si128(c2[4597],_mm_xor_si128(c2[2492],_mm_xor_si128(c2[3996],_mm_xor_si128(c2[423],_mm_xor_si128(c2[4629],_mm_xor_si128(c2[2832],_mm_xor_si128(c2[1658],_mm_xor_si128(c2[2563],_mm_xor_si128(c2[2852],_mm_xor_si128(c2[188],_mm_xor_si128(c2[488],_mm_xor_si128(c2[3192],_mm_xor_si128(c2[2881],_mm_xor_si128(c2[518],_mm_xor_si128(c2[514],_mm_xor_si128(c2[1147],_mm_xor_si128(c2[1741],_mm_xor_si128(c2[1749],_mm_xor_si128(c2[2983],_mm_xor_si128(c2[3283],_mm_xor_si128(c2[572],c2[1781]))))))))))))))))))))))))))))))))));
+     d2[570]=simde_mm_xor_si128(c2[3000],simde_mm_xor_si128(c2[3300],simde_mm_xor_si128(c2[2400],simde_mm_xor_si128(c2[1211],simde_mm_xor_si128(c2[3644],simde_mm_xor_si128(c2[3944],simde_mm_xor_si128(c2[2738],simde_mm_xor_si128(c2[2736],simde_mm_xor_si128(c2[3032],simde_mm_xor_si128(c2[71],simde_mm_xor_si128(c2[371],simde_mm_xor_si128(c2[1562],simde_mm_xor_si128(c2[4297],simde_mm_xor_si128(c2[4597],simde_mm_xor_si128(c2[2492],simde_mm_xor_si128(c2[3996],simde_mm_xor_si128(c2[423],simde_mm_xor_si128(c2[4629],simde_mm_xor_si128(c2[2832],simde_mm_xor_si128(c2[1658],simde_mm_xor_si128(c2[2563],simde_mm_xor_si128(c2[2852],simde_mm_xor_si128(c2[188],simde_mm_xor_si128(c2[488],simde_mm_xor_si128(c2[3192],simde_mm_xor_si128(c2[2881],simde_mm_xor_si128(c2[518],simde_mm_xor_si128(c2[514],simde_mm_xor_si128(c2[1147],simde_mm_xor_si128(c2[1741],simde_mm_xor_si128(c2[1749],simde_mm_xor_si128(c2[2983],simde_mm_xor_si128(c2[3283],simde_mm_xor_si128(c2[572],c2[1781]))))))))))))))))))))))))))))))))));
 
 //row: 39
-     d2[585]=_mm_xor_si128(c2[4205],_mm_xor_si128(c2[4505],_mm_xor_si128(c2[3305],_mm_xor_si128(c2[3605],_mm_xor_si128(c2[2401],_mm_xor_si128(c2[2110],_mm_xor_si128(c2[35],_mm_xor_si128(c2[335],_mm_xor_si128(c2[3943],_mm_xor_si128(c2[3941],_mm_xor_si128(c2[1261],_mm_xor_si128(c2[1561],_mm_xor_si128(c2[2767],_mm_xor_si128(c2[703],_mm_xor_si128(c2[1003],_mm_xor_si128(c2[3397],_mm_xor_si128(c2[3697],_mm_xor_si128(c2[402],_mm_xor_si128(c2[1328],_mm_xor_si128(c2[1628],_mm_xor_si128(c2[1020],_mm_xor_si128(c2[4022],_mm_xor_si128(c2[2563],_mm_xor_si128(c2[2863],_mm_xor_si128(c2[3753],_mm_xor_si128(c2[1393],_mm_xor_si128(c2[1693],_mm_xor_si128(c2[4082],_mm_xor_si128(c2[4382],_mm_xor_si128(c2[4086],_mm_xor_si128(c2[1423],_mm_xor_si128(c2[1723],_mm_xor_si128(c2[1719],_mm_xor_si128(c2[4112],_mm_xor_si128(c2[2052],_mm_xor_si128(c2[2352],_mm_xor_si128(c2[2946],_mm_xor_si128(c2[2954],_mm_xor_si128(c2[4173],_mm_xor_si128(c2[4473],_mm_xor_si128(c2[1477],_mm_xor_si128(c2[1777],c2[2971]))))))))))))))))))))))))))))))))))))))))));
+     d2[585]=simde_mm_xor_si128(c2[4205],simde_mm_xor_si128(c2[4505],simde_mm_xor_si128(c2[3305],simde_mm_xor_si128(c2[3605],simde_mm_xor_si128(c2[2401],simde_mm_xor_si128(c2[2110],simde_mm_xor_si128(c2[35],simde_mm_xor_si128(c2[335],simde_mm_xor_si128(c2[3943],simde_mm_xor_si128(c2[3941],simde_mm_xor_si128(c2[1261],simde_mm_xor_si128(c2[1561],simde_mm_xor_si128(c2[2767],simde_mm_xor_si128(c2[703],simde_mm_xor_si128(c2[1003],simde_mm_xor_si128(c2[3397],simde_mm_xor_si128(c2[3697],simde_mm_xor_si128(c2[402],simde_mm_xor_si128(c2[1328],simde_mm_xor_si128(c2[1628],simde_mm_xor_si128(c2[1020],simde_mm_xor_si128(c2[4022],simde_mm_xor_si128(c2[2563],simde_mm_xor_si128(c2[2863],simde_mm_xor_si128(c2[3753],simde_mm_xor_si128(c2[1393],simde_mm_xor_si128(c2[1693],simde_mm_xor_si128(c2[4082],simde_mm_xor_si128(c2[4382],simde_mm_xor_si128(c2[4086],simde_mm_xor_si128(c2[1423],simde_mm_xor_si128(c2[1723],simde_mm_xor_si128(c2[1719],simde_mm_xor_si128(c2[4112],simde_mm_xor_si128(c2[2052],simde_mm_xor_si128(c2[2352],simde_mm_xor_si128(c2[2946],simde_mm_xor_si128(c2[2954],simde_mm_xor_si128(c2[4173],simde_mm_xor_si128(c2[4473],simde_mm_xor_si128(c2[1477],simde_mm_xor_si128(c2[1777],c2[2971]))))))))))))))))))))))))))))))))))))))))));
 
 //row: 40
-     d2[600]=_mm_xor_si128(c2[1213],_mm_xor_si128(c2[1805],_mm_xor_si128(c2[313],_mm_xor_si128(c2[905],_mm_xor_si128(c2[3908],_mm_xor_si128(c2[4500],_mm_xor_si128(c2[1842],_mm_xor_si128(c2[2434],_mm_xor_si128(c2[636],_mm_xor_si128(c2[1243],_mm_xor_si128(c2[634],_mm_xor_si128(c2[941],_mm_xor_si128(c2[1241],_mm_xor_si128(c2[3068],_mm_xor_si128(c2[3660],_mm_xor_si128(c2[4274],_mm_xor_si128(c2[4566],_mm_xor_si128(c2[67],_mm_xor_si128(c2[4264],_mm_xor_si128(c2[2495],_mm_xor_si128(c2[3102],_mm_xor_si128(c2[390],_mm_xor_si128(c2[997],_mm_xor_si128(c2[1894],_mm_xor_si128(c2[2501],_mm_xor_si128(c2[3120],_mm_xor_si128(c2[3727],_mm_xor_si128(c2[2527],_mm_xor_si128(c2[3134],_mm_xor_si128(c2[730],_mm_xor_si128(c2[1022],_mm_xor_si128(c2[1322],_mm_xor_si128(c2[4355],_mm_xor_si128(c2[163],_mm_xor_si128(c2[461],_mm_xor_si128(c2[753],_mm_xor_si128(c2[1053],_mm_xor_si128(c2[3185],_mm_xor_si128(c2[3792],_mm_xor_si128(c2[1090],_mm_xor_si128(c2[1682],_mm_xor_si128(c2[794],_mm_xor_si128(c2[1086],_mm_xor_si128(c2[1386],_mm_xor_si128(c2[3215],_mm_xor_si128(c2[3822],_mm_xor_si128(c2[3211],_mm_xor_si128(c2[3518],_mm_xor_si128(c2[3818],_mm_xor_si128(c2[3844],_mm_xor_si128(c2[4451],_mm_xor_si128(c2[4453],_mm_xor_si128(c2[246],_mm_xor_si128(c2[4446],_mm_xor_si128(c2[4753],_mm_xor_si128(c2[254],_mm_xor_si128(c2[1181],_mm_xor_si128(c2[1773],_mm_xor_si128(c2[3284],_mm_xor_si128(c2[3876],_mm_xor_si128(c2[4478],_mm_xor_si128(c2[4770],c2[271]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+     d2[600]=simde_mm_xor_si128(c2[1213],simde_mm_xor_si128(c2[1805],simde_mm_xor_si128(c2[313],simde_mm_xor_si128(c2[905],simde_mm_xor_si128(c2[3908],simde_mm_xor_si128(c2[4500],simde_mm_xor_si128(c2[1842],simde_mm_xor_si128(c2[2434],simde_mm_xor_si128(c2[636],simde_mm_xor_si128(c2[1243],simde_mm_xor_si128(c2[634],simde_mm_xor_si128(c2[941],simde_mm_xor_si128(c2[1241],simde_mm_xor_si128(c2[3068],simde_mm_xor_si128(c2[3660],simde_mm_xor_si128(c2[4274],simde_mm_xor_si128(c2[4566],simde_mm_xor_si128(c2[67],simde_mm_xor_si128(c2[4264],simde_mm_xor_si128(c2[2495],simde_mm_xor_si128(c2[3102],simde_mm_xor_si128(c2[390],simde_mm_xor_si128(c2[997],simde_mm_xor_si128(c2[1894],simde_mm_xor_si128(c2[2501],simde_mm_xor_si128(c2[3120],simde_mm_xor_si128(c2[3727],simde_mm_xor_si128(c2[2527],simde_mm_xor_si128(c2[3134],simde_mm_xor_si128(c2[730],simde_mm_xor_si128(c2[1022],simde_mm_xor_si128(c2[1322],simde_mm_xor_si128(c2[4355],simde_mm_xor_si128(c2[163],simde_mm_xor_si128(c2[461],simde_mm_xor_si128(c2[753],simde_mm_xor_si128(c2[1053],simde_mm_xor_si128(c2[3185],simde_mm_xor_si128(c2[3792],simde_mm_xor_si128(c2[1090],simde_mm_xor_si128(c2[1682],simde_mm_xor_si128(c2[794],simde_mm_xor_si128(c2[1086],simde_mm_xor_si128(c2[1386],simde_mm_xor_si128(c2[3215],simde_mm_xor_si128(c2[3822],simde_mm_xor_si128(c2[3211],simde_mm_xor_si128(c2[3518],simde_mm_xor_si128(c2[3818],simde_mm_xor_si128(c2[3844],simde_mm_xor_si128(c2[4451],simde_mm_xor_si128(c2[4453],simde_mm_xor_si128(c2[246],simde_mm_xor_si128(c2[4446],simde_mm_xor_si128(c2[4753],simde_mm_xor_si128(c2[254],simde_mm_xor_si128(c2[1181],simde_mm_xor_si128(c2[1773],simde_mm_xor_si128(c2[3284],simde_mm_xor_si128(c2[3876],simde_mm_xor_si128(c2[4478],simde_mm_xor_si128(c2[4770],c2[271]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
 
 //row: 41
-     d2[615]=_mm_xor_si128(c2[2108],_mm_xor_si128(c2[2408],_mm_xor_si128(c2[1508],_mm_xor_si128(c2[304],_mm_xor_si128(c2[2737],_mm_xor_si128(c2[3037],_mm_xor_si128(c2[1831],_mm_xor_si128(c2[1844],_mm_xor_si128(c2[1533],_mm_xor_si128(c2[3963],_mm_xor_si128(c2[4263],_mm_xor_si128(c2[670],_mm_xor_si128(c2[3390],_mm_xor_si128(c2[3690],_mm_xor_si128(c2[1600],_mm_xor_si128(c2[3104],_mm_xor_si128(c2[4330],_mm_xor_si128(c2[3722],_mm_xor_si128(c2[1925],_mm_xor_si128(c2[751],_mm_xor_si128(c2[1656],_mm_xor_si128(c2[2263],_mm_xor_si128(c2[4080],_mm_xor_si128(c2[4380],_mm_xor_si128(c2[2285],_mm_xor_si128(c2[1989],_mm_xor_si128(c2[4410],_mm_xor_si128(c2[4421],_mm_xor_si128(c2[240],_mm_xor_si128(c2[849],_mm_xor_si128(c2[842],_mm_xor_si128(c2[2076],_mm_xor_si128(c2[2376],_mm_xor_si128(c2[4479],c2[874]))))))))))))))))))))))))))))))))));
+     d2[615]=simde_mm_xor_si128(c2[2108],simde_mm_xor_si128(c2[2408],simde_mm_xor_si128(c2[1508],simde_mm_xor_si128(c2[304],simde_mm_xor_si128(c2[2737],simde_mm_xor_si128(c2[3037],simde_mm_xor_si128(c2[1831],simde_mm_xor_si128(c2[1844],simde_mm_xor_si128(c2[1533],simde_mm_xor_si128(c2[3963],simde_mm_xor_si128(c2[4263],simde_mm_xor_si128(c2[670],simde_mm_xor_si128(c2[3390],simde_mm_xor_si128(c2[3690],simde_mm_xor_si128(c2[1600],simde_mm_xor_si128(c2[3104],simde_mm_xor_si128(c2[4330],simde_mm_xor_si128(c2[3722],simde_mm_xor_si128(c2[1925],simde_mm_xor_si128(c2[751],simde_mm_xor_si128(c2[1656],simde_mm_xor_si128(c2[2263],simde_mm_xor_si128(c2[4080],simde_mm_xor_si128(c2[4380],simde_mm_xor_si128(c2[2285],simde_mm_xor_si128(c2[1989],simde_mm_xor_si128(c2[4410],simde_mm_xor_si128(c2[4421],simde_mm_xor_si128(c2[240],simde_mm_xor_si128(c2[849],simde_mm_xor_si128(c2[842],simde_mm_xor_si128(c2[2076],simde_mm_xor_si128(c2[2376],simde_mm_xor_si128(c2[4479],c2[874]))))))))))))))))))))))))))))))))));
   }
 }
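The hunks above complete the mechanical rewrite of this generated table: every x86-only `_mm_xor_si128` SSE2 intrinsic becomes SIMDE's `simde_mm_xor_si128`, which resolves to native SSE2 on x86_64 and to NEON or scalar fallbacks on ARM, so the same generated encoder source builds on every supported architecture. A minimal sketch of the pattern (illustration only, not part of the patch; the direct SIMDE include path is an assumption, since the files above reach SIMDE through "PHY/sse_intrin.h"):

    /* Hedged illustration: XOR two 16-byte blocks with SIMDE so one
     * source compiles to SSE2 on x86 and NEON (or scalar) elsewhere. */
    #include <stdint.h>
    #include <simde/x86/sse2.h>   /* assumed include path for this sketch */

    static inline void xor16(const uint8_t *a, const uint8_t *b, uint8_t *out)
    {
      /* Unaligned 128-bit loads, lane-wise XOR, unaligned store. */
      simde__m128i va = simde_mm_loadu_si128((simde__m128i const *)a);
      simde__m128i vb = simde_mm_loadu_si128((simde__m128i const *)b);
      simde_mm_storeu_si128((simde__m128i *)out, simde_mm_xor_si128(va, vb));
    }

The generated code above is this same operation unrolled, with the c2[] offsets encoding the parity-check connections of each output row.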
diff --git a/openair1/PHY/CODING/nrLDPC_encoder/ldpc_BG2_Zc256_byte.c b/openair1/PHY/CODING/nrLDPC_encoder/ldpc_BG2_Zc256_byte.c
index cb7ce41314642033f42b16451e89aa30f9748670..d04ca7769044c2cae3e30dd443a93e94f54a511c 100644
--- a/openair1/PHY/CODING/nrLDPC_encoder/ldpc_BG2_Zc256_byte.c
+++ b/openair1/PHY/CODING/nrLDPC_encoder/ldpc_BG2_Zc256_byte.c
@@ -1,9 +1,10 @@
+#ifdef __AVX2__
 #include "PHY/sse_intrin.h"
 // generated code for Zc=256, byte encoding
 static inline __attribute__ ((no_sanitize("address", "undefined"))) void ldpc_BG2_Zc256_byte(uint8_t *c,uint8_t *d) {
-  __m256i *csimd=(__m256i *)c,*dsimd=(__m256i *)d;
+  simde__m256i *csimd=(simde__m256i *)c,*dsimd=(simde__m256i *)d;
 
-  __m256i *c2,*d2;
+  simde__m256i *c2,*d2;
 
   int i2;
   for (i2=0; i2<8; i2++) {
@@ -137,3 +138,4 @@ static inline __attribute__ ((no_sanitize("address", "undefined"))) void ldpc_BG
      d2[328]=simde_mm256_xor_si256(c2[4963],simde_mm256_xor_si256(c2[4803],simde_mm256_xor_si256(c2[4480],simde_mm256_xor_si256(c2[966],simde_mm256_xor_si256(c2[1783],simde_mm256_xor_si256(c2[1623],simde_mm256_xor_si256(c2[1143],simde_mm256_xor_si256(c2[4659],simde_mm256_xor_si256(c2[180],simde_mm256_xor_si256(c2[354],simde_mm256_xor_si256(c2[194],simde_mm256_xor_si256(c2[2437],simde_mm256_xor_si256(c2[2612],simde_mm256_xor_si256(c2[2452],simde_mm256_xor_si256(c2[4368],simde_mm256_xor_si256(c2[213],simde_mm256_xor_si256(c2[2947],simde_mm256_xor_si256(c2[1509],simde_mm256_xor_si256(c2[3104],simde_mm256_xor_si256(c2[2967],simde_mm256_xor_si256(c2[4726],simde_mm256_xor_si256(c2[887],simde_mm256_xor_si256(c2[3137],simde_mm256_xor_si256(c2[2977],simde_mm256_xor_si256(c2[3778],simde_mm256_xor_si256(c2[1058],simde_mm256_xor_si256(c2[2832],simde_mm256_xor_si256(c2[1877],simde_mm256_xor_si256(c2[3490],simde_mm256_xor_si256(c2[931],simde_mm256_xor_si256(c2[1252],simde_mm256_xor_si256(c2[626],simde_mm256_xor_si256(c2[466],simde_mm256_xor_si256(c2[2867],c2[3511]))))))))))))))))))))))))))))))))));
   }
 }
+#endif
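Note the pairing: the hunk above fences the 256-bit table behind `#ifdef __AVX2__`, while the new `ldpc_BG2_Zc256_byte_128.c` below opens with `#ifndef __AVX2__`, so exactly one definition of `ldpc_BG2_Zc256_byte()` survives preprocessing on any given target. Both variants walk the same Zc=256 byte-encoded data and differ only in SIMD word width: 8 iterations over 32-byte `simde__m256i` words versus 16 iterations over 16-byte `simde__m128i` words, 256 bytes either way. A hedged sketch of the selection logic (the enum name is hypothetical, for illustration only):

    /* Illustration, not part of the patch: complementary guards pick one
     * word width per build.
     * 8 * sizeof(simde__m256i) == 16 * sizeof(simde__m128i) == 256 bytes. */
    #ifdef __AVX2__
      enum { LDPC_SIMD_WORDS = 8 };   /* 256-bit path: ldpc_BG2_Zc256_byte.c     */
    #else
      enum { LDPC_SIMD_WORDS = 16 };  /* 128-bit path: ldpc_BG2_Zc256_byte_128.c */
    #endif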
diff --git a/openair1/PHY/CODING/nrLDPC_encoder/ldpc_BG2_Zc256_byte_128.c b/openair1/PHY/CODING/nrLDPC_encoder/ldpc_BG2_Zc256_byte_128.c
new file mode 100644
index 0000000000000000000000000000000000000000..adfa8a47392250b5259fe6a8ce698f9764ccf327
--- /dev/null
+++ b/openair1/PHY/CODING/nrLDPC_encoder/ldpc_BG2_Zc256_byte_128.c
@@ -0,0 +1,141 @@
+#ifndef __AVX2__
+#include "PHY/sse_intrin.h"
+// generated code for Zc=256, byte encoding
+static inline void ldpc_BG2_Zc256_byte(uint8_t *c,uint8_t *d) {
+  simde__m128i *csimd=(simde__m128i *)c,*dsimd=(simde__m128i *)d;
+
+  simde__m128i *c2,*d2;
+
+  int i2;
+  for (i2=0; i2<16; i2++) {
+     c2=&csimd[i2];
+     d2=&dsimd[i2];
+
+//row: 0
+     d2[0]=simde_mm_xor_si128(c2[2560],simde_mm_xor_si128(c2[1930],simde_mm_xor_si128(c2[5],simde_mm_xor_si128(c2[1319],simde_mm_xor_si128(c2[359],simde_mm_xor_si128(c2[2272],simde_mm_xor_si128(c2[3596],simde_mm_xor_si128(c2[2947],simde_mm_xor_si128(c2[2977],simde_mm_xor_si128(c2[1706],simde_mm_xor_si128(c2[3618],simde_mm_xor_si128(c2[3983],simde_mm_xor_si128(c2[1091],simde_mm_xor_si128(c2[4297],simde_mm_xor_si128(c2[4007],simde_mm_xor_si128(c2[2406],simde_mm_xor_si128(c2[4043],simde_mm_xor_si128(c2[526],simde_mm_xor_si128(c2[205],simde_mm_xor_si128(c2[3753],simde_mm_xor_si128(c2[1827],simde_mm_xor_si128(c2[5069],simde_mm_xor_si128(c2[5070],simde_mm_xor_si128(c2[577],simde_mm_xor_si128(c2[4140],simde_mm_xor_si128(c2[3823],c2[5095]))))))))))))))))))))))))));
+
+//row: 1
+     d2[16]=simde_mm_xor_si128(c2[2880],simde_mm_xor_si128(c2[2560],simde_mm_xor_si128(c2[1930],simde_mm_xor_si128(c2[5],simde_mm_xor_si128(c2[1639],simde_mm_xor_si128(c2[1319],simde_mm_xor_si128(c2[359],simde_mm_xor_si128(c2[2272],simde_mm_xor_si128(c2[3916],simde_mm_xor_si128(c2[3596],simde_mm_xor_si128(c2[2947],simde_mm_xor_si128(c2[3297],simde_mm_xor_si128(c2[2977],simde_mm_xor_si128(c2[1706],simde_mm_xor_si128(c2[3618],simde_mm_xor_si128(c2[3983],simde_mm_xor_si128(c2[1091],simde_mm_xor_si128(c2[4297],simde_mm_xor_si128(c2[4007],simde_mm_xor_si128(c2[2406],simde_mm_xor_si128(c2[4363],simde_mm_xor_si128(c2[4043],simde_mm_xor_si128(c2[526],simde_mm_xor_si128(c2[205],simde_mm_xor_si128(c2[3753],simde_mm_xor_si128(c2[1827],simde_mm_xor_si128(c2[5069],simde_mm_xor_si128(c2[5070],simde_mm_xor_si128(c2[577],simde_mm_xor_si128(c2[4460],simde_mm_xor_si128(c2[4140],simde_mm_xor_si128(c2[3823],c2[5095]))))))))))))))))))))))))))))))));
+
+//row: 2
+     d2[32]=simde_mm_xor_si128(c2[2880],simde_mm_xor_si128(c2[2560],simde_mm_xor_si128(c2[2250],simde_mm_xor_si128(c2[1930],simde_mm_xor_si128(c2[5],simde_mm_xor_si128(c2[1639],simde_mm_xor_si128(c2[1319],simde_mm_xor_si128(c2[359],simde_mm_xor_si128(c2[2272],simde_mm_xor_si128(c2[3916],simde_mm_xor_si128(c2[3596],simde_mm_xor_si128(c2[2947],simde_mm_xor_si128(c2[3297],simde_mm_xor_si128(c2[2977],simde_mm_xor_si128(c2[2026],simde_mm_xor_si128(c2[1706],simde_mm_xor_si128(c2[3618],simde_mm_xor_si128(c2[4303],simde_mm_xor_si128(c2[3983],simde_mm_xor_si128(c2[1091],simde_mm_xor_si128(c2[4297],simde_mm_xor_si128(c2[4327],simde_mm_xor_si128(c2[4007],simde_mm_xor_si128(c2[2406],simde_mm_xor_si128(c2[4363],simde_mm_xor_si128(c2[4043],simde_mm_xor_si128(c2[846],simde_mm_xor_si128(c2[526],simde_mm_xor_si128(c2[205],simde_mm_xor_si128(c2[4073],simde_mm_xor_si128(c2[3753],simde_mm_xor_si128(c2[1827],simde_mm_xor_si128(c2[270],simde_mm_xor_si128(c2[5069],simde_mm_xor_si128(c2[5070],simde_mm_xor_si128(c2[577],simde_mm_xor_si128(c2[4460],simde_mm_xor_si128(c2[4140],simde_mm_xor_si128(c2[4143],simde_mm_xor_si128(c2[3823],c2[5095]))))))))))))))))))))))))))))))))))))))));
+
+//row: 3
+     d2[48]=simde_mm_xor_si128(c2[2560],simde_mm_xor_si128(c2[1930],simde_mm_xor_si128(c2[5],simde_mm_xor_si128(c2[1319],simde_mm_xor_si128(c2[359],simde_mm_xor_si128(c2[2592],simde_mm_xor_si128(c2[2272],simde_mm_xor_si128(c2[3596],simde_mm_xor_si128(c2[3267],simde_mm_xor_si128(c2[2947],simde_mm_xor_si128(c2[2977],simde_mm_xor_si128(c2[1706],simde_mm_xor_si128(c2[3618],simde_mm_xor_si128(c2[3983],simde_mm_xor_si128(c2[1091],simde_mm_xor_si128(c2[4617],simde_mm_xor_si128(c2[4297],simde_mm_xor_si128(c2[4007],simde_mm_xor_si128(c2[2726],simde_mm_xor_si128(c2[2406],simde_mm_xor_si128(c2[4043],simde_mm_xor_si128(c2[526],simde_mm_xor_si128(c2[525],simde_mm_xor_si128(c2[205],simde_mm_xor_si128(c2[3753],simde_mm_xor_si128(c2[2147],simde_mm_xor_si128(c2[1827],simde_mm_xor_si128(c2[5069],simde_mm_xor_si128(c2[5070],simde_mm_xor_si128(c2[897],simde_mm_xor_si128(c2[577],simde_mm_xor_si128(c2[4140],simde_mm_xor_si128(c2[3823],simde_mm_xor_si128(c2[296],c2[5095]))))))))))))))))))))))))))))))))));
+
+//row: 4
+     d2[64]=simde_mm_xor_si128(c2[5],simde_mm_xor_si128(c2[4804],simde_mm_xor_si128(c2[4174],simde_mm_xor_si128(c2[2249],simde_mm_xor_si128(c2[971],simde_mm_xor_si128(c2[3883],simde_mm_xor_si128(c2[3563],simde_mm_xor_si128(c2[2603],simde_mm_xor_si128(c2[4516],simde_mm_xor_si128(c2[1965],simde_mm_xor_si128(c2[1025],simde_mm_xor_si128(c2[705],simde_mm_xor_si128(c2[72],simde_mm_xor_si128(c2[422],simde_mm_xor_si128(c2[102],simde_mm_xor_si128(c2[3950],simde_mm_xor_si128(c2[743],simde_mm_xor_si128(c2[1092],simde_mm_xor_si128(c2[3335],simde_mm_xor_si128(c2[1422],simde_mm_xor_si128(c2[1132],simde_mm_xor_si128(c2[4650],simde_mm_xor_si128(c2[1472],simde_mm_xor_si128(c2[1152],simde_mm_xor_si128(c2[2754],simde_mm_xor_si128(c2[2433],simde_mm_xor_si128(c2[878],simde_mm_xor_si128(c2[4071],simde_mm_xor_si128(c2[2178],simde_mm_xor_si128(c2[2179],simde_mm_xor_si128(c2[2821],simde_mm_xor_si128(c2[1569],simde_mm_xor_si128(c2[1249],simde_mm_xor_si128(c2[932],c2[2220]))))))))))))))))))))))))))))))))));
+
+//row: 5
+     d2[80]=simde_mm_xor_si128(c2[7],simde_mm_xor_si128(c2[4806],simde_mm_xor_si128(c2[4160],simde_mm_xor_si128(c2[2251],simde_mm_xor_si128(c2[2254],simde_mm_xor_si128(c2[3885],simde_mm_xor_si128(c2[3565],simde_mm_xor_si128(c2[2605],simde_mm_xor_si128(c2[4518],simde_mm_xor_si128(c2[2914],simde_mm_xor_si128(c2[1027],simde_mm_xor_si128(c2[707],simde_mm_xor_si128(c2[74],simde_mm_xor_si128(c2[424],simde_mm_xor_si128(c2[104],simde_mm_xor_si128(c2[3936],simde_mm_xor_si128(c2[745],simde_mm_xor_si128(c2[1094],simde_mm_xor_si128(c2[3337],simde_mm_xor_si128(c2[1408],simde_mm_xor_si128(c2[1134],simde_mm_xor_si128(c2[4652],simde_mm_xor_si128(c2[812],simde_mm_xor_si128(c2[1474],simde_mm_xor_si128(c2[1154],simde_mm_xor_si128(c2[2756],simde_mm_xor_si128(c2[2435],simde_mm_xor_si128(c2[864],simde_mm_xor_si128(c2[4073],simde_mm_xor_si128(c2[5033],simde_mm_xor_si128(c2[2180],simde_mm_xor_si128(c2[2181],simde_mm_xor_si128(c2[2823],simde_mm_xor_si128(c2[1571],simde_mm_xor_si128(c2[1251],simde_mm_xor_si128(c2[934],c2[2222]))))))))))))))))))))))))))))))))))));
+
+//row: 6
+     d2[96]=simde_mm_xor_si128(c2[2250],simde_mm_xor_si128(c2[1930],simde_mm_xor_si128(c2[1284],simde_mm_xor_si128(c2[4494],simde_mm_xor_si128(c2[3529],simde_mm_xor_si128(c2[993],simde_mm_xor_si128(c2[673],simde_mm_xor_si128(c2[4832],simde_mm_xor_si128(c2[1642],simde_mm_xor_si128(c2[3270],simde_mm_xor_si128(c2[2950],simde_mm_xor_si128(c2[2317],simde_mm_xor_si128(c2[2667],simde_mm_xor_si128(c2[2347],simde_mm_xor_si128(c2[1060],simde_mm_xor_si128(c2[2988],simde_mm_xor_si128(c2[3337],simde_mm_xor_si128(c2[461],simde_mm_xor_si128(c2[3651],simde_mm_xor_si128(c2[3361],simde_mm_xor_si128(c2[1760],simde_mm_xor_si128(c2[1454],simde_mm_xor_si128(c2[3717],simde_mm_xor_si128(c2[3397],simde_mm_xor_si128(c2[4999],simde_mm_xor_si128(c2[4678],simde_mm_xor_si128(c2[3107],simde_mm_xor_si128(c2[1197],simde_mm_xor_si128(c2[4386],simde_mm_xor_si128(c2[4423],simde_mm_xor_si128(c2[4424],simde_mm_xor_si128(c2[5066],simde_mm_xor_si128(c2[3814],simde_mm_xor_si128(c2[3494],simde_mm_xor_si128(c2[3177],simde_mm_xor_si128(c2[4449],c2[4129]))))))))))))))))))))))))))))))))))));
+
+//row: 7
+     d2[112]=simde_mm_xor_si128(c2[3840],simde_mm_xor_si128(c2[3520],simde_mm_xor_si128(c2[3847],simde_mm_xor_si128(c2[2890],simde_mm_xor_si128(c2[3201],simde_mm_xor_si128(c2[965],simde_mm_xor_si128(c2[1292],simde_mm_xor_si128(c2[2599],simde_mm_xor_si128(c2[2279],simde_mm_xor_si128(c2[2606],simde_mm_xor_si128(c2[1319],simde_mm_xor_si128(c2[1646],simde_mm_xor_si128(c2[3879],simde_mm_xor_si128(c2[3232],simde_mm_xor_si128(c2[3559],simde_mm_xor_si128(c2[360],simde_mm_xor_si128(c2[4876],simde_mm_xor_si128(c2[4556],simde_mm_xor_si128(c2[4867],simde_mm_xor_si128(c2[4554],simde_mm_xor_si128(c2[3907],simde_mm_xor_si128(c2[4234],simde_mm_xor_si128(c2[4257],simde_mm_xor_si128(c2[3937],simde_mm_xor_si128(c2[4264],simde_mm_xor_si128(c2[2666],simde_mm_xor_si128(c2[2977],simde_mm_xor_si128(c2[4578],simde_mm_xor_si128(c2[4905],simde_mm_xor_si128(c2[4943],simde_mm_xor_si128(c2[135],simde_mm_xor_si128(c2[2051],simde_mm_xor_si128(c2[2378],simde_mm_xor_si128(c2[769],simde_mm_xor_si128(c2[138],simde_mm_xor_si128(c2[449],simde_mm_xor_si128(c2[4967],simde_mm_xor_si128(c2[175],simde_mm_xor_si128(c2[4013],simde_mm_xor_si128(c2[3366],simde_mm_xor_si128(c2[3693],simde_mm_xor_si128(c2[1129],simde_mm_xor_si128(c2[204],simde_mm_xor_si128(c2[5003],simde_mm_xor_si128(c2[195],simde_mm_xor_si128(c2[1486],simde_mm_xor_si128(c2[1797],simde_mm_xor_si128(c2[1796],simde_mm_xor_si128(c2[1165],simde_mm_xor_si128(c2[1476],simde_mm_xor_si128(c2[4713],simde_mm_xor_si128(c2[5024],simde_mm_xor_si128(c2[3434],simde_mm_xor_si128(c2[2787],simde_mm_xor_si128(c2[3114],simde_mm_xor_si128(c2[4072],simde_mm_xor_si128(c2[910],simde_mm_xor_si128(c2[1221],simde_mm_xor_si128(c2[911],simde_mm_xor_si128(c2[1222],simde_mm_xor_si128(c2[2184],simde_mm_xor_si128(c2[1537],simde_mm_xor_si128(c2[1864],simde_mm_xor_si128(c2[301],simde_mm_xor_si128(c2[5100],simde_mm_xor_si128(c2[292],simde_mm_xor_si128(c2[4783],simde_mm_xor_si128(c2[5094],simde_mm_xor_si128(c2[1583],simde_mm_xor_si128(c2[936],c2[1263]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 8
+     d2[128]=simde_mm_xor_si128(c2[4814],simde_mm_xor_si128(c2[4494],simde_mm_xor_si128(c2[4168],simde_mm_xor_si128(c2[3848],simde_mm_xor_si128(c2[1923],simde_mm_xor_si128(c2[4488],simde_mm_xor_si128(c2[3557],simde_mm_xor_si128(c2[3237],simde_mm_xor_si128(c2[2277],simde_mm_xor_si128(c2[4206],simde_mm_xor_si128(c2[4517],simde_mm_xor_si128(c2[715],simde_mm_xor_si128(c2[395],simde_mm_xor_si128(c2[4865],simde_mm_xor_si128(c2[96],simde_mm_xor_si128(c2[4911],simde_mm_xor_si128(c2[3944],simde_mm_xor_si128(c2[3624],simde_mm_xor_si128(c2[417],simde_mm_xor_si128(c2[1102],simde_mm_xor_si128(c2[782],simde_mm_xor_si128(c2[3009],simde_mm_xor_si128(c2[1096],simde_mm_xor_si128(c2[1126],simde_mm_xor_si128(c2[806],simde_mm_xor_si128(c2[4324],simde_mm_xor_si128(c2[1162],simde_mm_xor_si128(c2[842],simde_mm_xor_si128(c2[2764],simde_mm_xor_si128(c2[2444],simde_mm_xor_si128(c2[2123],simde_mm_xor_si128(c2[872],simde_mm_xor_si128(c2[552],simde_mm_xor_si128(c2[3745],simde_mm_xor_si128(c2[2188],simde_mm_xor_si128(c2[1868],simde_mm_xor_si128(c2[1869],simde_mm_xor_si128(c2[2511],simde_mm_xor_si128(c2[1259],simde_mm_xor_si128(c2[939],simde_mm_xor_si128(c2[942],simde_mm_xor_si128(c2[622],c2[1894]))))))))))))))))))))))))))))))))))))))))));
+
+//row: 9
+     d2[144]=simde_mm_xor_si128(c2[0],simde_mm_xor_si128(c2[1604],simde_mm_xor_si128(c2[4815],simde_mm_xor_si128(c2[974],simde_mm_xor_si128(c2[4169],simde_mm_xor_si128(c2[4168],simde_mm_xor_si128(c2[2244],simde_mm_xor_si128(c2[3878],simde_mm_xor_si128(c2[363],simde_mm_xor_si128(c2[3558],simde_mm_xor_si128(c2[4522],simde_mm_xor_si128(c2[2598],simde_mm_xor_si128(c2[1316],simde_mm_xor_si128(c2[4527],simde_mm_xor_si128(c2[3564],simde_mm_xor_si128(c2[1036],simde_mm_xor_si128(c2[2624],simde_mm_xor_si128(c2[716],simde_mm_xor_si128(c2[1991],simde_mm_xor_si128(c2[67],simde_mm_xor_si128(c2[417],simde_mm_xor_si128(c2[2021],simde_mm_xor_si128(c2[97],simde_mm_xor_si128(c2[750],simde_mm_xor_si128(c2[3945],simde_mm_xor_si128(c2[2662],simde_mm_xor_si128(c2[738],simde_mm_xor_si128(c2[3011],simde_mm_xor_si128(c2[1103],simde_mm_xor_si128(c2[135],simde_mm_xor_si128(c2[3330],simde_mm_xor_si128(c2[3341],simde_mm_xor_si128(c2[1417],simde_mm_xor_si128(c2[3051],simde_mm_xor_si128(c2[1127],simde_mm_xor_si128(c2[1450],simde_mm_xor_si128(c2[4645],simde_mm_xor_si128(c2[1483],simde_mm_xor_si128(c2[3087],simde_mm_xor_si128(c2[1163],simde_mm_xor_si128(c2[4673],simde_mm_xor_si128(c2[2765],simde_mm_xor_si128(c2[4352],simde_mm_xor_si128(c2[2444],simde_mm_xor_si128(c2[2797],simde_mm_xor_si128(c2[873],simde_mm_xor_si128(c2[871],simde_mm_xor_si128(c2[4066],simde_mm_xor_si128(c2[4097],simde_mm_xor_si128(c2[2189],simde_mm_xor_si128(c2[4098],simde_mm_xor_si128(c2[2190],simde_mm_xor_si128(c2[4740],simde_mm_xor_si128(c2[2816],simde_mm_xor_si128(c2[4428],simde_mm_xor_si128(c2[1580],simde_mm_xor_si128(c2[3168],simde_mm_xor_si128(c2[1260],simde_mm_xor_si128(c2[2851],simde_mm_xor_si128(c2[943],simde_mm_xor_si128(c2[4139],c2[2215])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 10
+     d2[160]=simde_mm_xor_si128(c2[3520],simde_mm_xor_si128(c2[2923],simde_mm_xor_si128(c2[192],c2[1831])));
+
+//row: 11
+     d2[176]=simde_mm_xor_si128(c2[4],simde_mm_xor_si128(c2[4493],simde_mm_xor_si128(c2[2568],simde_mm_xor_si128(c2[3520],simde_mm_xor_si128(c2[3882],simde_mm_xor_si128(c2[2922],simde_mm_xor_si128(c2[36],simde_mm_xor_si128(c2[4835],simde_mm_xor_si128(c2[1024],simde_mm_xor_si128(c2[711],simde_mm_xor_si128(c2[391],simde_mm_xor_si128(c2[421],simde_mm_xor_si128(c2[4269],simde_mm_xor_si128(c2[1062],simde_mm_xor_si128(c2[1411],simde_mm_xor_si128(c2[3654],simde_mm_xor_si128(c2[2061],simde_mm_xor_si128(c2[1741],simde_mm_xor_si128(c2[1451],simde_mm_xor_si128(c2[170],simde_mm_xor_si128(c2[4969],simde_mm_xor_si128(c2[1487],simde_mm_xor_si128(c2[3073],simde_mm_xor_si128(c2[3072],simde_mm_xor_si128(c2[2752],simde_mm_xor_si128(c2[1197],simde_mm_xor_si128(c2[4710],simde_mm_xor_si128(c2[4390],simde_mm_xor_si128(c2[4078],simde_mm_xor_si128(c2[2497],simde_mm_xor_si128(c2[2498],simde_mm_xor_si128(c2[3460],simde_mm_xor_si128(c2[3140],simde_mm_xor_si128(c2[1568],simde_mm_xor_si128(c2[1251],simde_mm_xor_si128(c2[2859],simde_mm_xor_si128(c2[2539],c2[941])))))))))))))))))))))))))))))))))))));
+
+//row: 12
+     d2[192]=simde_mm_xor_si128(c2[2241],simde_mm_xor_si128(c2[1921],simde_mm_xor_si128(c2[1291],simde_mm_xor_si128(c2[4485],simde_mm_xor_si128(c2[1000],simde_mm_xor_si128(c2[680],simde_mm_xor_si128(c2[4839],simde_mm_xor_si128(c2[1633],simde_mm_xor_si128(c2[4835],simde_mm_xor_si128(c2[3277],simde_mm_xor_si128(c2[2957],simde_mm_xor_si128(c2[2308],simde_mm_xor_si128(c2[2658],simde_mm_xor_si128(c2[2338],simde_mm_xor_si128(c2[1067],simde_mm_xor_si128(c2[2979],simde_mm_xor_si128(c2[4902],simde_mm_xor_si128(c2[3328],simde_mm_xor_si128(c2[452],simde_mm_xor_si128(c2[3658],simde_mm_xor_si128(c2[3368],simde_mm_xor_si128(c2[1767],simde_mm_xor_si128(c2[3724],simde_mm_xor_si128(c2[3404],simde_mm_xor_si128(c2[5006],simde_mm_xor_si128(c2[4685],simde_mm_xor_si128(c2[3114],simde_mm_xor_si128(c2[1188],simde_mm_xor_si128(c2[4430],simde_mm_xor_si128(c2[4431],simde_mm_xor_si128(c2[5057],simde_mm_xor_si128(c2[3821],simde_mm_xor_si128(c2[3501],simde_mm_xor_si128(c2[3168],c2[4456]))))))))))))))))))))))))))))))))));
+
+//row: 13
+     d2[208]=simde_mm_xor_si128(c2[1934],simde_mm_xor_si128(c2[1288],simde_mm_xor_si128(c2[4482],simde_mm_xor_si128(c2[965],simde_mm_xor_si128(c2[677],simde_mm_xor_si128(c2[4836],simde_mm_xor_si128(c2[1966],simde_mm_xor_si128(c2[1646],simde_mm_xor_si128(c2[672],simde_mm_xor_si128(c2[2954],simde_mm_xor_si128(c2[2625],simde_mm_xor_si128(c2[2305],simde_mm_xor_si128(c2[2351],simde_mm_xor_si128(c2[1064],simde_mm_xor_si128(c2[2976],simde_mm_xor_si128(c2[3341],simde_mm_xor_si128(c2[449],simde_mm_xor_si128(c2[3975],simde_mm_xor_si128(c2[3655],simde_mm_xor_si128(c2[3365],simde_mm_xor_si128(c2[2084],simde_mm_xor_si128(c2[1764],simde_mm_xor_si128(c2[3401],simde_mm_xor_si128(c2[5003],simde_mm_xor_si128(c2[5002],simde_mm_xor_si128(c2[4682],simde_mm_xor_si128(c2[3111],simde_mm_xor_si128(c2[1505],simde_mm_xor_si128(c2[1185],simde_mm_xor_si128(c2[4427],simde_mm_xor_si128(c2[4428],simde_mm_xor_si128(c2[271],simde_mm_xor_si128(c2[5070],simde_mm_xor_si128(c2[2178],simde_mm_xor_si128(c2[3498],simde_mm_xor_si128(c2[3181],simde_mm_xor_si128(c2[4773],c2[4453])))))))))))))))))))))))))))))))))))));
+
+//row: 14
+     d2[224]=simde_mm_xor_si128(c2[3840],simde_mm_xor_si128(c2[3520],simde_mm_xor_si128(c2[15],simde_mm_xor_si128(c2[2890],simde_mm_xor_si128(c2[4488],simde_mm_xor_si128(c2[965],simde_mm_xor_si128(c2[2563],simde_mm_xor_si128(c2[2599],simde_mm_xor_si128(c2[2279],simde_mm_xor_si128(c2[3877],simde_mm_xor_si128(c2[1319],simde_mm_xor_si128(c2[2917],simde_mm_xor_si128(c2[47],simde_mm_xor_si128(c2[3232],simde_mm_xor_si128(c2[4846],simde_mm_xor_si128(c2[999],simde_mm_xor_si128(c2[4876],simde_mm_xor_si128(c2[4556],simde_mm_xor_si128(c2[1035],simde_mm_xor_si128(c2[706],simde_mm_xor_si128(c2[3907],simde_mm_xor_si128(c2[386],simde_mm_xor_si128(c2[4257],simde_mm_xor_si128(c2[3937],simde_mm_xor_si128(c2[416],simde_mm_xor_si128(c2[2666],simde_mm_xor_si128(c2[4264],simde_mm_xor_si128(c2[4578],simde_mm_xor_si128(c2[1057],simde_mm_xor_si128(c2[4943],simde_mm_xor_si128(c2[1422],simde_mm_xor_si128(c2[2051],simde_mm_xor_si128(c2[3649],simde_mm_xor_si128(c2[2056],simde_mm_xor_si128(c2[138],simde_mm_xor_si128(c2[1736],simde_mm_xor_si128(c2[4967],simde_mm_xor_si128(c2[1446],simde_mm_xor_si128(c2[165],simde_mm_xor_si128(c2[3366],simde_mm_xor_si128(c2[4964],simde_mm_xor_si128(c2[204],simde_mm_xor_si128(c2[5003],simde_mm_xor_si128(c2[1482],simde_mm_xor_si128(c2[1486],simde_mm_xor_si128(c2[3084],simde_mm_xor_si128(c2[3083],simde_mm_xor_si128(c2[1165],simde_mm_xor_si128(c2[2763],simde_mm_xor_si128(c2[521],simde_mm_xor_si128(c2[4713],simde_mm_xor_si128(c2[1192],simde_mm_xor_si128(c2[4705],simde_mm_xor_si128(c2[2787],simde_mm_xor_si128(c2[4385],simde_mm_xor_si128(c2[910],simde_mm_xor_si128(c2[2508],simde_mm_xor_si128(c2[911],simde_mm_xor_si128(c2[2509],simde_mm_xor_si128(c2[3471],simde_mm_xor_si128(c2[1537],simde_mm_xor_si128(c2[3151],simde_mm_xor_si128(c2[301],simde_mm_xor_si128(c2[5100],simde_mm_xor_si128(c2[1579],simde_mm_xor_si128(c2[4783],simde_mm_xor_si128(c2[1262],simde_mm_xor_si128(c2[2854],simde_mm_xor_si128(c2[936],c2[2534])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 15
+     d2[240]=simde_mm_xor_si128(c2[4493],simde_mm_xor_si128(c2[2251],simde_mm_xor_si128(c2[4173],simde_mm_xor_si128(c2[1605],simde_mm_xor_si128(c2[3527],simde_mm_xor_si128(c2[4815],simde_mm_xor_si128(c2[1602],simde_mm_xor_si128(c2[963],simde_mm_xor_si128(c2[3236],simde_mm_xor_si128(c2[994],simde_mm_xor_si128(c2[2916],simde_mm_xor_si128(c2[34],simde_mm_xor_si128(c2[1956],simde_mm_xor_si128(c2[1963],simde_mm_xor_si128(c2[3885],simde_mm_xor_si128(c2[394],simde_mm_xor_si128(c2[3271],simde_mm_xor_si128(c2[74],simde_mm_xor_si128(c2[2638],simde_mm_xor_si128(c2[4544],simde_mm_xor_si128(c2[4910],simde_mm_xor_si128(c2[2668],simde_mm_xor_si128(c2[4590],simde_mm_xor_si128(c2[1381],simde_mm_xor_si128(c2[3303],simde_mm_xor_si128(c2[3309],simde_mm_xor_si128(c2[96],simde_mm_xor_si128(c2[3658],simde_mm_xor_si128(c2[461],simde_mm_xor_si128(c2[782],simde_mm_xor_si128(c2[2688],simde_mm_xor_si128(c2[3972],simde_mm_xor_si128(c2[775],simde_mm_xor_si128(c2[3682],simde_mm_xor_si128(c2[485],simde_mm_xor_si128(c2[2081],simde_mm_xor_si128(c2[4003],simde_mm_xor_si128(c2[841],simde_mm_xor_si128(c2[3718],simde_mm_xor_si128(c2[521],simde_mm_xor_si128(c2[201],simde_mm_xor_si128(c2[2123],simde_mm_xor_si128(c2[4999],simde_mm_xor_si128(c2[1802],simde_mm_xor_si128(c2[3428],simde_mm_xor_si128(c2[231],simde_mm_xor_si128(c2[1518],simde_mm_xor_si128(c2[3424],simde_mm_xor_si128(c2[4744],simde_mm_xor_si128(c2[1547],simde_mm_xor_si128(c2[4745],simde_mm_xor_si128(c2[1548],simde_mm_xor_si128(c2[268],simde_mm_xor_si128(c2[2190],simde_mm_xor_si128(c2[938],simde_mm_xor_si128(c2[3815],simde_mm_xor_si128(c2[618],simde_mm_xor_si128(c2[3498],simde_mm_xor_si128(c2[301],simde_mm_xor_si128(c2[4770],c2[1573]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 16
+     d2[256]=simde_mm_xor_si128(c2[321],simde_mm_xor_si128(c2[3535],simde_mm_xor_si128(c2[1],simde_mm_xor_si128(c2[3215],simde_mm_xor_si128(c2[2889],simde_mm_xor_si128(c2[4490],simde_mm_xor_si128(c2[2569],simde_mm_xor_si128(c2[2565],simde_mm_xor_si128(c2[644],simde_mm_xor_si128(c2[4199],simde_mm_xor_si128(c2[2278],simde_mm_xor_si128(c2[3879],simde_mm_xor_si128(c2[1958],simde_mm_xor_si128(c2[2919],simde_mm_xor_si128(c2[998],simde_mm_xor_si128(c2[4832],simde_mm_xor_si128(c2[2927],simde_mm_xor_si128(c2[3564],simde_mm_xor_si128(c2[1357],simde_mm_xor_si128(c2[4555],simde_mm_xor_si128(c2[1037],simde_mm_xor_si128(c2[4235],simde_mm_xor_si128(c2[388],simde_mm_xor_si128(c2[3586],simde_mm_xor_si128(c2[738],simde_mm_xor_si128(c2[3936],simde_mm_xor_si128(c2[418],simde_mm_xor_si128(c2[3616],simde_mm_xor_si128(c2[2665],simde_mm_xor_si128(c2[4266],simde_mm_xor_si128(c2[2345],simde_mm_xor_si128(c2[1059],simde_mm_xor_si128(c2[4257],simde_mm_xor_si128(c2[4942],simde_mm_xor_si128(c2[1408],simde_mm_xor_si128(c2[4622],simde_mm_xor_si128(c2[3651],simde_mm_xor_si128(c2[1730],simde_mm_xor_si128(c2[1738],simde_mm_xor_si128(c2[4936],simde_mm_xor_si128(c2[4966],simde_mm_xor_si128(c2[1448],simde_mm_xor_si128(c2[4646],simde_mm_xor_si128(c2[4966],simde_mm_xor_si128(c2[3045],simde_mm_xor_si128(c2[1804],simde_mm_xor_si128(c2[5002],simde_mm_xor_si128(c2[1484],simde_mm_xor_si128(c2[4682],simde_mm_xor_si128(c2[1485],simde_mm_xor_si128(c2[3086],simde_mm_xor_si128(c2[1165],simde_mm_xor_si128(c2[2765],simde_mm_xor_si128(c2[844],simde_mm_xor_si128(c2[4712],simde_mm_xor_si128(c2[1194],simde_mm_xor_si128(c2[4392],simde_mm_xor_si128(c2[4387],simde_mm_xor_si128(c2[2466],simde_mm_xor_si128(c2[909],simde_mm_xor_si128(c2[2510],simde_mm_xor_si128(c2[589],simde_mm_xor_si128(c2[2511],simde_mm_xor_si128(c2[590],simde_mm_xor_si128(c2[3137],simde_mm_xor_si128(c2[1216],simde_mm_xor_si128(c2[1901],simde_mm_xor_si128(c2[5099],simde_mm_xor_si128(c2[1581],simde_mm_xor_si128(c2[4779],simde_mm_xor_si128(c2[4782],simde_mm_xor_si128(c2[1248],simde_mm_xor_si128(c2[4462],simde_mm_xor_si128(c2[2536],simde_mm_xor_si128(c2[615],c2[4776])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 17
+     d2[272]=simde_mm_xor_si128(c2[3527],simde_mm_xor_si128(c2[2884],simde_mm_xor_si128(c2[3207],simde_mm_xor_si128(c2[2564],simde_mm_xor_si128(c2[2254],simde_mm_xor_si128(c2[2561],simde_mm_xor_si128(c2[1934],simde_mm_xor_si128(c2[652],simde_mm_xor_si128(c2[9],simde_mm_xor_si128(c2[2286],simde_mm_xor_si128(c2[1643],simde_mm_xor_si128(c2[1966],simde_mm_xor_si128(c2[1323],simde_mm_xor_si128(c2[1006],simde_mm_xor_si128(c2[363],simde_mm_xor_si128(c2[2919],simde_mm_xor_si128(c2[2276],simde_mm_xor_si128(c2[4527],simde_mm_xor_si128(c2[4547],simde_mm_xor_si128(c2[3904],simde_mm_xor_si128(c2[4227],simde_mm_xor_si128(c2[3584],simde_mm_xor_si128(c2[3594],simde_mm_xor_si128(c2[2951],simde_mm_xor_si128(c2[3944],simde_mm_xor_si128(c2[3301],simde_mm_xor_si128(c2[3624],simde_mm_xor_si128(c2[2981],simde_mm_xor_si128(c2[2030],simde_mm_xor_si128(c2[2337],simde_mm_xor_si128(c2[1710],simde_mm_xor_si128(c2[4265],simde_mm_xor_si128(c2[3622],simde_mm_xor_si128(c2[4291],simde_mm_xor_si128(c2[4614],simde_mm_xor_si128(c2[3971],simde_mm_xor_si128(c2[1738],simde_mm_xor_si128(c2[1095],simde_mm_xor_si128(c2[4928],simde_mm_xor_si128(c2[4301],simde_mm_xor_si128(c2[4331],simde_mm_xor_si128(c2[4654],simde_mm_xor_si128(c2[4011],simde_mm_xor_si128(c2[3053],simde_mm_xor_si128(c2[2410],simde_mm_xor_si128(c2[4007],simde_mm_xor_si128(c2[4994],simde_mm_xor_si128(c2[4367],simde_mm_xor_si128(c2[4674],simde_mm_xor_si128(c2[4047],simde_mm_xor_si128(c2[834],simde_mm_xor_si128(c2[1157],simde_mm_xor_si128(c2[514],simde_mm_xor_si128(c2[836],simde_mm_xor_si128(c2[193],simde_mm_xor_si128(c2[4077],simde_mm_xor_si128(c2[4384],simde_mm_xor_si128(c2[3757],simde_mm_xor_si128(c2[2474],simde_mm_xor_si128(c2[1831],simde_mm_xor_si128(c2[258],simde_mm_xor_si128(c2[581],simde_mm_xor_si128(c2[5057],simde_mm_xor_si128(c2[582],simde_mm_xor_si128(c2[5058],simde_mm_xor_si128(c2[1224],simde_mm_xor_si128(c2[581],simde_mm_xor_si128(c2[5091],simde_mm_xor_si128(c2[4448],simde_mm_xor_si128(c2[4771],simde_mm_xor_si128(c2[4128],simde_mm_xor_si128(c2[4131],simde_mm_xor_si128(c2[4454],simde_mm_xor_si128(c2[3811],simde_mm_xor_si128(c2[623],c2[5099])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 18
+     d2[288]=simde_mm_xor_si128(c2[3853],simde_mm_xor_si128(c2[844],c2[867]));
+
+//row: 19
+     d2[304]=simde_mm_xor_si128(c2[332],simde_mm_xor_si128(c2[4805],simde_mm_xor_si128(c2[2880],simde_mm_xor_si128(c2[2245],simde_mm_xor_si128(c2[4194],simde_mm_xor_si128(c2[3234],simde_mm_xor_si128(c2[44],simde_mm_xor_si128(c2[1313],simde_mm_xor_si128(c2[1352],simde_mm_xor_si128(c2[719],simde_mm_xor_si128(c2[749],simde_mm_xor_si128(c2[4581],simde_mm_xor_si128(c2[1390],simde_mm_xor_si128(c2[1739],simde_mm_xor_si128(c2[3982],simde_mm_xor_si128(c2[2053],simde_mm_xor_si128(c2[1763],simde_mm_xor_si128(c2[162],simde_mm_xor_si128(c2[1799],simde_mm_xor_si128(c2[3401],simde_mm_xor_si128(c2[3080],simde_mm_xor_si128(c2[1509],simde_mm_xor_si128(c2[4718],simde_mm_xor_si128(c2[2825],simde_mm_xor_si128(c2[2826],simde_mm_xor_si128(c2[3468],simde_mm_xor_si128(c2[1896],simde_mm_xor_si128(c2[1579],c2[2851]))))))))))))))))))))))))))));
+
+//row: 20
+     d2[320]=simde_mm_xor_si128(c2[1922],simde_mm_xor_si128(c2[1602],simde_mm_xor_si128(c2[972],simde_mm_xor_si128(c2[4166],simde_mm_xor_si128(c2[681],simde_mm_xor_si128(c2[361],simde_mm_xor_si128(c2[4520],simde_mm_xor_si128(c2[1314],simde_mm_xor_si128(c2[3233],simde_mm_xor_si128(c2[2958],simde_mm_xor_si128(c2[2638],simde_mm_xor_si128(c2[1989],simde_mm_xor_si128(c2[2339],simde_mm_xor_si128(c2[2019],simde_mm_xor_si128(c2[748],simde_mm_xor_si128(c2[2660],simde_mm_xor_si128(c2[3009],simde_mm_xor_si128(c2[133],simde_mm_xor_si128(c2[3339],simde_mm_xor_si128(c2[3014],simde_mm_xor_si128(c2[3049],simde_mm_xor_si128(c2[1448],simde_mm_xor_si128(c2[3405],simde_mm_xor_si128(c2[3085],simde_mm_xor_si128(c2[4687],simde_mm_xor_si128(c2[4366],simde_mm_xor_si128(c2[2795],simde_mm_xor_si128(c2[869],simde_mm_xor_si128(c2[4111],simde_mm_xor_si128(c2[4096],simde_mm_xor_si128(c2[4738],simde_mm_xor_si128(c2[3502],simde_mm_xor_si128(c2[3182],simde_mm_xor_si128(c2[2849],c2[4137]))))))))))))))))))))))))))))))))));
+
+//row: 21
+     d2[336]=simde_mm_xor_si128(c2[3213],simde_mm_xor_si128(c2[2567],simde_mm_xor_si128(c2[642],simde_mm_xor_si128(c2[3844],simde_mm_xor_si128(c2[1956],simde_mm_xor_si128(c2[996],simde_mm_xor_si128(c2[3245],simde_mm_xor_si128(c2[2925],simde_mm_xor_si128(c2[4233],simde_mm_xor_si128(c2[3904],simde_mm_xor_si128(c2[3584],simde_mm_xor_si128(c2[3630],simde_mm_xor_si128(c2[2343],simde_mm_xor_si128(c2[4271],simde_mm_xor_si128(c2[4620],simde_mm_xor_si128(c2[1728],simde_mm_xor_si128(c2[135],simde_mm_xor_si128(c2[4934],simde_mm_xor_si128(c2[4644],simde_mm_xor_si128(c2[3363],simde_mm_xor_si128(c2[3043],simde_mm_xor_si128(c2[4680],simde_mm_xor_si128(c2[1163],simde_mm_xor_si128(c2[1162],simde_mm_xor_si128(c2[842],simde_mm_xor_si128(c2[4390],simde_mm_xor_si128(c2[2784],simde_mm_xor_si128(c2[2464],simde_mm_xor_si128(c2[587],simde_mm_xor_si128(c2[588],simde_mm_xor_si128(c2[1550],simde_mm_xor_si128(c2[1230],simde_mm_xor_si128(c2[3458],simde_mm_xor_si128(c2[4777],simde_mm_xor_si128(c2[4460],simde_mm_xor_si128(c2[933],c2[613]))))))))))))))))))))))))))))))))))));
+
+//row: 22
+     d2[352]=simde_mm_xor_si128(c2[4525],c2[4867]);
+
+//row: 23
+     d2[368]=simde_mm_xor_si128(c2[2241],simde_mm_xor_si128(c2[3630],c2[4654]));
+
+//row: 24
+     d2[384]=simde_mm_xor_si128(c2[4514],simde_mm_xor_si128(c2[3592],c2[2848]));
+
+//row: 25
+     d2[400]=simde_mm_xor_si128(c2[1294],c2[4009]);
+
+//row: 26
+     d2[416]=simde_mm_xor_si128(c2[2890],simde_mm_xor_si128(c2[2570],simde_mm_xor_si128(c2[648],simde_mm_xor_si128(c2[2244],simde_mm_xor_si128(c2[1924],simde_mm_xor_si128(c2[2],simde_mm_xor_si128(c2[15],simde_mm_xor_si128(c2[3212],simde_mm_xor_si128(c2[1633],simde_mm_xor_si128(c2[1313],simde_mm_xor_si128(c2[4526],simde_mm_xor_si128(c2[353],simde_mm_xor_si128(c2[3566],simde_mm_xor_si128(c2[680],simde_mm_xor_si128(c2[2282],simde_mm_xor_si128(c2[360],simde_mm_xor_si128(c2[3910],simde_mm_xor_si128(c2[3590],simde_mm_xor_si128(c2[1668],simde_mm_xor_si128(c2[1355],simde_mm_xor_si128(c2[2957],simde_mm_xor_si128(c2[1035],simde_mm_xor_si128(c2[4225],simde_mm_xor_si128(c2[3307],simde_mm_xor_si128(c2[2987],simde_mm_xor_si128(c2[1065],simde_mm_xor_si128(c2[2020],simde_mm_xor_si128(c2[1700],simde_mm_xor_si128(c2[4897],simde_mm_xor_si128(c2[3628],simde_mm_xor_si128(c2[1706],simde_mm_xor_si128(c2[4297],simde_mm_xor_si128(c2[3977],simde_mm_xor_si128(c2[2055],simde_mm_xor_si128(c2[1101],simde_mm_xor_si128(c2[4298],simde_mm_xor_si128(c2[2689],simde_mm_xor_si128(c2[4291],simde_mm_xor_si128(c2[2369],simde_mm_xor_si128(c2[4321],simde_mm_xor_si128(c2[4001],simde_mm_xor_si128(c2[2095],simde_mm_xor_si128(c2[814],simde_mm_xor_si128(c2[2400],simde_mm_xor_si128(c2[494],simde_mm_xor_si128(c2[4357],simde_mm_xor_si128(c2[4037],simde_mm_xor_si128(c2[2115],simde_mm_xor_si128(c2[840],simde_mm_xor_si128(c2[520],simde_mm_xor_si128(c2[3717],simde_mm_xor_si128(c2[3716],simde_mm_xor_si128(c2[199],simde_mm_xor_si128(c2[3396],simde_mm_xor_si128(c2[4067],simde_mm_xor_si128(c2[3747],simde_mm_xor_si128(c2[1825],simde_mm_xor_si128(c2[235],simde_mm_xor_si128(c2[1837],simde_mm_xor_si128(c2[5034],simde_mm_xor_si128(c2[5032],simde_mm_xor_si128(c2[264],simde_mm_xor_si128(c2[5063],simde_mm_xor_si128(c2[3141],simde_mm_xor_si128(c2[5064],simde_mm_xor_si128(c2[3142],simde_mm_xor_si128(c2[4104],simde_mm_xor_si128(c2[587],simde_mm_xor_si128(c2[3784],simde_mm_xor_si128(c2[4454],simde_mm_xor_si128(c2[4134],simde_mm_xor_si128(c2[2212],simde_mm_xor_si128(c2[4137],simde_mm_xor_si128(c2[3817],simde_mm_xor_si128(c2[1895],simde_mm_xor_si128(c2[3503],simde_mm_xor_si128(c2[5089],c2[3183])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 27
+     d2[432]=simde_mm_xor_si128(c2[2560],c2[2441]);
+
+//row: 28
+     d2[448]=simde_mm_xor_si128(c2[678],simde_mm_xor_si128(c2[1670],c2[2408]));
+
+//row: 29
+     d2[464]=simde_mm_xor_si128(c2[641],c2[3969]);
+
+//row: 30
+     d2[480]=simde_mm_xor_si128(c2[2308],simde_mm_xor_si128(c2[175],simde_mm_xor_si128(c2[3104],c2[1573])));
+
+//row: 31
+     d2[496]=simde_mm_xor_si128(c2[2880],simde_mm_xor_si128(c2[2250],simde_mm_xor_si128(c2[325],simde_mm_xor_si128(c2[1639],simde_mm_xor_si128(c2[679],simde_mm_xor_si128(c2[2912],simde_mm_xor_si128(c2[2592],simde_mm_xor_si128(c2[3238],simde_mm_xor_si128(c2[3916],simde_mm_xor_si128(c2[3587],simde_mm_xor_si128(c2[3267],simde_mm_xor_si128(c2[3297],simde_mm_xor_si128(c2[2026],simde_mm_xor_si128(c2[3938],simde_mm_xor_si128(c2[4303],simde_mm_xor_si128(c2[1411],simde_mm_xor_si128(c2[4937],simde_mm_xor_si128(c2[4617],simde_mm_xor_si128(c2[4327],simde_mm_xor_si128(c2[3046],simde_mm_xor_si128(c2[2726],simde_mm_xor_si128(c2[4363],simde_mm_xor_si128(c2[846],simde_mm_xor_si128(c2[845],simde_mm_xor_si128(c2[525],simde_mm_xor_si128(c2[4073],simde_mm_xor_si128(c2[2467],simde_mm_xor_si128(c2[2147],simde_mm_xor_si128(c2[270],simde_mm_xor_si128(c2[271],simde_mm_xor_si128(c2[1217],simde_mm_xor_si128(c2[897],simde_mm_xor_si128(c2[4460],simde_mm_xor_si128(c2[4143],simde_mm_xor_si128(c2[616],c2[296])))))))))))))))))))))))))))))))))));
+
+//row: 32
+     d2[512]=simde_mm_xor_si128(c2[4810],simde_mm_xor_si128(c2[4490],simde_mm_xor_si128(c2[4164],simde_mm_xor_si128(c2[3844],simde_mm_xor_si128(c2[1935],simde_mm_xor_si128(c2[655],simde_mm_xor_si128(c2[3553],simde_mm_xor_si128(c2[3233],simde_mm_xor_si128(c2[2273],simde_mm_xor_si128(c2[4202],simde_mm_xor_si128(c2[711],simde_mm_xor_si128(c2[391],simde_mm_xor_si128(c2[4877],simde_mm_xor_si128(c2[108],simde_mm_xor_si128(c2[4907],simde_mm_xor_si128(c2[3940],simde_mm_xor_si128(c2[3620],simde_mm_xor_si128(c2[429],simde_mm_xor_si128(c2[1098],simde_mm_xor_si128(c2[778],simde_mm_xor_si128(c2[3021],simde_mm_xor_si128(c2[1092],simde_mm_xor_si128(c2[1122],simde_mm_xor_si128(c2[802],simde_mm_xor_si128(c2[4320],simde_mm_xor_si128(c2[4002],simde_mm_xor_si128(c2[1158],simde_mm_xor_si128(c2[838],simde_mm_xor_si128(c2[2760],simde_mm_xor_si128(c2[2440],simde_mm_xor_si128(c2[2119],simde_mm_xor_si128(c2[868],simde_mm_xor_si128(c2[548],simde_mm_xor_si128(c2[3757],simde_mm_xor_si128(c2[2184],simde_mm_xor_si128(c2[1864],simde_mm_xor_si128(c2[1865],simde_mm_xor_si128(c2[2507],simde_mm_xor_si128(c2[1255],simde_mm_xor_si128(c2[935],simde_mm_xor_si128(c2[938],simde_mm_xor_si128(c2[618],c2[1890]))))))))))))))))))))))))))))))))))))))))));
+
+//row: 33
+     d2[528]=simde_mm_xor_si128(c2[975],simde_mm_xor_si128(c2[329],simde_mm_xor_si128(c2[3523],simde_mm_xor_si128(c2[4837],simde_mm_xor_si128(c2[3877],simde_mm_xor_si128(c2[687],simde_mm_xor_si128(c2[1995],simde_mm_xor_si128(c2[1346],simde_mm_xor_si128(c2[1352],simde_mm_xor_si128(c2[1376],simde_mm_xor_si128(c2[105],simde_mm_xor_si128(c2[2017],simde_mm_xor_si128(c2[2382],simde_mm_xor_si128(c2[4609],simde_mm_xor_si128(c2[2696],simde_mm_xor_si128(c2[2406],simde_mm_xor_si128(c2[805],simde_mm_xor_si128(c2[2442],simde_mm_xor_si128(c2[4044],simde_mm_xor_si128(c2[3723],simde_mm_xor_si128(c2[2152],simde_mm_xor_si128(c2[226],simde_mm_xor_si128(c2[1514],simde_mm_xor_si128(c2[3468],simde_mm_xor_si128(c2[3469],simde_mm_xor_si128(c2[4111],simde_mm_xor_si128(c2[2539],simde_mm_xor_si128(c2[2222],c2[3494]))))))))))))))))))))))))))));
+
+//row: 34
+     d2[544]=simde_mm_xor_si128(c2[4485],simde_mm_xor_si128(c2[4165],simde_mm_xor_si128(c2[3842],simde_mm_xor_si128(c2[3855],simde_mm_xor_si128(c2[3535],simde_mm_xor_si128(c2[3212],simde_mm_xor_si128(c2[1610],simde_mm_xor_si128(c2[1287],simde_mm_xor_si128(c2[969],simde_mm_xor_si128(c2[3244],simde_mm_xor_si128(c2[2924],simde_mm_xor_si128(c2[2601],simde_mm_xor_si128(c2[1964],simde_mm_xor_si128(c2[1641],simde_mm_xor_si128(c2[3874],simde_mm_xor_si128(c2[3877],simde_mm_xor_si128(c2[3554],simde_mm_xor_si128(c2[386],simde_mm_xor_si128(c2[66],simde_mm_xor_si128(c2[4878],simde_mm_xor_si128(c2[4549],simde_mm_xor_si128(c2[4552],simde_mm_xor_si128(c2[4229],simde_mm_xor_si128(c2[4902],simde_mm_xor_si128(c2[4582],simde_mm_xor_si128(c2[4259],simde_mm_xor_si128(c2[3631],simde_mm_xor_si128(c2[3311],simde_mm_xor_si128(c2[2988],simde_mm_xor_si128(c2[104],simde_mm_xor_si128(c2[4900],simde_mm_xor_si128(c2[773],simde_mm_xor_si128(c2[453],simde_mm_xor_si128(c2[130],simde_mm_xor_si128(c2[2696],simde_mm_xor_si128(c2[2373],simde_mm_xor_si128(c2[780],simde_mm_xor_si128(c2[783],simde_mm_xor_si128(c2[460],simde_mm_xor_si128(c2[813],simde_mm_xor_si128(c2[493],simde_mm_xor_si128(c2[170],simde_mm_xor_si128(c2[4008],simde_mm_xor_si128(c2[4011],simde_mm_xor_si128(c2[3688],simde_mm_xor_si128(c2[833],simde_mm_xor_si128(c2[513],simde_mm_xor_si128(c2[206],simde_mm_xor_si128(c2[2435],simde_mm_xor_si128(c2[2115],simde_mm_xor_si128(c2[1792],simde_mm_xor_si128(c2[1807],simde_mm_xor_si128(c2[1794],simde_mm_xor_si128(c2[1487],simde_mm_xor_si128(c2[559],simde_mm_xor_si128(c2[239],simde_mm_xor_si128(c2[5035],simde_mm_xor_si128(c2[3429],simde_mm_xor_si128(c2[3432],simde_mm_xor_si128(c2[3109],simde_mm_xor_si128(c2[1859],simde_mm_xor_si128(c2[1539],simde_mm_xor_si128(c2[1216],simde_mm_xor_si128(c2[1540],simde_mm_xor_si128(c2[1217],simde_mm_xor_si128(c2[2179],simde_mm_xor_si128(c2[2182],simde_mm_xor_si128(c2[1859],simde_mm_xor_si128(c2[930],simde_mm_xor_si128(c2[610],simde_mm_xor_si128(c2[303],simde_mm_xor_si128(c2[613],simde_mm_xor_si128(c2[293],simde_mm_xor_si128(c2[5089],simde_mm_xor_si128(c2[1578],simde_mm_xor_si128(c2[1581],c2[1258]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 35
+     d2[560]=simde_mm_xor_si128(c2[2564],simde_mm_xor_si128(c2[2244],simde_mm_xor_si128(c2[1614],simde_mm_xor_si128(c2[4808],simde_mm_xor_si128(c2[1323],simde_mm_xor_si128(c2[1003],simde_mm_xor_si128(c2[43],simde_mm_xor_si128(c2[1956],simde_mm_xor_si128(c2[2915],simde_mm_xor_si128(c2[3584],simde_mm_xor_si128(c2[3264],simde_mm_xor_si128(c2[2631],simde_mm_xor_si128(c2[2981],simde_mm_xor_si128(c2[2661],simde_mm_xor_si128(c2[1390],simde_mm_xor_si128(c2[3302],simde_mm_xor_si128(c2[3651],simde_mm_xor_si128(c2[775],simde_mm_xor_si128(c2[3981],simde_mm_xor_si128(c2[3691],simde_mm_xor_si128(c2[2090],simde_mm_xor_si128(c2[2722],simde_mm_xor_si128(c2[4047],simde_mm_xor_si128(c2[3727],simde_mm_xor_si128(c2[194],simde_mm_xor_si128(c2[4992],simde_mm_xor_si128(c2[3437],simde_mm_xor_si128(c2[1511],simde_mm_xor_si128(c2[4737],simde_mm_xor_si128(c2[4738],simde_mm_xor_si128(c2[261],simde_mm_xor_si128(c2[4128],simde_mm_xor_si128(c2[3808],simde_mm_xor_si128(c2[3491],c2[4779]))))))))))))))))))))))))))))))))));
+
+//row: 36
+     d2[576]=simde_mm_xor_si128(c2[3848],simde_mm_xor_si128(c2[1986],c2[3433]));
+
+//row: 37
+     d2[592]=simde_mm_xor_si128(c2[974],simde_mm_xor_si128(c2[4809],simde_mm_xor_si128(c2[328],simde_mm_xor_si128(c2[4163],simde_mm_xor_si128(c2[3522],simde_mm_xor_si128(c2[2254],simde_mm_xor_si128(c2[4836],simde_mm_xor_si128(c2[3552],simde_mm_xor_si128(c2[3876],simde_mm_xor_si128(c2[2592],simde_mm_xor_si128(c2[4841],simde_mm_xor_si128(c2[686],simde_mm_xor_si128(c2[4521],simde_mm_xor_si128(c2[1994],simde_mm_xor_si128(c2[710],simde_mm_xor_si128(c2[397],simde_mm_xor_si128(c2[1345],simde_mm_xor_si128(c2[77],simde_mm_xor_si128(c2[1391],simde_mm_xor_si128(c2[107],simde_mm_xor_si128(c2[104],simde_mm_xor_si128(c2[3939],simde_mm_xor_si128(c2[2016],simde_mm_xor_si128(c2[748],simde_mm_xor_si128(c2[2381],simde_mm_xor_si128(c2[1097],simde_mm_xor_si128(c2[4608],simde_mm_xor_si128(c2[3340],simde_mm_xor_si128(c2[1731],simde_mm_xor_si128(c2[2695],simde_mm_xor_si128(c2[1411],simde_mm_xor_si128(c2[2405],simde_mm_xor_si128(c2[1121],simde_mm_xor_si128(c2[4975],simde_mm_xor_si128(c2[804],simde_mm_xor_si128(c2[4655],simde_mm_xor_si128(c2[2441],simde_mm_xor_si128(c2[1157],simde_mm_xor_si128(c2[4043],simde_mm_xor_si128(c2[2759],simde_mm_xor_si128(c2[2758],simde_mm_xor_si128(c2[3722],simde_mm_xor_si128(c2[2438],simde_mm_xor_si128(c2[2151],simde_mm_xor_si128(c2[867],simde_mm_xor_si128(c2[4396],simde_mm_xor_si128(c2[225],simde_mm_xor_si128(c2[4076],simde_mm_xor_si128(c2[3467],simde_mm_xor_si128(c2[2183],simde_mm_xor_si128(c2[3468],simde_mm_xor_si128(c2[2184],simde_mm_xor_si128(c2[3146],simde_mm_xor_si128(c2[4110],simde_mm_xor_si128(c2[2826],simde_mm_xor_si128(c2[2538],simde_mm_xor_si128(c2[1254],simde_mm_xor_si128(c2[2221],simde_mm_xor_si128(c2[937],simde_mm_xor_si128(c2[2529],simde_mm_xor_si128(c2[3493],c2[2209])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 38
+     d2[608]=simde_mm_xor_si128(c2[4802],simde_mm_xor_si128(c2[4482],simde_mm_xor_si128(c2[3852],simde_mm_xor_si128(c2[1927],simde_mm_xor_si128(c2[3561],simde_mm_xor_si128(c2[3241],simde_mm_xor_si128(c2[2281],simde_mm_xor_si128(c2[4194],simde_mm_xor_si128(c2[4833],simde_mm_xor_si128(c2[719],simde_mm_xor_si128(c2[399],simde_mm_xor_si128(c2[4869],simde_mm_xor_si128(c2[100],simde_mm_xor_si128(c2[4899],simde_mm_xor_si128(c2[3628],simde_mm_xor_si128(c2[421],simde_mm_xor_si128(c2[770],simde_mm_xor_si128(c2[3013],simde_mm_xor_si128(c2[1100],simde_mm_xor_si128(c2[810],simde_mm_xor_si128(c2[4328],simde_mm_xor_si128(c2[804],simde_mm_xor_si128(c2[1166],simde_mm_xor_si128(c2[846],simde_mm_xor_si128(c2[2432],simde_mm_xor_si128(c2[2127],simde_mm_xor_si128(c2[556],simde_mm_xor_si128(c2[3749],simde_mm_xor_si128(c2[1856],simde_mm_xor_si128(c2[1857],simde_mm_xor_si128(c2[2499],simde_mm_xor_si128(c2[1263],simde_mm_xor_si128(c2[943],simde_mm_xor_si128(c2[610],c2[1898]))))))))))))))))))))))))))))))))));
+
+//row: 39
+     d2[624]=simde_mm_xor_si128(c2[3522],simde_mm_xor_si128(c2[3202],simde_mm_xor_si128(c2[2892],simde_mm_xor_si128(c2[2572],simde_mm_xor_si128(c2[647],simde_mm_xor_si128(c2[4814],simde_mm_xor_si128(c2[2281],simde_mm_xor_si128(c2[1961],simde_mm_xor_si128(c2[1001],simde_mm_xor_si128(c2[2914],simde_mm_xor_si128(c2[4558],simde_mm_xor_si128(c2[4238],simde_mm_xor_si128(c2[3589],simde_mm_xor_si128(c2[3939],simde_mm_xor_si128(c2[3619],simde_mm_xor_si128(c2[2668],simde_mm_xor_si128(c2[2348],simde_mm_xor_si128(c2[4260],simde_mm_xor_si128(c2[4929],simde_mm_xor_si128(c2[4609],simde_mm_xor_si128(c2[1733],simde_mm_xor_si128(c2[4939],simde_mm_xor_si128(c2[4969],simde_mm_xor_si128(c2[4649],simde_mm_xor_si128(c2[3048],simde_mm_xor_si128(c2[5005],simde_mm_xor_si128(c2[4685],simde_mm_xor_si128(c2[1472],simde_mm_xor_si128(c2[1152],simde_mm_xor_si128(c2[847],simde_mm_xor_si128(c2[4715],simde_mm_xor_si128(c2[4395],simde_mm_xor_si128(c2[2469],simde_mm_xor_si128(c2[4074],simde_mm_xor_si128(c2[896],simde_mm_xor_si128(c2[576],simde_mm_xor_si128(c2[577],simde_mm_xor_si128(c2[1219],simde_mm_xor_si128(c2[5102],simde_mm_xor_si128(c2[4782],simde_mm_xor_si128(c2[4769],simde_mm_xor_si128(c2[4449],c2[618]))))))))))))))))))))))))))))))))))))))))));
+
+//row: 40
+     d2[640]=simde_mm_xor_si128(c2[965],simde_mm_xor_si128(c2[8],simde_mm_xor_si128(c2[335],simde_mm_xor_si128(c2[4481],simde_mm_xor_si128(c2[3529],simde_mm_xor_si128(c2[2572],simde_mm_xor_si128(c2[4843],simde_mm_xor_si128(c2[3886],simde_mm_xor_si128(c2[3883],simde_mm_xor_si128(c2[2926],simde_mm_xor_si128(c2[40],simde_mm_xor_si128(c2[677],simde_mm_xor_si128(c2[4839],simde_mm_xor_si128(c2[1985],simde_mm_xor_si128(c2[1028],simde_mm_xor_si128(c2[715],simde_mm_xor_si128(c2[1352],simde_mm_xor_si128(c2[395],simde_mm_xor_si128(c2[64],simde_mm_xor_si128(c2[1382],simde_mm_xor_si128(c2[425],simde_mm_xor_si128(c2[111],simde_mm_xor_si128(c2[4257],simde_mm_xor_si128(c2[2023],simde_mm_xor_si128(c2[1066],simde_mm_xor_si128(c2[2372],simde_mm_xor_si128(c2[1415],simde_mm_xor_si128(c2[4615],simde_mm_xor_si128(c2[3658],simde_mm_xor_si128(c2[2049],simde_mm_xor_si128(c2[2702],simde_mm_xor_si128(c2[1729],simde_mm_xor_si128(c2[2412],simde_mm_xor_si128(c2[1455],simde_mm_xor_si128(c2[174],simde_mm_xor_si128(c2[811],simde_mm_xor_si128(c2[4973],simde_mm_xor_si128(c2[2432],simde_mm_xor_si128(c2[1475],simde_mm_xor_si128(c2[4034],simde_mm_xor_si128(c2[3077],simde_mm_xor_si128(c2[3076],simde_mm_xor_si128(c2[3713],simde_mm_xor_si128(c2[2756],simde_mm_xor_si128(c2[2158],simde_mm_xor_si128(c2[1185],simde_mm_xor_si128(c2[4714],simde_mm_xor_si128(c2[232],simde_mm_xor_si128(c2[4394],simde_mm_xor_si128(c2[3458],simde_mm_xor_si128(c2[2501],simde_mm_xor_si128(c2[3459],simde_mm_xor_si128(c2[2502],simde_mm_xor_si128(c2[3464],simde_mm_xor_si128(c2[4101],simde_mm_xor_si128(c2[3144],simde_mm_xor_si128(c2[2529],simde_mm_xor_si128(c2[1572],simde_mm_xor_si128(c2[2212],simde_mm_xor_si128(c2[1255],simde_mm_xor_si128(c2[2863],simde_mm_xor_si128(c2[3500],c2[2543]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 41
+     d2[656]=simde_mm_xor_si128(c2[4807],simde_mm_xor_si128(c2[4487],simde_mm_xor_si128(c2[3841],simde_mm_xor_si128(c2[1932],simde_mm_xor_si128(c2[3566],simde_mm_xor_si128(c2[3246],simde_mm_xor_si128(c2[2286],simde_mm_xor_si128(c2[4199],simde_mm_xor_si128(c2[360],simde_mm_xor_si128(c2[708],simde_mm_xor_si128(c2[388],simde_mm_xor_si128(c2[4874],simde_mm_xor_si128(c2[105],simde_mm_xor_si128(c2[4904],simde_mm_xor_si128(c2[3617],simde_mm_xor_si128(c2[426],simde_mm_xor_si128(c2[775],simde_mm_xor_si128(c2[3018],simde_mm_xor_si128(c2[1089],simde_mm_xor_si128(c2[815],simde_mm_xor_si128(c2[4333],simde_mm_xor_si128(c2[1774],simde_mm_xor_si128(c2[1155],simde_mm_xor_si128(c2[835],simde_mm_xor_si128(c2[2437],simde_mm_xor_si128(c2[2116],simde_mm_xor_si128(c2[545],simde_mm_xor_si128(c2[3754],simde_mm_xor_si128(c2[1861],simde_mm_xor_si128(c2[1862],simde_mm_xor_si128(c2[2504],simde_mm_xor_si128(c2[1252],simde_mm_xor_si128(c2[932],simde_mm_xor_si128(c2[615],c2[1903]))))))))))))))))))))))))))))))))));
+  }
+}
+#endif
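
Annotation (not part of the patch): each generated d2[...] = simde_mm_xor_si128(...) row above is one fully unrolled XOR reduction. The output block is the XOR of the input blocks picked out by the non-zero entries of that row of the LDPC base-graph parity-check matrix, with the c2/d2 indices encoding the circulant shifts. A minimal hand-written sketch of the same operation, assuming SIMDE's public sse2 header and an illustrative index table (the real tables are baked into the generated lines, not exposed like this):

    #include <simde/x86/sse2.h>
    #include <stddef.h>

    /* Illustrative only: fold the input blocks named by idx into one
     * output block, as each generated "row" line does in unrolled form. */
    static simde__m128i ldpc_xor_row(const simde__m128i *c2,
                                     const size_t *idx, size_t n)
    {
      simde__m128i acc = c2[idx[0]];
      for (size_t i = 1; i < n; i++)
        acc = simde_mm_xor_si128(acc, c2[idx[i]]);
      return acc;
    }
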
diff --git a/openair1/PHY/CODING/nrLDPC_encoder/ldpc_BG2_Zc288_byte.c b/openair1/PHY/CODING/nrLDPC_encoder/ldpc_BG2_Zc288_byte.c
index df03405c2d2cc12aad7bebe6079597ce9159978b..a9ee7a0e67e6d6c9443fcda4748e594918422405 100644
--- a/openair1/PHY/CODING/nrLDPC_encoder/ldpc_BG2_Zc288_byte.c
+++ b/openair1/PHY/CODING/nrLDPC_encoder/ldpc_BG2_Zc288_byte.c
@@ -1,9 +1,10 @@
+#ifdef __AVX2__
 #include "PHY/sse_intrin.h"
 // generated code for Zc=288, byte encoding
 static inline __attribute__ ((no_sanitize("address", "undefined"))) void ldpc_BG2_Zc288_byte(uint8_t *c,uint8_t *d) {
-  __m256i *csimd=(__m256i *)c,*dsimd=(__m256i *)d;
+  simde__m256i *csimd=(simde__m256i *)c,*dsimd=(simde__m256i *)d;
 
-  __m256i *c2,*d2;
+  simde__m256i *c2,*d2;
 
   int i2;
   for (i2=0; i2<9; i2++) {
@@ -137,3 +138,4 @@ static inline __attribute__ ((no_sanitize("address", "undefined"))) void ldpc_BG
      d2[369]=simde_mm256_xor_si256(c2[3960],simde_mm256_xor_si256(c2[3780],simde_mm256_xor_si256(c2[901],simde_mm256_xor_si256(c2[3063],simde_mm256_xor_si256(c2[2359],simde_mm256_xor_si256(c2[2179],simde_mm256_xor_si256(c2[5061],simde_mm256_xor_si256(c2[1822],simde_mm256_xor_si256(c2[2899],simde_mm256_xor_si256(c2[1478],simde_mm256_xor_si256(c2[1298],simde_mm256_xor_si256(c2[2020],simde_mm256_xor_si256(c2[4015],simde_mm256_xor_si256(c2[3835],simde_mm256_xor_si256(c2[2937],simde_mm256_xor_si256(c2[2577],simde_mm256_xor_si256(c2[4755],simde_mm256_xor_si256(c2[76],simde_mm256_xor_si256(c2[1513],simde_mm256_xor_si256(c2[3692],simde_mm256_xor_si256(c2[4050],simde_mm256_xor_si256(c2[2791],simde_mm256_xor_si256(c2[1192],simde_mm256_xor_si256(c2[1012],simde_mm256_xor_si256(c2[1191],simde_mm256_xor_si256(c2[4970],simde_mm256_xor_si256(c2[3729],simde_mm256_xor_si256(c2[668],simde_mm256_xor_si256(c2[4286],simde_mm256_xor_si256(c2[328],simde_mm256_xor_si256(c2[3388],simde_mm256_xor_si256(c2[3406],simde_mm256_xor_si256(c2[3226],simde_mm256_xor_si256(c2[884],c2[345]))))))))))))))))))))))))))))))))));
   }
 }
+#endif
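
Annotation (not part of the patch): the guards in the two files are complementary. ldpc_BG2_Zc288_byte.c now compiles only under __AVX2__, while the new ldpc_BG2_Zc288_byte_128.c below compiles only when __AVX2__ is absent, so each build sees exactly one definition of ldpc_BG2_Zc288_byte: 256-bit simde__m256i code on AVX2 hosts, the 128-bit simde__m128i fallback everywhere else, including the ARM targets enabled earlier in this patch. A minimal sketch of the pattern, with hypothetical file and function names:

    #include <stdint.h>

    /* encoder_avx2.c -- emitted only on AVX2-capable targets */
    #ifdef __AVX2__
    static inline void ldpc_encode_zc288(uint8_t *c, uint8_t *d) {
      /* ... 256-bit simde__m256i path ... */
    }
    #endif

    /* encoder_128.c -- the complement, built everywhere else
     * (e.g. NEON, reached through SIMDE's emulation layer) */
    #ifndef __AVX2__
    static inline void ldpc_encode_zc288(uint8_t *c, uint8_t *d) {
      /* ... 128-bit simde__m128i path ... */
    }
    #endif
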
diff --git a/openair1/PHY/CODING/nrLDPC_encoder/ldpc_BG2_Zc288_byte_128.c b/openair1/PHY/CODING/nrLDPC_encoder/ldpc_BG2_Zc288_byte_128.c
new file mode 100644
index 0000000000000000000000000000000000000000..66e07b6bfb4fc0888b6191b5b478f056dfd77ea1
--- /dev/null
+++ b/openair1/PHY/CODING/nrLDPC_encoder/ldpc_BG2_Zc288_byte_128.c
@@ -0,0 +1,141 @@
+#ifndef __AVX2__
+#include "PHY/sse_intrin.h"
+// generated code for Zc=288, byte encoding (128-bit SIMDE fallback, built only when AVX2 is unavailable)
+static inline __attribute__ ((no_sanitize("address", "undefined"))) void ldpc_BG2_Zc288_byte(uint8_t *c,uint8_t *d) {
+  simde__m128i *csimd=(simde__m128i *)c,*dsimd=(simde__m128i *)d;
+
+  simde__m128i *c2,*d2;
+
+  int i2;
+  for (i2=0; i2<18; i2++) {
+     c2=&csimd[i2];
+     d2=&dsimd[i2];
+
+//row: 0
+     d2[0]=simde_mm_xor_si128(c2[720],simde_mm_xor_si128(c2[721],simde_mm_xor_si128(c2[5045],simde_mm_xor_si128(c2[3277],simde_mm_xor_si128(c2[3282],simde_mm_xor_si128(c2[2563],simde_mm_xor_si128(c2[1515],simde_mm_xor_si128(c2[2959],simde_mm_xor_si128(c2[830],simde_mm_xor_si128(c2[4793],simde_mm_xor_si128(c2[4073],simde_mm_xor_si128(c2[2670],simde_mm_xor_si128(c2[4830],simde_mm_xor_si128(c2[1945],simde_mm_xor_si128(c2[544],simde_mm_xor_si128(c2[1260],simde_mm_xor_si128(c2[943],simde_mm_xor_si128(c2[1301],simde_mm_xor_si128(c2[3100],simde_mm_xor_si128(c2[618],simde_mm_xor_si128(c2[255],simde_mm_xor_si128(c2[1732],simde_mm_xor_si128(c2[5334],simde_mm_xor_si128(c2[5695],simde_mm_xor_si128(c2[5371],simde_mm_xor_si128(c2[687],c2[5368]))))))))))))))))))))))))));
+
+//row: 1
+     d2[18]=simde_mm_xor_si128(c2[1080],simde_mm_xor_si128(c2[720],simde_mm_xor_si128(c2[721],simde_mm_xor_si128(c2[5045],simde_mm_xor_si128(c2[3637],simde_mm_xor_si128(c2[3277],simde_mm_xor_si128(c2[3282],simde_mm_xor_si128(c2[2563],simde_mm_xor_si128(c2[1875],simde_mm_xor_si128(c2[1515],simde_mm_xor_si128(c2[2959],simde_mm_xor_si128(c2[1190],simde_mm_xor_si128(c2[830],simde_mm_xor_si128(c2[4793],simde_mm_xor_si128(c2[4073],simde_mm_xor_si128(c2[2670],simde_mm_xor_si128(c2[4830],simde_mm_xor_si128(c2[1945],simde_mm_xor_si128(c2[544],simde_mm_xor_si128(c2[1260],simde_mm_xor_si128(c2[1303],simde_mm_xor_si128(c2[943],simde_mm_xor_si128(c2[1301],simde_mm_xor_si128(c2[3100],simde_mm_xor_si128(c2[618],simde_mm_xor_si128(c2[255],simde_mm_xor_si128(c2[1732],simde_mm_xor_si128(c2[5334],simde_mm_xor_si128(c2[5695],simde_mm_xor_si128(c2[5731],simde_mm_xor_si128(c2[5371],simde_mm_xor_si128(c2[687],c2[5368]))))))))))))))))))))))))))))))));
+
+//row: 2
+     d2[36]=simde_mm_xor_si128(c2[1080],simde_mm_xor_si128(c2[720],simde_mm_xor_si128(c2[1081],simde_mm_xor_si128(c2[721],simde_mm_xor_si128(c2[5045],simde_mm_xor_si128(c2[3637],simde_mm_xor_si128(c2[3277],simde_mm_xor_si128(c2[3282],simde_mm_xor_si128(c2[2563],simde_mm_xor_si128(c2[1875],simde_mm_xor_si128(c2[1515],simde_mm_xor_si128(c2[2959],simde_mm_xor_si128(c2[1190],simde_mm_xor_si128(c2[830],simde_mm_xor_si128(c2[5153],simde_mm_xor_si128(c2[4793],simde_mm_xor_si128(c2[4073],simde_mm_xor_si128(c2[3030],simde_mm_xor_si128(c2[2670],simde_mm_xor_si128(c2[4830],simde_mm_xor_si128(c2[1945],simde_mm_xor_si128(c2[904],simde_mm_xor_si128(c2[544],simde_mm_xor_si128(c2[1260],simde_mm_xor_si128(c2[1303],simde_mm_xor_si128(c2[943],simde_mm_xor_si128(c2[1661],simde_mm_xor_si128(c2[1301],simde_mm_xor_si128(c2[3100],simde_mm_xor_si128(c2[978],simde_mm_xor_si128(c2[618],simde_mm_xor_si128(c2[255],simde_mm_xor_si128(c2[2092],simde_mm_xor_si128(c2[1732],simde_mm_xor_si128(c2[5334],simde_mm_xor_si128(c2[5695],simde_mm_xor_si128(c2[5731],simde_mm_xor_si128(c2[5371],simde_mm_xor_si128(c2[1047],simde_mm_xor_si128(c2[687],c2[5368]))))))))))))))))))))))))))))))))))))))));
+
+//row: 3
+     d2[54]=simde_mm_xor_si128(c2[720],simde_mm_xor_si128(c2[721],simde_mm_xor_si128(c2[5045],simde_mm_xor_si128(c2[3277],simde_mm_xor_si128(c2[3282],simde_mm_xor_si128(c2[2923],simde_mm_xor_si128(c2[2563],simde_mm_xor_si128(c2[1515],simde_mm_xor_si128(c2[3319],simde_mm_xor_si128(c2[2959],simde_mm_xor_si128(c2[830],simde_mm_xor_si128(c2[4793],simde_mm_xor_si128(c2[4073],simde_mm_xor_si128(c2[2670],simde_mm_xor_si128(c2[4830],simde_mm_xor_si128(c2[2305],simde_mm_xor_si128(c2[1945],simde_mm_xor_si128(c2[544],simde_mm_xor_si128(c2[1620],simde_mm_xor_si128(c2[1260],simde_mm_xor_si128(c2[943],simde_mm_xor_si128(c2[1301],simde_mm_xor_si128(c2[3460],simde_mm_xor_si128(c2[3100],simde_mm_xor_si128(c2[618],simde_mm_xor_si128(c2[615],simde_mm_xor_si128(c2[255],simde_mm_xor_si128(c2[1732],simde_mm_xor_si128(c2[5334],simde_mm_xor_si128(c2[296],simde_mm_xor_si128(c2[5695],simde_mm_xor_si128(c2[5371],simde_mm_xor_si128(c2[687],simde_mm_xor_si128(c2[5728],c2[5368]))))))))))))))))))))))))))))))))));
+
+//row: 4
+     d2[72]=simde_mm_xor_si128(c2[2163],simde_mm_xor_si128(c2[1803],simde_mm_xor_si128(c2[1804],simde_mm_xor_si128(c2[369],simde_mm_xor_si128(c2[3602],simde_mm_xor_si128(c2[4720],simde_mm_xor_si128(c2[4360],simde_mm_xor_si128(c2[4365],simde_mm_xor_si128(c2[3646],simde_mm_xor_si128(c2[2917],simde_mm_xor_si128(c2[2958],simde_mm_xor_si128(c2[2598],simde_mm_xor_si128(c2[4042],simde_mm_xor_si128(c2[2273],simde_mm_xor_si128(c2[1913],simde_mm_xor_si128(c2[117],simde_mm_xor_si128(c2[5156],simde_mm_xor_si128(c2[3753],simde_mm_xor_si128(c2[154],simde_mm_xor_si128(c2[3028],simde_mm_xor_si128(c2[1627],simde_mm_xor_si128(c2[2343],simde_mm_xor_si128(c2[2386],simde_mm_xor_si128(c2[2026],simde_mm_xor_si128(c2[2384],simde_mm_xor_si128(c2[4183],simde_mm_xor_si128(c2[1701],simde_mm_xor_si128(c2[1338],simde_mm_xor_si128(c2[2815],simde_mm_xor_si128(c2[658],simde_mm_xor_si128(c2[1019],simde_mm_xor_si128(c2[1055],simde_mm_xor_si128(c2[695],simde_mm_xor_si128(c2[1770],c2[692]))))))))))))))))))))))))))))))))));
+
+//row: 5
+     d2[90]=simde_mm_xor_si128(c2[3604],simde_mm_xor_si128(c2[3244],simde_mm_xor_si128(c2[3245],simde_mm_xor_si128(c2[1810],simde_mm_xor_si128(c2[2882],simde_mm_xor_si128(c2[402],simde_mm_xor_si128(c2[42],simde_mm_xor_si128(c2[47],simde_mm_xor_si128(c2[5087],simde_mm_xor_si128(c2[4364],simde_mm_xor_si128(c2[4399],simde_mm_xor_si128(c2[4039],simde_mm_xor_si128(c2[5483],simde_mm_xor_si128(c2[3714],simde_mm_xor_si128(c2[3354],simde_mm_xor_si128(c2[1558],simde_mm_xor_si128(c2[838],simde_mm_xor_si128(c2[5194],simde_mm_xor_si128(c2[1595],simde_mm_xor_si128(c2[4469],simde_mm_xor_si128(c2[3068],simde_mm_xor_si128(c2[3784],simde_mm_xor_si128(c2[1625],simde_mm_xor_si128(c2[3827],simde_mm_xor_si128(c2[3467],simde_mm_xor_si128(c2[3825],simde_mm_xor_si128(c2[5624],simde_mm_xor_si128(c2[3142],simde_mm_xor_si128(c2[2779],simde_mm_xor_si128(c2[3500],simde_mm_xor_si128(c2[4256],simde_mm_xor_si128(c2[2099],simde_mm_xor_si128(c2[2460],simde_mm_xor_si128(c2[2496],simde_mm_xor_si128(c2[2136],simde_mm_xor_si128(c2[3211],c2[2133]))))))))))))))))))))))))))))))))))));
+
+//row: 6
+     d2[108]=simde_mm_xor_si128(c2[5048],simde_mm_xor_si128(c2[4688],simde_mm_xor_si128(c2[4689],simde_mm_xor_si128(c2[3254],simde_mm_xor_si128(c2[4686],simde_mm_xor_si128(c2[1846],simde_mm_xor_si128(c2[1486],simde_mm_xor_si128(c2[1491],simde_mm_xor_si128(c2[772],simde_mm_xor_si128(c2[84],simde_mm_xor_si128(c2[5483],simde_mm_xor_si128(c2[1168],simde_mm_xor_si128(c2[5158],simde_mm_xor_si128(c2[4798],simde_mm_xor_si128(c2[3002],simde_mm_xor_si128(c2[2282],simde_mm_xor_si128(c2[879],simde_mm_xor_si128(c2[3039],simde_mm_xor_si128(c2[154],simde_mm_xor_si128(c2[4512],simde_mm_xor_si128(c2[5228],simde_mm_xor_si128(c2[2705],simde_mm_xor_si128(c2[5271],simde_mm_xor_si128(c2[4911],simde_mm_xor_si128(c2[5269],simde_mm_xor_si128(c2[1309],simde_mm_xor_si128(c2[4586],simde_mm_xor_si128(c2[4223],simde_mm_xor_si128(c2[4218],simde_mm_xor_si128(c2[5700],simde_mm_xor_si128(c2[3543],simde_mm_xor_si128(c2[3904],simde_mm_xor_si128(c2[3940],simde_mm_xor_si128(c2[3580],simde_mm_xor_si128(c2[4655],simde_mm_xor_si128(c2[3577],c2[2132]))))))))))))))))))))))))))))))))))));
+
+//row: 7
+     d2[126]=simde_mm_xor_si128(c2[5406],simde_mm_xor_si128(c2[5046],simde_mm_xor_si128(c2[1084],simde_mm_xor_si128(c2[5047],simde_mm_xor_si128(c2[1085],simde_mm_xor_si128(c2[3612],simde_mm_xor_si128(c2[5409],simde_mm_xor_si128(c2[2204],simde_mm_xor_si128(c2[1844],simde_mm_xor_si128(c2[3641],simde_mm_xor_si128(c2[1849],simde_mm_xor_si128(c2[3646],simde_mm_xor_si128(c2[1130],simde_mm_xor_si128(c2[3287],simde_mm_xor_si128(c2[2927],simde_mm_xor_si128(c2[402],simde_mm_xor_si128(c2[442],simde_mm_xor_si128(c2[82],simde_mm_xor_si128(c2[1879],simde_mm_xor_si128(c2[1526],simde_mm_xor_si128(c2[3683],simde_mm_xor_si128(c2[3323],simde_mm_xor_si128(c2[5516],simde_mm_xor_si128(c2[5156],simde_mm_xor_si128(c2[1194],simde_mm_xor_si128(c2[3360],simde_mm_xor_si128(c2[5157],simde_mm_xor_si128(c2[2640],simde_mm_xor_si128(c2[4437],simde_mm_xor_si128(c2[1237],simde_mm_xor_si128(c2[3034],simde_mm_xor_si128(c2[3397],simde_mm_xor_si128(c2[5194],simde_mm_xor_si128(c2[512],simde_mm_xor_si128(c2[2669],simde_mm_xor_si128(c2[2309],simde_mm_xor_si128(c2[4870],simde_mm_xor_si128(c2[908],simde_mm_xor_si128(c2[5586],simde_mm_xor_si128(c2[1984],simde_mm_xor_si128(c2[1624],simde_mm_xor_si128(c2[2708],simde_mm_xor_si128(c2[5629],simde_mm_xor_si128(c2[5269],simde_mm_xor_si128(c2[1307],simde_mm_xor_si128(c2[5627],simde_mm_xor_si128(c2[1665],simde_mm_xor_si128(c2[1667],simde_mm_xor_si128(c2[3824],simde_mm_xor_si128(c2[3464],simde_mm_xor_si128(c2[4944],simde_mm_xor_si128(c2[982],simde_mm_xor_si128(c2[4581],simde_mm_xor_si128(c2[979],simde_mm_xor_si128(c2[619],simde_mm_xor_si128(c2[1334],simde_mm_xor_si128(c2[299],simde_mm_xor_si128(c2[2096],simde_mm_xor_si128(c2[3901],simde_mm_xor_si128(c2[5698],simde_mm_xor_si128(c2[4262],simde_mm_xor_si128(c2[660],simde_mm_xor_si128(c2[300],simde_mm_xor_si128(c2[4298],simde_mm_xor_si128(c2[3938],simde_mm_xor_si128(c2[5735],simde_mm_xor_si128(c2[5013],simde_mm_xor_si128(c2[1051],simde_mm_xor_si128(c2[3935],simde_mm_xor_si128(c2[333],c2[5732]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 8
+     d2[144]=simde_mm_xor_si128(c2[3965],simde_mm_xor_si128(c2[3605],simde_mm_xor_si128(c2[3966],simde_mm_xor_si128(c2[3606],simde_mm_xor_si128(c2[2171],simde_mm_xor_si128(c2[2164],simde_mm_xor_si128(c2[763],simde_mm_xor_si128(c2[403],simde_mm_xor_si128(c2[408],simde_mm_xor_si128(c2[5448],simde_mm_xor_si128(c2[1840],simde_mm_xor_si128(c2[4760],simde_mm_xor_si128(c2[4400],simde_mm_xor_si128(c2[85],simde_mm_xor_si128(c2[4075],simde_mm_xor_si128(c2[3715],simde_mm_xor_si128(c2[2279],simde_mm_xor_si128(c2[1919],simde_mm_xor_si128(c2[1199],simde_mm_xor_si128(c2[156],simde_mm_xor_si128(c2[5555],simde_mm_xor_si128(c2[1956],simde_mm_xor_si128(c2[4830],simde_mm_xor_si128(c2[3789],simde_mm_xor_si128(c2[3429],simde_mm_xor_si128(c2[4145],simde_mm_xor_si128(c2[4188],simde_mm_xor_si128(c2[3828],simde_mm_xor_si128(c2[4546],simde_mm_xor_si128(c2[4186],simde_mm_xor_si128(c2[226],simde_mm_xor_si128(c2[3863],simde_mm_xor_si128(c2[3503],simde_mm_xor_si128(c2[3140],simde_mm_xor_si128(c2[4977],simde_mm_xor_si128(c2[4617],simde_mm_xor_si128(c2[2460],simde_mm_xor_si128(c2[2821],simde_mm_xor_si128(c2[2857],simde_mm_xor_si128(c2[2497],simde_mm_xor_si128(c2[3932],simde_mm_xor_si128(c2[3572],c2[2494]))))))))))))))))))))))))))))))))))))))))));
+
+//row: 9
+     d2[162]=simde_mm_xor_si128(c2[3601],simde_mm_xor_si128(c2[1443],simde_mm_xor_si128(c2[1083],simde_mm_xor_si128(c2[3602],simde_mm_xor_si128(c2[1084],simde_mm_xor_si128(c2[2167],simde_mm_xor_si128(c2[5408],simde_mm_xor_si128(c2[399],simde_mm_xor_si128(c2[4000],simde_mm_xor_si128(c2[3640],simde_mm_xor_si128(c2[404],simde_mm_xor_si128(c2[3645],simde_mm_xor_si128(c2[5444],simde_mm_xor_si128(c2[2926],simde_mm_xor_si128(c2[402],simde_mm_xor_si128(c2[4396],simde_mm_xor_si128(c2[2238],simde_mm_xor_si128(c2[1878],simde_mm_xor_si128(c2[81],simde_mm_xor_si128(c2[3322],simde_mm_xor_si128(c2[3711],simde_mm_xor_si128(c2[1553],simde_mm_xor_si128(c2[1193],simde_mm_xor_si128(c2[1915],simde_mm_xor_si128(c2[5156],simde_mm_xor_si128(c2[1195],simde_mm_xor_si128(c2[4436],simde_mm_xor_si128(c2[5551],simde_mm_xor_si128(c2[3033],simde_mm_xor_si128(c2[1952],simde_mm_xor_si128(c2[5193],simde_mm_xor_si128(c2[4826],simde_mm_xor_si128(c2[2308],simde_mm_xor_si128(c2[3425],simde_mm_xor_si128(c2[907],simde_mm_xor_si128(c2[4141],simde_mm_xor_si128(c2[1623],simde_mm_xor_si128(c2[3824],simde_mm_xor_si128(c2[1666],simde_mm_xor_si128(c2[1306],simde_mm_xor_si128(c2[4182],simde_mm_xor_si128(c2[1664],simde_mm_xor_si128(c2[222],simde_mm_xor_si128(c2[3463],simde_mm_xor_si128(c2[3499],simde_mm_xor_si128(c2[981],simde_mm_xor_si128(c2[3136],simde_mm_xor_si128(c2[618],simde_mm_xor_si128(c2[4613],simde_mm_xor_si128(c2[2095],simde_mm_xor_si128(c2[2456],simde_mm_xor_si128(c2[5697],simde_mm_xor_si128(c2[2817],simde_mm_xor_si128(c2[299],simde_mm_xor_si128(c2[3170],simde_mm_xor_si128(c2[2493],simde_mm_xor_si128(c2[335],simde_mm_xor_si128(c2[5734],simde_mm_xor_si128(c2[3568],simde_mm_xor_si128(c2[1050],simde_mm_xor_si128(c2[2490],c2[5731])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 10
+     d2[180]=simde_mm_xor_si128(c2[5042],simde_mm_xor_si128(c2[3278],simde_mm_xor_si128(c2[2022],c2[258])));
+
+//row: 11
+     d2[198]=simde_mm_xor_si128(c2[724],simde_mm_xor_si128(c2[725],simde_mm_xor_si128(c2[5049],simde_mm_xor_si128(c2[4321],simde_mm_xor_si128(c2[3281],simde_mm_xor_si128(c2[3286],simde_mm_xor_si128(c2[2927],simde_mm_xor_si128(c2[2567],simde_mm_xor_si128(c2[1519],simde_mm_xor_si128(c2[3323],simde_mm_xor_si128(c2[2963],simde_mm_xor_si128(c2[834],simde_mm_xor_si128(c2[4797],simde_mm_xor_si128(c2[4077],simde_mm_xor_si128(c2[2674],simde_mm_xor_si128(c2[4834],simde_mm_xor_si128(c2[2309],simde_mm_xor_si128(c2[1949],simde_mm_xor_si128(c2[548],simde_mm_xor_si128(c2[1624],simde_mm_xor_si128(c2[1264],simde_mm_xor_si128(c2[947],simde_mm_xor_si128(c2[1305],simde_mm_xor_si128(c2[3464],simde_mm_xor_si128(c2[3104],simde_mm_xor_si128(c2[622],simde_mm_xor_si128(c2[619],simde_mm_xor_si128(c2[259],simde_mm_xor_si128(c2[5293],simde_mm_xor_si128(c2[1736],simde_mm_xor_si128(c2[5338],simde_mm_xor_si128(c2[300],simde_mm_xor_si128(c2[5699],simde_mm_xor_si128(c2[5375],simde_mm_xor_si128(c2[691],simde_mm_xor_si128(c2[5732],simde_mm_xor_si128(c2[5372],c2[1771])))))))))))))))))))))))))))))))))))));
+
+//row: 12
+     d2[216]=simde_mm_xor_si128(c2[2168],simde_mm_xor_si128(c2[1808],simde_mm_xor_si128(c2[1809],simde_mm_xor_si128(c2[374],simde_mm_xor_si128(c2[4725],simde_mm_xor_si128(c2[4365],simde_mm_xor_si128(c2[4370],simde_mm_xor_si128(c2[3651],simde_mm_xor_si128(c2[398],simde_mm_xor_si128(c2[2963],simde_mm_xor_si128(c2[2603],simde_mm_xor_si128(c2[4047],simde_mm_xor_si128(c2[2278],simde_mm_xor_si128(c2[1918],simde_mm_xor_si128(c2[122],simde_mm_xor_si128(c2[5161],simde_mm_xor_si128(c2[3715],simde_mm_xor_si128(c2[3758],simde_mm_xor_si128(c2[159],simde_mm_xor_si128(c2[3033],simde_mm_xor_si128(c2[1632],simde_mm_xor_si128(c2[2348],simde_mm_xor_si128(c2[2391],simde_mm_xor_si128(c2[2031],simde_mm_xor_si128(c2[2389],simde_mm_xor_si128(c2[4188],simde_mm_xor_si128(c2[1706],simde_mm_xor_si128(c2[1343],simde_mm_xor_si128(c2[2820],simde_mm_xor_si128(c2[663],simde_mm_xor_si128(c2[1024],simde_mm_xor_si128(c2[1060],simde_mm_xor_si128(c2[700],simde_mm_xor_si128(c2[1775],c2[697]))))))))))))))))))))))))))))))))));
+
+//row: 13
+     d2[234]=simde_mm_xor_si128(c2[363],simde_mm_xor_si128(c2[364],simde_mm_xor_si128(c2[4688],simde_mm_xor_si128(c2[4324],simde_mm_xor_si128(c2[2920],simde_mm_xor_si128(c2[2925],simde_mm_xor_si128(c2[2566],simde_mm_xor_si128(c2[2206],simde_mm_xor_si128(c2[1838],simde_mm_xor_si128(c2[1158],simde_mm_xor_si128(c2[2962],simde_mm_xor_si128(c2[2602],simde_mm_xor_si128(c2[473],simde_mm_xor_si128(c2[4436],simde_mm_xor_si128(c2[3716],simde_mm_xor_si128(c2[2313],simde_mm_xor_si128(c2[4473],simde_mm_xor_si128(c2[1948],simde_mm_xor_si128(c2[1588],simde_mm_xor_si128(c2[187],simde_mm_xor_si128(c2[1263],simde_mm_xor_si128(c2[903],simde_mm_xor_si128(c2[586],simde_mm_xor_si128(c2[944],simde_mm_xor_si128(c2[3103],simde_mm_xor_si128(c2[2743],simde_mm_xor_si128(c2[261],simde_mm_xor_si128(c2[258],simde_mm_xor_si128(c2[5657],simde_mm_xor_si128(c2[1375],simde_mm_xor_si128(c2[4977],simde_mm_xor_si128(c2[5698],simde_mm_xor_si128(c2[5338],simde_mm_xor_si128(c2[5331],simde_mm_xor_si128(c2[5014],simde_mm_xor_si128(c2[330],simde_mm_xor_si128(c2[5371],c2[5011])))))))))))))))))))))))))))))))))))));
+
+//row: 14
+     d2[252]=simde_mm_xor_si128(c2[1808],simde_mm_xor_si128(c2[1448],simde_mm_xor_si128(c2[1086],simde_mm_xor_si128(c2[1449],simde_mm_xor_si128(c2[1087],simde_mm_xor_si128(c2[14],simde_mm_xor_si128(c2[5411],simde_mm_xor_si128(c2[4365],simde_mm_xor_si128(c2[4005],simde_mm_xor_si128(c2[3643],simde_mm_xor_si128(c2[4010],simde_mm_xor_si128(c2[3648],simde_mm_xor_si128(c2[3291],simde_mm_xor_si128(c2[3289],simde_mm_xor_si128(c2[2929],simde_mm_xor_si128(c2[5444],simde_mm_xor_si128(c2[2603],simde_mm_xor_si128(c2[2243],simde_mm_xor_si128(c2[1881],simde_mm_xor_si128(c2[3687],simde_mm_xor_si128(c2[3685],simde_mm_xor_si128(c2[3325],simde_mm_xor_si128(c2[1918],simde_mm_xor_si128(c2[1558],simde_mm_xor_si128(c2[1196],simde_mm_xor_si128(c2[5521],simde_mm_xor_si128(c2[5159],simde_mm_xor_si128(c2[4801],simde_mm_xor_si128(c2[4439],simde_mm_xor_si128(c2[3398],simde_mm_xor_si128(c2[3036],simde_mm_xor_si128(c2[5558],simde_mm_xor_si128(c2[5196],simde_mm_xor_si128(c2[2673],simde_mm_xor_si128(c2[2671],simde_mm_xor_si128(c2[2311],simde_mm_xor_si128(c2[1272],simde_mm_xor_si128(c2[910],simde_mm_xor_si128(c2[1988],simde_mm_xor_si128(c2[1986],simde_mm_xor_si128(c2[1626],simde_mm_xor_si128(c2[2031],simde_mm_xor_si128(c2[1671],simde_mm_xor_si128(c2[1309],simde_mm_xor_si128(c2[2029],simde_mm_xor_si128(c2[1667],simde_mm_xor_si128(c2[3828],simde_mm_xor_si128(c2[3826],simde_mm_xor_si128(c2[3466],simde_mm_xor_si128(c2[1299],simde_mm_xor_si128(c2[1346],simde_mm_xor_si128(c2[984],simde_mm_xor_si128(c2[983],simde_mm_xor_si128(c2[981],simde_mm_xor_si128(c2[621],simde_mm_xor_si128(c2[2460],simde_mm_xor_si128(c2[2098],simde_mm_xor_si128(c2[303],simde_mm_xor_si128(c2[5700],simde_mm_xor_si128(c2[664],simde_mm_xor_si128(c2[662],simde_mm_xor_si128(c2[302],simde_mm_xor_si128(c2[700],simde_mm_xor_si128(c2[340],simde_mm_xor_si128(c2[5737],simde_mm_xor_si128(c2[1415],simde_mm_xor_si128(c2[1053],simde_mm_xor_si128(c2[337],simde_mm_xor_si128(c2[335],c2[5734])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 15
+     d2[270]=simde_mm_xor_si128(c2[726],simde_mm_xor_si128(c2[1088],simde_mm_xor_si128(c2[728],simde_mm_xor_si128(c2[727],simde_mm_xor_si128(c2[729],simde_mm_xor_si128(c2[5051],simde_mm_xor_si128(c2[5053],simde_mm_xor_si128(c2[3968],simde_mm_xor_si128(c2[3283],simde_mm_xor_si128(c2[3645],simde_mm_xor_si128(c2[3285],simde_mm_xor_si128(c2[3288],simde_mm_xor_si128(c2[3290],simde_mm_xor_si128(c2[2569],simde_mm_xor_si128(c2[2571],simde_mm_xor_si128(c2[1521],simde_mm_xor_si128(c2[1883],simde_mm_xor_si128(c2[1523],simde_mm_xor_si128(c2[2965],simde_mm_xor_si128(c2[2967],simde_mm_xor_si128(c2[836],simde_mm_xor_si128(c2[1198],simde_mm_xor_si128(c2[838],simde_mm_xor_si128(c2[4799],simde_mm_xor_si128(c2[4801],simde_mm_xor_si128(c2[4079],simde_mm_xor_si128(c2[4081],simde_mm_xor_si128(c2[2676],simde_mm_xor_si128(c2[2678],simde_mm_xor_si128(c2[4836],simde_mm_xor_si128(c2[4838],simde_mm_xor_si128(c2[1951],simde_mm_xor_si128(c2[1953],simde_mm_xor_si128(c2[550],simde_mm_xor_si128(c2[552],simde_mm_xor_si128(c2[1266],simde_mm_xor_si128(c2[1268],simde_mm_xor_si128(c2[949],simde_mm_xor_si128(c2[1311],simde_mm_xor_si128(c2[951],simde_mm_xor_si128(c2[1307],simde_mm_xor_si128(c2[1309],simde_mm_xor_si128(c2[3106],simde_mm_xor_si128(c2[3108],simde_mm_xor_si128(c2[624],simde_mm_xor_si128(c2[626],simde_mm_xor_si128(c2[261],simde_mm_xor_si128(c2[263],simde_mm_xor_si128(c2[1738],simde_mm_xor_si128(c2[1740],simde_mm_xor_si128(c2[5340],simde_mm_xor_si128(c2[5342],simde_mm_xor_si128(c2[5701],simde_mm_xor_si128(c2[5703],simde_mm_xor_si128(c2[5377],simde_mm_xor_si128(c2[5739],simde_mm_xor_si128(c2[5379],simde_mm_xor_si128(c2[693],simde_mm_xor_si128(c2[695],simde_mm_xor_si128(c2[5374],c2[5376]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 16
+     d2[288]=simde_mm_xor_si128(c2[5401],simde_mm_xor_si128(c2[5041],simde_mm_xor_si128(c2[3960],simde_mm_xor_si128(c2[3600],simde_mm_xor_si128(c2[5042],simde_mm_xor_si128(c2[3961],simde_mm_xor_si128(c2[3601],simde_mm_xor_si128(c2[3607],simde_mm_xor_si128(c2[2166],simde_mm_xor_si128(c2[2199],simde_mm_xor_si128(c2[1839],simde_mm_xor_si128(c2[758],simde_mm_xor_si128(c2[398],simde_mm_xor_si128(c2[1844],simde_mm_xor_si128(c2[403],simde_mm_xor_si128(c2[1125],simde_mm_xor_si128(c2[5443],simde_mm_xor_si128(c2[39],simde_mm_xor_si128(c2[437],simde_mm_xor_si128(c2[77],simde_mm_xor_si128(c2[4755],simde_mm_xor_si128(c2[4395],simde_mm_xor_si128(c2[1521],simde_mm_xor_si128(c2[80],simde_mm_xor_si128(c2[5511],simde_mm_xor_si128(c2[5151],simde_mm_xor_si128(c2[4070],simde_mm_xor_si128(c2[3710],simde_mm_xor_si128(c2[3355],simde_mm_xor_si128(c2[2274],simde_mm_xor_si128(c2[1914],simde_mm_xor_si128(c2[2635],simde_mm_xor_si128(c2[1194],simde_mm_xor_si128(c2[1232],simde_mm_xor_si128(c2[151],simde_mm_xor_si128(c2[5550],simde_mm_xor_si128(c2[3392],simde_mm_xor_si128(c2[1951],simde_mm_xor_si128(c2[507],simde_mm_xor_si128(c2[4825],simde_mm_xor_si128(c2[4865],simde_mm_xor_si128(c2[3784],simde_mm_xor_si128(c2[3424],simde_mm_xor_si128(c2[5581],simde_mm_xor_si128(c2[4140],simde_mm_xor_si128(c2[5624],simde_mm_xor_si128(c2[5264],simde_mm_xor_si128(c2[4183],simde_mm_xor_si128(c2[3823],simde_mm_xor_si128(c2[5622],simde_mm_xor_si128(c2[4541],simde_mm_xor_si128(c2[4181],simde_mm_xor_si128(c2[1662],simde_mm_xor_si128(c2[221],simde_mm_xor_si128(c2[4939],simde_mm_xor_si128(c2[3858],simde_mm_xor_si128(c2[3498],simde_mm_xor_si128(c2[4576],simde_mm_xor_si128(c2[3135],simde_mm_xor_si128(c2[294],simde_mm_xor_si128(c2[4972],simde_mm_xor_si128(c2[4612],simde_mm_xor_si128(c2[3896],simde_mm_xor_si128(c2[2455],simde_mm_xor_si128(c2[4257],simde_mm_xor_si128(c2[2816],simde_mm_xor_si128(c2[4293],simde_mm_xor_si128(c2[3933],simde_mm_xor_si128(c2[2852],simde_mm_xor_si128(c2[2492],simde_mm_xor_si128(c2[5008],simde_mm_xor_si128(c2[3927],simde_mm_xor_si128(c2[3567],simde_mm_xor_si128(c2[3930],simde_mm_xor_si128(c2[2489],c2[3564])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 17
+     d2[306]=simde_mm_xor_si128(c2[1444],simde_mm_xor_si128(c2[1084],simde_mm_xor_si128(c2[4682],simde_mm_xor_si128(c2[4322],simde_mm_xor_si128(c2[1085],simde_mm_xor_si128(c2[4683],simde_mm_xor_si128(c2[4323],simde_mm_xor_si128(c2[5409],simde_mm_xor_si128(c2[2888],simde_mm_xor_si128(c2[4001],simde_mm_xor_si128(c2[3641],simde_mm_xor_si128(c2[1480],simde_mm_xor_si128(c2[1120],simde_mm_xor_si128(c2[3646],simde_mm_xor_si128(c2[1125],simde_mm_xor_si128(c2[2927],simde_mm_xor_si128(c2[406],simde_mm_xor_si128(c2[2923],simde_mm_xor_si128(c2[2239],simde_mm_xor_si128(c2[1879],simde_mm_xor_si128(c2[5477],simde_mm_xor_si128(c2[5117],simde_mm_xor_si128(c2[3323],simde_mm_xor_si128(c2[802],simde_mm_xor_si128(c2[1554],simde_mm_xor_si128(c2[1194],simde_mm_xor_si128(c2[4792],simde_mm_xor_si128(c2[4432],simde_mm_xor_si128(c2[5157],simde_mm_xor_si128(c2[2996],simde_mm_xor_si128(c2[2636],simde_mm_xor_si128(c2[4437],simde_mm_xor_si128(c2[1916],simde_mm_xor_si128(c2[3034],simde_mm_xor_si128(c2[873],simde_mm_xor_si128(c2[513],simde_mm_xor_si128(c2[5194],simde_mm_xor_si128(c2[2673],simde_mm_xor_si128(c2[2309],simde_mm_xor_si128(c2[5547],simde_mm_xor_si128(c2[908],simde_mm_xor_si128(c2[4506],simde_mm_xor_si128(c2[4146],simde_mm_xor_si128(c2[1624],simde_mm_xor_si128(c2[4862],simde_mm_xor_si128(c2[4142],simde_mm_xor_si128(c2[1667],simde_mm_xor_si128(c2[1307],simde_mm_xor_si128(c2[4905],simde_mm_xor_si128(c2[4545],simde_mm_xor_si128(c2[1665],simde_mm_xor_si128(c2[5263],simde_mm_xor_si128(c2[4903],simde_mm_xor_si128(c2[3464],simde_mm_xor_si128(c2[943],simde_mm_xor_si128(c2[982],simde_mm_xor_si128(c2[4580],simde_mm_xor_si128(c2[4220],simde_mm_xor_si128(c2[619],simde_mm_xor_si128(c2[3857],simde_mm_xor_si128(c2[2096],simde_mm_xor_si128(c2[5694],simde_mm_xor_si128(c2[5334],simde_mm_xor_si128(c2[5698],simde_mm_xor_si128(c2[3177],simde_mm_xor_si128(c2[300],simde_mm_xor_si128(c2[3538],simde_mm_xor_si128(c2[336],simde_mm_xor_si128(c2[5735],simde_mm_xor_si128(c2[3574],simde_mm_xor_si128(c2[3214],simde_mm_xor_si128(c2[1051],simde_mm_xor_si128(c2[4649],simde_mm_xor_si128(c2[4289],simde_mm_xor_si128(c2[5732],c2[3211])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 18
+     d2[324]=simde_mm_xor_si128(c2[361],simde_mm_xor_si128(c2[3822],c2[5300]));
+
+//row: 19
+     d2[342]=simde_mm_xor_si128(c2[3962],simde_mm_xor_si128(c2[3963],simde_mm_xor_si128(c2[2528],simde_mm_xor_si128(c2[5404],simde_mm_xor_si128(c2[760],simde_mm_xor_si128(c2[765],simde_mm_xor_si128(c2[46],simde_mm_xor_si128(c2[4357],simde_mm_xor_si128(c2[4757],simde_mm_xor_si128(c2[442],simde_mm_xor_si128(c2[4072],simde_mm_xor_si128(c2[2276],simde_mm_xor_si128(c2[1556],simde_mm_xor_si128(c2[153],simde_mm_xor_si128(c2[2313],simde_mm_xor_si128(c2[5187],simde_mm_xor_si128(c2[3786],simde_mm_xor_si128(c2[4502],simde_mm_xor_si128(c2[4185],simde_mm_xor_si128(c2[4543],simde_mm_xor_si128(c2[583],simde_mm_xor_si128(c2[3860],simde_mm_xor_si128(c2[3497],simde_mm_xor_si128(c2[4974],simde_mm_xor_si128(c2[2817],simde_mm_xor_si128(c2[3178],simde_mm_xor_si128(c2[2854],simde_mm_xor_si128(c2[3929],c2[2851]))))))))))))))))))))))))))));
+
+//row: 20
+     d2[360]=simde_mm_xor_si128(c2[365],simde_mm_xor_si128(c2[5],simde_mm_xor_si128(c2[6],simde_mm_xor_si128(c2[4330],simde_mm_xor_si128(c2[2922],simde_mm_xor_si128(c2[2562],simde_mm_xor_si128(c2[2567],simde_mm_xor_si128(c2[1848],simde_mm_xor_si128(c2[756],simde_mm_xor_si128(c2[1160],simde_mm_xor_si128(c2[800],simde_mm_xor_si128(c2[2244],simde_mm_xor_si128(c2[475],simde_mm_xor_si128(c2[115],simde_mm_xor_si128(c2[4078],simde_mm_xor_si128(c2[3358],simde_mm_xor_si128(c2[1955],simde_mm_xor_si128(c2[4115],simde_mm_xor_si128(c2[1230],simde_mm_xor_si128(c2[2670],simde_mm_xor_si128(c2[5588],simde_mm_xor_si128(c2[545],simde_mm_xor_si128(c2[588],simde_mm_xor_si128(c2[228],simde_mm_xor_si128(c2[586],simde_mm_xor_si128(c2[2385],simde_mm_xor_si128(c2[5662],simde_mm_xor_si128(c2[5299],simde_mm_xor_si128(c2[1017],simde_mm_xor_si128(c2[4619],simde_mm_xor_si128(c2[4980],simde_mm_xor_si128(c2[5016],simde_mm_xor_si128(c2[4656],simde_mm_xor_si128(c2[5731],c2[4653]))))))))))))))))))))))))))))))))));
+
+//row: 21
+     d2[378]=simde_mm_xor_si128(c2[1085],simde_mm_xor_si128(c2[1086],simde_mm_xor_si128(c2[5410],simde_mm_xor_si128(c2[3965],simde_mm_xor_si128(c2[3642],simde_mm_xor_si128(c2[3647],simde_mm_xor_si128(c2[3288],simde_mm_xor_si128(c2[2928],simde_mm_xor_si128(c2[1880],simde_mm_xor_si128(c2[3684],simde_mm_xor_si128(c2[3324],simde_mm_xor_si128(c2[1195],simde_mm_xor_si128(c2[5158],simde_mm_xor_si128(c2[4438],simde_mm_xor_si128(c2[3035],simde_mm_xor_si128(c2[5195],simde_mm_xor_si128(c2[2670],simde_mm_xor_si128(c2[2310],simde_mm_xor_si128(c2[909],simde_mm_xor_si128(c2[1985],simde_mm_xor_si128(c2[1625],simde_mm_xor_si128(c2[1308],simde_mm_xor_si128(c2[1666],simde_mm_xor_si128(c2[3825],simde_mm_xor_si128(c2[3465],simde_mm_xor_si128(c2[983],simde_mm_xor_si128(c2[980],simde_mm_xor_si128(c2[620],simde_mm_xor_si128(c2[2097],simde_mm_xor_si128(c2[5699],simde_mm_xor_si128(c2[661],simde_mm_xor_si128(c2[301],simde_mm_xor_si128(c2[4252],simde_mm_xor_si128(c2[5736],simde_mm_xor_si128(c2[1052],simde_mm_xor_si128(c2[334],c2[5733]))))))))))))))))))))))))))))))))))));
+
+//row: 22
+     d2[396]=simde_mm_xor_si128(c2[2199],c2[1520]);
+
+//row: 23
+     d2[414]=simde_mm_xor_si128(c2[1444],simde_mm_xor_si128(c2[1195],c2[3063]));
+
+//row: 24
+     d2[432]=simde_mm_xor_si128(c2[5077],simde_mm_xor_si128(c2[3674],c2[2130]));
+
+//row: 25
+     d2[450]=simde_mm_xor_si128(c2[8],c2[5583]);
+
+//row: 26
+     d2[468]=simde_mm_xor_si128(c2[2526],simde_mm_xor_si128(c2[2166],simde_mm_xor_si128(c2[2528],simde_mm_xor_si128(c2[2527],simde_mm_xor_si128(c2[2167],simde_mm_xor_si128(c2[2529],simde_mm_xor_si128(c2[732],simde_mm_xor_si128(c2[1094],simde_mm_xor_si128(c2[5083],simde_mm_xor_si128(c2[4723],simde_mm_xor_si128(c2[5085],simde_mm_xor_si128(c2[4728],simde_mm_xor_si128(c2[5090],simde_mm_xor_si128(c2[4009],simde_mm_xor_si128(c2[4731],simde_mm_xor_si128(c2[4371],simde_mm_xor_si128(c2[3321],simde_mm_xor_si128(c2[2961],simde_mm_xor_si128(c2[3323],simde_mm_xor_si128(c2[4405],simde_mm_xor_si128(c2[5127],simde_mm_xor_si128(c2[4767],simde_mm_xor_si128(c2[5120],simde_mm_xor_si128(c2[2636],simde_mm_xor_si128(c2[2276],simde_mm_xor_si128(c2[2638],simde_mm_xor_si128(c2[840],simde_mm_xor_si128(c2[480],simde_mm_xor_si128(c2[842],simde_mm_xor_si128(c2[5519],simde_mm_xor_si128(c2[122],simde_mm_xor_si128(c2[4476],simde_mm_xor_si128(c2[4116],simde_mm_xor_si128(c2[4478],simde_mm_xor_si128(c2[517],simde_mm_xor_si128(c2[879],simde_mm_xor_si128(c2[3391],simde_mm_xor_si128(c2[4113],simde_mm_xor_si128(c2[3753],simde_mm_xor_si128(c2[2350],simde_mm_xor_si128(c2[1990],simde_mm_xor_si128(c2[2352],simde_mm_xor_si128(c2[2706],simde_mm_xor_si128(c2[3428],simde_mm_xor_si128(c2[3068],simde_mm_xor_si128(c2[2749],simde_mm_xor_si128(c2[2389],simde_mm_xor_si128(c2[2751],simde_mm_xor_si128(c2[3107],simde_mm_xor_si128(c2[2747],simde_mm_xor_si128(c2[3109],simde_mm_xor_si128(c2[4546],simde_mm_xor_si128(c2[5268],simde_mm_xor_si128(c2[4908],simde_mm_xor_si128(c2[2424],simde_mm_xor_si128(c2[2064],simde_mm_xor_si128(c2[2426],simde_mm_xor_si128(c2[1701],simde_mm_xor_si128(c2[2423],simde_mm_xor_si128(c2[2063],simde_mm_xor_si128(c2[4573],simde_mm_xor_si128(c2[3538],simde_mm_xor_si128(c2[3178],simde_mm_xor_si128(c2[3540],simde_mm_xor_si128(c2[1021],simde_mm_xor_si128(c2[1383],simde_mm_xor_si128(c2[1382],simde_mm_xor_si128(c2[2104],simde_mm_xor_si128(c2[1744],simde_mm_xor_si128(c2[1418],simde_mm_xor_si128(c2[1058],simde_mm_xor_si128(c2[1420],simde_mm_xor_si128(c2[2493],simde_mm_xor_si128(c2[2133],simde_mm_xor_si128(c2[2495],simde_mm_xor_si128(c2[1055],simde_mm_xor_si128(c2[1777],c2[1417])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 27
+     d2[486]=simde_mm_xor_si128(c2[4680],c2[3816]);
+
+//row: 28
+     d2[504]=simde_mm_xor_si128(c2[3642],simde_mm_xor_si128(c2[4756],c2[4142]));
+
+//row: 29
+     d2[522]=simde_mm_xor_si128(c2[1808],c2[3385]);
+
+//row: 30
+     d2[540]=simde_mm_xor_si128(c2[2597],simde_mm_xor_si128(c2[3063],simde_mm_xor_si128(c2[3138],c2[2488])));
+
+//row: 31
+     d2[558]=simde_mm_xor_si128(c2[4688],simde_mm_xor_si128(c2[4689],simde_mm_xor_si128(c2[3254],simde_mm_xor_si128(c2[1486],simde_mm_xor_si128(c2[1491],simde_mm_xor_si128(c2[1132],simde_mm_xor_si128(c2[772],simde_mm_xor_si128(c2[41],simde_mm_xor_si128(c2[5483],simde_mm_xor_si128(c2[1528],simde_mm_xor_si128(c2[1168],simde_mm_xor_si128(c2[4798],simde_mm_xor_si128(c2[3002],simde_mm_xor_si128(c2[2282],simde_mm_xor_si128(c2[879],simde_mm_xor_si128(c2[3039],simde_mm_xor_si128(c2[514],simde_mm_xor_si128(c2[154],simde_mm_xor_si128(c2[4512],simde_mm_xor_si128(c2[5588],simde_mm_xor_si128(c2[5228],simde_mm_xor_si128(c2[4911],simde_mm_xor_si128(c2[5269],simde_mm_xor_si128(c2[1669],simde_mm_xor_si128(c2[1309],simde_mm_xor_si128(c2[4586],simde_mm_xor_si128(c2[4583],simde_mm_xor_si128(c2[4223],simde_mm_xor_si128(c2[5700],simde_mm_xor_si128(c2[3543],simde_mm_xor_si128(c2[4264],simde_mm_xor_si128(c2[3904],simde_mm_xor_si128(c2[3580],simde_mm_xor_si128(c2[4655],simde_mm_xor_si128(c2[3937],c2[3577])))))))))))))))))))))))))))))))))));
+
+//row: 32
+     d2[576]=simde_mm_xor_si128(c2[3604],simde_mm_xor_si128(c2[3244],simde_mm_xor_si128(c2[3605],simde_mm_xor_si128(c2[3245],simde_mm_xor_si128(c2[1810],simde_mm_xor_si128(c2[2],simde_mm_xor_si128(c2[402],simde_mm_xor_si128(c2[42],simde_mm_xor_si128(c2[47],simde_mm_xor_si128(c2[5087],simde_mm_xor_si128(c2[4399],simde_mm_xor_si128(c2[4039],simde_mm_xor_si128(c2[5483],simde_mm_xor_si128(c2[3714],simde_mm_xor_si128(c2[3354],simde_mm_xor_si128(c2[1918],simde_mm_xor_si128(c2[1558],simde_mm_xor_si128(c2[838],simde_mm_xor_si128(c2[5554],simde_mm_xor_si128(c2[5194],simde_mm_xor_si128(c2[1595],simde_mm_xor_si128(c2[4469],simde_mm_xor_si128(c2[3428],simde_mm_xor_si128(c2[3068],simde_mm_xor_si128(c2[3784],simde_mm_xor_si128(c2[3425],simde_mm_xor_si128(c2[3827],simde_mm_xor_si128(c2[3467],simde_mm_xor_si128(c2[4185],simde_mm_xor_si128(c2[3825],simde_mm_xor_si128(c2[5624],simde_mm_xor_si128(c2[3502],simde_mm_xor_si128(c2[3142],simde_mm_xor_si128(c2[2779],simde_mm_xor_si128(c2[4616],simde_mm_xor_si128(c2[4256],simde_mm_xor_si128(c2[2099],simde_mm_xor_si128(c2[2460],simde_mm_xor_si128(c2[2496],simde_mm_xor_si128(c2[2136],simde_mm_xor_si128(c2[3571],simde_mm_xor_si128(c2[3211],c2[2133]))))))))))))))))))))))))))))))))))))))))));
+
+//row: 33
+     d2[594]=simde_mm_xor_si128(c2[1440],simde_mm_xor_si128(c2[1441],simde_mm_xor_si128(c2[6],simde_mm_xor_si128(c2[3997],simde_mm_xor_si128(c2[4002],simde_mm_xor_si128(c2[3283],simde_mm_xor_si128(c2[2235],simde_mm_xor_si128(c2[3679],simde_mm_xor_si128(c2[2600],simde_mm_xor_si128(c2[1550],simde_mm_xor_si128(c2[5513],simde_mm_xor_si128(c2[4793],simde_mm_xor_si128(c2[3390],simde_mm_xor_si128(c2[5550],simde_mm_xor_si128(c2[2665],simde_mm_xor_si128(c2[1264],simde_mm_xor_si128(c2[1980],simde_mm_xor_si128(c2[1663],simde_mm_xor_si128(c2[2021],simde_mm_xor_si128(c2[3820],simde_mm_xor_si128(c2[1338],simde_mm_xor_si128(c2[975],simde_mm_xor_si128(c2[2412],simde_mm_xor_si128(c2[2452],simde_mm_xor_si128(c2[295],simde_mm_xor_si128(c2[656],simde_mm_xor_si128(c2[332],simde_mm_xor_si128(c2[1407],c2[329]))))))))))))))))))))))))))));
+
+//row: 34
+     d2[612]=simde_mm_xor_si128(c2[4321],simde_mm_xor_si128(c2[3961],simde_mm_xor_si128(c2[1447],simde_mm_xor_si128(c2[4322],simde_mm_xor_si128(c2[3962],simde_mm_xor_si128(c2[1448],simde_mm_xor_si128(c2[2527],simde_mm_xor_si128(c2[13],simde_mm_xor_si128(c2[1802],simde_mm_xor_si128(c2[1119],simde_mm_xor_si128(c2[759],simde_mm_xor_si128(c2[4004],simde_mm_xor_si128(c2[764],simde_mm_xor_si128(c2[4009],simde_mm_xor_si128(c2[45],simde_mm_xor_si128(c2[3650],simde_mm_xor_si128(c2[3290],simde_mm_xor_si128(c2[5116],simde_mm_xor_si128(c2[4756],simde_mm_xor_si128(c2[2242],simde_mm_xor_si128(c2[441],simde_mm_xor_si128(c2[4046],simde_mm_xor_si128(c2[3686],simde_mm_xor_si128(c2[4431],simde_mm_xor_si128(c2[4071],simde_mm_xor_si128(c2[1557],simde_mm_xor_si128(c2[2635],simde_mm_xor_si128(c2[2275],simde_mm_xor_si128(c2[5520],simde_mm_xor_si128(c2[1555],simde_mm_xor_si128(c2[4800],simde_mm_xor_si128(c2[512],simde_mm_xor_si128(c2[152],simde_mm_xor_si128(c2[3397],simde_mm_xor_si128(c2[2312],simde_mm_xor_si128(c2[5557],simde_mm_xor_si128(c2[5186],simde_mm_xor_si128(c2[3032],simde_mm_xor_si128(c2[2672],simde_mm_xor_si128(c2[4145],simde_mm_xor_si128(c2[3785],simde_mm_xor_si128(c2[1271],simde_mm_xor_si128(c2[4501],simde_mm_xor_si128(c2[2347],simde_mm_xor_si128(c2[1987],simde_mm_xor_si128(c2[4544],simde_mm_xor_si128(c2[4184],simde_mm_xor_si128(c2[1670],simde_mm_xor_si128(c2[4902],simde_mm_xor_si128(c2[4542],simde_mm_xor_si128(c2[2028],simde_mm_xor_si128(c2[582],simde_mm_xor_si128(c2[4187],simde_mm_xor_si128(c2[3827],simde_mm_xor_si128(c2[4219],simde_mm_xor_si128(c2[3859],simde_mm_xor_si128(c2[1345],simde_mm_xor_si128(c2[3496],simde_mm_xor_si128(c2[1342],simde_mm_xor_si128(c2[982],simde_mm_xor_si128(c2[5333],simde_mm_xor_si128(c2[4973],simde_mm_xor_si128(c2[2459],simde_mm_xor_si128(c2[2816],simde_mm_xor_si128(c2[302],simde_mm_xor_si128(c2[3177],simde_mm_xor_si128(c2[1023],simde_mm_xor_si128(c2[663],simde_mm_xor_si128(c2[3213],simde_mm_xor_si128(c2[2853],simde_mm_xor_si128(c2[339],simde_mm_xor_si128(c2[4288],simde_mm_xor_si128(c2[3928],simde_mm_xor_si128(c2[1414],simde_mm_xor_si128(c2[2850],simde_mm_xor_si128(c2[696],c2[336]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 35
+     d2[630]=simde_mm_xor_si128(c2[6],simde_mm_xor_si128(c2[5405],simde_mm_xor_si128(c2[5406],simde_mm_xor_si128(c2[3971],simde_mm_xor_si128(c2[2563],simde_mm_xor_si128(c2[2203],simde_mm_xor_si128(c2[2208],simde_mm_xor_si128(c2[1489],simde_mm_xor_si128(c2[4359],simde_mm_xor_si128(c2[801],simde_mm_xor_si128(c2[441],simde_mm_xor_si128(c2[1885],simde_mm_xor_si128(c2[116],simde_mm_xor_si128(c2[5515],simde_mm_xor_si128(c2[3719],simde_mm_xor_si128(c2[2999],simde_mm_xor_si128(c2[1596],simde_mm_xor_si128(c2[3756],simde_mm_xor_si128(c2[871],simde_mm_xor_si128(c2[5229],simde_mm_xor_si128(c2[186],simde_mm_xor_si128(c2[3428],simde_mm_xor_si128(c2[229],simde_mm_xor_si128(c2[5628],simde_mm_xor_si128(c2[227],simde_mm_xor_si128(c2[2026],simde_mm_xor_si128(c2[5303],simde_mm_xor_si128(c2[4940],simde_mm_xor_si128(c2[658],simde_mm_xor_si128(c2[4260],simde_mm_xor_si128(c2[4621],simde_mm_xor_si128(c2[4657],simde_mm_xor_si128(c2[4297],simde_mm_xor_si128(c2[5372],c2[4294]))))))))))))))))))))))))))))))))));
+
+//row: 36
+     d2[648]=simde_mm_xor_si128(c2[3247],simde_mm_xor_si128(c2[440],c2[3853]));
+
+//row: 37
+     d2[666]=simde_mm_xor_si128(c2[1086],simde_mm_xor_si128(c2[3603],simde_mm_xor_si128(c2[1087],simde_mm_xor_si128(c2[3604],simde_mm_xor_si128(c2[5411],simde_mm_xor_si128(c2[2169],simde_mm_xor_si128(c2[3643],simde_mm_xor_si128(c2[401],simde_mm_xor_si128(c2[3648],simde_mm_xor_si128(c2[406],simde_mm_xor_si128(c2[2929],simde_mm_xor_si128(c2[47],simde_mm_xor_si128(c2[5446],simde_mm_xor_si128(c2[1881],simde_mm_xor_si128(c2[4398],simde_mm_xor_si128(c2[3325],simde_mm_xor_si128(c2[443],simde_mm_xor_si128(c2[83],simde_mm_xor_si128(c2[1196],simde_mm_xor_si128(c2[3713],simde_mm_xor_si128(c2[5159],simde_mm_xor_si128(c2[1917],simde_mm_xor_si128(c2[4439],simde_mm_xor_si128(c2[1197],simde_mm_xor_si128(c2[3036],simde_mm_xor_si128(c2[5553],simde_mm_xor_si128(c2[5196],simde_mm_xor_si128(c2[1954],simde_mm_xor_si128(c2[2311],simde_mm_xor_si128(c2[5188],simde_mm_xor_si128(c2[4828],simde_mm_xor_si128(c2[910],simde_mm_xor_si128(c2[3427],simde_mm_xor_si128(c2[1626],simde_mm_xor_si128(c2[4503],simde_mm_xor_si128(c2[4143],simde_mm_xor_si128(c2[1309],simde_mm_xor_si128(c2[3826],simde_mm_xor_si128(c2[1667],simde_mm_xor_si128(c2[4184],simde_mm_xor_si128(c2[3466],simde_mm_xor_si128(c2[584],simde_mm_xor_si128(c2[224],simde_mm_xor_si128(c2[984],simde_mm_xor_si128(c2[3501],simde_mm_xor_si128(c2[621],simde_mm_xor_si128(c2[3498],simde_mm_xor_si128(c2[3138],simde_mm_xor_si128(c2[2098],simde_mm_xor_si128(c2[4615],simde_mm_xor_si128(c2[5700],simde_mm_xor_si128(c2[2458],simde_mm_xor_si128(c2[302],simde_mm_xor_si128(c2[3179],simde_mm_xor_si128(c2[2819],simde_mm_xor_si128(c2[5737],simde_mm_xor_si128(c2[2495],simde_mm_xor_si128(c2[1053],simde_mm_xor_si128(c2[3570],simde_mm_xor_si128(c2[5734],simde_mm_xor_si128(c2[2852],c2[2492])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 38
+     d2[684]=simde_mm_xor_si128(c2[1440],simde_mm_xor_si128(c2[1080],simde_mm_xor_si128(c2[1081],simde_mm_xor_si128(c2[5405],simde_mm_xor_si128(c2[3997],simde_mm_xor_si128(c2[3637],simde_mm_xor_si128(c2[3642],simde_mm_xor_si128(c2[2923],simde_mm_xor_si128(c2[396],simde_mm_xor_si128(c2[2235],simde_mm_xor_si128(c2[1875],simde_mm_xor_si128(c2[3319],simde_mm_xor_si128(c2[1550],simde_mm_xor_si128(c2[1190],simde_mm_xor_si128(c2[5153],simde_mm_xor_si128(c2[4433],simde_mm_xor_si128(c2[3030],simde_mm_xor_si128(c2[5190],simde_mm_xor_si128(c2[2305],simde_mm_xor_si128(c2[904],simde_mm_xor_si128(c2[1620],simde_mm_xor_si128(c2[2344],simde_mm_xor_si128(c2[1663],simde_mm_xor_si128(c2[1303],simde_mm_xor_si128(c2[1661],simde_mm_xor_si128(c2[3460],simde_mm_xor_si128(c2[978],simde_mm_xor_si128(c2[615],simde_mm_xor_si128(c2[2092],simde_mm_xor_si128(c2[5694],simde_mm_xor_si128(c2[296],simde_mm_xor_si128(c2[332],simde_mm_xor_si128(c2[5731],simde_mm_xor_si128(c2[1047],c2[5728]))))))))))))))))))))))))))))))))));
+
+//row: 39
+     d2[702]=simde_mm_xor_si128(c2[369],simde_mm_xor_si128(c2[9],simde_mm_xor_si128(c2[370],simde_mm_xor_si128(c2[10],simde_mm_xor_si128(c2[4334],simde_mm_xor_si128(c2[2527],simde_mm_xor_si128(c2[2926],simde_mm_xor_si128(c2[2566],simde_mm_xor_si128(c2[2571],simde_mm_xor_si128(c2[1852],simde_mm_xor_si128(c2[1164],simde_mm_xor_si128(c2[804],simde_mm_xor_si128(c2[2248],simde_mm_xor_si128(c2[479],simde_mm_xor_si128(c2[119],simde_mm_xor_si128(c2[4442],simde_mm_xor_si128(c2[4082],simde_mm_xor_si128(c2[3362],simde_mm_xor_si128(c2[2319],simde_mm_xor_si128(c2[1959],simde_mm_xor_si128(c2[4119],simde_mm_xor_si128(c2[1234],simde_mm_xor_si128(c2[193],simde_mm_xor_si128(c2[5592],simde_mm_xor_si128(c2[549],simde_mm_xor_si128(c2[592],simde_mm_xor_si128(c2[232],simde_mm_xor_si128(c2[950],simde_mm_xor_si128(c2[590],simde_mm_xor_si128(c2[2389],simde_mm_xor_si128(c2[267],simde_mm_xor_si128(c2[5666],simde_mm_xor_si128(c2[5303],simde_mm_xor_si128(c2[254],simde_mm_xor_si128(c2[1381],simde_mm_xor_si128(c2[1021],simde_mm_xor_si128(c2[4623],simde_mm_xor_si128(c2[4984],simde_mm_xor_si128(c2[5020],simde_mm_xor_si128(c2[4660],simde_mm_xor_si128(c2[336],simde_mm_xor_si128(c2[5735],c2[4657]))))))))))))))))))))))))))))))))))))))))));
+
+//row: 40
+     d2[720]=simde_mm_xor_si128(c2[3964],simde_mm_xor_si128(c2[2886],simde_mm_xor_si128(c2[3965],simde_mm_xor_si128(c2[2887],simde_mm_xor_si128(c2[2530],simde_mm_xor_si128(c2[1452],simde_mm_xor_si128(c2[762],simde_mm_xor_si128(c2[5443],simde_mm_xor_si128(c2[767],simde_mm_xor_si128(c2[5448],simde_mm_xor_si128(c2[48],simde_mm_xor_si128(c2[5089],simde_mm_xor_si128(c2[4729],simde_mm_xor_si128(c2[4759],simde_mm_xor_si128(c2[3681],simde_mm_xor_si128(c2[444],simde_mm_xor_si128(c2[5485],simde_mm_xor_si128(c2[5125],simde_mm_xor_si128(c2[2232],simde_mm_xor_si128(c2[4074],simde_mm_xor_si128(c2[2996],simde_mm_xor_si128(c2[2278],simde_mm_xor_si128(c2[1200],simde_mm_xor_si128(c2[1558],simde_mm_xor_si128(c2[480],simde_mm_xor_si128(c2[155],simde_mm_xor_si128(c2[4836],simde_mm_xor_si128(c2[2315],simde_mm_xor_si128(c2[1237],simde_mm_xor_si128(c2[5189],simde_mm_xor_si128(c2[4471],simde_mm_xor_si128(c2[4111],simde_mm_xor_si128(c2[3788],simde_mm_xor_si128(c2[2710],simde_mm_xor_si128(c2[4504],simde_mm_xor_si128(c2[3786],simde_mm_xor_si128(c2[3426],simde_mm_xor_si128(c2[4187],simde_mm_xor_si128(c2[3109],simde_mm_xor_si128(c2[4545],simde_mm_xor_si128(c2[3467],simde_mm_xor_si128(c2[585],simde_mm_xor_si128(c2[5626],simde_mm_xor_si128(c2[5266],simde_mm_xor_si128(c2[3862],simde_mm_xor_si128(c2[2784],simde_mm_xor_si128(c2[3499],simde_mm_xor_si128(c2[2781],simde_mm_xor_si128(c2[2421],simde_mm_xor_si128(c2[4976],simde_mm_xor_si128(c2[3898],simde_mm_xor_si128(c2[2819],simde_mm_xor_si128(c2[1741],simde_mm_xor_si128(c2[3180],simde_mm_xor_si128(c2[2462],simde_mm_xor_si128(c2[2102],simde_mm_xor_si128(c2[2856],simde_mm_xor_si128(c2[1778],simde_mm_xor_si128(c2[3931],simde_mm_xor_si128(c2[2853],simde_mm_xor_si128(c2[2853],simde_mm_xor_si128(c2[2135],c2[1775]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 41
+     d2[738]=simde_mm_xor_si128(c2[2161],simde_mm_xor_si128(c2[1801],simde_mm_xor_si128(c2[1802],simde_mm_xor_si128(c2[367],simde_mm_xor_si128(c2[4718],simde_mm_xor_si128(c2[4358],simde_mm_xor_si128(c2[4363],simde_mm_xor_si128(c2[3644],simde_mm_xor_si128(c2[39],simde_mm_xor_si128(c2[2956],simde_mm_xor_si128(c2[2596],simde_mm_xor_si128(c2[4040],simde_mm_xor_si128(c2[2271],simde_mm_xor_si128(c2[1911],simde_mm_xor_si128(c2[115],simde_mm_xor_si128(c2[5154],simde_mm_xor_si128(c2[3751],simde_mm_xor_si128(c2[152],simde_mm_xor_si128(c2[3026],simde_mm_xor_si128(c2[1625],simde_mm_xor_si128(c2[2341],simde_mm_xor_si128(c2[5582],simde_mm_xor_si128(c2[2384],simde_mm_xor_si128(c2[2024],simde_mm_xor_si128(c2[2382],simde_mm_xor_si128(c2[4181],simde_mm_xor_si128(c2[1699],simde_mm_xor_si128(c2[1336],simde_mm_xor_si128(c2[2813],simde_mm_xor_si128(c2[656],simde_mm_xor_si128(c2[1017],simde_mm_xor_si128(c2[1053],simde_mm_xor_si128(c2[693],simde_mm_xor_si128(c2[1768],c2[690]))))))))))))))))))))))))))))))))));
+  }
+}
+#endif
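
Each generated row above is one parity equation of the LDPC base graph, fully unrolled: d2[slot] gets the XOR of circulant-rotated message chunks c2[i], with the rotation offsets baked into the indices by the code generator. A loop-form sketch of what one such line computes (illustrative only; the generator emits the chain unrolled, and xor_reduce/idx are hypothetical names):

    #include <simde/x86/sse2.h>

    /* Loop form of one generated row:
       d2[slot] = c2[idx[0]] ^ c2[idx[1]] ^ ... ^ c2[idx[n-1]] */
    static simde__m128i xor_reduce(const simde__m128i *c2, const int *idx, int n) {
      simde__m128i acc = c2[idx[0]];
      for (int j = 1; j < n; j++)
        acc = simde_mm_xor_si128(acc, c2[idx[j]]);  /* 128-bit lane-wise XOR */
      return acc;
    }
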
diff --git a/openair1/PHY/CODING/nrLDPC_encoder/ldpc_BG2_Zc320_byte.c b/openair1/PHY/CODING/nrLDPC_encoder/ldpc_BG2_Zc320_byte.c
index 29a977a3de01ae51c656b2e353debf6d436b2b81..87dd299de15ddc8d94be8eecdde4c75016d2a47e 100644
--- a/openair1/PHY/CODING/nrLDPC_encoder/ldpc_BG2_Zc320_byte.c
+++ b/openair1/PHY/CODING/nrLDPC_encoder/ldpc_BG2_Zc320_byte.c
@@ -1,9 +1,9 @@
+#ifdef __AVX2__
 #include "PHY/sse_intrin.h"
 // generated code for Zc=320, byte encoding
 static inline __attribute__ ((no_sanitize("address", "undefined"))) void ldpc_BG2_Zc320_byte(uint8_t *c,uint8_t *d) {
-  __m256i *csimd=(__m256i *)c,*dsimd=(__m256i *)d;
-
-  __m256i *c2,*d2;
+  simde__m256i *csimd=(simde__m256i *)c,*dsimd=(simde__m256i *)d;
+  simde__m256i *c2,*d2;
 
   int i2;
   for (i2=0; i2<10; i2++) {
@@ -137,3 +137,4 @@ static inline __attribute__ ((no_sanitize("address", "undefined"))) void ldpc_BG
      d2[410]=simde_mm256_xor_si256(c2[4601],simde_mm256_xor_si256(c2[4401],simde_mm256_xor_si256(c2[6205],simde_mm256_xor_si256(c2[2002],simde_mm256_xor_si256(c2[4621],simde_mm256_xor_si256(c2[4421],simde_mm256_xor_si256(c2[4024],simde_mm256_xor_si256(c2[5622],simde_mm256_xor_si256(c2[20],simde_mm256_xor_si256(c2[4641],simde_mm256_xor_si256(c2[4441],simde_mm256_xor_si256(c2[1042],simde_mm256_xor_si256(c2[4661],simde_mm256_xor_si256(c2[4461],simde_mm256_xor_si256(c2[3665],simde_mm256_xor_si256(c2[5064],simde_mm256_xor_si256(c2[4481],simde_mm256_xor_si256(c2[6281],simde_mm256_xor_si256(c2[5684],simde_mm256_xor_si256(c2[4501],simde_mm256_xor_si256(c2[1706],simde_mm256_xor_si256(c2[500],simde_mm256_xor_si256(c2[4721],simde_mm256_xor_si256(c2[4521],simde_mm256_xor_si256(c2[2924],simde_mm256_xor_si256(c2[522],simde_mm256_xor_si256(c2[4541],simde_mm256_xor_si256(c2[3143],simde_mm256_xor_si256(c2[4561],simde_mm256_xor_si256(c2[565],simde_mm256_xor_si256(c2[2363],simde_mm256_xor_si256(c2[4781],simde_mm256_xor_si256(c2[4581],simde_mm256_xor_si256(c2[2783],c2[983]))))))))))))))))))))))))))))))))));
   }
 }
+#endif
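
The #ifdef __AVX2__ added at the top of this file pairs with the #ifndef __AVX2__ opening the new ldpc_BG2_Zc320_byte_128.c below, so exactly one definition of ldpc_BG2_Zc320_byte survives preprocessing in any given build and callers need no runtime dispatch. A generic sketch of the pattern (simd_width_bytes is a hypothetical name, not part of the patch):

    /* Two guarded definitions of one symbol, mirroring how the __AVX2__
       file and the _128 fallback pair up. */
    #ifdef __AVX2__
    static inline int simd_width_bytes(void) { return 32; }  /* simde__m256i path */
    #else
    static inline int simd_width_bytes(void) { return 16; }  /* simde__m128i path */
    #endif
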
diff --git a/openair1/PHY/CODING/nrLDPC_encoder/ldpc_BG2_Zc320_byte_128.c b/openair1/PHY/CODING/nrLDPC_encoder/ldpc_BG2_Zc320_byte_128.c
new file mode 100644
index 0000000000000000000000000000000000000000..d8e16c9f9b2dccb8e8d755dbce9398fcdced7e99
--- /dev/null
+++ b/openair1/PHY/CODING/nrLDPC_encoder/ldpc_BG2_Zc320_byte_128.c
@@ -0,0 +1,141 @@
+#ifndef __AVX2__
+#include "PHY/sse_intrin.h"
+// generated code for Zc=320, byte encoding
+static inline __attribute__ ((no_sanitize("address", "undefined"))) void ldpc_BG2_Zc320_byte(uint8_t *c,uint8_t *d) {
+  simde__m128i *csimd=(simde__m128i *)c,*dsimd=(simde__m128i *)d;
+
+  simde__m128i *c2,*d2;
+
+  int i2;
+  for (i2=0; i2<20; i2++) {
+     c2=&csimd[i2];
+     d2=&dsimd[i2];
+
+//row: 0
+     d2[0]=simde_mm_xor_si128(c2[6019],simde_mm_xor_si128(c2[3208],simde_mm_xor_si128(c2[1201],simde_mm_xor_si128(c2[6059],simde_mm_xor_si128(c2[5245],simde_mm_xor_si128(c2[2042],simde_mm_xor_si128(c2[6099],simde_mm_xor_si128(c2[5680],simde_mm_xor_si128(c2[6139],simde_mm_xor_si128(c2[4527],simde_mm_xor_si128(c2[926],simde_mm_xor_si128(c2[6179],simde_mm_xor_si128(c2[3360],simde_mm_xor_si128(c2[2166],simde_mm_xor_si128(c2[6219],simde_mm_xor_si128(c2[609],simde_mm_xor_si128(c2[6259],simde_mm_xor_si128(c2[3045],simde_mm_xor_si128(c2[4640],simde_mm_xor_si128(c2[6299],simde_mm_xor_si128(c2[3483],simde_mm_xor_si128(c2[6339],simde_mm_xor_si128(c2[4726],simde_mm_xor_si128(c2[1923],simde_mm_xor_si128(c2[6379],simde_mm_xor_si128(c2[2763],c2[5562]))))))))))))))))))))))))));
+
+//row: 1
+     d2[20]=simde_mm_xor_si128(c2[0],simde_mm_xor_si128(c2[6019],simde_mm_xor_si128(c2[3208],simde_mm_xor_si128(c2[1201],simde_mm_xor_si128(c2[40],simde_mm_xor_si128(c2[6059],simde_mm_xor_si128(c2[5245],simde_mm_xor_si128(c2[2042],simde_mm_xor_si128(c2[80],simde_mm_xor_si128(c2[6099],simde_mm_xor_si128(c2[5680],simde_mm_xor_si128(c2[120],simde_mm_xor_si128(c2[6139],simde_mm_xor_si128(c2[4527],simde_mm_xor_si128(c2[926],simde_mm_xor_si128(c2[6179],simde_mm_xor_si128(c2[3360],simde_mm_xor_si128(c2[2166],simde_mm_xor_si128(c2[6219],simde_mm_xor_si128(c2[609],simde_mm_xor_si128(c2[240],simde_mm_xor_si128(c2[6259],simde_mm_xor_si128(c2[3045],simde_mm_xor_si128(c2[4640],simde_mm_xor_si128(c2[6299],simde_mm_xor_si128(c2[3483],simde_mm_xor_si128(c2[6339],simde_mm_xor_si128(c2[4726],simde_mm_xor_si128(c2[1923],simde_mm_xor_si128(c2[360],simde_mm_xor_si128(c2[6379],simde_mm_xor_si128(c2[2763],c2[5562]))))))))))))))))))))))))))))))));
+
+//row: 2
+     d2[40]=simde_mm_xor_si128(c2[0],simde_mm_xor_si128(c2[6019],simde_mm_xor_si128(c2[3608],simde_mm_xor_si128(c2[3208],simde_mm_xor_si128(c2[1201],simde_mm_xor_si128(c2[40],simde_mm_xor_si128(c2[6059],simde_mm_xor_si128(c2[5245],simde_mm_xor_si128(c2[2042],simde_mm_xor_si128(c2[80],simde_mm_xor_si128(c2[6099],simde_mm_xor_si128(c2[5680],simde_mm_xor_si128(c2[120],simde_mm_xor_si128(c2[6139],simde_mm_xor_si128(c2[4927],simde_mm_xor_si128(c2[4527],simde_mm_xor_si128(c2[926],simde_mm_xor_si128(c2[160],simde_mm_xor_si128(c2[6179],simde_mm_xor_si128(c2[3360],simde_mm_xor_si128(c2[2166],simde_mm_xor_si128(c2[200],simde_mm_xor_si128(c2[6219],simde_mm_xor_si128(c2[609],simde_mm_xor_si128(c2[240],simde_mm_xor_si128(c2[6259],simde_mm_xor_si128(c2[3445],simde_mm_xor_si128(c2[3045],simde_mm_xor_si128(c2[4640],simde_mm_xor_si128(c2[280],simde_mm_xor_si128(c2[6299],simde_mm_xor_si128(c2[3483],simde_mm_xor_si128(c2[320],simde_mm_xor_si128(c2[6339],simde_mm_xor_si128(c2[4726],simde_mm_xor_si128(c2[1923],simde_mm_xor_si128(c2[360],simde_mm_xor_si128(c2[6379],simde_mm_xor_si128(c2[3163],simde_mm_xor_si128(c2[2763],c2[5562]))))))))))))))))))))))))))))))))))))))));
+
+//row: 3
+     d2[60]=simde_mm_xor_si128(c2[6019],simde_mm_xor_si128(c2[3208],simde_mm_xor_si128(c2[1201],simde_mm_xor_si128(c2[6059],simde_mm_xor_si128(c2[5245],simde_mm_xor_si128(c2[2442],simde_mm_xor_si128(c2[2042],simde_mm_xor_si128(c2[6099],simde_mm_xor_si128(c2[6080],simde_mm_xor_si128(c2[5680],simde_mm_xor_si128(c2[6139],simde_mm_xor_si128(c2[4527],simde_mm_xor_si128(c2[926],simde_mm_xor_si128(c2[6179],simde_mm_xor_si128(c2[3360],simde_mm_xor_si128(c2[2566],simde_mm_xor_si128(c2[2166],simde_mm_xor_si128(c2[6219],simde_mm_xor_si128(c2[1009],simde_mm_xor_si128(c2[609],simde_mm_xor_si128(c2[6259],simde_mm_xor_si128(c2[3045],simde_mm_xor_si128(c2[5040],simde_mm_xor_si128(c2[4640],simde_mm_xor_si128(c2[6299],simde_mm_xor_si128(c2[3883],simde_mm_xor_si128(c2[3483],simde_mm_xor_si128(c2[6339],simde_mm_xor_si128(c2[4726],simde_mm_xor_si128(c2[2323],simde_mm_xor_si128(c2[1923],simde_mm_xor_si128(c2[6379],simde_mm_xor_si128(c2[2763],simde_mm_xor_si128(c2[5962],c2[5562]))))))))))))))))))))))))))))))))));
+
+//row: 4
+     d2[80]=simde_mm_xor_si128(c2[5209],simde_mm_xor_si128(c2[4809],simde_mm_xor_si128(c2[2018],simde_mm_xor_si128(c2[11],simde_mm_xor_si128(c2[0],simde_mm_xor_si128(c2[5249],simde_mm_xor_si128(c2[4849],simde_mm_xor_si128(c2[4055],simde_mm_xor_si128(c2[852],simde_mm_xor_si128(c2[3248],simde_mm_xor_si128(c2[5289],simde_mm_xor_si128(c2[4889],simde_mm_xor_si128(c2[4490],simde_mm_xor_si128(c2[5329],simde_mm_xor_si128(c2[4929],simde_mm_xor_si128(c2[3337],simde_mm_xor_si128(c2[6135],simde_mm_xor_si128(c2[4969],simde_mm_xor_si128(c2[2170],simde_mm_xor_si128(c2[976],simde_mm_xor_si128(c2[5009],simde_mm_xor_si128(c2[5818],simde_mm_xor_si128(c2[5449],simde_mm_xor_si128(c2[5049],simde_mm_xor_si128(c2[1855],simde_mm_xor_si128(c2[3450],simde_mm_xor_si128(c2[5089],simde_mm_xor_si128(c2[2293],simde_mm_xor_si128(c2[5129],simde_mm_xor_si128(c2[3536],simde_mm_xor_si128(c2[733],simde_mm_xor_si128(c2[5569],simde_mm_xor_si128(c2[5169],simde_mm_xor_si128(c2[1573],c2[4372]))))))))))))))))))))))))))))))))));
+
+//row: 5
+     d2[100]=simde_mm_xor_si128(c2[4],simde_mm_xor_si128(c2[6003],simde_mm_xor_si128(c2[3212],simde_mm_xor_si128(c2[1205],simde_mm_xor_si128(c2[0],simde_mm_xor_si128(c2[44],simde_mm_xor_si128(c2[6043],simde_mm_xor_si128(c2[5249],simde_mm_xor_si128(c2[2046],simde_mm_xor_si128(c2[1248],simde_mm_xor_si128(c2[84],simde_mm_xor_si128(c2[6083],simde_mm_xor_si128(c2[5684],simde_mm_xor_si128(c2[124],simde_mm_xor_si128(c2[6123],simde_mm_xor_si128(c2[4531],simde_mm_xor_si128(c2[930],simde_mm_xor_si128(c2[6163],simde_mm_xor_si128(c2[3364],simde_mm_xor_si128(c2[2170],simde_mm_xor_si128(c2[6203],simde_mm_xor_si128(c2[613],simde_mm_xor_si128(c2[5808],simde_mm_xor_si128(c2[244],simde_mm_xor_si128(c2[6243],simde_mm_xor_si128(c2[3049],simde_mm_xor_si128(c2[4644],simde_mm_xor_si128(c2[6283],simde_mm_xor_si128(c2[3487],simde_mm_xor_si128(c2[5488],simde_mm_xor_si128(c2[6323],simde_mm_xor_si128(c2[4730],simde_mm_xor_si128(c2[1927],simde_mm_xor_si128(c2[364],simde_mm_xor_si128(c2[6363],simde_mm_xor_si128(c2[2767],c2[5566]))))))))))))))))))))))))))))))))))));
+
+//row: 6
+     d2[120]=simde_mm_xor_si128(c2[1609],simde_mm_xor_si128(c2[1209],simde_mm_xor_si128(c2[4817],simde_mm_xor_si128(c2[2810],simde_mm_xor_si128(c2[0],simde_mm_xor_si128(c2[1649],simde_mm_xor_si128(c2[1249],simde_mm_xor_si128(c2[455],simde_mm_xor_si128(c2[3651],simde_mm_xor_si128(c2[1689],simde_mm_xor_si128(c2[1289],simde_mm_xor_si128(c2[890],simde_mm_xor_si128(c2[1729],simde_mm_xor_si128(c2[1329],simde_mm_xor_si128(c2[6136],simde_mm_xor_si128(c2[2535],simde_mm_xor_si128(c2[1369],simde_mm_xor_si128(c2[4969],simde_mm_xor_si128(c2[3775],simde_mm_xor_si128(c2[1409],simde_mm_xor_si128(c2[2218],simde_mm_xor_si128(c2[5007],simde_mm_xor_si128(c2[1849],simde_mm_xor_si128(c2[1449],simde_mm_xor_si128(c2[4654],simde_mm_xor_si128(c2[6249],simde_mm_xor_si128(c2[1489],simde_mm_xor_si128(c2[5092],simde_mm_xor_si128(c2[1486],simde_mm_xor_si128(c2[1529],simde_mm_xor_si128(c2[6335],simde_mm_xor_si128(c2[3532],simde_mm_xor_si128(c2[1969],simde_mm_xor_si128(c2[1569],simde_mm_xor_si128(c2[4372],simde_mm_xor_si128(c2[772],c2[5562]))))))))))))))))))))))))))))))))))));
+
+//row: 7
+     d2[140]=simde_mm_xor_si128(c2[6],simde_mm_xor_si128(c2[6005],simde_mm_xor_si128(c2[5204],simde_mm_xor_si128(c2[3214],simde_mm_xor_si128(c2[2413],simde_mm_xor_si128(c2[1207],simde_mm_xor_si128(c2[406],simde_mm_xor_si128(c2[46],simde_mm_xor_si128(c2[6045],simde_mm_xor_si128(c2[5244],simde_mm_xor_si128(c2[5251],simde_mm_xor_si128(c2[4450],simde_mm_xor_si128(c2[2048],simde_mm_xor_si128(c2[1647],simde_mm_xor_si128(c2[1247],simde_mm_xor_si128(c2[40],simde_mm_xor_si128(c2[86],simde_mm_xor_si128(c2[6085],simde_mm_xor_si128(c2[5284],simde_mm_xor_si128(c2[5686],simde_mm_xor_si128(c2[5285],simde_mm_xor_si128(c2[4885],simde_mm_xor_si128(c2[126],simde_mm_xor_si128(c2[6125],simde_mm_xor_si128(c2[5324],simde_mm_xor_si128(c2[4533],simde_mm_xor_si128(c2[3732],simde_mm_xor_si128(c2[932],simde_mm_xor_si128(c2[131],simde_mm_xor_si128(c2[6165],simde_mm_xor_si128(c2[5364],simde_mm_xor_si128(c2[3366],simde_mm_xor_si128(c2[2565],simde_mm_xor_si128(c2[2172],simde_mm_xor_si128(c2[1771],simde_mm_xor_si128(c2[1371],simde_mm_xor_si128(c2[6205],simde_mm_xor_si128(c2[5404],simde_mm_xor_si128(c2[615],simde_mm_xor_si128(c2[214],simde_mm_xor_si128(c2[6213],simde_mm_xor_si128(c2[5402],simde_mm_xor_si128(c2[246],simde_mm_xor_si128(c2[6245],simde_mm_xor_si128(c2[5444],simde_mm_xor_si128(c2[3051],simde_mm_xor_si128(c2[2250],simde_mm_xor_si128(c2[4646],simde_mm_xor_si128(c2[4245],simde_mm_xor_si128(c2[3845],simde_mm_xor_si128(c2[6285],simde_mm_xor_si128(c2[5484],simde_mm_xor_si128(c2[3489],simde_mm_xor_si128(c2[3088],simde_mm_xor_si128(c2[2688],simde_mm_xor_si128(c2[1889],simde_mm_xor_si128(c2[6325],simde_mm_xor_si128(c2[5524],simde_mm_xor_si128(c2[4732],simde_mm_xor_si128(c2[3931],simde_mm_xor_si128(c2[1929],simde_mm_xor_si128(c2[1528],simde_mm_xor_si128(c2[1128],simde_mm_xor_si128(c2[366],simde_mm_xor_si128(c2[6365],simde_mm_xor_si128(c2[5564],simde_mm_xor_si128(c2[2769],simde_mm_xor_si128(c2[1968],simde_mm_xor_si128(c2[5568],simde_mm_xor_si128(c2[5167],c2[4767]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 8
+     d2[160]=simde_mm_xor_si128(c2[2805],simde_mm_xor_si128(c2[2405],simde_mm_xor_si128(c2[14],simde_mm_xor_si128(c2[6013],simde_mm_xor_si128(c2[4006],simde_mm_xor_si128(c2[0],simde_mm_xor_si128(c2[2845],simde_mm_xor_si128(c2[2445],simde_mm_xor_si128(c2[1651],simde_mm_xor_si128(c2[4847],simde_mm_xor_si128(c2[444],simde_mm_xor_si128(c2[2885],simde_mm_xor_si128(c2[2485],simde_mm_xor_si128(c2[2086],simde_mm_xor_si128(c2[2925],simde_mm_xor_si128(c2[2525],simde_mm_xor_si128(c2[1333],simde_mm_xor_si128(c2[933],simde_mm_xor_si128(c2[3731],simde_mm_xor_si128(c2[2965],simde_mm_xor_si128(c2[2565],simde_mm_xor_si128(c2[6165],simde_mm_xor_si128(c2[4971],simde_mm_xor_si128(c2[3005],simde_mm_xor_si128(c2[2605],simde_mm_xor_si128(c2[3414],simde_mm_xor_si128(c2[3045],simde_mm_xor_si128(c2[2645],simde_mm_xor_si128(c2[6250],simde_mm_xor_si128(c2[5850],simde_mm_xor_si128(c2[1046],simde_mm_xor_si128(c2[3085],simde_mm_xor_si128(c2[2685],simde_mm_xor_si128(c2[6288],simde_mm_xor_si128(c2[3125],simde_mm_xor_si128(c2[2725],simde_mm_xor_si128(c2[1132],simde_mm_xor_si128(c2[4728],simde_mm_xor_si128(c2[3165],simde_mm_xor_si128(c2[2765],simde_mm_xor_si128(c2[5968],simde_mm_xor_si128(c2[5568],c2[1968]))))))))))))))))))))))))))))))))))))))))));
+
+//row: 9
+     d2[180]=simde_mm_xor_si128(c2[803],simde_mm_xor_si128(c2[2005],simde_mm_xor_si128(c2[1605],simde_mm_xor_si128(c2[4411],simde_mm_xor_si128(c2[5213],simde_mm_xor_si128(c2[2404],simde_mm_xor_si128(c2[3206],simde_mm_xor_si128(c2[843],simde_mm_xor_si128(c2[2045],simde_mm_xor_si128(c2[1645],simde_mm_xor_si128(c2[49],simde_mm_xor_si128(c2[851],simde_mm_xor_si128(c2[3245],simde_mm_xor_si128(c2[4047],simde_mm_xor_si128(c2[40],simde_mm_xor_si128(c2[883],simde_mm_xor_si128(c2[2085],simde_mm_xor_si128(c2[1685],simde_mm_xor_si128(c2[484],simde_mm_xor_si128(c2[1286],simde_mm_xor_si128(c2[923],simde_mm_xor_si128(c2[2125],simde_mm_xor_si128(c2[1725],simde_mm_xor_si128(c2[5730],simde_mm_xor_si128(c2[133],simde_mm_xor_si128(c2[2129],simde_mm_xor_si128(c2[2931],simde_mm_xor_si128(c2[963],simde_mm_xor_si128(c2[1765],simde_mm_xor_si128(c2[4563],simde_mm_xor_si128(c2[5365],simde_mm_xor_si128(c2[3369],simde_mm_xor_si128(c2[4171],simde_mm_xor_si128(c2[1003],simde_mm_xor_si128(c2[1805],simde_mm_xor_si128(c2[1812],simde_mm_xor_si128(c2[2614],simde_mm_xor_si128(c2[1043],simde_mm_xor_si128(c2[2245],simde_mm_xor_si128(c2[1845],simde_mm_xor_si128(c2[4248],simde_mm_xor_si128(c2[5050],simde_mm_xor_si128(c2[5843],simde_mm_xor_si128(c2[246],simde_mm_xor_si128(c2[1083],simde_mm_xor_si128(c2[1885],simde_mm_xor_si128(c2[4686],simde_mm_xor_si128(c2[5488],simde_mm_xor_si128(c2[1123],simde_mm_xor_si128(c2[1925],simde_mm_xor_si128(c2[5929],simde_mm_xor_si128(c2[332],simde_mm_xor_si128(c2[3126],simde_mm_xor_si128(c2[3928],simde_mm_xor_si128(c2[726],simde_mm_xor_si128(c2[1163],simde_mm_xor_si128(c2[2365],simde_mm_xor_si128(c2[1965],simde_mm_xor_si128(c2[3966],simde_mm_xor_si128(c2[4768],simde_mm_xor_si128(c2[366],c2[1168])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 10
+     d2[200]=simde_mm_xor_si128(c2[0],simde_mm_xor_si128(c2[441],simde_mm_xor_si128(c2[5049],c2[1881])));
+
+//row: 11
+     d2[220]=simde_mm_xor_si128(c2[400],simde_mm_xor_si128(c2[4008],simde_mm_xor_si128(c2[2001],simde_mm_xor_si128(c2[0],simde_mm_xor_si128(c2[440],simde_mm_xor_si128(c2[6045],simde_mm_xor_si128(c2[3242],simde_mm_xor_si128(c2[2842],simde_mm_xor_si128(c2[480],simde_mm_xor_si128(c2[481],simde_mm_xor_si128(c2[81],simde_mm_xor_si128(c2[520],simde_mm_xor_si128(c2[5327],simde_mm_xor_si128(c2[1726],simde_mm_xor_si128(c2[560],simde_mm_xor_si128(c2[4160],simde_mm_xor_si128(c2[3366],simde_mm_xor_si128(c2[2966],simde_mm_xor_si128(c2[600],simde_mm_xor_si128(c2[1809],simde_mm_xor_si128(c2[1409],simde_mm_xor_si128(c2[640],simde_mm_xor_si128(c2[3845],simde_mm_xor_si128(c2[5840],simde_mm_xor_si128(c2[5440],simde_mm_xor_si128(c2[680],simde_mm_xor_si128(c2[4683],simde_mm_xor_si128(c2[4283],simde_mm_xor_si128(c2[3080],simde_mm_xor_si128(c2[720],simde_mm_xor_si128(c2[5526],simde_mm_xor_si128(c2[3123],simde_mm_xor_si128(c2[2723],simde_mm_xor_si128(c2[760],simde_mm_xor_si128(c2[3563],simde_mm_xor_si128(c2[363],simde_mm_xor_si128(c2[6362],c2[1960])))))))))))))))))))))))))))))))))))));
+
+//row: 12
+     d2[240]=simde_mm_xor_si128(c2[3],simde_mm_xor_si128(c2[6002],simde_mm_xor_si128(c2[3211],simde_mm_xor_si128(c2[1204],simde_mm_xor_si128(c2[43],simde_mm_xor_si128(c2[6042],simde_mm_xor_si128(c2[5248],simde_mm_xor_si128(c2[2045],simde_mm_xor_si128(c2[40],simde_mm_xor_si128(c2[83],simde_mm_xor_si128(c2[6082],simde_mm_xor_si128(c2[5683],simde_mm_xor_si128(c2[123],simde_mm_xor_si128(c2[6122],simde_mm_xor_si128(c2[4530],simde_mm_xor_si128(c2[929],simde_mm_xor_si128(c2[527],simde_mm_xor_si128(c2[6162],simde_mm_xor_si128(c2[3363],simde_mm_xor_si128(c2[2169],simde_mm_xor_si128(c2[6202],simde_mm_xor_si128(c2[612],simde_mm_xor_si128(c2[243],simde_mm_xor_si128(c2[6242],simde_mm_xor_si128(c2[3048],simde_mm_xor_si128(c2[4643],simde_mm_xor_si128(c2[6282],simde_mm_xor_si128(c2[3486],simde_mm_xor_si128(c2[6322],simde_mm_xor_si128(c2[4729],simde_mm_xor_si128(c2[1926],simde_mm_xor_si128(c2[363],simde_mm_xor_si128(c2[6362],simde_mm_xor_si128(c2[2766],c2[5565]))))))))))))))))))))))))))))))))));
+
+//row: 13
+     d2[260]=simde_mm_xor_si128(c2[3601],simde_mm_xor_si128(c2[810],simde_mm_xor_si128(c2[5202],simde_mm_xor_si128(c2[0],simde_mm_xor_si128(c2[3641],simde_mm_xor_si128(c2[2847],simde_mm_xor_si128(c2[44],simde_mm_xor_si128(c2[6043],simde_mm_xor_si128(c2[47],simde_mm_xor_si128(c2[3681],simde_mm_xor_si128(c2[3682],simde_mm_xor_si128(c2[3282],simde_mm_xor_si128(c2[3721],simde_mm_xor_si128(c2[2129],simde_mm_xor_si128(c2[4927],simde_mm_xor_si128(c2[3761],simde_mm_xor_si128(c2[962],simde_mm_xor_si128(c2[168],simde_mm_xor_si128(c2[6167],simde_mm_xor_si128(c2[3801],simde_mm_xor_si128(c2[5010],simde_mm_xor_si128(c2[4610],simde_mm_xor_si128(c2[3841],simde_mm_xor_si128(c2[647],simde_mm_xor_si128(c2[2642],simde_mm_xor_si128(c2[2242],simde_mm_xor_si128(c2[3881],simde_mm_xor_si128(c2[1485],simde_mm_xor_si128(c2[1085],simde_mm_xor_si128(c2[3921],simde_mm_xor_si128(c2[2328],simde_mm_xor_si128(c2[6324],simde_mm_xor_si128(c2[5924],simde_mm_xor_si128(c2[2726],simde_mm_xor_si128(c2[3961],simde_mm_xor_si128(c2[365],simde_mm_xor_si128(c2[3564],c2[3164])))))))))))))))))))))))))))))))))))));
+
+//row: 14
+     d2[280]=simde_mm_xor_si128(c2[3603],simde_mm_xor_si128(c2[3203],simde_mm_xor_si128(c2[4001],simde_mm_xor_si128(c2[412],simde_mm_xor_si128(c2[1210],simde_mm_xor_si128(c2[4804],simde_mm_xor_si128(c2[5602],simde_mm_xor_si128(c2[3643],simde_mm_xor_si128(c2[3243],simde_mm_xor_si128(c2[4041],simde_mm_xor_si128(c2[2449],simde_mm_xor_si128(c2[3247],simde_mm_xor_si128(c2[5645],simde_mm_xor_si128(c2[444],simde_mm_xor_si128(c2[44],simde_mm_xor_si128(c2[40],simde_mm_xor_si128(c2[3683],simde_mm_xor_si128(c2[3283],simde_mm_xor_si128(c2[4081],simde_mm_xor_si128(c2[2884],simde_mm_xor_si128(c2[4082],simde_mm_xor_si128(c2[3682],simde_mm_xor_si128(c2[3723],simde_mm_xor_si128(c2[3323],simde_mm_xor_si128(c2[4121],simde_mm_xor_si128(c2[1731],simde_mm_xor_si128(c2[2529],simde_mm_xor_si128(c2[4529],simde_mm_xor_si128(c2[5327],simde_mm_xor_si128(c2[3363],simde_mm_xor_si128(c2[4161],simde_mm_xor_si128(c2[564],simde_mm_xor_si128(c2[1362],simde_mm_xor_si128(c2[5769],simde_mm_xor_si128(c2[568],simde_mm_xor_si128(c2[168],simde_mm_xor_si128(c2[3403],simde_mm_xor_si128(c2[4201],simde_mm_xor_si128(c2[4212],simde_mm_xor_si128(c2[5410],simde_mm_xor_si128(c2[5010],simde_mm_xor_si128(c2[3843],simde_mm_xor_si128(c2[3443],simde_mm_xor_si128(c2[4241],simde_mm_xor_si128(c2[249],simde_mm_xor_si128(c2[1047],simde_mm_xor_si128(c2[1844],simde_mm_xor_si128(c2[3042],simde_mm_xor_si128(c2[2642],simde_mm_xor_si128(c2[4248],simde_mm_xor_si128(c2[3483],simde_mm_xor_si128(c2[4281],simde_mm_xor_si128(c2[687],simde_mm_xor_si128(c2[1885],simde_mm_xor_si128(c2[1485],simde_mm_xor_si128(c2[3523],simde_mm_xor_si128(c2[4321],simde_mm_xor_si128(c2[1930],simde_mm_xor_si128(c2[2728],simde_mm_xor_si128(c2[5526],simde_mm_xor_si128(c2[325],simde_mm_xor_si128(c2[6324],simde_mm_xor_si128(c2[3963],simde_mm_xor_si128(c2[3563],simde_mm_xor_si128(c2[4361],simde_mm_xor_si128(c2[6366],simde_mm_xor_si128(c2[765],simde_mm_xor_si128(c2[2766],simde_mm_xor_si128(c2[3964],c2[3564])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 15
+     d2[300]=simde_mm_xor_si128(c2[3204],simde_mm_xor_si128(c2[1206],simde_mm_xor_si128(c2[806],simde_mm_xor_si128(c2[413],simde_mm_xor_si128(c2[4414],simde_mm_xor_si128(c2[4805],simde_mm_xor_si128(c2[2407],simde_mm_xor_si128(c2[0],simde_mm_xor_si128(c2[3244],simde_mm_xor_si128(c2[1246],simde_mm_xor_si128(c2[846],simde_mm_xor_si128(c2[2450],simde_mm_xor_si128(c2[52],simde_mm_xor_si128(c2[5646],simde_mm_xor_si128(c2[3248],simde_mm_xor_si128(c2[3284],simde_mm_xor_si128(c2[1286],simde_mm_xor_si128(c2[886],simde_mm_xor_si128(c2[2885],simde_mm_xor_si128(c2[487],simde_mm_xor_si128(c2[3324],simde_mm_xor_si128(c2[1326],simde_mm_xor_si128(c2[926],simde_mm_xor_si128(c2[1732],simde_mm_xor_si128(c2[5733],simde_mm_xor_si128(c2[4530],simde_mm_xor_si128(c2[2132],simde_mm_xor_si128(c2[3364],simde_mm_xor_si128(c2[966],simde_mm_xor_si128(c2[565],simde_mm_xor_si128(c2[4566],simde_mm_xor_si128(c2[5770],simde_mm_xor_si128(c2[3372],simde_mm_xor_si128(c2[3404],simde_mm_xor_si128(c2[1006],simde_mm_xor_si128(c2[4213],simde_mm_xor_si128(c2[1815],simde_mm_xor_si128(c2[3444],simde_mm_xor_si128(c2[1446],simde_mm_xor_si128(c2[1046],simde_mm_xor_si128(c2[250],simde_mm_xor_si128(c2[4251],simde_mm_xor_si128(c2[1845],simde_mm_xor_si128(c2[5846],simde_mm_xor_si128(c2[3484],simde_mm_xor_si128(c2[1086],simde_mm_xor_si128(c2[688],simde_mm_xor_si128(c2[4689],simde_mm_xor_si128(c2[3524],simde_mm_xor_si128(c2[1126],simde_mm_xor_si128(c2[1931],simde_mm_xor_si128(c2[5932],simde_mm_xor_si128(c2[5527],simde_mm_xor_si128(c2[3129],simde_mm_xor_si128(c2[3564],simde_mm_xor_si128(c2[1566],simde_mm_xor_si128(c2[1166],simde_mm_xor_si128(c2[6367],simde_mm_xor_si128(c2[3969],simde_mm_xor_si128(c2[2767],c2[369]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 16
+     d2[320]=simde_mm_xor_si128(c2[6006],simde_mm_xor_si128(c2[5606],simde_mm_xor_si128(c2[6008],simde_mm_xor_si128(c2[5608],simde_mm_xor_si128(c2[2815],simde_mm_xor_si128(c2[3217],simde_mm_xor_si128(c2[2817],simde_mm_xor_si128(c2[808],simde_mm_xor_si128(c2[810],simde_mm_xor_si128(c2[6046],simde_mm_xor_si128(c2[5646],simde_mm_xor_si128(c2[6048],simde_mm_xor_si128(c2[5648],simde_mm_xor_si128(c2[4852],simde_mm_xor_si128(c2[4854],simde_mm_xor_si128(c2[1649],simde_mm_xor_si128(c2[1651],simde_mm_xor_si128(c2[40],simde_mm_xor_si128(c2[6086],simde_mm_xor_si128(c2[5686],simde_mm_xor_si128(c2[6088],simde_mm_xor_si128(c2[5688],simde_mm_xor_si128(c2[5287],simde_mm_xor_si128(c2[5289],simde_mm_xor_si128(c2[6126],simde_mm_xor_si128(c2[5726],simde_mm_xor_si128(c2[6128],simde_mm_xor_si128(c2[5728],simde_mm_xor_si128(c2[4134],simde_mm_xor_si128(c2[4536],simde_mm_xor_si128(c2[4136],simde_mm_xor_si128(c2[533],simde_mm_xor_si128(c2[535],simde_mm_xor_si128(c2[5766],simde_mm_xor_si128(c2[6168],simde_mm_xor_si128(c2[5768],simde_mm_xor_si128(c2[2967],simde_mm_xor_si128(c2[2969],simde_mm_xor_si128(c2[1773],simde_mm_xor_si128(c2[1775],simde_mm_xor_si128(c2[5806],simde_mm_xor_si128(c2[6208],simde_mm_xor_si128(c2[5808],simde_mm_xor_si128(c2[216],simde_mm_xor_si128(c2[218],simde_mm_xor_si128(c2[6246],simde_mm_xor_si128(c2[5846],simde_mm_xor_si128(c2[6248],simde_mm_xor_si128(c2[5848],simde_mm_xor_si128(c2[2652],simde_mm_xor_si128(c2[3054],simde_mm_xor_si128(c2[2654],simde_mm_xor_si128(c2[4247],simde_mm_xor_si128(c2[4249],simde_mm_xor_si128(c2[5886],simde_mm_xor_si128(c2[6288],simde_mm_xor_si128(c2[5888],simde_mm_xor_si128(c2[3090],simde_mm_xor_si128(c2[3092],simde_mm_xor_si128(c2[5926],simde_mm_xor_si128(c2[6328],simde_mm_xor_si128(c2[5928],simde_mm_xor_si128(c2[4333],simde_mm_xor_si128(c2[4335],simde_mm_xor_si128(c2[1530],simde_mm_xor_si128(c2[1532],simde_mm_xor_si128(c2[6366],simde_mm_xor_si128(c2[5966],simde_mm_xor_si128(c2[6368],simde_mm_xor_si128(c2[5968],simde_mm_xor_si128(c2[2370],simde_mm_xor_si128(c2[2772],simde_mm_xor_si128(c2[2372],simde_mm_xor_si128(c2[5169],simde_mm_xor_si128(c2[5171],c2[6364])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 17
+     d2[340]=simde_mm_xor_si128(c2[5206],simde_mm_xor_si128(c2[4806],simde_mm_xor_si128(c2[801],simde_mm_xor_si128(c2[401],simde_mm_xor_si128(c2[2015],simde_mm_xor_si128(c2[4409],simde_mm_xor_si128(c2[4009],simde_mm_xor_si128(c2[8],simde_mm_xor_si128(c2[2002],simde_mm_xor_si128(c2[5246],simde_mm_xor_si128(c2[4846],simde_mm_xor_si128(c2[841],simde_mm_xor_si128(c2[441],simde_mm_xor_si128(c2[4052],simde_mm_xor_si128(c2[6046],simde_mm_xor_si128(c2[849],simde_mm_xor_si128(c2[2843],simde_mm_xor_si128(c2[40],simde_mm_xor_si128(c2[5286],simde_mm_xor_si128(c2[4886],simde_mm_xor_si128(c2[881],simde_mm_xor_si128(c2[481],simde_mm_xor_si128(c2[4487],simde_mm_xor_si128(c2[82],simde_mm_xor_si128(c2[5326],simde_mm_xor_si128(c2[4926],simde_mm_xor_si128(c2[921],simde_mm_xor_si128(c2[521],simde_mm_xor_si128(c2[3334],simde_mm_xor_si128(c2[5728],simde_mm_xor_si128(c2[5328],simde_mm_xor_si128(c2[6132],simde_mm_xor_si128(c2[1727],simde_mm_xor_si128(c2[4966],simde_mm_xor_si128(c2[961],simde_mm_xor_si128(c2[561],simde_mm_xor_si128(c2[2167],simde_mm_xor_si128(c2[4161],simde_mm_xor_si128(c2[973],simde_mm_xor_si128(c2[2967],simde_mm_xor_si128(c2[5006],simde_mm_xor_si128(c2[1001],simde_mm_xor_si128(c2[601],simde_mm_xor_si128(c2[5815],simde_mm_xor_si128(c2[1410],simde_mm_xor_si128(c2[3401],simde_mm_xor_si128(c2[5446],simde_mm_xor_si128(c2[5046],simde_mm_xor_si128(c2[1041],simde_mm_xor_si128(c2[641],simde_mm_xor_si128(c2[1852],simde_mm_xor_si128(c2[4246],simde_mm_xor_si128(c2[3846],simde_mm_xor_si128(c2[3447],simde_mm_xor_si128(c2[5441],simde_mm_xor_si128(c2[5086],simde_mm_xor_si128(c2[1081],simde_mm_xor_si128(c2[681],simde_mm_xor_si128(c2[2290],simde_mm_xor_si128(c2[4284],simde_mm_xor_si128(c2[5126],simde_mm_xor_si128(c2[1121],simde_mm_xor_si128(c2[721],simde_mm_xor_si128(c2[3533],simde_mm_xor_si128(c2[5527],simde_mm_xor_si128(c2[730],simde_mm_xor_si128(c2[2724],simde_mm_xor_si128(c2[5566],simde_mm_xor_si128(c2[5166],simde_mm_xor_si128(c2[1161],simde_mm_xor_si128(c2[761],simde_mm_xor_si128(c2[1570],simde_mm_xor_si128(c2[3964],simde_mm_xor_si128(c2[3564],simde_mm_xor_si128(c2[4369],c2[6363])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 18
+     d2[360]=simde_mm_xor_si128(c2[0],simde_mm_xor_si128(c2[1041],c2[2685]));
+
+//row: 19
+     d2[380]=simde_mm_xor_si128(c2[3609],simde_mm_xor_si128(c2[818],simde_mm_xor_si128(c2[5210],simde_mm_xor_si128(c2[0],simde_mm_xor_si128(c2[3649],simde_mm_xor_si128(c2[2855],simde_mm_xor_si128(c2[6051],simde_mm_xor_si128(c2[5649],simde_mm_xor_si128(c2[3689],simde_mm_xor_si128(c2[3290],simde_mm_xor_si128(c2[3729],simde_mm_xor_si128(c2[2137],simde_mm_xor_si128(c2[4935],simde_mm_xor_si128(c2[3769],simde_mm_xor_si128(c2[970],simde_mm_xor_si128(c2[6175],simde_mm_xor_si128(c2[3809],simde_mm_xor_si128(c2[4618],simde_mm_xor_si128(c2[3849],simde_mm_xor_si128(c2[655],simde_mm_xor_si128(c2[2250],simde_mm_xor_si128(c2[3889],simde_mm_xor_si128(c2[1093],simde_mm_xor_si128(c2[3929],simde_mm_xor_si128(c2[2336],simde_mm_xor_si128(c2[5932],simde_mm_xor_si128(c2[3969],simde_mm_xor_si128(c2[373],c2[3172]))))))))))))))))))))))))))));
+
+//row: 20
+     d2[400]=simde_mm_xor_si128(c2[3206],simde_mm_xor_si128(c2[2806],simde_mm_xor_si128(c2[15],simde_mm_xor_si128(c2[4407],simde_mm_xor_si128(c2[3246],simde_mm_xor_si128(c2[2846],simde_mm_xor_si128(c2[2052],simde_mm_xor_si128(c2[5248],simde_mm_xor_si128(c2[40],simde_mm_xor_si128(c2[3286],simde_mm_xor_si128(c2[2886],simde_mm_xor_si128(c2[2487],simde_mm_xor_si128(c2[3326],simde_mm_xor_si128(c2[2926],simde_mm_xor_si128(c2[1334],simde_mm_xor_si128(c2[4132],simde_mm_xor_si128(c2[2966],simde_mm_xor_si128(c2[167],simde_mm_xor_si128(c2[5372],simde_mm_xor_si128(c2[1769],simde_mm_xor_si128(c2[3006],simde_mm_xor_si128(c2[3815],simde_mm_xor_si128(c2[3446],simde_mm_xor_si128(c2[3046],simde_mm_xor_si128(c2[6251],simde_mm_xor_si128(c2[1447],simde_mm_xor_si128(c2[3086],simde_mm_xor_si128(c2[290],simde_mm_xor_si128(c2[3126],simde_mm_xor_si128(c2[1533],simde_mm_xor_si128(c2[5129],simde_mm_xor_si128(c2[3566],simde_mm_xor_si128(c2[3166],simde_mm_xor_si128(c2[5969],c2[2369]))))))))))))))))))))))))))))))))));
+
+//row: 21
+     d2[420]=simde_mm_xor_si128(c2[2],simde_mm_xor_si128(c2[3610],simde_mm_xor_si128(c2[1603],simde_mm_xor_si128(c2[0],simde_mm_xor_si128(c2[42],simde_mm_xor_si128(c2[5647],simde_mm_xor_si128(c2[2844],simde_mm_xor_si128(c2[2444],simde_mm_xor_si128(c2[82],simde_mm_xor_si128(c2[83],simde_mm_xor_si128(c2[6082],simde_mm_xor_si128(c2[122],simde_mm_xor_si128(c2[4929],simde_mm_xor_si128(c2[1328],simde_mm_xor_si128(c2[162],simde_mm_xor_si128(c2[3762],simde_mm_xor_si128(c2[2968],simde_mm_xor_si128(c2[2568],simde_mm_xor_si128(c2[202],simde_mm_xor_si128(c2[1411],simde_mm_xor_si128(c2[1011],simde_mm_xor_si128(c2[242],simde_mm_xor_si128(c2[3447],simde_mm_xor_si128(c2[5442],simde_mm_xor_si128(c2[5042],simde_mm_xor_si128(c2[282],simde_mm_xor_si128(c2[4285],simde_mm_xor_si128(c2[3885],simde_mm_xor_si128(c2[322],simde_mm_xor_si128(c2[5128],simde_mm_xor_si128(c2[2725],simde_mm_xor_si128(c2[2325],simde_mm_xor_si128(c2[721],simde_mm_xor_si128(c2[362],simde_mm_xor_si128(c2[3165],simde_mm_xor_si128(c2[6364],c2[5964]))))))))))))))))))))))))))))))))))));
+
+//row: 22
+     d2[440]=simde_mm_xor_si128(c2[40],c2[1680]);
+
+//row: 23
+     d2[460]=simde_mm_xor_si128(c2[0],simde_mm_xor_si128(c2[4524],c2[5809]));
+
+//row: 24
+     d2[480]=simde_mm_xor_si128(c2[40],simde_mm_xor_si128(c2[2084],c2[3165]));
+
+//row: 25
+     d2[500]=simde_mm_xor_si128(c2[0],c2[604]);
+
+//row: 26
+     d2[520]=simde_mm_xor_si128(c2[5200],simde_mm_xor_si128(c2[4800],simde_mm_xor_si128(c2[2400],simde_mm_xor_si128(c2[2409],simde_mm_xor_si128(c2[2009],simde_mm_xor_si128(c2[6008],simde_mm_xor_si128(c2[2],simde_mm_xor_si128(c2[4001],simde_mm_xor_si128(c2[5240],simde_mm_xor_si128(c2[4840],simde_mm_xor_si128(c2[2440],simde_mm_xor_si128(c2[4046],simde_mm_xor_si128(c2[1646],simde_mm_xor_si128(c2[843],simde_mm_xor_si128(c2[5242],simde_mm_xor_si128(c2[4842],simde_mm_xor_si128(c2[5280],simde_mm_xor_si128(c2[4880],simde_mm_xor_si128(c2[2480],simde_mm_xor_si128(c2[4481],simde_mm_xor_si128(c2[2481],simde_mm_xor_si128(c2[2081],simde_mm_xor_si128(c2[80],simde_mm_xor_si128(c2[5320],simde_mm_xor_si128(c2[4920],simde_mm_xor_si128(c2[2520],simde_mm_xor_si128(c2[3728],simde_mm_xor_si128(c2[3328],simde_mm_xor_si128(c2[928],simde_mm_xor_si128(c2[6126],simde_mm_xor_si128(c2[3726],simde_mm_xor_si128(c2[5360],simde_mm_xor_si128(c2[4960],simde_mm_xor_si128(c2[2560],simde_mm_xor_si128(c2[2161],simde_mm_xor_si128(c2[6160],simde_mm_xor_si128(c2[967],simde_mm_xor_si128(c2[5366],simde_mm_xor_si128(c2[4966],simde_mm_xor_si128(c2[5400],simde_mm_xor_si128(c2[5000],simde_mm_xor_si128(c2[2600],simde_mm_xor_si128(c2[5809],simde_mm_xor_si128(c2[3809],simde_mm_xor_si128(c2[3409],simde_mm_xor_si128(c2[5440],simde_mm_xor_si128(c2[5040],simde_mm_xor_si128(c2[2640],simde_mm_xor_si128(c2[2246],simde_mm_xor_si128(c2[1846],simde_mm_xor_si128(c2[5845],simde_mm_xor_si128(c2[3441],simde_mm_xor_si128(c2[1441],simde_mm_xor_si128(c2[1041],simde_mm_xor_si128(c2[5480],simde_mm_xor_si128(c2[5080],simde_mm_xor_si128(c2[2680],simde_mm_xor_si128(c2[2284],simde_mm_xor_si128(c2[284],simde_mm_xor_si128(c2[6283],simde_mm_xor_si128(c2[1886],simde_mm_xor_si128(c2[5520],simde_mm_xor_si128(c2[5120],simde_mm_xor_si128(c2[2720],simde_mm_xor_si128(c2[3527],simde_mm_xor_si128(c2[1127],simde_mm_xor_si128(c2[724],simde_mm_xor_si128(c2[5123],simde_mm_xor_si128(c2[4723],simde_mm_xor_si128(c2[5560],simde_mm_xor_si128(c2[5160],simde_mm_xor_si128(c2[2760],simde_mm_xor_si128(c2[1964],simde_mm_xor_si128(c2[1564],simde_mm_xor_si128(c2[5563],simde_mm_xor_si128(c2[4363],simde_mm_xor_si128(c2[2363],c2[1963])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 27
+     d2[540]=simde_mm_xor_si128(c2[0],c2[242]);
+
+//row: 28
+     d2[560]=simde_mm_xor_si128(c2[40],simde_mm_xor_si128(c2[5687],c2[5806]));
+
+//row: 29
+     d2[580]=simde_mm_xor_si128(c2[0],c2[4169]);
+
+//row: 30
+     d2[600]=simde_mm_xor_si128(c2[80],simde_mm_xor_si128(c2[1402],simde_mm_xor_si128(c2[1483],c2[2768])));
+
+//row: 31
+     d2[620]=simde_mm_xor_si128(c2[1201],simde_mm_xor_si128(c2[4809],simde_mm_xor_si128(c2[2802],simde_mm_xor_si128(c2[1241],simde_mm_xor_si128(c2[447],simde_mm_xor_si128(c2[4043],simde_mm_xor_si128(c2[3643],simde_mm_xor_si128(c2[40],simde_mm_xor_si128(c2[1281],simde_mm_xor_si128(c2[1282],simde_mm_xor_si128(c2[882],simde_mm_xor_si128(c2[1321],simde_mm_xor_si128(c2[6128],simde_mm_xor_si128(c2[2527],simde_mm_xor_si128(c2[1361],simde_mm_xor_si128(c2[4961],simde_mm_xor_si128(c2[4167],simde_mm_xor_si128(c2[3767],simde_mm_xor_si128(c2[1401],simde_mm_xor_si128(c2[2610],simde_mm_xor_si128(c2[2210],simde_mm_xor_si128(c2[1441],simde_mm_xor_si128(c2[4646],simde_mm_xor_si128(c2[242],simde_mm_xor_si128(c2[6241],simde_mm_xor_si128(c2[1481],simde_mm_xor_si128(c2[5484],simde_mm_xor_si128(c2[5084],simde_mm_xor_si128(c2[1521],simde_mm_xor_si128(c2[6327],simde_mm_xor_si128(c2[3924],simde_mm_xor_si128(c2[3524],simde_mm_xor_si128(c2[1561],simde_mm_xor_si128(c2[4364],simde_mm_xor_si128(c2[1164],c2[764])))))))))))))))))))))))))))))))))));
+
+//row: 32
+     d2[640]=simde_mm_xor_si128(c2[4007],simde_mm_xor_si128(c2[3607],simde_mm_xor_si128(c2[1216],simde_mm_xor_si128(c2[816],simde_mm_xor_si128(c2[5208],simde_mm_xor_si128(c2[0],simde_mm_xor_si128(c2[4047],simde_mm_xor_si128(c2[3647],simde_mm_xor_si128(c2[2853],simde_mm_xor_si128(c2[6049],simde_mm_xor_si128(c2[4087],simde_mm_xor_si128(c2[3687],simde_mm_xor_si128(c2[3288],simde_mm_xor_si128(c2[4127],simde_mm_xor_si128(c2[3727],simde_mm_xor_si128(c2[2535],simde_mm_xor_si128(c2[2135],simde_mm_xor_si128(c2[4933],simde_mm_xor_si128(c2[4167],simde_mm_xor_si128(c2[3767],simde_mm_xor_si128(c2[968],simde_mm_xor_si128(c2[6173],simde_mm_xor_si128(c2[4207],simde_mm_xor_si128(c2[3807],simde_mm_xor_si128(c2[4616],simde_mm_xor_si128(c2[1801],simde_mm_xor_si128(c2[4247],simde_mm_xor_si128(c2[3847],simde_mm_xor_si128(c2[1053],simde_mm_xor_si128(c2[653],simde_mm_xor_si128(c2[2248],simde_mm_xor_si128(c2[4287],simde_mm_xor_si128(c2[3887],simde_mm_xor_si128(c2[1091],simde_mm_xor_si128(c2[4327],simde_mm_xor_si128(c2[3927],simde_mm_xor_si128(c2[2334],simde_mm_xor_si128(c2[5930],simde_mm_xor_si128(c2[4367],simde_mm_xor_si128(c2[3967],simde_mm_xor_si128(c2[771],simde_mm_xor_si128(c2[371],c2[3170]))))))))))))))))))))))))))))))))))))))))));
+
+//row: 33
+     d2[660]=simde_mm_xor_si128(c2[4800],simde_mm_xor_si128(c2[2009],simde_mm_xor_si128(c2[2],simde_mm_xor_si128(c2[4840],simde_mm_xor_si128(c2[4046],simde_mm_xor_si128(c2[843],simde_mm_xor_si128(c2[4880],simde_mm_xor_si128(c2[4481],simde_mm_xor_si128(c2[80],simde_mm_xor_si128(c2[4920],simde_mm_xor_si128(c2[3328],simde_mm_xor_si128(c2[6126],simde_mm_xor_si128(c2[4960],simde_mm_xor_si128(c2[2161],simde_mm_xor_si128(c2[967],simde_mm_xor_si128(c2[5000],simde_mm_xor_si128(c2[5809],simde_mm_xor_si128(c2[5040],simde_mm_xor_si128(c2[1846],simde_mm_xor_si128(c2[3441],simde_mm_xor_si128(c2[5080],simde_mm_xor_si128(c2[2284],simde_mm_xor_si128(c2[3485],simde_mm_xor_si128(c2[5120],simde_mm_xor_si128(c2[3527],simde_mm_xor_si128(c2[724],simde_mm_xor_si128(c2[5160],simde_mm_xor_si128(c2[1564],c2[4363]))))))))))))))))))))))))))));
+
+//row: 34
+     d2[680]=simde_mm_xor_si128(c2[1201],simde_mm_xor_si128(c2[801],simde_mm_xor_si128(c2[5204],simde_mm_xor_si128(c2[4809],simde_mm_xor_si128(c2[4409],simde_mm_xor_si128(c2[2413],simde_mm_xor_si128(c2[2402],simde_mm_xor_si128(c2[406],simde_mm_xor_si128(c2[0],simde_mm_xor_si128(c2[1241],simde_mm_xor_si128(c2[841],simde_mm_xor_si128(c2[5244],simde_mm_xor_si128(c2[47],simde_mm_xor_si128(c2[4450],simde_mm_xor_si128(c2[3243],simde_mm_xor_si128(c2[1647],simde_mm_xor_si128(c2[1247],simde_mm_xor_si128(c2[1281],simde_mm_xor_si128(c2[881],simde_mm_xor_si128(c2[5284],simde_mm_xor_si128(c2[482],simde_mm_xor_si128(c2[5285],simde_mm_xor_si128(c2[4885],simde_mm_xor_si128(c2[1321],simde_mm_xor_si128(c2[921],simde_mm_xor_si128(c2[5324],simde_mm_xor_si128(c2[6128],simde_mm_xor_si128(c2[5728],simde_mm_xor_si128(c2[3732],simde_mm_xor_si128(c2[2127],simde_mm_xor_si128(c2[131],simde_mm_xor_si128(c2[1361],simde_mm_xor_si128(c2[961],simde_mm_xor_si128(c2[5364],simde_mm_xor_si128(c2[4561],simde_mm_xor_si128(c2[2565],simde_mm_xor_si128(c2[3367],simde_mm_xor_si128(c2[1771],simde_mm_xor_si128(c2[1371],simde_mm_xor_si128(c2[1401],simde_mm_xor_si128(c2[1001],simde_mm_xor_si128(c2[5404],simde_mm_xor_si128(c2[1810],simde_mm_xor_si128(c2[214],simde_mm_xor_si128(c2[6213],simde_mm_xor_si128(c2[1441],simde_mm_xor_si128(c2[1041],simde_mm_xor_si128(c2[5444],simde_mm_xor_si128(c2[4646],simde_mm_xor_si128(c2[4246],simde_mm_xor_si128(c2[2250],simde_mm_xor_si128(c2[5841],simde_mm_xor_si128(c2[4245],simde_mm_xor_si128(c2[3845],simde_mm_xor_si128(c2[1481],simde_mm_xor_si128(c2[1081],simde_mm_xor_si128(c2[5484],simde_mm_xor_si128(c2[4684],simde_mm_xor_si128(c2[3088],simde_mm_xor_si128(c2[2688],simde_mm_xor_si128(c2[1521],simde_mm_xor_si128(c2[1121],simde_mm_xor_si128(c2[5524],simde_mm_xor_si128(c2[5927],simde_mm_xor_si128(c2[3931],simde_mm_xor_si128(c2[3124],simde_mm_xor_si128(c2[1528],simde_mm_xor_si128(c2[1128],simde_mm_xor_si128(c2[1561],simde_mm_xor_si128(c2[1161],simde_mm_xor_si128(c2[5564],simde_mm_xor_si128(c2[4364],simde_mm_xor_si128(c2[3964],simde_mm_xor_si128(c2[1968],simde_mm_xor_si128(c2[364],simde_mm_xor_si128(c2[5167],c2[4767]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 35
+     d2[700]=simde_mm_xor_si128(c2[2400],simde_mm_xor_si128(c2[2000],simde_mm_xor_si128(c2[5608],simde_mm_xor_si128(c2[3601],simde_mm_xor_si128(c2[2440],simde_mm_xor_si128(c2[2040],simde_mm_xor_si128(c2[1246],simde_mm_xor_si128(c2[4442],simde_mm_xor_si128(c2[40],simde_mm_xor_si128(c2[2480],simde_mm_xor_si128(c2[2080],simde_mm_xor_si128(c2[1681],simde_mm_xor_si128(c2[2520],simde_mm_xor_si128(c2[2120],simde_mm_xor_si128(c2[528],simde_mm_xor_si128(c2[3326],simde_mm_xor_si128(c2[2160],simde_mm_xor_si128(c2[5760],simde_mm_xor_si128(c2[4566],simde_mm_xor_si128(c2[2200],simde_mm_xor_si128(c2[3009],simde_mm_xor_si128(c2[5409],simde_mm_xor_si128(c2[2640],simde_mm_xor_si128(c2[2240],simde_mm_xor_si128(c2[5445],simde_mm_xor_si128(c2[641],simde_mm_xor_si128(c2[2280],simde_mm_xor_si128(c2[5883],simde_mm_xor_si128(c2[2320],simde_mm_xor_si128(c2[727],simde_mm_xor_si128(c2[4323],simde_mm_xor_si128(c2[2760],simde_mm_xor_si128(c2[2360],simde_mm_xor_si128(c2[5163],c2[1563]))))))))))))))))))))))))))))))))));
+
+//row: 36
+     d2[720]=simde_mm_xor_si128(c2[0],simde_mm_xor_si128(c2[6083],c2[1085]));
+
+//row: 37
+     d2[740]=simde_mm_xor_si128(c2[6019],simde_mm_xor_si128(c2[6008],simde_mm_xor_si128(c2[3208],simde_mm_xor_si128(c2[3217],simde_mm_xor_si128(c2[1201],simde_mm_xor_si128(c2[1210],simde_mm_xor_si128(c2[6059],simde_mm_xor_si128(c2[6048],simde_mm_xor_si128(c2[5245],simde_mm_xor_si128(c2[5254],simde_mm_xor_si128(c2[2042],simde_mm_xor_si128(c2[2451],simde_mm_xor_si128(c2[2051],simde_mm_xor_si128(c2[6099],simde_mm_xor_si128(c2[6088],simde_mm_xor_si128(c2[5680],simde_mm_xor_si128(c2[6089],simde_mm_xor_si128(c2[5689],simde_mm_xor_si128(c2[6139],simde_mm_xor_si128(c2[6128],simde_mm_xor_si128(c2[4527],simde_mm_xor_si128(c2[4536],simde_mm_xor_si128(c2[926],simde_mm_xor_si128(c2[935],simde_mm_xor_si128(c2[6179],simde_mm_xor_si128(c2[6168],simde_mm_xor_si128(c2[3360],simde_mm_xor_si128(c2[3369],simde_mm_xor_si128(c2[2166],simde_mm_xor_si128(c2[2575],simde_mm_xor_si128(c2[2175],simde_mm_xor_si128(c2[6219],simde_mm_xor_si128(c2[6208],simde_mm_xor_si128(c2[609],simde_mm_xor_si128(c2[1018],simde_mm_xor_si128(c2[618],simde_mm_xor_si128(c2[6259],simde_mm_xor_si128(c2[6248],simde_mm_xor_si128(c2[3045],simde_mm_xor_si128(c2[3054],simde_mm_xor_si128(c2[4640],simde_mm_xor_si128(c2[5049],simde_mm_xor_si128(c2[4649],simde_mm_xor_si128(c2[6299],simde_mm_xor_si128(c2[6288],simde_mm_xor_si128(c2[3483],simde_mm_xor_si128(c2[3892],simde_mm_xor_si128(c2[3492],simde_mm_xor_si128(c2[6339],simde_mm_xor_si128(c2[6328],simde_mm_xor_si128(c2[4726],simde_mm_xor_si128(c2[4735],simde_mm_xor_si128(c2[1923],simde_mm_xor_si128(c2[2332],simde_mm_xor_si128(c2[1932],simde_mm_xor_si128(c2[6379],simde_mm_xor_si128(c2[6368],simde_mm_xor_si128(c2[2763],simde_mm_xor_si128(c2[2772],simde_mm_xor_si128(c2[5562],simde_mm_xor_si128(c2[5971],c2[5571])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 38
+     d2[760]=simde_mm_xor_si128(c2[1201],simde_mm_xor_si128(c2[801],simde_mm_xor_si128(c2[4409],simde_mm_xor_si128(c2[2402],simde_mm_xor_si128(c2[1241],simde_mm_xor_si128(c2[841],simde_mm_xor_si128(c2[47],simde_mm_xor_si128(c2[3243],simde_mm_xor_si128(c2[40],simde_mm_xor_si128(c2[1281],simde_mm_xor_si128(c2[881],simde_mm_xor_si128(c2[482],simde_mm_xor_si128(c2[1321],simde_mm_xor_si128(c2[921],simde_mm_xor_si128(c2[5728],simde_mm_xor_si128(c2[2127],simde_mm_xor_si128(c2[961],simde_mm_xor_si128(c2[4561],simde_mm_xor_si128(c2[3367],simde_mm_xor_si128(c2[1001],simde_mm_xor_si128(c2[1810],simde_mm_xor_si128(c2[5405],simde_mm_xor_si128(c2[1441],simde_mm_xor_si128(c2[1041],simde_mm_xor_si128(c2[4246],simde_mm_xor_si128(c2[5841],simde_mm_xor_si128(c2[1081],simde_mm_xor_si128(c2[4684],simde_mm_xor_si128(c2[1121],simde_mm_xor_si128(c2[5927],simde_mm_xor_si128(c2[3124],simde_mm_xor_si128(c2[1561],simde_mm_xor_si128(c2[1161],simde_mm_xor_si128(c2[3964],c2[364]))))))))))))))))))))))))))))))))));
+
+//row: 39
+     d2[780]=simde_mm_xor_si128(c2[4008],simde_mm_xor_si128(c2[3608],simde_mm_xor_si128(c2[1217],simde_mm_xor_si128(c2[817],simde_mm_xor_si128(c2[5209],simde_mm_xor_si128(c2[0],simde_mm_xor_si128(c2[4048],simde_mm_xor_si128(c2[3648],simde_mm_xor_si128(c2[2854],simde_mm_xor_si128(c2[6050],simde_mm_xor_si128(c2[4088],simde_mm_xor_si128(c2[3688],simde_mm_xor_si128(c2[3289],simde_mm_xor_si128(c2[4128],simde_mm_xor_si128(c2[3728],simde_mm_xor_si128(c2[2536],simde_mm_xor_si128(c2[2136],simde_mm_xor_si128(c2[4934],simde_mm_xor_si128(c2[4168],simde_mm_xor_si128(c2[3768],simde_mm_xor_si128(c2[969],simde_mm_xor_si128(c2[6174],simde_mm_xor_si128(c2[4208],simde_mm_xor_si128(c2[3808],simde_mm_xor_si128(c2[4617],simde_mm_xor_si128(c2[4248],simde_mm_xor_si128(c2[3848],simde_mm_xor_si128(c2[1054],simde_mm_xor_si128(c2[654],simde_mm_xor_si128(c2[2249],simde_mm_xor_si128(c2[4288],simde_mm_xor_si128(c2[3888],simde_mm_xor_si128(c2[1092],simde_mm_xor_si128(c2[3481],simde_mm_xor_si128(c2[4328],simde_mm_xor_si128(c2[3928],simde_mm_xor_si128(c2[2335],simde_mm_xor_si128(c2[5931],simde_mm_xor_si128(c2[4368],simde_mm_xor_si128(c2[3968],simde_mm_xor_si128(c2[772],simde_mm_xor_si128(c2[372],c2[3171]))))))))))))))))))))))))))))))))))))))))));
+
+//row: 40
+     d2[800]=simde_mm_xor_si128(c2[1202],simde_mm_xor_si128(c2[5608],simde_mm_xor_si128(c2[4810],simde_mm_xor_si128(c2[2817],simde_mm_xor_si128(c2[2803],simde_mm_xor_si128(c2[810],simde_mm_xor_si128(c2[1242],simde_mm_xor_si128(c2[5648],simde_mm_xor_si128(c2[448],simde_mm_xor_si128(c2[4854],simde_mm_xor_si128(c2[3644],simde_mm_xor_si128(c2[2051],simde_mm_xor_si128(c2[1651],simde_mm_xor_si128(c2[1282],simde_mm_xor_si128(c2[5688],simde_mm_xor_si128(c2[883],simde_mm_xor_si128(c2[5689],simde_mm_xor_si128(c2[5289],simde_mm_xor_si128(c2[80],simde_mm_xor_si128(c2[1322],simde_mm_xor_si128(c2[5728],simde_mm_xor_si128(c2[6129],simde_mm_xor_si128(c2[4136],simde_mm_xor_si128(c2[2528],simde_mm_xor_si128(c2[535],simde_mm_xor_si128(c2[1362],simde_mm_xor_si128(c2[5768],simde_mm_xor_si128(c2[4962],simde_mm_xor_si128(c2[2969],simde_mm_xor_si128(c2[3768],simde_mm_xor_si128(c2[2175],simde_mm_xor_si128(c2[1775],simde_mm_xor_si128(c2[1402],simde_mm_xor_si128(c2[5808],simde_mm_xor_si128(c2[2211],simde_mm_xor_si128(c2[618],simde_mm_xor_si128(c2[218],simde_mm_xor_si128(c2[1442],simde_mm_xor_si128(c2[5848],simde_mm_xor_si128(c2[4647],simde_mm_xor_si128(c2[2654],simde_mm_xor_si128(c2[6242],simde_mm_xor_si128(c2[4649],simde_mm_xor_si128(c2[4249],simde_mm_xor_si128(c2[1482],simde_mm_xor_si128(c2[5888],simde_mm_xor_si128(c2[5085],simde_mm_xor_si128(c2[3492],simde_mm_xor_si128(c2[3092],simde_mm_xor_si128(c2[1522],simde_mm_xor_si128(c2[5928],simde_mm_xor_si128(c2[6328],simde_mm_xor_si128(c2[4335],simde_mm_xor_si128(c2[3525],simde_mm_xor_si128(c2[1932],simde_mm_xor_si128(c2[1532],simde_mm_xor_si128(c2[1562],simde_mm_xor_si128(c2[5968],simde_mm_xor_si128(c2[4365],simde_mm_xor_si128(c2[2372],simde_mm_xor_si128(c2[765],simde_mm_xor_si128(c2[5571],c2[5171]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 41
+     d2[820]=simde_mm_xor_si128(c2[2803],simde_mm_xor_si128(c2[2403],simde_mm_xor_si128(c2[6011],simde_mm_xor_si128(c2[4004],simde_mm_xor_si128(c2[2843],simde_mm_xor_si128(c2[2443],simde_mm_xor_si128(c2[1649],simde_mm_xor_si128(c2[4845],simde_mm_xor_si128(c2[40],simde_mm_xor_si128(c2[2883],simde_mm_xor_si128(c2[2483],simde_mm_xor_si128(c2[2084],simde_mm_xor_si128(c2[2923],simde_mm_xor_si128(c2[2523],simde_mm_xor_si128(c2[931],simde_mm_xor_si128(c2[3729],simde_mm_xor_si128(c2[2563],simde_mm_xor_si128(c2[6163],simde_mm_xor_si128(c2[4969],simde_mm_xor_si128(c2[2603],simde_mm_xor_si128(c2[3412],simde_mm_xor_si128(c2[1000],simde_mm_xor_si128(c2[3043],simde_mm_xor_si128(c2[2643],simde_mm_xor_si128(c2[5848],simde_mm_xor_si128(c2[1044],simde_mm_xor_si128(c2[2683],simde_mm_xor_si128(c2[6286],simde_mm_xor_si128(c2[2723],simde_mm_xor_si128(c2[1130],simde_mm_xor_si128(c2[4726],simde_mm_xor_si128(c2[3163],simde_mm_xor_si128(c2[2763],simde_mm_xor_si128(c2[5566],c2[1966]))))))))))))))))))))))))))))))))));
+  }
+}
+#endif
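
The loop bounds and output indices of the two Zc=320 variants follow a vector-count layout (inferred from the emitted indices, not stated in the patch): a 16-byte simde__m128i gives 320/16 = 20 vectors per circulant column, hence i2<20 and row r writing d2[20*r] (row 41 lands at d2[820]); the AVX2 file uses 32-byte vectors with stride 320/32 = 10, so its row 41 lands at d2[410], as seen above. A checkable sketch of that arithmetic (row_slot is a hypothetical helper):

    #include <assert.h>

    enum { ZC = 320 };

    /* Row r of the parity output starts at vector index r * (ZC / vector_bytes). */
    static int row_slot(int r, int vec_bytes) { return r * (ZC / vec_bytes); }

    int main(void) {
      assert(row_slot(41, 16) == 820);  /* 128-bit file: d2[820] */
      assert(row_slot(41, 32) == 410);  /* AVX2 file: d2[410] */
      return 0;
    }
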
diff --git a/openair1/PHY/CODING/nrLDPC_encoder/ldpc_BG2_Zc32_byte.c b/openair1/PHY/CODING/nrLDPC_encoder/ldpc_BG2_Zc32_byte.c
index 5b983af26cee14e88f614aa42a62152bdf92a810..e3de7e7140b4903e27e5ea1dceb341222a002015 100644
--- a/openair1/PHY/CODING/nrLDPC_encoder/ldpc_BG2_Zc32_byte.c
+++ b/openair1/PHY/CODING/nrLDPC_encoder/ldpc_BG2_Zc32_byte.c
@@ -1,9 +1,10 @@
+#ifdef __AVX2__
 #include "PHY/sse_intrin.h"
 // generated code for Zc=32, byte encoding
 static inline __attribute__ ((no_sanitize("address", "undefined"))) void ldpc_BG2_Zc32_byte(uint8_t *c,uint8_t *d) {
-  __m256i *csimd=(__m256i *)c,*dsimd=(__m256i *)d;
+  simde__m256i *csimd=(simde__m256i *)c,*dsimd=(simde__m256i *)d;
 
-  __m256i *c2,*d2;
+  simde__m256i *c2,*d2;
 
   int i2;
   for (i2=0; i2<1; i2++) {
@@ -137,3 +138,4 @@ static inline __attribute__ ((no_sanitize("address", "undefined"))) void ldpc_BG
      d2[41]=simde_mm256_xor_si256(c2[372],simde_mm256_xor_si256(c2[360],simde_mm256_xor_si256(c2[336],simde_mm256_xor_si256(c2[72],simde_mm256_xor_si256(c2[134],simde_mm256_xor_si256(c2[122],simde_mm256_xor_si256(c2[86],simde_mm256_xor_si256(c2[350],simde_mm256_xor_si256(c2[14],simde_mm256_xor_si256(c2[28],simde_mm256_xor_si256(c2[16],simde_mm256_xor_si256(c2[184],simde_mm256_xor_si256(c2[198],simde_mm256_xor_si256(c2[186],simde_mm256_xor_si256(c2[330],simde_mm256_xor_si256(c2[18],simde_mm256_xor_si256(c2[224],simde_mm256_xor_si256(c2[116],simde_mm256_xor_si256(c2[236],simde_mm256_xor_si256(c2[226],simde_mm256_xor_si256(c2[358],simde_mm256_xor_si256(c2[70],simde_mm256_xor_si256(c2[240],simde_mm256_xor_si256(c2[228],simde_mm256_xor_si256(c2[288],simde_mm256_xor_si256(c2[84],simde_mm256_xor_si256(c2[218],simde_mm256_xor_si256(c2[146],simde_mm256_xor_si256(c2[268],simde_mm256_xor_si256(c2[76],simde_mm256_xor_si256(c2[100],simde_mm256_xor_si256(c2[54],simde_mm256_xor_si256(c2[42],simde_mm256_xor_si256(c2[222],c2[270]))))))))))))))))))))))))))))))))));
   }
 }
+#endif
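
As with the Zc=320 pair, both Zc=32 variants are static inline functions in .c files, which only works if the encoder pulls them in textually and dispatches on the lifting size. A hypothetical sketch of that arrangement (the include names follow the patch; the encode_BG2 wrapper and its switch are assumptions, not the encoder's actual dispatch code):

    #include "ldpc_BG2_Zc32_byte.c"      /* body compiled only when __AVX2__ */
    #include "ldpc_BG2_Zc32_byte_128.c"  /* body compiled only when !__AVX2__ */

    /* Per-Zc dispatch; the real encoder covers every generated lifting size. */
    static void encode_BG2(uint8_t *c, uint8_t *d, int Zc) {
      switch (Zc) {
        case 32: ldpc_BG2_Zc32_byte(c, d); break;
        /* ... other Zc cases ... */
      }
    }
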
diff --git a/openair1/PHY/CODING/nrLDPC_encoder/ldpc_BG2_Zc32_byte_128.c b/openair1/PHY/CODING/nrLDPC_encoder/ldpc_BG2_Zc32_byte_128.c
new file mode 100644
index 0000000000000000000000000000000000000000..6e9a92efe5a8d63b54bbced45795db5e38a42eed
--- /dev/null
+++ b/openair1/PHY/CODING/nrLDPC_encoder/ldpc_BG2_Zc32_byte_128.c
@@ -0,0 +1,141 @@
+#ifndef __AVX2__
+#include "PHY/sse_intrin.h"
+// generated code for Zc=32, byte encoding
+static inline __attribute__ ((no_sanitize("address", "undefined"))) void ldpc_BG2_Zc32_byte(uint8_t *c,uint8_t *d) {
+  simde__m128i *csimd=(simde__m128i *)c,*dsimd=(simde__m128i *)d;
+
+  simde__m128i *c2,*d2;
+
+  int i2;
+  for (i2=0; i2<2; i2++) {
+     c2=&csimd[i2];
+     d2=&dsimd[i2];
+
+//row: 0
+     d2[0]=simde_mm_xor_si128(c2[640],simde_mm_xor_si128(c2[480],simde_mm_xor_si128(c2[1],simde_mm_xor_si128(c2[325],simde_mm_xor_si128(c2[85],simde_mm_xor_si128(c2[564],simde_mm_xor_si128(c2[888],simde_mm_xor_si128(c2[729],simde_mm_xor_si128(c2[733],simde_mm_xor_si128(c2[412],simde_mm_xor_si128(c2[892],simde_mm_xor_si128(c2[977],simde_mm_xor_si128(c2[257],simde_mm_xor_si128(c2[1057],simde_mm_xor_si128(c2[981],simde_mm_xor_si128(c2[580],simde_mm_xor_si128(c2[985],simde_mm_xor_si128(c2[104],simde_mm_xor_si128(c2[25],simde_mm_xor_si128(c2[909],simde_mm_xor_si128(c2[429],simde_mm_xor_si128(c2[1233],simde_mm_xor_si128(c2[1232],simde_mm_xor_si128(c2[113],simde_mm_xor_si128(c2[996],simde_mm_xor_si128(c2[917],c2[1237]))))))))))))))))))))))))));
+
+//row: 1
+     d2[2]=simde_mm_xor_si128(c2[720],simde_mm_xor_si128(c2[640],simde_mm_xor_si128(c2[480],simde_mm_xor_si128(c2[1],simde_mm_xor_si128(c2[405],simde_mm_xor_si128(c2[325],simde_mm_xor_si128(c2[85],simde_mm_xor_si128(c2[564],simde_mm_xor_si128(c2[968],simde_mm_xor_si128(c2[888],simde_mm_xor_si128(c2[729],simde_mm_xor_si128(c2[813],simde_mm_xor_si128(c2[733],simde_mm_xor_si128(c2[412],simde_mm_xor_si128(c2[892],simde_mm_xor_si128(c2[977],simde_mm_xor_si128(c2[257],simde_mm_xor_si128(c2[1057],simde_mm_xor_si128(c2[981],simde_mm_xor_si128(c2[580],simde_mm_xor_si128(c2[1065],simde_mm_xor_si128(c2[985],simde_mm_xor_si128(c2[104],simde_mm_xor_si128(c2[25],simde_mm_xor_si128(c2[909],simde_mm_xor_si128(c2[429],simde_mm_xor_si128(c2[1233],simde_mm_xor_si128(c2[1232],simde_mm_xor_si128(c2[113],simde_mm_xor_si128(c2[1076],simde_mm_xor_si128(c2[996],simde_mm_xor_si128(c2[917],c2[1237]))))))))))))))))))))))))))))))));
+
+//row: 2
+     d2[4]=simde_mm_xor_si128(c2[720],simde_mm_xor_si128(c2[640],simde_mm_xor_si128(c2[560],simde_mm_xor_si128(c2[480],simde_mm_xor_si128(c2[1],simde_mm_xor_si128(c2[405],simde_mm_xor_si128(c2[325],simde_mm_xor_si128(c2[85],simde_mm_xor_si128(c2[564],simde_mm_xor_si128(c2[968],simde_mm_xor_si128(c2[888],simde_mm_xor_si128(c2[729],simde_mm_xor_si128(c2[813],simde_mm_xor_si128(c2[733],simde_mm_xor_si128(c2[492],simde_mm_xor_si128(c2[412],simde_mm_xor_si128(c2[892],simde_mm_xor_si128(c2[1057],simde_mm_xor_si128(c2[977],simde_mm_xor_si128(c2[257],simde_mm_xor_si128(c2[1057],simde_mm_xor_si128(c2[1061],simde_mm_xor_si128(c2[981],simde_mm_xor_si128(c2[580],simde_mm_xor_si128(c2[1065],simde_mm_xor_si128(c2[985],simde_mm_xor_si128(c2[184],simde_mm_xor_si128(c2[104],simde_mm_xor_si128(c2[25],simde_mm_xor_si128(c2[989],simde_mm_xor_si128(c2[909],simde_mm_xor_si128(c2[429],simde_mm_xor_si128(c2[32],simde_mm_xor_si128(c2[1233],simde_mm_xor_si128(c2[1232],simde_mm_xor_si128(c2[113],simde_mm_xor_si128(c2[1076],simde_mm_xor_si128(c2[996],simde_mm_xor_si128(c2[997],simde_mm_xor_si128(c2[917],c2[1237]))))))))))))))))))))))))))))))))))))))));
+
+//row: 3
+     d2[6]=simde_mm_xor_si128(c2[640],simde_mm_xor_si128(c2[480],simde_mm_xor_si128(c2[1],simde_mm_xor_si128(c2[325],simde_mm_xor_si128(c2[85],simde_mm_xor_si128(c2[644],simde_mm_xor_si128(c2[564],simde_mm_xor_si128(c2[888],simde_mm_xor_si128(c2[809],simde_mm_xor_si128(c2[729],simde_mm_xor_si128(c2[733],simde_mm_xor_si128(c2[412],simde_mm_xor_si128(c2[892],simde_mm_xor_si128(c2[977],simde_mm_xor_si128(c2[257],simde_mm_xor_si128(c2[1137],simde_mm_xor_si128(c2[1057],simde_mm_xor_si128(c2[981],simde_mm_xor_si128(c2[660],simde_mm_xor_si128(c2[580],simde_mm_xor_si128(c2[985],simde_mm_xor_si128(c2[104],simde_mm_xor_si128(c2[105],simde_mm_xor_si128(c2[25],simde_mm_xor_si128(c2[909],simde_mm_xor_si128(c2[509],simde_mm_xor_si128(c2[429],simde_mm_xor_si128(c2[1233],simde_mm_xor_si128(c2[1232],simde_mm_xor_si128(c2[193],simde_mm_xor_si128(c2[113],simde_mm_xor_si128(c2[996],simde_mm_xor_si128(c2[917],simde_mm_xor_si128(c2[36],c2[1237]))))))))))))))))))))))))))))))))));
+
+//row: 4
+     d2[8]=simde_mm_xor_si128(c2[1],simde_mm_xor_si128(c2[1200],simde_mm_xor_si128(c2[1040],simde_mm_xor_si128(c2[561],simde_mm_xor_si128(c2[241],simde_mm_xor_si128(c2[965],simde_mm_xor_si128(c2[885],simde_mm_xor_si128(c2[645],simde_mm_xor_si128(c2[1124],simde_mm_xor_si128(c2[485],simde_mm_xor_si128(c2[249],simde_mm_xor_si128(c2[169],simde_mm_xor_si128(c2[8],simde_mm_xor_si128(c2[92],simde_mm_xor_si128(c2[12],simde_mm_xor_si128(c2[972],simde_mm_xor_si128(c2[173],simde_mm_xor_si128(c2[256],simde_mm_xor_si128(c2[817],simde_mm_xor_si128(c2[336],simde_mm_xor_si128(c2[260],simde_mm_xor_si128(c2[1140],simde_mm_xor_si128(c2[344],simde_mm_xor_si128(c2[264],simde_mm_xor_si128(c2[664],simde_mm_xor_si128(c2[585],simde_mm_xor_si128(c2[188],simde_mm_xor_si128(c2[989],simde_mm_xor_si128(c2[512],simde_mm_xor_si128(c2[513],simde_mm_xor_si128(c2[673],simde_mm_xor_si128(c2[357],simde_mm_xor_si128(c2[277],simde_mm_xor_si128(c2[196],c2[516]))))))))))))))))))))))))))))))))));
+
+//row: 5
+     d2[10]=simde_mm_xor_si128(c2[1],simde_mm_xor_si128(c2[1200],simde_mm_xor_si128(c2[1040],simde_mm_xor_si128(c2[561],simde_mm_xor_si128(c2[560],simde_mm_xor_si128(c2[965],simde_mm_xor_si128(c2[885],simde_mm_xor_si128(c2[645],simde_mm_xor_si128(c2[1124],simde_mm_xor_si128(c2[724],simde_mm_xor_si128(c2[249],simde_mm_xor_si128(c2[169],simde_mm_xor_si128(c2[8],simde_mm_xor_si128(c2[92],simde_mm_xor_si128(c2[12],simde_mm_xor_si128(c2[972],simde_mm_xor_si128(c2[173],simde_mm_xor_si128(c2[256],simde_mm_xor_si128(c2[817],simde_mm_xor_si128(c2[336],simde_mm_xor_si128(c2[260],simde_mm_xor_si128(c2[1140],simde_mm_xor_si128(c2[180],simde_mm_xor_si128(c2[344],simde_mm_xor_si128(c2[264],simde_mm_xor_si128(c2[664],simde_mm_xor_si128(c2[585],simde_mm_xor_si128(c2[188],simde_mm_xor_si128(c2[989],simde_mm_xor_si128(c2[1229],simde_mm_xor_si128(c2[512],simde_mm_xor_si128(c2[513],simde_mm_xor_si128(c2[673],simde_mm_xor_si128(c2[357],simde_mm_xor_si128(c2[277],simde_mm_xor_si128(c2[196],c2[516]))))))))))))))))))))))))))))))))))));
+
+//row: 6
+     d2[12]=simde_mm_xor_si128(c2[560],simde_mm_xor_si128(c2[480],simde_mm_xor_si128(c2[320],simde_mm_xor_si128(c2[1120],simde_mm_xor_si128(c2[881],simde_mm_xor_si128(c2[245],simde_mm_xor_si128(c2[165],simde_mm_xor_si128(c2[1204],simde_mm_xor_si128(c2[404],simde_mm_xor_si128(c2[808],simde_mm_xor_si128(c2[728],simde_mm_xor_si128(c2[569],simde_mm_xor_si128(c2[653],simde_mm_xor_si128(c2[573],simde_mm_xor_si128(c2[252],simde_mm_xor_si128(c2[732],simde_mm_xor_si128(c2[817],simde_mm_xor_si128(c2[97],simde_mm_xor_si128(c2[897],simde_mm_xor_si128(c2[821],simde_mm_xor_si128(c2[420],simde_mm_xor_si128(c2[340],simde_mm_xor_si128(c2[905],simde_mm_xor_si128(c2[825],simde_mm_xor_si128(c2[1225],simde_mm_xor_si128(c2[1144],simde_mm_xor_si128(c2[749],simde_mm_xor_si128(c2[269],simde_mm_xor_si128(c2[1068],simde_mm_xor_si128(c2[1073],simde_mm_xor_si128(c2[1072],simde_mm_xor_si128(c2[1232],simde_mm_xor_si128(c2[916],simde_mm_xor_si128(c2[836],simde_mm_xor_si128(c2[757],simde_mm_xor_si128(c2[1077],c2[997]))))))))))))))))))))))))))))))))))));
+
+//row: 7
+     d2[14]=simde_mm_xor_si128(c2[960],simde_mm_xor_si128(c2[880],simde_mm_xor_si128(c2[961],simde_mm_xor_si128(c2[720],simde_mm_xor_si128(c2[801],simde_mm_xor_si128(c2[241],simde_mm_xor_si128(c2[320],simde_mm_xor_si128(c2[645],simde_mm_xor_si128(c2[565],simde_mm_xor_si128(c2[644],simde_mm_xor_si128(c2[325],simde_mm_xor_si128(c2[404],simde_mm_xor_si128(c2[965],simde_mm_xor_si128(c2[804],simde_mm_xor_si128(c2[885],simde_mm_xor_si128(c2[84],simde_mm_xor_si128(c2[1208],simde_mm_xor_si128(c2[1128],simde_mm_xor_si128(c2[1209],simde_mm_xor_si128(c2[1128],simde_mm_xor_si128(c2[969],simde_mm_xor_si128(c2[1048],simde_mm_xor_si128(c2[1053],simde_mm_xor_si128(c2[973],simde_mm_xor_si128(c2[1052],simde_mm_xor_si128(c2[652],simde_mm_xor_si128(c2[733],simde_mm_xor_si128(c2[1132],simde_mm_xor_si128(c2[1213],simde_mm_xor_si128(c2[1217],simde_mm_xor_si128(c2[17],simde_mm_xor_si128(c2[497],simde_mm_xor_si128(c2[576],simde_mm_xor_si128(c2[177],simde_mm_xor_si128(c2[16],simde_mm_xor_si128(c2[97],simde_mm_xor_si128(c2[1221],simde_mm_xor_si128(c2[21],simde_mm_xor_si128(c2[981],simde_mm_xor_si128(c2[820],simde_mm_xor_si128(c2[901],simde_mm_xor_si128(c2[261],simde_mm_xor_si128(c2[24],simde_mm_xor_si128(c2[1225],simde_mm_xor_si128(c2[25],simde_mm_xor_si128(c2[344],simde_mm_xor_si128(c2[425],simde_mm_xor_si128(c2[424],simde_mm_xor_si128(c2[265],simde_mm_xor_si128(c2[344],simde_mm_xor_si128(c2[1149],simde_mm_xor_si128(c2[1228],simde_mm_xor_si128(c2[828],simde_mm_xor_si128(c2[669],simde_mm_xor_si128(c2[748],simde_mm_xor_si128(c2[988],simde_mm_xor_si128(c2[192],simde_mm_xor_si128(c2[273],simde_mm_xor_si128(c2[193],simde_mm_xor_si128(c2[272],simde_mm_xor_si128(c2[512],simde_mm_xor_si128(c2[353],simde_mm_xor_si128(c2[432],simde_mm_xor_si128(c2[37],simde_mm_xor_si128(c2[1236],simde_mm_xor_si128(c2[36],simde_mm_xor_si128(c2[1157],simde_mm_xor_si128(c2[1236],simde_mm_xor_si128(c2[357],simde_mm_xor_si128(c2[196],c2[277]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 8
+     d2[16]=simde_mm_xor_si128(c2[1200],simde_mm_xor_si128(c2[1120],simde_mm_xor_si128(c2[1040],simde_mm_xor_si128(c2[960],simde_mm_xor_si128(c2[481],simde_mm_xor_si128(c2[1120],simde_mm_xor_si128(c2[885],simde_mm_xor_si128(c2[805],simde_mm_xor_si128(c2[565],simde_mm_xor_si128(c2[1044],simde_mm_xor_si128(c2[1125],simde_mm_xor_si128(c2[169],simde_mm_xor_si128(c2[89],simde_mm_xor_si128(c2[1209],simde_mm_xor_si128(c2[12],simde_mm_xor_si128(c2[1213],simde_mm_xor_si128(c2[972],simde_mm_xor_si128(c2[892],simde_mm_xor_si128(c2[93],simde_mm_xor_si128(c2[256],simde_mm_xor_si128(c2[176],simde_mm_xor_si128(c2[737],simde_mm_xor_si128(c2[256],simde_mm_xor_si128(c2[260],simde_mm_xor_si128(c2[180],simde_mm_xor_si128(c2[1060],simde_mm_xor_si128(c2[264],simde_mm_xor_si128(c2[184],simde_mm_xor_si128(c2[664],simde_mm_xor_si128(c2[584],simde_mm_xor_si128(c2[505],simde_mm_xor_si128(c2[188],simde_mm_xor_si128(c2[108],simde_mm_xor_si128(c2[909],simde_mm_xor_si128(c2[512],simde_mm_xor_si128(c2[432],simde_mm_xor_si128(c2[433],simde_mm_xor_si128(c2[593],simde_mm_xor_si128(c2[277],simde_mm_xor_si128(c2[197],simde_mm_xor_si128(c2[196],simde_mm_xor_si128(c2[116],c2[436]))))))))))))))))))))))))))))))))))))))))));
+
+//row: 9
+     d2[18]=simde_mm_xor_si128(c2[0],simde_mm_xor_si128(c2[400],simde_mm_xor_si128(c2[1201],simde_mm_xor_si128(c2[240],simde_mm_xor_si128(c2[1041],simde_mm_xor_si128(c2[1040],simde_mm_xor_si128(c2[560],simde_mm_xor_si128(c2[964],simde_mm_xor_si128(c2[85],simde_mm_xor_si128(c2[884],simde_mm_xor_si128(c2[1124],simde_mm_xor_si128(c2[644],simde_mm_xor_si128(c2[324],simde_mm_xor_si128(c2[1125],simde_mm_xor_si128(c2[884],simde_mm_xor_si128(c2[248],simde_mm_xor_si128(c2[648],simde_mm_xor_si128(c2[168],simde_mm_xor_si128(c2[489],simde_mm_xor_si128(c2[9],simde_mm_xor_si128(c2[93],simde_mm_xor_si128(c2[493],simde_mm_xor_si128(c2[13],simde_mm_xor_si128(c2[172],simde_mm_xor_si128(c2[973],simde_mm_xor_si128(c2[652],simde_mm_xor_si128(c2[172],simde_mm_xor_si128(c2[737],simde_mm_xor_si128(c2[257],simde_mm_xor_si128(c2[17],simde_mm_xor_si128(c2[816],simde_mm_xor_si128(c2[817],simde_mm_xor_si128(c2[337],simde_mm_xor_si128(c2[741],simde_mm_xor_si128(c2[261],simde_mm_xor_si128(c2[340],simde_mm_xor_si128(c2[1141],simde_mm_xor_si128(c2[345],simde_mm_xor_si128(c2[745],simde_mm_xor_si128(c2[265],simde_mm_xor_si128(c2[1145],simde_mm_xor_si128(c2[665],simde_mm_xor_si128(c2[1064],simde_mm_xor_si128(c2[584],simde_mm_xor_si128(c2[669],simde_mm_xor_si128(c2[189],simde_mm_xor_si128(c2[189],simde_mm_xor_si128(c2[988],simde_mm_xor_si128(c2[993],simde_mm_xor_si128(c2[513],simde_mm_xor_si128(c2[992],simde_mm_xor_si128(c2[512],simde_mm_xor_si128(c2[1152],simde_mm_xor_si128(c2[672],simde_mm_xor_si128(c2[1072],simde_mm_xor_si128(c2[356],simde_mm_xor_si128(c2[756],simde_mm_xor_si128(c2[276],simde_mm_xor_si128(c2[677],simde_mm_xor_si128(c2[197],simde_mm_xor_si128(c2[997],c2[517])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 10
+     d2[20]=simde_mm_xor_si128(c2[880],simde_mm_xor_si128(c2[725],simde_mm_xor_si128(c2[24],c2[429])));
+
+//row: 11
+     d2[22]=simde_mm_xor_si128(c2[0],simde_mm_xor_si128(c2[1121],simde_mm_xor_si128(c2[640],simde_mm_xor_si128(c2[880],simde_mm_xor_si128(c2[964],simde_mm_xor_si128(c2[724],simde_mm_xor_si128(c2[4],simde_mm_xor_si128(c2[1205],simde_mm_xor_si128(c2[248],simde_mm_xor_si128(c2[169],simde_mm_xor_si128(c2[89],simde_mm_xor_si128(c2[93],simde_mm_xor_si128(c2[1053],simde_mm_xor_si128(c2[252],simde_mm_xor_si128(c2[337],simde_mm_xor_si128(c2[896],simde_mm_xor_si128(c2[497],simde_mm_xor_si128(c2[417],simde_mm_xor_si128(c2[341],simde_mm_xor_si128(c2[20],simde_mm_xor_si128(c2[1221],simde_mm_xor_si128(c2[345],simde_mm_xor_si128(c2[745],simde_mm_xor_si128(c2[744],simde_mm_xor_si128(c2[664],simde_mm_xor_si128(c2[269],simde_mm_xor_si128(c2[1148],simde_mm_xor_si128(c2[1068],simde_mm_xor_si128(c2[988],simde_mm_xor_si128(c2[593],simde_mm_xor_si128(c2[592],simde_mm_xor_si128(c2[832],simde_mm_xor_si128(c2[752],simde_mm_xor_si128(c2[356],simde_mm_xor_si128(c2[277],simde_mm_xor_si128(c2[677],simde_mm_xor_si128(c2[597],c2[197])))))))))))))))))))))))))))))))))))));
+
+//row: 12
+     d2[24]=simde_mm_xor_si128(c2[561],simde_mm_xor_si128(c2[481],simde_mm_xor_si128(c2[321],simde_mm_xor_si128(c2[1121],simde_mm_xor_si128(c2[244],simde_mm_xor_si128(c2[164],simde_mm_xor_si128(c2[1205],simde_mm_xor_si128(c2[405],simde_mm_xor_si128(c2[1205],simde_mm_xor_si128(c2[809],simde_mm_xor_si128(c2[729],simde_mm_xor_si128(c2[568],simde_mm_xor_si128(c2[652],simde_mm_xor_si128(c2[572],simde_mm_xor_si128(c2[253],simde_mm_xor_si128(c2[733],simde_mm_xor_si128(c2[1212],simde_mm_xor_si128(c2[816],simde_mm_xor_si128(c2[96],simde_mm_xor_si128(c2[896],simde_mm_xor_si128(c2[820],simde_mm_xor_si128(c2[421],simde_mm_xor_si128(c2[904],simde_mm_xor_si128(c2[824],simde_mm_xor_si128(c2[1224],simde_mm_xor_si128(c2[1145],simde_mm_xor_si128(c2[748],simde_mm_xor_si128(c2[268],simde_mm_xor_si128(c2[1072],simde_mm_xor_si128(c2[1073],simde_mm_xor_si128(c2[1233],simde_mm_xor_si128(c2[917],simde_mm_xor_si128(c2[837],simde_mm_xor_si128(c2[756],c2[1076]))))))))))))))))))))))))))))))))));
+
+//row: 13
+     d2[26]=simde_mm_xor_si128(c2[480],simde_mm_xor_si128(c2[320],simde_mm_xor_si128(c2[1120],simde_mm_xor_si128(c2[241],simde_mm_xor_si128(c2[165],simde_mm_xor_si128(c2[1204],simde_mm_xor_si128(c2[484],simde_mm_xor_si128(c2[404],simde_mm_xor_si128(c2[164],simde_mm_xor_si128(c2[728],simde_mm_xor_si128(c2[649],simde_mm_xor_si128(c2[569],simde_mm_xor_si128(c2[573],simde_mm_xor_si128(c2[252],simde_mm_xor_si128(c2[732],simde_mm_xor_si128(c2[817],simde_mm_xor_si128(c2[97],simde_mm_xor_si128(c2[977],simde_mm_xor_si128(c2[897],simde_mm_xor_si128(c2[821],simde_mm_xor_si128(c2[500],simde_mm_xor_si128(c2[420],simde_mm_xor_si128(c2[825],simde_mm_xor_si128(c2[1225],simde_mm_xor_si128(c2[1224],simde_mm_xor_si128(c2[1144],simde_mm_xor_si128(c2[749],simde_mm_xor_si128(c2[349],simde_mm_xor_si128(c2[269],simde_mm_xor_si128(c2[1073],simde_mm_xor_si128(c2[1072],simde_mm_xor_si128(c2[33],simde_mm_xor_si128(c2[1232],simde_mm_xor_si128(c2[512],simde_mm_xor_si128(c2[836],simde_mm_xor_si128(c2[757],simde_mm_xor_si128(c2[1157],c2[1077])))))))))))))))))))))))))))))))))))));
+
+//row: 14
+     d2[28]=simde_mm_xor_si128(c2[960],simde_mm_xor_si128(c2[880],simde_mm_xor_si128(c2[1],simde_mm_xor_si128(c2[720],simde_mm_xor_si128(c2[1120],simde_mm_xor_si128(c2[241],simde_mm_xor_si128(c2[641],simde_mm_xor_si128(c2[645],simde_mm_xor_si128(c2[565],simde_mm_xor_si128(c2[965],simde_mm_xor_si128(c2[325],simde_mm_xor_si128(c2[725],simde_mm_xor_si128(c2[5],simde_mm_xor_si128(c2[804],simde_mm_xor_si128(c2[1204],simde_mm_xor_si128(c2[245],simde_mm_xor_si128(c2[1208],simde_mm_xor_si128(c2[1128],simde_mm_xor_si128(c2[249],simde_mm_xor_si128(c2[168],simde_mm_xor_si128(c2[969],simde_mm_xor_si128(c2[88],simde_mm_xor_si128(c2[1053],simde_mm_xor_si128(c2[973],simde_mm_xor_si128(c2[92],simde_mm_xor_si128(c2[652],simde_mm_xor_si128(c2[1052],simde_mm_xor_si128(c2[1132],simde_mm_xor_si128(c2[253],simde_mm_xor_si128(c2[1217],simde_mm_xor_si128(c2[336],simde_mm_xor_si128(c2[497],simde_mm_xor_si128(c2[897],simde_mm_xor_si128(c2[496],simde_mm_xor_si128(c2[16],simde_mm_xor_si128(c2[416],simde_mm_xor_si128(c2[1221],simde_mm_xor_si128(c2[340],simde_mm_xor_si128(c2[21],simde_mm_xor_si128(c2[820],simde_mm_xor_si128(c2[1220],simde_mm_xor_si128(c2[24],simde_mm_xor_si128(c2[1225],simde_mm_xor_si128(c2[344],simde_mm_xor_si128(c2[344],simde_mm_xor_si128(c2[744],simde_mm_xor_si128(c2[745],simde_mm_xor_si128(c2[265],simde_mm_xor_si128(c2[665],simde_mm_xor_si128(c2[105],simde_mm_xor_si128(c2[1149],simde_mm_xor_si128(c2[268],simde_mm_xor_si128(c2[1149],simde_mm_xor_si128(c2[669],simde_mm_xor_si128(c2[1069],simde_mm_xor_si128(c2[192],simde_mm_xor_si128(c2[592],simde_mm_xor_si128(c2[193],simde_mm_xor_si128(c2[593],simde_mm_xor_si128(c2[833],simde_mm_xor_si128(c2[353],simde_mm_xor_si128(c2[753],simde_mm_xor_si128(c2[37],simde_mm_xor_si128(c2[1236],simde_mm_xor_si128(c2[357],simde_mm_xor_si128(c2[1157],simde_mm_xor_si128(c2[276],simde_mm_xor_si128(c2[676],simde_mm_xor_si128(c2[196],c2[596])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 15
+     d2[30]=simde_mm_xor_si128(c2[1121],simde_mm_xor_si128(c2[561],simde_mm_xor_si128(c2[1041],simde_mm_xor_si128(c2[401],simde_mm_xor_si128(c2[881],simde_mm_xor_si128(c2[1201],simde_mm_xor_si128(c2[400],simde_mm_xor_si128(c2[241],simde_mm_xor_si128(c2[804],simde_mm_xor_si128(c2[244],simde_mm_xor_si128(c2[724],simde_mm_xor_si128(c2[4],simde_mm_xor_si128(c2[484],simde_mm_xor_si128(c2[485],simde_mm_xor_si128(c2[965],simde_mm_xor_si128(c2[88],simde_mm_xor_si128(c2[809],simde_mm_xor_si128(c2[8],simde_mm_xor_si128(c2[648],simde_mm_xor_si128(c2[1128],simde_mm_xor_si128(c2[1212],simde_mm_xor_si128(c2[652],simde_mm_xor_si128(c2[1132],simde_mm_xor_si128(c2[333],simde_mm_xor_si128(c2[813],simde_mm_xor_si128(c2[813],simde_mm_xor_si128(c2[12],simde_mm_xor_si128(c2[896],simde_mm_xor_si128(c2[97],simde_mm_xor_si128(c2[176],simde_mm_xor_si128(c2[656],simde_mm_xor_si128(c2[976],simde_mm_xor_si128(c2[177],simde_mm_xor_si128(c2[900],simde_mm_xor_si128(c2[101],simde_mm_xor_si128(c2[501],simde_mm_xor_si128(c2[981],simde_mm_xor_si128(c2[185],simde_mm_xor_si128(c2[904],simde_mm_xor_si128(c2[105],simde_mm_xor_si128(c2[25],simde_mm_xor_si128(c2[505],simde_mm_xor_si128(c2[1225],simde_mm_xor_si128(c2[424],simde_mm_xor_si128(c2[828],simde_mm_xor_si128(c2[29],simde_mm_xor_si128(c2[348],simde_mm_xor_si128(c2[828],simde_mm_xor_si128(c2[1152],simde_mm_xor_si128(c2[353],simde_mm_xor_si128(c2[1153],simde_mm_xor_si128(c2[352],simde_mm_xor_si128(c2[32],simde_mm_xor_si128(c2[512],simde_mm_xor_si128(c2[196],simde_mm_xor_si128(c2[917],simde_mm_xor_si128(c2[116],simde_mm_xor_si128(c2[836],simde_mm_xor_si128(c2[37],simde_mm_xor_si128(c2[1156],c2[357]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 16
+     d2[32]=simde_mm_xor_si128(c2[81],simde_mm_xor_si128(c2[881],simde_mm_xor_si128(c2[1],simde_mm_xor_si128(c2[801],simde_mm_xor_si128(c2[721],simde_mm_xor_si128(c2[1120],simde_mm_xor_si128(c2[641],simde_mm_xor_si128(c2[641],simde_mm_xor_si128(c2[160],simde_mm_xor_si128(c2[1045],simde_mm_xor_si128(c2[564],simde_mm_xor_si128(c2[965],simde_mm_xor_si128(c2[484],simde_mm_xor_si128(c2[725],simde_mm_xor_si128(c2[244],simde_mm_xor_si128(c2[1204],simde_mm_xor_si128(c2[725],simde_mm_xor_si128(c2[884],simde_mm_xor_si128(c2[329],simde_mm_xor_si128(c2[1129],simde_mm_xor_si128(c2[249],simde_mm_xor_si128(c2[1049],simde_mm_xor_si128(c2[88],simde_mm_xor_si128(c2[888],simde_mm_xor_si128(c2[172],simde_mm_xor_si128(c2[972],simde_mm_xor_si128(c2[92],simde_mm_xor_si128(c2[892],simde_mm_xor_si128(c2[653],simde_mm_xor_si128(c2[1052],simde_mm_xor_si128(c2[573],simde_mm_xor_si128(c2[253],simde_mm_xor_si128(c2[1053],simde_mm_xor_si128(c2[1216],simde_mm_xor_si128(c2[336],simde_mm_xor_si128(c2[1136],simde_mm_xor_si128(c2[897],simde_mm_xor_si128(c2[416],simde_mm_xor_si128(c2[416],simde_mm_xor_si128(c2[1216],simde_mm_xor_si128(c2[1220],simde_mm_xor_si128(c2[340],simde_mm_xor_si128(c2[1140],simde_mm_xor_si128(c2[1220],simde_mm_xor_si128(c2[741],simde_mm_xor_si128(c2[424],simde_mm_xor_si128(c2[1224],simde_mm_xor_si128(c2[344],simde_mm_xor_si128(c2[1144],simde_mm_xor_si128(c2[345],simde_mm_xor_si128(c2[744],simde_mm_xor_si128(c2[265],simde_mm_xor_si128(c2[665],simde_mm_xor_si128(c2[184],simde_mm_xor_si128(c2[1148],simde_mm_xor_si128(c2[268],simde_mm_xor_si128(c2[1068],simde_mm_xor_si128(c2[1069],simde_mm_xor_si128(c2[588],simde_mm_xor_si128(c2[193],simde_mm_xor_si128(c2[592],simde_mm_xor_si128(c2[113],simde_mm_xor_si128(c2[593],simde_mm_xor_si128(c2[112],simde_mm_xor_si128(c2[753],simde_mm_xor_si128(c2[272],simde_mm_xor_si128(c2[437],simde_mm_xor_si128(c2[1237],simde_mm_xor_si128(c2[357],simde_mm_xor_si128(c2[1157],simde_mm_xor_si128(c2[1156],simde_mm_xor_si128(c2[276],simde_mm_xor_si128(c2[1076],simde_mm_xor_si128(c2[596],simde_mm_xor_si128(c2[117],c2[1156])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 17
+     d2[34]=simde_mm_xor_si128(c2[881],simde_mm_xor_si128(c2[720],simde_mm_xor_si128(c2[801],simde_mm_xor_si128(c2[640],simde_mm_xor_si128(c2[560],simde_mm_xor_si128(c2[641],simde_mm_xor_si128(c2[480],simde_mm_xor_si128(c2[160],simde_mm_xor_si128(c2[1],simde_mm_xor_si128(c2[564],simde_mm_xor_si128(c2[405],simde_mm_xor_si128(c2[484],simde_mm_xor_si128(c2[325],simde_mm_xor_si128(c2[244],simde_mm_xor_si128(c2[85],simde_mm_xor_si128(c2[725],simde_mm_xor_si128(c2[564],simde_mm_xor_si128(c2[1125],simde_mm_xor_si128(c2[1129],simde_mm_xor_si128(c2[968],simde_mm_xor_si128(c2[1049],simde_mm_xor_si128(c2[888],simde_mm_xor_si128(c2[888],simde_mm_xor_si128(c2[729],simde_mm_xor_si128(c2[972],simde_mm_xor_si128(c2[813],simde_mm_xor_si128(c2[892],simde_mm_xor_si128(c2[733],simde_mm_xor_si128(c2[492],simde_mm_xor_si128(c2[573],simde_mm_xor_si128(c2[412],simde_mm_xor_si128(c2[1053],simde_mm_xor_si128(c2[892],simde_mm_xor_si128(c2[1057],simde_mm_xor_si128(c2[1136],simde_mm_xor_si128(c2[977],simde_mm_xor_si128(c2[416],simde_mm_xor_si128(c2[257],simde_mm_xor_si128(c2[1216],simde_mm_xor_si128(c2[1057],simde_mm_xor_si128(c2[1061],simde_mm_xor_si128(c2[1140],simde_mm_xor_si128(c2[981],simde_mm_xor_si128(c2[741],simde_mm_xor_si128(c2[580],simde_mm_xor_si128(c2[981],simde_mm_xor_si128(c2[1224],simde_mm_xor_si128(c2[1065],simde_mm_xor_si128(c2[1144],simde_mm_xor_si128(c2[985],simde_mm_xor_si128(c2[184],simde_mm_xor_si128(c2[265],simde_mm_xor_si128(c2[104],simde_mm_xor_si128(c2[184],simde_mm_xor_si128(c2[25],simde_mm_xor_si128(c2[989],simde_mm_xor_si128(c2[1068],simde_mm_xor_si128(c2[909],simde_mm_xor_si128(c2[588],simde_mm_xor_si128(c2[429],simde_mm_xor_si128(c2[32],simde_mm_xor_si128(c2[113],simde_mm_xor_si128(c2[1233],simde_mm_xor_si128(c2[112],simde_mm_xor_si128(c2[1232],simde_mm_xor_si128(c2[272],simde_mm_xor_si128(c2[113],simde_mm_xor_si128(c2[1237],simde_mm_xor_si128(c2[1076],simde_mm_xor_si128(c2[1157],simde_mm_xor_si128(c2[996],simde_mm_xor_si128(c2[997],simde_mm_xor_si128(c2[1076],simde_mm_xor_si128(c2[917],simde_mm_xor_si128(c2[117],c2[1237])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 18
+     d2[36]=simde_mm_xor_si128(c2[961],simde_mm_xor_si128(c2[184],c2[189]));
+
+//row: 19
+     d2[38]=simde_mm_xor_si128(c2[80],simde_mm_xor_si128(c2[1201],simde_mm_xor_si128(c2[720],simde_mm_xor_si128(c2[561],simde_mm_xor_si128(c2[1044],simde_mm_xor_si128(c2[804],simde_mm_xor_si128(c2[4],simde_mm_xor_si128(c2[325],simde_mm_xor_si128(c2[328],simde_mm_xor_si128(c2[169],simde_mm_xor_si128(c2[173],simde_mm_xor_si128(c2[1133],simde_mm_xor_si128(c2[332],simde_mm_xor_si128(c2[417],simde_mm_xor_si128(c2[976],simde_mm_xor_si128(c2[497],simde_mm_xor_si128(c2[421],simde_mm_xor_si128(c2[20],simde_mm_xor_si128(c2[425],simde_mm_xor_si128(c2[825],simde_mm_xor_si128(c2[744],simde_mm_xor_si128(c2[349],simde_mm_xor_si128(c2[1148],simde_mm_xor_si128(c2[673],simde_mm_xor_si128(c2[672],simde_mm_xor_si128(c2[832],simde_mm_xor_si128(c2[436],simde_mm_xor_si128(c2[357],c2[677]))))))))))))))))))))))))))));
+
+//row: 20
+     d2[40]=simde_mm_xor_si128(c2[480],simde_mm_xor_si128(c2[400],simde_mm_xor_si128(c2[240],simde_mm_xor_si128(c2[1040],simde_mm_xor_si128(c2[165],simde_mm_xor_si128(c2[85],simde_mm_xor_si128(c2[1124],simde_mm_xor_si128(c2[324],simde_mm_xor_si128(c2[805],simde_mm_xor_si128(c2[728],simde_mm_xor_si128(c2[648],simde_mm_xor_si128(c2[489],simde_mm_xor_si128(c2[573],simde_mm_xor_si128(c2[493],simde_mm_xor_si128(c2[172],simde_mm_xor_si128(c2[652],simde_mm_xor_si128(c2[737],simde_mm_xor_si128(c2[17],simde_mm_xor_si128(c2[817],simde_mm_xor_si128(c2[736],simde_mm_xor_si128(c2[741],simde_mm_xor_si128(c2[340],simde_mm_xor_si128(c2[825],simde_mm_xor_si128(c2[745],simde_mm_xor_si128(c2[1145],simde_mm_xor_si128(c2[1064],simde_mm_xor_si128(c2[669],simde_mm_xor_si128(c2[189],simde_mm_xor_si128(c2[993],simde_mm_xor_si128(c2[992],simde_mm_xor_si128(c2[1152],simde_mm_xor_si128(c2[836],simde_mm_xor_si128(c2[756],simde_mm_xor_si128(c2[677],c2[997]))))))))))))))))))))))))))))))))));
+
+//row: 21
+     d2[42]=simde_mm_xor_si128(c2[801],simde_mm_xor_si128(c2[641],simde_mm_xor_si128(c2[160],simde_mm_xor_si128(c2[960],simde_mm_xor_si128(c2[484],simde_mm_xor_si128(c2[244],simde_mm_xor_si128(c2[805],simde_mm_xor_si128(c2[725],simde_mm_xor_si128(c2[1049],simde_mm_xor_si128(c2[968],simde_mm_xor_si128(c2[888],simde_mm_xor_si128(c2[892],simde_mm_xor_si128(c2[573],simde_mm_xor_si128(c2[1053],simde_mm_xor_si128(c2[1136],simde_mm_xor_si128(c2[416],simde_mm_xor_si128(c2[17],simde_mm_xor_si128(c2[1216],simde_mm_xor_si128(c2[1140],simde_mm_xor_si128(c2[821],simde_mm_xor_si128(c2[741],simde_mm_xor_si128(c2[1144],simde_mm_xor_si128(c2[265],simde_mm_xor_si128(c2[264],simde_mm_xor_si128(c2[184],simde_mm_xor_si128(c2[1068],simde_mm_xor_si128(c2[668],simde_mm_xor_si128(c2[588],simde_mm_xor_si128(c2[113],simde_mm_xor_si128(c2[112],simde_mm_xor_si128(c2[352],simde_mm_xor_si128(c2[272],simde_mm_xor_si128(c2[832],simde_mm_xor_si128(c2[1157],simde_mm_xor_si128(c2[1076],simde_mm_xor_si128(c2[197],c2[117]))))))))))))))))))))))))))))))))))));
+
+//row: 22
+     d2[44]=simde_mm_xor_si128(c2[1125],c2[1209]);
+
+//row: 23
+     d2[46]=simde_mm_xor_si128(c2[561],simde_mm_xor_si128(c2[892],c2[1140]));
+
+//row: 24
+     d2[48]=simde_mm_xor_si128(c2[1124],simde_mm_xor_si128(c2[888],c2[676]));
+
+//row: 25
+     d2[50]=simde_mm_xor_si128(c2[320],c2[981]);
+
+//row: 26
+     d2[52]=simde_mm_xor_si128(c2[720],simde_mm_xor_si128(c2[640],simde_mm_xor_si128(c2[160],simde_mm_xor_si128(c2[560],simde_mm_xor_si128(c2[480],simde_mm_xor_si128(c2[0],simde_mm_xor_si128(c2[1],simde_mm_xor_si128(c2[800],simde_mm_xor_si128(c2[405],simde_mm_xor_si128(c2[325],simde_mm_xor_si128(c2[1124],simde_mm_xor_si128(c2[85],simde_mm_xor_si128(c2[884],simde_mm_xor_si128(c2[164],simde_mm_xor_si128(c2[564],simde_mm_xor_si128(c2[84],simde_mm_xor_si128(c2[968],simde_mm_xor_si128(c2[888],simde_mm_xor_si128(c2[408],simde_mm_xor_si128(c2[329],simde_mm_xor_si128(c2[729],simde_mm_xor_si128(c2[249],simde_mm_xor_si128(c2[1049],simde_mm_xor_si128(c2[813],simde_mm_xor_si128(c2[733],simde_mm_xor_si128(c2[253],simde_mm_xor_si128(c2[492],simde_mm_xor_si128(c2[412],simde_mm_xor_si128(c2[1213],simde_mm_xor_si128(c2[892],simde_mm_xor_si128(c2[412],simde_mm_xor_si128(c2[1057],simde_mm_xor_si128(c2[977],simde_mm_xor_si128(c2[497],simde_mm_xor_si128(c2[257],simde_mm_xor_si128(c2[1056],simde_mm_xor_si128(c2[657],simde_mm_xor_si128(c2[1057],simde_mm_xor_si128(c2[577],simde_mm_xor_si128(c2[1061],simde_mm_xor_si128(c2[981],simde_mm_xor_si128(c2[501],simde_mm_xor_si128(c2[180],simde_mm_xor_si128(c2[580],simde_mm_xor_si128(c2[100],simde_mm_xor_si128(c2[1065],simde_mm_xor_si128(c2[985],simde_mm_xor_si128(c2[505],simde_mm_xor_si128(c2[184],simde_mm_xor_si128(c2[104],simde_mm_xor_si128(c2[905],simde_mm_xor_si128(c2[904],simde_mm_xor_si128(c2[25],simde_mm_xor_si128(c2[824],simde_mm_xor_si128(c2[989],simde_mm_xor_si128(c2[909],simde_mm_xor_si128(c2[429],simde_mm_xor_si128(c2[29],simde_mm_xor_si128(c2[429],simde_mm_xor_si128(c2[1228],simde_mm_xor_si128(c2[1228],simde_mm_xor_si128(c2[32],simde_mm_xor_si128(c2[1233],simde_mm_xor_si128(c2[753],simde_mm_xor_si128(c2[1232],simde_mm_xor_si128(c2[752],simde_mm_xor_si128(c2[992],simde_mm_xor_si128(c2[113],simde_mm_xor_si128(c2[912],simde_mm_xor_si128(c2[1076],simde_mm_xor_si128(c2[996],simde_mm_xor_si128(c2[516],simde_mm_xor_si128(c2[997],simde_mm_xor_si128(c2[917],simde_mm_xor_si128(c2[437],simde_mm_xor_si128(c2[837],simde_mm_xor_si128(c2[1237],c2[757])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 27
+     d2[54]=simde_mm_xor_si128(c2[640],c2[585]);
+
+//row: 28
+     d2[56]=simde_mm_xor_si128(c2[164],simde_mm_xor_si128(c2[408],c2[580]));
+
+//row: 29
+     d2[58]=simde_mm_xor_si128(c2[161],c2[977]);
+
+//row: 30
+     d2[60]=simde_mm_xor_si128(c2[568],simde_mm_xor_si128(c2[21],simde_mm_xor_si128(c2[748],c2[357])));
+
+//row: 31
+     d2[62]=simde_mm_xor_si128(c2[720],simde_mm_xor_si128(c2[560],simde_mm_xor_si128(c2[81],simde_mm_xor_si128(c2[405],simde_mm_xor_si128(c2[165],simde_mm_xor_si128(c2[724],simde_mm_xor_si128(c2[644],simde_mm_xor_si128(c2[804],simde_mm_xor_si128(c2[968],simde_mm_xor_si128(c2[889],simde_mm_xor_si128(c2[809],simde_mm_xor_si128(c2[813],simde_mm_xor_si128(c2[492],simde_mm_xor_si128(c2[972],simde_mm_xor_si128(c2[1057],simde_mm_xor_si128(c2[337],simde_mm_xor_si128(c2[1217],simde_mm_xor_si128(c2[1137],simde_mm_xor_si128(c2[1061],simde_mm_xor_si128(c2[740],simde_mm_xor_si128(c2[660],simde_mm_xor_si128(c2[1065],simde_mm_xor_si128(c2[184],simde_mm_xor_si128(c2[185],simde_mm_xor_si128(c2[105],simde_mm_xor_si128(c2[989],simde_mm_xor_si128(c2[589],simde_mm_xor_si128(c2[509],simde_mm_xor_si128(c2[32],simde_mm_xor_si128(c2[33],simde_mm_xor_si128(c2[273],simde_mm_xor_si128(c2[193],simde_mm_xor_si128(c2[1076],simde_mm_xor_si128(c2[997],simde_mm_xor_si128(c2[116],c2[36])))))))))))))))))))))))))))))))))));
+
+//row: 32
+     d2[64]=simde_mm_xor_si128(c2[1200],simde_mm_xor_si128(c2[1120],simde_mm_xor_si128(c2[1040],simde_mm_xor_si128(c2[960],simde_mm_xor_si128(c2[481],simde_mm_xor_si128(c2[161],simde_mm_xor_si128(c2[885],simde_mm_xor_si128(c2[805],simde_mm_xor_si128(c2[565],simde_mm_xor_si128(c2[1044],simde_mm_xor_si128(c2[169],simde_mm_xor_si128(c2[89],simde_mm_xor_si128(c2[1209],simde_mm_xor_si128(c2[12],simde_mm_xor_si128(c2[1213],simde_mm_xor_si128(c2[972],simde_mm_xor_si128(c2[892],simde_mm_xor_si128(c2[93],simde_mm_xor_si128(c2[256],simde_mm_xor_si128(c2[176],simde_mm_xor_si128(c2[737],simde_mm_xor_si128(c2[256],simde_mm_xor_si128(c2[260],simde_mm_xor_si128(c2[180],simde_mm_xor_si128(c2[1060],simde_mm_xor_si128(c2[980],simde_mm_xor_si128(c2[264],simde_mm_xor_si128(c2[184],simde_mm_xor_si128(c2[664],simde_mm_xor_si128(c2[584],simde_mm_xor_si128(c2[505],simde_mm_xor_si128(c2[188],simde_mm_xor_si128(c2[108],simde_mm_xor_si128(c2[909],simde_mm_xor_si128(c2[512],simde_mm_xor_si128(c2[432],simde_mm_xor_si128(c2[433],simde_mm_xor_si128(c2[593],simde_mm_xor_si128(c2[277],simde_mm_xor_si128(c2[197],simde_mm_xor_si128(c2[196],simde_mm_xor_si128(c2[116],c2[436]))))))))))))))))))))))))))))))))))))))))));
+
+//row: 33
+     d2[66]=simde_mm_xor_si128(c2[241],simde_mm_xor_si128(c2[81],simde_mm_xor_si128(c2[881],simde_mm_xor_si128(c2[1205],simde_mm_xor_si128(c2[965],simde_mm_xor_si128(c2[165],simde_mm_xor_si128(c2[489],simde_mm_xor_si128(c2[328],simde_mm_xor_si128(c2[328],simde_mm_xor_si128(c2[332],simde_mm_xor_si128(c2[13],simde_mm_xor_si128(c2[493],simde_mm_xor_si128(c2[576],simde_mm_xor_si128(c2[1137],simde_mm_xor_si128(c2[656],simde_mm_xor_si128(c2[580],simde_mm_xor_si128(c2[181],simde_mm_xor_si128(c2[584],simde_mm_xor_si128(c2[984],simde_mm_xor_si128(c2[905],simde_mm_xor_si128(c2[508],simde_mm_xor_si128(c2[28],simde_mm_xor_si128(c2[348],simde_mm_xor_si128(c2[832],simde_mm_xor_si128(c2[833],simde_mm_xor_si128(c2[993],simde_mm_xor_si128(c2[597],simde_mm_xor_si128(c2[516],c2[836]))))))))))))))))))))))))))));
+
+//row: 34
+     d2[68]=simde_mm_xor_si128(c2[1121],simde_mm_xor_si128(c2[1041],simde_mm_xor_si128(c2[960],simde_mm_xor_si128(c2[961],simde_mm_xor_si128(c2[881],simde_mm_xor_si128(c2[800],simde_mm_xor_si128(c2[400],simde_mm_xor_si128(c2[321],simde_mm_xor_si128(c2[241],simde_mm_xor_si128(c2[804],simde_mm_xor_si128(c2[724],simde_mm_xor_si128(c2[645],simde_mm_xor_si128(c2[484],simde_mm_xor_si128(c2[405],simde_mm_xor_si128(c2[964],simde_mm_xor_si128(c2[965],simde_mm_xor_si128(c2[884],simde_mm_xor_si128(c2[88],simde_mm_xor_si128(c2[8],simde_mm_xor_si128(c2[1208],simde_mm_xor_si128(c2[1129],simde_mm_xor_si128(c2[1128],simde_mm_xor_si128(c2[1049],simde_mm_xor_si128(c2[1212],simde_mm_xor_si128(c2[1132],simde_mm_xor_si128(c2[1053],simde_mm_xor_si128(c2[893],simde_mm_xor_si128(c2[813],simde_mm_xor_si128(c2[732],simde_mm_xor_si128(c2[12],simde_mm_xor_si128(c2[1212],simde_mm_xor_si128(c2[177],simde_mm_xor_si128(c2[97],simde_mm_xor_si128(c2[16],simde_mm_xor_si128(c2[656],simde_mm_xor_si128(c2[577],simde_mm_xor_si128(c2[176],simde_mm_xor_si128(c2[177],simde_mm_xor_si128(c2[96],simde_mm_xor_si128(c2[181],simde_mm_xor_si128(c2[101],simde_mm_xor_si128(c2[20],simde_mm_xor_si128(c2[980],simde_mm_xor_si128(c2[981],simde_mm_xor_si128(c2[900],simde_mm_xor_si128(c2[185],simde_mm_xor_si128(c2[105],simde_mm_xor_si128(c2[24],simde_mm_xor_si128(c2[585],simde_mm_xor_si128(c2[505],simde_mm_xor_si128(c2[424],simde_mm_xor_si128(c2[425],simde_mm_xor_si128(c2[424],simde_mm_xor_si128(c2[345],simde_mm_xor_si128(c2[109],simde_mm_xor_si128(c2[29],simde_mm_xor_si128(c2[1229],simde_mm_xor_si128(c2[829],simde_mm_xor_si128(c2[828],simde_mm_xor_si128(c2[749],simde_mm_xor_si128(c2[433],simde_mm_xor_si128(c2[353],simde_mm_xor_si128(c2[272],simde_mm_xor_si128(c2[352],simde_mm_xor_si128(c2[273],simde_mm_xor_si128(c2[513],simde_mm_xor_si128(c2[512],simde_mm_xor_si128(c2[433],simde_mm_xor_si128(c2[196],simde_mm_xor_si128(c2[116],simde_mm_xor_si128(c2[37],simde_mm_xor_si128(c2[117],simde_mm_xor_si128(c2[37],simde_mm_xor_si128(c2[1237],simde_mm_xor_si128(c2[356],simde_mm_xor_si128(c2[357],c2[276]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 35
+     d2[70]=simde_mm_xor_si128(c2[640],simde_mm_xor_si128(c2[560],simde_mm_xor_si128(c2[400],simde_mm_xor_si128(c2[1200],simde_mm_xor_si128(c2[325],simde_mm_xor_si128(c2[245],simde_mm_xor_si128(c2[5],simde_mm_xor_si128(c2[484],simde_mm_xor_si128(c2[725],simde_mm_xor_si128(c2[888],simde_mm_xor_si128(c2[808],simde_mm_xor_si128(c2[649],simde_mm_xor_si128(c2[733],simde_mm_xor_si128(c2[653],simde_mm_xor_si128(c2[332],simde_mm_xor_si128(c2[812],simde_mm_xor_si128(c2[897],simde_mm_xor_si128(c2[177],simde_mm_xor_si128(c2[977],simde_mm_xor_si128(c2[901],simde_mm_xor_si128(c2[500],simde_mm_xor_si128(c2[660],simde_mm_xor_si128(c2[985],simde_mm_xor_si128(c2[905],simde_mm_xor_si128(c2[24],simde_mm_xor_si128(c2[1224],simde_mm_xor_si128(c2[829],simde_mm_xor_si128(c2[349],simde_mm_xor_si128(c2[1153],simde_mm_xor_si128(c2[1152],simde_mm_xor_si128(c2[33],simde_mm_xor_si128(c2[996],simde_mm_xor_si128(c2[916],simde_mm_xor_si128(c2[837],c2[1157]))))))))))))))))))))))))))))))))));
+
+//row: 36
+     d2[72]=simde_mm_xor_si128(c2[960],simde_mm_xor_si128(c2[488],c2[829]));
+
+//row: 37
+     d2[74]=simde_mm_xor_si128(c2[240],simde_mm_xor_si128(c2[1201],simde_mm_xor_si128(c2[80],simde_mm_xor_si128(c2[1041],simde_mm_xor_si128(c2[880],simde_mm_xor_si128(c2[560],simde_mm_xor_si128(c2[1204],simde_mm_xor_si128(c2[884],simde_mm_xor_si128(c2[964],simde_mm_xor_si128(c2[644],simde_mm_xor_si128(c2[1205],simde_mm_xor_si128(c2[164],simde_mm_xor_si128(c2[1125],simde_mm_xor_si128(c2[488],simde_mm_xor_si128(c2[168],simde_mm_xor_si128(c2[89],simde_mm_xor_si128(c2[329],simde_mm_xor_si128(c2[9],simde_mm_xor_si128(c2[333],simde_mm_xor_si128(c2[13],simde_mm_xor_si128(c2[12],simde_mm_xor_si128(c2[973],simde_mm_xor_si128(c2[492],simde_mm_xor_si128(c2[172],simde_mm_xor_si128(c2[577],simde_mm_xor_si128(c2[257],simde_mm_xor_si128(c2[1136],simde_mm_xor_si128(c2[816],simde_mm_xor_si128(c2[417],simde_mm_xor_si128(c2[657],simde_mm_xor_si128(c2[337],simde_mm_xor_si128(c2[581],simde_mm_xor_si128(c2[261],simde_mm_xor_si128(c2[1221],simde_mm_xor_si128(c2[180],simde_mm_xor_si128(c2[1141],simde_mm_xor_si128(c2[585],simde_mm_xor_si128(c2[265],simde_mm_xor_si128(c2[985],simde_mm_xor_si128(c2[665],simde_mm_xor_si128(c2[664],simde_mm_xor_si128(c2[904],simde_mm_xor_si128(c2[584],simde_mm_xor_si128(c2[509],simde_mm_xor_si128(c2[189],simde_mm_xor_si128(c2[1068],simde_mm_xor_si128(c2[29],simde_mm_xor_si128(c2[988],simde_mm_xor_si128(c2[833],simde_mm_xor_si128(c2[513],simde_mm_xor_si128(c2[832],simde_mm_xor_si128(c2[512],simde_mm_xor_si128(c2[752],simde_mm_xor_si128(c2[992],simde_mm_xor_si128(c2[672],simde_mm_xor_si128(c2[596],simde_mm_xor_si128(c2[276],simde_mm_xor_si128(c2[517],simde_mm_xor_si128(c2[197],simde_mm_xor_si128(c2[597],simde_mm_xor_si128(c2[837],c2[517])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 38
+     d2[76]=simde_mm_xor_si128(c2[1200],simde_mm_xor_si128(c2[1120],simde_mm_xor_si128(c2[960],simde_mm_xor_si128(c2[481],simde_mm_xor_si128(c2[885],simde_mm_xor_si128(c2[805],simde_mm_xor_si128(c2[565],simde_mm_xor_si128(c2[1044],simde_mm_xor_si128(c2[1205],simde_mm_xor_si128(c2[169],simde_mm_xor_si128(c2[89],simde_mm_xor_si128(c2[1209],simde_mm_xor_si128(c2[12],simde_mm_xor_si128(c2[1213],simde_mm_xor_si128(c2[892],simde_mm_xor_si128(c2[93],simde_mm_xor_si128(c2[176],simde_mm_xor_si128(c2[737],simde_mm_xor_si128(c2[256],simde_mm_xor_si128(c2[180],simde_mm_xor_si128(c2[1060],simde_mm_xor_si128(c2[180],simde_mm_xor_si128(c2[264],simde_mm_xor_si128(c2[184],simde_mm_xor_si128(c2[584],simde_mm_xor_si128(c2[505],simde_mm_xor_si128(c2[108],simde_mm_xor_si128(c2[909],simde_mm_xor_si128(c2[432],simde_mm_xor_si128(c2[433],simde_mm_xor_si128(c2[593],simde_mm_xor_si128(c2[277],simde_mm_xor_si128(c2[197],simde_mm_xor_si128(c2[116],c2[436]))))))))))))))))))))))))))))))))));
+
+//row: 39
+     d2[78]=simde_mm_xor_si128(c2[880],simde_mm_xor_si128(c2[800],simde_mm_xor_si128(c2[720],simde_mm_xor_si128(c2[640],simde_mm_xor_si128(c2[161],simde_mm_xor_si128(c2[1200],simde_mm_xor_si128(c2[565],simde_mm_xor_si128(c2[485],simde_mm_xor_si128(c2[245],simde_mm_xor_si128(c2[724],simde_mm_xor_si128(c2[1128],simde_mm_xor_si128(c2[1048],simde_mm_xor_si128(c2[889],simde_mm_xor_si128(c2[973],simde_mm_xor_si128(c2[893],simde_mm_xor_si128(c2[652],simde_mm_xor_si128(c2[572],simde_mm_xor_si128(c2[1052],simde_mm_xor_si128(c2[1217],simde_mm_xor_si128(c2[1137],simde_mm_xor_si128(c2[417],simde_mm_xor_si128(c2[1217],simde_mm_xor_si128(c2[1221],simde_mm_xor_si128(c2[1141],simde_mm_xor_si128(c2[740],simde_mm_xor_si128(c2[1225],simde_mm_xor_si128(c2[1145],simde_mm_xor_si128(c2[344],simde_mm_xor_si128(c2[264],simde_mm_xor_si128(c2[185],simde_mm_xor_si128(c2[1149],simde_mm_xor_si128(c2[1069],simde_mm_xor_si128(c2[589],simde_mm_xor_si128(c2[988],simde_mm_xor_si128(c2[192],simde_mm_xor_si128(c2[112],simde_mm_xor_si128(c2[113],simde_mm_xor_si128(c2[273],simde_mm_xor_si128(c2[1236],simde_mm_xor_si128(c2[1156],simde_mm_xor_si128(c2[1157],simde_mm_xor_si128(c2[1077],c2[116]))))))))))))))))))))))))))))))))))))))))));
+
+//row: 40
+     d2[80]=simde_mm_xor_si128(c2[241],simde_mm_xor_si128(c2[0],simde_mm_xor_si128(c2[81],simde_mm_xor_si128(c2[1121],simde_mm_xor_si128(c2[881],simde_mm_xor_si128(c2[640],simde_mm_xor_si128(c2[1205],simde_mm_xor_si128(c2[964],simde_mm_xor_si128(c2[965],simde_mm_xor_si128(c2[724],simde_mm_xor_si128(c2[4],simde_mm_xor_si128(c2[165],simde_mm_xor_si128(c2[1205],simde_mm_xor_si128(c2[489],simde_mm_xor_si128(c2[248],simde_mm_xor_si128(c2[169],simde_mm_xor_si128(c2[328],simde_mm_xor_si128(c2[89],simde_mm_xor_si128(c2[8],simde_mm_xor_si128(c2[332],simde_mm_xor_si128(c2[93],simde_mm_xor_si128(c2[13],simde_mm_xor_si128(c2[1053],simde_mm_xor_si128(c2[493],simde_mm_xor_si128(c2[252],simde_mm_xor_si128(c2[576],simde_mm_xor_si128(c2[337],simde_mm_xor_si128(c2[1137],simde_mm_xor_si128(c2[896],simde_mm_xor_si128(c2[497],simde_mm_xor_si128(c2[656],simde_mm_xor_si128(c2[417],simde_mm_xor_si128(c2[580],simde_mm_xor_si128(c2[341],simde_mm_xor_si128(c2[20],simde_mm_xor_si128(c2[181],simde_mm_xor_si128(c2[1221],simde_mm_xor_si128(c2[584],simde_mm_xor_si128(c2[345],simde_mm_xor_si128(c2[984],simde_mm_xor_si128(c2[745],simde_mm_xor_si128(c2[744],simde_mm_xor_si128(c2[905],simde_mm_xor_si128(c2[664],simde_mm_xor_si128(c2[508],simde_mm_xor_si128(c2[269],simde_mm_xor_si128(c2[1148],simde_mm_xor_si128(c2[28],simde_mm_xor_si128(c2[1068],simde_mm_xor_si128(c2[832],simde_mm_xor_si128(c2[593],simde_mm_xor_si128(c2[833],simde_mm_xor_si128(c2[592],simde_mm_xor_si128(c2[832],simde_mm_xor_si128(c2[993],simde_mm_xor_si128(c2[752],simde_mm_xor_si128(c2[597],simde_mm_xor_si128(c2[356],simde_mm_xor_si128(c2[516],simde_mm_xor_si128(c2[277],simde_mm_xor_si128(c2[677],simde_mm_xor_si128(c2[836],c2[597]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 41
+     d2[82]=simde_mm_xor_si128(c2[1201],simde_mm_xor_si128(c2[1121],simde_mm_xor_si128(c2[961],simde_mm_xor_si128(c2[480],simde_mm_xor_si128(c2[884],simde_mm_xor_si128(c2[804],simde_mm_xor_si128(c2[564],simde_mm_xor_si128(c2[1045],simde_mm_xor_si128(c2[84],simde_mm_xor_si128(c2[168],simde_mm_xor_si128(c2[88],simde_mm_xor_si128(c2[1208],simde_mm_xor_si128(c2[13],simde_mm_xor_si128(c2[1212],simde_mm_xor_si128(c2[893],simde_mm_xor_si128(c2[92],simde_mm_xor_si128(c2[177],simde_mm_xor_si128(c2[736],simde_mm_xor_si128(c2[257],simde_mm_xor_si128(c2[181],simde_mm_xor_si128(c2[1061],simde_mm_xor_si128(c2[420],simde_mm_xor_si128(c2[265],simde_mm_xor_si128(c2[185],simde_mm_xor_si128(c2[585],simde_mm_xor_si128(c2[504],simde_mm_xor_si128(c2[109],simde_mm_xor_si128(c2[908],simde_mm_xor_si128(c2[433],simde_mm_xor_si128(c2[432],simde_mm_xor_si128(c2[592],simde_mm_xor_si128(c2[276],simde_mm_xor_si128(c2[196],simde_mm_xor_si128(c2[117],c2[437]))))))))))))))))))))))))))))))))));
+  }
+}
+#endif
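
Each generated "//row: N" statement above is one row of the LDPC base-graph parity computation: the output lane d2[k] is the GF(2) sum (byte-wise XOR) of a fixed set of circularly-shifted input lanes c2[...], emitted as a nested chain of simde_mm_xor_si128 calls so the same source compiles to SSE2 on x86 and to SIMDE's portable implementations (NEON or plain C) elsewhere. A minimal sketch of that shape, with made-up indices rather than any real base-graph table:

    #include <simde/x86/sse2.h>
    #include <stdint.h>

    /* Illustrative only: one parity row as the XOR of three 128-bit lanes,
     * the same nested shape the generator emits above. */
    static void example_parity_row(const uint8_t *c, uint8_t *d)
    {
      const simde__m128i *c2 = (const simde__m128i *)c;
      simde__m128i *d2 = (simde__m128i *)d;

      /* d2[0] = c2[3] ^ c2[7] ^ c2[11] over all 128 bits */
      d2[0] = simde_mm_xor_si128(c2[3], simde_mm_xor_si128(c2[7], c2[11]));
    }

The pointer casts mirror the generated code, which is presumably why the AVX2 variant below carries __attribute__ ((no_sanitize("address", "undefined"))): the lanes are not guaranteed to be 16- or 32-byte aligned, and sanitizer builds would otherwise flag those accesses.
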
diff --git a/openair1/PHY/CODING/nrLDPC_encoder/ldpc_BG2_Zc352_byte.c b/openair1/PHY/CODING/nrLDPC_encoder/ldpc_BG2_Zc352_byte.c
index 4fc2acf610c9b343dbca3c7ec6bc81635328fe16..002e107f9ce5ff926bc5dafa8c5ff1a2c835b495 100644
--- a/openair1/PHY/CODING/nrLDPC_encoder/ldpc_BG2_Zc352_byte.c
+++ b/openair1/PHY/CODING/nrLDPC_encoder/ldpc_BG2_Zc352_byte.c
@@ -1,9 +1,10 @@
+#ifdef __AVX2__
 #include "PHY/sse_intrin.h"
 // generated code for Zc=352, byte encoding
 static inline __attribute__ ((no_sanitize("address", "undefined"))) void ldpc_BG2_Zc352_byte(uint8_t *c,uint8_t *d) {
-  __m256i *csimd=(__m256i *)c,*dsimd=(__m256i *)d;
+  simde__m256i *csimd=(simde__m256i *)c,*dsimd=(simde__m256i *)d;
 
-  __m256i *c2,*d2;
+  simde__m256i *c2,*d2;
 
   int i2;
   for (i2=0; i2<11; i2++) {
@@ -137,3 +138,4 @@ static inline __attribute__ ((no_sanitize("address", "undefined"))) void ldpc_BG
      d2[451]=simde_mm256_xor_si256(c2[885],simde_mm256_xor_si256(c2[665],simde_mm256_xor_si256(c2[5280],simde_mm256_xor_si256(c2[1983],simde_mm256_xor_si256(c2[5086],simde_mm256_xor_si256(c2[4866],simde_mm256_xor_si256(c2[3327],simde_mm256_xor_si256(c2[6183],simde_mm256_xor_si256(c2[906],simde_mm256_xor_si256(c2[4884],simde_mm256_xor_si256(c2[4664],simde_mm256_xor_si256(c2[4669],simde_mm256_xor_si256(c2[2486],simde_mm256_xor_si256(c2[2266],simde_mm256_xor_si256(c2[1828],simde_mm256_xor_si256(c2[4029],simde_mm256_xor_si256(c2[1410],simde_mm256_xor_si256(c2[5590],simde_mm256_xor_si256(c2[4713],simde_mm256_xor_si256(c2[1870],simde_mm256_xor_si256(c2[1434],simde_mm256_xor_si256(c2[1430],simde_mm256_xor_si256(c2[3653],simde_mm256_xor_si256(c2[3433],simde_mm256_xor_si256(c2[6733],simde_mm256_xor_si256(c2[5412],simde_mm256_xor_si256(c2[2795],simde_mm256_xor_si256(c2[157],simde_mm256_xor_si256(c2[4141],simde_mm256_xor_si256(c2[4800],simde_mm256_xor_si256(c2[5456],simde_mm256_xor_si256(c2[862],simde_mm256_xor_si256(c2[642],simde_mm256_xor_si256(c2[2842],c2[3721]))))))))))))))))))))))))))))))))));
   }
 }
+#endif
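
This guard pairs with the new ldpc_BG2_Zc352_byte_128.c below: exactly one of the two translation units defines ldpc_BG2_Zc352_byte() for a given target, the 256-bit simde__m256i version when __AVX2__ is defined and a 128-bit simde__m128i version otherwise. The loop bound doubles from i2<11 to i2<22 accordingly, because the Zc=352-byte block is tiled either as 11 32-byte lanes or as 22 16-byte lanes. A quick sanity check of that tiling, assuming only SIMDE's standard headers:

    #include <assert.h>
    #include <simde/x86/sse2.h>
    #include <simde/x86/avx2.h>

    /* Zc=352 bytes per block, tiled two ways by the generated encoders. */
    static_assert(11 * sizeof(simde__m256i) == 352, "AVX2 path: 11 x 32-byte lanes");
    static_assert(22 * sizeof(simde__m128i) == 352, "fallback path: 22 x 16-byte lanes");
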
diff --git a/openair1/PHY/CODING/nrLDPC_encoder/ldpc_BG2_Zc352_byte_128.c b/openair1/PHY/CODING/nrLDPC_encoder/ldpc_BG2_Zc352_byte_128.c
new file mode 100644
index 0000000000000000000000000000000000000000..42e8b9453c0ad99bdbc2a4ab5c809d0ee8083fd4
--- /dev/null
+++ b/openair1/PHY/CODING/nrLDPC_encoder/ldpc_BG2_Zc352_byte_128.c
@@ -0,0 +1,141 @@
+#ifndef __AVX2__
+#include "PHY/sse_intrin.h"
+// generated code for Zc=352, byte encoding
+static inline __attribute__ ((no_sanitize("address", "undefined"))) void ldpc_BG2_Zc352_byte(uint8_t *c,uint8_t *d) {
+  simde__m128i *csimd=(simde__m128i *)c,*dsimd=(simde__m128i *)d;
+
+  simde__m128i *c2,*d2;
+
+  int i2;
+  for (i2=0; i2<22; i2++) {
+     c2=&csimd[i2];
+     d2=&dsimd[i2];
+
+//row: 0
+     d2[0]=simde_mm_xor_si128(c2[4849],simde_mm_xor_si128(c2[1],simde_mm_xor_si128(c2[446],simde_mm_xor_si128(c2[6212],simde_mm_xor_si128(c2[3134],simde_mm_xor_si128(c2[1807],simde_mm_xor_si128(c2[5808],simde_mm_xor_si128(c2[5818],simde_mm_xor_si128(c2[1012],simde_mm_xor_si128(c2[136],simde_mm_xor_si128(c2[4538],simde_mm_xor_si128(c2[6339],simde_mm_xor_si128(c2[621],simde_mm_xor_si128(c2[5906],simde_mm_xor_si128(c2[220],simde_mm_xor_si128(c2[6387],simde_mm_xor_si128(c2[3346],simde_mm_xor_si128(c2[2907],simde_mm_xor_si128(c2[265],simde_mm_xor_si128(c2[2070],simde_mm_xor_si128(c2[3833],simde_mm_xor_si128(c2[4762],simde_mm_xor_si128(c2[6080],simde_mm_xor_si128(c2[353],simde_mm_xor_si128(c2[4803],simde_mm_xor_si128(c2[2164],c2[3922]))))))))))))))))))))))))));
+
+//row: 1
+     d2[22]=simde_mm_xor_si128(c2[5289],simde_mm_xor_si128(c2[4849],simde_mm_xor_si128(c2[1],simde_mm_xor_si128(c2[446],simde_mm_xor_si128(c2[6652],simde_mm_xor_si128(c2[6212],simde_mm_xor_si128(c2[3134],simde_mm_xor_si128(c2[1807],simde_mm_xor_si128(c2[6248],simde_mm_xor_si128(c2[5808],simde_mm_xor_si128(c2[5818],simde_mm_xor_si128(c2[1452],simde_mm_xor_si128(c2[1012],simde_mm_xor_si128(c2[136],simde_mm_xor_si128(c2[4538],simde_mm_xor_si128(c2[6339],simde_mm_xor_si128(c2[621],simde_mm_xor_si128(c2[5906],simde_mm_xor_si128(c2[220],simde_mm_xor_si128(c2[6387],simde_mm_xor_si128(c2[3786],simde_mm_xor_si128(c2[3346],simde_mm_xor_si128(c2[2907],simde_mm_xor_si128(c2[265],simde_mm_xor_si128(c2[2070],simde_mm_xor_si128(c2[3833],simde_mm_xor_si128(c2[4762],simde_mm_xor_si128(c2[6080],simde_mm_xor_si128(c2[353],simde_mm_xor_si128(c2[5243],simde_mm_xor_si128(c2[4803],simde_mm_xor_si128(c2[2164],c2[3922]))))))))))))))))))))))))))))))));
+
+//row: 2
+     d2[44]=simde_mm_xor_si128(c2[5289],simde_mm_xor_si128(c2[4849],simde_mm_xor_si128(c2[441],simde_mm_xor_si128(c2[1],simde_mm_xor_si128(c2[446],simde_mm_xor_si128(c2[6652],simde_mm_xor_si128(c2[6212],simde_mm_xor_si128(c2[3134],simde_mm_xor_si128(c2[1807],simde_mm_xor_si128(c2[6248],simde_mm_xor_si128(c2[5808],simde_mm_xor_si128(c2[5818],simde_mm_xor_si128(c2[1452],simde_mm_xor_si128(c2[1012],simde_mm_xor_si128(c2[576],simde_mm_xor_si128(c2[136],simde_mm_xor_si128(c2[4538],simde_mm_xor_si128(c2[6779],simde_mm_xor_si128(c2[6339],simde_mm_xor_si128(c2[621],simde_mm_xor_si128(c2[5906],simde_mm_xor_si128(c2[660],simde_mm_xor_si128(c2[220],simde_mm_xor_si128(c2[6387],simde_mm_xor_si128(c2[3786],simde_mm_xor_si128(c2[3346],simde_mm_xor_si128(c2[3347],simde_mm_xor_si128(c2[2907],simde_mm_xor_si128(c2[265],simde_mm_xor_si128(c2[2510],simde_mm_xor_si128(c2[2070],simde_mm_xor_si128(c2[3833],simde_mm_xor_si128(c2[5202],simde_mm_xor_si128(c2[4762],simde_mm_xor_si128(c2[6080],simde_mm_xor_si128(c2[353],simde_mm_xor_si128(c2[5243],simde_mm_xor_si128(c2[4803],simde_mm_xor_si128(c2[2604],simde_mm_xor_si128(c2[2164],c2[3922]))))))))))))))))))))))))))))))))))))))));
+
+//row: 3
+     d2[66]=simde_mm_xor_si128(c2[4849],simde_mm_xor_si128(c2[1],simde_mm_xor_si128(c2[446],simde_mm_xor_si128(c2[6212],simde_mm_xor_si128(c2[3134],simde_mm_xor_si128(c2[2247],simde_mm_xor_si128(c2[1807],simde_mm_xor_si128(c2[5808],simde_mm_xor_si128(c2[6258],simde_mm_xor_si128(c2[5818],simde_mm_xor_si128(c2[1012],simde_mm_xor_si128(c2[136],simde_mm_xor_si128(c2[4538],simde_mm_xor_si128(c2[6339],simde_mm_xor_si128(c2[621],simde_mm_xor_si128(c2[6346],simde_mm_xor_si128(c2[5906],simde_mm_xor_si128(c2[220],simde_mm_xor_si128(c2[6827],simde_mm_xor_si128(c2[6387],simde_mm_xor_si128(c2[3346],simde_mm_xor_si128(c2[2907],simde_mm_xor_si128(c2[705],simde_mm_xor_si128(c2[265],simde_mm_xor_si128(c2[2070],simde_mm_xor_si128(c2[4273],simde_mm_xor_si128(c2[3833],simde_mm_xor_si128(c2[4762],simde_mm_xor_si128(c2[6080],simde_mm_xor_si128(c2[793],simde_mm_xor_si128(c2[353],simde_mm_xor_si128(c2[4803],simde_mm_xor_si128(c2[2164],simde_mm_xor_si128(c2[4362],c2[3922]))))))))))))))))))))))))))))))))));
+
+//row: 4
+     d2[88]=simde_mm_xor_si128(c2[6614],simde_mm_xor_si128(c2[6174],simde_mm_xor_si128(c2[1326],simde_mm_xor_si128(c2[1771],simde_mm_xor_si128(c2[2645],simde_mm_xor_si128(c2[938],simde_mm_xor_si128(c2[498],simde_mm_xor_si128(c2[4459],simde_mm_xor_si128(c2[3132],simde_mm_xor_si128(c2[1368],simde_mm_xor_si128(c2[534],simde_mm_xor_si128(c2[94],simde_mm_xor_si128(c2[104],simde_mm_xor_si128(c2[2777],simde_mm_xor_si128(c2[2337],simde_mm_xor_si128(c2[1461],simde_mm_xor_si128(c2[5863],simde_mm_xor_si128(c2[625],simde_mm_xor_si128(c2[1946],simde_mm_xor_si128(c2[192],simde_mm_xor_si128(c2[1545],simde_mm_xor_si128(c2[673],simde_mm_xor_si128(c2[5111],simde_mm_xor_si128(c2[4671],simde_mm_xor_si128(c2[4232],simde_mm_xor_si128(c2[1590],simde_mm_xor_si128(c2[3395],simde_mm_xor_si128(c2[5158],simde_mm_xor_si128(c2[6087],simde_mm_xor_si128(c2[366],simde_mm_xor_si128(c2[1678],simde_mm_xor_si128(c2[6568],simde_mm_xor_si128(c2[6128],simde_mm_xor_si128(c2[3489],c2[5247]))))))))))))))))))))))))))))))))));
+
+//row: 5
+     d2[110]=simde_mm_xor_si128(c2[3533],simde_mm_xor_si128(c2[3093],simde_mm_xor_si128(c2[5284],simde_mm_xor_si128(c2[5729],simde_mm_xor_si128(c2[6604],simde_mm_xor_si128(c2[4896],simde_mm_xor_si128(c2[4456],simde_mm_xor_si128(c2[1378],simde_mm_xor_si128(c2[51],simde_mm_xor_si128(c2[1809],simde_mm_xor_si128(c2[4492],simde_mm_xor_si128(c2[4052],simde_mm_xor_si128(c2[4062],simde_mm_xor_si128(c2[6735],simde_mm_xor_si128(c2[6295],simde_mm_xor_si128(c2[5419],simde_mm_xor_si128(c2[2782],simde_mm_xor_si128(c2[4583],simde_mm_xor_si128(c2[5904],simde_mm_xor_si128(c2[4150],simde_mm_xor_si128(c2[5503],simde_mm_xor_si128(c2[4631],simde_mm_xor_si128(c2[1542],simde_mm_xor_si128(c2[2030],simde_mm_xor_si128(c2[1590],simde_mm_xor_si128(c2[1151],simde_mm_xor_si128(c2[5548],simde_mm_xor_si128(c2[314],simde_mm_xor_si128(c2[2077],simde_mm_xor_si128(c2[3394],simde_mm_xor_si128(c2[3006],simde_mm_xor_si128(c2[4324],simde_mm_xor_si128(c2[5636],simde_mm_xor_si128(c2[3487],simde_mm_xor_si128(c2[3047],simde_mm_xor_si128(c2[408],c2[2166]))))))))))))))))))))))))))))))))))));
+
+//row: 6
+     d2[132]=simde_mm_xor_si128(c2[3971],simde_mm_xor_si128(c2[3531],simde_mm_xor_si128(c2[5722],simde_mm_xor_si128(c2[6167],simde_mm_xor_si128(c2[6602],simde_mm_xor_si128(c2[5334],simde_mm_xor_si128(c2[4894],simde_mm_xor_si128(c2[1816],simde_mm_xor_si128(c2[489],simde_mm_xor_si128(c2[4930],simde_mm_xor_si128(c2[4490],simde_mm_xor_si128(c2[4500],simde_mm_xor_si128(c2[134],simde_mm_xor_si128(c2[6733],simde_mm_xor_si128(c2[5857],simde_mm_xor_si128(c2[3220],simde_mm_xor_si128(c2[5021],simde_mm_xor_si128(c2[6342],simde_mm_xor_si128(c2[4588],simde_mm_xor_si128(c2[5941],simde_mm_xor_si128(c2[5069],simde_mm_xor_si128(c2[4629],simde_mm_xor_si128(c2[2468],simde_mm_xor_si128(c2[2028],simde_mm_xor_si128(c2[1589],simde_mm_xor_si128(c2[5986],simde_mm_xor_si128(c2[752],simde_mm_xor_si128(c2[2515],simde_mm_xor_si128(c2[4708],simde_mm_xor_si128(c2[3444],simde_mm_xor_si128(c2[4762],simde_mm_xor_si128(c2[6074],simde_mm_xor_si128(c2[3925],simde_mm_xor_si128(c2[3485],simde_mm_xor_si128(c2[846],simde_mm_xor_si128(c2[2604],c2[5245]))))))))))))))))))))))))))))))))))));
+
+//row: 7
+     d2[154]=simde_mm_xor_si128(c2[4852],simde_mm_xor_si128(c2[4412],simde_mm_xor_si128(c2[893],simde_mm_xor_si128(c2[6603],simde_mm_xor_si128(c2[3084],simde_mm_xor_si128(c2[9],simde_mm_xor_si128(c2[3529],simde_mm_xor_si128(c2[6215],simde_mm_xor_si128(c2[5775],simde_mm_xor_si128(c2[2256],simde_mm_xor_si128(c2[2697],simde_mm_xor_si128(c2[6217],simde_mm_xor_si128(c2[1370],simde_mm_xor_si128(c2[5330],simde_mm_xor_si128(c2[4890],simde_mm_xor_si128(c2[47],simde_mm_xor_si128(c2[5811],simde_mm_xor_si128(c2[5371],simde_mm_xor_si128(c2[1852],simde_mm_xor_si128(c2[5381],simde_mm_xor_si128(c2[2302],simde_mm_xor_si128(c2[1862],simde_mm_xor_si128(c2[1015],simde_mm_xor_si128(c2[575],simde_mm_xor_si128(c2[4095],simde_mm_xor_si128(c2[6738],simde_mm_xor_si128(c2[3219],simde_mm_xor_si128(c2[4101],simde_mm_xor_si128(c2[582],simde_mm_xor_si128(c2[5902],simde_mm_xor_si128(c2[2383],simde_mm_xor_si128(c2[184],simde_mm_xor_si128(c2[3704],simde_mm_xor_si128(c2[5469],simde_mm_xor_si128(c2[2390],simde_mm_xor_si128(c2[1950],simde_mm_xor_si128(c2[6822],simde_mm_xor_si128(c2[3303],simde_mm_xor_si128(c2[5950],simde_mm_xor_si128(c2[2871],simde_mm_xor_si128(c2[2431],simde_mm_xor_si128(c2[5947],simde_mm_xor_si128(c2[3349],simde_mm_xor_si128(c2[2909],simde_mm_xor_si128(c2[6429],simde_mm_xor_si128(c2[2470],simde_mm_xor_si128(c2[5990],simde_mm_xor_si128(c2[6867],simde_mm_xor_si128(c2[3788],simde_mm_xor_si128(c2[3348],simde_mm_xor_si128(c2[1633],simde_mm_xor_si128(c2[5153],simde_mm_xor_si128(c2[3396],simde_mm_xor_si128(c2[317],simde_mm_xor_si128(c2[6916],simde_mm_xor_si128(c2[3829],simde_mm_xor_si128(c2[4325],simde_mm_xor_si128(c2[806],simde_mm_xor_si128(c2[5643],simde_mm_xor_si128(c2[2124],simde_mm_xor_si128(c2[6955],simde_mm_xor_si128(c2[3876],simde_mm_xor_si128(c2[3436],simde_mm_xor_si128(c2[4806],simde_mm_xor_si128(c2[4366],simde_mm_xor_si128(c2[847],simde_mm_xor_si128(c2[1727],simde_mm_xor_si128(c2[5247],simde_mm_xor_si128(c2[3485],simde_mm_xor_si128(c2[406],c2[7005]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 8
+     d2[176]=simde_mm_xor_si128(c2[5739],simde_mm_xor_si128(c2[5299],simde_mm_xor_si128(c2[891],simde_mm_xor_si128(c2[451],simde_mm_xor_si128(c2[896],simde_mm_xor_si128(c2[2203],simde_mm_xor_si128(c2[63],simde_mm_xor_si128(c2[6662],simde_mm_xor_si128(c2[3584],simde_mm_xor_si128(c2[2257],simde_mm_xor_si128(c2[6645],simde_mm_xor_si128(c2[6698],simde_mm_xor_si128(c2[6258],simde_mm_xor_si128(c2[6268],simde_mm_xor_si128(c2[1902],simde_mm_xor_si128(c2[1462],simde_mm_xor_si128(c2[1026],simde_mm_xor_si128(c2[586],simde_mm_xor_si128(c2[4988],simde_mm_xor_si128(c2[190],simde_mm_xor_si128(c2[6789],simde_mm_xor_si128(c2[1071],simde_mm_xor_si128(c2[6356],simde_mm_xor_si128(c2[1110],simde_mm_xor_si128(c2[670],simde_mm_xor_si128(c2[6837],simde_mm_xor_si128(c2[4236],simde_mm_xor_si128(c2[3796],simde_mm_xor_si128(c2[3797],simde_mm_xor_si128(c2[3357],simde_mm_xor_si128(c2[715],simde_mm_xor_si128(c2[2960],simde_mm_xor_si128(c2[2520],simde_mm_xor_si128(c2[4283],simde_mm_xor_si128(c2[5652],simde_mm_xor_si128(c2[5212],simde_mm_xor_si128(c2[6530],simde_mm_xor_si128(c2[803],simde_mm_xor_si128(c2[5693],simde_mm_xor_si128(c2[5253],simde_mm_xor_si128(c2[3054],simde_mm_xor_si128(c2[2614],c2[4372]))))))))))))))))))))))))))))))))))))))))));
+
+//row: 9
+     d2[198]=simde_mm_xor_si128(c2[6175],simde_mm_xor_si128(c2[5293],simde_mm_xor_si128(c2[4853],simde_mm_xor_si128(c2[1327],simde_mm_xor_si128(c2[5],simde_mm_xor_si128(c2[1772],simde_mm_xor_si128(c2[450],simde_mm_xor_si128(c2[499],simde_mm_xor_si128(c2[6656],simde_mm_xor_si128(c2[6216],simde_mm_xor_si128(c2[4460],simde_mm_xor_si128(c2[3138],simde_mm_xor_si128(c2[3133],simde_mm_xor_si128(c2[1811],simde_mm_xor_si128(c2[3570],simde_mm_xor_si128(c2[95],simde_mm_xor_si128(c2[6252],simde_mm_xor_si128(c2[5812],simde_mm_xor_si128(c2[105],simde_mm_xor_si128(c2[5822],simde_mm_xor_si128(c2[2338],simde_mm_xor_si128(c2[1456],simde_mm_xor_si128(c2[1016],simde_mm_xor_si128(c2[1462],simde_mm_xor_si128(c2[140],simde_mm_xor_si128(c2[5864],simde_mm_xor_si128(c2[4542],simde_mm_xor_si128(c2[626],simde_mm_xor_si128(c2[6343],simde_mm_xor_si128(c2[1947],simde_mm_xor_si128(c2[625],simde_mm_xor_si128(c2[193],simde_mm_xor_si128(c2[5910],simde_mm_xor_si128(c2[1546],simde_mm_xor_si128(c2[224],simde_mm_xor_si128(c2[674],simde_mm_xor_si128(c2[6391],simde_mm_xor_si128(c2[4672],simde_mm_xor_si128(c2[3790],simde_mm_xor_si128(c2[3350],simde_mm_xor_si128(c2[4233],simde_mm_xor_si128(c2[2911],simde_mm_xor_si128(c2[1591],simde_mm_xor_si128(c2[269],simde_mm_xor_si128(c2[3396],simde_mm_xor_si128(c2[2074],simde_mm_xor_si128(c2[5159],simde_mm_xor_si128(c2[3837],simde_mm_xor_si128(c2[6088],simde_mm_xor_si128(c2[4766],simde_mm_xor_si128(c2[367],simde_mm_xor_si128(c2[6084],simde_mm_xor_si128(c2[1679],simde_mm_xor_si128(c2[357],simde_mm_xor_si128(c2[6520],simde_mm_xor_si128(c2[6129],simde_mm_xor_si128(c2[5247],simde_mm_xor_si128(c2[4807],simde_mm_xor_si128(c2[3490],simde_mm_xor_si128(c2[2168],simde_mm_xor_si128(c2[5248],c2[3926])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 10
+     d2[220]=simde_mm_xor_si128(c2[6606],simde_mm_xor_si128(c2[4005],simde_mm_xor_si128(c2[6434],c2[3389])));
+
+//row: 11
+     d2[242]=simde_mm_xor_si128(c2[3538],simde_mm_xor_si128(c2[5729],simde_mm_xor_si128(c2[6174],simde_mm_xor_si128(c2[4845],simde_mm_xor_si128(c2[4901],simde_mm_xor_si128(c2[1823],simde_mm_xor_si128(c2[936],simde_mm_xor_si128(c2[496],simde_mm_xor_si128(c2[4497],simde_mm_xor_si128(c2[4947],simde_mm_xor_si128(c2[4507],simde_mm_xor_si128(c2[6740],simde_mm_xor_si128(c2[5864],simde_mm_xor_si128(c2[3227],simde_mm_xor_si128(c2[5028],simde_mm_xor_si128(c2[6349],simde_mm_xor_si128(c2[5035],simde_mm_xor_si128(c2[4595],simde_mm_xor_si128(c2[5948],simde_mm_xor_si128(c2[5516],simde_mm_xor_si128(c2[5076],simde_mm_xor_si128(c2[2035],simde_mm_xor_si128(c2[1596],simde_mm_xor_si128(c2[6433],simde_mm_xor_si128(c2[5993],simde_mm_xor_si128(c2[759],simde_mm_xor_si128(c2[2962],simde_mm_xor_si128(c2[2522],simde_mm_xor_si128(c2[6918],simde_mm_xor_si128(c2[3451],simde_mm_xor_si128(c2[4769],simde_mm_xor_si128(c2[6521],simde_mm_xor_si128(c2[6081],simde_mm_xor_si128(c2[3492],simde_mm_xor_si128(c2[853],simde_mm_xor_si128(c2[3051],simde_mm_xor_si128(c2[2611],c2[3917])))))))))))))))))))))))))))))))))))));
+
+//row: 12
+     d2[264]=simde_mm_xor_si128(c2[10],simde_mm_xor_si128(c2[6609],simde_mm_xor_si128(c2[1761],simde_mm_xor_si128(c2[2206],simde_mm_xor_si128(c2[1373],simde_mm_xor_si128(c2[933],simde_mm_xor_si128(c2[4894],simde_mm_xor_si128(c2[3567],simde_mm_xor_si128(c2[4451],simde_mm_xor_si128(c2[969],simde_mm_xor_si128(c2[529],simde_mm_xor_si128(c2[539],simde_mm_xor_si128(c2[3212],simde_mm_xor_si128(c2[2772],simde_mm_xor_si128(c2[1896],simde_mm_xor_si128(c2[6298],simde_mm_xor_si128(c2[4972],simde_mm_xor_si128(c2[1060],simde_mm_xor_si128(c2[2381],simde_mm_xor_si128(c2[627],simde_mm_xor_si128(c2[1980],simde_mm_xor_si128(c2[1108],simde_mm_xor_si128(c2[5546],simde_mm_xor_si128(c2[5106],simde_mm_xor_si128(c2[4667],simde_mm_xor_si128(c2[2025],simde_mm_xor_si128(c2[3830],simde_mm_xor_si128(c2[5593],simde_mm_xor_si128(c2[6522],simde_mm_xor_si128(c2[801],simde_mm_xor_si128(c2[2113],simde_mm_xor_si128(c2[7003],simde_mm_xor_si128(c2[6563],simde_mm_xor_si128(c2[3924],c2[5682]))))))))))))))))))))))))))))))))));
+
+//row: 13
+     d2[286]=simde_mm_xor_si128(c2[4417],simde_mm_xor_si128(c2[6608],simde_mm_xor_si128(c2[14],simde_mm_xor_si128(c2[5721],simde_mm_xor_si128(c2[5780],simde_mm_xor_si128(c2[2702],simde_mm_xor_si128(c2[1815],simde_mm_xor_si128(c2[1375],simde_mm_xor_si128(c2[4889],simde_mm_xor_si128(c2[5376],simde_mm_xor_si128(c2[5826],simde_mm_xor_si128(c2[5386],simde_mm_xor_si128(c2[580],simde_mm_xor_si128(c2[6743],simde_mm_xor_si128(c2[4106],simde_mm_xor_si128(c2[5907],simde_mm_xor_si128(c2[189],simde_mm_xor_si128(c2[5914],simde_mm_xor_si128(c2[5474],simde_mm_xor_si128(c2[6827],simde_mm_xor_si128(c2[6395],simde_mm_xor_si128(c2[5955],simde_mm_xor_si128(c2[2914],simde_mm_xor_si128(c2[2475],simde_mm_xor_si128(c2[273],simde_mm_xor_si128(c2[6872],simde_mm_xor_si128(c2[1638],simde_mm_xor_si128(c2[3841],simde_mm_xor_si128(c2[3401],simde_mm_xor_si128(c2[4330],simde_mm_xor_si128(c2[5648],simde_mm_xor_si128(c2[361],simde_mm_xor_si128(c2[6960],simde_mm_xor_si128(c2[5193],simde_mm_xor_si128(c2[4371],simde_mm_xor_si128(c2[1732],simde_mm_xor_si128(c2[3930],c2[3490])))))))))))))))))))))))))))))))))))));
+
+//row: 14
+     d2[308]=simde_mm_xor_si128(c2[1770],simde_mm_xor_si128(c2[1330],simde_mm_xor_si128(c2[460],simde_mm_xor_si128(c2[3521],simde_mm_xor_si128(c2[2651],simde_mm_xor_si128(c2[3966],simde_mm_xor_si128(c2[3096],simde_mm_xor_si128(c2[3133],simde_mm_xor_si128(c2[2693],simde_mm_xor_si128(c2[1823],simde_mm_xor_si128(c2[6654],simde_mm_xor_si128(c2[5784],simde_mm_xor_si128(c2[5327],simde_mm_xor_si128(c2[4897],simde_mm_xor_si128(c2[4457],simde_mm_xor_si128(c2[4884],simde_mm_xor_si128(c2[2729],simde_mm_xor_si128(c2[2289],simde_mm_xor_si128(c2[1419],simde_mm_xor_si128(c2[2299],simde_mm_xor_si128(c2[1869],simde_mm_xor_si128(c2[1429],simde_mm_xor_si128(c2[4972],simde_mm_xor_si128(c2[4532],simde_mm_xor_si128(c2[3662],simde_mm_xor_si128(c2[3656],simde_mm_xor_si128(c2[2786],simde_mm_xor_si128(c2[1019],simde_mm_xor_si128(c2[149],simde_mm_xor_si128(c2[2820],simde_mm_xor_si128(c2[1950],simde_mm_xor_si128(c2[4141],simde_mm_xor_si128(c2[3271],simde_mm_xor_si128(c2[2387],simde_mm_xor_si128(c2[1957],simde_mm_xor_si128(c2[1517],simde_mm_xor_si128(c2[3740],simde_mm_xor_si128(c2[2870],simde_mm_xor_si128(c2[2868],simde_mm_xor_si128(c2[2438],simde_mm_xor_si128(c2[1998],simde_mm_xor_si128(c2[267],simde_mm_xor_si128(c2[6866],simde_mm_xor_si128(c2[5996],simde_mm_xor_si128(c2[6427],simde_mm_xor_si128(c2[5557],simde_mm_xor_si128(c2[3785],simde_mm_xor_si128(c2[3355],simde_mm_xor_si128(c2[2915],simde_mm_xor_si128(c2[713],simde_mm_xor_si128(c2[5590],simde_mm_xor_si128(c2[4720],simde_mm_xor_si128(c2[314],simde_mm_xor_si128(c2[6923],simde_mm_xor_si128(c2[6483],simde_mm_xor_si128(c2[1243],simde_mm_xor_si128(c2[373],simde_mm_xor_si128(c2[2561],simde_mm_xor_si128(c2[1691],simde_mm_xor_si128(c2[3873],simde_mm_xor_si128(c2[3443],simde_mm_xor_si128(c2[3003],simde_mm_xor_si128(c2[1724],simde_mm_xor_si128(c2[1284],simde_mm_xor_si128(c2[414],simde_mm_xor_si128(c2[5684],simde_mm_xor_si128(c2[4814],simde_mm_xor_si128(c2[403],simde_mm_xor_si128(c2[7012],c2[6572])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 15
+     d2[330]=simde_mm_xor_si128(c2[896],simde_mm_xor_si128(c2[1772],simde_mm_xor_si128(c2[1332],simde_mm_xor_si128(c2[3087],simde_mm_xor_si128(c2[3523],simde_mm_xor_si128(c2[3532],simde_mm_xor_si128(c2[3968],simde_mm_xor_si128(c2[3968],simde_mm_xor_si128(c2[2259],simde_mm_xor_si128(c2[3135],simde_mm_xor_si128(c2[2695],simde_mm_xor_si128(c2[6220],simde_mm_xor_si128(c2[6656],simde_mm_xor_si128(c2[4893],simde_mm_xor_si128(c2[5329],simde_mm_xor_si128(c2[1855],simde_mm_xor_si128(c2[2731],simde_mm_xor_si128(c2[2291],simde_mm_xor_si128(c2[1865],simde_mm_xor_si128(c2[2301],simde_mm_xor_si128(c2[4098],simde_mm_xor_si128(c2[4974],simde_mm_xor_si128(c2[4534],simde_mm_xor_si128(c2[3222],simde_mm_xor_si128(c2[3658],simde_mm_xor_si128(c2[585],simde_mm_xor_si128(c2[1021],simde_mm_xor_si128(c2[2386],simde_mm_xor_si128(c2[2822],simde_mm_xor_si128(c2[3707],simde_mm_xor_si128(c2[4143],simde_mm_xor_si128(c2[1953],simde_mm_xor_si128(c2[2389],simde_mm_xor_si128(c2[3306],simde_mm_xor_si128(c2[3742],simde_mm_xor_si128(c2[2434],simde_mm_xor_si128(c2[2870],simde_mm_xor_si128(c2[6432],simde_mm_xor_si128(c2[269],simde_mm_xor_si128(c2[6868],simde_mm_xor_si128(c2[5993],simde_mm_xor_si128(c2[6429],simde_mm_xor_si128(c2[3351],simde_mm_xor_si128(c2[3787],simde_mm_xor_si128(c2[5156],simde_mm_xor_si128(c2[5592],simde_mm_xor_si128(c2[6919],simde_mm_xor_si128(c2[316],simde_mm_xor_si128(c2[809],simde_mm_xor_si128(c2[1245],simde_mm_xor_si128(c2[2127],simde_mm_xor_si128(c2[2563],simde_mm_xor_si128(c2[3439],simde_mm_xor_si128(c2[3875],simde_mm_xor_si128(c2[850],simde_mm_xor_si128(c2[1726],simde_mm_xor_si128(c2[1286],simde_mm_xor_si128(c2[5250],simde_mm_xor_si128(c2[5686],simde_mm_xor_si128(c2[7008],c2[405]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 16
+     d2[352]=simde_mm_xor_si128(c2[5730],simde_mm_xor_si128(c2[5290],simde_mm_xor_si128(c2[460],simde_mm_xor_si128(c2[20],simde_mm_xor_si128(c2[442],simde_mm_xor_si128(c2[2651],simde_mm_xor_si128(c2[2211],simde_mm_xor_si128(c2[887],simde_mm_xor_si128(c2[2656],simde_mm_xor_si128(c2[54],simde_mm_xor_si128(c2[6653],simde_mm_xor_si128(c2[1823],simde_mm_xor_si128(c2[1383],simde_mm_xor_si128(c2[3575],simde_mm_xor_si128(c2[5344],simde_mm_xor_si128(c2[2248],simde_mm_xor_si128(c2[4017],simde_mm_xor_si128(c2[6208],simde_mm_xor_si128(c2[6689],simde_mm_xor_si128(c2[6249],simde_mm_xor_si128(c2[1419],simde_mm_xor_si128(c2[979],simde_mm_xor_si128(c2[6259],simde_mm_xor_si128(c2[989],simde_mm_xor_si128(c2[1893],simde_mm_xor_si128(c2[1453],simde_mm_xor_si128(c2[3662],simde_mm_xor_si128(c2[3222],simde_mm_xor_si128(c2[577],simde_mm_xor_si128(c2[2786],simde_mm_xor_si128(c2[2346],simde_mm_xor_si128(c2[4979],simde_mm_xor_si128(c2[6748],simde_mm_xor_si128(c2[6780],simde_mm_xor_si128(c2[1950],simde_mm_xor_si128(c2[1510],simde_mm_xor_si128(c2[1062],simde_mm_xor_si128(c2[2831],simde_mm_xor_si128(c2[6347],simde_mm_xor_si128(c2[1077],simde_mm_xor_si128(c2[661],simde_mm_xor_si128(c2[2870],simde_mm_xor_si128(c2[2430],simde_mm_xor_si128(c2[6828],simde_mm_xor_si128(c2[1558],simde_mm_xor_si128(c2[4227],simde_mm_xor_si128(c2[3787],simde_mm_xor_si128(c2[5996],simde_mm_xor_si128(c2[5556],simde_mm_xor_si128(c2[3348],simde_mm_xor_si128(c2[5557],simde_mm_xor_si128(c2[5117],simde_mm_xor_si128(c2[706],simde_mm_xor_si128(c2[2475],simde_mm_xor_si128(c2[2511],simde_mm_xor_si128(c2[4720],simde_mm_xor_si128(c2[4280],simde_mm_xor_si128(c2[4274],simde_mm_xor_si128(c2[6043],simde_mm_xor_si128(c2[5203],simde_mm_xor_si128(c2[373],simde_mm_xor_si128(c2[6972],simde_mm_xor_si128(c2[6521],simde_mm_xor_si128(c2[1251],simde_mm_xor_si128(c2[794],simde_mm_xor_si128(c2[2563],simde_mm_xor_si128(c2[5684],simde_mm_xor_si128(c2[5244],simde_mm_xor_si128(c2[414],simde_mm_xor_si128(c2[7013],simde_mm_xor_si128(c2[2605],simde_mm_xor_si128(c2[4814],simde_mm_xor_si128(c2[4374],simde_mm_xor_si128(c2[4363],simde_mm_xor_si128(c2[6132],c2[6565])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 17
+     d2[374]=simde_mm_xor_si128(c2[4413],simde_mm_xor_si128(c2[3973],simde_mm_xor_si128(c2[6619],simde_mm_xor_si128(c2[6179],simde_mm_xor_si128(c2[6164],simde_mm_xor_si128(c2[1771],simde_mm_xor_si128(c2[1331],simde_mm_xor_si128(c2[6609],simde_mm_xor_si128(c2[1776],simde_mm_xor_si128(c2[5776],simde_mm_xor_si128(c2[5336],simde_mm_xor_si128(c2[943],simde_mm_xor_si128(c2[503],simde_mm_xor_si128(c2[2258],simde_mm_xor_si128(c2[4464],simde_mm_xor_si128(c2[931],simde_mm_xor_si128(c2[3137],simde_mm_xor_si128(c2[2692],simde_mm_xor_si128(c2[5372],simde_mm_xor_si128(c2[4932],simde_mm_xor_si128(c2[539],simde_mm_xor_si128(c2[99],simde_mm_xor_si128(c2[4942],simde_mm_xor_si128(c2[109],simde_mm_xor_si128(c2[576],simde_mm_xor_si128(c2[136],simde_mm_xor_si128(c2[2782],simde_mm_xor_si128(c2[2342],simde_mm_xor_si128(c2[6299],simde_mm_xor_si128(c2[1906],simde_mm_xor_si128(c2[1466],simde_mm_xor_si128(c2[3662],simde_mm_xor_si128(c2[5868],simde_mm_xor_si128(c2[5463],simde_mm_xor_si128(c2[1070],simde_mm_xor_si128(c2[630],simde_mm_xor_si128(c2[6784],simde_mm_xor_si128(c2[1951],simde_mm_xor_si128(c2[5030],simde_mm_xor_si128(c2[197],simde_mm_xor_si128(c2[6383],simde_mm_xor_si128(c2[1990],simde_mm_xor_si128(c2[1550],simde_mm_xor_si128(c2[5511],simde_mm_xor_si128(c2[678],simde_mm_xor_si128(c2[3301],simde_mm_xor_si128(c2[2910],simde_mm_xor_si128(c2[2470],simde_mm_xor_si128(c2[5116],simde_mm_xor_si128(c2[4676],simde_mm_xor_si128(c2[2031],simde_mm_xor_si128(c2[4677],simde_mm_xor_si128(c2[4237],simde_mm_xor_si128(c2[6428],simde_mm_xor_si128(c2[1595],simde_mm_xor_si128(c2[1194],simde_mm_xor_si128(c2[3840],simde_mm_xor_si128(c2[3400],simde_mm_xor_si128(c2[2957],simde_mm_xor_si128(c2[5163],simde_mm_xor_si128(c2[3886],simde_mm_xor_si128(c2[6532],simde_mm_xor_si128(c2[6092],simde_mm_xor_si128(c2[5204],simde_mm_xor_si128(c2[371],simde_mm_xor_si128(c2[6516],simde_mm_xor_si128(c2[1683],simde_mm_xor_si128(c2[4367],simde_mm_xor_si128(c2[3927],simde_mm_xor_si128(c2[6573],simde_mm_xor_si128(c2[6133],simde_mm_xor_si128(c2[1288],simde_mm_xor_si128(c2[3934],simde_mm_xor_si128(c2[3494],simde_mm_xor_si128(c2[3046],c2[5252])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 18
+     d2[396]=simde_mm_xor_si128(c2[5730],simde_mm_xor_si128(c2[6865],c2[2949]));
+
+//row: 19
+     d2[418]=simde_mm_xor_si128(c2[5298],simde_mm_xor_si128(c2[450],simde_mm_xor_si128(c2[895],simde_mm_xor_si128(c2[5720],simde_mm_xor_si128(c2[6661],simde_mm_xor_si128(c2[3583],simde_mm_xor_si128(c2[2256],simde_mm_xor_si128(c2[3132],simde_mm_xor_si128(c2[6257],simde_mm_xor_si128(c2[6267],simde_mm_xor_si128(c2[1461],simde_mm_xor_si128(c2[585],simde_mm_xor_si128(c2[4987],simde_mm_xor_si128(c2[6788],simde_mm_xor_si128(c2[1070],simde_mm_xor_si128(c2[6355],simde_mm_xor_si128(c2[669],simde_mm_xor_si128(c2[6836],simde_mm_xor_si128(c2[3795],simde_mm_xor_si128(c2[3356],simde_mm_xor_si128(c2[714],simde_mm_xor_si128(c2[2519],simde_mm_xor_si128(c2[4282],simde_mm_xor_si128(c2[5211],simde_mm_xor_si128(c2[6529],simde_mm_xor_si128(c2[802],simde_mm_xor_si128(c2[5252],simde_mm_xor_si128(c2[2613],c2[4371]))))))))))))))))))))))))))));
+
+//row: 20
+     d2[440]=simde_mm_xor_si128(c2[3980],simde_mm_xor_si128(c2[3540],simde_mm_xor_si128(c2[5731],simde_mm_xor_si128(c2[6176],simde_mm_xor_si128(c2[5343],simde_mm_xor_si128(c2[4903],simde_mm_xor_si128(c2[1825],simde_mm_xor_si128(c2[498],simde_mm_xor_si128(c2[52],simde_mm_xor_si128(c2[4939],simde_mm_xor_si128(c2[4499],simde_mm_xor_si128(c2[4509],simde_mm_xor_si128(c2[143],simde_mm_xor_si128(c2[6742],simde_mm_xor_si128(c2[5866],simde_mm_xor_si128(c2[3229],simde_mm_xor_si128(c2[5030],simde_mm_xor_si128(c2[6351],simde_mm_xor_si128(c2[4597],simde_mm_xor_si128(c2[1939],simde_mm_xor_si128(c2[5950],simde_mm_xor_si128(c2[5078],simde_mm_xor_si128(c2[2477],simde_mm_xor_si128(c2[2037],simde_mm_xor_si128(c2[1598],simde_mm_xor_si128(c2[5995],simde_mm_xor_si128(c2[761],simde_mm_xor_si128(c2[2524],simde_mm_xor_si128(c2[3453],simde_mm_xor_si128(c2[4771],simde_mm_xor_si128(c2[6083],simde_mm_xor_si128(c2[3934],simde_mm_xor_si128(c2[3494],simde_mm_xor_si128(c2[855],c2[2613]))))))))))))))))))))))))))))))))));
+
+//row: 21
+     d2[462]=simde_mm_xor_si128(c2[1332],simde_mm_xor_si128(c2[3523],simde_mm_xor_si128(c2[3968],simde_mm_xor_si128(c2[5289],simde_mm_xor_si128(c2[2695],simde_mm_xor_si128(c2[6656],simde_mm_xor_si128(c2[5769],simde_mm_xor_si128(c2[5329],simde_mm_xor_si128(c2[2291],simde_mm_xor_si128(c2[2741],simde_mm_xor_si128(c2[2301],simde_mm_xor_si128(c2[4534],simde_mm_xor_si128(c2[3658],simde_mm_xor_si128(c2[1021],simde_mm_xor_si128(c2[2822],simde_mm_xor_si128(c2[4143],simde_mm_xor_si128(c2[2829],simde_mm_xor_si128(c2[2389],simde_mm_xor_si128(c2[3742],simde_mm_xor_si128(c2[3310],simde_mm_xor_si128(c2[2870],simde_mm_xor_si128(c2[6868],simde_mm_xor_si128(c2[6429],simde_mm_xor_si128(c2[4227],simde_mm_xor_si128(c2[3787],simde_mm_xor_si128(c2[5592],simde_mm_xor_si128(c2[756],simde_mm_xor_si128(c2[316],simde_mm_xor_si128(c2[1245],simde_mm_xor_si128(c2[2563],simde_mm_xor_si128(c2[4315],simde_mm_xor_si128(c2[3875],simde_mm_xor_si128(c2[3002],simde_mm_xor_si128(c2[1286],simde_mm_xor_si128(c2[5686],simde_mm_xor_si128(c2[845],c2[405]))))))))))))))))))))))))))))))))))));
+
+//row: 22
+     d2[484]=simde_mm_xor_si128(c2[925],c2[1418]);
+
+//row: 23
+     d2[506]=simde_mm_xor_si128(c2[6166],simde_mm_xor_si128(c2[1900],c2[2869]));
+
+//row: 24
+     d2[528]=simde_mm_xor_si128(c2[491],simde_mm_xor_si128(c2[5374],c2[6119]));
+
+//row: 25
+     d2[550]=simde_mm_xor_si128(c2[3524],c2[3748]);
+
+//row: 26
+     d2[572]=simde_mm_xor_si128(c2[453],simde_mm_xor_si128(c2[13],simde_mm_xor_si128(c2[5298],simde_mm_xor_si128(c2[2644],simde_mm_xor_si128(c2[2204],simde_mm_xor_si128(c2[450],simde_mm_xor_si128(c2[2649],simde_mm_xor_si128(c2[895],simde_mm_xor_si128(c2[1816],simde_mm_xor_si128(c2[1376],simde_mm_xor_si128(c2[6661],simde_mm_xor_si128(c2[5337],simde_mm_xor_si128(c2[3583],simde_mm_xor_si128(c2[4010],simde_mm_xor_si128(c2[2696],simde_mm_xor_si128(c2[2256],simde_mm_xor_si128(c2[1412],simde_mm_xor_si128(c2[972],simde_mm_xor_si128(c2[6257],simde_mm_xor_si128(c2[982],simde_mm_xor_si128(c2[6707],simde_mm_xor_si128(c2[6267],simde_mm_xor_si128(c2[1850],simde_mm_xor_si128(c2[3655],simde_mm_xor_si128(c2[3215],simde_mm_xor_si128(c2[1461],simde_mm_xor_si128(c2[2779],simde_mm_xor_si128(c2[2339],simde_mm_xor_si128(c2[585],simde_mm_xor_si128(c2[6741],simde_mm_xor_si128(c2[4987],simde_mm_xor_si128(c2[1943],simde_mm_xor_si128(c2[1503],simde_mm_xor_si128(c2[6788],simde_mm_xor_si128(c2[2824],simde_mm_xor_si128(c2[1070],simde_mm_xor_si128(c2[1070],simde_mm_xor_si128(c2[6795],simde_mm_xor_si128(c2[6355],simde_mm_xor_si128(c2[2863],simde_mm_xor_si128(c2[2423],simde_mm_xor_si128(c2[669],simde_mm_xor_si128(c2[1551],simde_mm_xor_si128(c2[237],simde_mm_xor_si128(c2[6836],simde_mm_xor_si128(c2[5989],simde_mm_xor_si128(c2[5549],simde_mm_xor_si128(c2[3795],simde_mm_xor_si128(c2[5550],simde_mm_xor_si128(c2[5110],simde_mm_xor_si128(c2[3356],simde_mm_xor_si128(c2[2468],simde_mm_xor_si128(c2[1154],simde_mm_xor_si128(c2[714],simde_mm_xor_si128(c2[4713],simde_mm_xor_si128(c2[4273],simde_mm_xor_si128(c2[2519],simde_mm_xor_si128(c2[6036],simde_mm_xor_si128(c2[4722],simde_mm_xor_si128(c2[4282],simde_mm_xor_si128(c2[2950],simde_mm_xor_si128(c2[366],simde_mm_xor_si128(c2[6965],simde_mm_xor_si128(c2[5211],simde_mm_xor_si128(c2[1244],simde_mm_xor_si128(c2[6529],simde_mm_xor_si128(c2[2556],simde_mm_xor_si128(c2[1242],simde_mm_xor_si128(c2[802],simde_mm_xor_si128(c2[407],simde_mm_xor_si128(c2[7006],simde_mm_xor_si128(c2[5252],simde_mm_xor_si128(c2[4807],simde_mm_xor_si128(c2[4367],simde_mm_xor_si128(c2[2613],simde_mm_xor_si128(c2[6125],simde_mm_xor_si128(c2[4811],c2[4371])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 27
+     d2[594]=simde_mm_xor_si128(c2[4402],c2[3790]);
+
+//row: 28
+     d2[616]=simde_mm_xor_si128(c2[48],simde_mm_xor_si128(c2[3609],c2[2429]));
+
+//row: 29
+     d2[638]=simde_mm_xor_si128(c2[4848],c2[626]);
+
+//row: 30
+     d2[660]=simde_mm_xor_si128(c2[1853],simde_mm_xor_si128(c2[5950],simde_mm_xor_si128(c2[6033],c2[6117])));
+
+//row: 31
+     d2[682]=simde_mm_xor_si128(c2[6618],simde_mm_xor_si128(c2[1770],simde_mm_xor_si128(c2[2215],simde_mm_xor_si128(c2[942],simde_mm_xor_si128(c2[4903],simde_mm_xor_si128(c2[4016],simde_mm_xor_si128(c2[3576],simde_mm_xor_si128(c2[2251],simde_mm_xor_si128(c2[538],simde_mm_xor_si128(c2[988],simde_mm_xor_si128(c2[548],simde_mm_xor_si128(c2[2781],simde_mm_xor_si128(c2[1905],simde_mm_xor_si128(c2[6307],simde_mm_xor_si128(c2[1069],simde_mm_xor_si128(c2[2390],simde_mm_xor_si128(c2[1076],simde_mm_xor_si128(c2[636],simde_mm_xor_si128(c2[1989],simde_mm_xor_si128(c2[1557],simde_mm_xor_si128(c2[1117],simde_mm_xor_si128(c2[5115],simde_mm_xor_si128(c2[4676],simde_mm_xor_si128(c2[2474],simde_mm_xor_si128(c2[2034],simde_mm_xor_si128(c2[3839],simde_mm_xor_si128(c2[6042],simde_mm_xor_si128(c2[5602],simde_mm_xor_si128(c2[6531],simde_mm_xor_si128(c2[810],simde_mm_xor_si128(c2[2562],simde_mm_xor_si128(c2[2122],simde_mm_xor_si128(c2[6572],simde_mm_xor_si128(c2[3933],simde_mm_xor_si128(c2[6131],c2[5691])))))))))))))))))))))))))))))))))));
+
+//row: 32
+     d2[704]=simde_mm_xor_si128(c2[4418],simde_mm_xor_si128(c2[3978],simde_mm_xor_si128(c2[6609],simde_mm_xor_si128(c2[6169],simde_mm_xor_si128(c2[6614],simde_mm_xor_si128(c2[1767],simde_mm_xor_si128(c2[5781],simde_mm_xor_si128(c2[5341],simde_mm_xor_si128(c2[2263],simde_mm_xor_si128(c2[936],simde_mm_xor_si128(c2[5377],simde_mm_xor_si128(c2[4937],simde_mm_xor_si128(c2[4947],simde_mm_xor_si128(c2[581],simde_mm_xor_si128(c2[141],simde_mm_xor_si128(c2[6744],simde_mm_xor_si128(c2[6304],simde_mm_xor_si128(c2[3667],simde_mm_xor_si128(c2[5908],simde_mm_xor_si128(c2[5468],simde_mm_xor_si128(c2[6789],simde_mm_xor_si128(c2[5035],simde_mm_xor_si128(c2[6828],simde_mm_xor_si128(c2[6388],simde_mm_xor_si128(c2[5516],simde_mm_xor_si128(c2[4184],simde_mm_xor_si128(c2[2915],simde_mm_xor_si128(c2[2475],simde_mm_xor_si128(c2[2476],simde_mm_xor_si128(c2[2036],simde_mm_xor_si128(c2[6433],simde_mm_xor_si128(c2[1639],simde_mm_xor_si128(c2[1199],simde_mm_xor_si128(c2[2962],simde_mm_xor_si128(c2[4331],simde_mm_xor_si128(c2[3891],simde_mm_xor_si128(c2[5209],simde_mm_xor_si128(c2[6521],simde_mm_xor_si128(c2[4372],simde_mm_xor_si128(c2[3932],simde_mm_xor_si128(c2[1733],simde_mm_xor_si128(c2[1293],c2[3051]))))))))))))))))))))))))))))))))))))))))));
+
+//row: 33
+     d2[726]=simde_mm_xor_si128(c2[3531],simde_mm_xor_si128(c2[5722],simde_mm_xor_si128(c2[6167],simde_mm_xor_si128(c2[4894],simde_mm_xor_si128(c2[1816],simde_mm_xor_si128(c2[489],simde_mm_xor_si128(c2[4490],simde_mm_xor_si128(c2[4500],simde_mm_xor_si128(c2[4054],simde_mm_xor_si128(c2[6733],simde_mm_xor_si128(c2[5857],simde_mm_xor_si128(c2[3220],simde_mm_xor_si128(c2[5021],simde_mm_xor_si128(c2[6342],simde_mm_xor_si128(c2[4588],simde_mm_xor_si128(c2[5941],simde_mm_xor_si128(c2[5069],simde_mm_xor_si128(c2[2028],simde_mm_xor_si128(c2[1589],simde_mm_xor_si128(c2[5986],simde_mm_xor_si128(c2[752],simde_mm_xor_si128(c2[2515],simde_mm_xor_si128(c2[4276],simde_mm_xor_si128(c2[3444],simde_mm_xor_si128(c2[4762],simde_mm_xor_si128(c2[6074],simde_mm_xor_si128(c2[3485],simde_mm_xor_si128(c2[846],c2[2604]))))))))))))))))))))))))))));
+
+//row: 34
+     d2[748]=simde_mm_xor_si128(c2[2212],simde_mm_xor_si128(c2[1772],simde_mm_xor_si128(c2[5739],simde_mm_xor_si128(c2[4403],simde_mm_xor_si128(c2[3963],simde_mm_xor_si128(c2[891],simde_mm_xor_si128(c2[4408],simde_mm_xor_si128(c2[1336],simde_mm_xor_si128(c2[4840],simde_mm_xor_si128(c2[3575],simde_mm_xor_si128(c2[3135],simde_mm_xor_si128(c2[63],simde_mm_xor_si128(c2[57],simde_mm_xor_si128(c2[4024],simde_mm_xor_si128(c2[5769],simde_mm_xor_si128(c2[3137],simde_mm_xor_si128(c2[2697],simde_mm_xor_si128(c2[3171],simde_mm_xor_si128(c2[2731],simde_mm_xor_si128(c2[6698],simde_mm_xor_si128(c2[2741],simde_mm_xor_si128(c2[109],simde_mm_xor_si128(c2[6708],simde_mm_xor_si128(c2[5414],simde_mm_xor_si128(c2[4974],simde_mm_xor_si128(c2[1902],simde_mm_xor_si128(c2[4538],simde_mm_xor_si128(c2[4098],simde_mm_xor_si128(c2[1026],simde_mm_xor_si128(c2[1461],simde_mm_xor_si128(c2[5428],simde_mm_xor_si128(c2[3702],simde_mm_xor_si128(c2[3262],simde_mm_xor_si128(c2[190],simde_mm_xor_si128(c2[4583],simde_mm_xor_si128(c2[1511],simde_mm_xor_si128(c2[2829],simde_mm_xor_si128(c2[197],simde_mm_xor_si128(c2[6796],simde_mm_xor_si128(c2[4622],simde_mm_xor_si128(c2[4182],simde_mm_xor_si128(c2[1110],simde_mm_xor_si128(c2[3310],simde_mm_xor_si128(c2[678],simde_mm_xor_si128(c2[238],simde_mm_xor_si128(c2[709],simde_mm_xor_si128(c2[269],simde_mm_xor_si128(c2[4236],simde_mm_xor_si128(c2[270],simde_mm_xor_si128(c2[6869],simde_mm_xor_si128(c2[3797],simde_mm_xor_si128(c2[4227],simde_mm_xor_si128(c2[1595],simde_mm_xor_si128(c2[1155],simde_mm_xor_si128(c2[6472],simde_mm_xor_si128(c2[6032],simde_mm_xor_si128(c2[2960],simde_mm_xor_si128(c2[756],simde_mm_xor_si128(c2[5163],simde_mm_xor_si128(c2[4723],simde_mm_xor_si128(c2[2125],simde_mm_xor_si128(c2[1685],simde_mm_xor_si128(c2[5652],simde_mm_xor_si128(c2[3003],simde_mm_xor_si128(c2[6970],simde_mm_xor_si128(c2[4315],simde_mm_xor_si128(c2[1683],simde_mm_xor_si128(c2[1243],simde_mm_xor_si128(c2[2166],simde_mm_xor_si128(c2[1726],simde_mm_xor_si128(c2[5693],simde_mm_xor_si128(c2[6566],simde_mm_xor_si128(c2[6126],simde_mm_xor_si128(c2[3054],simde_mm_xor_si128(c2[845],simde_mm_xor_si128(c2[5252],c2[4812]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 35
+     d2[770]=simde_mm_xor_si128(c2[3540],simde_mm_xor_si128(c2[3100],simde_mm_xor_si128(c2[5291],simde_mm_xor_si128(c2[5736],simde_mm_xor_si128(c2[4903],simde_mm_xor_si128(c2[4463],simde_mm_xor_si128(c2[1385],simde_mm_xor_si128(c2[58],simde_mm_xor_si128(c2[6211],simde_mm_xor_si128(c2[4499],simde_mm_xor_si128(c2[4059],simde_mm_xor_si128(c2[4069],simde_mm_xor_si128(c2[6742],simde_mm_xor_si128(c2[6302],simde_mm_xor_si128(c2[5426],simde_mm_xor_si128(c2[2789],simde_mm_xor_si128(c2[4590],simde_mm_xor_si128(c2[5911],simde_mm_xor_si128(c2[4157],simde_mm_xor_si128(c2[5510],simde_mm_xor_si128(c2[4638],simde_mm_xor_si128(c2[3749],simde_mm_xor_si128(c2[2037],simde_mm_xor_si128(c2[1597],simde_mm_xor_si128(c2[1158],simde_mm_xor_si128(c2[5555],simde_mm_xor_si128(c2[321],simde_mm_xor_si128(c2[2084],simde_mm_xor_si128(c2[3013],simde_mm_xor_si128(c2[4331],simde_mm_xor_si128(c2[5643],simde_mm_xor_si128(c2[3494],simde_mm_xor_si128(c2[3054],simde_mm_xor_si128(c2[415],c2[2173]))))))))))))))))))))))))))))))))));
+
+//row: 36
+     d2[792]=simde_mm_xor_si128(c2[3964],simde_mm_xor_si128(c2[4497],c2[756]));
+
+//row: 37
+     d2[814]=simde_mm_xor_si128(c2[900],simde_mm_xor_si128(c2[452],simde_mm_xor_si128(c2[3091],simde_mm_xor_si128(c2[2643],simde_mm_xor_si128(c2[3536],simde_mm_xor_si128(c2[3088],simde_mm_xor_si128(c2[2263],simde_mm_xor_si128(c2[1815],simde_mm_xor_si128(c2[6224],simde_mm_xor_si128(c2[5776],simde_mm_xor_si128(c2[4897],simde_mm_xor_si128(c2[4889],simde_mm_xor_si128(c2[4449],simde_mm_xor_si128(c2[1859],simde_mm_xor_si128(c2[1411],simde_mm_xor_si128(c2[1869],simde_mm_xor_si128(c2[1861],simde_mm_xor_si128(c2[1421],simde_mm_xor_si128(c2[4102],simde_mm_xor_si128(c2[3654],simde_mm_xor_si128(c2[3226],simde_mm_xor_si128(c2[2778],simde_mm_xor_si128(c2[589],simde_mm_xor_si128(c2[141],simde_mm_xor_si128(c2[2390],simde_mm_xor_si128(c2[1942],simde_mm_xor_si128(c2[3711],simde_mm_xor_si128(c2[3263],simde_mm_xor_si128(c2[1957],simde_mm_xor_si128(c2[1949],simde_mm_xor_si128(c2[1509],simde_mm_xor_si128(c2[3310],simde_mm_xor_si128(c2[2862],simde_mm_xor_si128(c2[2438],simde_mm_xor_si128(c2[2430],simde_mm_xor_si128(c2[1990],simde_mm_xor_si128(c2[6436],simde_mm_xor_si128(c2[5988],simde_mm_xor_si128(c2[5997],simde_mm_xor_si128(c2[5549],simde_mm_xor_si128(c2[3355],simde_mm_xor_si128(c2[3347],simde_mm_xor_si128(c2[2907],simde_mm_xor_si128(c2[5160],simde_mm_xor_si128(c2[4712],simde_mm_xor_si128(c2[6923],simde_mm_xor_si128(c2[6915],simde_mm_xor_si128(c2[6475],simde_mm_xor_si128(c2[813],simde_mm_xor_si128(c2[365],simde_mm_xor_si128(c2[2131],simde_mm_xor_si128(c2[1683],simde_mm_xor_si128(c2[3443],simde_mm_xor_si128(c2[3435],simde_mm_xor_si128(c2[2995],simde_mm_xor_si128(c2[854],simde_mm_xor_si128(c2[406],simde_mm_xor_si128(c2[5254],simde_mm_xor_si128(c2[4806],simde_mm_xor_si128(c2[7012],simde_mm_xor_si128(c2[7004],c2[6564])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 38
+     d2[836]=simde_mm_xor_si128(c2[6610],simde_mm_xor_si128(c2[6170],simde_mm_xor_si128(c2[1322],simde_mm_xor_si128(c2[1767],simde_mm_xor_si128(c2[934],simde_mm_xor_si128(c2[494],simde_mm_xor_si128(c2[4455],simde_mm_xor_si128(c2[3128],simde_mm_xor_si128(c2[51],simde_mm_xor_si128(c2[530],simde_mm_xor_si128(c2[90],simde_mm_xor_si128(c2[100],simde_mm_xor_si128(c2[2773],simde_mm_xor_si128(c2[2333],simde_mm_xor_si128(c2[1457],simde_mm_xor_si128(c2[5859],simde_mm_xor_si128(c2[621],simde_mm_xor_si128(c2[1942],simde_mm_xor_si128(c2[188],simde_mm_xor_si128(c2[1541],simde_mm_xor_si128(c2[669],simde_mm_xor_si128(c2[3300],simde_mm_xor_si128(c2[5107],simde_mm_xor_si128(c2[4667],simde_mm_xor_si128(c2[4228],simde_mm_xor_si128(c2[1586],simde_mm_xor_si128(c2[3391],simde_mm_xor_si128(c2[5154],simde_mm_xor_si128(c2[6083],simde_mm_xor_si128(c2[362],simde_mm_xor_si128(c2[1674],simde_mm_xor_si128(c2[6564],simde_mm_xor_si128(c2[6124],simde_mm_xor_si128(c2[3485],c2[5243]))))))))))))))))))))))))))))))))));
+
+//row: 39
+     d2[858]=simde_mm_xor_si128(c2[2216],simde_mm_xor_si128(c2[1776],simde_mm_xor_si128(c2[4407],simde_mm_xor_si128(c2[3967],simde_mm_xor_si128(c2[4412],simde_mm_xor_si128(c2[5726],simde_mm_xor_si128(c2[3579],simde_mm_xor_si128(c2[3139],simde_mm_xor_si128(c2[61],simde_mm_xor_si128(c2[5773],simde_mm_xor_si128(c2[3175],simde_mm_xor_si128(c2[2735],simde_mm_xor_si128(c2[2745],simde_mm_xor_si128(c2[5418],simde_mm_xor_si128(c2[4978],simde_mm_xor_si128(c2[4542],simde_mm_xor_si128(c2[4102],simde_mm_xor_si128(c2[1465],simde_mm_xor_si128(c2[3706],simde_mm_xor_si128(c2[3266],simde_mm_xor_si128(c2[4587],simde_mm_xor_si128(c2[2833],simde_mm_xor_si128(c2[4626],simde_mm_xor_si128(c2[4186],simde_mm_xor_si128(c2[3314],simde_mm_xor_si128(c2[713],simde_mm_xor_si128(c2[273],simde_mm_xor_si128(c2[274],simde_mm_xor_si128(c2[6873],simde_mm_xor_si128(c2[4231],simde_mm_xor_si128(c2[6476],simde_mm_xor_si128(c2[6036],simde_mm_xor_si128(c2[760],simde_mm_xor_si128(c2[2948],simde_mm_xor_si128(c2[2129],simde_mm_xor_si128(c2[1689],simde_mm_xor_si128(c2[3007],simde_mm_xor_si128(c2[4319],simde_mm_xor_si128(c2[2170],simde_mm_xor_si128(c2[1730],simde_mm_xor_si128(c2[6570],simde_mm_xor_si128(c2[6130],c2[849]))))))))))))))))))))))))))))))))))))))))));
+
+//row: 40
+     d2[880]=simde_mm_xor_si128(c2[3099],simde_mm_xor_si128(c2[5734],simde_mm_xor_si128(c2[5290],simde_mm_xor_si128(c2[886],simde_mm_xor_si128(c2[5735],simde_mm_xor_si128(c2[1331],simde_mm_xor_si128(c2[4462],simde_mm_xor_si128(c2[58],simde_mm_xor_si128(c2[1384],simde_mm_xor_si128(c2[4019],simde_mm_xor_si128(c2[57],simde_mm_xor_si128(c2[3132],simde_mm_xor_si128(c2[2692],simde_mm_xor_si128(c2[4058],simde_mm_xor_si128(c2[6693],simde_mm_xor_si128(c2[4068],simde_mm_xor_si128(c2[104],simde_mm_xor_si128(c2[6703],simde_mm_xor_si128(c2[98],simde_mm_xor_si128(c2[6301],simde_mm_xor_si128(c2[1897],simde_mm_xor_si128(c2[5425],simde_mm_xor_si128(c2[1021],simde_mm_xor_si128(c2[2788],simde_mm_xor_si128(c2[5423],simde_mm_xor_si128(c2[4589],simde_mm_xor_si128(c2[185],simde_mm_xor_si128(c2[5910],simde_mm_xor_si128(c2[1506],simde_mm_xor_si128(c2[4156],simde_mm_xor_si128(c2[192],simde_mm_xor_si128(c2[6791],simde_mm_xor_si128(c2[5509],simde_mm_xor_si128(c2[1105],simde_mm_xor_si128(c2[4637],simde_mm_xor_si128(c2[673],simde_mm_xor_si128(c2[233],simde_mm_xor_si128(c2[1596],simde_mm_xor_si128(c2[4231],simde_mm_xor_si128(c2[1157],simde_mm_xor_si128(c2[3792],simde_mm_xor_si128(c2[5554],simde_mm_xor_si128(c2[1590],simde_mm_xor_si128(c2[1150],simde_mm_xor_si128(c2[320],simde_mm_xor_si128(c2[2955],simde_mm_xor_si128(c2[2083],simde_mm_xor_si128(c2[5158],simde_mm_xor_si128(c2[4718],simde_mm_xor_si128(c2[3012],simde_mm_xor_si128(c2[5647],simde_mm_xor_si128(c2[4330],simde_mm_xor_si128(c2[6965],simde_mm_xor_si128(c2[5642],simde_mm_xor_si128(c2[1678],simde_mm_xor_si128(c2[1238],simde_mm_xor_si128(c2[3053],simde_mm_xor_si128(c2[5688],simde_mm_xor_si128(c2[414],simde_mm_xor_si128(c2[3049],simde_mm_xor_si128(c2[2172],simde_mm_xor_si128(c2[5247],c2[4807]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 41
+     d2[902]=simde_mm_xor_si128(c2[1770],simde_mm_xor_si128(c2[1330],simde_mm_xor_si128(c2[3521],simde_mm_xor_si128(c2[3966],simde_mm_xor_si128(c2[3133],simde_mm_xor_si128(c2[2693],simde_mm_xor_si128(c2[6654],simde_mm_xor_si128(c2[5327],simde_mm_xor_si128(c2[1812],simde_mm_xor_si128(c2[2729],simde_mm_xor_si128(c2[2289],simde_mm_xor_si128(c2[2299],simde_mm_xor_si128(c2[4972],simde_mm_xor_si128(c2[4532],simde_mm_xor_si128(c2[3656],simde_mm_xor_si128(c2[1019],simde_mm_xor_si128(c2[2820],simde_mm_xor_si128(c2[4141],simde_mm_xor_si128(c2[2387],simde_mm_xor_si128(c2[3740],simde_mm_xor_si128(c2[2868],simde_mm_xor_si128(c2[2860],simde_mm_xor_si128(c2[267],simde_mm_xor_si128(c2[6866],simde_mm_xor_si128(c2[6427],simde_mm_xor_si128(c2[3785],simde_mm_xor_si128(c2[5590],simde_mm_xor_si128(c2[314],simde_mm_xor_si128(c2[1243],simde_mm_xor_si128(c2[2561],simde_mm_xor_si128(c2[3873],simde_mm_xor_si128(c2[1724],simde_mm_xor_si128(c2[1284],simde_mm_xor_si128(c2[5684],c2[403]))))))))))))))))))))))))))))))))));
+  }
+}
+#endif /* !__AVX2__ */
diff --git a/openair1/PHY/CODING/nrLDPC_encoder/ldpc_BG2_Zc384_byte.c b/openair1/PHY/CODING/nrLDPC_encoder/ldpc_BG2_Zc384_byte.c
index ea87348f28effc7002a7fb15a07926258677c802..204289a0a8d5d0006fe26092989693c793d6933a 100644
--- a/openair1/PHY/CODING/nrLDPC_encoder/ldpc_BG2_Zc384_byte.c
+++ b/openair1/PHY/CODING/nrLDPC_encoder/ldpc_BG2_Zc384_byte.c
@@ -1,9 +1,10 @@
+#ifdef __AVX2__
 #include "PHY/sse_intrin.h"
 // generated code for Zc=384, byte encoding
 static inline __attribute__ ((no_sanitize("address", "undefined"))) void ldpc_BG2_Zc384_byte(uint8_t *c,uint8_t *d) {
-  __m256i *csimd=(__m256i *)c,*dsimd=(__m256i *)d;
+  simde__m256i *csimd=(simde__m256i *)c,*dsimd=(simde__m256i *)d;
 
-  __m256i *c2,*d2;
+  simde__m256i *c2,*d2;
 
   int i2;
   for (i2=0; i2<12; i2++) {
@@ -137,3 +138,4 @@ static inline __attribute__ ((no_sanitize("address", "undefined"))) void ldpc_BG
      d2[492]=simde_mm256_xor_si256(c2[2407],simde_mm256_xor_si256(c2[2167],simde_mm256_xor_si256(c2[5282],simde_mm256_xor_si256(c2[4802],simde_mm256_xor_si256(c2[6988],simde_mm256_xor_si256(c2[6748],simde_mm256_xor_si256(c2[3149],simde_mm256_xor_si256(c2[750],simde_mm256_xor_si256(c2[4588],simde_mm256_xor_si256(c2[535],simde_mm256_xor_si256(c2[295],simde_mm256_xor_si256(c2[2455],simde_mm256_xor_si256(c2[7275],simde_mm256_xor_si256(c2[7035],simde_mm256_xor_si256(c2[7514],simde_mm256_xor_si256(c2[3917],simde_mm256_xor_si256(c2[2739],simde_mm256_xor_si256(c2[2261],simde_mm256_xor_si256(c2[2981],simde_mm256_xor_si256(c2[5644],simde_mm256_xor_si256(c2[844],simde_mm256_xor_si256(c2[1800],simde_mm256_xor_si256(c2[868],simde_mm256_xor_si256(c2[628],simde_mm256_xor_si256(c2[6386],simde_mm256_xor_si256(c2[5429],simde_mm256_xor_si256(c2[5455],simde_mm256_xor_si256(c2[4253],simde_mm256_xor_si256(c2[4999],simde_mm256_xor_si256(c2[3317],simde_mm256_xor_si256(c2[5714],simde_mm256_xor_si256(c2[2143],simde_mm256_xor_si256(c2[1903],simde_mm256_xor_si256(c2[7417],c2[5263]))))))))))))))))))))))))))))))))));
   }
 }
+#endif /* __AVX2__ */
diff --git a/openair1/PHY/CODING/nrLDPC_encoder/ldpc_BG2_Zc384_byte_128.c b/openair1/PHY/CODING/nrLDPC_encoder/ldpc_BG2_Zc384_byte_128.c
new file mode 100644
index 0000000000000000000000000000000000000000..906ddd812d34ca04ae3818d81361cdcfcebb5142
--- /dev/null
+++ b/openair1/PHY/CODING/nrLDPC_encoder/ldpc_BG2_Zc384_byte_128.c
@@ -0,0 +1,141 @@
+#ifndef __AVX2__
+#include "PHY/sse_intrin.h"
+// generated code for Zc=384, byte encoding
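+// With Zc=384 and byte encoding, each column is 384 bytes: this 128-bit
+// fallback walks it in 24 simde__m128i lanes (24 x 16 bytes), where the
+// AVX2 variant of this file covers the same block in 12 simde__m256i lanes.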
+static inline __attribute__ ((no_sanitize("address", "undefined"))) void ldpc_BG2_Zc384_byte(uint8_t *c,uint8_t *d) {
+  simde__m128i *csimd=(simde__m128i *)c,*dsimd=(simde__m128i *)d;
+
+  simde__m128i *c2,*d2;
+
+  int i2;
+  for (i2=0; i2<24; i2++) {
+     c2=&csimd[i2];
+     d2=&dsimd[i2];
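+// Each "row: N" statement below accumulates one output block row:
+// effectively the GF(2) sum (XOR) of circularly shifted 16-byte input
+// chunks, with the c2 indices selected by the generated BG2 expansion.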
+
+//row: 0
+     d2[0]=simde_mm_xor_si128(c2[6250],simde_mm_xor_si128(c2[4801],simde_mm_xor_si128(c2[3841],simde_mm_xor_si128(c2[54],simde_mm_xor_si128(c2[535],simde_mm_xor_si128(c2[3416],simde_mm_xor_si128(c2[2506],simde_mm_xor_si128(c2[6826],simde_mm_xor_si128(c2[628],simde_mm_xor_si128(c2[1586],simde_mm_xor_si128(c2[2071],simde_mm_xor_si128(c2[7394],simde_mm_xor_si128(c2[6438],simde_mm_xor_si128(c2[199],simde_mm_xor_si128(c2[5525],simde_mm_xor_si128(c2[3604],simde_mm_xor_si128(c2[3172],simde_mm_xor_si128(c2[7009],simde_mm_xor_si128(c2[5095],simde_mm_xor_si128(c2[5147],simde_mm_xor_si128(c2[2743],simde_mm_xor_si128(c2[4235],simde_mm_xor_si128(c2[871],simde_mm_xor_si128(c2[5665],simde_mm_xor_si128(c2[5722],simde_mm_xor_si128(c2[1392],c2[4763]))))))))))))))))))))))))));
+
+//row: 1
+     d2[24]=simde_mm_xor_si128(c2[6730],simde_mm_xor_si128(c2[6250],simde_mm_xor_si128(c2[4801],simde_mm_xor_si128(c2[3841],simde_mm_xor_si128(c2[534],simde_mm_xor_si128(c2[54],simde_mm_xor_si128(c2[535],simde_mm_xor_si128(c2[3416],simde_mm_xor_si128(c2[2986],simde_mm_xor_si128(c2[2506],simde_mm_xor_si128(c2[6826],simde_mm_xor_si128(c2[1108],simde_mm_xor_si128(c2[628],simde_mm_xor_si128(c2[1586],simde_mm_xor_si128(c2[2071],simde_mm_xor_si128(c2[7394],simde_mm_xor_si128(c2[6438],simde_mm_xor_si128(c2[199],simde_mm_xor_si128(c2[5525],simde_mm_xor_si128(c2[3604],simde_mm_xor_si128(c2[3652],simde_mm_xor_si128(c2[3172],simde_mm_xor_si128(c2[7009],simde_mm_xor_si128(c2[5095],simde_mm_xor_si128(c2[5147],simde_mm_xor_si128(c2[2743],simde_mm_xor_si128(c2[4235],simde_mm_xor_si128(c2[871],simde_mm_xor_si128(c2[5665],simde_mm_xor_si128(c2[6202],simde_mm_xor_si128(c2[5722],simde_mm_xor_si128(c2[1392],c2[4763]))))))))))))))))))))))))))))))));
+
+//row: 2
+     d2[48]=simde_mm_xor_si128(c2[6730],simde_mm_xor_si128(c2[6250],simde_mm_xor_si128(c2[5281],simde_mm_xor_si128(c2[4801],simde_mm_xor_si128(c2[3841],simde_mm_xor_si128(c2[534],simde_mm_xor_si128(c2[54],simde_mm_xor_si128(c2[535],simde_mm_xor_si128(c2[3416],simde_mm_xor_si128(c2[2986],simde_mm_xor_si128(c2[2506],simde_mm_xor_si128(c2[6826],simde_mm_xor_si128(c2[1108],simde_mm_xor_si128(c2[628],simde_mm_xor_si128(c2[2066],simde_mm_xor_si128(c2[1586],simde_mm_xor_si128(c2[2071],simde_mm_xor_si128(c2[195],simde_mm_xor_si128(c2[7394],simde_mm_xor_si128(c2[6438],simde_mm_xor_si128(c2[199],simde_mm_xor_si128(c2[6005],simde_mm_xor_si128(c2[5525],simde_mm_xor_si128(c2[3604],simde_mm_xor_si128(c2[3652],simde_mm_xor_si128(c2[3172],simde_mm_xor_si128(c2[7489],simde_mm_xor_si128(c2[7009],simde_mm_xor_si128(c2[5095],simde_mm_xor_si128(c2[5627],simde_mm_xor_si128(c2[5147],simde_mm_xor_si128(c2[2743],simde_mm_xor_si128(c2[4715],simde_mm_xor_si128(c2[4235],simde_mm_xor_si128(c2[871],simde_mm_xor_si128(c2[5665],simde_mm_xor_si128(c2[6202],simde_mm_xor_si128(c2[5722],simde_mm_xor_si128(c2[1872],simde_mm_xor_si128(c2[1392],c2[4763]))))))))))))))))))))))))))))))))))))))));
+
+//row: 3
+     d2[72]=simde_mm_xor_si128(c2[6250],simde_mm_xor_si128(c2[4801],simde_mm_xor_si128(c2[3841],simde_mm_xor_si128(c2[54],simde_mm_xor_si128(c2[535],simde_mm_xor_si128(c2[3896],simde_mm_xor_si128(c2[3416],simde_mm_xor_si128(c2[2506],simde_mm_xor_si128(c2[7306],simde_mm_xor_si128(c2[6826],simde_mm_xor_si128(c2[628],simde_mm_xor_si128(c2[1586],simde_mm_xor_si128(c2[2071],simde_mm_xor_si128(c2[7394],simde_mm_xor_si128(c2[6438],simde_mm_xor_si128(c2[679],simde_mm_xor_si128(c2[199],simde_mm_xor_si128(c2[5525],simde_mm_xor_si128(c2[4084],simde_mm_xor_si128(c2[3604],simde_mm_xor_si128(c2[3172],simde_mm_xor_si128(c2[7009],simde_mm_xor_si128(c2[5575],simde_mm_xor_si128(c2[5095],simde_mm_xor_si128(c2[5147],simde_mm_xor_si128(c2[3223],simde_mm_xor_si128(c2[2743],simde_mm_xor_si128(c2[4235],simde_mm_xor_si128(c2[871],simde_mm_xor_si128(c2[6145],simde_mm_xor_si128(c2[5665],simde_mm_xor_si128(c2[5722],simde_mm_xor_si128(c2[1392],simde_mm_xor_si128(c2[5243],c2[4763]))))))))))))))))))))))))))))))))));
+
+//row: 4
+     d2[96]=simde_mm_xor_si128(c2[5292],simde_mm_xor_si128(c2[4812],simde_mm_xor_si128(c2[3363],simde_mm_xor_si128(c2[2403],simde_mm_xor_si128(c2[3844],simde_mm_xor_si128(c2[6775],simde_mm_xor_si128(c2[6295],simde_mm_xor_si128(c2[6776],simde_mm_xor_si128(c2[1978],simde_mm_xor_si128(c2[4852],simde_mm_xor_si128(c2[1548],simde_mm_xor_si128(c2[1068],simde_mm_xor_si128(c2[5388],simde_mm_xor_si128(c2[7349],simde_mm_xor_si128(c2[6869],simde_mm_xor_si128(c2[148],simde_mm_xor_si128(c2[633],simde_mm_xor_si128(c2[5956],simde_mm_xor_si128(c2[5000],simde_mm_xor_si128(c2[6440],simde_mm_xor_si128(c2[4087],simde_mm_xor_si128(c2[2166],simde_mm_xor_si128(c2[2214],simde_mm_xor_si128(c2[1734],simde_mm_xor_si128(c2[5571],simde_mm_xor_si128(c2[3657],simde_mm_xor_si128(c2[3709],simde_mm_xor_si128(c2[1305],simde_mm_xor_si128(c2[2797],simde_mm_xor_si128(c2[7112],simde_mm_xor_si128(c2[4227],simde_mm_xor_si128(c2[4764],simde_mm_xor_si128(c2[4284],simde_mm_xor_si128(c2[7633],c2[3325]))))))))))))))))))))))))))))))))));
+
+//row: 5
+     d2[120]=simde_mm_xor_si128(c2[6733],simde_mm_xor_si128(c2[6253],simde_mm_xor_si128(c2[4804],simde_mm_xor_si128(c2[3844],simde_mm_xor_si128(c2[4800],simde_mm_xor_si128(c2[537],simde_mm_xor_si128(c2[57],simde_mm_xor_si128(c2[538],simde_mm_xor_si128(c2[3419],simde_mm_xor_si128(c2[5810],simde_mm_xor_si128(c2[2989],simde_mm_xor_si128(c2[2509],simde_mm_xor_si128(c2[6829],simde_mm_xor_si128(c2[1111],simde_mm_xor_si128(c2[631],simde_mm_xor_si128(c2[1589],simde_mm_xor_si128(c2[2074],simde_mm_xor_si128(c2[7397],simde_mm_xor_si128(c2[6441],simde_mm_xor_si128(c2[202],simde_mm_xor_si128(c2[5528],simde_mm_xor_si128(c2[3607],simde_mm_xor_si128(c2[4567],simde_mm_xor_si128(c2[3655],simde_mm_xor_si128(c2[3175],simde_mm_xor_si128(c2[7012],simde_mm_xor_si128(c2[5098],simde_mm_xor_si128(c2[5150],simde_mm_xor_si128(c2[2746],simde_mm_xor_si128(c2[341],simde_mm_xor_si128(c2[4238],simde_mm_xor_si128(c2[874],simde_mm_xor_si128(c2[5668],simde_mm_xor_si128(c2[6205],simde_mm_xor_si128(c2[5725],simde_mm_xor_si128(c2[1395],c2[4766]))))))))))))))))))))))))))))))))))));
+
+//row: 6
+     d2[144]=simde_mm_xor_si128(c2[2902],simde_mm_xor_si128(c2[2422],simde_mm_xor_si128(c2[973],simde_mm_xor_si128(c2[13],simde_mm_xor_si128(c2[488],simde_mm_xor_si128(c2[4385],simde_mm_xor_si128(c2[3905],simde_mm_xor_si128(c2[4386],simde_mm_xor_si128(c2[7267],simde_mm_xor_si128(c2[6837],simde_mm_xor_si128(c2[6357],simde_mm_xor_si128(c2[2998],simde_mm_xor_si128(c2[4959],simde_mm_xor_si128(c2[4479],simde_mm_xor_si128(c2[5437],simde_mm_xor_si128(c2[5922],simde_mm_xor_si128(c2[3566],simde_mm_xor_si128(c2[2610],simde_mm_xor_si128(c2[4050],simde_mm_xor_si128(c2[1697],simde_mm_xor_si128(c2[7455],simde_mm_xor_si128(c2[6005],simde_mm_xor_si128(c2[7503],simde_mm_xor_si128(c2[7023],simde_mm_xor_si128(c2[3181],simde_mm_xor_si128(c2[1267],simde_mm_xor_si128(c2[1319],simde_mm_xor_si128(c2[6594],simde_mm_xor_si128(c2[2262],simde_mm_xor_si128(c2[407],simde_mm_xor_si128(c2[4722],simde_mm_xor_si128(c2[1837],simde_mm_xor_si128(c2[2374],simde_mm_xor_si128(c2[1894],simde_mm_xor_si128(c2[5243],simde_mm_xor_si128(c2[935],c2[915]))))))))))))))))))))))))))))))))))));
+
+//row: 7
+     d2[168]=simde_mm_xor_si128(c2[1937],simde_mm_xor_si128(c2[1457],simde_mm_xor_si128(c2[5779],simde_mm_xor_si128(c2[8],simde_mm_xor_si128(c2[4330],simde_mm_xor_si128(c2[6727],simde_mm_xor_si128(c2[3370],simde_mm_xor_si128(c2[3420],simde_mm_xor_si128(c2[2940],simde_mm_xor_si128(c2[7262],simde_mm_xor_si128(c2[3421],simde_mm_xor_si128(c2[64],simde_mm_xor_si128(c2[6302],simde_mm_xor_si128(c2[3425],simde_mm_xor_si128(c2[2945],simde_mm_xor_si128(c2[53],simde_mm_xor_si128(c2[5872],simde_mm_xor_si128(c2[5392],simde_mm_xor_si128(c2[2035],simde_mm_xor_si128(c2[2033],simde_mm_xor_si128(c2[6835],simde_mm_xor_si128(c2[6355],simde_mm_xor_si128(c2[3994],simde_mm_xor_si128(c2[3514],simde_mm_xor_si128(c2[157],simde_mm_xor_si128(c2[4472],simde_mm_xor_si128(c2[1115],simde_mm_xor_si128(c2[4957],simde_mm_xor_si128(c2[1600],simde_mm_xor_si128(c2[2601],simde_mm_xor_si128(c2[6923],simde_mm_xor_si128(c2[1645],simde_mm_xor_si128(c2[5967],simde_mm_xor_si128(c2[3085],simde_mm_xor_si128(c2[208],simde_mm_xor_si128(c2[7407],simde_mm_xor_si128(c2[732],simde_mm_xor_si128(c2[5054],simde_mm_xor_si128(c2[6490],simde_mm_xor_si128(c2[3613],simde_mm_xor_si128(c2[3133],simde_mm_xor_si128(c2[5051],simde_mm_xor_si128(c2[6538],simde_mm_xor_si128(c2[6058],simde_mm_xor_si128(c2[2701],simde_mm_xor_si128(c2[2216],simde_mm_xor_si128(c2[6538],simde_mm_xor_si128(c2[302],simde_mm_xor_si128(c2[5104],simde_mm_xor_si128(c2[4624],simde_mm_xor_si128(c2[354],simde_mm_xor_si128(c2[4676],simde_mm_xor_si128(c2[5629],simde_mm_xor_si128(c2[2752],simde_mm_xor_si128(c2[2272],simde_mm_xor_si128(c2[337],simde_mm_xor_si128(c2[7121],simde_mm_xor_si128(c2[3764],simde_mm_xor_si128(c2[3757],simde_mm_xor_si128(c2[400],simde_mm_xor_si128(c2[872],simde_mm_xor_si128(c2[5674],simde_mm_xor_si128(c2[5194],simde_mm_xor_si128(c2[1409],simde_mm_xor_si128(c2[929],simde_mm_xor_si128(c2[5251],simde_mm_xor_si128(c2[4278],simde_mm_xor_si128(c2[921],simde_mm_xor_si128(c2[7649],simde_mm_xor_si128(c2[4772],c2[4292]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 8
+     d2[192]=simde_mm_xor_si128(c2[2900],simde_mm_xor_si128(c2[2420],simde_mm_xor_si128(c2[1451],simde_mm_xor_si128(c2[971],simde_mm_xor_si128(c2[11],simde_mm_xor_si128(c2[2887],simde_mm_xor_si128(c2[4383],simde_mm_xor_si128(c2[3903],simde_mm_xor_si128(c2[4384],simde_mm_xor_si128(c2[7265],simde_mm_xor_si128(c2[2932],simde_mm_xor_si128(c2[6835],simde_mm_xor_si128(c2[6355],simde_mm_xor_si128(c2[2996],simde_mm_xor_si128(c2[4957],simde_mm_xor_si128(c2[4477],simde_mm_xor_si128(c2[5915],simde_mm_xor_si128(c2[5435],simde_mm_xor_si128(c2[5920],simde_mm_xor_si128(c2[4044],simde_mm_xor_si128(c2[3564],simde_mm_xor_si128(c2[2608],simde_mm_xor_si128(c2[4048],simde_mm_xor_si128(c2[2175],simde_mm_xor_si128(c2[1695],simde_mm_xor_si128(c2[7453],simde_mm_xor_si128(c2[7501],simde_mm_xor_si128(c2[7021],simde_mm_xor_si128(c2[3659],simde_mm_xor_si128(c2[3179],simde_mm_xor_si128(c2[1265],simde_mm_xor_si128(c2[1797],simde_mm_xor_si128(c2[1317],simde_mm_xor_si128(c2[6592],simde_mm_xor_si128(c2[885],simde_mm_xor_si128(c2[405],simde_mm_xor_si128(c2[4720],simde_mm_xor_si128(c2[1835],simde_mm_xor_si128(c2[2372],simde_mm_xor_si128(c2[1892],simde_mm_xor_si128(c2[5721],simde_mm_xor_si128(c2[5241],c2[933]))))))))))))))))))))))))))))))))))))))))));
+
+//row: 9
+     d2[216]=simde_mm_xor_si128(c2[2902],simde_mm_xor_si128(c2[22],simde_mm_xor_si128(c2[7221],simde_mm_xor_si128(c2[1453],simde_mm_xor_si128(c2[5772],simde_mm_xor_si128(c2[493],simde_mm_xor_si128(c2[4812],simde_mm_xor_si128(c2[4385],simde_mm_xor_si128(c2[1505],simde_mm_xor_si128(c2[1025],simde_mm_xor_si128(c2[4866],simde_mm_xor_si128(c2[1506],simde_mm_xor_si128(c2[68],simde_mm_xor_si128(c2[4387],simde_mm_xor_si128(c2[5809],simde_mm_xor_si128(c2[6837],simde_mm_xor_si128(c2[3957],simde_mm_xor_si128(c2[3477],simde_mm_xor_si128(c2[3478],simde_mm_xor_si128(c2[118],simde_mm_xor_si128(c2[4959],simde_mm_xor_si128(c2[2079],simde_mm_xor_si128(c2[1599],simde_mm_xor_si128(c2[5917],simde_mm_xor_si128(c2[2557],simde_mm_xor_si128(c2[6402],simde_mm_xor_si128(c2[3042],simde_mm_xor_si128(c2[4046],simde_mm_xor_si128(c2[686],simde_mm_xor_si128(c2[3090],simde_mm_xor_si128(c2[7409],simde_mm_xor_si128(c2[4530],simde_mm_xor_si128(c2[1170],simde_mm_xor_si128(c2[2177],simde_mm_xor_si128(c2[6496],simde_mm_xor_si128(c2[256],simde_mm_xor_si128(c2[4575],simde_mm_xor_si128(c2[7503],simde_mm_xor_si128(c2[4623],simde_mm_xor_si128(c2[4143],simde_mm_xor_si128(c2[3661],simde_mm_xor_si128(c2[301],simde_mm_xor_si128(c2[1747],simde_mm_xor_si128(c2[6066],simde_mm_xor_si128(c2[1799],simde_mm_xor_si128(c2[6118],simde_mm_xor_si128(c2[7074],simde_mm_xor_si128(c2[3714],simde_mm_xor_si128(c2[887],simde_mm_xor_si128(c2[5206],simde_mm_xor_si128(c2[5202],simde_mm_xor_si128(c2[1842],simde_mm_xor_si128(c2[2317],simde_mm_xor_si128(c2[6636],simde_mm_xor_si128(c2[2312],simde_mm_xor_si128(c2[2374],simde_mm_xor_si128(c2[7173],simde_mm_xor_si128(c2[6693],simde_mm_xor_si128(c2[5723],simde_mm_xor_si128(c2[2363],simde_mm_xor_si128(c2[1415],c2[5734])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 10
+     d2[240]=simde_mm_xor_si128(c2[5283],simde_mm_xor_si128(c2[3894],simde_mm_xor_si128(c2[3169],c2[2259])));
+
+//row: 11
+     d2[264]=simde_mm_xor_si128(c2[3380],simde_mm_xor_si128(c2[1931],simde_mm_xor_si128(c2[971],simde_mm_xor_si128(c2[2],simde_mm_xor_si128(c2[4863],simde_mm_xor_si128(c2[5344],simde_mm_xor_si128(c2[1026],simde_mm_xor_si128(c2[546],simde_mm_xor_si128(c2[7315],simde_mm_xor_si128(c2[4436],simde_mm_xor_si128(c2[3956],simde_mm_xor_si128(c2[5437],simde_mm_xor_si128(c2[6395],simde_mm_xor_si128(c2[6880],simde_mm_xor_si128(c2[4524],simde_mm_xor_si128(c2[3568],simde_mm_xor_si128(c2[5488],simde_mm_xor_si128(c2[5008],simde_mm_xor_si128(c2[2655],simde_mm_xor_si128(c2[1214],simde_mm_xor_si128(c2[734],simde_mm_xor_si128(c2[302],simde_mm_xor_si128(c2[4139],simde_mm_xor_si128(c2[2705],simde_mm_xor_si128(c2[2225],simde_mm_xor_si128(c2[2277],simde_mm_xor_si128(c2[353],simde_mm_xor_si128(c2[7552],simde_mm_xor_si128(c2[6101],simde_mm_xor_si128(c2[1365],simde_mm_xor_si128(c2[5680],simde_mm_xor_si128(c2[3275],simde_mm_xor_si128(c2[2795],simde_mm_xor_si128(c2[2852],simde_mm_xor_si128(c2[6201],simde_mm_xor_si128(c2[2373],simde_mm_xor_si128(c2[1893],c2[7162])))))))))))))))))))))))))))))))))))));
+
+//row: 12
+     d2[288]=simde_mm_xor_si128(c2[4331],simde_mm_xor_si128(c2[3851],simde_mm_xor_si128(c2[2402],simde_mm_xor_si128(c2[1442],simde_mm_xor_si128(c2[5814],simde_mm_xor_si128(c2[5334],simde_mm_xor_si128(c2[5815],simde_mm_xor_si128(c2[1017],simde_mm_xor_si128(c2[3410],simde_mm_xor_si128(c2[587],simde_mm_xor_si128(c2[107],simde_mm_xor_si128(c2[4427],simde_mm_xor_si128(c2[6388],simde_mm_xor_si128(c2[5908],simde_mm_xor_si128(c2[6866],simde_mm_xor_si128(c2[7351],simde_mm_xor_si128(c2[6389],simde_mm_xor_si128(c2[4995],simde_mm_xor_si128(c2[4039],simde_mm_xor_si128(c2[5479],simde_mm_xor_si128(c2[3126],simde_mm_xor_si128(c2[1205],simde_mm_xor_si128(c2[1253],simde_mm_xor_si128(c2[773],simde_mm_xor_si128(c2[4610],simde_mm_xor_si128(c2[2696],simde_mm_xor_si128(c2[2748],simde_mm_xor_si128(c2[344],simde_mm_xor_si128(c2[1836],simde_mm_xor_si128(c2[6151],simde_mm_xor_si128(c2[3266],simde_mm_xor_si128(c2[3803],simde_mm_xor_si128(c2[3323],simde_mm_xor_si128(c2[6672],c2[2364]))))))))))))))))))))))))))))))))));
+
+//row: 13
+     d2[312]=simde_mm_xor_si128(c2[1461],simde_mm_xor_si128(c2[12],simde_mm_xor_si128(c2[6731],simde_mm_xor_si128(c2[483],simde_mm_xor_si128(c2[2944],simde_mm_xor_si128(c2[3425],simde_mm_xor_si128(c2[6786],simde_mm_xor_si128(c2[6306],simde_mm_xor_si128(c2[6295],simde_mm_xor_si128(c2[5396],simde_mm_xor_si128(c2[2517],simde_mm_xor_si128(c2[2037],simde_mm_xor_si128(c2[3518],simde_mm_xor_si128(c2[4476],simde_mm_xor_si128(c2[4961],simde_mm_xor_si128(c2[2605],simde_mm_xor_si128(c2[1649],simde_mm_xor_si128(c2[3569],simde_mm_xor_si128(c2[3089],simde_mm_xor_si128(c2[736],simde_mm_xor_si128(c2[6974],simde_mm_xor_si128(c2[6494],simde_mm_xor_si128(c2[6062],simde_mm_xor_si128(c2[2220],simde_mm_xor_si128(c2[786],simde_mm_xor_si128(c2[306],simde_mm_xor_si128(c2[358],simde_mm_xor_si128(c2[6113],simde_mm_xor_si128(c2[5633],simde_mm_xor_si128(c2[7125],simde_mm_xor_si128(c2[3761],simde_mm_xor_si128(c2[1356],simde_mm_xor_si128(c2[876],simde_mm_xor_si128(c2[1826],simde_mm_xor_si128(c2[933],simde_mm_xor_si128(c2[4282],simde_mm_xor_si128(c2[454],c2[7653])))))))))))))))))))))))))))))))))))));
+
+//row: 14
+     d2[336]=simde_mm_xor_si128(c2[1452],simde_mm_xor_si128(c2[972],simde_mm_xor_si128(c2[21],simde_mm_xor_si128(c2[7202],simde_mm_xor_si128(c2[6251],simde_mm_xor_si128(c2[6242],simde_mm_xor_si128(c2[5291],simde_mm_xor_si128(c2[2935],simde_mm_xor_si128(c2[2455],simde_mm_xor_si128(c2[1504],simde_mm_xor_si128(c2[2936],simde_mm_xor_si128(c2[1985],simde_mm_xor_si128(c2[5817],simde_mm_xor_si128(c2[5346],simde_mm_xor_si128(c2[4866],simde_mm_xor_si128(c2[1489],simde_mm_xor_si128(c2[5387],simde_mm_xor_si128(c2[4907],simde_mm_xor_si128(c2[3956],simde_mm_xor_si128(c2[1548],simde_mm_xor_si128(c2[1077],simde_mm_xor_si128(c2[597],simde_mm_xor_si128(c2[3509],simde_mm_xor_si128(c2[3029],simde_mm_xor_si128(c2[2078],simde_mm_xor_si128(c2[3987],simde_mm_xor_si128(c2[3036],simde_mm_xor_si128(c2[4472],simde_mm_xor_si128(c2[3521],simde_mm_xor_si128(c2[2116],simde_mm_xor_si128(c2[1165],simde_mm_xor_si128(c2[1160],simde_mm_xor_si128(c2[209],simde_mm_xor_si128(c2[2600],simde_mm_xor_si128(c2[2129],simde_mm_xor_si128(c2[1649],simde_mm_xor_si128(c2[247],simde_mm_xor_si128(c2[6975],simde_mm_xor_si128(c2[6005],simde_mm_xor_si128(c2[5534],simde_mm_xor_si128(c2[5054],simde_mm_xor_si128(c2[6053],simde_mm_xor_si128(c2[5573],simde_mm_xor_si128(c2[4622],simde_mm_xor_si128(c2[1731],simde_mm_xor_si128(c2[780],simde_mm_xor_si128(c2[7496],simde_mm_xor_si128(c2[7025],simde_mm_xor_si128(c2[6545],simde_mm_xor_si128(c2[3175],simde_mm_xor_si128(c2[7548],simde_mm_xor_si128(c2[6597],simde_mm_xor_si128(c2[5144],simde_mm_xor_si128(c2[4673],simde_mm_xor_si128(c2[4193],simde_mm_xor_si128(c2[6636],simde_mm_xor_si128(c2[5685],simde_mm_xor_si128(c2[3272],simde_mm_xor_si128(c2[2321],simde_mm_xor_si128(c2[387],simde_mm_xor_si128(c2[7595],simde_mm_xor_si128(c2[7115],simde_mm_xor_si128(c2[924],simde_mm_xor_si128(c2[444],simde_mm_xor_si128(c2[7172],simde_mm_xor_si128(c2[3793],simde_mm_xor_si128(c2[2842],simde_mm_xor_si128(c2[7164],simde_mm_xor_si128(c2[6693],c2[6213])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 15
+     d2[360]=simde_mm_xor_si128(c2[5774],simde_mm_xor_si128(c2[7215],simde_mm_xor_si128(c2[6735],simde_mm_xor_si128(c2[4325],simde_mm_xor_si128(c2[5286],simde_mm_xor_si128(c2[3365],simde_mm_xor_si128(c2[4326],simde_mm_xor_si128(c2[1924],simde_mm_xor_si128(c2[7257],simde_mm_xor_si128(c2[1019],simde_mm_xor_si128(c2[539],simde_mm_xor_si128(c2[59],simde_mm_xor_si128(c2[1020],simde_mm_xor_si128(c2[2940],simde_mm_xor_si128(c2[3901],simde_mm_xor_si128(c2[2030],simde_mm_xor_si128(c2[3471],simde_mm_xor_si128(c2[2991],simde_mm_xor_si128(c2[6350],simde_mm_xor_si128(c2[7311],simde_mm_xor_si128(c2[152],simde_mm_xor_si128(c2[1593],simde_mm_xor_si128(c2[1113],simde_mm_xor_si128(c2[1110],simde_mm_xor_si128(c2[2071],simde_mm_xor_si128(c2[1595],simde_mm_xor_si128(c2[2556],simde_mm_xor_si128(c2[6918],simde_mm_xor_si128(c2[200],simde_mm_xor_si128(c2[5962],simde_mm_xor_si128(c2[6923],simde_mm_xor_si128(c2[7402],simde_mm_xor_si128(c2[684],simde_mm_xor_si128(c2[5049],simde_mm_xor_si128(c2[6010],simde_mm_xor_si128(c2[3128],simde_mm_xor_si128(c2[4089],simde_mm_xor_si128(c2[2696],simde_mm_xor_si128(c2[4137],simde_mm_xor_si128(c2[3657],simde_mm_xor_si128(c2[6533],simde_mm_xor_si128(c2[7494],simde_mm_xor_si128(c2[4619],simde_mm_xor_si128(c2[5580],simde_mm_xor_si128(c2[4671],simde_mm_xor_si128(c2[5632],simde_mm_xor_si128(c2[2267],simde_mm_xor_si128(c2[3228],simde_mm_xor_si128(c2[3759],simde_mm_xor_si128(c2[4720],simde_mm_xor_si128(c2[395],simde_mm_xor_si128(c2[1356],simde_mm_xor_si128(c2[5189],simde_mm_xor_si128(c2[6150],simde_mm_xor_si128(c2[5246],simde_mm_xor_si128(c2[6687],simde_mm_xor_si128(c2[6207],simde_mm_xor_si128(c2[916],simde_mm_xor_si128(c2[1877],simde_mm_xor_si128(c2[4287],c2[5248]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 16
+     d2[384]=simde_mm_xor_si128(c2[2419],simde_mm_xor_si128(c2[1939],simde_mm_xor_si128(c2[6734],simde_mm_xor_si128(c2[6254],simde_mm_xor_si128(c2[490],simde_mm_xor_si128(c2[5285],simde_mm_xor_si128(c2[4805],simde_mm_xor_si128(c2[7209],simde_mm_xor_si128(c2[3845],simde_mm_xor_si128(c2[3902],simde_mm_xor_si128(c2[3422],simde_mm_xor_si128(c2[538],simde_mm_xor_si128(c2[58],simde_mm_xor_si128(c2[3903],simde_mm_xor_si128(c2[539],simde_mm_xor_si128(c2[6784],simde_mm_xor_si128(c2[3420],simde_mm_xor_si128(c2[3413],simde_mm_xor_si128(c2[6354],simde_mm_xor_si128(c2[5874],simde_mm_xor_si128(c2[2990],simde_mm_xor_si128(c2[2510],simde_mm_xor_si128(c2[2515],simde_mm_xor_si128(c2[6830],simde_mm_xor_si128(c2[4476],simde_mm_xor_si128(c2[3996],simde_mm_xor_si128(c2[1112],simde_mm_xor_si128(c2[632],simde_mm_xor_si128(c2[4954],simde_mm_xor_si128(c2[2070],simde_mm_xor_si128(c2[1590],simde_mm_xor_si128(c2[5439],simde_mm_xor_si128(c2[2075],simde_mm_xor_si128(c2[3083],simde_mm_xor_si128(c2[199],simde_mm_xor_si128(c2[7398],simde_mm_xor_si128(c2[2127],simde_mm_xor_si128(c2[6442],simde_mm_xor_si128(c2[3567],simde_mm_xor_si128(c2[203],simde_mm_xor_si128(c2[1214],simde_mm_xor_si128(c2[6009],simde_mm_xor_si128(c2[5529],simde_mm_xor_si128(c2[6972],simde_mm_xor_si128(c2[3608],simde_mm_xor_si128(c2[7020],simde_mm_xor_si128(c2[6540],simde_mm_xor_si128(c2[3656],simde_mm_xor_si128(c2[3176],simde_mm_xor_si128(c2[2698],simde_mm_xor_si128(c2[7493],simde_mm_xor_si128(c2[7013],simde_mm_xor_si128(c2[784],simde_mm_xor_si128(c2[5099],simde_mm_xor_si128(c2[836],simde_mm_xor_si128(c2[5631],simde_mm_xor_si128(c2[5151],simde_mm_xor_si128(c2[6111],simde_mm_xor_si128(c2[2747],simde_mm_xor_si128(c2[7603],simde_mm_xor_si128(c2[4719],simde_mm_xor_si128(c2[4239],simde_mm_xor_si128(c2[4239],simde_mm_xor_si128(c2[875],simde_mm_xor_si128(c2[1354],simde_mm_xor_si128(c2[5669],simde_mm_xor_si128(c2[1891],simde_mm_xor_si128(c2[1411],simde_mm_xor_si128(c2[6206],simde_mm_xor_si128(c2[5726],simde_mm_xor_si128(c2[4760],simde_mm_xor_si128(c2[1876],simde_mm_xor_si128(c2[1396],simde_mm_xor_si128(c2[452],simde_mm_xor_si128(c2[4767],c2[923])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 17
+     d2[408]=simde_mm_xor_si128(c2[3371],simde_mm_xor_si128(c2[2891],simde_mm_xor_si128(c2[1931],simde_mm_xor_si128(c2[1451],simde_mm_xor_si128(c2[1442],simde_mm_xor_si128(c2[482],simde_mm_xor_si128(c2[2],simde_mm_xor_si128(c2[482],simde_mm_xor_si128(c2[6721],simde_mm_xor_si128(c2[4854],simde_mm_xor_si128(c2[4374],simde_mm_xor_si128(c2[3414],simde_mm_xor_si128(c2[2934],simde_mm_xor_si128(c2[4855],simde_mm_xor_si128(c2[3415],simde_mm_xor_si128(c2[57],simde_mm_xor_si128(c2[6296],simde_mm_xor_si128(c2[6777],simde_mm_xor_si128(c2[7306],simde_mm_xor_si128(c2[6826],simde_mm_xor_si128(c2[5866],simde_mm_xor_si128(c2[5386],simde_mm_xor_si128(c2[3467],simde_mm_xor_si128(c2[2027],simde_mm_xor_si128(c2[5428],simde_mm_xor_si128(c2[4948],simde_mm_xor_si128(c2[3988],simde_mm_xor_si128(c2[3508],simde_mm_xor_si128(c2[5906],simde_mm_xor_si128(c2[4946],simde_mm_xor_si128(c2[4466],simde_mm_xor_si128(c2[6391],simde_mm_xor_si128(c2[4951],simde_mm_xor_si128(c2[4035],simde_mm_xor_si128(c2[3075],simde_mm_xor_si128(c2[2595],simde_mm_xor_si128(c2[3079],simde_mm_xor_si128(c2[1639],simde_mm_xor_si128(c2[4519],simde_mm_xor_si128(c2[3079],simde_mm_xor_si128(c2[2166],simde_mm_xor_si128(c2[1206],simde_mm_xor_si128(c2[726],simde_mm_xor_si128(c2[245],simde_mm_xor_si128(c2[6484],simde_mm_xor_si128(c2[3601],simde_mm_xor_si128(c2[293],simde_mm_xor_si128(c2[7492],simde_mm_xor_si128(c2[6532],simde_mm_xor_si128(c2[6052],simde_mm_xor_si128(c2[3650],simde_mm_xor_si128(c2[2690],simde_mm_xor_si128(c2[2210],simde_mm_xor_si128(c2[1736],simde_mm_xor_si128(c2[296],simde_mm_xor_si128(c2[1788],simde_mm_xor_si128(c2[828],simde_mm_xor_si128(c2[348],simde_mm_xor_si128(c2[7063],simde_mm_xor_si128(c2[5623],simde_mm_xor_si128(c2[876],simde_mm_xor_si128(c2[7595],simde_mm_xor_si128(c2[7115],simde_mm_xor_si128(c2[5191],simde_mm_xor_si128(c2[3751],simde_mm_xor_si128(c2[2306],simde_mm_xor_si128(c2[866],simde_mm_xor_si128(c2[2843],simde_mm_xor_si128(c2[2363],simde_mm_xor_si128(c2[1403],simde_mm_xor_si128(c2[923],simde_mm_xor_si128(c2[5712],simde_mm_xor_si128(c2[4752],simde_mm_xor_si128(c2[4272],simde_mm_xor_si128(c2[1404],c2[7643])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 18
+     d2[432]=simde_mm_xor_si128(c2[4811],simde_mm_xor_si128(c2[3168],c2[7058]));
+
+//row: 19
+     d2[456]=simde_mm_xor_si128(c2[4340],simde_mm_xor_si128(c2[2891],simde_mm_xor_si128(c2[1931],simde_mm_xor_si128(c2[4803],simde_mm_xor_si128(c2[5823],simde_mm_xor_si128(c2[6304],simde_mm_xor_si128(c2[1506],simde_mm_xor_si128(c2[4850],simde_mm_xor_si128(c2[596],simde_mm_xor_si128(c2[4916],simde_mm_xor_si128(c2[6397],simde_mm_xor_si128(c2[7355],simde_mm_xor_si128(c2[161],simde_mm_xor_si128(c2[5484],simde_mm_xor_si128(c2[4528],simde_mm_xor_si128(c2[5968],simde_mm_xor_si128(c2[3615],simde_mm_xor_si128(c2[1694],simde_mm_xor_si128(c2[1262],simde_mm_xor_si128(c2[5099],simde_mm_xor_si128(c2[3185],simde_mm_xor_si128(c2[3237],simde_mm_xor_si128(c2[833],simde_mm_xor_si128(c2[2325],simde_mm_xor_si128(c2[6640],simde_mm_xor_si128(c2[3755],simde_mm_xor_si128(c2[3812],simde_mm_xor_si128(c2[7161],c2[2853]))))))))))))))))))))))))))));
+
+//row: 20
+     d2[480]=simde_mm_xor_si128(c2[3380],simde_mm_xor_si128(c2[2900],simde_mm_xor_si128(c2[1451],simde_mm_xor_si128(c2[491],simde_mm_xor_si128(c2[4863],simde_mm_xor_si128(c2[4383],simde_mm_xor_si128(c2[4864],simde_mm_xor_si128(c2[66],simde_mm_xor_si128(c2[5812],simde_mm_xor_si128(c2[7315],simde_mm_xor_si128(c2[6835],simde_mm_xor_si128(c2[3476],simde_mm_xor_si128(c2[5437],simde_mm_xor_si128(c2[4957],simde_mm_xor_si128(c2[5915],simde_mm_xor_si128(c2[6400],simde_mm_xor_si128(c2[4044],simde_mm_xor_si128(c2[3088],simde_mm_xor_si128(c2[4528],simde_mm_xor_si128(c2[6435],simde_mm_xor_si128(c2[2175],simde_mm_xor_si128(c2[254],simde_mm_xor_si128(c2[302],simde_mm_xor_si128(c2[7501],simde_mm_xor_si128(c2[3659],simde_mm_xor_si128(c2[1745],simde_mm_xor_si128(c2[1797],simde_mm_xor_si128(c2[7072],simde_mm_xor_si128(c2[885],simde_mm_xor_si128(c2[5200],simde_mm_xor_si128(c2[2315],simde_mm_xor_si128(c2[2852],simde_mm_xor_si128(c2[2372],simde_mm_xor_si128(c2[5721],c2[1413]))))))))))))))))))))))))))))))))));
+
+//row: 21
+     d2[504]=simde_mm_xor_si128(c2[15],simde_mm_xor_si128(c2[6245],simde_mm_xor_si128(c2[5285],simde_mm_xor_si128(c2[6249],simde_mm_xor_si128(c2[1498],simde_mm_xor_si128(c2[1979],simde_mm_xor_si128(c2[5340],simde_mm_xor_si128(c2[4860],simde_mm_xor_si128(c2[3950],simde_mm_xor_si128(c2[1071],simde_mm_xor_si128(c2[591],simde_mm_xor_si128(c2[2072],simde_mm_xor_si128(c2[3030],simde_mm_xor_si128(c2[3515],simde_mm_xor_si128(c2[1159],simde_mm_xor_si128(c2[203],simde_mm_xor_si128(c2[2123],simde_mm_xor_si128(c2[1643],simde_mm_xor_si128(c2[6969],simde_mm_xor_si128(c2[5528],simde_mm_xor_si128(c2[5048],simde_mm_xor_si128(c2[4616],simde_mm_xor_si128(c2[774],simde_mm_xor_si128(c2[7019],simde_mm_xor_si128(c2[6539],simde_mm_xor_si128(c2[6591],simde_mm_xor_si128(c2[4667],simde_mm_xor_si128(c2[4187],simde_mm_xor_si128(c2[5679],simde_mm_xor_si128(c2[2315],simde_mm_xor_si128(c2[7589],simde_mm_xor_si128(c2[7109],simde_mm_xor_si128(c2[7594],simde_mm_xor_si128(c2[7166],simde_mm_xor_si128(c2[2836],simde_mm_xor_si128(c2[6687],c2[6207]))))))))))))))))))))))))))))))))))));
+
+//row: 22
+     d2[528]=simde_mm_xor_si128(c2[1969],c2[2019]);
+
+//row: 23
+     d2[552]=simde_mm_xor_si128(c2[4806],simde_mm_xor_si128(c2[3029],c2[7445]));
+
+//row: 24
+     d2[576]=simde_mm_xor_si128(c2[2939],simde_mm_xor_si128(c2[4425],c2[436]));
+
+//row: 25
+     d2[600]=simde_mm_xor_si128(c2[6242],c2[2641]);
+
+//row: 26
+     d2[624]=simde_mm_xor_si128(c2[2414],simde_mm_xor_si128(c2[1934],simde_mm_xor_si128(c2[976],simde_mm_xor_si128(c2[965],simde_mm_xor_si128(c2[485],simde_mm_xor_si128(c2[7206],simde_mm_xor_si128(c2[7204],simde_mm_xor_si128(c2[6246],simde_mm_xor_si128(c2[3897],simde_mm_xor_si128(c2[3417],simde_mm_xor_si128(c2[2459],simde_mm_xor_si128(c2[3898],simde_mm_xor_si128(c2[2940],simde_mm_xor_si128(c2[6779],simde_mm_xor_si128(c2[6301],simde_mm_xor_si128(c2[5821],simde_mm_xor_si128(c2[6349],simde_mm_xor_si128(c2[5869],simde_mm_xor_si128(c2[4911],simde_mm_xor_si128(c2[2510],simde_mm_xor_si128(c2[2032],simde_mm_xor_si128(c2[1552],simde_mm_xor_si128(c2[1540],simde_mm_xor_si128(c2[4471],simde_mm_xor_si128(c2[3991],simde_mm_xor_si128(c2[3033],simde_mm_xor_si128(c2[5429],simde_mm_xor_si128(c2[4949],simde_mm_xor_si128(c2[3991],simde_mm_xor_si128(c2[5434],simde_mm_xor_si128(c2[4476],simde_mm_xor_si128(c2[3558],simde_mm_xor_si128(c2[3078],simde_mm_xor_si128(c2[2120],simde_mm_xor_si128(c2[2122],simde_mm_xor_si128(c2[1164],simde_mm_xor_si128(c2[3562],simde_mm_xor_si128(c2[3084],simde_mm_xor_si128(c2[2604],simde_mm_xor_si128(c2[1689],simde_mm_xor_si128(c2[1209],simde_mm_xor_si128(c2[251],simde_mm_xor_si128(c2[6967],simde_mm_xor_si128(c2[6489],simde_mm_xor_si128(c2[6009],simde_mm_xor_si128(c2[7015],simde_mm_xor_si128(c2[6535],simde_mm_xor_si128(c2[5577],simde_mm_xor_si128(c2[3173],simde_mm_xor_si128(c2[2693],simde_mm_xor_si128(c2[1735],simde_mm_xor_si128(c2[779],simde_mm_xor_si128(c2[301],simde_mm_xor_si128(c2[7500],simde_mm_xor_si128(c2[1311],simde_mm_xor_si128(c2[831],simde_mm_xor_si128(c2[7552],simde_mm_xor_si128(c2[6106],simde_mm_xor_si128(c2[5628],simde_mm_xor_si128(c2[5148],simde_mm_xor_si128(c2[4664],simde_mm_xor_si128(c2[399],simde_mm_xor_si128(c2[7598],simde_mm_xor_si128(c2[6640],simde_mm_xor_si128(c2[4234],simde_mm_xor_si128(c2[3276],simde_mm_xor_si128(c2[1349],simde_mm_xor_si128(c2[871],simde_mm_xor_si128(c2[391],simde_mm_xor_si128(c2[1886],simde_mm_xor_si128(c2[1406],simde_mm_xor_si128(c2[448],simde_mm_xor_si128(c2[5235],simde_mm_xor_si128(c2[4755],simde_mm_xor_si128(c2[3797],simde_mm_xor_si128(c2[447],simde_mm_xor_si128(c2[7648],c2[7168])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 27
+     d2[648]=simde_mm_xor_si128(c2[3366],c2[1251]);
+
+//row: 28
+     d2[672]=simde_mm_xor_si128(c2[2932],simde_mm_xor_si128(c2[7302],c2[4090]));
+
+//row: 29
+     d2[696]=simde_mm_xor_si128(c2[6726],c2[673]);
+
+//row: 30
+     d2[720]=simde_mm_xor_si128(c2[3943],simde_mm_xor_si128(c2[5049],simde_mm_xor_si128(c2[2259],c2[4275])));
+
+//row: 31
+     d2[744]=simde_mm_xor_si128(c2[3381],simde_mm_xor_si128(c2[1932],simde_mm_xor_si128(c2[972],simde_mm_xor_si128(c2[4864],simde_mm_xor_si128(c2[5345],simde_mm_xor_si128(c2[1027],simde_mm_xor_si128(c2[547],simde_mm_xor_si128(c2[1488],simde_mm_xor_si128(c2[7316],simde_mm_xor_si128(c2[4437],simde_mm_xor_si128(c2[3957],simde_mm_xor_si128(c2[5438],simde_mm_xor_si128(c2[6396],simde_mm_xor_si128(c2[6881],simde_mm_xor_si128(c2[4525],simde_mm_xor_si128(c2[3569],simde_mm_xor_si128(c2[5489],simde_mm_xor_si128(c2[5009],simde_mm_xor_si128(c2[2656],simde_mm_xor_si128(c2[1215],simde_mm_xor_si128(c2[735],simde_mm_xor_si128(c2[303],simde_mm_xor_si128(c2[4140],simde_mm_xor_si128(c2[2706],simde_mm_xor_si128(c2[2226],simde_mm_xor_si128(c2[2278],simde_mm_xor_si128(c2[354],simde_mm_xor_si128(c2[7553],simde_mm_xor_si128(c2[1366],simde_mm_xor_si128(c2[5681],simde_mm_xor_si128(c2[3276],simde_mm_xor_si128(c2[2796],simde_mm_xor_si128(c2[2853],simde_mm_xor_si128(c2[6202],simde_mm_xor_si128(c2[2374],c2[1894])))))))))))))))))))))))))))))))))));
+
+//row: 32
+     d2[768]=simde_mm_xor_si128(c2[7211],simde_mm_xor_si128(c2[6731],simde_mm_xor_si128(c2[5762],simde_mm_xor_si128(c2[5282],simde_mm_xor_si128(c2[4322],simde_mm_xor_si128(c2[1925],simde_mm_xor_si128(c2[1015],simde_mm_xor_si128(c2[535],simde_mm_xor_si128(c2[1016],simde_mm_xor_si128(c2[3897],simde_mm_xor_si128(c2[3467],simde_mm_xor_si128(c2[2987],simde_mm_xor_si128(c2[7307],simde_mm_xor_si128(c2[1589],simde_mm_xor_si128(c2[1109],simde_mm_xor_si128(c2[2547],simde_mm_xor_si128(c2[2067],simde_mm_xor_si128(c2[2552],simde_mm_xor_si128(c2[676],simde_mm_xor_si128(c2[196],simde_mm_xor_si128(c2[6919],simde_mm_xor_si128(c2[680],simde_mm_xor_si128(c2[6486],simde_mm_xor_si128(c2[6006],simde_mm_xor_si128(c2[4085],simde_mm_xor_si128(c2[4080],simde_mm_xor_si128(c2[4133],simde_mm_xor_si128(c2[3653],simde_mm_xor_si128(c2[291],simde_mm_xor_si128(c2[7490],simde_mm_xor_si128(c2[5576],simde_mm_xor_si128(c2[6108],simde_mm_xor_si128(c2[5628],simde_mm_xor_si128(c2[3224],simde_mm_xor_si128(c2[5196],simde_mm_xor_si128(c2[4716],simde_mm_xor_si128(c2[1352],simde_mm_xor_si128(c2[6146],simde_mm_xor_si128(c2[6683],simde_mm_xor_si128(c2[6203],simde_mm_xor_si128(c2[2353],simde_mm_xor_si128(c2[1873],c2[5244]))))))))))))))))))))))))))))))))))))))))));
+
+//row: 33
+     d2[792]=simde_mm_xor_si128(c2[4338],simde_mm_xor_si128(c2[2889],simde_mm_xor_si128(c2[1929],simde_mm_xor_si128(c2[5821],simde_mm_xor_si128(c2[6302],simde_mm_xor_si128(c2[1504],simde_mm_xor_si128(c2[594],simde_mm_xor_si128(c2[4914],simde_mm_xor_si128(c2[2506],simde_mm_xor_si128(c2[6395],simde_mm_xor_si128(c2[7353],simde_mm_xor_si128(c2[159],simde_mm_xor_si128(c2[5482],simde_mm_xor_si128(c2[4526],simde_mm_xor_si128(c2[5966],simde_mm_xor_si128(c2[3613],simde_mm_xor_si128(c2[1692],simde_mm_xor_si128(c2[1260],simde_mm_xor_si128(c2[5097],simde_mm_xor_si128(c2[3183],simde_mm_xor_si128(c2[3235],simde_mm_xor_si128(c2[831],simde_mm_xor_si128(c2[1787],simde_mm_xor_si128(c2[2323],simde_mm_xor_si128(c2[6638],simde_mm_xor_si128(c2[3753],simde_mm_xor_si128(c2[3810],simde_mm_xor_si128(c2[7159],c2[2851]))))))))))))))))))))))))))));
+
+//row: 34
+     d2[816]=simde_mm_xor_si128(c2[7221],simde_mm_xor_si128(c2[6741],simde_mm_xor_si128(c2[4331],simde_mm_xor_si128(c2[5772],simde_mm_xor_si128(c2[5292],simde_mm_xor_si128(c2[2882],simde_mm_xor_si128(c2[4332],simde_mm_xor_si128(c2[1922],simde_mm_xor_si128(c2[6250],simde_mm_xor_si128(c2[1025],simde_mm_xor_si128(c2[545],simde_mm_xor_si128(c2[5814],simde_mm_xor_si128(c2[1026],simde_mm_xor_si128(c2[6295],simde_mm_xor_si128(c2[3907],simde_mm_xor_si128(c2[1977],simde_mm_xor_si128(c2[1497],simde_mm_xor_si128(c2[3477],simde_mm_xor_si128(c2[2997],simde_mm_xor_si128(c2[587],simde_mm_xor_si128(c2[7317],simde_mm_xor_si128(c2[5387],simde_mm_xor_si128(c2[4907],simde_mm_xor_si128(c2[1599],simde_mm_xor_si128(c2[1119],simde_mm_xor_si128(c2[6388],simde_mm_xor_si128(c2[2557],simde_mm_xor_si128(c2[2077],simde_mm_xor_si128(c2[7346],simde_mm_xor_si128(c2[2562],simde_mm_xor_si128(c2[152],simde_mm_xor_si128(c2[686],simde_mm_xor_si128(c2[206],simde_mm_xor_si128(c2[5475],simde_mm_xor_si128(c2[6929],simde_mm_xor_si128(c2[4519],simde_mm_xor_si128(c2[690],simde_mm_xor_si128(c2[6439],simde_mm_xor_si128(c2[5959],simde_mm_xor_si128(c2[6496],simde_mm_xor_si128(c2[6016],simde_mm_xor_si128(c2[3606],simde_mm_xor_si128(c2[4095],simde_mm_xor_si128(c2[2165],simde_mm_xor_si128(c2[1685],simde_mm_xor_si128(c2[4143],simde_mm_xor_si128(c2[3663],simde_mm_xor_si128(c2[1253],simde_mm_xor_si128(c2[301],simde_mm_xor_si128(c2[7500],simde_mm_xor_si128(c2[5090],simde_mm_xor_si128(c2[5586],simde_mm_xor_si128(c2[3656],simde_mm_xor_si128(c2[3176],simde_mm_xor_si128(c2[6118],simde_mm_xor_si128(c2[5638],simde_mm_xor_si128(c2[3228],simde_mm_xor_si128(c2[3234],simde_mm_xor_si128(c2[1304],simde_mm_xor_si128(c2[824],simde_mm_xor_si128(c2[5206],simde_mm_xor_si128(c2[4726],simde_mm_xor_si128(c2[2316],simde_mm_xor_si128(c2[1362],simde_mm_xor_si128(c2[6631],simde_mm_xor_si128(c2[6156],simde_mm_xor_si128(c2[4226],simde_mm_xor_si128(c2[3746],simde_mm_xor_si128(c2[6693],simde_mm_xor_si128(c2[6213],simde_mm_xor_si128(c2[3803],simde_mm_xor_si128(c2[2363],simde_mm_xor_si128(c2[1883],simde_mm_xor_si128(c2[7152],simde_mm_xor_si128(c2[5254],simde_mm_xor_si128(c2[3324],c2[2844]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 35
+     d2[840]=simde_mm_xor_si128(c2[12],simde_mm_xor_si128(c2[7211],simde_mm_xor_si128(c2[5762],simde_mm_xor_si128(c2[4802],simde_mm_xor_si128(c2[1495],simde_mm_xor_si128(c2[1015],simde_mm_xor_si128(c2[1496],simde_mm_xor_si128(c2[4377],simde_mm_xor_si128(c2[6292],simde_mm_xor_si128(c2[3947],simde_mm_xor_si128(c2[3467],simde_mm_xor_si128(c2[108],simde_mm_xor_si128(c2[2069],simde_mm_xor_si128(c2[1589],simde_mm_xor_si128(c2[2547],simde_mm_xor_si128(c2[3032],simde_mm_xor_si128(c2[676],simde_mm_xor_si128(c2[7399],simde_mm_xor_si128(c2[1160],simde_mm_xor_si128(c2[6486],simde_mm_xor_si128(c2[4565],simde_mm_xor_si128(c2[4091],simde_mm_xor_si128(c2[4613],simde_mm_xor_si128(c2[4133],simde_mm_xor_si128(c2[291],simde_mm_xor_si128(c2[6056],simde_mm_xor_si128(c2[6108],simde_mm_xor_si128(c2[3704],simde_mm_xor_si128(c2[5196],simde_mm_xor_si128(c2[1832],simde_mm_xor_si128(c2[6626],simde_mm_xor_si128(c2[7163],simde_mm_xor_si128(c2[6683],simde_mm_xor_si128(c2[2353],c2[5724]))))))))))))))))))))))))))))))))));
+
+//row: 36
+     d2[864]=simde_mm_xor_si128(c2[4321],simde_mm_xor_si128(c2[3465],c2[5146]));
+
+//row: 37
+     d2[888]=simde_mm_xor_si128(c2[973],simde_mm_xor_si128(c2[5772],simde_mm_xor_si128(c2[7203],simde_mm_xor_si128(c2[4323],simde_mm_xor_si128(c2[6243],simde_mm_xor_si128(c2[3363],simde_mm_xor_si128(c2[2456],simde_mm_xor_si128(c2[7255],simde_mm_xor_si128(c2[2937],simde_mm_xor_si128(c2[57],simde_mm_xor_si128(c2[5818],simde_mm_xor_si128(c2[3418],simde_mm_xor_si128(c2[2938],simde_mm_xor_si128(c2[4908],simde_mm_xor_si128(c2[2028],simde_mm_xor_si128(c2[1549],simde_mm_xor_si128(c2[6828],simde_mm_xor_si128(c2[6348],simde_mm_xor_si128(c2[3030],simde_mm_xor_si128(c2[150],simde_mm_xor_si128(c2[3988],simde_mm_xor_si128(c2[1108],simde_mm_xor_si128(c2[4473],simde_mm_xor_si128(c2[1593],simde_mm_xor_si128(c2[2117],simde_mm_xor_si128(c2[6916],simde_mm_xor_si128(c2[1161],simde_mm_xor_si128(c2[5960],simde_mm_xor_si128(c2[2601],simde_mm_xor_si128(c2[201],simde_mm_xor_si128(c2[7400],simde_mm_xor_si128(c2[248],simde_mm_xor_si128(c2[5047],simde_mm_xor_si128(c2[6006],simde_mm_xor_si128(c2[3606],simde_mm_xor_si128(c2[3126],simde_mm_xor_si128(c2[5574],simde_mm_xor_si128(c2[2694],simde_mm_xor_si128(c2[1732],simde_mm_xor_si128(c2[6531],simde_mm_xor_si128(c2[7497],simde_mm_xor_si128(c2[5097],simde_mm_xor_si128(c2[4617],simde_mm_xor_si128(c2[7549],simde_mm_xor_si128(c2[4669],simde_mm_xor_si128(c2[5145],simde_mm_xor_si128(c2[2745],simde_mm_xor_si128(c2[2265],simde_mm_xor_si128(c2[6637],simde_mm_xor_si128(c2[3757],simde_mm_xor_si128(c2[3273],simde_mm_xor_si128(c2[393],simde_mm_xor_si128(c2[388],simde_mm_xor_si128(c2[5667],simde_mm_xor_si128(c2[5187],simde_mm_xor_si128(c2[445],simde_mm_xor_si128(c2[5244],simde_mm_xor_si128(c2[3794],simde_mm_xor_si128(c2[914],simde_mm_xor_si128(c2[7165],simde_mm_xor_si128(c2[4765],c2[4285])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 38
+     d2[912]=simde_mm_xor_si128(c2[5782],simde_mm_xor_si128(c2[5302],simde_mm_xor_si128(c2[3853],simde_mm_xor_si128(c2[2893],simde_mm_xor_si128(c2[7265],simde_mm_xor_si128(c2[6785],simde_mm_xor_si128(c2[7266],simde_mm_xor_si128(c2[2468],simde_mm_xor_si128(c2[1973],simde_mm_xor_si128(c2[2038],simde_mm_xor_si128(c2[1558],simde_mm_xor_si128(c2[5878],simde_mm_xor_si128(c2[160],simde_mm_xor_si128(c2[7359],simde_mm_xor_si128(c2[638],simde_mm_xor_si128(c2[1123],simde_mm_xor_si128(c2[6446],simde_mm_xor_si128(c2[5490],simde_mm_xor_si128(c2[6930],simde_mm_xor_si128(c2[4577],simde_mm_xor_si128(c2[2656],simde_mm_xor_si128(c2[3609],simde_mm_xor_si128(c2[2704],simde_mm_xor_si128(c2[2224],simde_mm_xor_si128(c2[6061],simde_mm_xor_si128(c2[4147],simde_mm_xor_si128(c2[4199],simde_mm_xor_si128(c2[1795],simde_mm_xor_si128(c2[3287],simde_mm_xor_si128(c2[7602],simde_mm_xor_si128(c2[4717],simde_mm_xor_si128(c2[5254],simde_mm_xor_si128(c2[4774],simde_mm_xor_si128(c2[444],c2[3815]))))))))))))))))))))))))))))))))));
+
+//row: 39
+     d2[936]=simde_mm_xor_si128(c2[3374],simde_mm_xor_si128(c2[2894],simde_mm_xor_si128(c2[1925],simde_mm_xor_si128(c2[1445],simde_mm_xor_si128(c2[485],simde_mm_xor_si128(c2[6245],simde_mm_xor_si128(c2[4857],simde_mm_xor_si128(c2[4377],simde_mm_xor_si128(c2[4858],simde_mm_xor_si128(c2[60],simde_mm_xor_si128(c2[7309],simde_mm_xor_si128(c2[6829],simde_mm_xor_si128(c2[3470],simde_mm_xor_si128(c2[5431],simde_mm_xor_si128(c2[4951],simde_mm_xor_si128(c2[6389],simde_mm_xor_si128(c2[5909],simde_mm_xor_si128(c2[6394],simde_mm_xor_si128(c2[4518],simde_mm_xor_si128(c2[4038],simde_mm_xor_si128(c2[3082],simde_mm_xor_si128(c2[4522],simde_mm_xor_si128(c2[2649],simde_mm_xor_si128(c2[2169],simde_mm_xor_si128(c2[248],simde_mm_xor_si128(c2[296],simde_mm_xor_si128(c2[7495],simde_mm_xor_si128(c2[4133],simde_mm_xor_si128(c2[3653],simde_mm_xor_si128(c2[1739],simde_mm_xor_si128(c2[2271],simde_mm_xor_si128(c2[1791],simde_mm_xor_si128(c2[7066],simde_mm_xor_si128(c2[2264],simde_mm_xor_si128(c2[1359],simde_mm_xor_si128(c2[879],simde_mm_xor_si128(c2[5194],simde_mm_xor_si128(c2[2309],simde_mm_xor_si128(c2[2846],simde_mm_xor_si128(c2[2366],simde_mm_xor_si128(c2[6195],simde_mm_xor_si128(c2[5715],c2[1407]))))))))))))))))))))))))))))))))))))))))));
+
+//row: 40
+     d2[960]=simde_mm_xor_si128(c2[3857],simde_mm_xor_si128(c2[21],simde_mm_xor_si128(c2[2408],simde_mm_xor_si128(c2[6251],simde_mm_xor_si128(c2[1448],simde_mm_xor_si128(c2[5291],simde_mm_xor_si128(c2[5340],simde_mm_xor_si128(c2[1504],simde_mm_xor_si128(c2[5821],simde_mm_xor_si128(c2[1985],simde_mm_xor_si128(c2[1023],simde_mm_xor_si128(c2[5346],simde_mm_xor_si128(c2[4866],simde_mm_xor_si128(c2[113],simde_mm_xor_si128(c2[3956],simde_mm_xor_si128(c2[4433],simde_mm_xor_si128(c2[1077],simde_mm_xor_si128(c2[597],simde_mm_xor_si128(c2[3462],simde_mm_xor_si128(c2[5914],simde_mm_xor_si128(c2[2078],simde_mm_xor_si128(c2[6872],simde_mm_xor_si128(c2[3036],simde_mm_xor_si128(c2[7357],simde_mm_xor_si128(c2[3521],simde_mm_xor_si128(c2[5001],simde_mm_xor_si128(c2[1165],simde_mm_xor_si128(c2[4045],simde_mm_xor_si128(c2[209],simde_mm_xor_si128(c2[5485],simde_mm_xor_si128(c2[2129],simde_mm_xor_si128(c2[1649],simde_mm_xor_si128(c2[3132],simde_mm_xor_si128(c2[6975],simde_mm_xor_si128(c2[1211],simde_mm_xor_si128(c2[5534],simde_mm_xor_si128(c2[5054],simde_mm_xor_si128(c2[779],simde_mm_xor_si128(c2[4622],simde_mm_xor_si128(c2[4616],simde_mm_xor_si128(c2[780],simde_mm_xor_si128(c2[2702],simde_mm_xor_si128(c2[7025],simde_mm_xor_si128(c2[6545],simde_mm_xor_si128(c2[2754],simde_mm_xor_si128(c2[6597],simde_mm_xor_si128(c2[350],simde_mm_xor_si128(c2[4673],simde_mm_xor_si128(c2[4193],simde_mm_xor_si128(c2[1842],simde_mm_xor_si128(c2[5685],simde_mm_xor_si128(c2[6157],simde_mm_xor_si128(c2[2321],simde_mm_xor_si128(c2[3272],simde_mm_xor_si128(c2[7595],simde_mm_xor_si128(c2[7115],simde_mm_xor_si128(c2[3329],simde_mm_xor_si128(c2[7172],simde_mm_xor_si128(c2[6678],simde_mm_xor_si128(c2[2842],simde_mm_xor_si128(c2[2370],simde_mm_xor_si128(c2[6693],c2[6213]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 41
+     d2[984]=simde_mm_xor_si128(c2[4814],simde_mm_xor_si128(c2[4334],simde_mm_xor_si128(c2[2885],simde_mm_xor_si128(c2[1925],simde_mm_xor_si128(c2[6297],simde_mm_xor_si128(c2[5817],simde_mm_xor_si128(c2[6298],simde_mm_xor_si128(c2[1500],simde_mm_xor_si128(c2[1497],simde_mm_xor_si128(c2[1070],simde_mm_xor_si128(c2[590],simde_mm_xor_si128(c2[4910],simde_mm_xor_si128(c2[6871],simde_mm_xor_si128(c2[6391],simde_mm_xor_si128(c2[7349],simde_mm_xor_si128(c2[155],simde_mm_xor_si128(c2[5478],simde_mm_xor_si128(c2[4522],simde_mm_xor_si128(c2[5962],simde_mm_xor_si128(c2[3609],simde_mm_xor_si128(c2[1688],simde_mm_xor_si128(c2[3600],simde_mm_xor_si128(c2[1736],simde_mm_xor_si128(c2[1256],simde_mm_xor_si128(c2[5093],simde_mm_xor_si128(c2[3179],simde_mm_xor_si128(c2[3231],simde_mm_xor_si128(c2[827],simde_mm_xor_si128(c2[2319],simde_mm_xor_si128(c2[6634],simde_mm_xor_si128(c2[3749],simde_mm_xor_si128(c2[4286],simde_mm_xor_si128(c2[3806],simde_mm_xor_si128(c2[7155],c2[2847]))))))))))))))))))))))))))))))))));
+  }
+}
+#endif
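
For orientation: every generated "row" above computes one parity block as the XOR of a fixed set of circularly-indexed input blocks, d2[k] = c2[i0] ^ c2[i1] ^ ..., with the index tables baked in at code-generation time. Because the arithmetic goes through SIMDE, the same C source lowers to SSE2 on x86 and to NEON on aarch64. A minimal standalone sketch of the pattern (indices invented, not taken from any table in the patch):

    #include <stdint.h>
    #include <stdio.h>
    #include <simde/x86/sse2.h>  /* the patch reaches SIMDE via PHY/sse_intrin.h */

    /* Same shape as one generated row: d = c[block 0] ^ c[block 2] ^ c[block 3]. */
    static void xor_row_demo(const uint8_t *c, uint8_t *d) {
      simde__m128i b0 = simde_mm_loadu_si128((const simde__m128i *)(c + 0 * 16));
      simde__m128i b2 = simde_mm_loadu_si128((const simde__m128i *)(c + 2 * 16));
      simde__m128i b3 = simde_mm_loadu_si128((const simde__m128i *)(c + 3 * 16));
      simde_mm_storeu_si128((simde__m128i *)d,
                            simde_mm_xor_si128(b0, simde_mm_xor_si128(b2, b3)));
    }

    int main(void) {
      uint8_t c[64] = {0}, d[16];
      c[0] = 0xAA; c[32] = 0x0F; c[48] = 0xF0;  /* one byte each in blocks 0, 2, 3 */
      xor_row_demo(c, d);
      printf("0x%02x\n", d[0]);                 /* 0x55 == 0xAA ^ 0x0F ^ 0xF0 */
      return 0;
    }

The generated files cast the byte buffers to vector pointers and dereference them directly instead of using unaligned loads, which is why they carry the no_sanitize("address", "undefined") attribute.
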
diff --git a/openair1/PHY/CODING/nrLDPC_encoder/ldpc_BG2_Zc64_byte.c b/openair1/PHY/CODING/nrLDPC_encoder/ldpc_BG2_Zc64_byte.c
index 0ae044b3a2b06930e3805438386a08e336bd2b87..af5c1b42b2a51852622ae0954b4e1922938e6604 100644
--- a/openair1/PHY/CODING/nrLDPC_encoder/ldpc_BG2_Zc64_byte.c
+++ b/openair1/PHY/CODING/nrLDPC_encoder/ldpc_BG2_Zc64_byte.c
@@ -1,9 +1,9 @@
+#ifdef __AVX2__
 #include "PHY/sse_intrin.h"
 // generated code for Zc=64, byte encoding
 static inline __attribute__ ((no_sanitize("address", "undefined"))) void ldpc_BG2_Zc64_byte(uint8_t *c,uint8_t *d) {
-  __m256i *csimd=(__m256i *)c,*dsimd=(__m256i *)d;
-
-  __m256i *c2,*d2;
+  simde__m256i *csimd=(simde__m256i *)c,*dsimd=(simde__m256i *)d;
+  simde__m256i *c2,*d2;
 
   int i2;
   for (i2=0; i2<2; i2++) {
@@ -137,3 +137,4 @@ static inline __attribute__ ((no_sanitize("address", "undefined"))) void ldpc_BG
      d2[82]=simde_mm256_xor_si256(c2[993],simde_mm256_xor_si256(c2[961],simde_mm256_xor_si256(c2[896],simde_mm256_xor_si256(c2[192],simde_mm256_xor_si256(c2[357],simde_mm256_xor_si256(c2[325],simde_mm256_xor_si256(c2[229],simde_mm256_xor_si256(c2[933],simde_mm256_xor_si256(c2[36],simde_mm256_xor_si256(c2[72],simde_mm256_xor_si256(c2[40],simde_mm256_xor_si256(c2[489],simde_mm256_xor_si256(c2[524],simde_mm256_xor_si256(c2[492],simde_mm256_xor_si256(c2[876],simde_mm256_xor_si256(c2[45],simde_mm256_xor_si256(c2[593],simde_mm256_xor_si256(c2[305],simde_mm256_xor_si256(c2[624],simde_mm256_xor_si256(c2[597],simde_mm256_xor_si256(c2[948],simde_mm256_xor_si256(c2[181],simde_mm256_xor_si256(c2[633],simde_mm256_xor_si256(c2[601],simde_mm256_xor_si256(c2[760],simde_mm256_xor_si256(c2[216],simde_mm256_xor_si256(c2[572],simde_mm256_xor_si256(c2[381],simde_mm256_xor_si256(c2[704],simde_mm256_xor_si256(c2[193],simde_mm256_xor_si256(c2[256],simde_mm256_xor_si256(c2[132],simde_mm256_xor_si256(c2[100],simde_mm256_xor_si256(c2[581],c2[709]))))))))))))))))))))))))))))))))));
   }
 }
+#endif
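
Note on the guard just added: ldpc_BG2_Zc64_byte.c now compiles only when the host defines __AVX2__, and the new ldpc_BG2_Zc64_byte_128.c introduced next supplies the same ldpc_BG2_Zc64_byte() entry point with 128-bit vectors for non-AVX2 targets (e.g. NEON via SIMDE). Where exactly one of the two definitions gets picked is not visible in this hunk; a hypothetical sketch of such a selection, for illustration only:

    /* Invented wrapper -- the file names match the patch, but the patch
     * does not show where this choice is actually made (likely the
     * including translation unit or the build system). */
    #ifdef __AVX2__
    #include "ldpc_BG2_Zc64_byte.c"      /* simde__m256i, i2 < 2: two 32-byte passes */
    #else
    #include "ldpc_BG2_Zc64_byte_128.c"  /* simde__m128i, i2 < 4: four 16-byte passes */
    #endif

Either way the full Zc = 64-byte lane is swept; only the chunk width differs.
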
diff --git a/openair1/PHY/CODING/nrLDPC_encoder/ldpc_BG2_Zc64_byte_128.c b/openair1/PHY/CODING/nrLDPC_encoder/ldpc_BG2_Zc64_byte_128.c
new file mode 100644
index 0000000000000000000000000000000000000000..3c0bda47680f492cc38a24cfe5fbe89898472053
--- /dev/null
+++ b/openair1/PHY/CODING/nrLDPC_encoder/ldpc_BG2_Zc64_byte_128.c
@@ -0,0 +1,139 @@
+#include "PHY/sse_intrin.h"
+// generated code for Zc=64, byte encoding
+static inline __attribute__ ((no_sanitize("address", "undefined"))) void ldpc_BG2_Zc64_byte(uint8_t *c,uint8_t *d) {
+  simde__m128i *csimd=(simde__m128i *)c,*dsimd=(simde__m128i *)d;
+
+  simde__m128i *c2,*d2;
+
+  int i2;
+  for (i2=0; i2<4; i2++) {
+     c2=&csimd[i2];
+     d2=&dsimd[i2];
+
+//row: 0
+     d2[0]=simde_mm_xor_si128(c2[640],simde_mm_xor_si128(c2[482],simde_mm_xor_si128(c2[1],simde_mm_xor_si128(c2[331],simde_mm_xor_si128(c2[91],simde_mm_xor_si128(c2[568],simde_mm_xor_si128(c2[896],simde_mm_xor_si128(c2[739],simde_mm_xor_si128(c2[745],simde_mm_xor_si128(c2[426],simde_mm_xor_si128(c2[906],simde_mm_xor_si128(c2[995],simde_mm_xor_si128(c2[275],simde_mm_xor_si128(c2[1073],simde_mm_xor_si128(c2[1003],simde_mm_xor_si128(c2[602],simde_mm_xor_si128(c2[1011],simde_mm_xor_si128(c2[130],simde_mm_xor_si128(c2[49],simde_mm_xor_si128(c2[937],simde_mm_xor_si128(c2[459],simde_mm_xor_si128(c2[1265],simde_mm_xor_si128(c2[1266],simde_mm_xor_si128(c2[145],simde_mm_xor_si128(c2[1032],simde_mm_xor_si128(c2[955],c2[1275]))))))))))))))))))))))))));
+
+//row: 1
+     d2[4]=simde_mm_xor_si128(c2[720],simde_mm_xor_si128(c2[640],simde_mm_xor_si128(c2[482],simde_mm_xor_si128(c2[1],simde_mm_xor_si128(c2[411],simde_mm_xor_si128(c2[331],simde_mm_xor_si128(c2[91],simde_mm_xor_si128(c2[568],simde_mm_xor_si128(c2[976],simde_mm_xor_si128(c2[896],simde_mm_xor_si128(c2[739],simde_mm_xor_si128(c2[825],simde_mm_xor_si128(c2[745],simde_mm_xor_si128(c2[426],simde_mm_xor_si128(c2[906],simde_mm_xor_si128(c2[995],simde_mm_xor_si128(c2[275],simde_mm_xor_si128(c2[1073],simde_mm_xor_si128(c2[1003],simde_mm_xor_si128(c2[602],simde_mm_xor_si128(c2[1091],simde_mm_xor_si128(c2[1011],simde_mm_xor_si128(c2[130],simde_mm_xor_si128(c2[49],simde_mm_xor_si128(c2[937],simde_mm_xor_si128(c2[459],simde_mm_xor_si128(c2[1265],simde_mm_xor_si128(c2[1266],simde_mm_xor_si128(c2[145],simde_mm_xor_si128(c2[1112],simde_mm_xor_si128(c2[1032],simde_mm_xor_si128(c2[955],c2[1275]))))))))))))))))))))))))))))))));
+
+//row: 2
+     d2[8]=simde_mm_xor_si128(c2[720],simde_mm_xor_si128(c2[640],simde_mm_xor_si128(c2[562],simde_mm_xor_si128(c2[482],simde_mm_xor_si128(c2[1],simde_mm_xor_si128(c2[411],simde_mm_xor_si128(c2[331],simde_mm_xor_si128(c2[91],simde_mm_xor_si128(c2[568],simde_mm_xor_si128(c2[976],simde_mm_xor_si128(c2[896],simde_mm_xor_si128(c2[739],simde_mm_xor_si128(c2[825],simde_mm_xor_si128(c2[745],simde_mm_xor_si128(c2[506],simde_mm_xor_si128(c2[426],simde_mm_xor_si128(c2[906],simde_mm_xor_si128(c2[1075],simde_mm_xor_si128(c2[995],simde_mm_xor_si128(c2[275],simde_mm_xor_si128(c2[1073],simde_mm_xor_si128(c2[1083],simde_mm_xor_si128(c2[1003],simde_mm_xor_si128(c2[602],simde_mm_xor_si128(c2[1091],simde_mm_xor_si128(c2[1011],simde_mm_xor_si128(c2[210],simde_mm_xor_si128(c2[130],simde_mm_xor_si128(c2[49],simde_mm_xor_si128(c2[1017],simde_mm_xor_si128(c2[937],simde_mm_xor_si128(c2[459],simde_mm_xor_si128(c2[66],simde_mm_xor_si128(c2[1265],simde_mm_xor_si128(c2[1266],simde_mm_xor_si128(c2[145],simde_mm_xor_si128(c2[1112],simde_mm_xor_si128(c2[1032],simde_mm_xor_si128(c2[1035],simde_mm_xor_si128(c2[955],c2[1275]))))))))))))))))))))))))))))))))))))))));
+
+//row: 3
+     d2[12]=simde_mm_xor_si128(c2[640],simde_mm_xor_si128(c2[482],simde_mm_xor_si128(c2[1],simde_mm_xor_si128(c2[331],simde_mm_xor_si128(c2[91],simde_mm_xor_si128(c2[648],simde_mm_xor_si128(c2[568],simde_mm_xor_si128(c2[896],simde_mm_xor_si128(c2[819],simde_mm_xor_si128(c2[739],simde_mm_xor_si128(c2[745],simde_mm_xor_si128(c2[426],simde_mm_xor_si128(c2[906],simde_mm_xor_si128(c2[995],simde_mm_xor_si128(c2[275],simde_mm_xor_si128(c2[1153],simde_mm_xor_si128(c2[1073],simde_mm_xor_si128(c2[1003],simde_mm_xor_si128(c2[682],simde_mm_xor_si128(c2[602],simde_mm_xor_si128(c2[1011],simde_mm_xor_si128(c2[130],simde_mm_xor_si128(c2[129],simde_mm_xor_si128(c2[49],simde_mm_xor_si128(c2[937],simde_mm_xor_si128(c2[539],simde_mm_xor_si128(c2[459],simde_mm_xor_si128(c2[1265],simde_mm_xor_si128(c2[1266],simde_mm_xor_si128(c2[225],simde_mm_xor_si128(c2[145],simde_mm_xor_si128(c2[1032],simde_mm_xor_si128(c2[955],simde_mm_xor_si128(c2[72],c2[1275]))))))))))))))))))))))))))))))))));
+
+//row: 4
+     d2[16]=simde_mm_xor_si128(c2[1],simde_mm_xor_si128(c2[1200],simde_mm_xor_si128(c2[1042],simde_mm_xor_si128(c2[561],simde_mm_xor_si128(c2[243],simde_mm_xor_si128(c2[971],simde_mm_xor_si128(c2[891],simde_mm_xor_si128(c2[651],simde_mm_xor_si128(c2[1128],simde_mm_xor_si128(c2[489],simde_mm_xor_si128(c2[257],simde_mm_xor_si128(c2[177],simde_mm_xor_si128(c2[16],simde_mm_xor_si128(c2[106],simde_mm_xor_si128(c2[26],simde_mm_xor_si128(c2[986],simde_mm_xor_si128(c2[187],simde_mm_xor_si128(c2[272],simde_mm_xor_si128(c2[835],simde_mm_xor_si128(c2[354],simde_mm_xor_si128(c2[280],simde_mm_xor_si128(c2[1162],simde_mm_xor_si128(c2[368],simde_mm_xor_si128(c2[288],simde_mm_xor_si128(c2[690],simde_mm_xor_si128(c2[609],simde_mm_xor_si128(c2[218],simde_mm_xor_si128(c2[1019],simde_mm_xor_si128(c2[546],simde_mm_xor_si128(c2[547],simde_mm_xor_si128(c2[705],simde_mm_xor_si128(c2[393],simde_mm_xor_si128(c2[313],simde_mm_xor_si128(c2[232],c2[552]))))))))))))))))))))))))))))))))));
+
+//row: 5
+     d2[20]=simde_mm_xor_si128(c2[3],simde_mm_xor_si128(c2[1202],simde_mm_xor_si128(c2[1040],simde_mm_xor_si128(c2[563],simde_mm_xor_si128(c2[562],simde_mm_xor_si128(c2[969],simde_mm_xor_si128(c2[889],simde_mm_xor_si128(c2[649],simde_mm_xor_si128(c2[1130],simde_mm_xor_si128(c2[730],simde_mm_xor_si128(c2[259],simde_mm_xor_si128(c2[179],simde_mm_xor_si128(c2[18],simde_mm_xor_si128(c2[104],simde_mm_xor_si128(c2[24],simde_mm_xor_si128(c2[984],simde_mm_xor_si128(c2[185],simde_mm_xor_si128(c2[274],simde_mm_xor_si128(c2[833],simde_mm_xor_si128(c2[352],simde_mm_xor_si128(c2[282],simde_mm_xor_si128(c2[1160],simde_mm_xor_si128(c2[200],simde_mm_xor_si128(c2[370],simde_mm_xor_si128(c2[290],simde_mm_xor_si128(c2[688],simde_mm_xor_si128(c2[611],simde_mm_xor_si128(c2[216],simde_mm_xor_si128(c2[1017],simde_mm_xor_si128(c2[1257],simde_mm_xor_si128(c2[544],simde_mm_xor_si128(c2[545],simde_mm_xor_si128(c2[707],simde_mm_xor_si128(c2[395],simde_mm_xor_si128(c2[315],simde_mm_xor_si128(c2[234],c2[554]))))))))))))))))))))))))))))))))))));
+
+//row: 6
+     d2[24]=simde_mm_xor_si128(c2[562],simde_mm_xor_si128(c2[482],simde_mm_xor_si128(c2[320],simde_mm_xor_si128(c2[1122],simde_mm_xor_si128(c2[881],simde_mm_xor_si128(c2[249],simde_mm_xor_si128(c2[169],simde_mm_xor_si128(c2[1208],simde_mm_xor_si128(c2[410],simde_mm_xor_si128(c2[818],simde_mm_xor_si128(c2[738],simde_mm_xor_si128(c2[577],simde_mm_xor_si128(c2[667],simde_mm_xor_si128(c2[587],simde_mm_xor_si128(c2[264],simde_mm_xor_si128(c2[744],simde_mm_xor_si128(c2[833],simde_mm_xor_si128(c2[113],simde_mm_xor_si128(c2[915],simde_mm_xor_si128(c2[841],simde_mm_xor_si128(c2[440],simde_mm_xor_si128(c2[362],simde_mm_xor_si128(c2[929],simde_mm_xor_si128(c2[849],simde_mm_xor_si128(c2[1251],simde_mm_xor_si128(c2[1170],simde_mm_xor_si128(c2[779],simde_mm_xor_si128(c2[297],simde_mm_xor_si128(c2[1098],simde_mm_xor_si128(c2[1107],simde_mm_xor_si128(c2[1104],simde_mm_xor_si128(c2[1266],simde_mm_xor_si128(c2[954],simde_mm_xor_si128(c2[874],simde_mm_xor_si128(c2[793],simde_mm_xor_si128(c2[1113],c2[1033]))))))))))))))))))))))))))))))))))));
+
+//row: 7
+     d2[28]=simde_mm_xor_si128(c2[960],simde_mm_xor_si128(c2[880],simde_mm_xor_si128(c2[963],simde_mm_xor_si128(c2[722],simde_mm_xor_si128(c2[801],simde_mm_xor_si128(c2[241],simde_mm_xor_si128(c2[320],simde_mm_xor_si128(c2[651],simde_mm_xor_si128(c2[571],simde_mm_xor_si128(c2[650],simde_mm_xor_si128(c2[331],simde_mm_xor_si128(c2[410],simde_mm_xor_si128(c2[971],simde_mm_xor_si128(c2[808],simde_mm_xor_si128(c2[891],simde_mm_xor_si128(c2[88],simde_mm_xor_si128(c2[1216],simde_mm_xor_si128(c2[1136],simde_mm_xor_si128(c2[1219],simde_mm_xor_si128(c2[1138],simde_mm_xor_si128(c2[979],simde_mm_xor_si128(c2[1058],simde_mm_xor_si128(c2[1065],simde_mm_xor_si128(c2[985],simde_mm_xor_si128(c2[1064],simde_mm_xor_si128(c2[666],simde_mm_xor_si128(c2[745],simde_mm_xor_si128(c2[1146],simde_mm_xor_si128(c2[1225],simde_mm_xor_si128(c2[1235],simde_mm_xor_si128(c2[35],simde_mm_xor_si128(c2[515],simde_mm_xor_si128(c2[594],simde_mm_xor_si128(c2[193],simde_mm_xor_si128(c2[34],simde_mm_xor_si128(c2[113],simde_mm_xor_si128(c2[1243],simde_mm_xor_si128(c2[43],simde_mm_xor_si128(c2[1001],simde_mm_xor_si128(c2[842],simde_mm_xor_si128(c2[921],simde_mm_xor_si128(c2[281],simde_mm_xor_si128(c2[48],simde_mm_xor_si128(c2[1251],simde_mm_xor_si128(c2[51],simde_mm_xor_si128(c2[370],simde_mm_xor_si128(c2[449],simde_mm_xor_si128(c2[448],simde_mm_xor_si128(c2[289],simde_mm_xor_si128(c2[368],simde_mm_xor_si128(c2[1177],simde_mm_xor_si128(c2[1256],simde_mm_xor_si128(c2[858],simde_mm_xor_si128(c2[699],simde_mm_xor_si128(c2[778],simde_mm_xor_si128(c2[1016],simde_mm_xor_si128(c2[226],simde_mm_xor_si128(c2[305],simde_mm_xor_si128(c2[227],simde_mm_xor_si128(c2[306],simde_mm_xor_si128(c2[544],simde_mm_xor_si128(c2[385],simde_mm_xor_si128(c2[464],simde_mm_xor_si128(c2[73],simde_mm_xor_si128(c2[1272],simde_mm_xor_si128(c2[72],simde_mm_xor_si128(c2[1195],simde_mm_xor_si128(c2[1274],simde_mm_xor_si128(c2[395],simde_mm_xor_si128(c2[232],c2[315]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 8
+     d2[32]=simde_mm_xor_si128(c2[1202],simde_mm_xor_si128(c2[1122],simde_mm_xor_si128(c2[1040],simde_mm_xor_si128(c2[960],simde_mm_xor_si128(c2[483],simde_mm_xor_si128(c2[1120],simde_mm_xor_si128(c2[889],simde_mm_xor_si128(c2[809],simde_mm_xor_si128(c2[569],simde_mm_xor_si128(c2[1050],simde_mm_xor_si128(c2[1129],simde_mm_xor_si128(c2[179],simde_mm_xor_si128(c2[99],simde_mm_xor_si128(c2[1217],simde_mm_xor_si128(c2[24],simde_mm_xor_si128(c2[1227],simde_mm_xor_si128(c2[984],simde_mm_xor_si128(c2[904],simde_mm_xor_si128(c2[105],simde_mm_xor_si128(c2[274],simde_mm_xor_si128(c2[194],simde_mm_xor_si128(c2[753],simde_mm_xor_si128(c2[272],simde_mm_xor_si128(c2[282],simde_mm_xor_si128(c2[202],simde_mm_xor_si128(c2[1080],simde_mm_xor_si128(c2[290],simde_mm_xor_si128(c2[210],simde_mm_xor_si128(c2[688],simde_mm_xor_si128(c2[608],simde_mm_xor_si128(c2[531],simde_mm_xor_si128(c2[216],simde_mm_xor_si128(c2[136],simde_mm_xor_si128(c2[937],simde_mm_xor_si128(c2[544],simde_mm_xor_si128(c2[464],simde_mm_xor_si128(c2[465],simde_mm_xor_si128(c2[627],simde_mm_xor_si128(c2[315],simde_mm_xor_si128(c2[235],simde_mm_xor_si128(c2[234],simde_mm_xor_si128(c2[154],c2[474]))))))))))))))))))))))))))))))))))))))))));
+
+//row: 9
+     d2[36]=simde_mm_xor_si128(c2[0],simde_mm_xor_si128(c2[400],simde_mm_xor_si128(c2[1203],simde_mm_xor_si128(c2[242],simde_mm_xor_si128(c2[1041],simde_mm_xor_si128(c2[1040],simde_mm_xor_si128(c2[560],simde_mm_xor_si128(c2[970],simde_mm_xor_si128(c2[91],simde_mm_xor_si128(c2[890],simde_mm_xor_si128(c2[1130],simde_mm_xor_si128(c2[650],simde_mm_xor_si128(c2[328],simde_mm_xor_si128(c2[1131],simde_mm_xor_si128(c2[888],simde_mm_xor_si128(c2[256],simde_mm_xor_si128(c2[656],simde_mm_xor_si128(c2[176],simde_mm_xor_si128(c2[499],simde_mm_xor_si128(c2[19],simde_mm_xor_si128(c2[105],simde_mm_xor_si128(c2[505],simde_mm_xor_si128(c2[25],simde_mm_xor_si128(c2[186],simde_mm_xor_si128(c2[985],simde_mm_xor_si128(c2[666],simde_mm_xor_si128(c2[186],simde_mm_xor_si128(c2[755],simde_mm_xor_si128(c2[275],simde_mm_xor_si128(c2[35],simde_mm_xor_si128(c2[834],simde_mm_xor_si128(c2[833],simde_mm_xor_si128(c2[353],simde_mm_xor_si128(c2[763],simde_mm_xor_si128(c2[283],simde_mm_xor_si128(c2[362],simde_mm_xor_si128(c2[1161],simde_mm_xor_si128(c2[371],simde_mm_xor_si128(c2[771],simde_mm_xor_si128(c2[291],simde_mm_xor_si128(c2[1169],simde_mm_xor_si128(c2[689],simde_mm_xor_si128(c2[1088],simde_mm_xor_si128(c2[608],simde_mm_xor_si128(c2[697],simde_mm_xor_si128(c2[217],simde_mm_xor_si128(c2[219],simde_mm_xor_si128(c2[1018],simde_mm_xor_si128(c2[1025],simde_mm_xor_si128(c2[545],simde_mm_xor_si128(c2[1026],simde_mm_xor_si128(c2[546],simde_mm_xor_si128(c2[1184],simde_mm_xor_si128(c2[704],simde_mm_xor_si128(c2[1104],simde_mm_xor_si128(c2[392],simde_mm_xor_si128(c2[792],simde_mm_xor_si128(c2[312],simde_mm_xor_si128(c2[715],simde_mm_xor_si128(c2[235],simde_mm_xor_si128(c2[1035],c2[555])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 10
+     d2[40]=simde_mm_xor_si128(c2[880],simde_mm_xor_si128(c2[731],simde_mm_xor_si128(c2[48],c2[459])));
+
+//row: 11
+     d2[44]=simde_mm_xor_si128(c2[0],simde_mm_xor_si128(c2[1121],simde_mm_xor_si128(c2[640],simde_mm_xor_si128(c2[880],simde_mm_xor_si128(c2[970],simde_mm_xor_si128(c2[730],simde_mm_xor_si128(c2[8],simde_mm_xor_si128(c2[1211],simde_mm_xor_si128(c2[256],simde_mm_xor_si128(c2[179],simde_mm_xor_si128(c2[99],simde_mm_xor_si128(c2[105],simde_mm_xor_si128(c2[1065],simde_mm_xor_si128(c2[266],simde_mm_xor_si128(c2[355],simde_mm_xor_si128(c2[914],simde_mm_xor_si128(c2[513],simde_mm_xor_si128(c2[433],simde_mm_xor_si128(c2[363],simde_mm_xor_si128(c2[42],simde_mm_xor_si128(c2[1241],simde_mm_xor_si128(c2[371],simde_mm_xor_si128(c2[769],simde_mm_xor_si128(c2[768],simde_mm_xor_si128(c2[688],simde_mm_xor_si128(c2[297],simde_mm_xor_si128(c2[1178],simde_mm_xor_si128(c2[1098],simde_mm_xor_si128(c2[1018],simde_mm_xor_si128(c2[625],simde_mm_xor_si128(c2[626],simde_mm_xor_si128(c2[864],simde_mm_xor_si128(c2[784],simde_mm_xor_si128(c2[392],simde_mm_xor_si128(c2[315],simde_mm_xor_si128(c2[715],simde_mm_xor_si128(c2[635],c2[233])))))))))))))))))))))))))))))))))))));
+
+//row: 12
+     d2[48]=simde_mm_xor_si128(c2[561],simde_mm_xor_si128(c2[481],simde_mm_xor_si128(c2[323],simde_mm_xor_si128(c2[1121],simde_mm_xor_si128(c2[248],simde_mm_xor_si128(c2[168],simde_mm_xor_si128(c2[1211],simde_mm_xor_si128(c2[409],simde_mm_xor_si128(c2[1211],simde_mm_xor_si128(c2[817],simde_mm_xor_si128(c2[737],simde_mm_xor_si128(c2[576],simde_mm_xor_si128(c2[666],simde_mm_xor_si128(c2[586],simde_mm_xor_si128(c2[267],simde_mm_xor_si128(c2[747],simde_mm_xor_si128(c2[1226],simde_mm_xor_si128(c2[832],simde_mm_xor_si128(c2[112],simde_mm_xor_si128(c2[914],simde_mm_xor_si128(c2[840],simde_mm_xor_si128(c2[443],simde_mm_xor_si128(c2[928],simde_mm_xor_si128(c2[848],simde_mm_xor_si128(c2[1250],simde_mm_xor_si128(c2[1169],simde_mm_xor_si128(c2[778],simde_mm_xor_si128(c2[296],simde_mm_xor_si128(c2[1106],simde_mm_xor_si128(c2[1107],simde_mm_xor_si128(c2[1265],simde_mm_xor_si128(c2[953],simde_mm_xor_si128(c2[873],simde_mm_xor_si128(c2[792],c2[1112]))))))))))))))))))))))))))))))))));
+
+//row: 13
+     d2[52]=simde_mm_xor_si128(c2[482],simde_mm_xor_si128(c2[320],simde_mm_xor_si128(c2[1122],simde_mm_xor_si128(c2[241],simde_mm_xor_si128(c2[169],simde_mm_xor_si128(c2[1208],simde_mm_xor_si128(c2[490],simde_mm_xor_si128(c2[410],simde_mm_xor_si128(c2[168],simde_mm_xor_si128(c2[738],simde_mm_xor_si128(c2[657],simde_mm_xor_si128(c2[577],simde_mm_xor_si128(c2[587],simde_mm_xor_si128(c2[264],simde_mm_xor_si128(c2[744],simde_mm_xor_si128(c2[833],simde_mm_xor_si128(c2[113],simde_mm_xor_si128(c2[995],simde_mm_xor_si128(c2[915],simde_mm_xor_si128(c2[841],simde_mm_xor_si128(c2[520],simde_mm_xor_si128(c2[440],simde_mm_xor_si128(c2[849],simde_mm_xor_si128(c2[1251],simde_mm_xor_si128(c2[1250],simde_mm_xor_si128(c2[1170],simde_mm_xor_si128(c2[779],simde_mm_xor_si128(c2[377],simde_mm_xor_si128(c2[297],simde_mm_xor_si128(c2[1107],simde_mm_xor_si128(c2[1104],simde_mm_xor_si128(c2[67],simde_mm_xor_si128(c2[1266],simde_mm_xor_si128(c2[546],simde_mm_xor_si128(c2[874],simde_mm_xor_si128(c2[793],simde_mm_xor_si128(c2[1193],c2[1113])))))))))))))))))))))))))))))))))))));
+
+//row: 14
+     d2[56]=simde_mm_xor_si128(c2[960],simde_mm_xor_si128(c2[880],simde_mm_xor_si128(c2[3],simde_mm_xor_si128(c2[722],simde_mm_xor_si128(c2[1120],simde_mm_xor_si128(c2[241],simde_mm_xor_si128(c2[643],simde_mm_xor_si128(c2[651],simde_mm_xor_si128(c2[571],simde_mm_xor_si128(c2[969],simde_mm_xor_si128(c2[331],simde_mm_xor_si128(c2[729],simde_mm_xor_si128(c2[11],simde_mm_xor_si128(c2[808],simde_mm_xor_si128(c2[1210],simde_mm_xor_si128(c2[251],simde_mm_xor_si128(c2[1216],simde_mm_xor_si128(c2[1136],simde_mm_xor_si128(c2[259],simde_mm_xor_si128(c2[178],simde_mm_xor_si128(c2[979],simde_mm_xor_si128(c2[98],simde_mm_xor_si128(c2[1065],simde_mm_xor_si128(c2[985],simde_mm_xor_si128(c2[104],simde_mm_xor_si128(c2[666],simde_mm_xor_si128(c2[1064],simde_mm_xor_si128(c2[1146],simde_mm_xor_si128(c2[265],simde_mm_xor_si128(c2[1235],simde_mm_xor_si128(c2[354],simde_mm_xor_si128(c2[515],simde_mm_xor_si128(c2[913],simde_mm_xor_si128(c2[512],simde_mm_xor_si128(c2[34],simde_mm_xor_si128(c2[432],simde_mm_xor_si128(c2[1243],simde_mm_xor_si128(c2[362],simde_mm_xor_si128(c2[41],simde_mm_xor_si128(c2[842],simde_mm_xor_si128(c2[1240],simde_mm_xor_si128(c2[48],simde_mm_xor_si128(c2[1251],simde_mm_xor_si128(c2[370],simde_mm_xor_si128(c2[370],simde_mm_xor_si128(c2[768],simde_mm_xor_si128(c2[771],simde_mm_xor_si128(c2[289],simde_mm_xor_si128(c2[691],simde_mm_xor_si128(c2[129],simde_mm_xor_si128(c2[1177],simde_mm_xor_si128(c2[296],simde_mm_xor_si128(c2[1177],simde_mm_xor_si128(c2[699],simde_mm_xor_si128(c2[1097],simde_mm_xor_si128(c2[226],simde_mm_xor_si128(c2[624],simde_mm_xor_si128(c2[227],simde_mm_xor_si128(c2[625],simde_mm_xor_si128(c2[867],simde_mm_xor_si128(c2[385],simde_mm_xor_si128(c2[787],simde_mm_xor_si128(c2[73],simde_mm_xor_si128(c2[1272],simde_mm_xor_si128(c2[395],simde_mm_xor_si128(c2[1195],simde_mm_xor_si128(c2[314],simde_mm_xor_si128(c2[714],simde_mm_xor_si128(c2[232],c2[634])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 15
+     d2[60]=simde_mm_xor_si128(c2[1121],simde_mm_xor_si128(c2[563],simde_mm_xor_si128(c2[1041],simde_mm_xor_si128(c2[401],simde_mm_xor_si128(c2[883],simde_mm_xor_si128(c2[1203],simde_mm_xor_si128(c2[402],simde_mm_xor_si128(c2[243],simde_mm_xor_si128(c2[808],simde_mm_xor_si128(c2[250],simde_mm_xor_si128(c2[728],simde_mm_xor_si128(c2[10],simde_mm_xor_si128(c2[488],simde_mm_xor_si128(c2[491],simde_mm_xor_si128(c2[969],simde_mm_xor_si128(c2[98],simde_mm_xor_si128(c2[819],simde_mm_xor_si128(c2[18],simde_mm_xor_si128(c2[658],simde_mm_xor_si128(c2[1136],simde_mm_xor_si128(c2[1226],simde_mm_xor_si128(c2[664],simde_mm_xor_si128(c2[1146],simde_mm_xor_si128(c2[345],simde_mm_xor_si128(c2[827],simde_mm_xor_si128(c2[825],simde_mm_xor_si128(c2[24],simde_mm_xor_si128(c2[914],simde_mm_xor_si128(c2[113],simde_mm_xor_si128(c2[194],simde_mm_xor_si128(c2[672],simde_mm_xor_si128(c2[992],simde_mm_xor_si128(c2[195],simde_mm_xor_si128(c2[922],simde_mm_xor_si128(c2[121],simde_mm_xor_si128(c2[521],simde_mm_xor_si128(c2[1003],simde_mm_xor_si128(c2[209],simde_mm_xor_si128(c2[930],simde_mm_xor_si128(c2[129],simde_mm_xor_si128(c2[49],simde_mm_xor_si128(c2[531],simde_mm_xor_si128(c2[1251],simde_mm_xor_si128(c2[450],simde_mm_xor_si128(c2[856],simde_mm_xor_si128(c2[59],simde_mm_xor_si128(c2[378],simde_mm_xor_si128(c2[856],simde_mm_xor_si128(c2[1184],simde_mm_xor_si128(c2[387],simde_mm_xor_si128(c2[1185],simde_mm_xor_si128(c2[384],simde_mm_xor_si128(c2[64],simde_mm_xor_si128(c2[546],simde_mm_xor_si128(c2[234],simde_mm_xor_si128(c2[955],simde_mm_xor_si128(c2[154],simde_mm_xor_si128(c2[874],simde_mm_xor_si128(c2[73],simde_mm_xor_si128(c2[1194],c2[393]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 16
+     d2[64]=simde_mm_xor_si128(c2[81],simde_mm_xor_si128(c2[883],simde_mm_xor_si128(c2[1],simde_mm_xor_si128(c2[803],simde_mm_xor_si128(c2[721],simde_mm_xor_si128(c2[1122],simde_mm_xor_si128(c2[641],simde_mm_xor_si128(c2[641],simde_mm_xor_si128(c2[160],simde_mm_xor_si128(c2[1051],simde_mm_xor_si128(c2[570],simde_mm_xor_si128(c2[971],simde_mm_xor_si128(c2[490],simde_mm_xor_si128(c2[731],simde_mm_xor_si128(c2[250],simde_mm_xor_si128(c2[1208],simde_mm_xor_si128(c2[731],simde_mm_xor_si128(c2[888],simde_mm_xor_si128(c2[337],simde_mm_xor_si128(c2[1139],simde_mm_xor_si128(c2[257],simde_mm_xor_si128(c2[1059],simde_mm_xor_si128(c2[96],simde_mm_xor_si128(c2[898],simde_mm_xor_si128(c2[186],simde_mm_xor_si128(c2[984],simde_mm_xor_si128(c2[106],simde_mm_xor_si128(c2[904],simde_mm_xor_si128(c2[665],simde_mm_xor_si128(c2[1066],simde_mm_xor_si128(c2[585],simde_mm_xor_si128(c2[267],simde_mm_xor_si128(c2[1065],simde_mm_xor_si128(c2[1234],simde_mm_xor_si128(c2[352],simde_mm_xor_si128(c2[1154],simde_mm_xor_si128(c2[915],simde_mm_xor_si128(c2[434],simde_mm_xor_si128(c2[434],simde_mm_xor_si128(c2[1232],simde_mm_xor_si128(c2[1242],simde_mm_xor_si128(c2[360],simde_mm_xor_si128(c2[1162],simde_mm_xor_si128(c2[1242],simde_mm_xor_si128(c2[761],simde_mm_xor_si128(c2[448],simde_mm_xor_si128(c2[1250],simde_mm_xor_si128(c2[368],simde_mm_xor_si128(c2[1170],simde_mm_xor_si128(c2[369],simde_mm_xor_si128(c2[770],simde_mm_xor_si128(c2[289],simde_mm_xor_si128(c2[689],simde_mm_xor_si128(c2[208],simde_mm_xor_si128(c2[1176],simde_mm_xor_si128(c2[298],simde_mm_xor_si128(c2[1096],simde_mm_xor_si128(c2[1099],simde_mm_xor_si128(c2[618],simde_mm_xor_si128(c2[225],simde_mm_xor_si128(c2[626],simde_mm_xor_si128(c2[145],simde_mm_xor_si128(c2[627],simde_mm_xor_si128(c2[146],simde_mm_xor_si128(c2[785],simde_mm_xor_si128(c2[304],simde_mm_xor_si128(c2[473],simde_mm_xor_si128(c2[1275],simde_mm_xor_si128(c2[393],simde_mm_xor_si128(c2[1195],simde_mm_xor_si128(c2[1194],simde_mm_xor_si128(c2[312],simde_mm_xor_si128(c2[1114],simde_mm_xor_si128(c2[632],simde_mm_xor_si128(c2[155],c2[1192])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 17
+     d2[68]=simde_mm_xor_si128(c2[883],simde_mm_xor_si128(c2[720],simde_mm_xor_si128(c2[803],simde_mm_xor_si128(c2[640],simde_mm_xor_si128(c2[562],simde_mm_xor_si128(c2[641],simde_mm_xor_si128(c2[482],simde_mm_xor_si128(c2[160],simde_mm_xor_si128(c2[1],simde_mm_xor_si128(c2[570],simde_mm_xor_si128(c2[411],simde_mm_xor_si128(c2[490],simde_mm_xor_si128(c2[331],simde_mm_xor_si128(c2[250],simde_mm_xor_si128(c2[91],simde_mm_xor_si128(c2[731],simde_mm_xor_si128(c2[568],simde_mm_xor_si128(c2[1131],simde_mm_xor_si128(c2[1139],simde_mm_xor_si128(c2[976],simde_mm_xor_si128(c2[1059],simde_mm_xor_si128(c2[896],simde_mm_xor_si128(c2[898],simde_mm_xor_si128(c2[739],simde_mm_xor_si128(c2[984],simde_mm_xor_si128(c2[825],simde_mm_xor_si128(c2[904],simde_mm_xor_si128(c2[745],simde_mm_xor_si128(c2[506],simde_mm_xor_si128(c2[585],simde_mm_xor_si128(c2[426],simde_mm_xor_si128(c2[1065],simde_mm_xor_si128(c2[906],simde_mm_xor_si128(c2[1075],simde_mm_xor_si128(c2[1154],simde_mm_xor_si128(c2[995],simde_mm_xor_si128(c2[434],simde_mm_xor_si128(c2[275],simde_mm_xor_si128(c2[1232],simde_mm_xor_si128(c2[1073],simde_mm_xor_si128(c2[1083],simde_mm_xor_si128(c2[1162],simde_mm_xor_si128(c2[1003],simde_mm_xor_si128(c2[761],simde_mm_xor_si128(c2[602],simde_mm_xor_si128(c2[1003],simde_mm_xor_si128(c2[1250],simde_mm_xor_si128(c2[1091],simde_mm_xor_si128(c2[1170],simde_mm_xor_si128(c2[1011],simde_mm_xor_si128(c2[210],simde_mm_xor_si128(c2[289],simde_mm_xor_si128(c2[130],simde_mm_xor_si128(c2[208],simde_mm_xor_si128(c2[49],simde_mm_xor_si128(c2[1017],simde_mm_xor_si128(c2[1096],simde_mm_xor_si128(c2[937],simde_mm_xor_si128(c2[618],simde_mm_xor_si128(c2[459],simde_mm_xor_si128(c2[66],simde_mm_xor_si128(c2[145],simde_mm_xor_si128(c2[1265],simde_mm_xor_si128(c2[146],simde_mm_xor_si128(c2[1266],simde_mm_xor_si128(c2[304],simde_mm_xor_si128(c2[145],simde_mm_xor_si128(c2[1275],simde_mm_xor_si128(c2[1112],simde_mm_xor_si128(c2[1195],simde_mm_xor_si128(c2[1032],simde_mm_xor_si128(c2[1035],simde_mm_xor_si128(c2[1114],simde_mm_xor_si128(c2[955],simde_mm_xor_si128(c2[155],c2[1275])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 18
+     d2[72]=simde_mm_xor_si128(c2[961],simde_mm_xor_si128(c2[208],c2[219]));
+
+//row: 19
+     d2[76]=simde_mm_xor_si128(c2[80],simde_mm_xor_si128(c2[1201],simde_mm_xor_si128(c2[720],simde_mm_xor_si128(c2[561],simde_mm_xor_si128(c2[1050],simde_mm_xor_si128(c2[810],simde_mm_xor_si128(c2[8],simde_mm_xor_si128(c2[329],simde_mm_xor_si128(c2[336],simde_mm_xor_si128(c2[179],simde_mm_xor_si128(c2[185],simde_mm_xor_si128(c2[1145],simde_mm_xor_si128(c2[346],simde_mm_xor_si128(c2[435],simde_mm_xor_si128(c2[994],simde_mm_xor_si128(c2[513],simde_mm_xor_si128(c2[443],simde_mm_xor_si128(c2[42],simde_mm_xor_si128(c2[451],simde_mm_xor_si128(c2[849],simde_mm_xor_si128(c2[768],simde_mm_xor_si128(c2[377],simde_mm_xor_si128(c2[1178],simde_mm_xor_si128(c2[705],simde_mm_xor_si128(c2[706],simde_mm_xor_si128(c2[864],simde_mm_xor_si128(c2[472],simde_mm_xor_si128(c2[395],c2[715]))))))))))))))))))))))))))));
+
+//row: 20
+     d2[80]=simde_mm_xor_si128(c2[482],simde_mm_xor_si128(c2[402],simde_mm_xor_si128(c2[240],simde_mm_xor_si128(c2[1042],simde_mm_xor_si128(c2[169],simde_mm_xor_si128(c2[89],simde_mm_xor_si128(c2[1128],simde_mm_xor_si128(c2[330],simde_mm_xor_si128(c2[809],simde_mm_xor_si128(c2[738],simde_mm_xor_si128(c2[658],simde_mm_xor_si128(c2[497],simde_mm_xor_si128(c2[587],simde_mm_xor_si128(c2[507],simde_mm_xor_si128(c2[184],simde_mm_xor_si128(c2[664],simde_mm_xor_si128(c2[753],simde_mm_xor_si128(c2[33],simde_mm_xor_si128(c2[835],simde_mm_xor_si128(c2[754],simde_mm_xor_si128(c2[761],simde_mm_xor_si128(c2[360],simde_mm_xor_si128(c2[849],simde_mm_xor_si128(c2[769],simde_mm_xor_si128(c2[1171],simde_mm_xor_si128(c2[1090],simde_mm_xor_si128(c2[699],simde_mm_xor_si128(c2[217],simde_mm_xor_si128(c2[1027],simde_mm_xor_si128(c2[1024],simde_mm_xor_si128(c2[1186],simde_mm_xor_si128(c2[874],simde_mm_xor_si128(c2[794],simde_mm_xor_si128(c2[713],c2[1033]))))))))))))))))))))))))))))))))));
+
+//row: 21
+     d2[84]=simde_mm_xor_si128(c2[801],simde_mm_xor_si128(c2[643],simde_mm_xor_si128(c2[162],simde_mm_xor_si128(c2[960],simde_mm_xor_si128(c2[488],simde_mm_xor_si128(c2[248],simde_mm_xor_si128(c2[809],simde_mm_xor_si128(c2[729],simde_mm_xor_si128(c2[1057],simde_mm_xor_si128(c2[976],simde_mm_xor_si128(c2[896],simde_mm_xor_si128(c2[906],simde_mm_xor_si128(c2[587],simde_mm_xor_si128(c2[1067],simde_mm_xor_si128(c2[1152],simde_mm_xor_si128(c2[432],simde_mm_xor_si128(c2[35],simde_mm_xor_si128(c2[1234],simde_mm_xor_si128(c2[1160],simde_mm_xor_si128(c2[843],simde_mm_xor_si128(c2[763],simde_mm_xor_si128(c2[1168],simde_mm_xor_si128(c2[291],simde_mm_xor_si128(c2[290],simde_mm_xor_si128(c2[210],simde_mm_xor_si128(c2[1098],simde_mm_xor_si128(c2[696],simde_mm_xor_si128(c2[616],simde_mm_xor_si128(c2[147],simde_mm_xor_si128(c2[144],simde_mm_xor_si128(c2[386],simde_mm_xor_si128(c2[306],simde_mm_xor_si128(c2[866],simde_mm_xor_si128(c2[1193],simde_mm_xor_si128(c2[1112],simde_mm_xor_si128(c2[233],c2[153]))))))))))))))))))))))))))))))))))));
+
+//row: 22
+     d2[88]=simde_mm_xor_si128(c2[1129],c2[1219]);
+
+//row: 23
+     d2[92]=simde_mm_xor_si128(c2[561],simde_mm_xor_si128(c2[906],c2[1162]));
+
+//row: 24
+     d2[96]=simde_mm_xor_si128(c2[1130],simde_mm_xor_si128(c2[896],c2[712]));
+
+//row: 25
+     d2[100]=simde_mm_xor_si128(c2[322],c2[1001]);
+
+//row: 26
+     d2[104]=simde_mm_xor_si128(c2[722],simde_mm_xor_si128(c2[642],simde_mm_xor_si128(c2[160],simde_mm_xor_si128(c2[560],simde_mm_xor_si128(c2[480],simde_mm_xor_si128(c2[2],simde_mm_xor_si128(c2[3],simde_mm_xor_si128(c2[800],simde_mm_xor_si128(c2[409],simde_mm_xor_si128(c2[329],simde_mm_xor_si128(c2[1130],simde_mm_xor_si128(c2[89],simde_mm_xor_si128(c2[890],simde_mm_xor_si128(c2[168],simde_mm_xor_si128(c2[570],simde_mm_xor_si128(c2[88],simde_mm_xor_si128(c2[978],simde_mm_xor_si128(c2[898],simde_mm_xor_si128(c2[416],simde_mm_xor_si128(c2[339],simde_mm_xor_si128(c2[737],simde_mm_xor_si128(c2[259],simde_mm_xor_si128(c2[1057],simde_mm_xor_si128(c2[827],simde_mm_xor_si128(c2[747],simde_mm_xor_si128(c2[265],simde_mm_xor_si128(c2[504],simde_mm_xor_si128(c2[424],simde_mm_xor_si128(c2[1225],simde_mm_xor_si128(c2[904],simde_mm_xor_si128(c2[426],simde_mm_xor_si128(c2[1073],simde_mm_xor_si128(c2[993],simde_mm_xor_si128(c2[515],simde_mm_xor_si128(c2[273],simde_mm_xor_si128(c2[1074],simde_mm_xor_si128(c2[673],simde_mm_xor_si128(c2[1075],simde_mm_xor_si128(c2[593],simde_mm_xor_si128(c2[1081],simde_mm_xor_si128(c2[1001],simde_mm_xor_si128(c2[523],simde_mm_xor_si128(c2[202],simde_mm_xor_si128(c2[600],simde_mm_xor_si128(c2[122],simde_mm_xor_si128(c2[1089],simde_mm_xor_si128(c2[1009],simde_mm_xor_si128(c2[531],simde_mm_xor_si128(c2[208],simde_mm_xor_si128(c2[128],simde_mm_xor_si128(c2[929],simde_mm_xor_si128(c2[928],simde_mm_xor_si128(c2[51],simde_mm_xor_si128(c2[848],simde_mm_xor_si128(c2[1019],simde_mm_xor_si128(c2[939],simde_mm_xor_si128(c2[457],simde_mm_xor_si128(c2[59],simde_mm_xor_si128(c2[457],simde_mm_xor_si128(c2[1258],simde_mm_xor_si128(c2[1256],simde_mm_xor_si128(c2[64],simde_mm_xor_si128(c2[1267],simde_mm_xor_si128(c2[785],simde_mm_xor_si128(c2[1264],simde_mm_xor_si128(c2[786],simde_mm_xor_si128(c2[1024],simde_mm_xor_si128(c2[147],simde_mm_xor_si128(c2[944],simde_mm_xor_si128(c2[1114],simde_mm_xor_si128(c2[1034],simde_mm_xor_si128(c2[552],simde_mm_xor_si128(c2[1033],simde_mm_xor_si128(c2[953],simde_mm_xor_si128(c2[475],simde_mm_xor_si128(c2[875],simde_mm_xor_si128(c2[1273],c2[795])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 27
+     d2[108]=simde_mm_xor_si128(c2[640],c2[609]);
+
+//row: 28
+     d2[112]=simde_mm_xor_si128(c2[170],simde_mm_xor_si128(c2[418],c2[600]));
+
+//row: 29
+     d2[116]=simde_mm_xor_si128(c2[161],c2[993]);
+
+//row: 30
+     d2[120]=simde_mm_xor_si128(c2[576],simde_mm_xor_si128(c2[43],simde_mm_xor_si128(c2[776],c2[393])));
+
+//row: 31
+     d2[124]=simde_mm_xor_si128(c2[720],simde_mm_xor_si128(c2[562],simde_mm_xor_si128(c2[81],simde_mm_xor_si128(c2[411],simde_mm_xor_si128(c2[171],simde_mm_xor_si128(c2[728],simde_mm_xor_si128(c2[648],simde_mm_xor_si128(c2[810],simde_mm_xor_si128(c2[976],simde_mm_xor_si128(c2[899],simde_mm_xor_si128(c2[819],simde_mm_xor_si128(c2[825],simde_mm_xor_si128(c2[506],simde_mm_xor_si128(c2[986],simde_mm_xor_si128(c2[1075],simde_mm_xor_si128(c2[355],simde_mm_xor_si128(c2[1233],simde_mm_xor_si128(c2[1153],simde_mm_xor_si128(c2[1083],simde_mm_xor_si128(c2[762],simde_mm_xor_si128(c2[682],simde_mm_xor_si128(c2[1091],simde_mm_xor_si128(c2[210],simde_mm_xor_si128(c2[209],simde_mm_xor_si128(c2[129],simde_mm_xor_si128(c2[1017],simde_mm_xor_si128(c2[619],simde_mm_xor_si128(c2[539],simde_mm_xor_si128(c2[66],simde_mm_xor_si128(c2[67],simde_mm_xor_si128(c2[305],simde_mm_xor_si128(c2[225],simde_mm_xor_si128(c2[1112],simde_mm_xor_si128(c2[1035],simde_mm_xor_si128(c2[152],c2[72])))))))))))))))))))))))))))))))))));
+
+//row: 32
+     d2[128]=simde_mm_xor_si128(c2[1202],simde_mm_xor_si128(c2[1122],simde_mm_xor_si128(c2[1040],simde_mm_xor_si128(c2[960],simde_mm_xor_si128(c2[483],simde_mm_xor_si128(c2[163],simde_mm_xor_si128(c2[889],simde_mm_xor_si128(c2[809],simde_mm_xor_si128(c2[569],simde_mm_xor_si128(c2[1050],simde_mm_xor_si128(c2[179],simde_mm_xor_si128(c2[99],simde_mm_xor_si128(c2[1217],simde_mm_xor_si128(c2[24],simde_mm_xor_si128(c2[1227],simde_mm_xor_si128(c2[984],simde_mm_xor_si128(c2[904],simde_mm_xor_si128(c2[105],simde_mm_xor_si128(c2[274],simde_mm_xor_si128(c2[194],simde_mm_xor_si128(c2[753],simde_mm_xor_si128(c2[272],simde_mm_xor_si128(c2[282],simde_mm_xor_si128(c2[202],simde_mm_xor_si128(c2[1080],simde_mm_xor_si128(c2[1002],simde_mm_xor_si128(c2[290],simde_mm_xor_si128(c2[210],simde_mm_xor_si128(c2[688],simde_mm_xor_si128(c2[608],simde_mm_xor_si128(c2[531],simde_mm_xor_si128(c2[216],simde_mm_xor_si128(c2[136],simde_mm_xor_si128(c2[937],simde_mm_xor_si128(c2[544],simde_mm_xor_si128(c2[464],simde_mm_xor_si128(c2[465],simde_mm_xor_si128(c2[627],simde_mm_xor_si128(c2[315],simde_mm_xor_si128(c2[235],simde_mm_xor_si128(c2[234],simde_mm_xor_si128(c2[154],c2[474]))))))))))))))))))))))))))))))))))))))))));
+
+//row: 33
+     d2[132]=simde_mm_xor_si128(c2[243],simde_mm_xor_si128(c2[81],simde_mm_xor_si128(c2[883],simde_mm_xor_si128(c2[1209],simde_mm_xor_si128(c2[969],simde_mm_xor_si128(c2[171],simde_mm_xor_si128(c2[499],simde_mm_xor_si128(c2[338],simde_mm_xor_si128(c2[336],simde_mm_xor_si128(c2[344],simde_mm_xor_si128(c2[25],simde_mm_xor_si128(c2[505],simde_mm_xor_si128(c2[594],simde_mm_xor_si128(c2[1153],simde_mm_xor_si128(c2[672],simde_mm_xor_si128(c2[602],simde_mm_xor_si128(c2[201],simde_mm_xor_si128(c2[610],simde_mm_xor_si128(c2[1008],simde_mm_xor_si128(c2[931],simde_mm_xor_si128(c2[536],simde_mm_xor_si128(c2[58],simde_mm_xor_si128(c2[378],simde_mm_xor_si128(c2[864],simde_mm_xor_si128(c2[865],simde_mm_xor_si128(c2[1027],simde_mm_xor_si128(c2[635],simde_mm_xor_si128(c2[554],c2[874]))))))))))))))))))))))))))));
+
+//row: 34
+     d2[136]=simde_mm_xor_si128(c2[1121],simde_mm_xor_si128(c2[1041],simde_mm_xor_si128(c2[962],simde_mm_xor_si128(c2[963],simde_mm_xor_si128(c2[883],simde_mm_xor_si128(c2[800],simde_mm_xor_si128(c2[402],simde_mm_xor_si128(c2[323],simde_mm_xor_si128(c2[241],simde_mm_xor_si128(c2[808],simde_mm_xor_si128(c2[728],simde_mm_xor_si128(c2[649],simde_mm_xor_si128(c2[488],simde_mm_xor_si128(c2[409],simde_mm_xor_si128(c2[970],simde_mm_xor_si128(c2[969],simde_mm_xor_si128(c2[890],simde_mm_xor_si128(c2[98],simde_mm_xor_si128(c2[18],simde_mm_xor_si128(c2[1218],simde_mm_xor_si128(c2[1137],simde_mm_xor_si128(c2[1136],simde_mm_xor_si128(c2[1057],simde_mm_xor_si128(c2[1226],simde_mm_xor_si128(c2[1146],simde_mm_xor_si128(c2[1067],simde_mm_xor_si128(c2[907],simde_mm_xor_si128(c2[827],simde_mm_xor_si128(c2[744],simde_mm_xor_si128(c2[24],simde_mm_xor_si128(c2[1224],simde_mm_xor_si128(c2[193],simde_mm_xor_si128(c2[113],simde_mm_xor_si128(c2[34],simde_mm_xor_si128(c2[672],simde_mm_xor_si128(c2[593],simde_mm_xor_si128(c2[192],simde_mm_xor_si128(c2[195],simde_mm_xor_si128(c2[112],simde_mm_xor_si128(c2[201],simde_mm_xor_si128(c2[121],simde_mm_xor_si128(c2[42],simde_mm_xor_si128(c2[1000],simde_mm_xor_si128(c2[1003],simde_mm_xor_si128(c2[920],simde_mm_xor_si128(c2[209],simde_mm_xor_si128(c2[129],simde_mm_xor_si128(c2[50],simde_mm_xor_si128(c2[611],simde_mm_xor_si128(c2[531],simde_mm_xor_si128(c2[448],simde_mm_xor_si128(c2[451],simde_mm_xor_si128(c2[450],simde_mm_xor_si128(c2[371],simde_mm_xor_si128(c2[139],simde_mm_xor_si128(c2[59],simde_mm_xor_si128(c2[1259],simde_mm_xor_si128(c2[857],simde_mm_xor_si128(c2[856],simde_mm_xor_si128(c2[777],simde_mm_xor_si128(c2[467],simde_mm_xor_si128(c2[387],simde_mm_xor_si128(c2[304],simde_mm_xor_si128(c2[384],simde_mm_xor_si128(c2[305],simde_mm_xor_si128(c2[547],simde_mm_xor_si128(c2[546],simde_mm_xor_si128(c2[467],simde_mm_xor_si128(c2[234],simde_mm_xor_si128(c2[154],simde_mm_xor_si128(c2[75],simde_mm_xor_si128(c2[153],simde_mm_xor_si128(c2[73],simde_mm_xor_si128(c2[1273],simde_mm_xor_si128(c2[394],simde_mm_xor_si128(c2[393],c2[314]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 35
+     d2[140]=simde_mm_xor_si128(c2[640],simde_mm_xor_si128(c2[560],simde_mm_xor_si128(c2[402],simde_mm_xor_si128(c2[1200],simde_mm_xor_si128(c2[331],simde_mm_xor_si128(c2[251],simde_mm_xor_si128(c2[11],simde_mm_xor_si128(c2[488],simde_mm_xor_si128(c2[731],simde_mm_xor_si128(c2[896],simde_mm_xor_si128(c2[816],simde_mm_xor_si128(c2[659],simde_mm_xor_si128(c2[745],simde_mm_xor_si128(c2[665],simde_mm_xor_si128(c2[346],simde_mm_xor_si128(c2[826],simde_mm_xor_si128(c2[915],simde_mm_xor_si128(c2[195],simde_mm_xor_si128(c2[993],simde_mm_xor_si128(c2[923],simde_mm_xor_si128(c2[522],simde_mm_xor_si128(c2[682],simde_mm_xor_si128(c2[1011],simde_mm_xor_si128(c2[931],simde_mm_xor_si128(c2[50],simde_mm_xor_si128(c2[1248],simde_mm_xor_si128(c2[857],simde_mm_xor_si128(c2[379],simde_mm_xor_si128(c2[1185],simde_mm_xor_si128(c2[1186],simde_mm_xor_si128(c2[65],simde_mm_xor_si128(c2[1032],simde_mm_xor_si128(c2[952],simde_mm_xor_si128(c2[875],c2[1195]))))))))))))))))))))))))))))))))));
+
+//row: 36
+     d2[144]=simde_mm_xor_si128(c2[960],simde_mm_xor_si128(c2[498],c2[857]));
+
+//row: 37
+     d2[148]=simde_mm_xor_si128(c2[242],simde_mm_xor_si128(c2[1201],simde_mm_xor_si128(c2[80],simde_mm_xor_si128(c2[1043],simde_mm_xor_si128(c2[882],simde_mm_xor_si128(c2[562],simde_mm_xor_si128(c2[1208],simde_mm_xor_si128(c2[888],simde_mm_xor_si128(c2[968],simde_mm_xor_si128(c2[648],simde_mm_xor_si128(c2[1209],simde_mm_xor_si128(c2[170],simde_mm_xor_si128(c2[1129],simde_mm_xor_si128(c2[498],simde_mm_xor_si128(c2[178],simde_mm_xor_si128(c2[97],simde_mm_xor_si128(c2[337],simde_mm_xor_si128(c2[17],simde_mm_xor_si128(c2[347],simde_mm_xor_si128(c2[27],simde_mm_xor_si128(c2[24],simde_mm_xor_si128(c2[987],simde_mm_xor_si128(c2[504],simde_mm_xor_si128(c2[184],simde_mm_xor_si128(c2[593],simde_mm_xor_si128(c2[273],simde_mm_xor_si128(c2[1152],simde_mm_xor_si128(c2[832],simde_mm_xor_si128(c2[435],simde_mm_xor_si128(c2[675],simde_mm_xor_si128(c2[355],simde_mm_xor_si128(c2[601],simde_mm_xor_si128(c2[281],simde_mm_xor_si128(c2[1243],simde_mm_xor_si128(c2[200],simde_mm_xor_si128(c2[1163],simde_mm_xor_si128(c2[609],simde_mm_xor_si128(c2[289],simde_mm_xor_si128(c2[1011],simde_mm_xor_si128(c2[691],simde_mm_xor_si128(c2[690],simde_mm_xor_si128(c2[930],simde_mm_xor_si128(c2[610],simde_mm_xor_si128(c2[539],simde_mm_xor_si128(c2[219],simde_mm_xor_si128(c2[1096],simde_mm_xor_si128(c2[57],simde_mm_xor_si128(c2[1016],simde_mm_xor_si128(c2[867],simde_mm_xor_si128(c2[547],simde_mm_xor_si128(c2[864],simde_mm_xor_si128(c2[544],simde_mm_xor_si128(c2[786],simde_mm_xor_si128(c2[1026],simde_mm_xor_si128(c2[706],simde_mm_xor_si128(c2[634],simde_mm_xor_si128(c2[314],simde_mm_xor_si128(c2[553],simde_mm_xor_si128(c2[233],simde_mm_xor_si128(c2[633],simde_mm_xor_si128(c2[873],c2[553])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 38
+     d2[152]=simde_mm_xor_si128(c2[1202],simde_mm_xor_si128(c2[1122],simde_mm_xor_si128(c2[960],simde_mm_xor_si128(c2[483],simde_mm_xor_si128(c2[889],simde_mm_xor_si128(c2[809],simde_mm_xor_si128(c2[569],simde_mm_xor_si128(c2[1050],simde_mm_xor_si128(c2[1209],simde_mm_xor_si128(c2[179],simde_mm_xor_si128(c2[99],simde_mm_xor_si128(c2[1217],simde_mm_xor_si128(c2[24],simde_mm_xor_si128(c2[1227],simde_mm_xor_si128(c2[904],simde_mm_xor_si128(c2[105],simde_mm_xor_si128(c2[194],simde_mm_xor_si128(c2[753],simde_mm_xor_si128(c2[272],simde_mm_xor_si128(c2[202],simde_mm_xor_si128(c2[1080],simde_mm_xor_si128(c2[200],simde_mm_xor_si128(c2[290],simde_mm_xor_si128(c2[210],simde_mm_xor_si128(c2[608],simde_mm_xor_si128(c2[531],simde_mm_xor_si128(c2[136],simde_mm_xor_si128(c2[937],simde_mm_xor_si128(c2[464],simde_mm_xor_si128(c2[465],simde_mm_xor_si128(c2[627],simde_mm_xor_si128(c2[315],simde_mm_xor_si128(c2[235],simde_mm_xor_si128(c2[154],c2[474]))))))))))))))))))))))))))))))))));
+
+//row: 39
+     d2[156]=simde_mm_xor_si128(c2[882],simde_mm_xor_si128(c2[802],simde_mm_xor_si128(c2[720],simde_mm_xor_si128(c2[640],simde_mm_xor_si128(c2[163],simde_mm_xor_si128(c2[1202],simde_mm_xor_si128(c2[569],simde_mm_xor_si128(c2[489],simde_mm_xor_si128(c2[249],simde_mm_xor_si128(c2[730],simde_mm_xor_si128(c2[1138],simde_mm_xor_si128(c2[1058],simde_mm_xor_si128(c2[897],simde_mm_xor_si128(c2[987],simde_mm_xor_si128(c2[907],simde_mm_xor_si128(c2[664],simde_mm_xor_si128(c2[584],simde_mm_xor_si128(c2[1064],simde_mm_xor_si128(c2[1233],simde_mm_xor_si128(c2[1153],simde_mm_xor_si128(c2[433],simde_mm_xor_si128(c2[1235],simde_mm_xor_si128(c2[1241],simde_mm_xor_si128(c2[1161],simde_mm_xor_si128(c2[760],simde_mm_xor_si128(c2[1249],simde_mm_xor_si128(c2[1169],simde_mm_xor_si128(c2[368],simde_mm_xor_si128(c2[288],simde_mm_xor_si128(c2[211],simde_mm_xor_si128(c2[1179],simde_mm_xor_si128(c2[1099],simde_mm_xor_si128(c2[617],simde_mm_xor_si128(c2[1018],simde_mm_xor_si128(c2[224],simde_mm_xor_si128(c2[144],simde_mm_xor_si128(c2[145],simde_mm_xor_si128(c2[307],simde_mm_xor_si128(c2[1274],simde_mm_xor_si128(c2[1194],simde_mm_xor_si128(c2[1193],simde_mm_xor_si128(c2[1113],c2[154]))))))))))))))))))))))))))))))))))))))))));
+
+//row: 40
+     d2[160]=simde_mm_xor_si128(c2[241],simde_mm_xor_si128(c2[0],simde_mm_xor_si128(c2[83],simde_mm_xor_si128(c2[1121],simde_mm_xor_si128(c2[881],simde_mm_xor_si128(c2[640],simde_mm_xor_si128(c2[1211],simde_mm_xor_si128(c2[970],simde_mm_xor_si128(c2[971],simde_mm_xor_si128(c2[730],simde_mm_xor_si128(c2[8],simde_mm_xor_si128(c2[169],simde_mm_xor_si128(c2[1211],simde_mm_xor_si128(c2[497],simde_mm_xor_si128(c2[256],simde_mm_xor_si128(c2[179],simde_mm_xor_si128(c2[336],simde_mm_xor_si128(c2[99],simde_mm_xor_si128(c2[16],simde_mm_xor_si128(c2[346],simde_mm_xor_si128(c2[105],simde_mm_xor_si128(c2[27],simde_mm_xor_si128(c2[1065],simde_mm_xor_si128(c2[507],simde_mm_xor_si128(c2[266],simde_mm_xor_si128(c2[592],simde_mm_xor_si128(c2[355],simde_mm_xor_si128(c2[1155],simde_mm_xor_si128(c2[914],simde_mm_xor_si128(c2[513],simde_mm_xor_si128(c2[674],simde_mm_xor_si128(c2[433],simde_mm_xor_si128(c2[600],simde_mm_xor_si128(c2[363],simde_mm_xor_si128(c2[42],simde_mm_xor_si128(c2[203],simde_mm_xor_si128(c2[1241],simde_mm_xor_si128(c2[608],simde_mm_xor_si128(c2[371],simde_mm_xor_si128(c2[1010],simde_mm_xor_si128(c2[769],simde_mm_xor_si128(c2[768],simde_mm_xor_si128(c2[929],simde_mm_xor_si128(c2[688],simde_mm_xor_si128(c2[538],simde_mm_xor_si128(c2[297],simde_mm_xor_si128(c2[1178],simde_mm_xor_si128(c2[56],simde_mm_xor_si128(c2[1098],simde_mm_xor_si128(c2[866],simde_mm_xor_si128(c2[625],simde_mm_xor_si128(c2[867],simde_mm_xor_si128(c2[626],simde_mm_xor_si128(c2[864],simde_mm_xor_si128(c2[1025],simde_mm_xor_si128(c2[784],simde_mm_xor_si128(c2[633],simde_mm_xor_si128(c2[392],simde_mm_xor_si128(c2[552],simde_mm_xor_si128(c2[315],simde_mm_xor_si128(c2[715],simde_mm_xor_si128(c2[872],c2[635]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 41
+     d2[164]=simde_mm_xor_si128(c2[1203],simde_mm_xor_si128(c2[1123],simde_mm_xor_si128(c2[961],simde_mm_xor_si128(c2[480],simde_mm_xor_si128(c2[890],simde_mm_xor_si128(c2[810],simde_mm_xor_si128(c2[570],simde_mm_xor_si128(c2[1051],simde_mm_xor_si128(c2[88],simde_mm_xor_si128(c2[176],simde_mm_xor_si128(c2[96],simde_mm_xor_si128(c2[1218],simde_mm_xor_si128(c2[25],simde_mm_xor_si128(c2[1224],simde_mm_xor_si128(c2[905],simde_mm_xor_si128(c2[106],simde_mm_xor_si128(c2[195],simde_mm_xor_si128(c2[754],simde_mm_xor_si128(c2[273],simde_mm_xor_si128(c2[203],simde_mm_xor_si128(c2[1081],simde_mm_xor_si128(c2[442],simde_mm_xor_si128(c2[291],simde_mm_xor_si128(c2[211],simde_mm_xor_si128(c2[609],simde_mm_xor_si128(c2[528],simde_mm_xor_si128(c2[137],simde_mm_xor_si128(c2[938],simde_mm_xor_si128(c2[465],simde_mm_xor_si128(c2[466],simde_mm_xor_si128(c2[624],simde_mm_xor_si128(c2[312],simde_mm_xor_si128(c2[232],simde_mm_xor_si128(c2[155],c2[475]))))))))))))))))))))))))))))))))));
+  }
+}
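
The hunks above and below follow one mechanical pattern: each generated encoder row is an XOR reduction over a fixed set of circulant offsets, and the patch only renames the intrinsics to their SIMDE equivalents (`_mm_xor_si128` -> `simde_mm_xor_si128` in the wider-Zc files above, `__m64`/`_mm_xor_si64` -> `simde__m64`/`simde_mm_xor_si64` in byte-oriented files such as the one below), so the same generated tables build natively on x86 and through SIMDE emulation (NEON on ARM) without any per-architecture rewrite. Below is a minimal sketch of the pattern, assuming only that the SIMDE headers are on the include path (the OAI sources pull them in through PHY/sse_intrin.h); the buffer contents are made-up illustration values, not taken from the encoder.

#include <stdint.h>
#include <stdio.h>
#include <string.h>
#include <simde/x86/mmx.h>   /* simde__m64,   simde_mm_xor_si64  */
#include <simde/x86/sse2.h>  /* simde__m128i, simde_mm_xor_si128 */

int main(void) {
  /* 64-bit lanes, as in ldpc_BG2_Zc72_byte.c below: the byte buffer is
     reinterpreted as simde__m64 (mirroring the cast style of the
     generated files) and XOR-reduced lane by lane. */
  uint8_t c[16], d[8];
  for (int i = 0; i < 16; i++) c[i] = (uint8_t)(i * 17); /* arbitrary data */
  simde__m64 *c2 = (simde__m64 *)c;
  simde__m64 r64 = simde_mm_xor_si64(c2[0], c2[1]);
  memcpy(d, &r64, sizeof d);
  simde_mm_empty(); /* follows MMX usage rules when compiled natively */

  /* 128-bit lanes, as in the wider-Zc files above: same shape with
     simde__m128i and simde_mm_xor_si128. */
  uint8_t e[32], out[16];
  for (int i = 0; i < 32; i++) e[i] = (uint8_t)(255 - i);
  simde__m128i v0 = simde_mm_loadu_si128((simde__m128i *)&e[0]);
  simde__m128i v1 = simde_mm_loadu_si128((simde__m128i *)&e[16]);
  simde_mm_storeu_si128((simde__m128i *)out, simde_mm_xor_si128(v0, v1));

  printf("d[0]=%02x out[0]=%02x\n", d[0], out[0]); /* 88 and 10 here */
  return 0;
}

Because the rename is purely one-to-one, the per-row diffs that follow are textual substitutions only; the offset tables and XOR structure of the generated code are unchanged.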
diff --git a/openair1/PHY/CODING/nrLDPC_encoder/ldpc_BG2_Zc72_byte.c b/openair1/PHY/CODING/nrLDPC_encoder/ldpc_BG2_Zc72_byte.c
index 4d2e586106d5324bdfd93901bb3ca1318827aa26..20db86d53a1789f3609d487a0cbb52569c716ad5 100644
--- a/openair1/PHY/CODING/nrLDPC_encoder/ldpc_BG2_Zc72_byte.c
+++ b/openair1/PHY/CODING/nrLDPC_encoder/ldpc_BG2_Zc72_byte.c
@@ -1,9 +1,8 @@
 #include "PHY/sse_intrin.h"
 // generated code for Zc=72, byte encoding
 static inline __attribute__ ((no_sanitize("address", "undefined"))) void ldpc_BG2_Zc72_byte(uint8_t *c,uint8_t *d) {
-  __m64 *csimd=(__m64 *)c,*dsimd=(__m64 *)d;
-
-  __m64 *c2,*d2;
+  simde__m64 *csimd=(simde__m64 *)c,*dsimd=(simde__m64 *)d;
+  simde__m64 *c2,*d2;
 
   int i2;
   for (i2=0; i2<9; i2++) {
@@ -11,129 +10,129 @@ static inline __attribute__ ((no_sanitize("address", "undefined"))) void ldpc_BG
      d2=&dsimd[i2];
 
 //row: 0
-     d2[0]=_mm_xor_si64(c2[360],_mm_xor_si64(c2[362],_mm_xor_si64(c2[1082],_mm_xor_si64(c2[201],_mm_xor_si64(c2[202],_mm_xor_si64(c2[1283],_mm_xor_si64(c2[762],_mm_xor_si64(c2[42],_mm_xor_si64(c2[418],_mm_xor_si64(c2[956],_mm_xor_si64(c2[596],_mm_xor_si64(c2[1335],_mm_xor_si64(c2[976],_mm_xor_si64(c2[974],_mm_xor_si64(c2[278],_mm_xor_si64(c2[630],_mm_xor_si64(c2[473],_mm_xor_si64(c2[649],_mm_xor_si64(c2[108],_mm_xor_si64(c2[309],_mm_xor_si64(c2[132],_mm_xor_si64(c2[872],_mm_xor_si64(c2[1228],_mm_xor_si64(c2[1410],_mm_xor_si64(c2[1248],_mm_xor_si64(c2[348],c2[1242]))))))))))))))))))))))))));
+     d2[0]=simde_mm_xor_si64(c2[360],simde_mm_xor_si64(c2[362],simde_mm_xor_si64(c2[1082],simde_mm_xor_si64(c2[201],simde_mm_xor_si64(c2[202],simde_mm_xor_si64(c2[1283],simde_mm_xor_si64(c2[762],simde_mm_xor_si64(c2[42],simde_mm_xor_si64(c2[418],simde_mm_xor_si64(c2[956],simde_mm_xor_si64(c2[596],simde_mm_xor_si64(c2[1335],simde_mm_xor_si64(c2[976],simde_mm_xor_si64(c2[974],simde_mm_xor_si64(c2[278],simde_mm_xor_si64(c2[630],simde_mm_xor_si64(c2[473],simde_mm_xor_si64(c2[649],simde_mm_xor_si64(c2[108],simde_mm_xor_si64(c2[309],simde_mm_xor_si64(c2[132],simde_mm_xor_si64(c2[872],simde_mm_xor_si64(c2[1228],simde_mm_xor_si64(c2[1410],simde_mm_xor_si64(c2[1248],simde_mm_xor_si64(c2[348],c2[1242]))))))))))))))))))))))))));
 
 //row: 1
-     d2[9]=_mm_xor_si64(c2[540],_mm_xor_si64(c2[360],_mm_xor_si64(c2[362],_mm_xor_si64(c2[1082],_mm_xor_si64(c2[381],_mm_xor_si64(c2[201],_mm_xor_si64(c2[202],_mm_xor_si64(c2[1283],_mm_xor_si64(c2[942],_mm_xor_si64(c2[762],_mm_xor_si64(c2[42],_mm_xor_si64(c2[598],_mm_xor_si64(c2[418],_mm_xor_si64(c2[956],_mm_xor_si64(c2[596],_mm_xor_si64(c2[1335],_mm_xor_si64(c2[976],_mm_xor_si64(c2[974],_mm_xor_si64(c2[278],_mm_xor_si64(c2[630],_mm_xor_si64(c2[653],_mm_xor_si64(c2[473],_mm_xor_si64(c2[649],_mm_xor_si64(c2[108],_mm_xor_si64(c2[309],_mm_xor_si64(c2[132],_mm_xor_si64(c2[872],_mm_xor_si64(c2[1228],_mm_xor_si64(c2[1410],_mm_xor_si64(c2[1428],_mm_xor_si64(c2[1248],_mm_xor_si64(c2[348],c2[1242]))))))))))))))))))))))))))))))));
+     d2[9]=simde_mm_xor_si64(c2[540],simde_mm_xor_si64(c2[360],simde_mm_xor_si64(c2[362],simde_mm_xor_si64(c2[1082],simde_mm_xor_si64(c2[381],simde_mm_xor_si64(c2[201],simde_mm_xor_si64(c2[202],simde_mm_xor_si64(c2[1283],simde_mm_xor_si64(c2[942],simde_mm_xor_si64(c2[762],simde_mm_xor_si64(c2[42],simde_mm_xor_si64(c2[598],simde_mm_xor_si64(c2[418],simde_mm_xor_si64(c2[956],simde_mm_xor_si64(c2[596],simde_mm_xor_si64(c2[1335],simde_mm_xor_si64(c2[976],simde_mm_xor_si64(c2[974],simde_mm_xor_si64(c2[278],simde_mm_xor_si64(c2[630],simde_mm_xor_si64(c2[653],simde_mm_xor_si64(c2[473],simde_mm_xor_si64(c2[649],simde_mm_xor_si64(c2[108],simde_mm_xor_si64(c2[309],simde_mm_xor_si64(c2[132],simde_mm_xor_si64(c2[872],simde_mm_xor_si64(c2[1228],simde_mm_xor_si64(c2[1410],simde_mm_xor_si64(c2[1428],simde_mm_xor_si64(c2[1248],simde_mm_xor_si64(c2[348],c2[1242]))))))))))))))))))))))))))))))));
 
 //row: 2
-     d2[18]=_mm_xor_si64(c2[540],_mm_xor_si64(c2[360],_mm_xor_si64(c2[542],_mm_xor_si64(c2[362],_mm_xor_si64(c2[1082],_mm_xor_si64(c2[381],_mm_xor_si64(c2[201],_mm_xor_si64(c2[202],_mm_xor_si64(c2[1283],_mm_xor_si64(c2[942],_mm_xor_si64(c2[762],_mm_xor_si64(c2[42],_mm_xor_si64(c2[598],_mm_xor_si64(c2[418],_mm_xor_si64(c2[1136],_mm_xor_si64(c2[956],_mm_xor_si64(c2[596],_mm_xor_si64(c2[76],_mm_xor_si64(c2[1335],_mm_xor_si64(c2[976],_mm_xor_si64(c2[974],_mm_xor_si64(c2[458],_mm_xor_si64(c2[278],_mm_xor_si64(c2[630],_mm_xor_si64(c2[653],_mm_xor_si64(c2[473],_mm_xor_si64(c2[829],_mm_xor_si64(c2[649],_mm_xor_si64(c2[108],_mm_xor_si64(c2[489],_mm_xor_si64(c2[309],_mm_xor_si64(c2[132],_mm_xor_si64(c2[1052],_mm_xor_si64(c2[872],_mm_xor_si64(c2[1228],_mm_xor_si64(c2[1410],_mm_xor_si64(c2[1428],_mm_xor_si64(c2[1248],_mm_xor_si64(c2[528],_mm_xor_si64(c2[348],c2[1242]))))))))))))))))))))))))))))))))))))))));
+     d2[18]=simde_mm_xor_si64(c2[540],simde_mm_xor_si64(c2[360],simde_mm_xor_si64(c2[542],simde_mm_xor_si64(c2[362],simde_mm_xor_si64(c2[1082],simde_mm_xor_si64(c2[381],simde_mm_xor_si64(c2[201],simde_mm_xor_si64(c2[202],simde_mm_xor_si64(c2[1283],simde_mm_xor_si64(c2[942],simde_mm_xor_si64(c2[762],simde_mm_xor_si64(c2[42],simde_mm_xor_si64(c2[598],simde_mm_xor_si64(c2[418],simde_mm_xor_si64(c2[1136],simde_mm_xor_si64(c2[956],simde_mm_xor_si64(c2[596],simde_mm_xor_si64(c2[76],simde_mm_xor_si64(c2[1335],simde_mm_xor_si64(c2[976],simde_mm_xor_si64(c2[974],simde_mm_xor_si64(c2[458],simde_mm_xor_si64(c2[278],simde_mm_xor_si64(c2[630],simde_mm_xor_si64(c2[653],simde_mm_xor_si64(c2[473],simde_mm_xor_si64(c2[829],simde_mm_xor_si64(c2[649],simde_mm_xor_si64(c2[108],simde_mm_xor_si64(c2[489],simde_mm_xor_si64(c2[309],simde_mm_xor_si64(c2[132],simde_mm_xor_si64(c2[1052],simde_mm_xor_si64(c2[872],simde_mm_xor_si64(c2[1228],simde_mm_xor_si64(c2[1410],simde_mm_xor_si64(c2[1428],simde_mm_xor_si64(c2[1248],simde_mm_xor_si64(c2[528],simde_mm_xor_si64(c2[348],c2[1242]))))))))))))))))))))))))))))))))))))))));
 
 //row: 3
-     d2[27]=_mm_xor_si64(c2[360],_mm_xor_si64(c2[362],_mm_xor_si64(c2[1082],_mm_xor_si64(c2[201],_mm_xor_si64(c2[202],_mm_xor_si64(c2[24],_mm_xor_si64(c2[1283],_mm_xor_si64(c2[762],_mm_xor_si64(c2[222],_mm_xor_si64(c2[42],_mm_xor_si64(c2[418],_mm_xor_si64(c2[956],_mm_xor_si64(c2[596],_mm_xor_si64(c2[1335],_mm_xor_si64(c2[976],_mm_xor_si64(c2[1154],_mm_xor_si64(c2[974],_mm_xor_si64(c2[278],_mm_xor_si64(c2[810],_mm_xor_si64(c2[630],_mm_xor_si64(c2[473],_mm_xor_si64(c2[649],_mm_xor_si64(c2[288],_mm_xor_si64(c2[108],_mm_xor_si64(c2[309],_mm_xor_si64(c2[312],_mm_xor_si64(c2[132],_mm_xor_si64(c2[872],_mm_xor_si64(c2[1228],_mm_xor_si64(c2[151],_mm_xor_si64(c2[1410],_mm_xor_si64(c2[1248],_mm_xor_si64(c2[348],_mm_xor_si64(c2[1422],c2[1242]))))))))))))))))))))))))))))))))));
+     d2[27]=simde_mm_xor_si64(c2[360],simde_mm_xor_si64(c2[362],simde_mm_xor_si64(c2[1082],simde_mm_xor_si64(c2[201],simde_mm_xor_si64(c2[202],simde_mm_xor_si64(c2[24],simde_mm_xor_si64(c2[1283],simde_mm_xor_si64(c2[762],simde_mm_xor_si64(c2[222],simde_mm_xor_si64(c2[42],simde_mm_xor_si64(c2[418],simde_mm_xor_si64(c2[956],simde_mm_xor_si64(c2[596],simde_mm_xor_si64(c2[1335],simde_mm_xor_si64(c2[976],simde_mm_xor_si64(c2[1154],simde_mm_xor_si64(c2[974],simde_mm_xor_si64(c2[278],simde_mm_xor_si64(c2[810],simde_mm_xor_si64(c2[630],simde_mm_xor_si64(c2[473],simde_mm_xor_si64(c2[649],simde_mm_xor_si64(c2[288],simde_mm_xor_si64(c2[108],simde_mm_xor_si64(c2[309],simde_mm_xor_si64(c2[312],simde_mm_xor_si64(c2[132],simde_mm_xor_si64(c2[872],simde_mm_xor_si64(c2[1228],simde_mm_xor_si64(c2[151],simde_mm_xor_si64(c2[1410],simde_mm_xor_si64(c2[1248],simde_mm_xor_si64(c2[348],simde_mm_xor_si64(c2[1422],c2[1242]))))))))))))))))))))))))))))))))));
 
 //row: 4
-     d2[36]=_mm_xor_si64(c2[1086],_mm_xor_si64(c2[906],_mm_xor_si64(c2[908],_mm_xor_si64(c2[180],_mm_xor_si64(c2[365],_mm_xor_si64(c2[918],_mm_xor_si64(c2[738],_mm_xor_si64(c2[739],_mm_xor_si64(c2[381],_mm_xor_si64(c2[21],_mm_xor_si64(c2[40],_mm_xor_si64(c2[1299],_mm_xor_si64(c2[579],_mm_xor_si64(c2[1135],_mm_xor_si64(c2[955],_mm_xor_si64(c2[54],_mm_xor_si64(c2[1142],_mm_xor_si64(c2[433],_mm_xor_si64(c2[74],_mm_xor_si64(c2[72],_mm_xor_si64(c2[815],_mm_xor_si64(c2[1176],_mm_xor_si64(c2[1190],_mm_xor_si64(c2[1010],_mm_xor_si64(c2[1195],_mm_xor_si64(c2[654],_mm_xor_si64(c2[846],_mm_xor_si64(c2[669],_mm_xor_si64(c2[1409],_mm_xor_si64(c2[326],_mm_xor_si64(c2[508],_mm_xor_si64(c2[526],_mm_xor_si64(c2[346],_mm_xor_si64(c2[885],c2[349]))))))))))))))))))))))))))))))))));
+     d2[36]=simde_mm_xor_si64(c2[1086],simde_mm_xor_si64(c2[906],simde_mm_xor_si64(c2[908],simde_mm_xor_si64(c2[180],simde_mm_xor_si64(c2[365],simde_mm_xor_si64(c2[918],simde_mm_xor_si64(c2[738],simde_mm_xor_si64(c2[739],simde_mm_xor_si64(c2[381],simde_mm_xor_si64(c2[21],simde_mm_xor_si64(c2[40],simde_mm_xor_si64(c2[1299],simde_mm_xor_si64(c2[579],simde_mm_xor_si64(c2[1135],simde_mm_xor_si64(c2[955],simde_mm_xor_si64(c2[54],simde_mm_xor_si64(c2[1142],simde_mm_xor_si64(c2[433],simde_mm_xor_si64(c2[74],simde_mm_xor_si64(c2[72],simde_mm_xor_si64(c2[815],simde_mm_xor_si64(c2[1176],simde_mm_xor_si64(c2[1190],simde_mm_xor_si64(c2[1010],simde_mm_xor_si64(c2[1195],simde_mm_xor_si64(c2[654],simde_mm_xor_si64(c2[846],simde_mm_xor_si64(c2[669],simde_mm_xor_si64(c2[1409],simde_mm_xor_si64(c2[326],simde_mm_xor_si64(c2[508],simde_mm_xor_si64(c2[526],simde_mm_xor_si64(c2[346],simde_mm_xor_si64(c2[885],c2[349]))))))))))))))))))))))))))))))))));
 
 //row: 5
-     d2[45]=_mm_xor_si64(c2[360],_mm_xor_si64(c2[180],_mm_xor_si64(c2[182],_mm_xor_si64(c2[902],_mm_xor_si64(c2[5],_mm_xor_si64(c2[201],_mm_xor_si64(c2[21],_mm_xor_si64(c2[22],_mm_xor_si64(c2[1103],_mm_xor_si64(c2[746],_mm_xor_si64(c2[762],_mm_xor_si64(c2[582],_mm_xor_si64(c2[1301],_mm_xor_si64(c2[418],_mm_xor_si64(c2[238],_mm_xor_si64(c2[776],_mm_xor_si64(c2[416],_mm_xor_si64(c2[1155],_mm_xor_si64(c2[796],_mm_xor_si64(c2[794],_mm_xor_si64(c2[98],_mm_xor_si64(c2[450],_mm_xor_si64(c2[811],_mm_xor_si64(c2[473],_mm_xor_si64(c2[293],_mm_xor_si64(c2[469],_mm_xor_si64(c2[1376],_mm_xor_si64(c2[129],_mm_xor_si64(c2[1391],_mm_xor_si64(c2[314],_mm_xor_si64(c2[692],_mm_xor_si64(c2[1048],_mm_xor_si64(c2[1230],_mm_xor_si64(c2[1248],_mm_xor_si64(c2[1068],_mm_xor_si64(c2[168],c2[1062]))))))))))))))))))))))))))))))))))));
+     d2[45]=simde_mm_xor_si64(c2[360],simde_mm_xor_si64(c2[180],simde_mm_xor_si64(c2[182],simde_mm_xor_si64(c2[902],simde_mm_xor_si64(c2[5],simde_mm_xor_si64(c2[201],simde_mm_xor_si64(c2[21],simde_mm_xor_si64(c2[22],simde_mm_xor_si64(c2[1103],simde_mm_xor_si64(c2[746],simde_mm_xor_si64(c2[762],simde_mm_xor_si64(c2[582],simde_mm_xor_si64(c2[1301],simde_mm_xor_si64(c2[418],simde_mm_xor_si64(c2[238],simde_mm_xor_si64(c2[776],simde_mm_xor_si64(c2[416],simde_mm_xor_si64(c2[1155],simde_mm_xor_si64(c2[796],simde_mm_xor_si64(c2[794],simde_mm_xor_si64(c2[98],simde_mm_xor_si64(c2[450],simde_mm_xor_si64(c2[811],simde_mm_xor_si64(c2[473],simde_mm_xor_si64(c2[293],simde_mm_xor_si64(c2[469],simde_mm_xor_si64(c2[1376],simde_mm_xor_si64(c2[129],simde_mm_xor_si64(c2[1391],simde_mm_xor_si64(c2[314],simde_mm_xor_si64(c2[692],simde_mm_xor_si64(c2[1048],simde_mm_xor_si64(c2[1230],simde_mm_xor_si64(c2[1248],simde_mm_xor_si64(c2[1068],simde_mm_xor_si64(c2[168],c2[1062]))))))))))))))))))))))))))))))))))));
 
 //row: 6
-     d2[54]=_mm_xor_si64(c2[1088],_mm_xor_si64(c2[908],_mm_xor_si64(c2[901],_mm_xor_si64(c2[182],_mm_xor_si64(c2[904],_mm_xor_si64(c2[920],_mm_xor_si64(c2[740],_mm_xor_si64(c2[741],_mm_xor_si64(c2[383],_mm_xor_si64(c2[42],_mm_xor_si64(c2[1301],_mm_xor_si64(c2[581],_mm_xor_si64(c2[1137],_mm_xor_si64(c2[957],_mm_xor_si64(c2[56],_mm_xor_si64(c2[1135],_mm_xor_si64(c2[435],_mm_xor_si64(c2[76],_mm_xor_si64(c2[74],_mm_xor_si64(c2[817],_mm_xor_si64(c2[1178],_mm_xor_si64(c2[1351],_mm_xor_si64(c2[1192],_mm_xor_si64(c2[1012],_mm_xor_si64(c2[1188],_mm_xor_si64(c2[656],_mm_xor_si64(c2[848],_mm_xor_si64(c2[671],_mm_xor_si64(c2[670],_mm_xor_si64(c2[1411],_mm_xor_si64(c2[328],_mm_xor_si64(c2[510],_mm_xor_si64(c2[528],_mm_xor_si64(c2[348],_mm_xor_si64(c2[887],_mm_xor_si64(c2[342],c2[1069]))))))))))))))))))))))))))))))))))));
+     d2[54]=simde_mm_xor_si64(c2[1088],simde_mm_xor_si64(c2[908],simde_mm_xor_si64(c2[901],simde_mm_xor_si64(c2[182],simde_mm_xor_si64(c2[904],simde_mm_xor_si64(c2[920],simde_mm_xor_si64(c2[740],simde_mm_xor_si64(c2[741],simde_mm_xor_si64(c2[383],simde_mm_xor_si64(c2[42],simde_mm_xor_si64(c2[1301],simde_mm_xor_si64(c2[581],simde_mm_xor_si64(c2[1137],simde_mm_xor_si64(c2[957],simde_mm_xor_si64(c2[56],simde_mm_xor_si64(c2[1135],simde_mm_xor_si64(c2[435],simde_mm_xor_si64(c2[76],simde_mm_xor_si64(c2[74],simde_mm_xor_si64(c2[817],simde_mm_xor_si64(c2[1178],simde_mm_xor_si64(c2[1351],simde_mm_xor_si64(c2[1192],simde_mm_xor_si64(c2[1012],simde_mm_xor_si64(c2[1188],simde_mm_xor_si64(c2[656],simde_mm_xor_si64(c2[848],simde_mm_xor_si64(c2[671],simde_mm_xor_si64(c2[670],simde_mm_xor_si64(c2[1411],simde_mm_xor_si64(c2[328],simde_mm_xor_si64(c2[510],simde_mm_xor_si64(c2[528],simde_mm_xor_si64(c2[348],simde_mm_xor_si64(c2[887],simde_mm_xor_si64(c2[342],c2[1069]))))))))))))))))))))))))))))))))))));
 
 //row: 7
-     d2[63]=_mm_xor_si64(c2[1264],_mm_xor_si64(c2[1084],_mm_xor_si64(c2[548],_mm_xor_si64(c2[1086],_mm_xor_si64(c2[541],_mm_xor_si64(c2[367],_mm_xor_si64(c2[1261],_mm_xor_si64(c2[1105],_mm_xor_si64(c2[925],_mm_xor_si64(c2[380],_mm_xor_si64(c2[926],_mm_xor_si64(c2[381],_mm_xor_si64(c2[559],_mm_xor_si64(c2[203],_mm_xor_si64(c2[23],_mm_xor_si64(c2[201],_mm_xor_si64(c2[218],_mm_xor_si64(c2[38],_mm_xor_si64(c2[941],_mm_xor_si64(c2[757],_mm_xor_si64(c2[401],_mm_xor_si64(c2[221],_mm_xor_si64(c2[1322],_mm_xor_si64(c2[1142],_mm_xor_si64(c2[597],_mm_xor_si64(c2[241],_mm_xor_si64(c2[1135],_mm_xor_si64(c2[1320],_mm_xor_si64(c2[775],_mm_xor_si64(c2[620],_mm_xor_si64(c2[75],_mm_xor_si64(c2[252],_mm_xor_si64(c2[1155],_mm_xor_si64(c2[259],_mm_xor_si64(c2[1333],_mm_xor_si64(c2[1153],_mm_xor_si64(c2[993],_mm_xor_si64(c2[457],_mm_xor_si64(c2[1354],_mm_xor_si64(c2[998],_mm_xor_si64(c2[818],_mm_xor_si64(c2[1357],_mm_xor_si64(c2[1368],_mm_xor_si64(c2[1188],_mm_xor_si64(c2[652],_mm_xor_si64(c2[1373],_mm_xor_si64(c2[828],_mm_xor_si64(c2[832],_mm_xor_si64(c2[476],_mm_xor_si64(c2[296],_mm_xor_si64(c2[1033],_mm_xor_si64(c2[488],_mm_xor_si64(c2[847],_mm_xor_si64(c2[491],_mm_xor_si64(c2[311],_mm_xor_si64(c2[670],_mm_xor_si64(c2[148],_mm_xor_si64(c2[1051],_mm_xor_si64(c2[504],_mm_xor_si64(c2[1407],_mm_xor_si64(c2[686],_mm_xor_si64(c2[330],_mm_xor_si64(c2[150],_mm_xor_si64(c2[704],_mm_xor_si64(c2[524],_mm_xor_si64(c2[1427],_mm_xor_si64(c2[1063],_mm_xor_si64(c2[527],_mm_xor_si64(c2[527],_mm_xor_si64(c2[162],c2[1430]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+     d2[63]=simde_mm_xor_si64(c2[1264],simde_mm_xor_si64(c2[1084],simde_mm_xor_si64(c2[548],simde_mm_xor_si64(c2[1086],simde_mm_xor_si64(c2[541],simde_mm_xor_si64(c2[367],simde_mm_xor_si64(c2[1261],simde_mm_xor_si64(c2[1105],simde_mm_xor_si64(c2[925],simde_mm_xor_si64(c2[380],simde_mm_xor_si64(c2[926],simde_mm_xor_si64(c2[381],simde_mm_xor_si64(c2[559],simde_mm_xor_si64(c2[203],simde_mm_xor_si64(c2[23],simde_mm_xor_si64(c2[201],simde_mm_xor_si64(c2[218],simde_mm_xor_si64(c2[38],simde_mm_xor_si64(c2[941],simde_mm_xor_si64(c2[757],simde_mm_xor_si64(c2[401],simde_mm_xor_si64(c2[221],simde_mm_xor_si64(c2[1322],simde_mm_xor_si64(c2[1142],simde_mm_xor_si64(c2[597],simde_mm_xor_si64(c2[241],simde_mm_xor_si64(c2[1135],simde_mm_xor_si64(c2[1320],simde_mm_xor_si64(c2[775],simde_mm_xor_si64(c2[620],simde_mm_xor_si64(c2[75],simde_mm_xor_si64(c2[252],simde_mm_xor_si64(c2[1155],simde_mm_xor_si64(c2[259],simde_mm_xor_si64(c2[1333],simde_mm_xor_si64(c2[1153],simde_mm_xor_si64(c2[993],simde_mm_xor_si64(c2[457],simde_mm_xor_si64(c2[1354],simde_mm_xor_si64(c2[998],simde_mm_xor_si64(c2[818],simde_mm_xor_si64(c2[1357],simde_mm_xor_si64(c2[1368],simde_mm_xor_si64(c2[1188],simde_mm_xor_si64(c2[652],simde_mm_xor_si64(c2[1373],simde_mm_xor_si64(c2[828],simde_mm_xor_si64(c2[832],simde_mm_xor_si64(c2[476],simde_mm_xor_si64(c2[296],simde_mm_xor_si64(c2[1033],simde_mm_xor_si64(c2[488],simde_mm_xor_si64(c2[847],simde_mm_xor_si64(c2[491],simde_mm_xor_si64(c2[311],simde_mm_xor_si64(c2[670],simde_mm_xor_si64(c2[148],simde_mm_xor_si64(c2[1051],simde_mm_xor_si64(c2[504],simde_mm_xor_si64(c2[1407],simde_mm_xor_si64(c2[686],simde_mm_xor_si64(c2[330],simde_mm_xor_si64(c2[150],simde_mm_xor_si64(c2[704],simde_mm_xor_si64(c2[524],simde_mm_xor_si64(c2[1427],simde_mm_xor_si64(c2[1063],simde_mm_xor_si64(c2[527],simde_mm_xor_si64(c2[527],simde_mm_xor_si64(c2[162],c2[1430]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
 
 //row: 8
-     d2[72]=_mm_xor_si64(c2[542],_mm_xor_si64(c2[362],_mm_xor_si64(c2[544],_mm_xor_si64(c2[364],_mm_xor_si64(c2[1084],_mm_xor_si64(c2[1088],_mm_xor_si64(c2[383],_mm_xor_si64(c2[203],_mm_xor_si64(c2[204],_mm_xor_si64(c2[1285],_mm_xor_si64(c2[926],_mm_xor_si64(c2[944],_mm_xor_si64(c2[764],_mm_xor_si64(c2[44],_mm_xor_si64(c2[600],_mm_xor_si64(c2[420],_mm_xor_si64(c2[1138],_mm_xor_si64(c2[958],_mm_xor_si64(c2[598],_mm_xor_si64(c2[78],_mm_xor_si64(c2[1337],_mm_xor_si64(c2[978],_mm_xor_si64(c2[976],_mm_xor_si64(c2[451],_mm_xor_si64(c2[271],_mm_xor_si64(c2[632],_mm_xor_si64(c2[655],_mm_xor_si64(c2[475],_mm_xor_si64(c2[831],_mm_xor_si64(c2[651],_mm_xor_si64(c2[110],_mm_xor_si64(c2[491],_mm_xor_si64(c2[311],_mm_xor_si64(c2[134],_mm_xor_si64(c2[1045],_mm_xor_si64(c2[865],_mm_xor_si64(c2[1230],_mm_xor_si64(c2[1412],_mm_xor_si64(c2[1430],_mm_xor_si64(c2[1250],_mm_xor_si64(c2[530],_mm_xor_si64(c2[350],c2[1244]))))))))))))))))))))))))))))))))))))))))));
+     d2[72]=simde_mm_xor_si64(c2[542],simde_mm_xor_si64(c2[362],simde_mm_xor_si64(c2[544],simde_mm_xor_si64(c2[364],simde_mm_xor_si64(c2[1084],simde_mm_xor_si64(c2[1088],simde_mm_xor_si64(c2[383],simde_mm_xor_si64(c2[203],simde_mm_xor_si64(c2[204],simde_mm_xor_si64(c2[1285],simde_mm_xor_si64(c2[926],simde_mm_xor_si64(c2[944],simde_mm_xor_si64(c2[764],simde_mm_xor_si64(c2[44],simde_mm_xor_si64(c2[600],simde_mm_xor_si64(c2[420],simde_mm_xor_si64(c2[1138],simde_mm_xor_si64(c2[958],simde_mm_xor_si64(c2[598],simde_mm_xor_si64(c2[78],simde_mm_xor_si64(c2[1337],simde_mm_xor_si64(c2[978],simde_mm_xor_si64(c2[976],simde_mm_xor_si64(c2[451],simde_mm_xor_si64(c2[271],simde_mm_xor_si64(c2[632],simde_mm_xor_si64(c2[655],simde_mm_xor_si64(c2[475],simde_mm_xor_si64(c2[831],simde_mm_xor_si64(c2[651],simde_mm_xor_si64(c2[110],simde_mm_xor_si64(c2[491],simde_mm_xor_si64(c2[311],simde_mm_xor_si64(c2[134],simde_mm_xor_si64(c2[1045],simde_mm_xor_si64(c2[865],simde_mm_xor_si64(c2[1230],simde_mm_xor_si64(c2[1412],simde_mm_xor_si64(c2[1430],simde_mm_xor_si64(c2[1250],simde_mm_xor_si64(c2[530],simde_mm_xor_si64(c2[350],c2[1244]))))))))))))))))))))))))))))))))))))))))));
 
 //row: 9
-     d2[81]=_mm_xor_si64(c2[363],_mm_xor_si64(c2[726],_mm_xor_si64(c2[546],_mm_xor_si64(c2[365],_mm_xor_si64(c2[548],_mm_xor_si64(c2[1085],_mm_xor_si64(c2[1268],_mm_xor_si64(c2[204],_mm_xor_si64(c2[558],_mm_xor_si64(c2[378],_mm_xor_si64(c2[205],_mm_xor_si64(c2[379],_mm_xor_si64(c2[1286],_mm_xor_si64(c2[21],_mm_xor_si64(c2[201],_mm_xor_si64(c2[756],_mm_xor_si64(c2[1119],_mm_xor_si64(c2[939],_mm_xor_si64(c2[36],_mm_xor_si64(c2[219],_mm_xor_si64(c2[421],_mm_xor_si64(c2[775],_mm_xor_si64(c2[595],_mm_xor_si64(c2[959],_mm_xor_si64(c2[1142],_mm_xor_si64(c2[599],_mm_xor_si64(c2[782],_mm_xor_si64(c2[1338],_mm_xor_si64(c2[73],_mm_xor_si64(c2[979],_mm_xor_si64(c2[1153],_mm_xor_si64(c2[977],_mm_xor_si64(c2[1160],_mm_xor_si64(c2[272],_mm_xor_si64(c2[455],_mm_xor_si64(c2[633],_mm_xor_si64(c2[816],_mm_xor_si64(c2[476],_mm_xor_si64(c2[830],_mm_xor_si64(c2[650],_mm_xor_si64(c2[652],_mm_xor_si64(c2[835],_mm_xor_si64(c2[111],_mm_xor_si64(c2[294],_mm_xor_si64(c2[312],_mm_xor_si64(c2[486],_mm_xor_si64(c2[126],_mm_xor_si64(c2[309],_mm_xor_si64(c2[866],_mm_xor_si64(c2[1049],_mm_xor_si64(c2[1231],_mm_xor_si64(c2[1405],_mm_xor_si64(c2[1404],_mm_xor_si64(c2[148],_mm_xor_si64(c2[149],_mm_xor_si64(c2[1242],_mm_xor_si64(c2[166],_mm_xor_si64(c2[1425],_mm_xor_si64(c2[342],_mm_xor_si64(c2[525],_mm_xor_si64(c2[1245],c2[1428])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+     d2[81]=simde_mm_xor_si64(c2[363],simde_mm_xor_si64(c2[726],simde_mm_xor_si64(c2[546],simde_mm_xor_si64(c2[365],simde_mm_xor_si64(c2[548],simde_mm_xor_si64(c2[1085],simde_mm_xor_si64(c2[1268],simde_mm_xor_si64(c2[204],simde_mm_xor_si64(c2[558],simde_mm_xor_si64(c2[378],simde_mm_xor_si64(c2[205],simde_mm_xor_si64(c2[379],simde_mm_xor_si64(c2[1286],simde_mm_xor_si64(c2[21],simde_mm_xor_si64(c2[201],simde_mm_xor_si64(c2[756],simde_mm_xor_si64(c2[1119],simde_mm_xor_si64(c2[939],simde_mm_xor_si64(c2[36],simde_mm_xor_si64(c2[219],simde_mm_xor_si64(c2[421],simde_mm_xor_si64(c2[775],simde_mm_xor_si64(c2[595],simde_mm_xor_si64(c2[959],simde_mm_xor_si64(c2[1142],simde_mm_xor_si64(c2[599],simde_mm_xor_si64(c2[782],simde_mm_xor_si64(c2[1338],simde_mm_xor_si64(c2[73],simde_mm_xor_si64(c2[979],simde_mm_xor_si64(c2[1153],simde_mm_xor_si64(c2[977],simde_mm_xor_si64(c2[1160],simde_mm_xor_si64(c2[272],simde_mm_xor_si64(c2[455],simde_mm_xor_si64(c2[633],simde_mm_xor_si64(c2[816],simde_mm_xor_si64(c2[476],simde_mm_xor_si64(c2[830],simde_mm_xor_si64(c2[650],simde_mm_xor_si64(c2[652],simde_mm_xor_si64(c2[835],simde_mm_xor_si64(c2[111],simde_mm_xor_si64(c2[294],simde_mm_xor_si64(c2[312],simde_mm_xor_si64(c2[486],simde_mm_xor_si64(c2[126],simde_mm_xor_si64(c2[309],simde_mm_xor_si64(c2[866],simde_mm_xor_si64(c2[1049],simde_mm_xor_si64(c2[1231],simde_mm_xor_si64(c2[1405],simde_mm_xor_si64(c2[1404],simde_mm_xor_si64(c2[148],simde_mm_xor_si64(c2[149],simde_mm_xor_si64(c2[1242],simde_mm_xor_si64(c2[166],simde_mm_xor_si64(c2[1425],simde_mm_xor_si64(c2[342],simde_mm_xor_si64(c2[525],simde_mm_xor_si64(c2[1245],c2[1428])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
 
 //row: 10
-     d2[90]=_mm_xor_si64(c2[1085],_mm_xor_si64(c2[203],_mm_xor_si64(c2[1011],c2[129])));
+     d2[90]=simde_mm_xor_si64(c2[1085],simde_mm_xor_si64(c2[203],simde_mm_xor_si64(c2[1011],c2[129])));
 
 //row: 11
-     d2[99]=_mm_xor_si64(c2[368],_mm_xor_si64(c2[361],_mm_xor_si64(c2[1081],_mm_xor_si64(c2[723],_mm_xor_si64(c2[200],_mm_xor_si64(c2[201],_mm_xor_si64(c2[23],_mm_xor_si64(c2[1282],_mm_xor_si64(c2[761],_mm_xor_si64(c2[221],_mm_xor_si64(c2[41],_mm_xor_si64(c2[417],_mm_xor_si64(c2[955],_mm_xor_si64(c2[595],_mm_xor_si64(c2[1334],_mm_xor_si64(c2[975],_mm_xor_si64(c2[1153],_mm_xor_si64(c2[973],_mm_xor_si64(c2[277],_mm_xor_si64(c2[818],_mm_xor_si64(c2[638],_mm_xor_si64(c2[472],_mm_xor_si64(c2[648],_mm_xor_si64(c2[296],_mm_xor_si64(c2[116],_mm_xor_si64(c2[308],_mm_xor_si64(c2[311],_mm_xor_si64(c2[131],_mm_xor_si64(c2[1209],_mm_xor_si64(c2[871],_mm_xor_si64(c2[1227],_mm_xor_si64(c2[150],_mm_xor_si64(c2[1409],_mm_xor_si64(c2[1247],_mm_xor_si64(c2[347],_mm_xor_si64(c2[1430],_mm_xor_si64(c2[1250],c2[887])))))))))))))))))))))))))))))))))))));
+     d2[99]=simde_mm_xor_si64(c2[368],simde_mm_xor_si64(c2[361],simde_mm_xor_si64(c2[1081],simde_mm_xor_si64(c2[723],simde_mm_xor_si64(c2[200],simde_mm_xor_si64(c2[201],simde_mm_xor_si64(c2[23],simde_mm_xor_si64(c2[1282],simde_mm_xor_si64(c2[761],simde_mm_xor_si64(c2[221],simde_mm_xor_si64(c2[41],simde_mm_xor_si64(c2[417],simde_mm_xor_si64(c2[955],simde_mm_xor_si64(c2[595],simde_mm_xor_si64(c2[1334],simde_mm_xor_si64(c2[975],simde_mm_xor_si64(c2[1153],simde_mm_xor_si64(c2[973],simde_mm_xor_si64(c2[277],simde_mm_xor_si64(c2[818],simde_mm_xor_si64(c2[638],simde_mm_xor_si64(c2[472],simde_mm_xor_si64(c2[648],simde_mm_xor_si64(c2[296],simde_mm_xor_si64(c2[116],simde_mm_xor_si64(c2[308],simde_mm_xor_si64(c2[311],simde_mm_xor_si64(c2[131],simde_mm_xor_si64(c2[1209],simde_mm_xor_si64(c2[871],simde_mm_xor_si64(c2[1227],simde_mm_xor_si64(c2[150],simde_mm_xor_si64(c2[1409],simde_mm_xor_si64(c2[1247],simde_mm_xor_si64(c2[347],simde_mm_xor_si64(c2[1430],simde_mm_xor_si64(c2[1250],c2[887])))))))))))))))))))))))))))))))))))));
 
 //row: 12
-     d2[108]=_mm_xor_si64(c2[1087],_mm_xor_si64(c2[907],_mm_xor_si64(c2[900],_mm_xor_si64(c2[181],_mm_xor_si64(c2[919],_mm_xor_si64(c2[739],_mm_xor_si64(c2[740],_mm_xor_si64(c2[382],_mm_xor_si64(c2[202],_mm_xor_si64(c2[41],_mm_xor_si64(c2[1300],_mm_xor_si64(c2[580],_mm_xor_si64(c2[1136],_mm_xor_si64(c2[956],_mm_xor_si64(c2[55],_mm_xor_si64(c2[1134],_mm_xor_si64(c2[420],_mm_xor_si64(c2[434],_mm_xor_si64(c2[75],_mm_xor_si64(c2[73],_mm_xor_si64(c2[816],_mm_xor_si64(c2[1177],_mm_xor_si64(c2[1191],_mm_xor_si64(c2[1011],_mm_xor_si64(c2[1196],_mm_xor_si64(c2[655],_mm_xor_si64(c2[847],_mm_xor_si64(c2[670],_mm_xor_si64(c2[1410],_mm_xor_si64(c2[327],_mm_xor_si64(c2[509],_mm_xor_si64(c2[527],_mm_xor_si64(c2[347],_mm_xor_si64(c2[886],c2[350]))))))))))))))))))))))))))))))))));
+     d2[108]=simde_mm_xor_si64(c2[1087],simde_mm_xor_si64(c2[907],simde_mm_xor_si64(c2[900],simde_mm_xor_si64(c2[181],simde_mm_xor_si64(c2[919],simde_mm_xor_si64(c2[739],simde_mm_xor_si64(c2[740],simde_mm_xor_si64(c2[382],simde_mm_xor_si64(c2[202],simde_mm_xor_si64(c2[41],simde_mm_xor_si64(c2[1300],simde_mm_xor_si64(c2[580],simde_mm_xor_si64(c2[1136],simde_mm_xor_si64(c2[956],simde_mm_xor_si64(c2[55],simde_mm_xor_si64(c2[1134],simde_mm_xor_si64(c2[420],simde_mm_xor_si64(c2[434],simde_mm_xor_si64(c2[75],simde_mm_xor_si64(c2[73],simde_mm_xor_si64(c2[816],simde_mm_xor_si64(c2[1177],simde_mm_xor_si64(c2[1191],simde_mm_xor_si64(c2[1011],simde_mm_xor_si64(c2[1196],simde_mm_xor_si64(c2[655],simde_mm_xor_si64(c2[847],simde_mm_xor_si64(c2[670],simde_mm_xor_si64(c2[1410],simde_mm_xor_si64(c2[327],simde_mm_xor_si64(c2[509],simde_mm_xor_si64(c2[527],simde_mm_xor_si64(c2[347],simde_mm_xor_si64(c2[886],c2[350]))))))))))))))))))))))))))))))))));
 
 //row: 13
-     d2[117]=_mm_xor_si64(c2[186],_mm_xor_si64(c2[188],_mm_xor_si64(c2[908],_mm_xor_si64(c2[720],_mm_xor_si64(c2[18],_mm_xor_si64(c2[19],_mm_xor_si64(c2[1280],_mm_xor_si64(c2[1100],_mm_xor_si64(c2[922],_mm_xor_si64(c2[579],_mm_xor_si64(c2[39],_mm_xor_si64(c2[1298],_mm_xor_si64(c2[235],_mm_xor_si64(c2[782],_mm_xor_si64(c2[422],_mm_xor_si64(c2[1152],_mm_xor_si64(c2[793],_mm_xor_si64(c2[980],_mm_xor_si64(c2[800],_mm_xor_si64(c2[95],_mm_xor_si64(c2[636],_mm_xor_si64(c2[456],_mm_xor_si64(c2[290],_mm_xor_si64(c2[475],_mm_xor_si64(c2[114],_mm_xor_si64(c2[1373],_mm_xor_si64(c2[126],_mm_xor_si64(c2[129],_mm_xor_si64(c2[1388],_mm_xor_si64(c2[689],_mm_xor_si64(c2[1045],_mm_xor_si64(c2[1407],_mm_xor_si64(c2[1227],_mm_xor_si64(c2[1231],_mm_xor_si64(c2[1065],_mm_xor_si64(c2[165],_mm_xor_si64(c2[1248],c2[1068])))))))))))))))))))))))))))))))))))));
+     d2[117]=simde_mm_xor_si64(c2[186],simde_mm_xor_si64(c2[188],simde_mm_xor_si64(c2[908],simde_mm_xor_si64(c2[720],simde_mm_xor_si64(c2[18],simde_mm_xor_si64(c2[19],simde_mm_xor_si64(c2[1280],simde_mm_xor_si64(c2[1100],simde_mm_xor_si64(c2[922],simde_mm_xor_si64(c2[579],simde_mm_xor_si64(c2[39],simde_mm_xor_si64(c2[1298],simde_mm_xor_si64(c2[235],simde_mm_xor_si64(c2[782],simde_mm_xor_si64(c2[422],simde_mm_xor_si64(c2[1152],simde_mm_xor_si64(c2[793],simde_mm_xor_si64(c2[980],simde_mm_xor_si64(c2[800],simde_mm_xor_si64(c2[95],simde_mm_xor_si64(c2[636],simde_mm_xor_si64(c2[456],simde_mm_xor_si64(c2[290],simde_mm_xor_si64(c2[475],simde_mm_xor_si64(c2[114],simde_mm_xor_si64(c2[1373],simde_mm_xor_si64(c2[126],simde_mm_xor_si64(c2[129],simde_mm_xor_si64(c2[1388],simde_mm_xor_si64(c2[689],simde_mm_xor_si64(c2[1045],simde_mm_xor_si64(c2[1407],simde_mm_xor_si64(c2[1227],simde_mm_xor_si64(c2[1231],simde_mm_xor_si64(c2[1065],simde_mm_xor_si64(c2[165],simde_mm_xor_si64(c2[1248],c2[1068])))))))))))))))))))))))))))))))))))));
 
 //row: 14
-     d2[126]=_mm_xor_si64(c2[907],_mm_xor_si64(c2[727],_mm_xor_si64(c2[543],_mm_xor_si64(c2[720],_mm_xor_si64(c2[545],_mm_xor_si64(c2[1],_mm_xor_si64(c2[1265],_mm_xor_si64(c2[739],_mm_xor_si64(c2[559],_mm_xor_si64(c2[384],_mm_xor_si64(c2[560],_mm_xor_si64(c2[385],_mm_xor_si64(c2[202],_mm_xor_si64(c2[198],_mm_xor_si64(c2[18],_mm_xor_si64(c2[1286],_mm_xor_si64(c2[1300],_mm_xor_si64(c2[1120],_mm_xor_si64(c2[936],_mm_xor_si64(c2[400],_mm_xor_si64(c2[396],_mm_xor_si64(c2[216],_mm_xor_si64(c2[956],_mm_xor_si64(c2[776],_mm_xor_si64(c2[601],_mm_xor_si64(c2[1314],_mm_xor_si64(c2[1139],_mm_xor_si64(c2[954],_mm_xor_si64(c2[779],_mm_xor_si64(c2[254],_mm_xor_si64(c2[79],_mm_xor_si64(c2[1334],_mm_xor_si64(c2[1159],_mm_xor_si64(c2[1332],_mm_xor_si64(c2[1337],_mm_xor_si64(c2[1157],_mm_xor_si64(c2[636],_mm_xor_si64(c2[452],_mm_xor_si64(c2[997],_mm_xor_si64(c2[993],_mm_xor_si64(c2[813],_mm_xor_si64(c2[1011],_mm_xor_si64(c2[831],_mm_xor_si64(c2[656],_mm_xor_si64(c2[1016],_mm_xor_si64(c2[832],_mm_xor_si64(c2[475],_mm_xor_si64(c2[471],_mm_xor_si64(c2[291],_mm_xor_si64(c2[654],_mm_xor_si64(c2[667],_mm_xor_si64(c2[492],_mm_xor_si64(c2[490],_mm_xor_si64(c2[486],_mm_xor_si64(c2[306],_mm_xor_si64(c2[1230],_mm_xor_si64(c2[1046],_mm_xor_si64(c2[147],_mm_xor_si64(c2[1411],_mm_xor_si64(c2[329],_mm_xor_si64(c2[325],_mm_xor_si64(c2[145],_mm_xor_si64(c2[347],_mm_xor_si64(c2[167],_mm_xor_si64(c2[1422],_mm_xor_si64(c2[706],_mm_xor_si64(c2[522],_mm_xor_si64(c2[170],_mm_xor_si64(c2[166],c2[1425])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+     d2[126]=simde_mm_xor_si64(c2[907],simde_mm_xor_si64(c2[727],simde_mm_xor_si64(c2[543],simde_mm_xor_si64(c2[720],simde_mm_xor_si64(c2[545],simde_mm_xor_si64(c2[1],simde_mm_xor_si64(c2[1265],simde_mm_xor_si64(c2[739],simde_mm_xor_si64(c2[559],simde_mm_xor_si64(c2[384],simde_mm_xor_si64(c2[560],simde_mm_xor_si64(c2[385],simde_mm_xor_si64(c2[202],simde_mm_xor_si64(c2[198],simde_mm_xor_si64(c2[18],simde_mm_xor_si64(c2[1286],simde_mm_xor_si64(c2[1300],simde_mm_xor_si64(c2[1120],simde_mm_xor_si64(c2[936],simde_mm_xor_si64(c2[400],simde_mm_xor_si64(c2[396],simde_mm_xor_si64(c2[216],simde_mm_xor_si64(c2[956],simde_mm_xor_si64(c2[776],simde_mm_xor_si64(c2[601],simde_mm_xor_si64(c2[1314],simde_mm_xor_si64(c2[1139],simde_mm_xor_si64(c2[954],simde_mm_xor_si64(c2[779],simde_mm_xor_si64(c2[254],simde_mm_xor_si64(c2[79],simde_mm_xor_si64(c2[1334],simde_mm_xor_si64(c2[1159],simde_mm_xor_si64(c2[1332],simde_mm_xor_si64(c2[1337],simde_mm_xor_si64(c2[1157],simde_mm_xor_si64(c2[636],simde_mm_xor_si64(c2[452],simde_mm_xor_si64(c2[997],simde_mm_xor_si64(c2[993],simde_mm_xor_si64(c2[813],simde_mm_xor_si64(c2[1011],simde_mm_xor_si64(c2[831],simde_mm_xor_si64(c2[656],simde_mm_xor_si64(c2[1016],simde_mm_xor_si64(c2[832],simde_mm_xor_si64(c2[475],simde_mm_xor_si64(c2[471],simde_mm_xor_si64(c2[291],simde_mm_xor_si64(c2[654],simde_mm_xor_si64(c2[667],simde_mm_xor_si64(c2[492],simde_mm_xor_si64(c2[490],simde_mm_xor_si64(c2[486],simde_mm_xor_si64(c2[306],simde_mm_xor_si64(c2[1230],simde_mm_xor_si64(c2[1046],simde_mm_xor_si64(c2[147],simde_mm_xor_si64(c2[1411],simde_mm_xor_si64(c2[329],simde_mm_xor_si64(c2[325],simde_mm_xor_si64(c2[145],simde_mm_xor_si64(c2[347],simde_mm_xor_si64(c2[167],simde_mm_xor_si64(c2[1422],simde_mm_xor_si64(c2[706],simde_mm_xor_si64(c2[522],simde_mm_xor_si64(c2[170],simde_mm_xor_si64(c2[166],c2[1425])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
 
 //row: 15
-     d2[135]=_mm_xor_si64(c2[363],_mm_xor_si64(c2[547],_mm_xor_si64(c2[367],_mm_xor_si64(c2[365],_mm_xor_si64(c2[360],_mm_xor_si64(c2[1085],_mm_xor_si64(c2[1080],_mm_xor_si64(c2[548],_mm_xor_si64(c2[204],_mm_xor_si64(c2[379],_mm_xor_si64(c2[199],_mm_xor_si64(c2[205],_mm_xor_si64(c2[200],_mm_xor_si64(c2[1286],_mm_xor_si64(c2[1281],_mm_xor_si64(c2[756],_mm_xor_si64(c2[940],_mm_xor_si64(c2[760],_mm_xor_si64(c2[36],_mm_xor_si64(c2[40],_mm_xor_si64(c2[421],_mm_xor_si64(c2[596],_mm_xor_si64(c2[416],_mm_xor_si64(c2[959],_mm_xor_si64(c2[954],_mm_xor_si64(c2[599],_mm_xor_si64(c2[594],_mm_xor_si64(c2[1338],_mm_xor_si64(c2[1333],_mm_xor_si64(c2[979],_mm_xor_si64(c2[974],_mm_xor_si64(c2[977],_mm_xor_si64(c2[972],_mm_xor_si64(c2[272],_mm_xor_si64(c2[276],_mm_xor_si64(c2[633],_mm_xor_si64(c2[637],_mm_xor_si64(c2[476],_mm_xor_si64(c2[651],_mm_xor_si64(c2[471],_mm_xor_si64(c2[652],_mm_xor_si64(c2[656],_mm_xor_si64(c2[111],_mm_xor_si64(c2[115],_mm_xor_si64(c2[312],_mm_xor_si64(c2[307],_mm_xor_si64(c2[126],_mm_xor_si64(c2[130],_mm_xor_si64(c2[866],_mm_xor_si64(c2[870],_mm_xor_si64(c2[1231],_mm_xor_si64(c2[1226],_mm_xor_si64(c2[1404],_mm_xor_si64(c2[1408],_mm_xor_si64(c2[1242],_mm_xor_si64(c2[1426],_mm_xor_si64(c2[1246],_mm_xor_si64(c2[342],_mm_xor_si64(c2[346],_mm_xor_si64(c2[1245],c2[1249]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+     d2[135]=simde_mm_xor_si64(c2[363],simde_mm_xor_si64(c2[547],simde_mm_xor_si64(c2[367],simde_mm_xor_si64(c2[365],simde_mm_xor_si64(c2[360],simde_mm_xor_si64(c2[1085],simde_mm_xor_si64(c2[1080],simde_mm_xor_si64(c2[548],simde_mm_xor_si64(c2[204],simde_mm_xor_si64(c2[379],simde_mm_xor_si64(c2[199],simde_mm_xor_si64(c2[205],simde_mm_xor_si64(c2[200],simde_mm_xor_si64(c2[1286],simde_mm_xor_si64(c2[1281],simde_mm_xor_si64(c2[756],simde_mm_xor_si64(c2[940],simde_mm_xor_si64(c2[760],simde_mm_xor_si64(c2[36],simde_mm_xor_si64(c2[40],simde_mm_xor_si64(c2[421],simde_mm_xor_si64(c2[596],simde_mm_xor_si64(c2[416],simde_mm_xor_si64(c2[959],simde_mm_xor_si64(c2[954],simde_mm_xor_si64(c2[599],simde_mm_xor_si64(c2[594],simde_mm_xor_si64(c2[1338],simde_mm_xor_si64(c2[1333],simde_mm_xor_si64(c2[979],simde_mm_xor_si64(c2[974],simde_mm_xor_si64(c2[977],simde_mm_xor_si64(c2[972],simde_mm_xor_si64(c2[272],simde_mm_xor_si64(c2[276],simde_mm_xor_si64(c2[633],simde_mm_xor_si64(c2[637],simde_mm_xor_si64(c2[476],simde_mm_xor_si64(c2[651],simde_mm_xor_si64(c2[471],simde_mm_xor_si64(c2[652],simde_mm_xor_si64(c2[656],simde_mm_xor_si64(c2[111],simde_mm_xor_si64(c2[115],simde_mm_xor_si64(c2[312],simde_mm_xor_si64(c2[307],simde_mm_xor_si64(c2[126],simde_mm_xor_si64(c2[130],simde_mm_xor_si64(c2[866],simde_mm_xor_si64(c2[870],simde_mm_xor_si64(c2[1231],simde_mm_xor_si64(c2[1226],simde_mm_xor_si64(c2[1404],simde_mm_xor_si64(c2[1408],simde_mm_xor_si64(c2[1242],simde_mm_xor_si64(c2[1426],simde_mm_xor_si64(c2[1246],simde_mm_xor_si64(c2[342],simde_mm_xor_si64(c2[346],simde_mm_xor_si64(c2[1245],c2[1249]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
 
 //row: 16
-     d2[144]=_mm_xor_si64(c2[1263],_mm_xor_si64(c2[1083],_mm_xor_si64(c2[541],_mm_xor_si64(c2[361],_mm_xor_si64(c2[1085],_mm_xor_si64(c2[543],_mm_xor_si64(c2[363],_mm_xor_si64(c2[366],_mm_xor_si64(c2[1083],_mm_xor_si64(c2[1104],_mm_xor_si64(c2[924],_mm_xor_si64(c2[382],_mm_xor_si64(c2[202],_mm_xor_si64(c2[925],_mm_xor_si64(c2[203],_mm_xor_si64(c2[558],_mm_xor_si64(c2[1284],_mm_xor_si64(c2[24],_mm_xor_si64(c2[217],_mm_xor_si64(c2[37],_mm_xor_si64(c2[943],_mm_xor_si64(c2[763],_mm_xor_si64(c2[756],_mm_xor_si64(c2[43],_mm_xor_si64(c2[1321],_mm_xor_si64(c2[1141],_mm_xor_si64(c2[599],_mm_xor_si64(c2[419],_mm_xor_si64(c2[240],_mm_xor_si64(c2[1137],_mm_xor_si64(c2[957],_mm_xor_si64(c2[1319],_mm_xor_si64(c2[597],_mm_xor_si64(c2[619],_mm_xor_si64(c2[77],_mm_xor_si64(c2[1336],_mm_xor_si64(c2[260],_mm_xor_si64(c2[977],_mm_xor_si64(c2[258],_mm_xor_si64(c2[975],_mm_xor_si64(c2[992],_mm_xor_si64(c2[450],_mm_xor_si64(c2[270],_mm_xor_si64(c2[1353],_mm_xor_si64(c2[631],_mm_xor_si64(c2[1376],_mm_xor_si64(c2[1196],_mm_xor_si64(c2[654],_mm_xor_si64(c2[474],_mm_xor_si64(c2[1372],_mm_xor_si64(c2[830],_mm_xor_si64(c2[650],_mm_xor_si64(c2[831],_mm_xor_si64(c2[109],_mm_xor_si64(c2[1032],_mm_xor_si64(c2[490],_mm_xor_si64(c2[310],_mm_xor_si64(c2[846],_mm_xor_si64(c2[133],_mm_xor_si64(c2[147],_mm_xor_si64(c2[1044],_mm_xor_si64(c2[864],_mm_xor_si64(c2[512],_mm_xor_si64(c2[1229],_mm_xor_si64(c2[685],_mm_xor_si64(c2[1411],_mm_xor_si64(c2[703],_mm_xor_si64(c2[523],_mm_xor_si64(c2[1429],_mm_xor_si64(c2[1249],_mm_xor_si64(c2[1062],_mm_xor_si64(c2[529],_mm_xor_si64(c2[349],_mm_xor_si64(c2[526],_mm_xor_si64(c2[1243],c2[343])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+     d2[144]=simde_mm_xor_si64(c2[1263],simde_mm_xor_si64(c2[1083],simde_mm_xor_si64(c2[541],simde_mm_xor_si64(c2[361],simde_mm_xor_si64(c2[1085],simde_mm_xor_si64(c2[543],simde_mm_xor_si64(c2[363],simde_mm_xor_si64(c2[366],simde_mm_xor_si64(c2[1083],simde_mm_xor_si64(c2[1104],simde_mm_xor_si64(c2[924],simde_mm_xor_si64(c2[382],simde_mm_xor_si64(c2[202],simde_mm_xor_si64(c2[925],simde_mm_xor_si64(c2[203],simde_mm_xor_si64(c2[558],simde_mm_xor_si64(c2[1284],simde_mm_xor_si64(c2[24],simde_mm_xor_si64(c2[217],simde_mm_xor_si64(c2[37],simde_mm_xor_si64(c2[943],simde_mm_xor_si64(c2[763],simde_mm_xor_si64(c2[756],simde_mm_xor_si64(c2[43],simde_mm_xor_si64(c2[1321],simde_mm_xor_si64(c2[1141],simde_mm_xor_si64(c2[599],simde_mm_xor_si64(c2[419],simde_mm_xor_si64(c2[240],simde_mm_xor_si64(c2[1137],simde_mm_xor_si64(c2[957],simde_mm_xor_si64(c2[1319],simde_mm_xor_si64(c2[597],simde_mm_xor_si64(c2[619],simde_mm_xor_si64(c2[77],simde_mm_xor_si64(c2[1336],simde_mm_xor_si64(c2[260],simde_mm_xor_si64(c2[977],simde_mm_xor_si64(c2[258],simde_mm_xor_si64(c2[975],simde_mm_xor_si64(c2[992],simde_mm_xor_si64(c2[450],simde_mm_xor_si64(c2[270],simde_mm_xor_si64(c2[1353],simde_mm_xor_si64(c2[631],simde_mm_xor_si64(c2[1376],simde_mm_xor_si64(c2[1196],simde_mm_xor_si64(c2[654],simde_mm_xor_si64(c2[474],simde_mm_xor_si64(c2[1372],simde_mm_xor_si64(c2[830],simde_mm_xor_si64(c2[650],simde_mm_xor_si64(c2[831],simde_mm_xor_si64(c2[109],simde_mm_xor_si64(c2[1032],simde_mm_xor_si64(c2[490],simde_mm_xor_si64(c2[310],simde_mm_xor_si64(c2[846],simde_mm_xor_si64(c2[133],simde_mm_xor_si64(c2[147],simde_mm_xor_si64(c2[1044],simde_mm_xor_si64(c2[864],simde_mm_xor_si64(c2[512],simde_mm_xor_si64(c2[1229],simde_mm_xor_si64(c2[685],simde_mm_xor_si64(c2[1411],simde_mm_xor_si64(c2[703],simde_mm_xor_si64(c2[523],simde_mm_xor_si64(c2[1429],simde_mm_xor_si64(c2[1249],simde_mm_xor_si64(c2[1062],simde_mm_xor_si64(c2[529],simde_mm_xor_si64(c2[349],simde_mm_xor_si64(c2[526],simde_mm_xor_si64(c2[1243],c2[343])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
 
 //row: 17
-     d2[153]=_mm_xor_si64(c2[728],_mm_xor_si64(c2[548],_mm_xor_si64(c2[905],_mm_xor_si64(c2[725],_mm_xor_si64(c2[541],_mm_xor_si64(c2[907],_mm_xor_si64(c2[727],_mm_xor_si64(c2[1261],_mm_xor_si64(c2[8],_mm_xor_si64(c2[560],_mm_xor_si64(c2[380],_mm_xor_si64(c2[746],_mm_xor_si64(c2[566],_mm_xor_si64(c2[381],_mm_xor_si64(c2[558],_mm_xor_si64(c2[23],_mm_xor_si64(c2[200],_mm_xor_si64(c2[24],_mm_xor_si64(c2[1121],_mm_xor_si64(c2[941],_mm_xor_si64(c2[1298],_mm_xor_si64(c2[1118],_mm_xor_si64(c2[221],_mm_xor_si64(c2[398],_mm_xor_si64(c2[777],_mm_xor_si64(c2[597],_mm_xor_si64(c2[954],_mm_xor_si64(c2[774],_mm_xor_si64(c2[1135],_mm_xor_si64(c2[62],_mm_xor_si64(c2[1321],_mm_xor_si64(c2[775],_mm_xor_si64(c2[961],_mm_xor_si64(c2[75],_mm_xor_si64(c2[432],_mm_xor_si64(c2[252],_mm_xor_si64(c2[1155],_mm_xor_si64(c2[1332],_mm_xor_si64(c2[1153],_mm_xor_si64(c2[1339],_mm_xor_si64(c2[457],_mm_xor_si64(c2[814],_mm_xor_si64(c2[634],_mm_xor_si64(c2[818],_mm_xor_si64(c2[995],_mm_xor_si64(c2[635],_mm_xor_si64(c2[832],_mm_xor_si64(c2[652],_mm_xor_si64(c2[1009],_mm_xor_si64(c2[829],_mm_xor_si64(c2[828],_mm_xor_si64(c2[1194],_mm_xor_si64(c2[1014],_mm_xor_si64(c2[296],_mm_xor_si64(c2[473],_mm_xor_si64(c2[488],_mm_xor_si64(c2[854],_mm_xor_si64(c2[674],_mm_xor_si64(c2[311],_mm_xor_si64(c2[488],_mm_xor_si64(c2[1051],_mm_xor_si64(c2[1408],_mm_xor_si64(c2[1228],_mm_xor_si64(c2[1407],_mm_xor_si64(c2[145],_mm_xor_si64(c2[150],_mm_xor_si64(c2[327],_mm_xor_si64(c2[168],_mm_xor_si64(c2[1427],_mm_xor_si64(c2[345],_mm_xor_si64(c2[165],_mm_xor_si64(c2[527],_mm_xor_si64(c2[884],_mm_xor_si64(c2[704],_mm_xor_si64(c2[1430],c2[168])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+     d2[153]=simde_mm_xor_si64(c2[728],simde_mm_xor_si64(c2[548],simde_mm_xor_si64(c2[905],simde_mm_xor_si64(c2[725],simde_mm_xor_si64(c2[541],simde_mm_xor_si64(c2[907],simde_mm_xor_si64(c2[727],simde_mm_xor_si64(c2[1261],simde_mm_xor_si64(c2[8],simde_mm_xor_si64(c2[560],simde_mm_xor_si64(c2[380],simde_mm_xor_si64(c2[746],simde_mm_xor_si64(c2[566],simde_mm_xor_si64(c2[381],simde_mm_xor_si64(c2[558],simde_mm_xor_si64(c2[23],simde_mm_xor_si64(c2[200],simde_mm_xor_si64(c2[24],simde_mm_xor_si64(c2[1121],simde_mm_xor_si64(c2[941],simde_mm_xor_si64(c2[1298],simde_mm_xor_si64(c2[1118],simde_mm_xor_si64(c2[221],simde_mm_xor_si64(c2[398],simde_mm_xor_si64(c2[777],simde_mm_xor_si64(c2[597],simde_mm_xor_si64(c2[954],simde_mm_xor_si64(c2[774],simde_mm_xor_si64(c2[1135],simde_mm_xor_si64(c2[62],simde_mm_xor_si64(c2[1321],simde_mm_xor_si64(c2[775],simde_mm_xor_si64(c2[961],simde_mm_xor_si64(c2[75],simde_mm_xor_si64(c2[432],simde_mm_xor_si64(c2[252],simde_mm_xor_si64(c2[1155],simde_mm_xor_si64(c2[1332],simde_mm_xor_si64(c2[1153],simde_mm_xor_si64(c2[1339],simde_mm_xor_si64(c2[457],simde_mm_xor_si64(c2[814],simde_mm_xor_si64(c2[634],simde_mm_xor_si64(c2[818],simde_mm_xor_si64(c2[995],simde_mm_xor_si64(c2[635],simde_mm_xor_si64(c2[832],simde_mm_xor_si64(c2[652],simde_mm_xor_si64(c2[1009],simde_mm_xor_si64(c2[829],simde_mm_xor_si64(c2[828],simde_mm_xor_si64(c2[1194],simde_mm_xor_si64(c2[1014],simde_mm_xor_si64(c2[296],simde_mm_xor_si64(c2[473],simde_mm_xor_si64(c2[488],simde_mm_xor_si64(c2[854],simde_mm_xor_si64(c2[674],simde_mm_xor_si64(c2[311],simde_mm_xor_si64(c2[488],simde_mm_xor_si64(c2[1051],simde_mm_xor_si64(c2[1408],simde_mm_xor_si64(c2[1228],simde_mm_xor_si64(c2[1407],simde_mm_xor_si64(c2[145],simde_mm_xor_si64(c2[150],simde_mm_xor_si64(c2[327],simde_mm_xor_si64(c2[168],simde_mm_xor_si64(c2[1427],simde_mm_xor_si64(c2[345],simde_mm_xor_si64(c2[165],simde_mm_xor_si64(c2[527],simde_mm_xor_si64(c2[884],simde_mm_xor_si64(c2[704],simde_mm_xor_si64(c2[1430],c2[168])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
 
 //row: 18
-     d2[162]=_mm_xor_si64(c2[182],_mm_xor_si64(c2[472],c2[1214]));
+     d2[162]=simde_mm_xor_si64(c2[182],simde_mm_xor_si64(c2[472],c2[1214]));
 
 //row: 19
-     d2[171]=_mm_xor_si64(c2[545],_mm_xor_si64(c2[547],_mm_xor_si64(c2[1267],_mm_xor_si64(c2[1260],_mm_xor_si64(c2[386],_mm_xor_si64(c2[378],_mm_xor_si64(c2[20],_mm_xor_si64(c2[741],_mm_xor_si64(c2[938],_mm_xor_si64(c2[218],_mm_xor_si64(c2[594],_mm_xor_si64(c2[1141],_mm_xor_si64(c2[781],_mm_xor_si64(c2[72],_mm_xor_si64(c2[1152],_mm_xor_si64(c2[1159],_mm_xor_si64(c2[454],_mm_xor_si64(c2[815],_mm_xor_si64(c2[649],_mm_xor_si64(c2[834],_mm_xor_si64(c2[293],_mm_xor_si64(c2[494],_mm_xor_si64(c2[308],_mm_xor_si64(c2[1048],_mm_xor_si64(c2[1404],_mm_xor_si64(c2[147],_mm_xor_si64(c2[1424],_mm_xor_si64(c2[524],c2[1427]))))))))))))))))))))))))))));
+     d2[171]=simde_mm_xor_si64(c2[545],simde_mm_xor_si64(c2[547],simde_mm_xor_si64(c2[1267],simde_mm_xor_si64(c2[1260],simde_mm_xor_si64(c2[386],simde_mm_xor_si64(c2[378],simde_mm_xor_si64(c2[20],simde_mm_xor_si64(c2[741],simde_mm_xor_si64(c2[938],simde_mm_xor_si64(c2[218],simde_mm_xor_si64(c2[594],simde_mm_xor_si64(c2[1141],simde_mm_xor_si64(c2[781],simde_mm_xor_si64(c2[72],simde_mm_xor_si64(c2[1152],simde_mm_xor_si64(c2[1159],simde_mm_xor_si64(c2[454],simde_mm_xor_si64(c2[815],simde_mm_xor_si64(c2[649],simde_mm_xor_si64(c2[834],simde_mm_xor_si64(c2[293],simde_mm_xor_si64(c2[494],simde_mm_xor_si64(c2[308],simde_mm_xor_si64(c2[1048],simde_mm_xor_si64(c2[1404],simde_mm_xor_si64(c2[147],simde_mm_xor_si64(c2[1424],simde_mm_xor_si64(c2[524],c2[1427]))))))))))))))))))))))))))));
 
 //row: 20
-     d2[180]=_mm_xor_si64(c2[181],_mm_xor_si64(c2[1],_mm_xor_si64(c2[3],_mm_xor_si64(c2[723],_mm_xor_si64(c2[22],_mm_xor_si64(c2[1281],_mm_xor_si64(c2[1282],_mm_xor_si64(c2[924],_mm_xor_si64(c2[378],_mm_xor_si64(c2[583],_mm_xor_si64(c2[403],_mm_xor_si64(c2[1122],_mm_xor_si64(c2[239],_mm_xor_si64(c2[59],_mm_xor_si64(c2[597],_mm_xor_si64(c2[237],_mm_xor_si64(c2[976],_mm_xor_si64(c2[617],_mm_xor_si64(c2[615],_mm_xor_si64(c2[1335],_mm_xor_si64(c2[1358],_mm_xor_si64(c2[271],_mm_xor_si64(c2[294],_mm_xor_si64(c2[114],_mm_xor_si64(c2[290],_mm_xor_si64(c2[1188],_mm_xor_si64(c2[1389],_mm_xor_si64(c2[1212],_mm_xor_si64(c2[504],_mm_xor_si64(c2[869],_mm_xor_si64(c2[1051],_mm_xor_si64(c2[1069],_mm_xor_si64(c2[889],_mm_xor_si64(c2[1428],c2[883]))))))))))))))))))))))))))))))))));
+     d2[180]=simde_mm_xor_si64(c2[181],simde_mm_xor_si64(c2[1],simde_mm_xor_si64(c2[3],simde_mm_xor_si64(c2[723],simde_mm_xor_si64(c2[22],simde_mm_xor_si64(c2[1281],simde_mm_xor_si64(c2[1282],simde_mm_xor_si64(c2[924],simde_mm_xor_si64(c2[378],simde_mm_xor_si64(c2[583],simde_mm_xor_si64(c2[403],simde_mm_xor_si64(c2[1122],simde_mm_xor_si64(c2[239],simde_mm_xor_si64(c2[59],simde_mm_xor_si64(c2[597],simde_mm_xor_si64(c2[237],simde_mm_xor_si64(c2[976],simde_mm_xor_si64(c2[617],simde_mm_xor_si64(c2[615],simde_mm_xor_si64(c2[1335],simde_mm_xor_si64(c2[1358],simde_mm_xor_si64(c2[271],simde_mm_xor_si64(c2[294],simde_mm_xor_si64(c2[114],simde_mm_xor_si64(c2[290],simde_mm_xor_si64(c2[1188],simde_mm_xor_si64(c2[1389],simde_mm_xor_si64(c2[1212],simde_mm_xor_si64(c2[504],simde_mm_xor_si64(c2[869],simde_mm_xor_si64(c2[1051],simde_mm_xor_si64(c2[1069],simde_mm_xor_si64(c2[889],simde_mm_xor_si64(c2[1428],c2[883]))))))))))))))))))))))))))))))))));
 
 //row: 21
-     d2[189]=_mm_xor_si64(c2[541],_mm_xor_si64(c2[543],_mm_xor_si64(c2[1263],_mm_xor_si64(c2[542],_mm_xor_si64(c2[382],_mm_xor_si64(c2[383],_mm_xor_si64(c2[205],_mm_xor_si64(c2[25],_mm_xor_si64(c2[943],_mm_xor_si64(c2[403],_mm_xor_si64(c2[223],_mm_xor_si64(c2[599],_mm_xor_si64(c2[1137],_mm_xor_si64(c2[777],_mm_xor_si64(c2[77],_mm_xor_si64(c2[1157],_mm_xor_si64(c2[1335],_mm_xor_si64(c2[1155],_mm_xor_si64(c2[450],_mm_xor_si64(c2[991],_mm_xor_si64(c2[811],_mm_xor_si64(c2[654],_mm_xor_si64(c2[830],_mm_xor_si64(c2[469],_mm_xor_si64(c2[289],_mm_xor_si64(c2[490],_mm_xor_si64(c2[493],_mm_xor_si64(c2[313],_mm_xor_si64(c2[1044],_mm_xor_si64(c2[1409],_mm_xor_si64(c2[332],_mm_xor_si64(c2[152],_mm_xor_si64(c2[684],_mm_xor_si64(c2[1429],_mm_xor_si64(c2[529],_mm_xor_si64(c2[164],c2[1423]))))))))))))))))))))))))))))))))))));
+     d2[189]=simde_mm_xor_si64(c2[541],simde_mm_xor_si64(c2[543],simde_mm_xor_si64(c2[1263],simde_mm_xor_si64(c2[542],simde_mm_xor_si64(c2[382],simde_mm_xor_si64(c2[383],simde_mm_xor_si64(c2[205],simde_mm_xor_si64(c2[25],simde_mm_xor_si64(c2[943],simde_mm_xor_si64(c2[403],simde_mm_xor_si64(c2[223],simde_mm_xor_si64(c2[599],simde_mm_xor_si64(c2[1137],simde_mm_xor_si64(c2[777],simde_mm_xor_si64(c2[77],simde_mm_xor_si64(c2[1157],simde_mm_xor_si64(c2[1335],simde_mm_xor_si64(c2[1155],simde_mm_xor_si64(c2[450],simde_mm_xor_si64(c2[991],simde_mm_xor_si64(c2[811],simde_mm_xor_si64(c2[654],simde_mm_xor_si64(c2[830],simde_mm_xor_si64(c2[469],simde_mm_xor_si64(c2[289],simde_mm_xor_si64(c2[490],simde_mm_xor_si64(c2[493],simde_mm_xor_si64(c2[313],simde_mm_xor_si64(c2[1044],simde_mm_xor_si64(c2[1409],simde_mm_xor_si64(c2[332],simde_mm_xor_si64(c2[152],simde_mm_xor_si64(c2[684],simde_mm_xor_si64(c2[1429],simde_mm_xor_si64(c2[529],simde_mm_xor_si64(c2[164],c2[1423]))))))))))))))))))))))))))))))))))));
 
 //row: 22
-     d2[198]=_mm_xor_si64(c2[1104],c2[763]);
+     d2[198]=simde_mm_xor_si64(c2[1104],c2[763]);
 
 //row: 23
-     d2[207]=_mm_xor_si64(c2[728],_mm_xor_si64(c2[599],c2[97]));
+     d2[207]=simde_mm_xor_si64(c2[728],simde_mm_xor_si64(c2[599],c2[97]));
 
 //row: 24
-     d2[216]=_mm_xor_si64(c2[1101],_mm_xor_si64(c2[401],c2[1065]));
+     d2[216]=simde_mm_xor_si64(c2[1101],simde_mm_xor_si64(c2[401],c2[1065]));
 
 //row: 25
-     d2[225]=_mm_xor_si64(c2[7],c2[1357]);
+     d2[225]=simde_mm_xor_si64(c2[7],c2[1357]);
 
 //row: 26
-     d2[234]=_mm_xor_si64(c2[1263],_mm_xor_si64(c2[1083],_mm_xor_si64(c2[1267],_mm_xor_si64(c2[1265],_mm_xor_si64(c2[1085],_mm_xor_si64(c2[1260],_mm_xor_si64(c2[366],_mm_xor_si64(c2[541],_mm_xor_si64(c2[1104],_mm_xor_si64(c2[924],_mm_xor_si64(c2[1099],_mm_xor_si64(c2[925],_mm_xor_si64(c2[1100],_mm_xor_si64(c2[558],_mm_xor_si64(c2[922],_mm_xor_si64(c2[742],_mm_xor_si64(c2[217],_mm_xor_si64(c2[37],_mm_xor_si64(c2[221],_mm_xor_si64(c2[756],_mm_xor_si64(c2[1120],_mm_xor_si64(c2[940],_mm_xor_si64(c2[1124],_mm_xor_si64(c2[1321],_mm_xor_si64(c2[1141],_mm_xor_si64(c2[1316],_mm_xor_si64(c2[420],_mm_xor_si64(c2[240],_mm_xor_si64(c2[415],_mm_xor_si64(c2[1319],_mm_xor_si64(c2[55],_mm_xor_si64(c2[799],_mm_xor_si64(c2[619],_mm_xor_si64(c2[794],_mm_xor_si64(c2[260],_mm_xor_si64(c2[435],_mm_xor_si64(c2[258],_mm_xor_si64(c2[613],_mm_xor_si64(c2[433],_mm_xor_si64(c2[1172],_mm_xor_si64(c2[992],_mm_xor_si64(c2[1176],_mm_xor_si64(c2[1353],_mm_xor_si64(c2[278],_mm_xor_si64(c2[98],_mm_xor_si64(c2[1376],_mm_xor_si64(c2[1196],_mm_xor_si64(c2[1371],_mm_xor_si64(c2[113],_mm_xor_si64(c2[1372],_mm_xor_si64(c2[108],_mm_xor_si64(c2[831],_mm_xor_si64(c2[1195],_mm_xor_si64(c2[1015],_mm_xor_si64(c2[1212],_mm_xor_si64(c2[1032],_mm_xor_si64(c2[1207],_mm_xor_si64(c2[846],_mm_xor_si64(c2[1210],_mm_xor_si64(c2[1030],_mm_xor_si64(c2[849],_mm_xor_si64(c2[327],_mm_xor_si64(c2[147],_mm_xor_si64(c2[331],_mm_xor_si64(c2[512],_mm_xor_si64(c2[687],_mm_xor_si64(c2[685],_mm_xor_si64(c2[1049],_mm_xor_si64(c2[869],_mm_xor_si64(c2[703],_mm_xor_si64(c2[523],_mm_xor_si64(c2[707],_mm_xor_si64(c2[1242],_mm_xor_si64(c2[1062],_mm_xor_si64(c2[1246],_mm_xor_si64(c2[526],_mm_xor_si64(c2[890],c2[710])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+     d2[234]=simde_mm_xor_si64(c2[1263],simde_mm_xor_si64(c2[1083],simde_mm_xor_si64(c2[1267],simde_mm_xor_si64(c2[1265],simde_mm_xor_si64(c2[1085],simde_mm_xor_si64(c2[1260],simde_mm_xor_si64(c2[366],simde_mm_xor_si64(c2[541],simde_mm_xor_si64(c2[1104],simde_mm_xor_si64(c2[924],simde_mm_xor_si64(c2[1099],simde_mm_xor_si64(c2[925],simde_mm_xor_si64(c2[1100],simde_mm_xor_si64(c2[558],simde_mm_xor_si64(c2[922],simde_mm_xor_si64(c2[742],simde_mm_xor_si64(c2[217],simde_mm_xor_si64(c2[37],simde_mm_xor_si64(c2[221],simde_mm_xor_si64(c2[756],simde_mm_xor_si64(c2[1120],simde_mm_xor_si64(c2[940],simde_mm_xor_si64(c2[1124],simde_mm_xor_si64(c2[1321],simde_mm_xor_si64(c2[1141],simde_mm_xor_si64(c2[1316],simde_mm_xor_si64(c2[420],simde_mm_xor_si64(c2[240],simde_mm_xor_si64(c2[415],simde_mm_xor_si64(c2[1319],simde_mm_xor_si64(c2[55],simde_mm_xor_si64(c2[799],simde_mm_xor_si64(c2[619],simde_mm_xor_si64(c2[794],simde_mm_xor_si64(c2[260],simde_mm_xor_si64(c2[435],simde_mm_xor_si64(c2[258],simde_mm_xor_si64(c2[613],simde_mm_xor_si64(c2[433],simde_mm_xor_si64(c2[1172],simde_mm_xor_si64(c2[992],simde_mm_xor_si64(c2[1176],simde_mm_xor_si64(c2[1353],simde_mm_xor_si64(c2[278],simde_mm_xor_si64(c2[98],simde_mm_xor_si64(c2[1376],simde_mm_xor_si64(c2[1196],simde_mm_xor_si64(c2[1371],simde_mm_xor_si64(c2[113],simde_mm_xor_si64(c2[1372],simde_mm_xor_si64(c2[108],simde_mm_xor_si64(c2[831],simde_mm_xor_si64(c2[1195],simde_mm_xor_si64(c2[1015],simde_mm_xor_si64(c2[1212],simde_mm_xor_si64(c2[1032],simde_mm_xor_si64(c2[1207],simde_mm_xor_si64(c2[846],simde_mm_xor_si64(c2[1210],simde_mm_xor_si64(c2[1030],simde_mm_xor_si64(c2[849],simde_mm_xor_si64(c2[327],simde_mm_xor_si64(c2[147],simde_mm_xor_si64(c2[331],simde_mm_xor_si64(c2[512],simde_mm_xor_si64(c2[687],simde_mm_xor_si64(c2[685],simde_mm_xor_si64(c2[1049],simde_mm_xor_si64(c2[869],simde_mm_xor_si64(c2[703],simde_mm_xor_si64(c2[523],simde_mm_xor_si64(c2[707],simde_mm_xor_si64(c2[1242],simde_mm_xor_si64(c2[1062],simde_mm_xor_si64(c2[1246],simde_mm_xor_si64(c2[526],simde_mm_xor_si64(c2[890],c2[710])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
 
 //row: 27
-     d2[243]=_mm_xor_si64(c2[901],c2[469]);
+     d2[243]=simde_mm_xor_si64(c2[901],c2[469]);
 
 //row: 28
-     d2[252]=_mm_xor_si64(c2[382],_mm_xor_si64(c2[936],c2[635]));
+     d2[252]=simde_mm_xor_si64(c2[382],simde_mm_xor_si64(c2[936],c2[635]));
 
 //row: 29
-     d2[261]=_mm_xor_si64(c2[907],c2[255]);
+     d2[261]=simde_mm_xor_si64(c2[907],c2[255]);
 
 //row: 30
-     d2[270]=_mm_xor_si64(c2[1297],_mm_xor_si64(c2[97],_mm_xor_si64(c2[130],c2[1250])));
+     d2[270]=simde_mm_xor_si64(c2[1297],simde_mm_xor_si64(c2[97],simde_mm_xor_si64(c2[130],c2[1250])));
 
 //row: 31
-     d2[279]=_mm_xor_si64(c2[908],_mm_xor_si64(c2[901],_mm_xor_si64(c2[182],_mm_xor_si64(c2[740],_mm_xor_si64(c2[741],_mm_xor_si64(c2[563],_mm_xor_si64(c2[383],_mm_xor_si64(c2[19],_mm_xor_si64(c2[1301],_mm_xor_si64(c2[761],_mm_xor_si64(c2[581],_mm_xor_si64(c2[957],_mm_xor_si64(c2[56],_mm_xor_si64(c2[1135],_mm_xor_si64(c2[435],_mm_xor_si64(c2[76],_mm_xor_si64(c2[254],_mm_xor_si64(c2[74],_mm_xor_si64(c2[817],_mm_xor_si64(c2[1358],_mm_xor_si64(c2[1178],_mm_xor_si64(c2[1012],_mm_xor_si64(c2[1188],_mm_xor_si64(c2[836],_mm_xor_si64(c2[656],_mm_xor_si64(c2[848],_mm_xor_si64(c2[851],_mm_xor_si64(c2[671],_mm_xor_si64(c2[1411],_mm_xor_si64(c2[328],_mm_xor_si64(c2[690],_mm_xor_si64(c2[510],_mm_xor_si64(c2[348],_mm_xor_si64(c2[887],_mm_xor_si64(c2[522],c2[342])))))))))))))))))))))))))))))))))));
+     d2[279]=simde_mm_xor_si64(c2[908],simde_mm_xor_si64(c2[901],simde_mm_xor_si64(c2[182],simde_mm_xor_si64(c2[740],simde_mm_xor_si64(c2[741],simde_mm_xor_si64(c2[563],simde_mm_xor_si64(c2[383],simde_mm_xor_si64(c2[19],simde_mm_xor_si64(c2[1301],simde_mm_xor_si64(c2[761],simde_mm_xor_si64(c2[581],simde_mm_xor_si64(c2[957],simde_mm_xor_si64(c2[56],simde_mm_xor_si64(c2[1135],simde_mm_xor_si64(c2[435],simde_mm_xor_si64(c2[76],simde_mm_xor_si64(c2[254],simde_mm_xor_si64(c2[74],simde_mm_xor_si64(c2[817],simde_mm_xor_si64(c2[1358],simde_mm_xor_si64(c2[1178],simde_mm_xor_si64(c2[1012],simde_mm_xor_si64(c2[1188],simde_mm_xor_si64(c2[836],simde_mm_xor_si64(c2[656],simde_mm_xor_si64(c2[848],simde_mm_xor_si64(c2[851],simde_mm_xor_si64(c2[671],simde_mm_xor_si64(c2[1411],simde_mm_xor_si64(c2[328],simde_mm_xor_si64(c2[690],simde_mm_xor_si64(c2[510],simde_mm_xor_si64(c2[348],simde_mm_xor_si64(c2[887],simde_mm_xor_si64(c2[522],c2[342])))))))))))))))))))))))))))))))))));
 
 //row: 32
-     d2[288]=_mm_xor_si64(c2[360],_mm_xor_si64(c2[180],_mm_xor_si64(c2[362],_mm_xor_si64(c2[182],_mm_xor_si64(c2[902],_mm_xor_si64(c2[4],_mm_xor_si64(c2[201],_mm_xor_si64(c2[21],_mm_xor_si64(c2[22],_mm_xor_si64(c2[1103],_mm_xor_si64(c2[762],_mm_xor_si64(c2[582],_mm_xor_si64(c2[1301],_mm_xor_si64(c2[418],_mm_xor_si64(c2[238],_mm_xor_si64(c2[956],_mm_xor_si64(c2[776],_mm_xor_si64(c2[416],_mm_xor_si64(c2[1335],_mm_xor_si64(c2[1155],_mm_xor_si64(c2[796],_mm_xor_si64(c2[794],_mm_xor_si64(c2[278],_mm_xor_si64(c2[98],_mm_xor_si64(c2[450],_mm_xor_si64(c2[272],_mm_xor_si64(c2[473],_mm_xor_si64(c2[293],_mm_xor_si64(c2[649],_mm_xor_si64(c2[469],_mm_xor_si64(c2[1376],_mm_xor_si64(c2[309],_mm_xor_si64(c2[129],_mm_xor_si64(c2[1391],_mm_xor_si64(c2[872],_mm_xor_si64(c2[692],_mm_xor_si64(c2[1048],_mm_xor_si64(c2[1230],_mm_xor_si64(c2[1248],_mm_xor_si64(c2[1068],_mm_xor_si64(c2[348],_mm_xor_si64(c2[168],c2[1062]))))))))))))))))))))))))))))))))))))))))));
+     d2[288]=simde_mm_xor_si64(c2[360],simde_mm_xor_si64(c2[180],simde_mm_xor_si64(c2[362],simde_mm_xor_si64(c2[182],simde_mm_xor_si64(c2[902],simde_mm_xor_si64(c2[4],simde_mm_xor_si64(c2[201],simde_mm_xor_si64(c2[21],simde_mm_xor_si64(c2[22],simde_mm_xor_si64(c2[1103],simde_mm_xor_si64(c2[762],simde_mm_xor_si64(c2[582],simde_mm_xor_si64(c2[1301],simde_mm_xor_si64(c2[418],simde_mm_xor_si64(c2[238],simde_mm_xor_si64(c2[956],simde_mm_xor_si64(c2[776],simde_mm_xor_si64(c2[416],simde_mm_xor_si64(c2[1335],simde_mm_xor_si64(c2[1155],simde_mm_xor_si64(c2[796],simde_mm_xor_si64(c2[794],simde_mm_xor_si64(c2[278],simde_mm_xor_si64(c2[98],simde_mm_xor_si64(c2[450],simde_mm_xor_si64(c2[272],simde_mm_xor_si64(c2[473],simde_mm_xor_si64(c2[293],simde_mm_xor_si64(c2[649],simde_mm_xor_si64(c2[469],simde_mm_xor_si64(c2[1376],simde_mm_xor_si64(c2[309],simde_mm_xor_si64(c2[129],simde_mm_xor_si64(c2[1391],simde_mm_xor_si64(c2[872],simde_mm_xor_si64(c2[692],simde_mm_xor_si64(c2[1048],simde_mm_xor_si64(c2[1230],simde_mm_xor_si64(c2[1248],simde_mm_xor_si64(c2[1068],simde_mm_xor_si64(c2[348],simde_mm_xor_si64(c2[168],c2[1062]))))))))))))))))))))))))))))))))))))))))));
 
 //row: 33
-     d2[297]=_mm_xor_si64(c2[720],_mm_xor_si64(c2[722],_mm_xor_si64(c2[3],_mm_xor_si64(c2[561],_mm_xor_si64(c2[562],_mm_xor_si64(c2[204],_mm_xor_si64(c2[1122],_mm_xor_si64(c2[402],_mm_xor_si64(c2[1303],_mm_xor_si64(c2[778],_mm_xor_si64(c2[1316],_mm_xor_si64(c2[956],_mm_xor_si64(c2[256],_mm_xor_si64(c2[1336],_mm_xor_si64(c2[1334],_mm_xor_si64(c2[638],_mm_xor_si64(c2[990],_mm_xor_si64(c2[833],_mm_xor_si64(c2[1009],_mm_xor_si64(c2[468],_mm_xor_si64(c2[669],_mm_xor_si64(c2[492],_mm_xor_si64(c2[1206],_mm_xor_si64(c2[1232],_mm_xor_si64(c2[149],_mm_xor_si64(c2[331],_mm_xor_si64(c2[169],_mm_xor_si64(c2[708],c2[163]))))))))))))))))))))))))))));
+     d2[297]=simde_mm_xor_si64(c2[720],simde_mm_xor_si64(c2[722],simde_mm_xor_si64(c2[3],simde_mm_xor_si64(c2[561],simde_mm_xor_si64(c2[562],simde_mm_xor_si64(c2[204],simde_mm_xor_si64(c2[1122],simde_mm_xor_si64(c2[402],simde_mm_xor_si64(c2[1303],simde_mm_xor_si64(c2[778],simde_mm_xor_si64(c2[1316],simde_mm_xor_si64(c2[956],simde_mm_xor_si64(c2[256],simde_mm_xor_si64(c2[1336],simde_mm_xor_si64(c2[1334],simde_mm_xor_si64(c2[638],simde_mm_xor_si64(c2[990],simde_mm_xor_si64(c2[833],simde_mm_xor_si64(c2[1009],simde_mm_xor_si64(c2[468],simde_mm_xor_si64(c2[669],simde_mm_xor_si64(c2[492],simde_mm_xor_si64(c2[1206],simde_mm_xor_si64(c2[1232],simde_mm_xor_si64(c2[149],simde_mm_xor_si64(c2[331],simde_mm_xor_si64(c2[169],simde_mm_xor_si64(c2[708],c2[163]))))))))))))))))))))))))))));
 
 //row: 34
-     d2[306]=_mm_xor_si64(c2[723],_mm_xor_si64(c2[543],_mm_xor_si64(c2[725],_mm_xor_si64(c2[725],_mm_xor_si64(c2[545],_mm_xor_si64(c2[727],_mm_xor_si64(c2[1265],_mm_xor_si64(c2[8],_mm_xor_si64(c2[904],_mm_xor_si64(c2[564],_mm_xor_si64(c2[384],_mm_xor_si64(c2[566],_mm_xor_si64(c2[385],_mm_xor_si64(c2[558],_mm_xor_si64(c2[18],_mm_xor_si64(c2[380],_mm_xor_si64(c2[200],_mm_xor_si64(c2[1116],_mm_xor_si64(c2[936],_mm_xor_si64(c2[1118],_mm_xor_si64(c2[216],_mm_xor_si64(c2[578],_mm_xor_si64(c2[398],_mm_xor_si64(c2[781],_mm_xor_si64(c2[601],_mm_xor_si64(c2[774],_mm_xor_si64(c2[1319],_mm_xor_si64(c2[1139],_mm_xor_si64(c2[1321],_mm_xor_si64(c2[779],_mm_xor_si64(c2[961],_mm_xor_si64(c2[259],_mm_xor_si64(c2[79],_mm_xor_si64(c2[252],_mm_xor_si64(c2[1159],_mm_xor_si64(c2[1332],_mm_xor_si64(c2[1157],_mm_xor_si64(c2[80],_mm_xor_si64(c2[1339],_mm_xor_si64(c2[632],_mm_xor_si64(c2[452],_mm_xor_si64(c2[634],_mm_xor_si64(c2[813],_mm_xor_si64(c2[1175],_mm_xor_si64(c2[995],_mm_xor_si64(c2[836],_mm_xor_si64(c2[656],_mm_xor_si64(c2[829],_mm_xor_si64(c2[1012],_mm_xor_si64(c2[832],_mm_xor_si64(c2[1014],_mm_xor_si64(c2[291],_mm_xor_si64(c2[653],_mm_xor_si64(c2[473],_mm_xor_si64(c2[672],_mm_xor_si64(c2[492],_mm_xor_si64(c2[674],_mm_xor_si64(c2[306],_mm_xor_si64(c2[668],_mm_xor_si64(c2[488],_mm_xor_si64(c2[1226],_mm_xor_si64(c2[1046],_mm_xor_si64(c2[1228],_mm_xor_si64(c2[1411],_mm_xor_si64(c2[145],_mm_xor_si64(c2[145],_mm_xor_si64(c2[507],_mm_xor_si64(c2[327],_mm_xor_si64(c2[163],_mm_xor_si64(c2[1422],_mm_xor_si64(c2[165],_mm_xor_si64(c2[702],_mm_xor_si64(c2[522],_mm_xor_si64(c2[704],_mm_xor_si64(c2[1425],_mm_xor_si64(c2[348],c2[168]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+     d2[306]=simde_mm_xor_si64(c2[723],simde_mm_xor_si64(c2[543],simde_mm_xor_si64(c2[725],simde_mm_xor_si64(c2[725],simde_mm_xor_si64(c2[545],simde_mm_xor_si64(c2[727],simde_mm_xor_si64(c2[1265],simde_mm_xor_si64(c2[8],simde_mm_xor_si64(c2[904],simde_mm_xor_si64(c2[564],simde_mm_xor_si64(c2[384],simde_mm_xor_si64(c2[566],simde_mm_xor_si64(c2[385],simde_mm_xor_si64(c2[558],simde_mm_xor_si64(c2[18],simde_mm_xor_si64(c2[380],simde_mm_xor_si64(c2[200],simde_mm_xor_si64(c2[1116],simde_mm_xor_si64(c2[936],simde_mm_xor_si64(c2[1118],simde_mm_xor_si64(c2[216],simde_mm_xor_si64(c2[578],simde_mm_xor_si64(c2[398],simde_mm_xor_si64(c2[781],simde_mm_xor_si64(c2[601],simde_mm_xor_si64(c2[774],simde_mm_xor_si64(c2[1319],simde_mm_xor_si64(c2[1139],simde_mm_xor_si64(c2[1321],simde_mm_xor_si64(c2[779],simde_mm_xor_si64(c2[961],simde_mm_xor_si64(c2[259],simde_mm_xor_si64(c2[79],simde_mm_xor_si64(c2[252],simde_mm_xor_si64(c2[1159],simde_mm_xor_si64(c2[1332],simde_mm_xor_si64(c2[1157],simde_mm_xor_si64(c2[80],simde_mm_xor_si64(c2[1339],simde_mm_xor_si64(c2[632],simde_mm_xor_si64(c2[452],simde_mm_xor_si64(c2[634],simde_mm_xor_si64(c2[813],simde_mm_xor_si64(c2[1175],simde_mm_xor_si64(c2[995],simde_mm_xor_si64(c2[836],simde_mm_xor_si64(c2[656],simde_mm_xor_si64(c2[829],simde_mm_xor_si64(c2[1012],simde_mm_xor_si64(c2[832],simde_mm_xor_si64(c2[1014],simde_mm_xor_si64(c2[291],simde_mm_xor_si64(c2[653],simde_mm_xor_si64(c2[473],simde_mm_xor_si64(c2[672],simde_mm_xor_si64(c2[492],simde_mm_xor_si64(c2[674],simde_mm_xor_si64(c2[306],simde_mm_xor_si64(c2[668],simde_mm_xor_si64(c2[488],simde_mm_xor_si64(c2[1226],simde_mm_xor_si64(c2[1046],simde_mm_xor_si64(c2[1228],simde_mm_xor_si64(c2[1411],simde_mm_xor_si64(c2[145],simde_mm_xor_si64(c2[145],simde_mm_xor_si64(c2[507],simde_mm_xor_si64(c2[327],simde_mm_xor_si64(c2[163],simde_mm_xor_si64(c2[1422],simde_mm_xor_si64(c2[165],simde_mm_xor_si64(c2[702],simde_mm_xor_si64(c2[522],simde_mm_xor_si64(c2[704],simde_mm_xor_si64(c2[1425],simde_mm_xor_si64(c2[348],c2[168]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
 
 //row: 35
-     d2[315]=_mm_xor_si64(c2[3],_mm_xor_si64(c2[1262],_mm_xor_si64(c2[1264],_mm_xor_si64(c2[545],_mm_xor_si64(c2[1283],_mm_xor_si64(c2[1103],_mm_xor_si64(c2[1104],_mm_xor_si64(c2[746],_mm_xor_si64(c2[745],_mm_xor_si64(c2[396],_mm_xor_si64(c2[216],_mm_xor_si64(c2[944],_mm_xor_si64(c2[61],_mm_xor_si64(c2[1320],_mm_xor_si64(c2[419],_mm_xor_si64(c2[59],_mm_xor_si64(c2[798],_mm_xor_si64(c2[439],_mm_xor_si64(c2[437],_mm_xor_si64(c2[1171],_mm_xor_si64(c2[93],_mm_xor_si64(c2[278],_mm_xor_si64(c2[116],_mm_xor_si64(c2[1375],_mm_xor_si64(c2[112],_mm_xor_si64(c2[1010],_mm_xor_si64(c2[1211],_mm_xor_si64(c2[1034],_mm_xor_si64(c2[326],_mm_xor_si64(c2[691],_mm_xor_si64(c2[864],_mm_xor_si64(c2[882],_mm_xor_si64(c2[702],_mm_xor_si64(c2[1250],c2[705]))))))))))))))))))))))))))))))))));
+     d2[315]=simde_mm_xor_si64(c2[3],simde_mm_xor_si64(c2[1262],simde_mm_xor_si64(c2[1264],simde_mm_xor_si64(c2[545],simde_mm_xor_si64(c2[1283],simde_mm_xor_si64(c2[1103],simde_mm_xor_si64(c2[1104],simde_mm_xor_si64(c2[746],simde_mm_xor_si64(c2[745],simde_mm_xor_si64(c2[396],simde_mm_xor_si64(c2[216],simde_mm_xor_si64(c2[944],simde_mm_xor_si64(c2[61],simde_mm_xor_si64(c2[1320],simde_mm_xor_si64(c2[419],simde_mm_xor_si64(c2[59],simde_mm_xor_si64(c2[798],simde_mm_xor_si64(c2[439],simde_mm_xor_si64(c2[437],simde_mm_xor_si64(c2[1171],simde_mm_xor_si64(c2[93],simde_mm_xor_si64(c2[278],simde_mm_xor_si64(c2[116],simde_mm_xor_si64(c2[1375],simde_mm_xor_si64(c2[112],simde_mm_xor_si64(c2[1010],simde_mm_xor_si64(c2[1211],simde_mm_xor_si64(c2[1034],simde_mm_xor_si64(c2[326],simde_mm_xor_si64(c2[691],simde_mm_xor_si64(c2[864],simde_mm_xor_si64(c2[882],simde_mm_xor_si64(c2[702],simde_mm_xor_si64(c2[1250],c2[705]))))))))))))))))))))))))))))))))));
 
 //row: 36
-     d2[324]=_mm_xor_si64(c2[186],_mm_xor_si64(c2[223],c2[489]));
+     d2[324]=simde_mm_xor_si64(c2[186],simde_mm_xor_si64(c2[223],c2[489]));
 
 //row: 37
-     d2[333]=_mm_xor_si64(c2[543],_mm_xor_si64(c2[367],_mm_xor_si64(c2[545],_mm_xor_si64(c2[360],_mm_xor_si64(c2[1265],_mm_xor_si64(c2[1080],_mm_xor_si64(c2[384],_mm_xor_si64(c2[199],_mm_xor_si64(c2[385],_mm_xor_si64(c2[200],_mm_xor_si64(c2[18],_mm_xor_si64(c2[22],_mm_xor_si64(c2[1281],_mm_xor_si64(c2[936],_mm_xor_si64(c2[760],_mm_xor_si64(c2[216],_mm_xor_si64(c2[220],_mm_xor_si64(c2[40],_mm_xor_si64(c2[601],_mm_xor_si64(c2[416],_mm_xor_si64(c2[1139],_mm_xor_si64(c2[954],_mm_xor_si64(c2[779],_mm_xor_si64(c2[594],_mm_xor_si64(c2[79],_mm_xor_si64(c2[1333],_mm_xor_si64(c2[1159],_mm_xor_si64(c2[974],_mm_xor_si64(c2[1157],_mm_xor_si64(c2[1152],_mm_xor_si64(c2[972],_mm_xor_si64(c2[452],_mm_xor_si64(c2[276],_mm_xor_si64(c2[813],_mm_xor_si64(c2[817],_mm_xor_si64(c2[637],_mm_xor_si64(c2[656],_mm_xor_si64(c2[471],_mm_xor_si64(c2[832],_mm_xor_si64(c2[656],_mm_xor_si64(c2[291],_mm_xor_si64(c2[295],_mm_xor_si64(c2[115],_mm_xor_si64(c2[492],_mm_xor_si64(c2[307],_mm_xor_si64(c2[306],_mm_xor_si64(c2[310],_mm_xor_si64(c2[130],_mm_xor_si64(c2[1046],_mm_xor_si64(c2[870],_mm_xor_si64(c2[1411],_mm_xor_si64(c2[1226],_mm_xor_si64(c2[145],_mm_xor_si64(c2[149],_mm_xor_si64(c2[1408],_mm_xor_si64(c2[1422],_mm_xor_si64(c2[1246],_mm_xor_si64(c2[522],_mm_xor_si64(c2[346],_mm_xor_si64(c2[1425],_mm_xor_si64(c2[1429],c2[1249])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+     d2[333]=simde_mm_xor_si64(c2[543],simde_mm_xor_si64(c2[367],simde_mm_xor_si64(c2[545],simde_mm_xor_si64(c2[360],simde_mm_xor_si64(c2[1265],simde_mm_xor_si64(c2[1080],simde_mm_xor_si64(c2[384],simde_mm_xor_si64(c2[199],simde_mm_xor_si64(c2[385],simde_mm_xor_si64(c2[200],simde_mm_xor_si64(c2[18],simde_mm_xor_si64(c2[22],simde_mm_xor_si64(c2[1281],simde_mm_xor_si64(c2[936],simde_mm_xor_si64(c2[760],simde_mm_xor_si64(c2[216],simde_mm_xor_si64(c2[220],simde_mm_xor_si64(c2[40],simde_mm_xor_si64(c2[601],simde_mm_xor_si64(c2[416],simde_mm_xor_si64(c2[1139],simde_mm_xor_si64(c2[954],simde_mm_xor_si64(c2[779],simde_mm_xor_si64(c2[594],simde_mm_xor_si64(c2[79],simde_mm_xor_si64(c2[1333],simde_mm_xor_si64(c2[1159],simde_mm_xor_si64(c2[974],simde_mm_xor_si64(c2[1157],simde_mm_xor_si64(c2[1152],simde_mm_xor_si64(c2[972],simde_mm_xor_si64(c2[452],simde_mm_xor_si64(c2[276],simde_mm_xor_si64(c2[813],simde_mm_xor_si64(c2[817],simde_mm_xor_si64(c2[637],simde_mm_xor_si64(c2[656],simde_mm_xor_si64(c2[471],simde_mm_xor_si64(c2[832],simde_mm_xor_si64(c2[656],simde_mm_xor_si64(c2[291],simde_mm_xor_si64(c2[295],simde_mm_xor_si64(c2[115],simde_mm_xor_si64(c2[492],simde_mm_xor_si64(c2[307],simde_mm_xor_si64(c2[306],simde_mm_xor_si64(c2[310],simde_mm_xor_si64(c2[130],simde_mm_xor_si64(c2[1046],simde_mm_xor_si64(c2[870],simde_mm_xor_si64(c2[1411],simde_mm_xor_si64(c2[1226],simde_mm_xor_si64(c2[145],simde_mm_xor_si64(c2[149],simde_mm_xor_si64(c2[1408],simde_mm_xor_si64(c2[1422],simde_mm_xor_si64(c2[1246],simde_mm_xor_si64(c2[522],simde_mm_xor_si64(c2[346],simde_mm_xor_si64(c2[1425],simde_mm_xor_si64(c2[1429],c2[1249])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
 
 //row: 38
-     d2[342]=_mm_xor_si64(c2[720],_mm_xor_si64(c2[540],_mm_xor_si64(c2[542],_mm_xor_si64(c2[1262],_mm_xor_si64(c2[561],_mm_xor_si64(c2[381],_mm_xor_si64(c2[382],_mm_xor_si64(c2[24],_mm_xor_si64(c2[198],_mm_xor_si64(c2[1122],_mm_xor_si64(c2[942],_mm_xor_si64(c2[222],_mm_xor_si64(c2[778],_mm_xor_si64(c2[598],_mm_xor_si64(c2[1136],_mm_xor_si64(c2[776],_mm_xor_si64(c2[76],_mm_xor_si64(c2[1156],_mm_xor_si64(c2[1154],_mm_xor_si64(c2[458],_mm_xor_si64(c2[810],_mm_xor_si64(c2[1178],_mm_xor_si64(c2[833],_mm_xor_si64(c2[653],_mm_xor_si64(c2[829],_mm_xor_si64(c2[288],_mm_xor_si64(c2[489],_mm_xor_si64(c2[312],_mm_xor_si64(c2[1052],_mm_xor_si64(c2[1408],_mm_xor_si64(c2[151],_mm_xor_si64(c2[169],_mm_xor_si64(c2[1428],_mm_xor_si64(c2[528],c2[1422]))))))))))))))))))))))))))))))))));
+     d2[342]=simde_mm_xor_si64(c2[720],simde_mm_xor_si64(c2[540],simde_mm_xor_si64(c2[542],simde_mm_xor_si64(c2[1262],simde_mm_xor_si64(c2[561],simde_mm_xor_si64(c2[381],simde_mm_xor_si64(c2[382],simde_mm_xor_si64(c2[24],simde_mm_xor_si64(c2[198],simde_mm_xor_si64(c2[1122],simde_mm_xor_si64(c2[942],simde_mm_xor_si64(c2[222],simde_mm_xor_si64(c2[778],simde_mm_xor_si64(c2[598],simde_mm_xor_si64(c2[1136],simde_mm_xor_si64(c2[776],simde_mm_xor_si64(c2[76],simde_mm_xor_si64(c2[1156],simde_mm_xor_si64(c2[1154],simde_mm_xor_si64(c2[458],simde_mm_xor_si64(c2[810],simde_mm_xor_si64(c2[1178],simde_mm_xor_si64(c2[833],simde_mm_xor_si64(c2[653],simde_mm_xor_si64(c2[829],simde_mm_xor_si64(c2[288],simde_mm_xor_si64(c2[489],simde_mm_xor_si64(c2[312],simde_mm_xor_si64(c2[1052],simde_mm_xor_si64(c2[1408],simde_mm_xor_si64(c2[151],simde_mm_xor_si64(c2[169],simde_mm_xor_si64(c2[1428],simde_mm_xor_si64(c2[528],c2[1422]))))))))))))))))))))))))))))))))));
 
 //row: 39
-     d2[351]=_mm_xor_si64(c2[180],_mm_xor_si64(c2[0],_mm_xor_si64(c2[182],_mm_xor_si64(c2[2],_mm_xor_si64(c2[722],_mm_xor_si64(c2[1265],_mm_xor_si64(c2[21],_mm_xor_si64(c2[1280],_mm_xor_si64(c2[1281],_mm_xor_si64(c2[923],_mm_xor_si64(c2[582],_mm_xor_si64(c2[402],_mm_xor_si64(c2[1121],_mm_xor_si64(c2[238],_mm_xor_si64(c2[58],_mm_xor_si64(c2[776],_mm_xor_si64(c2[596],_mm_xor_si64(c2[236],_mm_xor_si64(c2[1155],_mm_xor_si64(c2[975],_mm_xor_si64(c2[616],_mm_xor_si64(c2[614],_mm_xor_si64(c2[98],_mm_xor_si64(c2[1357],_mm_xor_si64(c2[270],_mm_xor_si64(c2[293],_mm_xor_si64(c2[113],_mm_xor_si64(c2[469],_mm_xor_si64(c2[289],_mm_xor_si64(c2[1196],_mm_xor_si64(c2[129],_mm_xor_si64(c2[1388],_mm_xor_si64(c2[1211],_mm_xor_si64(c2[130],_mm_xor_si64(c2[692],_mm_xor_si64(c2[512],_mm_xor_si64(c2[868],_mm_xor_si64(c2[1050],_mm_xor_si64(c2[1068],_mm_xor_si64(c2[888],_mm_xor_si64(c2[168],_mm_xor_si64(c2[1427],c2[882]))))))))))))))))))))))))))))))))))))))))));
+     d2[351]=simde_mm_xor_si64(c2[180],simde_mm_xor_si64(c2[0],simde_mm_xor_si64(c2[182],simde_mm_xor_si64(c2[2],simde_mm_xor_si64(c2[722],simde_mm_xor_si64(c2[1265],simde_mm_xor_si64(c2[21],simde_mm_xor_si64(c2[1280],simde_mm_xor_si64(c2[1281],simde_mm_xor_si64(c2[923],simde_mm_xor_si64(c2[582],simde_mm_xor_si64(c2[402],simde_mm_xor_si64(c2[1121],simde_mm_xor_si64(c2[238],simde_mm_xor_si64(c2[58],simde_mm_xor_si64(c2[776],simde_mm_xor_si64(c2[596],simde_mm_xor_si64(c2[236],simde_mm_xor_si64(c2[1155],simde_mm_xor_si64(c2[975],simde_mm_xor_si64(c2[616],simde_mm_xor_si64(c2[614],simde_mm_xor_si64(c2[98],simde_mm_xor_si64(c2[1357],simde_mm_xor_si64(c2[270],simde_mm_xor_si64(c2[293],simde_mm_xor_si64(c2[113],simde_mm_xor_si64(c2[469],simde_mm_xor_si64(c2[289],simde_mm_xor_si64(c2[1196],simde_mm_xor_si64(c2[129],simde_mm_xor_si64(c2[1388],simde_mm_xor_si64(c2[1211],simde_mm_xor_si64(c2[130],simde_mm_xor_si64(c2[692],simde_mm_xor_si64(c2[512],simde_mm_xor_si64(c2[868],simde_mm_xor_si64(c2[1050],simde_mm_xor_si64(c2[1068],simde_mm_xor_si64(c2[888],simde_mm_xor_si64(c2[168],simde_mm_xor_si64(c2[1427],c2[882]))))))))))))))))))))))))))))))))))))))))));
 
 //row: 40
-     d2[360]=_mm_xor_si64(c2[540],_mm_xor_si64(c2[4],_mm_xor_si64(c2[542],_mm_xor_si64(c2[6],_mm_xor_si64(c2[1262],_mm_xor_si64(c2[726],_mm_xor_si64(c2[381],_mm_xor_si64(c2[1284],_mm_xor_si64(c2[382],_mm_xor_si64(c2[1285],_mm_xor_si64(c2[24],_mm_xor_si64(c2[1098],_mm_xor_si64(c2[918],_mm_xor_si64(c2[942],_mm_xor_si64(c2[397],_mm_xor_si64(c2[222],_mm_xor_si64(c2[1296],_mm_xor_si64(c2[1116],_mm_xor_si64(c2[1116],_mm_xor_si64(c2[598],_mm_xor_si64(c2[62],_mm_xor_si64(c2[1136],_mm_xor_si64(c2[600],_mm_xor_si64(c2[776],_mm_xor_si64(c2[240],_mm_xor_si64(c2[76],_mm_xor_si64(c2[979],_mm_xor_si64(c2[1156],_mm_xor_si64(c2[620],_mm_xor_si64(c2[1154],_mm_xor_si64(c2[798],_mm_xor_si64(c2[618],_mm_xor_si64(c2[458],_mm_xor_si64(c2[1352],_mm_xor_si64(c2[810],_mm_xor_si64(c2[454],_mm_xor_si64(c2[274],_mm_xor_si64(c2[653],_mm_xor_si64(c2[108],_mm_xor_si64(c2[829],_mm_xor_si64(c2[293],_mm_xor_si64(c2[288],_mm_xor_si64(c2[1371],_mm_xor_si64(c2[1191],_mm_xor_si64(c2[489],_mm_xor_si64(c2[1392],_mm_xor_si64(c2[312],_mm_xor_si64(c2[1386],_mm_xor_si64(c2[1206],_mm_xor_si64(c2[1052],_mm_xor_si64(c2[507],_mm_xor_si64(c2[1408],_mm_xor_si64(c2[872],_mm_xor_si64(c2[151],_mm_xor_si64(c2[1225],_mm_xor_si64(c2[1045],_mm_xor_si64(c2[1428],_mm_xor_si64(c2[883],_mm_xor_si64(c2[528],_mm_xor_si64(c2[1422],_mm_xor_si64(c2[1422],_mm_xor_si64(c2[1066],c2[886]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+     d2[360]=simde_mm_xor_si64(c2[540],simde_mm_xor_si64(c2[4],simde_mm_xor_si64(c2[542],simde_mm_xor_si64(c2[6],simde_mm_xor_si64(c2[1262],simde_mm_xor_si64(c2[726],simde_mm_xor_si64(c2[381],simde_mm_xor_si64(c2[1284],simde_mm_xor_si64(c2[382],simde_mm_xor_si64(c2[1285],simde_mm_xor_si64(c2[24],simde_mm_xor_si64(c2[1098],simde_mm_xor_si64(c2[918],simde_mm_xor_si64(c2[942],simde_mm_xor_si64(c2[397],simde_mm_xor_si64(c2[222],simde_mm_xor_si64(c2[1296],simde_mm_xor_si64(c2[1116],simde_mm_xor_si64(c2[1116],simde_mm_xor_si64(c2[598],simde_mm_xor_si64(c2[62],simde_mm_xor_si64(c2[1136],simde_mm_xor_si64(c2[600],simde_mm_xor_si64(c2[776],simde_mm_xor_si64(c2[240],simde_mm_xor_si64(c2[76],simde_mm_xor_si64(c2[979],simde_mm_xor_si64(c2[1156],simde_mm_xor_si64(c2[620],simde_mm_xor_si64(c2[1154],simde_mm_xor_si64(c2[798],simde_mm_xor_si64(c2[618],simde_mm_xor_si64(c2[458],simde_mm_xor_si64(c2[1352],simde_mm_xor_si64(c2[810],simde_mm_xor_si64(c2[454],simde_mm_xor_si64(c2[274],simde_mm_xor_si64(c2[653],simde_mm_xor_si64(c2[108],simde_mm_xor_si64(c2[829],simde_mm_xor_si64(c2[293],simde_mm_xor_si64(c2[288],simde_mm_xor_si64(c2[1371],simde_mm_xor_si64(c2[1191],simde_mm_xor_si64(c2[489],simde_mm_xor_si64(c2[1392],simde_mm_xor_si64(c2[312],simde_mm_xor_si64(c2[1386],simde_mm_xor_si64(c2[1206],simde_mm_xor_si64(c2[1052],simde_mm_xor_si64(c2[507],simde_mm_xor_si64(c2[1408],simde_mm_xor_si64(c2[872],simde_mm_xor_si64(c2[151],simde_mm_xor_si64(c2[1225],simde_mm_xor_si64(c2[1045],simde_mm_xor_si64(c2[1428],simde_mm_xor_si64(c2[883],simde_mm_xor_si64(c2[528],simde_mm_xor_si64(c2[1422],simde_mm_xor_si64(c2[1422],simde_mm_xor_si64(c2[1066],c2[886]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
 
 //row: 41
-     d2[369]=_mm_xor_si64(c2[1082],_mm_xor_si64(c2[902],_mm_xor_si64(c2[904],_mm_xor_si64(c2[185],_mm_xor_si64(c2[923],_mm_xor_si64(c2[743],_mm_xor_si64(c2[744],_mm_xor_si64(c2[386],_mm_xor_si64(c2[24],_mm_xor_si64(c2[36],_mm_xor_si64(c2[1304],_mm_xor_si64(c2[584],_mm_xor_si64(c2[1140],_mm_xor_si64(c2[960],_mm_xor_si64(c2[59],_mm_xor_si64(c2[1138],_mm_xor_si64(c2[438],_mm_xor_si64(c2[79],_mm_xor_si64(c2[77],_mm_xor_si64(c2[811],_mm_xor_si64(c2[1172],_mm_xor_si64(c2[1355],_mm_xor_si64(c2[1195],_mm_xor_si64(c2[1015],_mm_xor_si64(c2[1191],_mm_xor_si64(c2[650],_mm_xor_si64(c2[851],_mm_xor_si64(c2[674],_mm_xor_si64(c2[1405],_mm_xor_si64(c2[331],_mm_xor_si64(c2[504],_mm_xor_si64(c2[522],_mm_xor_si64(c2[342],_mm_xor_si64(c2[890],c2[345]))))))))))))))))))))))))))))))))));
+     d2[369]=simde_mm_xor_si64(c2[1082],simde_mm_xor_si64(c2[902],simde_mm_xor_si64(c2[904],simde_mm_xor_si64(c2[185],simde_mm_xor_si64(c2[923],simde_mm_xor_si64(c2[743],simde_mm_xor_si64(c2[744],simde_mm_xor_si64(c2[386],simde_mm_xor_si64(c2[24],simde_mm_xor_si64(c2[36],simde_mm_xor_si64(c2[1304],simde_mm_xor_si64(c2[584],simde_mm_xor_si64(c2[1140],simde_mm_xor_si64(c2[960],simde_mm_xor_si64(c2[59],simde_mm_xor_si64(c2[1138],simde_mm_xor_si64(c2[438],simde_mm_xor_si64(c2[79],simde_mm_xor_si64(c2[77],simde_mm_xor_si64(c2[811],simde_mm_xor_si64(c2[1172],simde_mm_xor_si64(c2[1355],simde_mm_xor_si64(c2[1195],simde_mm_xor_si64(c2[1015],simde_mm_xor_si64(c2[1191],simde_mm_xor_si64(c2[650],simde_mm_xor_si64(c2[851],simde_mm_xor_si64(c2[674],simde_mm_xor_si64(c2[1405],simde_mm_xor_si64(c2[331],simde_mm_xor_si64(c2[504],simde_mm_xor_si64(c2[522],simde_mm_xor_si64(c2[342],simde_mm_xor_si64(c2[890],c2[345]))))))))))))))))))))))))))))))))));
   }
 }
diff --git a/openair1/PHY/CODING/nrLDPC_encoder/ldpc_BG2_Zc80_byte.c b/openair1/PHY/CODING/nrLDPC_encoder/ldpc_BG2_Zc80_byte.c
index de7b5b06ef7f19c4905f966f36404c48a1c30c43..8512156d24a217dcff1658cbb0dcd2bccb61c182 100644
--- a/openair1/PHY/CODING/nrLDPC_encoder/ldpc_BG2_Zc80_byte.c
+++ b/openair1/PHY/CODING/nrLDPC_encoder/ldpc_BG2_Zc80_byte.c
@@ -1,9 +1,8 @@
 #include "PHY/sse_intrin.h"
 // generated code for Zc=80, byte encoding
 static inline __attribute__ ((no_sanitize("address", "undefined"))) void ldpc_BG2_Zc80_byte(uint8_t *c,uint8_t *d) {
-  __m128i *csimd=(__m128i *)c,*dsimd=(__m128i *)d;
-
-  __m128i *c2,*d2;
+  simde__m128i *csimd=(simde__m128i *)c,*dsimd=(simde__m128i *)d;
+  simde__m128i *c2,*d2;
 
   int i2;
   for (i2=0; i2<5; i2++) {
@@ -11,129 +10,129 @@ static inline __attribute__ ((no_sanitize("address", "undefined"))) void ldpc_BG
      d2=&dsimd[i2];
 
 //row: 0
-     d2[0]=_mm_xor_si128(c2[1504],_mm_xor_si128(c2[803],_mm_xor_si128(c2[301],_mm_xor_si128(c2[1514],_mm_xor_si128(c2[1310],_mm_xor_si128(c2[512],_mm_xor_si128(c2[1524],_mm_xor_si128(c2[1420],_mm_xor_si128(c2[1534],_mm_xor_si128(c2[1132],_mm_xor_si128(c2[231],_mm_xor_si128(c2[1544],_mm_xor_si128(c2[840],_mm_xor_si128(c2[541],_mm_xor_si128(c2[1554],_mm_xor_si128(c2[154],_mm_xor_si128(c2[1564],_mm_xor_si128(c2[760],_mm_xor_si128(c2[1160],_mm_xor_si128(c2[1574],_mm_xor_si128(c2[873],_mm_xor_si128(c2[1584],_mm_xor_si128(c2[1181],_mm_xor_si128(c2[483],_mm_xor_si128(c2[1594],_mm_xor_si128(c2[693],c2[1392]))))))))))))))))))))))))));
+     d2[0]=simde_mm_xor_si128(c2[1504],simde_mm_xor_si128(c2[803],simde_mm_xor_si128(c2[301],simde_mm_xor_si128(c2[1514],simde_mm_xor_si128(c2[1310],simde_mm_xor_si128(c2[512],simde_mm_xor_si128(c2[1524],simde_mm_xor_si128(c2[1420],simde_mm_xor_si128(c2[1534],simde_mm_xor_si128(c2[1132],simde_mm_xor_si128(c2[231],simde_mm_xor_si128(c2[1544],simde_mm_xor_si128(c2[840],simde_mm_xor_si128(c2[541],simde_mm_xor_si128(c2[1554],simde_mm_xor_si128(c2[154],simde_mm_xor_si128(c2[1564],simde_mm_xor_si128(c2[760],simde_mm_xor_si128(c2[1160],simde_mm_xor_si128(c2[1574],simde_mm_xor_si128(c2[873],simde_mm_xor_si128(c2[1584],simde_mm_xor_si128(c2[1181],simde_mm_xor_si128(c2[483],simde_mm_xor_si128(c2[1594],simde_mm_xor_si128(c2[693],c2[1392]))))))))))))))))))))))))));
 
 //row: 1
-     d2[5]=_mm_xor_si128(c2[0],_mm_xor_si128(c2[1504],_mm_xor_si128(c2[803],_mm_xor_si128(c2[301],_mm_xor_si128(c2[10],_mm_xor_si128(c2[1514],_mm_xor_si128(c2[1310],_mm_xor_si128(c2[512],_mm_xor_si128(c2[20],_mm_xor_si128(c2[1524],_mm_xor_si128(c2[1420],_mm_xor_si128(c2[30],_mm_xor_si128(c2[1534],_mm_xor_si128(c2[1132],_mm_xor_si128(c2[231],_mm_xor_si128(c2[1544],_mm_xor_si128(c2[840],_mm_xor_si128(c2[541],_mm_xor_si128(c2[1554],_mm_xor_si128(c2[154],_mm_xor_si128(c2[60],_mm_xor_si128(c2[1564],_mm_xor_si128(c2[760],_mm_xor_si128(c2[1160],_mm_xor_si128(c2[1574],_mm_xor_si128(c2[873],_mm_xor_si128(c2[1584],_mm_xor_si128(c2[1181],_mm_xor_si128(c2[483],_mm_xor_si128(c2[90],_mm_xor_si128(c2[1594],_mm_xor_si128(c2[693],c2[1392]))))))))))))))))))))))))))))))));
+     d2[5]=simde_mm_xor_si128(c2[0],simde_mm_xor_si128(c2[1504],simde_mm_xor_si128(c2[803],simde_mm_xor_si128(c2[301],simde_mm_xor_si128(c2[10],simde_mm_xor_si128(c2[1514],simde_mm_xor_si128(c2[1310],simde_mm_xor_si128(c2[512],simde_mm_xor_si128(c2[20],simde_mm_xor_si128(c2[1524],simde_mm_xor_si128(c2[1420],simde_mm_xor_si128(c2[30],simde_mm_xor_si128(c2[1534],simde_mm_xor_si128(c2[1132],simde_mm_xor_si128(c2[231],simde_mm_xor_si128(c2[1544],simde_mm_xor_si128(c2[840],simde_mm_xor_si128(c2[541],simde_mm_xor_si128(c2[1554],simde_mm_xor_si128(c2[154],simde_mm_xor_si128(c2[60],simde_mm_xor_si128(c2[1564],simde_mm_xor_si128(c2[760],simde_mm_xor_si128(c2[1160],simde_mm_xor_si128(c2[1574],simde_mm_xor_si128(c2[873],simde_mm_xor_si128(c2[1584],simde_mm_xor_si128(c2[1181],simde_mm_xor_si128(c2[483],simde_mm_xor_si128(c2[90],simde_mm_xor_si128(c2[1594],simde_mm_xor_si128(c2[693],c2[1392]))))))))))))))))))))))))))))))));
 
 //row: 2
-     d2[10]=_mm_xor_si128(c2[0],_mm_xor_si128(c2[1504],_mm_xor_si128(c2[903],_mm_xor_si128(c2[803],_mm_xor_si128(c2[301],_mm_xor_si128(c2[10],_mm_xor_si128(c2[1514],_mm_xor_si128(c2[1310],_mm_xor_si128(c2[512],_mm_xor_si128(c2[20],_mm_xor_si128(c2[1524],_mm_xor_si128(c2[1420],_mm_xor_si128(c2[30],_mm_xor_si128(c2[1534],_mm_xor_si128(c2[1232],_mm_xor_si128(c2[1132],_mm_xor_si128(c2[231],_mm_xor_si128(c2[40],_mm_xor_si128(c2[1544],_mm_xor_si128(c2[840],_mm_xor_si128(c2[541],_mm_xor_si128(c2[50],_mm_xor_si128(c2[1554],_mm_xor_si128(c2[154],_mm_xor_si128(c2[60],_mm_xor_si128(c2[1564],_mm_xor_si128(c2[860],_mm_xor_si128(c2[760],_mm_xor_si128(c2[1160],_mm_xor_si128(c2[70],_mm_xor_si128(c2[1574],_mm_xor_si128(c2[873],_mm_xor_si128(c2[80],_mm_xor_si128(c2[1584],_mm_xor_si128(c2[1181],_mm_xor_si128(c2[483],_mm_xor_si128(c2[90],_mm_xor_si128(c2[1594],_mm_xor_si128(c2[793],_mm_xor_si128(c2[693],c2[1392]))))))))))))))))))))))))))))))))))))))));
+     d2[10]=simde_mm_xor_si128(c2[0],simde_mm_xor_si128(c2[1504],simde_mm_xor_si128(c2[903],simde_mm_xor_si128(c2[803],simde_mm_xor_si128(c2[301],simde_mm_xor_si128(c2[10],simde_mm_xor_si128(c2[1514],simde_mm_xor_si128(c2[1310],simde_mm_xor_si128(c2[512],simde_mm_xor_si128(c2[20],simde_mm_xor_si128(c2[1524],simde_mm_xor_si128(c2[1420],simde_mm_xor_si128(c2[30],simde_mm_xor_si128(c2[1534],simde_mm_xor_si128(c2[1232],simde_mm_xor_si128(c2[1132],simde_mm_xor_si128(c2[231],simde_mm_xor_si128(c2[40],simde_mm_xor_si128(c2[1544],simde_mm_xor_si128(c2[840],simde_mm_xor_si128(c2[541],simde_mm_xor_si128(c2[50],simde_mm_xor_si128(c2[1554],simde_mm_xor_si128(c2[154],simde_mm_xor_si128(c2[60],simde_mm_xor_si128(c2[1564],simde_mm_xor_si128(c2[860],simde_mm_xor_si128(c2[760],simde_mm_xor_si128(c2[1160],simde_mm_xor_si128(c2[70],simde_mm_xor_si128(c2[1574],simde_mm_xor_si128(c2[873],simde_mm_xor_si128(c2[80],simde_mm_xor_si128(c2[1584],simde_mm_xor_si128(c2[1181],simde_mm_xor_si128(c2[483],simde_mm_xor_si128(c2[90],simde_mm_xor_si128(c2[1594],simde_mm_xor_si128(c2[793],simde_mm_xor_si128(c2[693],c2[1392]))))))))))))))))))))))))))))))))))))))));
 
 //row: 3
-     d2[15]=_mm_xor_si128(c2[1504],_mm_xor_si128(c2[803],_mm_xor_si128(c2[301],_mm_xor_si128(c2[1514],_mm_xor_si128(c2[1310],_mm_xor_si128(c2[612],_mm_xor_si128(c2[512],_mm_xor_si128(c2[1524],_mm_xor_si128(c2[1520],_mm_xor_si128(c2[1420],_mm_xor_si128(c2[1534],_mm_xor_si128(c2[1132],_mm_xor_si128(c2[231],_mm_xor_si128(c2[1544],_mm_xor_si128(c2[840],_mm_xor_si128(c2[641],_mm_xor_si128(c2[541],_mm_xor_si128(c2[1554],_mm_xor_si128(c2[254],_mm_xor_si128(c2[154],_mm_xor_si128(c2[1564],_mm_xor_si128(c2[760],_mm_xor_si128(c2[1260],_mm_xor_si128(c2[1160],_mm_xor_si128(c2[1574],_mm_xor_si128(c2[973],_mm_xor_si128(c2[873],_mm_xor_si128(c2[1584],_mm_xor_si128(c2[1181],_mm_xor_si128(c2[583],_mm_xor_si128(c2[483],_mm_xor_si128(c2[1594],_mm_xor_si128(c2[693],_mm_xor_si128(c2[1492],c2[1392]))))))))))))))))))))))))))))))))));
+     d2[15]=simde_mm_xor_si128(c2[1504],simde_mm_xor_si128(c2[803],simde_mm_xor_si128(c2[301],simde_mm_xor_si128(c2[1514],simde_mm_xor_si128(c2[1310],simde_mm_xor_si128(c2[612],simde_mm_xor_si128(c2[512],simde_mm_xor_si128(c2[1524],simde_mm_xor_si128(c2[1520],simde_mm_xor_si128(c2[1420],simde_mm_xor_si128(c2[1534],simde_mm_xor_si128(c2[1132],simde_mm_xor_si128(c2[231],simde_mm_xor_si128(c2[1544],simde_mm_xor_si128(c2[840],simde_mm_xor_si128(c2[641],simde_mm_xor_si128(c2[541],simde_mm_xor_si128(c2[1554],simde_mm_xor_si128(c2[254],simde_mm_xor_si128(c2[154],simde_mm_xor_si128(c2[1564],simde_mm_xor_si128(c2[760],simde_mm_xor_si128(c2[1260],simde_mm_xor_si128(c2[1160],simde_mm_xor_si128(c2[1574],simde_mm_xor_si128(c2[973],simde_mm_xor_si128(c2[873],simde_mm_xor_si128(c2[1584],simde_mm_xor_si128(c2[1181],simde_mm_xor_si128(c2[583],simde_mm_xor_si128(c2[483],simde_mm_xor_si128(c2[1594],simde_mm_xor_si128(c2[693],simde_mm_xor_si128(c2[1492],c2[1392]))))))))))))))))))))))))))))))))));
 
 //row: 4
-     d2[20]=_mm_xor_si128(c2[1304],_mm_xor_si128(c2[1204],_mm_xor_si128(c2[503],_mm_xor_si128(c2[1],_mm_xor_si128(c2[0],_mm_xor_si128(c2[1314],_mm_xor_si128(c2[1214],_mm_xor_si128(c2[1010],_mm_xor_si128(c2[212],_mm_xor_si128(c2[813],_mm_xor_si128(c2[1324],_mm_xor_si128(c2[1224],_mm_xor_si128(c2[1120],_mm_xor_si128(c2[1334],_mm_xor_si128(c2[1234],_mm_xor_si128(c2[832],_mm_xor_si128(c2[1530],_mm_xor_si128(c2[1244],_mm_xor_si128(c2[540],_mm_xor_si128(c2[241],_mm_xor_si128(c2[1254],_mm_xor_si128(c2[1453],_mm_xor_si128(c2[1364],_mm_xor_si128(c2[1264],_mm_xor_si128(c2[460],_mm_xor_si128(c2[860],_mm_xor_si128(c2[1274],_mm_xor_si128(c2[573],_mm_xor_si128(c2[1284],_mm_xor_si128(c2[881],_mm_xor_si128(c2[183],_mm_xor_si128(c2[1394],_mm_xor_si128(c2[1294],_mm_xor_si128(c2[393],c2[1092]))))))))))))))))))))))))))))))))));
+     d2[20]=simde_mm_xor_si128(c2[1304],simde_mm_xor_si128(c2[1204],simde_mm_xor_si128(c2[503],simde_mm_xor_si128(c2[1],simde_mm_xor_si128(c2[0],simde_mm_xor_si128(c2[1314],simde_mm_xor_si128(c2[1214],simde_mm_xor_si128(c2[1010],simde_mm_xor_si128(c2[212],simde_mm_xor_si128(c2[813],simde_mm_xor_si128(c2[1324],simde_mm_xor_si128(c2[1224],simde_mm_xor_si128(c2[1120],simde_mm_xor_si128(c2[1334],simde_mm_xor_si128(c2[1234],simde_mm_xor_si128(c2[832],simde_mm_xor_si128(c2[1530],simde_mm_xor_si128(c2[1244],simde_mm_xor_si128(c2[540],simde_mm_xor_si128(c2[241],simde_mm_xor_si128(c2[1254],simde_mm_xor_si128(c2[1453],simde_mm_xor_si128(c2[1364],simde_mm_xor_si128(c2[1264],simde_mm_xor_si128(c2[460],simde_mm_xor_si128(c2[860],simde_mm_xor_si128(c2[1274],simde_mm_xor_si128(c2[573],simde_mm_xor_si128(c2[1284],simde_mm_xor_si128(c2[881],simde_mm_xor_si128(c2[183],simde_mm_xor_si128(c2[1394],simde_mm_xor_si128(c2[1294],simde_mm_xor_si128(c2[393],c2[1092]))))))))))))))))))))))))))))))))));
 
 //row: 5
-     d2[25]=_mm_xor_si128(c2[4],_mm_xor_si128(c2[1503],_mm_xor_si128(c2[802],_mm_xor_si128(c2[300],_mm_xor_si128(c2[0],_mm_xor_si128(c2[14],_mm_xor_si128(c2[1513],_mm_xor_si128(c2[1314],_mm_xor_si128(c2[511],_mm_xor_si128(c2[313],_mm_xor_si128(c2[24],_mm_xor_si128(c2[1523],_mm_xor_si128(c2[1424],_mm_xor_si128(c2[34],_mm_xor_si128(c2[1533],_mm_xor_si128(c2[1131],_mm_xor_si128(c2[230],_mm_xor_si128(c2[1543],_mm_xor_si128(c2[844],_mm_xor_si128(c2[540],_mm_xor_si128(c2[1553],_mm_xor_si128(c2[153],_mm_xor_si128(c2[1453],_mm_xor_si128(c2[64],_mm_xor_si128(c2[1563],_mm_xor_si128(c2[764],_mm_xor_si128(c2[1164],_mm_xor_si128(c2[1573],_mm_xor_si128(c2[872],_mm_xor_si128(c2[1373],_mm_xor_si128(c2[1583],_mm_xor_si128(c2[1180],_mm_xor_si128(c2[482],_mm_xor_si128(c2[94],_mm_xor_si128(c2[1593],_mm_xor_si128(c2[692],c2[1391]))))))))))))))))))))))))))))))))))));
+     d2[25]=simde_mm_xor_si128(c2[4],simde_mm_xor_si128(c2[1503],simde_mm_xor_si128(c2[802],simde_mm_xor_si128(c2[300],simde_mm_xor_si128(c2[0],simde_mm_xor_si128(c2[14],simde_mm_xor_si128(c2[1513],simde_mm_xor_si128(c2[1314],simde_mm_xor_si128(c2[511],simde_mm_xor_si128(c2[313],simde_mm_xor_si128(c2[24],simde_mm_xor_si128(c2[1523],simde_mm_xor_si128(c2[1424],simde_mm_xor_si128(c2[34],simde_mm_xor_si128(c2[1533],simde_mm_xor_si128(c2[1131],simde_mm_xor_si128(c2[230],simde_mm_xor_si128(c2[1543],simde_mm_xor_si128(c2[844],simde_mm_xor_si128(c2[540],simde_mm_xor_si128(c2[1553],simde_mm_xor_si128(c2[153],simde_mm_xor_si128(c2[1453],simde_mm_xor_si128(c2[64],simde_mm_xor_si128(c2[1563],simde_mm_xor_si128(c2[764],simde_mm_xor_si128(c2[1164],simde_mm_xor_si128(c2[1573],simde_mm_xor_si128(c2[872],simde_mm_xor_si128(c2[1373],simde_mm_xor_si128(c2[1583],simde_mm_xor_si128(c2[1180],simde_mm_xor_si128(c2[482],simde_mm_xor_si128(c2[94],simde_mm_xor_si128(c2[1593],simde_mm_xor_si128(c2[692],c2[1391]))))))))))))))))))))))))))))))))))));
 
 //row: 6
-     d2[30]=_mm_xor_si128(c2[404],_mm_xor_si128(c2[304],_mm_xor_si128(c2[1202],_mm_xor_si128(c2[700],_mm_xor_si128(c2[0],_mm_xor_si128(c2[414],_mm_xor_si128(c2[314],_mm_xor_si128(c2[110],_mm_xor_si128(c2[911],_mm_xor_si128(c2[424],_mm_xor_si128(c2[324],_mm_xor_si128(c2[220],_mm_xor_si128(c2[434],_mm_xor_si128(c2[334],_mm_xor_si128(c2[1531],_mm_xor_si128(c2[630],_mm_xor_si128(c2[344],_mm_xor_si128(c2[1244],_mm_xor_si128(c2[940],_mm_xor_si128(c2[354],_mm_xor_si128(c2[553],_mm_xor_si128(c2[1252],_mm_xor_si128(c2[464],_mm_xor_si128(c2[364],_mm_xor_si128(c2[1164],_mm_xor_si128(c2[1564],_mm_xor_si128(c2[374],_mm_xor_si128(c2[1272],_mm_xor_si128(c2[371],_mm_xor_si128(c2[384],_mm_xor_si128(c2[1580],_mm_xor_si128(c2[882],_mm_xor_si128(c2[494],_mm_xor_si128(c2[394],_mm_xor_si128(c2[1092],_mm_xor_si128(c2[192],c2[1392]))))))))))))))))))))))))))))))))))));
+     d2[30]=simde_mm_xor_si128(c2[404],simde_mm_xor_si128(c2[304],simde_mm_xor_si128(c2[1202],simde_mm_xor_si128(c2[700],simde_mm_xor_si128(c2[0],simde_mm_xor_si128(c2[414],simde_mm_xor_si128(c2[314],simde_mm_xor_si128(c2[110],simde_mm_xor_si128(c2[911],simde_mm_xor_si128(c2[424],simde_mm_xor_si128(c2[324],simde_mm_xor_si128(c2[220],simde_mm_xor_si128(c2[434],simde_mm_xor_si128(c2[334],simde_mm_xor_si128(c2[1531],simde_mm_xor_si128(c2[630],simde_mm_xor_si128(c2[344],simde_mm_xor_si128(c2[1244],simde_mm_xor_si128(c2[940],simde_mm_xor_si128(c2[354],simde_mm_xor_si128(c2[553],simde_mm_xor_si128(c2[1252],simde_mm_xor_si128(c2[464],simde_mm_xor_si128(c2[364],simde_mm_xor_si128(c2[1164],simde_mm_xor_si128(c2[1564],simde_mm_xor_si128(c2[374],simde_mm_xor_si128(c2[1272],simde_mm_xor_si128(c2[371],simde_mm_xor_si128(c2[384],simde_mm_xor_si128(c2[1580],simde_mm_xor_si128(c2[882],simde_mm_xor_si128(c2[494],simde_mm_xor_si128(c2[394],simde_mm_xor_si128(c2[1092],simde_mm_xor_si128(c2[192],c2[1392]))))))))))))))))))))))))))))))))))));
 
 //row: 7
-     d2[35]=_mm_xor_si128(c2[1],_mm_xor_si128(c2[1500],_mm_xor_si128(c2[1304],_mm_xor_si128(c2[804],_mm_xor_si128(c2[603],_mm_xor_si128(c2[302],_mm_xor_si128(c2[101],_mm_xor_si128(c2[11],_mm_xor_si128(c2[1510],_mm_xor_si128(c2[1314],_mm_xor_si128(c2[1311],_mm_xor_si128(c2[1110],_mm_xor_si128(c2[513],_mm_xor_si128(c2[412],_mm_xor_si128(c2[312],_mm_xor_si128(c2[10],_mm_xor_si128(c2[21],_mm_xor_si128(c2[1520],_mm_xor_si128(c2[1324],_mm_xor_si128(c2[1421],_mm_xor_si128(c2[1320],_mm_xor_si128(c2[1220],_mm_xor_si128(c2[31],_mm_xor_si128(c2[1530],_mm_xor_si128(c2[1334],_mm_xor_si128(c2[1133],_mm_xor_si128(c2[932],_mm_xor_si128(c2[232],_mm_xor_si128(c2[31],_mm_xor_si128(c2[1540],_mm_xor_si128(c2[1344],_mm_xor_si128(c2[841],_mm_xor_si128(c2[640],_mm_xor_si128(c2[542],_mm_xor_si128(c2[441],_mm_xor_si128(c2[341],_mm_xor_si128(c2[1550],_mm_xor_si128(c2[1354],_mm_xor_si128(c2[150],_mm_xor_si128(c2[54],_mm_xor_si128(c2[1553],_mm_xor_si128(c2[1352],_mm_xor_si128(c2[61],_mm_xor_si128(c2[1560],_mm_xor_si128(c2[1364],_mm_xor_si128(c2[761],_mm_xor_si128(c2[560],_mm_xor_si128(c2[1161],_mm_xor_si128(c2[1060],_mm_xor_si128(c2[960],_mm_xor_si128(c2[1570],_mm_xor_si128(c2[1374],_mm_xor_si128(c2[874],_mm_xor_si128(c2[773],_mm_xor_si128(c2[673],_mm_xor_si128(c2[474],_mm_xor_si128(c2[1580],_mm_xor_si128(c2[1384],_mm_xor_si128(c2[1182],_mm_xor_si128(c2[981],_mm_xor_si128(c2[484],_mm_xor_si128(c2[383],_mm_xor_si128(c2[283],_mm_xor_si128(c2[91],_mm_xor_si128(c2[1590],_mm_xor_si128(c2[1394],_mm_xor_si128(c2[694],_mm_xor_si128(c2[493],_mm_xor_si128(c2[1393],_mm_xor_si128(c2[1292],c2[1192]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+     d2[35]=simde_mm_xor_si128(c2[1],simde_mm_xor_si128(c2[1500],simde_mm_xor_si128(c2[1304],simde_mm_xor_si128(c2[804],simde_mm_xor_si128(c2[603],simde_mm_xor_si128(c2[302],simde_mm_xor_si128(c2[101],simde_mm_xor_si128(c2[11],simde_mm_xor_si128(c2[1510],simde_mm_xor_si128(c2[1314],simde_mm_xor_si128(c2[1311],simde_mm_xor_si128(c2[1110],simde_mm_xor_si128(c2[513],simde_mm_xor_si128(c2[412],simde_mm_xor_si128(c2[312],simde_mm_xor_si128(c2[10],simde_mm_xor_si128(c2[21],simde_mm_xor_si128(c2[1520],simde_mm_xor_si128(c2[1324],simde_mm_xor_si128(c2[1421],simde_mm_xor_si128(c2[1320],simde_mm_xor_si128(c2[1220],simde_mm_xor_si128(c2[31],simde_mm_xor_si128(c2[1530],simde_mm_xor_si128(c2[1334],simde_mm_xor_si128(c2[1133],simde_mm_xor_si128(c2[932],simde_mm_xor_si128(c2[232],simde_mm_xor_si128(c2[31],simde_mm_xor_si128(c2[1540],simde_mm_xor_si128(c2[1344],simde_mm_xor_si128(c2[841],simde_mm_xor_si128(c2[640],simde_mm_xor_si128(c2[542],simde_mm_xor_si128(c2[441],simde_mm_xor_si128(c2[341],simde_mm_xor_si128(c2[1550],simde_mm_xor_si128(c2[1354],simde_mm_xor_si128(c2[150],simde_mm_xor_si128(c2[54],simde_mm_xor_si128(c2[1553],simde_mm_xor_si128(c2[1352],simde_mm_xor_si128(c2[61],simde_mm_xor_si128(c2[1560],simde_mm_xor_si128(c2[1364],simde_mm_xor_si128(c2[761],simde_mm_xor_si128(c2[560],simde_mm_xor_si128(c2[1161],simde_mm_xor_si128(c2[1060],simde_mm_xor_si128(c2[960],simde_mm_xor_si128(c2[1570],simde_mm_xor_si128(c2[1374],simde_mm_xor_si128(c2[874],simde_mm_xor_si128(c2[773],simde_mm_xor_si128(c2[673],simde_mm_xor_si128(c2[474],simde_mm_xor_si128(c2[1580],simde_mm_xor_si128(c2[1384],simde_mm_xor_si128(c2[1182],simde_mm_xor_si128(c2[981],simde_mm_xor_si128(c2[484],simde_mm_xor_si128(c2[383],simde_mm_xor_si128(c2[283],simde_mm_xor_si128(c2[91],simde_mm_xor_si128(c2[1590],simde_mm_xor_si128(c2[1394],simde_mm_xor_si128(c2[694],simde_mm_xor_si128(c2[493],simde_mm_xor_si128(c2[1393],simde_mm_xor_si128(c2[1292],c2[1192]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
 
 //row: 8
-     d2[40]=_mm_xor_si128(c2[700],_mm_xor_si128(c2[600],_mm_xor_si128(c2[4],_mm_xor_si128(c2[1503],_mm_xor_si128(c2[1001],_mm_xor_si128(c2[0],_mm_xor_si128(c2[710],_mm_xor_si128(c2[610],_mm_xor_si128(c2[411],_mm_xor_si128(c2[1212],_mm_xor_si128(c2[114],_mm_xor_si128(c2[720],_mm_xor_si128(c2[620],_mm_xor_si128(c2[521],_mm_xor_si128(c2[730],_mm_xor_si128(c2[630],_mm_xor_si128(c2[333],_mm_xor_si128(c2[233],_mm_xor_si128(c2[931],_mm_xor_si128(c2[740],_mm_xor_si128(c2[640],_mm_xor_si128(c2[1540],_mm_xor_si128(c2[1241],_mm_xor_si128(c2[750],_mm_xor_si128(c2[650],_mm_xor_si128(c2[854],_mm_xor_si128(c2[760],_mm_xor_si128(c2[660],_mm_xor_si128(c2[1560],_mm_xor_si128(c2[1460],_mm_xor_si128(c2[261],_mm_xor_si128(c2[770],_mm_xor_si128(c2[670],_mm_xor_si128(c2[1573],_mm_xor_si128(c2[780],_mm_xor_si128(c2[680],_mm_xor_si128(c2[282],_mm_xor_si128(c2[1183],_mm_xor_si128(c2[790],_mm_xor_si128(c2[690],_mm_xor_si128(c2[1493],_mm_xor_si128(c2[1393],c2[493]))))))))))))))))))))))))))))))))))))))))));
+     d2[40]=simde_mm_xor_si128(c2[700],simde_mm_xor_si128(c2[600],simde_mm_xor_si128(c2[4],simde_mm_xor_si128(c2[1503],simde_mm_xor_si128(c2[1001],simde_mm_xor_si128(c2[0],simde_mm_xor_si128(c2[710],simde_mm_xor_si128(c2[610],simde_mm_xor_si128(c2[411],simde_mm_xor_si128(c2[1212],simde_mm_xor_si128(c2[114],simde_mm_xor_si128(c2[720],simde_mm_xor_si128(c2[620],simde_mm_xor_si128(c2[521],simde_mm_xor_si128(c2[730],simde_mm_xor_si128(c2[630],simde_mm_xor_si128(c2[333],simde_mm_xor_si128(c2[233],simde_mm_xor_si128(c2[931],simde_mm_xor_si128(c2[740],simde_mm_xor_si128(c2[640],simde_mm_xor_si128(c2[1540],simde_mm_xor_si128(c2[1241],simde_mm_xor_si128(c2[750],simde_mm_xor_si128(c2[650],simde_mm_xor_si128(c2[854],simde_mm_xor_si128(c2[760],simde_mm_xor_si128(c2[660],simde_mm_xor_si128(c2[1560],simde_mm_xor_si128(c2[1460],simde_mm_xor_si128(c2[261],simde_mm_xor_si128(c2[770],simde_mm_xor_si128(c2[670],simde_mm_xor_si128(c2[1573],simde_mm_xor_si128(c2[780],simde_mm_xor_si128(c2[680],simde_mm_xor_si128(c2[282],simde_mm_xor_si128(c2[1183],simde_mm_xor_si128(c2[790],simde_mm_xor_si128(c2[690],simde_mm_xor_si128(c2[1493],simde_mm_xor_si128(c2[1393],c2[493]))))))))))))))))))))))))))))))))))))))))));
 
 //row: 9
-     d2[45]=_mm_xor_si128(c2[203],_mm_xor_si128(c2[500],_mm_xor_si128(c2[400],_mm_xor_si128(c2[1101],_mm_xor_si128(c2[1303],_mm_xor_si128(c2[604],_mm_xor_si128(c2[801],_mm_xor_si128(c2[213],_mm_xor_si128(c2[510],_mm_xor_si128(c2[410],_mm_xor_si128(c2[14],_mm_xor_si128(c2[211],_mm_xor_si128(c2[810],_mm_xor_si128(c2[1012],_mm_xor_si128(c2[10],_mm_xor_si128(c2[223],_mm_xor_si128(c2[520],_mm_xor_si128(c2[420],_mm_xor_si128(c2[124],_mm_xor_si128(c2[321],_mm_xor_si128(c2[233],_mm_xor_si128(c2[530],_mm_xor_si128(c2[430],_mm_xor_si128(c2[1430],_mm_xor_si128(c2[33],_mm_xor_si128(c2[534],_mm_xor_si128(c2[731],_mm_xor_si128(c2[243],_mm_xor_si128(c2[440],_mm_xor_si128(c2[1143],_mm_xor_si128(c2[1340],_mm_xor_si128(c2[844],_mm_xor_si128(c2[1041],_mm_xor_si128(c2[253],_mm_xor_si128(c2[450],_mm_xor_si128(c2[452],_mm_xor_si128(c2[654],_mm_xor_si128(c2[263],_mm_xor_si128(c2[560],_mm_xor_si128(c2[460],_mm_xor_si128(c2[1063],_mm_xor_si128(c2[1260],_mm_xor_si128(c2[1463],_mm_xor_si128(c2[61],_mm_xor_si128(c2[273],_mm_xor_si128(c2[470],_mm_xor_si128(c2[1171],_mm_xor_si128(c2[1373],_mm_xor_si128(c2[283],_mm_xor_si128(c2[480],_mm_xor_si128(c2[1484],_mm_xor_si128(c2[82],_mm_xor_si128(c2[781],_mm_xor_si128(c2[983],_mm_xor_si128(c2[181],_mm_xor_si128(c2[293],_mm_xor_si128(c2[590],_mm_xor_si128(c2[490],_mm_xor_si128(c2[991],_mm_xor_si128(c2[1193],_mm_xor_si128(c2[91],c2[293])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+     d2[45]=simde_mm_xor_si128(c2[203],simde_mm_xor_si128(c2[500],simde_mm_xor_si128(c2[400],simde_mm_xor_si128(c2[1101],simde_mm_xor_si128(c2[1303],simde_mm_xor_si128(c2[604],simde_mm_xor_si128(c2[801],simde_mm_xor_si128(c2[213],simde_mm_xor_si128(c2[510],simde_mm_xor_si128(c2[410],simde_mm_xor_si128(c2[14],simde_mm_xor_si128(c2[211],simde_mm_xor_si128(c2[810],simde_mm_xor_si128(c2[1012],simde_mm_xor_si128(c2[10],simde_mm_xor_si128(c2[223],simde_mm_xor_si128(c2[520],simde_mm_xor_si128(c2[420],simde_mm_xor_si128(c2[124],simde_mm_xor_si128(c2[321],simde_mm_xor_si128(c2[233],simde_mm_xor_si128(c2[530],simde_mm_xor_si128(c2[430],simde_mm_xor_si128(c2[1430],simde_mm_xor_si128(c2[33],simde_mm_xor_si128(c2[534],simde_mm_xor_si128(c2[731],simde_mm_xor_si128(c2[243],simde_mm_xor_si128(c2[440],simde_mm_xor_si128(c2[1143],simde_mm_xor_si128(c2[1340],simde_mm_xor_si128(c2[844],simde_mm_xor_si128(c2[1041],simde_mm_xor_si128(c2[253],simde_mm_xor_si128(c2[450],simde_mm_xor_si128(c2[452],simde_mm_xor_si128(c2[654],simde_mm_xor_si128(c2[263],simde_mm_xor_si128(c2[560],simde_mm_xor_si128(c2[460],simde_mm_xor_si128(c2[1063],simde_mm_xor_si128(c2[1260],simde_mm_xor_si128(c2[1463],simde_mm_xor_si128(c2[61],simde_mm_xor_si128(c2[273],simde_mm_xor_si128(c2[470],simde_mm_xor_si128(c2[1171],simde_mm_xor_si128(c2[1373],simde_mm_xor_si128(c2[283],simde_mm_xor_si128(c2[480],simde_mm_xor_si128(c2[1484],simde_mm_xor_si128(c2[82],simde_mm_xor_si128(c2[781],simde_mm_xor_si128(c2[983],simde_mm_xor_si128(c2[181],simde_mm_xor_si128(c2[293],simde_mm_xor_si128(c2[590],simde_mm_xor_si128(c2[490],simde_mm_xor_si128(c2[991],simde_mm_xor_si128(c2[1193],simde_mm_xor_si128(c2[91],c2[293])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
 
 //row: 10
-     d2[50]=_mm_xor_si128(c2[0],_mm_xor_si128(c2[111],_mm_xor_si128(c2[1264],c2[471])));
+     d2[50]=simde_mm_xor_si128(c2[0],simde_mm_xor_si128(c2[111],simde_mm_xor_si128(c2[1264],c2[471])));
 
 //row: 11
-     d2[55]=_mm_xor_si128(c2[100],_mm_xor_si128(c2[1003],_mm_xor_si128(c2[501],_mm_xor_si128(c2[0],_mm_xor_si128(c2[110],_mm_xor_si128(c2[1510],_mm_xor_si128(c2[812],_mm_xor_si128(c2[712],_mm_xor_si128(c2[120],_mm_xor_si128(c2[121],_mm_xor_si128(c2[21],_mm_xor_si128(c2[130],_mm_xor_si128(c2[1332],_mm_xor_si128(c2[431],_mm_xor_si128(c2[140],_mm_xor_si128(c2[1040],_mm_xor_si128(c2[841],_mm_xor_si128(c2[741],_mm_xor_si128(c2[150],_mm_xor_si128(c2[454],_mm_xor_si128(c2[354],_mm_xor_si128(c2[160],_mm_xor_si128(c2[960],_mm_xor_si128(c2[1460],_mm_xor_si128(c2[1360],_mm_xor_si128(c2[170],_mm_xor_si128(c2[1173],_mm_xor_si128(c2[1073],_mm_xor_si128(c2[770],_mm_xor_si128(c2[180],_mm_xor_si128(c2[1381],_mm_xor_si128(c2[783],_mm_xor_si128(c2[683],_mm_xor_si128(c2[190],_mm_xor_si128(c2[893],_mm_xor_si128(c2[93],_mm_xor_si128(c2[1592],c2[490])))))))))))))))))))))))))))))))))))));
+     d2[55]=simde_mm_xor_si128(c2[100],simde_mm_xor_si128(c2[1003],simde_mm_xor_si128(c2[501],simde_mm_xor_si128(c2[0],simde_mm_xor_si128(c2[110],simde_mm_xor_si128(c2[1510],simde_mm_xor_si128(c2[812],simde_mm_xor_si128(c2[712],simde_mm_xor_si128(c2[120],simde_mm_xor_si128(c2[121],simde_mm_xor_si128(c2[21],simde_mm_xor_si128(c2[130],simde_mm_xor_si128(c2[1332],simde_mm_xor_si128(c2[431],simde_mm_xor_si128(c2[140],simde_mm_xor_si128(c2[1040],simde_mm_xor_si128(c2[841],simde_mm_xor_si128(c2[741],simde_mm_xor_si128(c2[150],simde_mm_xor_si128(c2[454],simde_mm_xor_si128(c2[354],simde_mm_xor_si128(c2[160],simde_mm_xor_si128(c2[960],simde_mm_xor_si128(c2[1460],simde_mm_xor_si128(c2[1360],simde_mm_xor_si128(c2[170],simde_mm_xor_si128(c2[1173],simde_mm_xor_si128(c2[1073],simde_mm_xor_si128(c2[770],simde_mm_xor_si128(c2[180],simde_mm_xor_si128(c2[1381],simde_mm_xor_si128(c2[783],simde_mm_xor_si128(c2[683],simde_mm_xor_si128(c2[190],simde_mm_xor_si128(c2[893],simde_mm_xor_si128(c2[93],simde_mm_xor_si128(c2[1592],c2[490])))))))))))))))))))))))))))))))))))));
 
 //row: 12
-     d2[60]=_mm_xor_si128(c2[3],_mm_xor_si128(c2[1502],_mm_xor_si128(c2[801],_mm_xor_si128(c2[304],_mm_xor_si128(c2[13],_mm_xor_si128(c2[1512],_mm_xor_si128(c2[1313],_mm_xor_si128(c2[510],_mm_xor_si128(c2[10],_mm_xor_si128(c2[23],_mm_xor_si128(c2[1522],_mm_xor_si128(c2[1423],_mm_xor_si128(c2[33],_mm_xor_si128(c2[1532],_mm_xor_si128(c2[1130],_mm_xor_si128(c2[234],_mm_xor_si128(c2[132],_mm_xor_si128(c2[1542],_mm_xor_si128(c2[843],_mm_xor_si128(c2[544],_mm_xor_si128(c2[1552],_mm_xor_si128(c2[152],_mm_xor_si128(c2[63],_mm_xor_si128(c2[1562],_mm_xor_si128(c2[763],_mm_xor_si128(c2[1163],_mm_xor_si128(c2[1572],_mm_xor_si128(c2[871],_mm_xor_si128(c2[1582],_mm_xor_si128(c2[1184],_mm_xor_si128(c2[481],_mm_xor_si128(c2[93],_mm_xor_si128(c2[1592],_mm_xor_si128(c2[691],c2[1390]))))))))))))))))))))))))))))))))));
+     d2[60]=simde_mm_xor_si128(c2[3],simde_mm_xor_si128(c2[1502],simde_mm_xor_si128(c2[801],simde_mm_xor_si128(c2[304],simde_mm_xor_si128(c2[13],simde_mm_xor_si128(c2[1512],simde_mm_xor_si128(c2[1313],simde_mm_xor_si128(c2[510],simde_mm_xor_si128(c2[10],simde_mm_xor_si128(c2[23],simde_mm_xor_si128(c2[1522],simde_mm_xor_si128(c2[1423],simde_mm_xor_si128(c2[33],simde_mm_xor_si128(c2[1532],simde_mm_xor_si128(c2[1130],simde_mm_xor_si128(c2[234],simde_mm_xor_si128(c2[132],simde_mm_xor_si128(c2[1542],simde_mm_xor_si128(c2[843],simde_mm_xor_si128(c2[544],simde_mm_xor_si128(c2[1552],simde_mm_xor_si128(c2[152],simde_mm_xor_si128(c2[63],simde_mm_xor_si128(c2[1562],simde_mm_xor_si128(c2[763],simde_mm_xor_si128(c2[1163],simde_mm_xor_si128(c2[1572],simde_mm_xor_si128(c2[871],simde_mm_xor_si128(c2[1582],simde_mm_xor_si128(c2[1184],simde_mm_xor_si128(c2[481],simde_mm_xor_si128(c2[93],simde_mm_xor_si128(c2[1592],simde_mm_xor_si128(c2[691],c2[1390]))))))))))))))))))))))))))))))))));
 
 //row: 13
-     d2[65]=_mm_xor_si128(c2[901],_mm_xor_si128(c2[200],_mm_xor_si128(c2[1302],_mm_xor_si128(c2[0],_mm_xor_si128(c2[911],_mm_xor_si128(c2[712],_mm_xor_si128(c2[14],_mm_xor_si128(c2[1513],_mm_xor_si128(c2[12],_mm_xor_si128(c2[921],_mm_xor_si128(c2[922],_mm_xor_si128(c2[822],_mm_xor_si128(c2[931],_mm_xor_si128(c2[534],_mm_xor_si128(c2[1232],_mm_xor_si128(c2[941],_mm_xor_si128(c2[242],_mm_xor_si128(c2[43],_mm_xor_si128(c2[1542],_mm_xor_si128(c2[951],_mm_xor_si128(c2[1250],_mm_xor_si128(c2[1150],_mm_xor_si128(c2[961],_mm_xor_si128(c2[162],_mm_xor_si128(c2[662],_mm_xor_si128(c2[562],_mm_xor_si128(c2[971],_mm_xor_si128(c2[370],_mm_xor_si128(c2[270],_mm_xor_si128(c2[981],_mm_xor_si128(c2[583],_mm_xor_si128(c2[1584],_mm_xor_si128(c2[1484],_mm_xor_si128(c2[681],_mm_xor_si128(c2[991],_mm_xor_si128(c2[90],_mm_xor_si128(c2[894],c2[794])))))))))))))))))))))))))))))))))))));
+     d2[65]=simde_mm_xor_si128(c2[901],simde_mm_xor_si128(c2[200],simde_mm_xor_si128(c2[1302],simde_mm_xor_si128(c2[0],simde_mm_xor_si128(c2[911],simde_mm_xor_si128(c2[712],simde_mm_xor_si128(c2[14],simde_mm_xor_si128(c2[1513],simde_mm_xor_si128(c2[12],simde_mm_xor_si128(c2[921],simde_mm_xor_si128(c2[922],simde_mm_xor_si128(c2[822],simde_mm_xor_si128(c2[931],simde_mm_xor_si128(c2[534],simde_mm_xor_si128(c2[1232],simde_mm_xor_si128(c2[941],simde_mm_xor_si128(c2[242],simde_mm_xor_si128(c2[43],simde_mm_xor_si128(c2[1542],simde_mm_xor_si128(c2[951],simde_mm_xor_si128(c2[1250],simde_mm_xor_si128(c2[1150],simde_mm_xor_si128(c2[961],simde_mm_xor_si128(c2[162],simde_mm_xor_si128(c2[662],simde_mm_xor_si128(c2[562],simde_mm_xor_si128(c2[971],simde_mm_xor_si128(c2[370],simde_mm_xor_si128(c2[270],simde_mm_xor_si128(c2[981],simde_mm_xor_si128(c2[583],simde_mm_xor_si128(c2[1584],simde_mm_xor_si128(c2[1484],simde_mm_xor_si128(c2[681],simde_mm_xor_si128(c2[991],simde_mm_xor_si128(c2[90],simde_mm_xor_si128(c2[894],c2[794])))))))))))))))))))))))))))))))))))));
 
 //row: 14
-     d2[70]=_mm_xor_si128(c2[903],_mm_xor_si128(c2[803],_mm_xor_si128(c2[1001],_mm_xor_si128(c2[102],_mm_xor_si128(c2[300],_mm_xor_si128(c2[1204],_mm_xor_si128(c2[1402],_mm_xor_si128(c2[913],_mm_xor_si128(c2[813],_mm_xor_si128(c2[1011],_mm_xor_si128(c2[614],_mm_xor_si128(c2[812],_mm_xor_si128(c2[1410],_mm_xor_si128(c2[114],_mm_xor_si128(c2[14],_mm_xor_si128(c2[10],_mm_xor_si128(c2[923],_mm_xor_si128(c2[823],_mm_xor_si128(c2[1021],_mm_xor_si128(c2[724],_mm_xor_si128(c2[1022],_mm_xor_si128(c2[922],_mm_xor_si128(c2[933],_mm_xor_si128(c2[833],_mm_xor_si128(c2[1031],_mm_xor_si128(c2[431],_mm_xor_si128(c2[634],_mm_xor_si128(c2[1134],_mm_xor_si128(c2[1332],_mm_xor_si128(c2[843],_mm_xor_si128(c2[1041],_mm_xor_si128(c2[144],_mm_xor_si128(c2[342],_mm_xor_si128(c2[1444],_mm_xor_si128(c2[143],_mm_xor_si128(c2[43],_mm_xor_si128(c2[853],_mm_xor_si128(c2[1051],_mm_xor_si128(c2[1052],_mm_xor_si128(c2[1350],_mm_xor_si128(c2[1250],_mm_xor_si128(c2[963],_mm_xor_si128(c2[863],_mm_xor_si128(c2[1061],_mm_xor_si128(c2[64],_mm_xor_si128(c2[262],_mm_xor_si128(c2[464],_mm_xor_si128(c2[762],_mm_xor_si128(c2[662],_mm_xor_si128(c2[1063],_mm_xor_si128(c2[873],_mm_xor_si128(c2[1071],_mm_xor_si128(c2[172],_mm_xor_si128(c2[470],_mm_xor_si128(c2[370],_mm_xor_si128(c2[883],_mm_xor_si128(c2[1081],_mm_xor_si128(c2[480],_mm_xor_si128(c2[683],_mm_xor_si128(c2[1381],_mm_xor_si128(c2[80],_mm_xor_si128(c2[1584],_mm_xor_si128(c2[993],_mm_xor_si128(c2[893],_mm_xor_si128(c2[1091],_mm_xor_si128(c2[1591],_mm_xor_si128(c2[190],_mm_xor_si128(c2[691],_mm_xor_si128(c2[994],c2[894])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+     d2[70]=simde_mm_xor_si128(c2[903],simde_mm_xor_si128(c2[803],simde_mm_xor_si128(c2[1001],simde_mm_xor_si128(c2[102],simde_mm_xor_si128(c2[300],simde_mm_xor_si128(c2[1204],simde_mm_xor_si128(c2[1402],simde_mm_xor_si128(c2[913],simde_mm_xor_si128(c2[813],simde_mm_xor_si128(c2[1011],simde_mm_xor_si128(c2[614],simde_mm_xor_si128(c2[812],simde_mm_xor_si128(c2[1410],simde_mm_xor_si128(c2[114],simde_mm_xor_si128(c2[14],simde_mm_xor_si128(c2[10],simde_mm_xor_si128(c2[923],simde_mm_xor_si128(c2[823],simde_mm_xor_si128(c2[1021],simde_mm_xor_si128(c2[724],simde_mm_xor_si128(c2[1022],simde_mm_xor_si128(c2[922],simde_mm_xor_si128(c2[933],simde_mm_xor_si128(c2[833],simde_mm_xor_si128(c2[1031],simde_mm_xor_si128(c2[431],simde_mm_xor_si128(c2[634],simde_mm_xor_si128(c2[1134],simde_mm_xor_si128(c2[1332],simde_mm_xor_si128(c2[843],simde_mm_xor_si128(c2[1041],simde_mm_xor_si128(c2[144],simde_mm_xor_si128(c2[342],simde_mm_xor_si128(c2[1444],simde_mm_xor_si128(c2[143],simde_mm_xor_si128(c2[43],simde_mm_xor_si128(c2[853],simde_mm_xor_si128(c2[1051],simde_mm_xor_si128(c2[1052],simde_mm_xor_si128(c2[1350],simde_mm_xor_si128(c2[1250],simde_mm_xor_si128(c2[963],simde_mm_xor_si128(c2[863],simde_mm_xor_si128(c2[1061],simde_mm_xor_si128(c2[64],simde_mm_xor_si128(c2[262],simde_mm_xor_si128(c2[464],simde_mm_xor_si128(c2[762],simde_mm_xor_si128(c2[662],simde_mm_xor_si128(c2[1063],simde_mm_xor_si128(c2[873],simde_mm_xor_si128(c2[1071],simde_mm_xor_si128(c2[172],simde_mm_xor_si128(c2[470],simde_mm_xor_si128(c2[370],simde_mm_xor_si128(c2[883],simde_mm_xor_si128(c2[1081],simde_mm_xor_si128(c2[480],simde_mm_xor_si128(c2[683],simde_mm_xor_si128(c2[1381],simde_mm_xor_si128(c2[80],simde_mm_xor_si128(c2[1584],simde_mm_xor_si128(c2[993],simde_mm_xor_si128(c2[893],simde_mm_xor_si128(c2[1091],simde_mm_xor_si128(c2[1591],simde_mm_xor_si128(c2[190],simde_mm_xor_si128(c2[691],simde_mm_xor_si128(c2[994],c2[894])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
 
 //row: 15
-     d2[75]=_mm_xor_si128(c2[804],_mm_xor_si128(c2[301],_mm_xor_si128(c2[201],_mm_xor_si128(c2[103],_mm_xor_si128(c2[1104],_mm_xor_si128(c2[1200],_mm_xor_si128(c2[602],_mm_xor_si128(c2[0],_mm_xor_si128(c2[814],_mm_xor_si128(c2[311],_mm_xor_si128(c2[211],_mm_xor_si128(c2[610],_mm_xor_si128(c2[12],_mm_xor_si128(c2[1411],_mm_xor_si128(c2[813],_mm_xor_si128(c2[824],_mm_xor_si128(c2[321],_mm_xor_si128(c2[221],_mm_xor_si128(c2[720],_mm_xor_si128(c2[122],_mm_xor_si128(c2[834],_mm_xor_si128(c2[331],_mm_xor_si128(c2[231],_mm_xor_si128(c2[432],_mm_xor_si128(c2[1433],_mm_xor_si128(c2[1130],_mm_xor_si128(c2[532],_mm_xor_si128(c2[844],_mm_xor_si128(c2[241],_mm_xor_si128(c2[140],_mm_xor_si128(c2[1141],_mm_xor_si128(c2[1440],_mm_xor_si128(c2[842],_mm_xor_si128(c2[854],_mm_xor_si128(c2[251],_mm_xor_si128(c2[1053],_mm_xor_si128(c2[450],_mm_xor_si128(c2[864],_mm_xor_si128(c2[361],_mm_xor_si128(c2[261],_mm_xor_si128(c2[60],_mm_xor_si128(c2[1061],_mm_xor_si128(c2[460],_mm_xor_si128(c2[1461],_mm_xor_si128(c2[874],_mm_xor_si128(c2[271],_mm_xor_si128(c2[173],_mm_xor_si128(c2[1174],_mm_xor_si128(c2[884],_mm_xor_si128(c2[281],_mm_xor_si128(c2[481],_mm_xor_si128(c2[1482],_mm_xor_si128(c2[1382],_mm_xor_si128(c2[784],_mm_xor_si128(c2[894],_mm_xor_si128(c2[391],_mm_xor_si128(c2[291],_mm_xor_si128(c2[1592],_mm_xor_si128(c2[994],_mm_xor_si128(c2[692],c2[94]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+     d2[75]=simde_mm_xor_si128(c2[804],simde_mm_xor_si128(c2[301],simde_mm_xor_si128(c2[201],simde_mm_xor_si128(c2[103],simde_mm_xor_si128(c2[1104],simde_mm_xor_si128(c2[1200],simde_mm_xor_si128(c2[602],simde_mm_xor_si128(c2[0],simde_mm_xor_si128(c2[814],simde_mm_xor_si128(c2[311],simde_mm_xor_si128(c2[211],simde_mm_xor_si128(c2[610],simde_mm_xor_si128(c2[12],simde_mm_xor_si128(c2[1411],simde_mm_xor_si128(c2[813],simde_mm_xor_si128(c2[824],simde_mm_xor_si128(c2[321],simde_mm_xor_si128(c2[221],simde_mm_xor_si128(c2[720],simde_mm_xor_si128(c2[122],simde_mm_xor_si128(c2[834],simde_mm_xor_si128(c2[331],simde_mm_xor_si128(c2[231],simde_mm_xor_si128(c2[432],simde_mm_xor_si128(c2[1433],simde_mm_xor_si128(c2[1130],simde_mm_xor_si128(c2[532],simde_mm_xor_si128(c2[844],simde_mm_xor_si128(c2[241],simde_mm_xor_si128(c2[140],simde_mm_xor_si128(c2[1141],simde_mm_xor_si128(c2[1440],simde_mm_xor_si128(c2[842],simde_mm_xor_si128(c2[854],simde_mm_xor_si128(c2[251],simde_mm_xor_si128(c2[1053],simde_mm_xor_si128(c2[450],simde_mm_xor_si128(c2[864],simde_mm_xor_si128(c2[361],simde_mm_xor_si128(c2[261],simde_mm_xor_si128(c2[60],simde_mm_xor_si128(c2[1061],simde_mm_xor_si128(c2[460],simde_mm_xor_si128(c2[1461],simde_mm_xor_si128(c2[874],simde_mm_xor_si128(c2[271],simde_mm_xor_si128(c2[173],simde_mm_xor_si128(c2[1174],simde_mm_xor_si128(c2[884],simde_mm_xor_si128(c2[281],simde_mm_xor_si128(c2[481],simde_mm_xor_si128(c2[1482],simde_mm_xor_si128(c2[1382],simde_mm_xor_si128(c2[784],simde_mm_xor_si128(c2[894],simde_mm_xor_si128(c2[391],simde_mm_xor_si128(c2[291],simde_mm_xor_si128(c2[1592],simde_mm_xor_si128(c2[994],simde_mm_xor_si128(c2[692],c2[94]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
 
 //row: 16
-     d2[80]=_mm_xor_si128(c2[1501],_mm_xor_si128(c2[1401],_mm_xor_si128(c2[1503],_mm_xor_si128(c2[1403],_mm_xor_si128(c2[700],_mm_xor_si128(c2[802],_mm_xor_si128(c2[702],_mm_xor_si128(c2[203],_mm_xor_si128(c2[200],_mm_xor_si128(c2[1511],_mm_xor_si128(c2[1411],_mm_xor_si128(c2[1513],_mm_xor_si128(c2[1413],_mm_xor_si128(c2[1212],_mm_xor_si128(c2[1214],_mm_xor_si128(c2[414],_mm_xor_si128(c2[411],_mm_xor_si128(c2[10],_mm_xor_si128(c2[1521],_mm_xor_si128(c2[1421],_mm_xor_si128(c2[1523],_mm_xor_si128(c2[1423],_mm_xor_si128(c2[1322],_mm_xor_si128(c2[1324],_mm_xor_si128(c2[1531],_mm_xor_si128(c2[1431],_mm_xor_si128(c2[1533],_mm_xor_si128(c2[1433],_mm_xor_si128(c2[1034],_mm_xor_si128(c2[1131],_mm_xor_si128(c2[1031],_mm_xor_si128(c2[133],_mm_xor_si128(c2[130],_mm_xor_si128(c2[1441],_mm_xor_si128(c2[1543],_mm_xor_si128(c2[1443],_mm_xor_si128(c2[742],_mm_xor_si128(c2[744],_mm_xor_si128(c2[443],_mm_xor_si128(c2[440],_mm_xor_si128(c2[1451],_mm_xor_si128(c2[1553],_mm_xor_si128(c2[1453],_mm_xor_si128(c2[51],_mm_xor_si128(c2[53],_mm_xor_si128(c2[1561],_mm_xor_si128(c2[1461],_mm_xor_si128(c2[1563],_mm_xor_si128(c2[1463],_mm_xor_si128(c2[662],_mm_xor_si128(c2[764],_mm_xor_si128(c2[664],_mm_xor_si128(c2[1062],_mm_xor_si128(c2[1064],_mm_xor_si128(c2[1471],_mm_xor_si128(c2[1573],_mm_xor_si128(c2[1473],_mm_xor_si128(c2[770],_mm_xor_si128(c2[772],_mm_xor_si128(c2[1481],_mm_xor_si128(c2[1583],_mm_xor_si128(c2[1483],_mm_xor_si128(c2[1083],_mm_xor_si128(c2[1080],_mm_xor_si128(c2[380],_mm_xor_si128(c2[382],_mm_xor_si128(c2[1591],_mm_xor_si128(c2[1491],_mm_xor_si128(c2[1593],_mm_xor_si128(c2[1493],_mm_xor_si128(c2[590],_mm_xor_si128(c2[692],_mm_xor_si128(c2[592],_mm_xor_si128(c2[1294],_mm_xor_si128(c2[1291],c2[1594])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+     d2[80]=simde_mm_xor_si128(c2[1501],simde_mm_xor_si128(c2[1401],simde_mm_xor_si128(c2[1503],simde_mm_xor_si128(c2[1403],simde_mm_xor_si128(c2[700],simde_mm_xor_si128(c2[802],simde_mm_xor_si128(c2[702],simde_mm_xor_si128(c2[203],simde_mm_xor_si128(c2[200],simde_mm_xor_si128(c2[1511],simde_mm_xor_si128(c2[1411],simde_mm_xor_si128(c2[1513],simde_mm_xor_si128(c2[1413],simde_mm_xor_si128(c2[1212],simde_mm_xor_si128(c2[1214],simde_mm_xor_si128(c2[414],simde_mm_xor_si128(c2[411],simde_mm_xor_si128(c2[10],simde_mm_xor_si128(c2[1521],simde_mm_xor_si128(c2[1421],simde_mm_xor_si128(c2[1523],simde_mm_xor_si128(c2[1423],simde_mm_xor_si128(c2[1322],simde_mm_xor_si128(c2[1324],simde_mm_xor_si128(c2[1531],simde_mm_xor_si128(c2[1431],simde_mm_xor_si128(c2[1533],simde_mm_xor_si128(c2[1433],simde_mm_xor_si128(c2[1034],simde_mm_xor_si128(c2[1131],simde_mm_xor_si128(c2[1031],simde_mm_xor_si128(c2[133],simde_mm_xor_si128(c2[130],simde_mm_xor_si128(c2[1441],simde_mm_xor_si128(c2[1543],simde_mm_xor_si128(c2[1443],simde_mm_xor_si128(c2[742],simde_mm_xor_si128(c2[744],simde_mm_xor_si128(c2[443],simde_mm_xor_si128(c2[440],simde_mm_xor_si128(c2[1451],simde_mm_xor_si128(c2[1553],simde_mm_xor_si128(c2[1453],simde_mm_xor_si128(c2[51],simde_mm_xor_si128(c2[53],simde_mm_xor_si128(c2[1561],simde_mm_xor_si128(c2[1461],simde_mm_xor_si128(c2[1563],simde_mm_xor_si128(c2[1463],simde_mm_xor_si128(c2[662],simde_mm_xor_si128(c2[764],simde_mm_xor_si128(c2[664],simde_mm_xor_si128(c2[1062],simde_mm_xor_si128(c2[1064],simde_mm_xor_si128(c2[1471],simde_mm_xor_si128(c2[1573],simde_mm_xor_si128(c2[1473],simde_mm_xor_si128(c2[770],simde_mm_xor_si128(c2[772],simde_mm_xor_si128(c2[1481],simde_mm_xor_si128(c2[1583],simde_mm_xor_si128(c2[1483],simde_mm_xor_si128(c2[1083],simde_mm_xor_si128(c2[1080],simde_mm_xor_si128(c2[380],simde_mm_xor_si128(c2[382],simde_mm_xor_si128(c2[1591],simde_mm_xor_si128(c2[1491],simde_mm_xor_si128(c2[1593],simde_mm_xor_si128(c2[1493],simde_mm_xor_si128(c2[590],simde_mm_xor_si128(c2[692],simde_mm_xor_si128(c2[592],simde_mm_xor_si128(c2[1294],simde_mm_xor_si128(c2[1291],c2[1594])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
 
 //row: 17
-     d2[85]=_mm_xor_si128(c2[1301],_mm_xor_si128(c2[1201],_mm_xor_si128(c2[201],_mm_xor_si128(c2[101],_mm_xor_si128(c2[500],_mm_xor_si128(c2[1104],_mm_xor_si128(c2[1004],_mm_xor_si128(c2[3],_mm_xor_si128(c2[502],_mm_xor_si128(c2[1311],_mm_xor_si128(c2[1211],_mm_xor_si128(c2[211],_mm_xor_si128(c2[111],_mm_xor_si128(c2[1012],_mm_xor_si128(c2[1511],_mm_xor_si128(c2[214],_mm_xor_si128(c2[713],_mm_xor_si128(c2[10],_mm_xor_si128(c2[1321],_mm_xor_si128(c2[1221],_mm_xor_si128(c2[221],_mm_xor_si128(c2[121],_mm_xor_si128(c2[1122],_mm_xor_si128(c2[22],_mm_xor_si128(c2[1331],_mm_xor_si128(c2[1231],_mm_xor_si128(c2[231],_mm_xor_si128(c2[131],_mm_xor_si128(c2[834],_mm_xor_si128(c2[1433],_mm_xor_si128(c2[1333],_mm_xor_si128(c2[1532],_mm_xor_si128(c2[432],_mm_xor_si128(c2[1241],_mm_xor_si128(c2[241],_mm_xor_si128(c2[141],_mm_xor_si128(c2[542],_mm_xor_si128(c2[1041],_mm_xor_si128(c2[243],_mm_xor_si128(c2[742],_mm_xor_si128(c2[1251],_mm_xor_si128(c2[251],_mm_xor_si128(c2[151],_mm_xor_si128(c2[1450],_mm_xor_si128(c2[350],_mm_xor_si128(c2[851],_mm_xor_si128(c2[1361],_mm_xor_si128(c2[1261],_mm_xor_si128(c2[261],_mm_xor_si128(c2[161],_mm_xor_si128(c2[462],_mm_xor_si128(c2[1061],_mm_xor_si128(c2[961],_mm_xor_si128(c2[862],_mm_xor_si128(c2[1361],_mm_xor_si128(c2[1271],_mm_xor_si128(c2[271],_mm_xor_si128(c2[171],_mm_xor_si128(c2[570],_mm_xor_si128(c2[1074],_mm_xor_si128(c2[1281],_mm_xor_si128(c2[281],_mm_xor_si128(c2[181],_mm_xor_si128(c2[883],_mm_xor_si128(c2[1382],_mm_xor_si128(c2[180],_mm_xor_si128(c2[684],_mm_xor_si128(c2[1391],_mm_xor_si128(c2[1291],_mm_xor_si128(c2[291],_mm_xor_si128(c2[191],_mm_xor_si128(c2[390],_mm_xor_si128(c2[994],_mm_xor_si128(c2[894],_mm_xor_si128(c2[1094],c2[1593])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+     d2[85]=simde_mm_xor_si128(c2[1301],simde_mm_xor_si128(c2[1201],simde_mm_xor_si128(c2[201],simde_mm_xor_si128(c2[101],simde_mm_xor_si128(c2[500],simde_mm_xor_si128(c2[1104],simde_mm_xor_si128(c2[1004],simde_mm_xor_si128(c2[3],simde_mm_xor_si128(c2[502],simde_mm_xor_si128(c2[1311],simde_mm_xor_si128(c2[1211],simde_mm_xor_si128(c2[211],simde_mm_xor_si128(c2[111],simde_mm_xor_si128(c2[1012],simde_mm_xor_si128(c2[1511],simde_mm_xor_si128(c2[214],simde_mm_xor_si128(c2[713],simde_mm_xor_si128(c2[10],simde_mm_xor_si128(c2[1321],simde_mm_xor_si128(c2[1221],simde_mm_xor_si128(c2[221],simde_mm_xor_si128(c2[121],simde_mm_xor_si128(c2[1122],simde_mm_xor_si128(c2[22],simde_mm_xor_si128(c2[1331],simde_mm_xor_si128(c2[1231],simde_mm_xor_si128(c2[231],simde_mm_xor_si128(c2[131],simde_mm_xor_si128(c2[834],simde_mm_xor_si128(c2[1433],simde_mm_xor_si128(c2[1333],simde_mm_xor_si128(c2[1532],simde_mm_xor_si128(c2[432],simde_mm_xor_si128(c2[1241],simde_mm_xor_si128(c2[241],simde_mm_xor_si128(c2[141],simde_mm_xor_si128(c2[542],simde_mm_xor_si128(c2[1041],simde_mm_xor_si128(c2[243],simde_mm_xor_si128(c2[742],simde_mm_xor_si128(c2[1251],simde_mm_xor_si128(c2[251],simde_mm_xor_si128(c2[151],simde_mm_xor_si128(c2[1450],simde_mm_xor_si128(c2[350],simde_mm_xor_si128(c2[851],simde_mm_xor_si128(c2[1361],simde_mm_xor_si128(c2[1261],simde_mm_xor_si128(c2[261],simde_mm_xor_si128(c2[161],simde_mm_xor_si128(c2[462],simde_mm_xor_si128(c2[1061],simde_mm_xor_si128(c2[961],simde_mm_xor_si128(c2[862],simde_mm_xor_si128(c2[1361],simde_mm_xor_si128(c2[1271],simde_mm_xor_si128(c2[271],simde_mm_xor_si128(c2[171],simde_mm_xor_si128(c2[570],simde_mm_xor_si128(c2[1074],simde_mm_xor_si128(c2[1281],simde_mm_xor_si128(c2[281],simde_mm_xor_si128(c2[181],simde_mm_xor_si128(c2[883],simde_mm_xor_si128(c2[1382],simde_mm_xor_si128(c2[180],simde_mm_xor_si128(c2[684],simde_mm_xor_si128(c2[1391],simde_mm_xor_si128(c2[1291],simde_mm_xor_si128(c2[291],simde_mm_xor_si128(c2[191],simde_mm_xor_si128(c2[390],simde_mm_xor_si128(c2[994],simde_mm_xor_si128(c2[894],simde_mm_xor_si128(c2[1094],c2[1593])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
 
 //row: 18
-     d2[90]=_mm_xor_si128(c2[0],_mm_xor_si128(c2[261],c2[670]));
+     d2[90]=simde_mm_xor_si128(c2[0],simde_mm_xor_si128(c2[261],c2[670]));
 
 //row: 19
-     d2[95]=_mm_xor_si128(c2[904],_mm_xor_si128(c2[203],_mm_xor_si128(c2[1300],_mm_xor_si128(c2[0],_mm_xor_si128(c2[914],_mm_xor_si128(c2[710],_mm_xor_si128(c2[1511],_mm_xor_si128(c2[1414],_mm_xor_si128(c2[924],_mm_xor_si128(c2[820],_mm_xor_si128(c2[934],_mm_xor_si128(c2[532],_mm_xor_si128(c2[1230],_mm_xor_si128(c2[944],_mm_xor_si128(c2[240],_mm_xor_si128(c2[1540],_mm_xor_si128(c2[954],_mm_xor_si128(c2[1153],_mm_xor_si128(c2[964],_mm_xor_si128(c2[160],_mm_xor_si128(c2[560],_mm_xor_si128(c2[974],_mm_xor_si128(c2[273],_mm_xor_si128(c2[984],_mm_xor_si128(c2[581],_mm_xor_si128(c2[1482],_mm_xor_si128(c2[994],_mm_xor_si128(c2[93],c2[792]))))))))))))))))))))))))))));
+     d2[95]=simde_mm_xor_si128(c2[904],simde_mm_xor_si128(c2[203],simde_mm_xor_si128(c2[1300],simde_mm_xor_si128(c2[0],simde_mm_xor_si128(c2[914],simde_mm_xor_si128(c2[710],simde_mm_xor_si128(c2[1511],simde_mm_xor_si128(c2[1414],simde_mm_xor_si128(c2[924],simde_mm_xor_si128(c2[820],simde_mm_xor_si128(c2[934],simde_mm_xor_si128(c2[532],simde_mm_xor_si128(c2[1230],simde_mm_xor_si128(c2[944],simde_mm_xor_si128(c2[240],simde_mm_xor_si128(c2[1540],simde_mm_xor_si128(c2[954],simde_mm_xor_si128(c2[1153],simde_mm_xor_si128(c2[964],simde_mm_xor_si128(c2[160],simde_mm_xor_si128(c2[560],simde_mm_xor_si128(c2[974],simde_mm_xor_si128(c2[273],simde_mm_xor_si128(c2[984],simde_mm_xor_si128(c2[581],simde_mm_xor_si128(c2[1482],simde_mm_xor_si128(c2[994],simde_mm_xor_si128(c2[93],c2[792]))))))))))))))))))))))))))));
 
 //row: 20
-     d2[100]=_mm_xor_si128(c2[801],_mm_xor_si128(c2[701],_mm_xor_si128(c2[0],_mm_xor_si128(c2[1102],_mm_xor_si128(c2[811],_mm_xor_si128(c2[711],_mm_xor_si128(c2[512],_mm_xor_si128(c2[1313],_mm_xor_si128(c2[10],_mm_xor_si128(c2[821],_mm_xor_si128(c2[721],_mm_xor_si128(c2[622],_mm_xor_si128(c2[831],_mm_xor_si128(c2[731],_mm_xor_si128(c2[334],_mm_xor_si128(c2[1032],_mm_xor_si128(c2[741],_mm_xor_si128(c2[42],_mm_xor_si128(c2[1342],_mm_xor_si128(c2[444],_mm_xor_si128(c2[751],_mm_xor_si128(c2[950],_mm_xor_si128(c2[861],_mm_xor_si128(c2[761],_mm_xor_si128(c2[1561],_mm_xor_si128(c2[362],_mm_xor_si128(c2[771],_mm_xor_si128(c2[70],_mm_xor_si128(c2[781],_mm_xor_si128(c2[383],_mm_xor_si128(c2[1284],_mm_xor_si128(c2[891],_mm_xor_si128(c2[791],_mm_xor_si128(c2[1494],c2[594]))))))))))))))))))))))))))))))))));
+     d2[100]=simde_mm_xor_si128(c2[801],simde_mm_xor_si128(c2[701],simde_mm_xor_si128(c2[0],simde_mm_xor_si128(c2[1102],simde_mm_xor_si128(c2[811],simde_mm_xor_si128(c2[711],simde_mm_xor_si128(c2[512],simde_mm_xor_si128(c2[1313],simde_mm_xor_si128(c2[10],simde_mm_xor_si128(c2[821],simde_mm_xor_si128(c2[721],simde_mm_xor_si128(c2[622],simde_mm_xor_si128(c2[831],simde_mm_xor_si128(c2[731],simde_mm_xor_si128(c2[334],simde_mm_xor_si128(c2[1032],simde_mm_xor_si128(c2[741],simde_mm_xor_si128(c2[42],simde_mm_xor_si128(c2[1342],simde_mm_xor_si128(c2[444],simde_mm_xor_si128(c2[751],simde_mm_xor_si128(c2[950],simde_mm_xor_si128(c2[861],simde_mm_xor_si128(c2[761],simde_mm_xor_si128(c2[1561],simde_mm_xor_si128(c2[362],simde_mm_xor_si128(c2[771],simde_mm_xor_si128(c2[70],simde_mm_xor_si128(c2[781],simde_mm_xor_si128(c2[383],simde_mm_xor_si128(c2[1284],simde_mm_xor_si128(c2[891],simde_mm_xor_si128(c2[791],simde_mm_xor_si128(c2[1494],c2[594]))))))))))))))))))))))))))))))))));
 
 //row: 21
-     d2[105]=_mm_xor_si128(c2[2],_mm_xor_si128(c2[900],_mm_xor_si128(c2[403],_mm_xor_si128(c2[0],_mm_xor_si128(c2[12],_mm_xor_si128(c2[1412],_mm_xor_si128(c2[714],_mm_xor_si128(c2[614],_mm_xor_si128(c2[22],_mm_xor_si128(c2[23],_mm_xor_si128(c2[1522],_mm_xor_si128(c2[32],_mm_xor_si128(c2[1234],_mm_xor_si128(c2[333],_mm_xor_si128(c2[42],_mm_xor_si128(c2[942],_mm_xor_si128(c2[743],_mm_xor_si128(c2[643],_mm_xor_si128(c2[52],_mm_xor_si128(c2[351],_mm_xor_si128(c2[251],_mm_xor_si128(c2[62],_mm_xor_si128(c2[862],_mm_xor_si128(c2[1362],_mm_xor_si128(c2[1262],_mm_xor_si128(c2[72],_mm_xor_si128(c2[1070],_mm_xor_si128(c2[970],_mm_xor_si128(c2[82],_mm_xor_si128(c2[1283],_mm_xor_si128(c2[680],_mm_xor_si128(c2[580],_mm_xor_si128(c2[181],_mm_xor_si128(c2[92],_mm_xor_si128(c2[790],_mm_xor_si128(c2[1594],c2[1494]))))))))))))))))))))))))))))))))))));
+     d2[105]=simde_mm_xor_si128(c2[2],simde_mm_xor_si128(c2[900],simde_mm_xor_si128(c2[403],simde_mm_xor_si128(c2[0],simde_mm_xor_si128(c2[12],simde_mm_xor_si128(c2[1412],simde_mm_xor_si128(c2[714],simde_mm_xor_si128(c2[614],simde_mm_xor_si128(c2[22],simde_mm_xor_si128(c2[23],simde_mm_xor_si128(c2[1522],simde_mm_xor_si128(c2[32],simde_mm_xor_si128(c2[1234],simde_mm_xor_si128(c2[333],simde_mm_xor_si128(c2[42],simde_mm_xor_si128(c2[942],simde_mm_xor_si128(c2[743],simde_mm_xor_si128(c2[643],simde_mm_xor_si128(c2[52],simde_mm_xor_si128(c2[351],simde_mm_xor_si128(c2[251],simde_mm_xor_si128(c2[62],simde_mm_xor_si128(c2[862],simde_mm_xor_si128(c2[1362],simde_mm_xor_si128(c2[1262],simde_mm_xor_si128(c2[72],simde_mm_xor_si128(c2[1070],simde_mm_xor_si128(c2[970],simde_mm_xor_si128(c2[82],simde_mm_xor_si128(c2[1283],simde_mm_xor_si128(c2[680],simde_mm_xor_si128(c2[580],simde_mm_xor_si128(c2[181],simde_mm_xor_si128(c2[92],simde_mm_xor_si128(c2[790],simde_mm_xor_si128(c2[1594],c2[1494]))))))))))))))))))))))))))))))))))));
 
 //row: 22
-     d2[110]=_mm_xor_si128(c2[10],c2[420]);
+     d2[110]=simde_mm_xor_si128(c2[10],c2[420]);
 
 //row: 23
-     d2[115]=_mm_xor_si128(c2[0],_mm_xor_si128(c2[1134],c2[1454]));
+     d2[115]=simde_mm_xor_si128(c2[0],simde_mm_xor_si128(c2[1134],c2[1454]));
 
 //row: 24
-     d2[120]=_mm_xor_si128(c2[10],_mm_xor_si128(c2[524],c2[790]));
+     d2[120]=simde_mm_xor_si128(c2[10],simde_mm_xor_si128(c2[524],c2[790]));
 
 //row: 25
-     d2[125]=_mm_xor_si128(c2[0],c2[154]);
+     d2[125]=simde_mm_xor_si128(c2[0],c2[154]);
 
 //row: 26
-     d2[130]=_mm_xor_si128(c2[1300],_mm_xor_si128(c2[1200],_mm_xor_si128(c2[600],_mm_xor_si128(c2[604],_mm_xor_si128(c2[504],_mm_xor_si128(c2[1503],_mm_xor_si128(c2[2],_mm_xor_si128(c2[1001],_mm_xor_si128(c2[1310],_mm_xor_si128(c2[1210],_mm_xor_si128(c2[610],_mm_xor_si128(c2[1011],_mm_xor_si128(c2[411],_mm_xor_si128(c2[213],_mm_xor_si128(c2[1312],_mm_xor_si128(c2[1212],_mm_xor_si128(c2[1320],_mm_xor_si128(c2[1220],_mm_xor_si128(c2[620],_mm_xor_si128(c2[1121],_mm_xor_si128(c2[621],_mm_xor_si128(c2[521],_mm_xor_si128(c2[20],_mm_xor_si128(c2[1330],_mm_xor_si128(c2[1230],_mm_xor_si128(c2[630],_mm_xor_si128(c2[933],_mm_xor_si128(c2[833],_mm_xor_si128(c2[233],_mm_xor_si128(c2[1531],_mm_xor_si128(c2[931],_mm_xor_si128(c2[1340],_mm_xor_si128(c2[1240],_mm_xor_si128(c2[640],_mm_xor_si128(c2[541],_mm_xor_si128(c2[1540],_mm_xor_si128(c2[242],_mm_xor_si128(c2[1341],_mm_xor_si128(c2[1241],_mm_xor_si128(c2[1350],_mm_xor_si128(c2[1250],_mm_xor_si128(c2[650],_mm_xor_si128(c2[1454],_mm_xor_si128(c2[954],_mm_xor_si128(c2[854],_mm_xor_si128(c2[1360],_mm_xor_si128(c2[1260],_mm_xor_si128(c2[660],_mm_xor_si128(c2[561],_mm_xor_si128(c2[461],_mm_xor_si128(c2[1460],_mm_xor_si128(c2[861],_mm_xor_si128(c2[361],_mm_xor_si128(c2[261],_mm_xor_si128(c2[1370],_mm_xor_si128(c2[1270],_mm_xor_si128(c2[670],_mm_xor_si128(c2[574],_mm_xor_si128(c2[74],_mm_xor_si128(c2[1573],_mm_xor_si128(c2[471],_mm_xor_si128(c2[1380],_mm_xor_si128(c2[1280],_mm_xor_si128(c2[680],_mm_xor_si128(c2[882],_mm_xor_si128(c2[282],_mm_xor_si128(c2[184],_mm_xor_si128(c2[1283],_mm_xor_si128(c2[1183],_mm_xor_si128(c2[1390],_mm_xor_si128(c2[1290],_mm_xor_si128(c2[690],_mm_xor_si128(c2[494],_mm_xor_si128(c2[394],_mm_xor_si128(c2[1393],_mm_xor_si128(c2[1093],_mm_xor_si128(c2[593],c2[493])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+     d2[130]=simde_mm_xor_si128(c2[1300],simde_mm_xor_si128(c2[1200],simde_mm_xor_si128(c2[600],simde_mm_xor_si128(c2[604],simde_mm_xor_si128(c2[504],simde_mm_xor_si128(c2[1503],simde_mm_xor_si128(c2[2],simde_mm_xor_si128(c2[1001],simde_mm_xor_si128(c2[1310],simde_mm_xor_si128(c2[1210],simde_mm_xor_si128(c2[610],simde_mm_xor_si128(c2[1011],simde_mm_xor_si128(c2[411],simde_mm_xor_si128(c2[213],simde_mm_xor_si128(c2[1312],simde_mm_xor_si128(c2[1212],simde_mm_xor_si128(c2[1320],simde_mm_xor_si128(c2[1220],simde_mm_xor_si128(c2[620],simde_mm_xor_si128(c2[1121],simde_mm_xor_si128(c2[621],simde_mm_xor_si128(c2[521],simde_mm_xor_si128(c2[20],simde_mm_xor_si128(c2[1330],simde_mm_xor_si128(c2[1230],simde_mm_xor_si128(c2[630],simde_mm_xor_si128(c2[933],simde_mm_xor_si128(c2[833],simde_mm_xor_si128(c2[233],simde_mm_xor_si128(c2[1531],simde_mm_xor_si128(c2[931],simde_mm_xor_si128(c2[1340],simde_mm_xor_si128(c2[1240],simde_mm_xor_si128(c2[640],simde_mm_xor_si128(c2[541],simde_mm_xor_si128(c2[1540],simde_mm_xor_si128(c2[242],simde_mm_xor_si128(c2[1341],simde_mm_xor_si128(c2[1241],simde_mm_xor_si128(c2[1350],simde_mm_xor_si128(c2[1250],simde_mm_xor_si128(c2[650],simde_mm_xor_si128(c2[1454],simde_mm_xor_si128(c2[954],simde_mm_xor_si128(c2[854],simde_mm_xor_si128(c2[1360],simde_mm_xor_si128(c2[1260],simde_mm_xor_si128(c2[660],simde_mm_xor_si128(c2[561],simde_mm_xor_si128(c2[461],simde_mm_xor_si128(c2[1460],simde_mm_xor_si128(c2[861],simde_mm_xor_si128(c2[361],simde_mm_xor_si128(c2[261],simde_mm_xor_si128(c2[1370],simde_mm_xor_si128(c2[1270],simde_mm_xor_si128(c2[670],simde_mm_xor_si128(c2[574],simde_mm_xor_si128(c2[74],simde_mm_xor_si128(c2[1573],simde_mm_xor_si128(c2[471],simde_mm_xor_si128(c2[1380],simde_mm_xor_si128(c2[1280],simde_mm_xor_si128(c2[680],simde_mm_xor_si128(c2[882],simde_mm_xor_si128(c2[282],simde_mm_xor_si128(c2[184],simde_mm_xor_si128(c2[1283],simde_mm_xor_si128(c2[1183],simde_mm_xor_si128(c2[1390],simde_mm_xor_si128(c2[1290],simde_mm_xor_si128(c2[690],simde_mm_xor_si128(c2[494],simde_mm_xor_si128(c2[394],simde_mm_xor_si128(c2[1393],simde_mm_xor_si128(c2[1093],simde_mm_xor_si128(c2[593],c2[493])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
 
 //row: 27
-     d2[135]=_mm_xor_si128(c2[0],c2[62]);
+     d2[135]=simde_mm_xor_si128(c2[0],c2[62]);
 
 //row: 28
-     d2[140]=_mm_xor_si128(c2[10],_mm_xor_si128(c2[1422],c2[1451]));
+     d2[140]=simde_mm_xor_si128(c2[10],simde_mm_xor_si128(c2[1422],c2[1451]));
 
 //row: 29
-     d2[145]=_mm_xor_si128(c2[0],c2[1044]);
+     d2[145]=simde_mm_xor_si128(c2[0],c2[1044]);
 
 //row: 30
-     d2[150]=_mm_xor_si128(c2[20],_mm_xor_si128(c2[352],_mm_xor_si128(c2[373],c2[693])));
+     d2[150]=simde_mm_xor_si128(c2[20],simde_mm_xor_si128(c2[352],simde_mm_xor_si128(c2[373],c2[693])));
 
 //row: 31
-     d2[155]=_mm_xor_si128(c2[301],_mm_xor_si128(c2[1204],_mm_xor_si128(c2[702],_mm_xor_si128(c2[311],_mm_xor_si128(c2[112],_mm_xor_si128(c2[1013],_mm_xor_si128(c2[913],_mm_xor_si128(c2[10],_mm_xor_si128(c2[321],_mm_xor_si128(c2[322],_mm_xor_si128(c2[222],_mm_xor_si128(c2[331],_mm_xor_si128(c2[1533],_mm_xor_si128(c2[632],_mm_xor_si128(c2[341],_mm_xor_si128(c2[1241],_mm_xor_si128(c2[1042],_mm_xor_si128(c2[942],_mm_xor_si128(c2[351],_mm_xor_si128(c2[650],_mm_xor_si128(c2[550],_mm_xor_si128(c2[361],_mm_xor_si128(c2[1161],_mm_xor_si128(c2[62],_mm_xor_si128(c2[1561],_mm_xor_si128(c2[371],_mm_xor_si128(c2[1374],_mm_xor_si128(c2[1274],_mm_xor_si128(c2[381],_mm_xor_si128(c2[1582],_mm_xor_si128(c2[984],_mm_xor_si128(c2[884],_mm_xor_si128(c2[391],_mm_xor_si128(c2[1094],_mm_xor_si128(c2[294],c2[194])))))))))))))))))))))))))))))))))));
+     d2[155]=simde_mm_xor_si128(c2[301],simde_mm_xor_si128(c2[1204],simde_mm_xor_si128(c2[702],simde_mm_xor_si128(c2[311],simde_mm_xor_si128(c2[112],simde_mm_xor_si128(c2[1013],simde_mm_xor_si128(c2[913],simde_mm_xor_si128(c2[10],simde_mm_xor_si128(c2[321],simde_mm_xor_si128(c2[322],simde_mm_xor_si128(c2[222],simde_mm_xor_si128(c2[331],simde_mm_xor_si128(c2[1533],simde_mm_xor_si128(c2[632],simde_mm_xor_si128(c2[341],simde_mm_xor_si128(c2[1241],simde_mm_xor_si128(c2[1042],simde_mm_xor_si128(c2[942],simde_mm_xor_si128(c2[351],simde_mm_xor_si128(c2[650],simde_mm_xor_si128(c2[550],simde_mm_xor_si128(c2[361],simde_mm_xor_si128(c2[1161],simde_mm_xor_si128(c2[62],simde_mm_xor_si128(c2[1561],simde_mm_xor_si128(c2[371],simde_mm_xor_si128(c2[1374],simde_mm_xor_si128(c2[1274],simde_mm_xor_si128(c2[381],simde_mm_xor_si128(c2[1582],simde_mm_xor_si128(c2[984],simde_mm_xor_si128(c2[884],simde_mm_xor_si128(c2[391],simde_mm_xor_si128(c2[1094],simde_mm_xor_si128(c2[294],c2[194])))))))))))))))))))))))))))))))))));
 
 //row: 32
-     d2[160]=_mm_xor_si128(c2[1002],_mm_xor_si128(c2[902],_mm_xor_si128(c2[301],_mm_xor_si128(c2[201],_mm_xor_si128(c2[1303],_mm_xor_si128(c2[0],_mm_xor_si128(c2[1012],_mm_xor_si128(c2[912],_mm_xor_si128(c2[713],_mm_xor_si128(c2[1514],_mm_xor_si128(c2[1022],_mm_xor_si128(c2[922],_mm_xor_si128(c2[823],_mm_xor_si128(c2[1032],_mm_xor_si128(c2[932],_mm_xor_si128(c2[630],_mm_xor_si128(c2[530],_mm_xor_si128(c2[1233],_mm_xor_si128(c2[1042],_mm_xor_si128(c2[942],_mm_xor_si128(c2[243],_mm_xor_si128(c2[1543],_mm_xor_si128(c2[1052],_mm_xor_si128(c2[952],_mm_xor_si128(c2[1151],_mm_xor_si128(c2[451],_mm_xor_si128(c2[1062],_mm_xor_si128(c2[962],_mm_xor_si128(c2[263],_mm_xor_si128(c2[163],_mm_xor_si128(c2[563],_mm_xor_si128(c2[1072],_mm_xor_si128(c2[972],_mm_xor_si128(c2[271],_mm_xor_si128(c2[1082],_mm_xor_si128(c2[982],_mm_xor_si128(c2[584],_mm_xor_si128(c2[1480],_mm_xor_si128(c2[1092],_mm_xor_si128(c2[992],_mm_xor_si128(c2[191],_mm_xor_si128(c2[91],c2[790]))))))))))))))))))))))))))))))))))))))))));
+     d2[160]=simde_mm_xor_si128(c2[1002],simde_mm_xor_si128(c2[902],simde_mm_xor_si128(c2[301],simde_mm_xor_si128(c2[201],simde_mm_xor_si128(c2[1303],simde_mm_xor_si128(c2[0],simde_mm_xor_si128(c2[1012],simde_mm_xor_si128(c2[912],simde_mm_xor_si128(c2[713],simde_mm_xor_si128(c2[1514],simde_mm_xor_si128(c2[1022],simde_mm_xor_si128(c2[922],simde_mm_xor_si128(c2[823],simde_mm_xor_si128(c2[1032],simde_mm_xor_si128(c2[932],simde_mm_xor_si128(c2[630],simde_mm_xor_si128(c2[530],simde_mm_xor_si128(c2[1233],simde_mm_xor_si128(c2[1042],simde_mm_xor_si128(c2[942],simde_mm_xor_si128(c2[243],simde_mm_xor_si128(c2[1543],simde_mm_xor_si128(c2[1052],simde_mm_xor_si128(c2[952],simde_mm_xor_si128(c2[1151],simde_mm_xor_si128(c2[451],simde_mm_xor_si128(c2[1062],simde_mm_xor_si128(c2[962],simde_mm_xor_si128(c2[263],simde_mm_xor_si128(c2[163],simde_mm_xor_si128(c2[563],simde_mm_xor_si128(c2[1072],simde_mm_xor_si128(c2[972],simde_mm_xor_si128(c2[271],simde_mm_xor_si128(c2[1082],simde_mm_xor_si128(c2[982],simde_mm_xor_si128(c2[584],simde_mm_xor_si128(c2[1480],simde_mm_xor_si128(c2[1092],simde_mm_xor_si128(c2[992],simde_mm_xor_si128(c2[191],simde_mm_xor_si128(c2[91],c2[790]))))))))))))))))))))))))))))))))))))))))));
 
 //row: 33
-     d2[165]=_mm_xor_si128(c2[1200],_mm_xor_si128(c2[504],_mm_xor_si128(c2[2],_mm_xor_si128(c2[1210],_mm_xor_si128(c2[1011],_mm_xor_si128(c2[213],_mm_xor_si128(c2[1220],_mm_xor_si128(c2[1121],_mm_xor_si128(c2[20],_mm_xor_si128(c2[1230],_mm_xor_si128(c2[833],_mm_xor_si128(c2[1531],_mm_xor_si128(c2[1240],_mm_xor_si128(c2[541],_mm_xor_si128(c2[242],_mm_xor_si128(c2[1250],_mm_xor_si128(c2[1454],_mm_xor_si128(c2[1260],_mm_xor_si128(c2[461],_mm_xor_si128(c2[861],_mm_xor_si128(c2[1270],_mm_xor_si128(c2[574],_mm_xor_si128(c2[870],_mm_xor_si128(c2[1280],_mm_xor_si128(c2[882],_mm_xor_si128(c2[184],_mm_xor_si128(c2[1290],_mm_xor_si128(c2[394],c2[1093]))))))))))))))))))))))))))));
+     d2[165]=simde_mm_xor_si128(c2[1200],simde_mm_xor_si128(c2[504],simde_mm_xor_si128(c2[2],simde_mm_xor_si128(c2[1210],simde_mm_xor_si128(c2[1011],simde_mm_xor_si128(c2[213],simde_mm_xor_si128(c2[1220],simde_mm_xor_si128(c2[1121],simde_mm_xor_si128(c2[20],simde_mm_xor_si128(c2[1230],simde_mm_xor_si128(c2[833],simde_mm_xor_si128(c2[1531],simde_mm_xor_si128(c2[1240],simde_mm_xor_si128(c2[541],simde_mm_xor_si128(c2[242],simde_mm_xor_si128(c2[1250],simde_mm_xor_si128(c2[1454],simde_mm_xor_si128(c2[1260],simde_mm_xor_si128(c2[461],simde_mm_xor_si128(c2[861],simde_mm_xor_si128(c2[1270],simde_mm_xor_si128(c2[574],simde_mm_xor_si128(c2[870],simde_mm_xor_si128(c2[1280],simde_mm_xor_si128(c2[882],simde_mm_xor_si128(c2[184],simde_mm_xor_si128(c2[1290],simde_mm_xor_si128(c2[394],c2[1093]))))))))))))))))))))))))))));
 
 //row: 34
-     d2[170]=_mm_xor_si128(c2[301],_mm_xor_si128(c2[201],_mm_xor_si128(c2[1304],_mm_xor_si128(c2[1204],_mm_xor_si128(c2[1104],_mm_xor_si128(c2[603],_mm_xor_si128(c2[602],_mm_xor_si128(c2[101],_mm_xor_si128(c2[0],_mm_xor_si128(c2[311],_mm_xor_si128(c2[211],_mm_xor_si128(c2[1314],_mm_xor_si128(c2[12],_mm_xor_si128(c2[1110],_mm_xor_si128(c2[813],_mm_xor_si128(c2[412],_mm_xor_si128(c2[312],_mm_xor_si128(c2[321],_mm_xor_si128(c2[221],_mm_xor_si128(c2[1324],_mm_xor_si128(c2[122],_mm_xor_si128(c2[1320],_mm_xor_si128(c2[1220],_mm_xor_si128(c2[331],_mm_xor_si128(c2[231],_mm_xor_si128(c2[1334],_mm_xor_si128(c2[1533],_mm_xor_si128(c2[1433],_mm_xor_si128(c2[932],_mm_xor_si128(c2[532],_mm_xor_si128(c2[31],_mm_xor_si128(c2[341],_mm_xor_si128(c2[241],_mm_xor_si128(c2[1344],_mm_xor_si128(c2[1141],_mm_xor_si128(c2[640],_mm_xor_si128(c2[842],_mm_xor_si128(c2[441],_mm_xor_si128(c2[341],_mm_xor_si128(c2[351],_mm_xor_si128(c2[251],_mm_xor_si128(c2[1354],_mm_xor_si128(c2[450],_mm_xor_si128(c2[54],_mm_xor_si128(c2[1553],_mm_xor_si128(c2[361],_mm_xor_si128(c2[261],_mm_xor_si128(c2[1364],_mm_xor_si128(c2[1161],_mm_xor_si128(c2[1061],_mm_xor_si128(c2[560],_mm_xor_si128(c2[1461],_mm_xor_si128(c2[1060],_mm_xor_si128(c2[960],_mm_xor_si128(c2[371],_mm_xor_si128(c2[271],_mm_xor_si128(c2[1374],_mm_xor_si128(c2[1174],_mm_xor_si128(c2[773],_mm_xor_si128(c2[673],_mm_xor_si128(c2[381],_mm_xor_si128(c2[281],_mm_xor_si128(c2[1384],_mm_xor_si128(c2[1482],_mm_xor_si128(c2[981],_mm_xor_si128(c2[784],_mm_xor_si128(c2[383],_mm_xor_si128(c2[283],_mm_xor_si128(c2[391],_mm_xor_si128(c2[291],_mm_xor_si128(c2[1394],_mm_xor_si128(c2[1094],_mm_xor_si128(c2[994],_mm_xor_si128(c2[493],_mm_xor_si128(c2[94],_mm_xor_si128(c2[1292],c2[1192]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+     d2[170]=simde_mm_xor_si128(c2[301],simde_mm_xor_si128(c2[201],simde_mm_xor_si128(c2[1304],simde_mm_xor_si128(c2[1204],simde_mm_xor_si128(c2[1104],simde_mm_xor_si128(c2[603],simde_mm_xor_si128(c2[602],simde_mm_xor_si128(c2[101],simde_mm_xor_si128(c2[0],simde_mm_xor_si128(c2[311],simde_mm_xor_si128(c2[211],simde_mm_xor_si128(c2[1314],simde_mm_xor_si128(c2[12],simde_mm_xor_si128(c2[1110],simde_mm_xor_si128(c2[813],simde_mm_xor_si128(c2[412],simde_mm_xor_si128(c2[312],simde_mm_xor_si128(c2[321],simde_mm_xor_si128(c2[221],simde_mm_xor_si128(c2[1324],simde_mm_xor_si128(c2[122],simde_mm_xor_si128(c2[1320],simde_mm_xor_si128(c2[1220],simde_mm_xor_si128(c2[331],simde_mm_xor_si128(c2[231],simde_mm_xor_si128(c2[1334],simde_mm_xor_si128(c2[1533],simde_mm_xor_si128(c2[1433],simde_mm_xor_si128(c2[932],simde_mm_xor_si128(c2[532],simde_mm_xor_si128(c2[31],simde_mm_xor_si128(c2[341],simde_mm_xor_si128(c2[241],simde_mm_xor_si128(c2[1344],simde_mm_xor_si128(c2[1141],simde_mm_xor_si128(c2[640],simde_mm_xor_si128(c2[842],simde_mm_xor_si128(c2[441],simde_mm_xor_si128(c2[341],simde_mm_xor_si128(c2[351],simde_mm_xor_si128(c2[251],simde_mm_xor_si128(c2[1354],simde_mm_xor_si128(c2[450],simde_mm_xor_si128(c2[54],simde_mm_xor_si128(c2[1553],simde_mm_xor_si128(c2[361],simde_mm_xor_si128(c2[261],simde_mm_xor_si128(c2[1364],simde_mm_xor_si128(c2[1161],simde_mm_xor_si128(c2[1061],simde_mm_xor_si128(c2[560],simde_mm_xor_si128(c2[1461],simde_mm_xor_si128(c2[1060],simde_mm_xor_si128(c2[960],simde_mm_xor_si128(c2[371],simde_mm_xor_si128(c2[271],simde_mm_xor_si128(c2[1374],simde_mm_xor_si128(c2[1174],simde_mm_xor_si128(c2[773],simde_mm_xor_si128(c2[673],simde_mm_xor_si128(c2[381],simde_mm_xor_si128(c2[281],simde_mm_xor_si128(c2[1384],simde_mm_xor_si128(c2[1482],simde_mm_xor_si128(c2[981],simde_mm_xor_si128(c2[784],simde_mm_xor_si128(c2[383],simde_mm_xor_si128(c2[283],simde_mm_xor_si128(c2[391],simde_mm_xor_si128(c2[291],simde_mm_xor_si128(c2[1394],simde_mm_xor_si128(c2[1094],simde_mm_xor_si128(c2[994],simde_mm_xor_si128(c2[493],simde_mm_xor_si128(c2[94],simde_mm_xor_si128(c2[1292],c2[1192]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
 
 //row: 35
-     d2[175]=_mm_xor_si128(c2[600],_mm_xor_si128(c2[500],_mm_xor_si128(c2[1403],_mm_xor_si128(c2[901],_mm_xor_si128(c2[610],_mm_xor_si128(c2[510],_mm_xor_si128(c2[311],_mm_xor_si128(c2[1112],_mm_xor_si128(c2[10],_mm_xor_si128(c2[620],_mm_xor_si128(c2[520],_mm_xor_si128(c2[421],_mm_xor_si128(c2[630],_mm_xor_si128(c2[530],_mm_xor_si128(c2[133],_mm_xor_si128(c2[831],_mm_xor_si128(c2[540],_mm_xor_si128(c2[1440],_mm_xor_si128(c2[1141],_mm_xor_si128(c2[550],_mm_xor_si128(c2[754],_mm_xor_si128(c2[1354],_mm_xor_si128(c2[660],_mm_xor_si128(c2[560],_mm_xor_si128(c2[1360],_mm_xor_si128(c2[161],_mm_xor_si128(c2[570],_mm_xor_si128(c2[1473],_mm_xor_si128(c2[580],_mm_xor_si128(c2[182],_mm_xor_si128(c2[1083],_mm_xor_si128(c2[690],_mm_xor_si128(c2[590],_mm_xor_si128(c2[1293],c2[393]))))))))))))))))))))))))))))))))));
+     d2[175]=simde_mm_xor_si128(c2[600],simde_mm_xor_si128(c2[500],simde_mm_xor_si128(c2[1403],simde_mm_xor_si128(c2[901],simde_mm_xor_si128(c2[610],simde_mm_xor_si128(c2[510],simde_mm_xor_si128(c2[311],simde_mm_xor_si128(c2[1112],simde_mm_xor_si128(c2[10],simde_mm_xor_si128(c2[620],simde_mm_xor_si128(c2[520],simde_mm_xor_si128(c2[421],simde_mm_xor_si128(c2[630],simde_mm_xor_si128(c2[530],simde_mm_xor_si128(c2[133],simde_mm_xor_si128(c2[831],simde_mm_xor_si128(c2[540],simde_mm_xor_si128(c2[1440],simde_mm_xor_si128(c2[1141],simde_mm_xor_si128(c2[550],simde_mm_xor_si128(c2[754],simde_mm_xor_si128(c2[1354],simde_mm_xor_si128(c2[660],simde_mm_xor_si128(c2[560],simde_mm_xor_si128(c2[1360],simde_mm_xor_si128(c2[161],simde_mm_xor_si128(c2[570],simde_mm_xor_si128(c2[1473],simde_mm_xor_si128(c2[580],simde_mm_xor_si128(c2[182],simde_mm_xor_si128(c2[1083],simde_mm_xor_si128(c2[690],simde_mm_xor_si128(c2[590],simde_mm_xor_si128(c2[1293],c2[393]))))))))))))))))))))))))))))))))));
 
 //row: 36
-     d2[180]=_mm_xor_si128(c2[0],_mm_xor_si128(c2[1523],c2[270]));
+     d2[180]=simde_mm_xor_si128(c2[0],simde_mm_xor_si128(c2[1523],c2[270]));
 
 //row: 37
-     d2[185]=_mm_xor_si128(c2[1504],_mm_xor_si128(c2[1503],_mm_xor_si128(c2[803],_mm_xor_si128(c2[802],_mm_xor_si128(c2[301],_mm_xor_si128(c2[300],_mm_xor_si128(c2[1514],_mm_xor_si128(c2[1513],_mm_xor_si128(c2[1310],_mm_xor_si128(c2[1314],_mm_xor_si128(c2[512],_mm_xor_si128(c2[611],_mm_xor_si128(c2[511],_mm_xor_si128(c2[1524],_mm_xor_si128(c2[1523],_mm_xor_si128(c2[1420],_mm_xor_si128(c2[1524],_mm_xor_si128(c2[1424],_mm_xor_si128(c2[1534],_mm_xor_si128(c2[1533],_mm_xor_si128(c2[1132],_mm_xor_si128(c2[1131],_mm_xor_si128(c2[231],_mm_xor_si128(c2[230],_mm_xor_si128(c2[1544],_mm_xor_si128(c2[1543],_mm_xor_si128(c2[840],_mm_xor_si128(c2[844],_mm_xor_si128(c2[541],_mm_xor_si128(c2[640],_mm_xor_si128(c2[540],_mm_xor_si128(c2[1554],_mm_xor_si128(c2[1553],_mm_xor_si128(c2[154],_mm_xor_si128(c2[253],_mm_xor_si128(c2[153],_mm_xor_si128(c2[1564],_mm_xor_si128(c2[1563],_mm_xor_si128(c2[760],_mm_xor_si128(c2[764],_mm_xor_si128(c2[1160],_mm_xor_si128(c2[1264],_mm_xor_si128(c2[1164],_mm_xor_si128(c2[1574],_mm_xor_si128(c2[1573],_mm_xor_si128(c2[873],_mm_xor_si128(c2[972],_mm_xor_si128(c2[872],_mm_xor_si128(c2[1584],_mm_xor_si128(c2[1583],_mm_xor_si128(c2[1181],_mm_xor_si128(c2[1180],_mm_xor_si128(c2[483],_mm_xor_si128(c2[582],_mm_xor_si128(c2[482],_mm_xor_si128(c2[1594],_mm_xor_si128(c2[1593],_mm_xor_si128(c2[693],_mm_xor_si128(c2[692],_mm_xor_si128(c2[1392],_mm_xor_si128(c2[1491],c2[1391])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+     d2[185]=simde_mm_xor_si128(c2[1504],simde_mm_xor_si128(c2[1503],simde_mm_xor_si128(c2[803],simde_mm_xor_si128(c2[802],simde_mm_xor_si128(c2[301],simde_mm_xor_si128(c2[300],simde_mm_xor_si128(c2[1514],simde_mm_xor_si128(c2[1513],simde_mm_xor_si128(c2[1310],simde_mm_xor_si128(c2[1314],simde_mm_xor_si128(c2[512],simde_mm_xor_si128(c2[611],simde_mm_xor_si128(c2[511],simde_mm_xor_si128(c2[1524],simde_mm_xor_si128(c2[1523],simde_mm_xor_si128(c2[1420],simde_mm_xor_si128(c2[1524],simde_mm_xor_si128(c2[1424],simde_mm_xor_si128(c2[1534],simde_mm_xor_si128(c2[1533],simde_mm_xor_si128(c2[1132],simde_mm_xor_si128(c2[1131],simde_mm_xor_si128(c2[231],simde_mm_xor_si128(c2[230],simde_mm_xor_si128(c2[1544],simde_mm_xor_si128(c2[1543],simde_mm_xor_si128(c2[840],simde_mm_xor_si128(c2[844],simde_mm_xor_si128(c2[541],simde_mm_xor_si128(c2[640],simde_mm_xor_si128(c2[540],simde_mm_xor_si128(c2[1554],simde_mm_xor_si128(c2[1553],simde_mm_xor_si128(c2[154],simde_mm_xor_si128(c2[253],simde_mm_xor_si128(c2[153],simde_mm_xor_si128(c2[1564],simde_mm_xor_si128(c2[1563],simde_mm_xor_si128(c2[760],simde_mm_xor_si128(c2[764],simde_mm_xor_si128(c2[1160],simde_mm_xor_si128(c2[1264],simde_mm_xor_si128(c2[1164],simde_mm_xor_si128(c2[1574],simde_mm_xor_si128(c2[1573],simde_mm_xor_si128(c2[873],simde_mm_xor_si128(c2[972],simde_mm_xor_si128(c2[872],simde_mm_xor_si128(c2[1584],simde_mm_xor_si128(c2[1583],simde_mm_xor_si128(c2[1181],simde_mm_xor_si128(c2[1180],simde_mm_xor_si128(c2[483],simde_mm_xor_si128(c2[582],simde_mm_xor_si128(c2[482],simde_mm_xor_si128(c2[1594],simde_mm_xor_si128(c2[1593],simde_mm_xor_si128(c2[693],simde_mm_xor_si128(c2[692],simde_mm_xor_si128(c2[1392],simde_mm_xor_si128(c2[1491],c2[1391])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
 
 //row: 38
-     d2[190]=_mm_xor_si128(c2[301],_mm_xor_si128(c2[201],_mm_xor_si128(c2[1104],_mm_xor_si128(c2[602],_mm_xor_si128(c2[311],_mm_xor_si128(c2[211],_mm_xor_si128(c2[12],_mm_xor_si128(c2[813],_mm_xor_si128(c2[10],_mm_xor_si128(c2[321],_mm_xor_si128(c2[221],_mm_xor_si128(c2[122],_mm_xor_si128(c2[331],_mm_xor_si128(c2[231],_mm_xor_si128(c2[1433],_mm_xor_si128(c2[532],_mm_xor_si128(c2[241],_mm_xor_si128(c2[1141],_mm_xor_si128(c2[842],_mm_xor_si128(c2[251],_mm_xor_si128(c2[450],_mm_xor_si128(c2[1350],_mm_xor_si128(c2[361],_mm_xor_si128(c2[261],_mm_xor_si128(c2[1061],_mm_xor_si128(c2[1461],_mm_xor_si128(c2[271],_mm_xor_si128(c2[1174],_mm_xor_si128(c2[281],_mm_xor_si128(c2[1482],_mm_xor_si128(c2[784],_mm_xor_si128(c2[391],_mm_xor_si128(c2[291],_mm_xor_si128(c2[994],c2[94]))))))))))))))))))))))))))))))))));
+     d2[190]=simde_mm_xor_si128(c2[301],simde_mm_xor_si128(c2[201],simde_mm_xor_si128(c2[1104],simde_mm_xor_si128(c2[602],simde_mm_xor_si128(c2[311],simde_mm_xor_si128(c2[211],simde_mm_xor_si128(c2[12],simde_mm_xor_si128(c2[813],simde_mm_xor_si128(c2[10],simde_mm_xor_si128(c2[321],simde_mm_xor_si128(c2[221],simde_mm_xor_si128(c2[122],simde_mm_xor_si128(c2[331],simde_mm_xor_si128(c2[231],simde_mm_xor_si128(c2[1433],simde_mm_xor_si128(c2[532],simde_mm_xor_si128(c2[241],simde_mm_xor_si128(c2[1141],simde_mm_xor_si128(c2[842],simde_mm_xor_si128(c2[251],simde_mm_xor_si128(c2[450],simde_mm_xor_si128(c2[1350],simde_mm_xor_si128(c2[361],simde_mm_xor_si128(c2[261],simde_mm_xor_si128(c2[1061],simde_mm_xor_si128(c2[1461],simde_mm_xor_si128(c2[271],simde_mm_xor_si128(c2[1174],simde_mm_xor_si128(c2[281],simde_mm_xor_si128(c2[1482],simde_mm_xor_si128(c2[784],simde_mm_xor_si128(c2[391],simde_mm_xor_si128(c2[291],simde_mm_xor_si128(c2[994],c2[94]))))))))))))))))))))))))))))))))));
 
 //row: 39
-     d2[195]=_mm_xor_si128(c2[1003],_mm_xor_si128(c2[903],_mm_xor_si128(c2[302],_mm_xor_si128(c2[202],_mm_xor_si128(c2[1304],_mm_xor_si128(c2[0],_mm_xor_si128(c2[1013],_mm_xor_si128(c2[913],_mm_xor_si128(c2[714],_mm_xor_si128(c2[1510],_mm_xor_si128(c2[1023],_mm_xor_si128(c2[923],_mm_xor_si128(c2[824],_mm_xor_si128(c2[1033],_mm_xor_si128(c2[933],_mm_xor_si128(c2[631],_mm_xor_si128(c2[531],_mm_xor_si128(c2[1234],_mm_xor_si128(c2[1043],_mm_xor_si128(c2[943],_mm_xor_si128(c2[244],_mm_xor_si128(c2[1544],_mm_xor_si128(c2[1053],_mm_xor_si128(c2[953],_mm_xor_si128(c2[1152],_mm_xor_si128(c2[1063],_mm_xor_si128(c2[963],_mm_xor_si128(c2[264],_mm_xor_si128(c2[164],_mm_xor_si128(c2[564],_mm_xor_si128(c2[1073],_mm_xor_si128(c2[973],_mm_xor_si128(c2[272],_mm_xor_si128(c2[871],_mm_xor_si128(c2[1083],_mm_xor_si128(c2[983],_mm_xor_si128(c2[580],_mm_xor_si128(c2[1481],_mm_xor_si128(c2[1093],_mm_xor_si128(c2[993],_mm_xor_si128(c2[192],_mm_xor_si128(c2[92],c2[791]))))))))))))))))))))))))))))))))))))))))));
+     d2[195]=simde_mm_xor_si128(c2[1003],simde_mm_xor_si128(c2[903],simde_mm_xor_si128(c2[302],simde_mm_xor_si128(c2[202],simde_mm_xor_si128(c2[1304],simde_mm_xor_si128(c2[0],simde_mm_xor_si128(c2[1013],simde_mm_xor_si128(c2[913],simde_mm_xor_si128(c2[714],simde_mm_xor_si128(c2[1510],simde_mm_xor_si128(c2[1023],simde_mm_xor_si128(c2[923],simde_mm_xor_si128(c2[824],simde_mm_xor_si128(c2[1033],simde_mm_xor_si128(c2[933],simde_mm_xor_si128(c2[631],simde_mm_xor_si128(c2[531],simde_mm_xor_si128(c2[1234],simde_mm_xor_si128(c2[1043],simde_mm_xor_si128(c2[943],simde_mm_xor_si128(c2[244],simde_mm_xor_si128(c2[1544],simde_mm_xor_si128(c2[1053],simde_mm_xor_si128(c2[953],simde_mm_xor_si128(c2[1152],simde_mm_xor_si128(c2[1063],simde_mm_xor_si128(c2[963],simde_mm_xor_si128(c2[264],simde_mm_xor_si128(c2[164],simde_mm_xor_si128(c2[564],simde_mm_xor_si128(c2[1073],simde_mm_xor_si128(c2[973],simde_mm_xor_si128(c2[272],simde_mm_xor_si128(c2[871],simde_mm_xor_si128(c2[1083],simde_mm_xor_si128(c2[983],simde_mm_xor_si128(c2[580],simde_mm_xor_si128(c2[1481],simde_mm_xor_si128(c2[1093],simde_mm_xor_si128(c2[993],simde_mm_xor_si128(c2[192],simde_mm_xor_si128(c2[92],c2[791]))))))))))))))))))))))))))))))))))))))))));
 
 //row: 40
-     d2[200]=_mm_xor_si128(c2[302],_mm_xor_si128(c2[1403],_mm_xor_si128(c2[1200],_mm_xor_si128(c2[702],_mm_xor_si128(c2[703],_mm_xor_si128(c2[200],_mm_xor_si128(c2[312],_mm_xor_si128(c2[1413],_mm_xor_si128(c2[113],_mm_xor_si128(c2[1214],_mm_xor_si128(c2[914],_mm_xor_si128(c2[511],_mm_xor_si128(c2[411],_mm_xor_si128(c2[322],_mm_xor_si128(c2[1423],_mm_xor_si128(c2[223],_mm_xor_si128(c2[1424],_mm_xor_si128(c2[1324],_mm_xor_si128(c2[20],_mm_xor_si128(c2[332],_mm_xor_si128(c2[1433],_mm_xor_si128(c2[1534],_mm_xor_si128(c2[1031],_mm_xor_si128(c2[633],_mm_xor_si128(c2[130],_mm_xor_si128(c2[342],_mm_xor_si128(c2[1443],_mm_xor_si128(c2[1242],_mm_xor_si128(c2[744],_mm_xor_si128(c2[943],_mm_xor_si128(c2[540],_mm_xor_si128(c2[440],_mm_xor_si128(c2[352],_mm_xor_si128(c2[1453],_mm_xor_si128(c2[551],_mm_xor_si128(c2[153],_mm_xor_si128(c2[53],_mm_xor_si128(c2[362],_mm_xor_si128(c2[1463],_mm_xor_si128(c2[1162],_mm_xor_si128(c2[664],_mm_xor_si128(c2[1562],_mm_xor_si128(c2[1164],_mm_xor_si128(c2[1064],_mm_xor_si128(c2[372],_mm_xor_si128(c2[1473],_mm_xor_si128(c2[1270],_mm_xor_si128(c2[872],_mm_xor_si128(c2[772],_mm_xor_si128(c2[382],_mm_xor_si128(c2[1483],_mm_xor_si128(c2[1583],_mm_xor_si128(c2[1080],_mm_xor_si128(c2[880],_mm_xor_si128(c2[482],_mm_xor_si128(c2[382],_mm_xor_si128(c2[392],_mm_xor_si128(c2[1493],_mm_xor_si128(c2[1090],_mm_xor_si128(c2[592],_mm_xor_si128(c2[190],_mm_xor_si128(c2[1391],c2[1291]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+     d2[200]=simde_mm_xor_si128(c2[302],simde_mm_xor_si128(c2[1403],simde_mm_xor_si128(c2[1200],simde_mm_xor_si128(c2[702],simde_mm_xor_si128(c2[703],simde_mm_xor_si128(c2[200],simde_mm_xor_si128(c2[312],simde_mm_xor_si128(c2[1413],simde_mm_xor_si128(c2[113],simde_mm_xor_si128(c2[1214],simde_mm_xor_si128(c2[914],simde_mm_xor_si128(c2[511],simde_mm_xor_si128(c2[411],simde_mm_xor_si128(c2[322],simde_mm_xor_si128(c2[1423],simde_mm_xor_si128(c2[223],simde_mm_xor_si128(c2[1424],simde_mm_xor_si128(c2[1324],simde_mm_xor_si128(c2[20],simde_mm_xor_si128(c2[332],simde_mm_xor_si128(c2[1433],simde_mm_xor_si128(c2[1534],simde_mm_xor_si128(c2[1031],simde_mm_xor_si128(c2[633],simde_mm_xor_si128(c2[130],simde_mm_xor_si128(c2[342],simde_mm_xor_si128(c2[1443],simde_mm_xor_si128(c2[1242],simde_mm_xor_si128(c2[744],simde_mm_xor_si128(c2[943],simde_mm_xor_si128(c2[540],simde_mm_xor_si128(c2[440],simde_mm_xor_si128(c2[352],simde_mm_xor_si128(c2[1453],simde_mm_xor_si128(c2[551],simde_mm_xor_si128(c2[153],simde_mm_xor_si128(c2[53],simde_mm_xor_si128(c2[362],simde_mm_xor_si128(c2[1463],simde_mm_xor_si128(c2[1162],simde_mm_xor_si128(c2[664],simde_mm_xor_si128(c2[1562],simde_mm_xor_si128(c2[1164],simde_mm_xor_si128(c2[1064],simde_mm_xor_si128(c2[372],simde_mm_xor_si128(c2[1473],simde_mm_xor_si128(c2[1270],simde_mm_xor_si128(c2[872],simde_mm_xor_si128(c2[772],simde_mm_xor_si128(c2[382],simde_mm_xor_si128(c2[1483],simde_mm_xor_si128(c2[1583],simde_mm_xor_si128(c2[1080],simde_mm_xor_si128(c2[880],simde_mm_xor_si128(c2[482],simde_mm_xor_si128(c2[382],simde_mm_xor_si128(c2[392],simde_mm_xor_si128(c2[1493],simde_mm_xor_si128(c2[1090],simde_mm_xor_si128(c2[592],simde_mm_xor_si128(c2[190],simde_mm_xor_si128(c2[1391],c2[1291]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
 
 //row: 41
-     d2[205]=_mm_xor_si128(c2[703],_mm_xor_si128(c2[603],_mm_xor_si128(c2[1501],_mm_xor_si128(c2[1004],_mm_xor_si128(c2[713],_mm_xor_si128(c2[613],_mm_xor_si128(c2[414],_mm_xor_si128(c2[1210],_mm_xor_si128(c2[10],_mm_xor_si128(c2[723],_mm_xor_si128(c2[623],_mm_xor_si128(c2[524],_mm_xor_si128(c2[733],_mm_xor_si128(c2[633],_mm_xor_si128(c2[231],_mm_xor_si128(c2[934],_mm_xor_si128(c2[643],_mm_xor_si128(c2[1543],_mm_xor_si128(c2[1244],_mm_xor_si128(c2[653],_mm_xor_si128(c2[852],_mm_xor_si128(c2[250],_mm_xor_si128(c2[763],_mm_xor_si128(c2[663],_mm_xor_si128(c2[1463],_mm_xor_si128(c2[264],_mm_xor_si128(c2[673],_mm_xor_si128(c2[1571],_mm_xor_si128(c2[683],_mm_xor_si128(c2[280],_mm_xor_si128(c2[1181],_mm_xor_si128(c2[793],_mm_xor_si128(c2[693],_mm_xor_si128(c2[1391],c2[491]))))))))))))))))))))))))))))))))));
+     d2[205]=simde_mm_xor_si128(c2[703],simde_mm_xor_si128(c2[603],simde_mm_xor_si128(c2[1501],simde_mm_xor_si128(c2[1004],simde_mm_xor_si128(c2[713],simde_mm_xor_si128(c2[613],simde_mm_xor_si128(c2[414],simde_mm_xor_si128(c2[1210],simde_mm_xor_si128(c2[10],simde_mm_xor_si128(c2[723],simde_mm_xor_si128(c2[623],simde_mm_xor_si128(c2[524],simde_mm_xor_si128(c2[733],simde_mm_xor_si128(c2[633],simde_mm_xor_si128(c2[231],simde_mm_xor_si128(c2[934],simde_mm_xor_si128(c2[643],simde_mm_xor_si128(c2[1543],simde_mm_xor_si128(c2[1244],simde_mm_xor_si128(c2[653],simde_mm_xor_si128(c2[852],simde_mm_xor_si128(c2[250],simde_mm_xor_si128(c2[763],simde_mm_xor_si128(c2[663],simde_mm_xor_si128(c2[1463],simde_mm_xor_si128(c2[264],simde_mm_xor_si128(c2[673],simde_mm_xor_si128(c2[1571],simde_mm_xor_si128(c2[683],simde_mm_xor_si128(c2[280],simde_mm_xor_si128(c2[1181],simde_mm_xor_si128(c2[793],simde_mm_xor_si128(c2[693],simde_mm_xor_si128(c2[1391],c2[491]))))))))))))))))))))))))))))))))));
   }
 }
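
The hunks above and below follow one pattern: each x86 intrinsic in the generated LDPC encoders is renamed to its SIMDE portable equivalent (simde__m128i / simde_mm_xor_si128 for the 128-bit tables above, simde__m64 / simde_mm_xor_si64 for the byte-encoding tables in the next file), leaving the XOR chains themselves untouched. That is what lets these generated files compile on aarch64 and armv7l after the CMake changes, since SIMDE supplies the vector types and operations that the native x86 headers would otherwise provide. A minimal standalone sketch of the replacement API, assuming only that the SIMDE headers are on the include path; the buffers, values, and main() harness are illustrative, not taken from the encoder:

  #include <stdint.h>
  #include <stdio.h>
  #include <simde/x86/sse2.h>  /* provides simde__m128i and simde_mm_xor_si128 */

  int main(void) {
    uint8_t c[32], d[16];
    for (int i = 0; i < 32; i++)
      c[i] = (uint8_t)i;  /* illustrative input: bytes 0..31 */

    /* The generated encoders cast the byte buffers to vector pointers and
       index them directly; loadu/storeu is used here so the sketch carries
       no alignment requirement. */
    simde__m128i a = simde_mm_loadu_si128((simde__m128i const *)&c[0]);
    simde__m128i b = simde_mm_loadu_si128((simde__m128i const *)&c[16]);
    simde_mm_storeu_si128((simde__m128i *)d, simde_mm_xor_si128(a, b));

    /* Each output byte is i ^ (i + 16) == 0x10. */
    for (int i = 0; i < 16; i++)
      printf("%02x ", d[i]);
    printf("\n");
    return 0;
  }

On x86 the SIMDE call compiles down to the same XOR instruction the original intrinsic produced; on ARM it is mapped to the equivalent NEON operation (or scalar emulation where none exists), so the generated code needs no per-architecture variants.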
diff --git a/openair1/PHY/CODING/nrLDPC_encoder/ldpc_BG2_Zc88_byte.c b/openair1/PHY/CODING/nrLDPC_encoder/ldpc_BG2_Zc88_byte.c
index 8dfd2d92c65c32c93f21ccd68797998cbffb4b0d..4e011b2649a8ab312356a2fecfe41fa2590f4104 100644
--- a/openair1/PHY/CODING/nrLDPC_encoder/ldpc_BG2_Zc88_byte.c
+++ b/openair1/PHY/CODING/nrLDPC_encoder/ldpc_BG2_Zc88_byte.c
@@ -1,9 +1,8 @@
 #include "PHY/sse_intrin.h"
 // generated code for Zc=88, byte encoding
 static inline __attribute__ ((no_sanitize("address", "undefined"))) void ldpc_BG2_Zc88_byte(uint8_t *c,uint8_t *d) {
-  __m64 *csimd=(__m64 *)c,*dsimd=(__m64 *)d;
-
-  __m64 *c2,*d2;
+  simde__m64 *csimd=(simde__m64 *)c,*dsimd=(simde__m64 *)d;
+  simde__m64 *c2,*d2;
 
   int i2;
   for (i2=0; i2<11; i2++) {
@@ -11,129 +10,129 @@ static inline __attribute__ ((no_sanitize("address", "undefined"))) void ldpc_BG
      d2=&dsimd[i2];
 
 //row: 0
-     d2[0]=_mm_xor_si64(c2[668],_mm_xor_si64(c2[2],_mm_xor_si64(c2[221],_mm_xor_si64(c2[1348],_mm_xor_si64(c2[1571],_mm_xor_si64(c2[908],_mm_xor_si64(c2[1145],_mm_xor_si64(c2[1154],_mm_xor_si64(c2[506],_mm_xor_si64(c2[74],_mm_xor_si64(c2[508],_mm_xor_si64(c2[1415],_mm_xor_si64(c2[318],_mm_xor_si64(c2[1198],_mm_xor_si64(c2[110],_mm_xor_si64(c2[1434],_mm_xor_si64(c2[1676],_mm_xor_si64(c2[1458],_mm_xor_si64(c2[134],_mm_xor_si64(c2[1038],_mm_xor_si64(c2[154],_mm_xor_si64(c2[626],_mm_xor_si64(c2[1282],_mm_xor_si64(c2[178],_mm_xor_si64(c2[642],_mm_xor_si64(c2[1083],c2[200]))))))))))))))))))))))))));
+     d2[0]=simde_mm_xor_si64(c2[668],simde_mm_xor_si64(c2[2],simde_mm_xor_si64(c2[221],simde_mm_xor_si64(c2[1348],simde_mm_xor_si64(c2[1571],simde_mm_xor_si64(c2[908],simde_mm_xor_si64(c2[1145],simde_mm_xor_si64(c2[1154],simde_mm_xor_si64(c2[506],simde_mm_xor_si64(c2[74],simde_mm_xor_si64(c2[508],simde_mm_xor_si64(c2[1415],simde_mm_xor_si64(c2[318],simde_mm_xor_si64(c2[1198],simde_mm_xor_si64(c2[110],simde_mm_xor_si64(c2[1434],simde_mm_xor_si64(c2[1676],simde_mm_xor_si64(c2[1458],simde_mm_xor_si64(c2[134],simde_mm_xor_si64(c2[1038],simde_mm_xor_si64(c2[154],simde_mm_xor_si64(c2[626],simde_mm_xor_si64(c2[1282],simde_mm_xor_si64(c2[178],simde_mm_xor_si64(c2[642],simde_mm_xor_si64(c2[1083],c2[200]))))))))))))))))))))))))));
 
 //row: 1
-     d2[11]=_mm_xor_si64(c2[888],_mm_xor_si64(c2[668],_mm_xor_si64(c2[2],_mm_xor_si64(c2[221],_mm_xor_si64(c2[1568],_mm_xor_si64(c2[1348],_mm_xor_si64(c2[1571],_mm_xor_si64(c2[908],_mm_xor_si64(c2[1365],_mm_xor_si64(c2[1145],_mm_xor_si64(c2[1154],_mm_xor_si64(c2[726],_mm_xor_si64(c2[506],_mm_xor_si64(c2[74],_mm_xor_si64(c2[508],_mm_xor_si64(c2[1415],_mm_xor_si64(c2[318],_mm_xor_si64(c2[1198],_mm_xor_si64(c2[110],_mm_xor_si64(c2[1434],_mm_xor_si64(c2[137],_mm_xor_si64(c2[1676],_mm_xor_si64(c2[1458],_mm_xor_si64(c2[134],_mm_xor_si64(c2[1038],_mm_xor_si64(c2[154],_mm_xor_si64(c2[626],_mm_xor_si64(c2[1282],_mm_xor_si64(c2[178],_mm_xor_si64(c2[862],_mm_xor_si64(c2[642],_mm_xor_si64(c2[1083],c2[200]))))))))))))))))))))))))))))))));
+     d2[11]=simde_mm_xor_si64(c2[888],simde_mm_xor_si64(c2[668],simde_mm_xor_si64(c2[2],simde_mm_xor_si64(c2[221],simde_mm_xor_si64(c2[1568],simde_mm_xor_si64(c2[1348],simde_mm_xor_si64(c2[1571],simde_mm_xor_si64(c2[908],simde_mm_xor_si64(c2[1365],simde_mm_xor_si64(c2[1145],simde_mm_xor_si64(c2[1154],simde_mm_xor_si64(c2[726],simde_mm_xor_si64(c2[506],simde_mm_xor_si64(c2[74],simde_mm_xor_si64(c2[508],simde_mm_xor_si64(c2[1415],simde_mm_xor_si64(c2[318],simde_mm_xor_si64(c2[1198],simde_mm_xor_si64(c2[110],simde_mm_xor_si64(c2[1434],simde_mm_xor_si64(c2[137],simde_mm_xor_si64(c2[1676],simde_mm_xor_si64(c2[1458],simde_mm_xor_si64(c2[134],simde_mm_xor_si64(c2[1038],simde_mm_xor_si64(c2[154],simde_mm_xor_si64(c2[626],simde_mm_xor_si64(c2[1282],simde_mm_xor_si64(c2[178],simde_mm_xor_si64(c2[862],simde_mm_xor_si64(c2[642],simde_mm_xor_si64(c2[1083],c2[200]))))))))))))))))))))))))))))))));
 
 //row: 2
-     d2[22]=_mm_xor_si64(c2[888],_mm_xor_si64(c2[668],_mm_xor_si64(c2[222],_mm_xor_si64(c2[2],_mm_xor_si64(c2[221],_mm_xor_si64(c2[1568],_mm_xor_si64(c2[1348],_mm_xor_si64(c2[1571],_mm_xor_si64(c2[908],_mm_xor_si64(c2[1365],_mm_xor_si64(c2[1145],_mm_xor_si64(c2[1154],_mm_xor_si64(c2[726],_mm_xor_si64(c2[506],_mm_xor_si64(c2[294],_mm_xor_si64(c2[74],_mm_xor_si64(c2[508],_mm_xor_si64(c2[1635],_mm_xor_si64(c2[1415],_mm_xor_si64(c2[318],_mm_xor_si64(c2[1198],_mm_xor_si64(c2[330],_mm_xor_si64(c2[110],_mm_xor_si64(c2[1434],_mm_xor_si64(c2[137],_mm_xor_si64(c2[1676],_mm_xor_si64(c2[1678],_mm_xor_si64(c2[1458],_mm_xor_si64(c2[134],_mm_xor_si64(c2[1258],_mm_xor_si64(c2[1038],_mm_xor_si64(c2[154],_mm_xor_si64(c2[846],_mm_xor_si64(c2[626],_mm_xor_si64(c2[1282],_mm_xor_si64(c2[178],_mm_xor_si64(c2[862],_mm_xor_si64(c2[642],_mm_xor_si64(c2[1303],_mm_xor_si64(c2[1083],c2[200]))))))))))))))))))))))))))))))))))))))));
+     d2[22]=simde_mm_xor_si64(c2[888],simde_mm_xor_si64(c2[668],simde_mm_xor_si64(c2[222],simde_mm_xor_si64(c2[2],simde_mm_xor_si64(c2[221],simde_mm_xor_si64(c2[1568],simde_mm_xor_si64(c2[1348],simde_mm_xor_si64(c2[1571],simde_mm_xor_si64(c2[908],simde_mm_xor_si64(c2[1365],simde_mm_xor_si64(c2[1145],simde_mm_xor_si64(c2[1154],simde_mm_xor_si64(c2[726],simde_mm_xor_si64(c2[506],simde_mm_xor_si64(c2[294],simde_mm_xor_si64(c2[74],simde_mm_xor_si64(c2[508],simde_mm_xor_si64(c2[1635],simde_mm_xor_si64(c2[1415],simde_mm_xor_si64(c2[318],simde_mm_xor_si64(c2[1198],simde_mm_xor_si64(c2[330],simde_mm_xor_si64(c2[110],simde_mm_xor_si64(c2[1434],simde_mm_xor_si64(c2[137],simde_mm_xor_si64(c2[1676],simde_mm_xor_si64(c2[1678],simde_mm_xor_si64(c2[1458],simde_mm_xor_si64(c2[134],simde_mm_xor_si64(c2[1258],simde_mm_xor_si64(c2[1038],simde_mm_xor_si64(c2[154],simde_mm_xor_si64(c2[846],simde_mm_xor_si64(c2[626],simde_mm_xor_si64(c2[1282],simde_mm_xor_si64(c2[178],simde_mm_xor_si64(c2[862],simde_mm_xor_si64(c2[642],simde_mm_xor_si64(c2[1303],simde_mm_xor_si64(c2[1083],c2[200]))))))))))))))))))))))))))))))))))))))));
 
 //row: 3
-     d2[33]=_mm_xor_si64(c2[668],_mm_xor_si64(c2[2],_mm_xor_si64(c2[221],_mm_xor_si64(c2[1348],_mm_xor_si64(c2[1571],_mm_xor_si64(c2[1128],_mm_xor_si64(c2[908],_mm_xor_si64(c2[1145],_mm_xor_si64(c2[1374],_mm_xor_si64(c2[1154],_mm_xor_si64(c2[506],_mm_xor_si64(c2[74],_mm_xor_si64(c2[508],_mm_xor_si64(c2[1415],_mm_xor_si64(c2[318],_mm_xor_si64(c2[1418],_mm_xor_si64(c2[1198],_mm_xor_si64(c2[110],_mm_xor_si64(c2[1654],_mm_xor_si64(c2[1434],_mm_xor_si64(c2[1676],_mm_xor_si64(c2[1458],_mm_xor_si64(c2[354],_mm_xor_si64(c2[134],_mm_xor_si64(c2[1038],_mm_xor_si64(c2[374],_mm_xor_si64(c2[154],_mm_xor_si64(c2[626],_mm_xor_si64(c2[1282],_mm_xor_si64(c2[398],_mm_xor_si64(c2[178],_mm_xor_si64(c2[642],_mm_xor_si64(c2[1083],_mm_xor_si64(c2[420],c2[200]))))))))))))))))))))))))))))))))));
+     d2[33]=simde_mm_xor_si64(c2[668],simde_mm_xor_si64(c2[2],simde_mm_xor_si64(c2[221],simde_mm_xor_si64(c2[1348],simde_mm_xor_si64(c2[1571],simde_mm_xor_si64(c2[1128],simde_mm_xor_si64(c2[908],simde_mm_xor_si64(c2[1145],simde_mm_xor_si64(c2[1374],simde_mm_xor_si64(c2[1154],simde_mm_xor_si64(c2[506],simde_mm_xor_si64(c2[74],simde_mm_xor_si64(c2[508],simde_mm_xor_si64(c2[1415],simde_mm_xor_si64(c2[318],simde_mm_xor_si64(c2[1418],simde_mm_xor_si64(c2[1198],simde_mm_xor_si64(c2[110],simde_mm_xor_si64(c2[1654],simde_mm_xor_si64(c2[1434],simde_mm_xor_si64(c2[1676],simde_mm_xor_si64(c2[1458],simde_mm_xor_si64(c2[354],simde_mm_xor_si64(c2[134],simde_mm_xor_si64(c2[1038],simde_mm_xor_si64(c2[374],simde_mm_xor_si64(c2[154],simde_mm_xor_si64(c2[626],simde_mm_xor_si64(c2[1282],simde_mm_xor_si64(c2[398],simde_mm_xor_si64(c2[178],simde_mm_xor_si64(c2[642],simde_mm_xor_si64(c2[1083],simde_mm_xor_si64(c2[420],c2[200]))))))))))))))))))))))))))))))))));
 
 //row: 4
-     d2[44]=_mm_xor_si64(c2[1547],_mm_xor_si64(c2[1327],_mm_xor_si64(c2[661],_mm_xor_si64(c2[880],_mm_xor_si64(c2[1330],_mm_xor_si64(c2[468],_mm_xor_si64(c2[248],_mm_xor_si64(c2[471],_mm_xor_si64(c2[1567],_mm_xor_si64(c2[690],_mm_xor_si64(c2[265],_mm_xor_si64(c2[45],_mm_xor_si64(c2[54],_mm_xor_si64(c2[1396],_mm_xor_si64(c2[1176],_mm_xor_si64(c2[733],_mm_xor_si64(c2[1167],_mm_xor_si64(c2[315],_mm_xor_si64(c2[977],_mm_xor_si64(c2[98],_mm_xor_si64(c2[780],_mm_xor_si64(c2[334],_mm_xor_si64(c2[796],_mm_xor_si64(c2[576],_mm_xor_si64(c2[358],_mm_xor_si64(c2[793],_mm_xor_si64(c2[1697],_mm_xor_si64(c2[824],_mm_xor_si64(c2[1285],_mm_xor_si64(c2[182],_mm_xor_si64(c2[837],_mm_xor_si64(c2[1521],_mm_xor_si64(c2[1301],_mm_xor_si64(c2[1742],c2[859]))))))))))))))))))))))))))))))))));
+     d2[44]=simde_mm_xor_si64(c2[1547],simde_mm_xor_si64(c2[1327],simde_mm_xor_si64(c2[661],simde_mm_xor_si64(c2[880],simde_mm_xor_si64(c2[1330],simde_mm_xor_si64(c2[468],simde_mm_xor_si64(c2[248],simde_mm_xor_si64(c2[471],simde_mm_xor_si64(c2[1567],simde_mm_xor_si64(c2[690],simde_mm_xor_si64(c2[265],simde_mm_xor_si64(c2[45],simde_mm_xor_si64(c2[54],simde_mm_xor_si64(c2[1396],simde_mm_xor_si64(c2[1176],simde_mm_xor_si64(c2[733],simde_mm_xor_si64(c2[1167],simde_mm_xor_si64(c2[315],simde_mm_xor_si64(c2[977],simde_mm_xor_si64(c2[98],simde_mm_xor_si64(c2[780],simde_mm_xor_si64(c2[334],simde_mm_xor_si64(c2[796],simde_mm_xor_si64(c2[576],simde_mm_xor_si64(c2[358],simde_mm_xor_si64(c2[793],simde_mm_xor_si64(c2[1697],simde_mm_xor_si64(c2[824],simde_mm_xor_si64(c2[1285],simde_mm_xor_si64(c2[182],simde_mm_xor_si64(c2[837],simde_mm_xor_si64(c2[1521],simde_mm_xor_si64(c2[1301],simde_mm_xor_si64(c2[1742],c2[859]))))))))))))))))))))))))))))))))));
 
 //row: 5
-     d2[55]=_mm_xor_si64(c2[5],_mm_xor_si64(c2[1544],_mm_xor_si64(c2[889],_mm_xor_si64(c2[1108],_mm_xor_si64(c2[1549],_mm_xor_si64(c2[685],_mm_xor_si64(c2[465],_mm_xor_si64(c2[688],_mm_xor_si64(c2[25],_mm_xor_si64(c2[912],_mm_xor_si64(c2[493],_mm_xor_si64(c2[273],_mm_xor_si64(c2[271],_mm_xor_si64(c2[1613],_mm_xor_si64(c2[1393],_mm_xor_si64(c2[950],_mm_xor_si64(c2[1395],_mm_xor_si64(c2[532],_mm_xor_si64(c2[1194],_mm_xor_si64(c2[315],_mm_xor_si64(c2[997],_mm_xor_si64(c2[551],_mm_xor_si64(c2[774],_mm_xor_si64(c2[1013],_mm_xor_si64(c2[793],_mm_xor_si64(c2[575],_mm_xor_si64(c2[1021],_mm_xor_si64(c2[155],_mm_xor_si64(c2[1041],_mm_xor_si64(c2[1695],_mm_xor_si64(c2[1502],_mm_xor_si64(c2[399],_mm_xor_si64(c2[1065],_mm_xor_si64(c2[1738],_mm_xor_si64(c2[1518],_mm_xor_si64(c2[200],c2[1087]))))))))))))))))))))))))))))))))))));
+     d2[55]=simde_mm_xor_si64(c2[5],simde_mm_xor_si64(c2[1544],simde_mm_xor_si64(c2[889],simde_mm_xor_si64(c2[1108],simde_mm_xor_si64(c2[1549],simde_mm_xor_si64(c2[685],simde_mm_xor_si64(c2[465],simde_mm_xor_si64(c2[688],simde_mm_xor_si64(c2[25],simde_mm_xor_si64(c2[912],simde_mm_xor_si64(c2[493],simde_mm_xor_si64(c2[273],simde_mm_xor_si64(c2[271],simde_mm_xor_si64(c2[1613],simde_mm_xor_si64(c2[1393],simde_mm_xor_si64(c2[950],simde_mm_xor_si64(c2[1395],simde_mm_xor_si64(c2[532],simde_mm_xor_si64(c2[1194],simde_mm_xor_si64(c2[315],simde_mm_xor_si64(c2[997],simde_mm_xor_si64(c2[551],simde_mm_xor_si64(c2[774],simde_mm_xor_si64(c2[1013],simde_mm_xor_si64(c2[793],simde_mm_xor_si64(c2[575],simde_mm_xor_si64(c2[1021],simde_mm_xor_si64(c2[155],simde_mm_xor_si64(c2[1041],simde_mm_xor_si64(c2[1695],simde_mm_xor_si64(c2[1502],simde_mm_xor_si64(c2[399],simde_mm_xor_si64(c2[1065],simde_mm_xor_si64(c2[1738],simde_mm_xor_si64(c2[1518],simde_mm_xor_si64(c2[200],c2[1087]))))))))))))))))))))))))))))))))))));
 
 //row: 6
-     d2[66]=_mm_xor_si64(c2[221],_mm_xor_si64(c2[1],_mm_xor_si64(c2[1105],_mm_xor_si64(c2[1324],_mm_xor_si64(c2[1545],_mm_xor_si64(c2[912],_mm_xor_si64(c2[692],_mm_xor_si64(c2[904],_mm_xor_si64(c2[252],_mm_xor_si64(c2[709],_mm_xor_si64(c2[489],_mm_xor_si64(c2[487],_mm_xor_si64(c2[70],_mm_xor_si64(c2[1609],_mm_xor_si64(c2[1166],_mm_xor_si64(c2[1611],_mm_xor_si64(c2[748],_mm_xor_si64(c2[1410],_mm_xor_si64(c2[531],_mm_xor_si64(c2[1213],_mm_xor_si64(c2[778],_mm_xor_si64(c2[558],_mm_xor_si64(c2[1240],_mm_xor_si64(c2[1020],_mm_xor_si64(c2[802],_mm_xor_si64(c2[1237],_mm_xor_si64(c2[382],_mm_xor_si64(c2[1257],_mm_xor_si64(c2[595],_mm_xor_si64(c2[1718],_mm_xor_si64(c2[626],_mm_xor_si64(c2[1281],_mm_xor_si64(c2[206],_mm_xor_si64(c2[1745],_mm_xor_si64(c2[427],_mm_xor_si64(c2[1303],c2[866]))))))))))))))))))))))))))))))))))));
+     d2[66]=simde_mm_xor_si64(c2[221],simde_mm_xor_si64(c2[1],simde_mm_xor_si64(c2[1105],simde_mm_xor_si64(c2[1324],simde_mm_xor_si64(c2[1545],simde_mm_xor_si64(c2[912],simde_mm_xor_si64(c2[692],simde_mm_xor_si64(c2[904],simde_mm_xor_si64(c2[252],simde_mm_xor_si64(c2[709],simde_mm_xor_si64(c2[489],simde_mm_xor_si64(c2[487],simde_mm_xor_si64(c2[70],simde_mm_xor_si64(c2[1609],simde_mm_xor_si64(c2[1166],simde_mm_xor_si64(c2[1611],simde_mm_xor_si64(c2[748],simde_mm_xor_si64(c2[1410],simde_mm_xor_si64(c2[531],simde_mm_xor_si64(c2[1213],simde_mm_xor_si64(c2[778],simde_mm_xor_si64(c2[558],simde_mm_xor_si64(c2[1240],simde_mm_xor_si64(c2[1020],simde_mm_xor_si64(c2[802],simde_mm_xor_si64(c2[1237],simde_mm_xor_si64(c2[382],simde_mm_xor_si64(c2[1257],simde_mm_xor_si64(c2[595],simde_mm_xor_si64(c2[1718],simde_mm_xor_si64(c2[626],simde_mm_xor_si64(c2[1281],simde_mm_xor_si64(c2[206],simde_mm_xor_si64(c2[1745],simde_mm_xor_si64(c2[427],simde_mm_xor_si64(c2[1303],c2[866]))))))))))))))))))))))))))))))))))));
 
 //row: 7
-     d2[77]=_mm_xor_si64(c2[663],_mm_xor_si64(c2[443],_mm_xor_si64(c2[444],_mm_xor_si64(c2[1547],_mm_xor_si64(c2[1548],_mm_xor_si64(c2[7],_mm_xor_si64(c2[8],_mm_xor_si64(c2[1343],_mm_xor_si64(c2[1123],_mm_xor_si64(c2[1124],_mm_xor_si64(c2[1346],_mm_xor_si64(c2[1347],_mm_xor_si64(c2[683],_mm_xor_si64(c2[904],_mm_xor_si64(c2[684],_mm_xor_si64(c2[28],_mm_xor_si64(c2[1151],_mm_xor_si64(c2[931],_mm_xor_si64(c2[932],_mm_xor_si64(c2[929],_mm_xor_si64(c2[1150],_mm_xor_si64(c2[930],_mm_xor_si64(c2[512],_mm_xor_si64(c2[292],_mm_xor_si64(c2[293],_mm_xor_si64(c2[1608],_mm_xor_si64(c2[1609],_mm_xor_si64(c2[294],_mm_xor_si64(c2[295],_mm_xor_si64(c2[1190],_mm_xor_si64(c2[1191],_mm_xor_si64(c2[93],_mm_xor_si64(c2[94],_mm_xor_si64(c2[973],_mm_xor_si64(c2[1194],_mm_xor_si64(c2[974],_mm_xor_si64(c2[1655],_mm_xor_si64(c2[1656],_mm_xor_si64(c2[1220],_mm_xor_si64(c2[1430],_mm_xor_si64(c2[1210],_mm_xor_si64(c2[1214],_mm_xor_si64(c2[1682],_mm_xor_si64(c2[1462],_mm_xor_si64(c2[1452],_mm_xor_si64(c2[1233],_mm_xor_si64(c2[1234],_mm_xor_si64(c2[1679],_mm_xor_si64(c2[141],_mm_xor_si64(c2[1680],_mm_xor_si64(c2[824],_mm_xor_si64(c2[814],_mm_xor_si64(c2[1699],_mm_xor_si64(c2[161],_mm_xor_si64(c2[1700],_mm_xor_si64(c2[157],_mm_xor_si64(c2[401],_mm_xor_si64(c2[402],_mm_xor_si64(c2[1057],_mm_xor_si64(c2[1058],_mm_xor_si64(c2[1723],_mm_xor_si64(c2[185],_mm_xor_si64(c2[1724],_mm_xor_si64(c2[648],_mm_xor_si64(c2[428],_mm_xor_si64(c2[418],_mm_xor_si64(c2[858],_mm_xor_si64(c2[859],_mm_xor_si64(c2[1745],_mm_xor_si64(c2[207],c2[1746]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+     d2[77]=simde_mm_xor_si64(c2[663],simde_mm_xor_si64(c2[443],simde_mm_xor_si64(c2[444],simde_mm_xor_si64(c2[1547],simde_mm_xor_si64(c2[1548],simde_mm_xor_si64(c2[7],simde_mm_xor_si64(c2[8],simde_mm_xor_si64(c2[1343],simde_mm_xor_si64(c2[1123],simde_mm_xor_si64(c2[1124],simde_mm_xor_si64(c2[1346],simde_mm_xor_si64(c2[1347],simde_mm_xor_si64(c2[683],simde_mm_xor_si64(c2[904],simde_mm_xor_si64(c2[684],simde_mm_xor_si64(c2[28],simde_mm_xor_si64(c2[1151],simde_mm_xor_si64(c2[931],simde_mm_xor_si64(c2[932],simde_mm_xor_si64(c2[929],simde_mm_xor_si64(c2[1150],simde_mm_xor_si64(c2[930],simde_mm_xor_si64(c2[512],simde_mm_xor_si64(c2[292],simde_mm_xor_si64(c2[293],simde_mm_xor_si64(c2[1608],simde_mm_xor_si64(c2[1609],simde_mm_xor_si64(c2[294],simde_mm_xor_si64(c2[295],simde_mm_xor_si64(c2[1190],simde_mm_xor_si64(c2[1191],simde_mm_xor_si64(c2[93],simde_mm_xor_si64(c2[94],simde_mm_xor_si64(c2[973],simde_mm_xor_si64(c2[1194],simde_mm_xor_si64(c2[974],simde_mm_xor_si64(c2[1655],simde_mm_xor_si64(c2[1656],simde_mm_xor_si64(c2[1220],simde_mm_xor_si64(c2[1430],simde_mm_xor_si64(c2[1210],simde_mm_xor_si64(c2[1214],simde_mm_xor_si64(c2[1682],simde_mm_xor_si64(c2[1462],simde_mm_xor_si64(c2[1452],simde_mm_xor_si64(c2[1233],simde_mm_xor_si64(c2[1234],simde_mm_xor_si64(c2[1679],simde_mm_xor_si64(c2[141],simde_mm_xor_si64(c2[1680],simde_mm_xor_si64(c2[824],simde_mm_xor_si64(c2[814],simde_mm_xor_si64(c2[1699],simde_mm_xor_si64(c2[161],simde_mm_xor_si64(c2[1700],simde_mm_xor_si64(c2[157],simde_mm_xor_si64(c2[401],simde_mm_xor_si64(c2[402],simde_mm_xor_si64(c2[1057],simde_mm_xor_si64(c2[1058],simde_mm_xor_si64(c2[1723],simde_mm_xor_si64(c2[185],simde_mm_xor_si64(c2[1724],simde_mm_xor_si64(c2[648],simde_mm_xor_si64(c2[428],simde_mm_xor_si64(c2[418],simde_mm_xor_si64(c2[858],simde_mm_xor_si64(c2[859],simde_mm_xor_si64(c2[1745],simde_mm_xor_si64(c2[207],c2[1746]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
 
 //row: 8
-     d2[88]=_mm_xor_si64(c2[1106],_mm_xor_si64(c2[886],_mm_xor_si64(c2[440],_mm_xor_si64(c2[220],_mm_xor_si64(c2[450],_mm_xor_si64(c2[1106],_mm_xor_si64(c2[27],_mm_xor_si64(c2[1566],_mm_xor_si64(c2[30],_mm_xor_si64(c2[1126],_mm_xor_si64(c2[1565],_mm_xor_si64(c2[1594],_mm_xor_si64(c2[1374],_mm_xor_si64(c2[1372],_mm_xor_si64(c2[955],_mm_xor_si64(c2[735],_mm_xor_si64(c2[512],_mm_xor_si64(c2[292],_mm_xor_si64(c2[726],_mm_xor_si64(c2[94],_mm_xor_si64(c2[1633],_mm_xor_si64(c2[536],_mm_xor_si64(c2[1416],_mm_xor_si64(c2[559],_mm_xor_si64(c2[339],_mm_xor_si64(c2[1652],_mm_xor_si64(c2[355],_mm_xor_si64(c2[135],_mm_xor_si64(c2[137],_mm_xor_si64(c2[1676],_mm_xor_si64(c2[352],_mm_xor_si64(c2[1476],_mm_xor_si64(c2[1256],_mm_xor_si64(c2[383],_mm_xor_si64(c2[1064],_mm_xor_si64(c2[844],_mm_xor_si64(c2[1500],_mm_xor_si64(c2[396],_mm_xor_si64(c2[1080],_mm_xor_si64(c2[860],_mm_xor_si64(c2[1521],_mm_xor_si64(c2[1301],c2[418]))))))))))))))))))))))))))))))))))))))))));
+     d2[88]=simde_mm_xor_si64(c2[1106],simde_mm_xor_si64(c2[886],simde_mm_xor_si64(c2[440],simde_mm_xor_si64(c2[220],simde_mm_xor_si64(c2[450],simde_mm_xor_si64(c2[1106],simde_mm_xor_si64(c2[27],simde_mm_xor_si64(c2[1566],simde_mm_xor_si64(c2[30],simde_mm_xor_si64(c2[1126],simde_mm_xor_si64(c2[1565],simde_mm_xor_si64(c2[1594],simde_mm_xor_si64(c2[1374],simde_mm_xor_si64(c2[1372],simde_mm_xor_si64(c2[955],simde_mm_xor_si64(c2[735],simde_mm_xor_si64(c2[512],simde_mm_xor_si64(c2[292],simde_mm_xor_si64(c2[726],simde_mm_xor_si64(c2[94],simde_mm_xor_si64(c2[1633],simde_mm_xor_si64(c2[536],simde_mm_xor_si64(c2[1416],simde_mm_xor_si64(c2[559],simde_mm_xor_si64(c2[339],simde_mm_xor_si64(c2[1652],simde_mm_xor_si64(c2[355],simde_mm_xor_si64(c2[135],simde_mm_xor_si64(c2[137],simde_mm_xor_si64(c2[1676],simde_mm_xor_si64(c2[352],simde_mm_xor_si64(c2[1476],simde_mm_xor_si64(c2[1256],simde_mm_xor_si64(c2[383],simde_mm_xor_si64(c2[1064],simde_mm_xor_si64(c2[844],simde_mm_xor_si64(c2[1500],simde_mm_xor_si64(c2[396],simde_mm_xor_si64(c2[1080],simde_mm_xor_si64(c2[860],simde_mm_xor_si64(c2[1521],simde_mm_xor_si64(c2[1301],c2[418]))))))))))))))))))))))))))))))))))))))))));
 
 //row: 9
-     d2[99]=_mm_xor_si64(c2[1329],_mm_xor_si64(c2[885],_mm_xor_si64(c2[665],_mm_xor_si64(c2[663],_mm_xor_si64(c2[10],_mm_xor_si64(c2[882],_mm_xor_si64(c2[229],_mm_xor_si64(c2[250],_mm_xor_si64(c2[1565],_mm_xor_si64(c2[1345],_mm_xor_si64(c2[462],_mm_xor_si64(c2[1568],_mm_xor_si64(c2[1569],_mm_xor_si64(c2[905],_mm_xor_si64(c2[24],_mm_xor_si64(c2[47],_mm_xor_si64(c2[1373],_mm_xor_si64(c2[1153],_mm_xor_si64(c2[45],_mm_xor_si64(c2[1151],_mm_xor_si64(c2[1167],_mm_xor_si64(c2[734],_mm_xor_si64(c2[514],_mm_xor_si64(c2[735],_mm_xor_si64(c2[71],_mm_xor_si64(c2[1169],_mm_xor_si64(c2[516],_mm_xor_si64(c2[317],_mm_xor_si64(c2[1412],_mm_xor_si64(c2[968],_mm_xor_si64(c2[315],_mm_xor_si64(c2[89],_mm_xor_si64(c2[1195],_mm_xor_si64(c2[771],_mm_xor_si64(c2[118],_mm_xor_si64(c2[336],_mm_xor_si64(c2[1431],_mm_xor_si64(c2[578],_mm_xor_si64(c2[134],_mm_xor_si64(c2[1673],_mm_xor_si64(c2[360],_mm_xor_si64(c2[1455],_mm_xor_si64(c2[795],_mm_xor_si64(c2[142],_mm_xor_si64(c2[1699],_mm_xor_si64(c2[1035],_mm_xor_si64(c2[815],_mm_xor_si64(c2[162],_mm_xor_si64(c2[1276],_mm_xor_si64(c2[623],_mm_xor_si64(c2[184],_mm_xor_si64(c2[1279],_mm_xor_si64(c2[839],_mm_xor_si64(c2[186],_mm_xor_si64(c2[1502],_mm_xor_si64(c2[1303],_mm_xor_si64(c2[859],_mm_xor_si64(c2[639],_mm_xor_si64(c2[1744],_mm_xor_si64(c2[1080],_mm_xor_si64(c2[861],c2[208])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+     d2[99]=simde_mm_xor_si64(c2[1329],simde_mm_xor_si64(c2[885],simde_mm_xor_si64(c2[665],simde_mm_xor_si64(c2[663],simde_mm_xor_si64(c2[10],simde_mm_xor_si64(c2[882],simde_mm_xor_si64(c2[229],simde_mm_xor_si64(c2[250],simde_mm_xor_si64(c2[1565],simde_mm_xor_si64(c2[1345],simde_mm_xor_si64(c2[462],simde_mm_xor_si64(c2[1568],simde_mm_xor_si64(c2[1569],simde_mm_xor_si64(c2[905],simde_mm_xor_si64(c2[24],simde_mm_xor_si64(c2[47],simde_mm_xor_si64(c2[1373],simde_mm_xor_si64(c2[1153],simde_mm_xor_si64(c2[45],simde_mm_xor_si64(c2[1151],simde_mm_xor_si64(c2[1167],simde_mm_xor_si64(c2[734],simde_mm_xor_si64(c2[514],simde_mm_xor_si64(c2[735],simde_mm_xor_si64(c2[71],simde_mm_xor_si64(c2[1169],simde_mm_xor_si64(c2[516],simde_mm_xor_si64(c2[317],simde_mm_xor_si64(c2[1412],simde_mm_xor_si64(c2[968],simde_mm_xor_si64(c2[315],simde_mm_xor_si64(c2[89],simde_mm_xor_si64(c2[1195],simde_mm_xor_si64(c2[771],simde_mm_xor_si64(c2[118],simde_mm_xor_si64(c2[336],simde_mm_xor_si64(c2[1431],simde_mm_xor_si64(c2[578],simde_mm_xor_si64(c2[134],simde_mm_xor_si64(c2[1673],simde_mm_xor_si64(c2[360],simde_mm_xor_si64(c2[1455],simde_mm_xor_si64(c2[795],simde_mm_xor_si64(c2[142],simde_mm_xor_si64(c2[1699],simde_mm_xor_si64(c2[1035],simde_mm_xor_si64(c2[815],simde_mm_xor_si64(c2[162],simde_mm_xor_si64(c2[1276],simde_mm_xor_si64(c2[623],simde_mm_xor_si64(c2[184],simde_mm_xor_si64(c2[1279],simde_mm_xor_si64(c2[839],simde_mm_xor_si64(c2[186],simde_mm_xor_si64(c2[1502],simde_mm_xor_si64(c2[1303],simde_mm_xor_si64(c2[859],simde_mm_xor_si64(c2[639],simde_mm_xor_si64(c2[1744],simde_mm_xor_si64(c2[1080],simde_mm_xor_si64(c2[861],c2[208])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
 
 //row: 10
-     d2[110]=_mm_xor_si64(c2[1542],_mm_xor_si64(c2[245],_mm_xor_si64(c2[1462],c2[1696])));
+     d2[110]=simde_mm_xor_si64(c2[1542],simde_mm_xor_si64(c2[245],simde_mm_xor_si64(c2[1462],c2[1696])));
 
 //row: 11
-     d2[121]=_mm_xor_si64(c2[4],_mm_xor_si64(c2[1108],_mm_xor_si64(c2[1327],_mm_xor_si64(c2[660],_mm_xor_si64(c2[684],_mm_xor_si64(c2[907],_mm_xor_si64(c2[464],_mm_xor_si64(c2[244],_mm_xor_si64(c2[492],_mm_xor_si64(c2[710],_mm_xor_si64(c2[490],_mm_xor_si64(c2[1612],_mm_xor_si64(c2[1169],_mm_xor_si64(c2[1614],_mm_xor_si64(c2[751],_mm_xor_si64(c2[1413],_mm_xor_si64(c2[754],_mm_xor_si64(c2[534],_mm_xor_si64(c2[1216],_mm_xor_si64(c2[990],_mm_xor_si64(c2[770],_mm_xor_si64(c2[1012],_mm_xor_si64(c2[794],_mm_xor_si64(c2[1460],_mm_xor_si64(c2[1240],_mm_xor_si64(c2[374],_mm_xor_si64(c2[1480],_mm_xor_si64(c2[1260],_mm_xor_si64(c2[1704],_mm_xor_si64(c2[1721],_mm_xor_si64(c2[618],_mm_xor_si64(c2[1504],_mm_xor_si64(c2[1284],_mm_xor_si64(c2[1748],_mm_xor_si64(c2[419],_mm_xor_si64(c2[1526],_mm_xor_si64(c2[1306],c2[201])))))))))))))))))))))))))))))))))))));
+     d2[121]=simde_mm_xor_si64(c2[4],simde_mm_xor_si64(c2[1108],simde_mm_xor_si64(c2[1327],simde_mm_xor_si64(c2[660],simde_mm_xor_si64(c2[684],simde_mm_xor_si64(c2[907],simde_mm_xor_si64(c2[464],simde_mm_xor_si64(c2[244],simde_mm_xor_si64(c2[492],simde_mm_xor_si64(c2[710],simde_mm_xor_si64(c2[490],simde_mm_xor_si64(c2[1612],simde_mm_xor_si64(c2[1169],simde_mm_xor_si64(c2[1614],simde_mm_xor_si64(c2[751],simde_mm_xor_si64(c2[1413],simde_mm_xor_si64(c2[754],simde_mm_xor_si64(c2[534],simde_mm_xor_si64(c2[1216],simde_mm_xor_si64(c2[990],simde_mm_xor_si64(c2[770],simde_mm_xor_si64(c2[1012],simde_mm_xor_si64(c2[794],simde_mm_xor_si64(c2[1460],simde_mm_xor_si64(c2[1240],simde_mm_xor_si64(c2[374],simde_mm_xor_si64(c2[1480],simde_mm_xor_si64(c2[1260],simde_mm_xor_si64(c2[1704],simde_mm_xor_si64(c2[1721],simde_mm_xor_si64(c2[618],simde_mm_xor_si64(c2[1504],simde_mm_xor_si64(c2[1284],simde_mm_xor_si64(c2[1748],simde_mm_xor_si64(c2[419],simde_mm_xor_si64(c2[1526],simde_mm_xor_si64(c2[1306],c2[201])))))))))))))))))))))))))))))))))))));
 
 //row: 12
-     d2[132]=_mm_xor_si64(c2[9],_mm_xor_si64(c2[1548],_mm_xor_si64(c2[882],_mm_xor_si64(c2[1101],_mm_xor_si64(c2[689],_mm_xor_si64(c2[469],_mm_xor_si64(c2[692],_mm_xor_si64(c2[29],_mm_xor_si64(c2[466],_mm_xor_si64(c2[486],_mm_xor_si64(c2[266],_mm_xor_si64(c2[264],_mm_xor_si64(c2[1606],_mm_xor_si64(c2[1386],_mm_xor_si64(c2[954],_mm_xor_si64(c2[1388],_mm_xor_si64(c2[727],_mm_xor_si64(c2[536],_mm_xor_si64(c2[1198],_mm_xor_si64(c2[308],_mm_xor_si64(c2[990],_mm_xor_si64(c2[555],_mm_xor_si64(c2[1017],_mm_xor_si64(c2[797],_mm_xor_si64(c2[579],_mm_xor_si64(c2[1014],_mm_xor_si64(c2[159],_mm_xor_si64(c2[1034],_mm_xor_si64(c2[1506],_mm_xor_si64(c2[403],_mm_xor_si64(c2[1058],_mm_xor_si64(c2[1742],_mm_xor_si64(c2[1522],_mm_xor_si64(c2[204],c2[1080]))))))))))))))))))))))))))))))))));
+     d2[132]=simde_mm_xor_si64(c2[9],simde_mm_xor_si64(c2[1548],simde_mm_xor_si64(c2[882],simde_mm_xor_si64(c2[1101],simde_mm_xor_si64(c2[689],simde_mm_xor_si64(c2[469],simde_mm_xor_si64(c2[692],simde_mm_xor_si64(c2[29],simde_mm_xor_si64(c2[466],simde_mm_xor_si64(c2[486],simde_mm_xor_si64(c2[266],simde_mm_xor_si64(c2[264],simde_mm_xor_si64(c2[1606],simde_mm_xor_si64(c2[1386],simde_mm_xor_si64(c2[954],simde_mm_xor_si64(c2[1388],simde_mm_xor_si64(c2[727],simde_mm_xor_si64(c2[536],simde_mm_xor_si64(c2[1198],simde_mm_xor_si64(c2[308],simde_mm_xor_si64(c2[990],simde_mm_xor_si64(c2[555],simde_mm_xor_si64(c2[1017],simde_mm_xor_si64(c2[797],simde_mm_xor_si64(c2[579],simde_mm_xor_si64(c2[1014],simde_mm_xor_si64(c2[159],simde_mm_xor_si64(c2[1034],simde_mm_xor_si64(c2[1506],simde_mm_xor_si64(c2[403],simde_mm_xor_si64(c2[1058],simde_mm_xor_si64(c2[1742],simde_mm_xor_si64(c2[1522],simde_mm_xor_si64(c2[204],c2[1080]))))))))))))))))))))))))))))))))));
 
 //row: 13
-     d2[143]=_mm_xor_si64(c2[442],_mm_xor_si64(c2[1546],_mm_xor_si64(c2[6],_mm_xor_si64(c2[1103],_mm_xor_si64(c2[1122],_mm_xor_si64(c2[1345],_mm_xor_si64(c2[902],_mm_xor_si64(c2[682],_mm_xor_si64(c2[682],_mm_xor_si64(c2[930],_mm_xor_si64(c2[1148],_mm_xor_si64(c2[928],_mm_xor_si64(c2[291],_mm_xor_si64(c2[1607],_mm_xor_si64(c2[293],_mm_xor_si64(c2[1189],_mm_xor_si64(c2[92],_mm_xor_si64(c2[1192],_mm_xor_si64(c2[972],_mm_xor_si64(c2[1654],_mm_xor_si64(c2[1439],_mm_xor_si64(c2[1219],_mm_xor_si64(c2[1461],_mm_xor_si64(c2[1232],_mm_xor_si64(c2[139],_mm_xor_si64(c2[1678],_mm_xor_si64(c2[823],_mm_xor_si64(c2[159],_mm_xor_si64(c2[1698],_mm_xor_si64(c2[400],_mm_xor_si64(c2[1056],_mm_xor_si64(c2[183],_mm_xor_si64(c2[1722],_mm_xor_si64(c2[839],_mm_xor_si64(c2[427],_mm_xor_si64(c2[868],_mm_xor_si64(c2[205],c2[1744])))))))))))))))))))))))))))))))))))));
+     d2[143]=simde_mm_xor_si64(c2[442],simde_mm_xor_si64(c2[1546],simde_mm_xor_si64(c2[6],simde_mm_xor_si64(c2[1103],simde_mm_xor_si64(c2[1122],simde_mm_xor_si64(c2[1345],simde_mm_xor_si64(c2[902],simde_mm_xor_si64(c2[682],simde_mm_xor_si64(c2[682],simde_mm_xor_si64(c2[930],simde_mm_xor_si64(c2[1148],simde_mm_xor_si64(c2[928],simde_mm_xor_si64(c2[291],simde_mm_xor_si64(c2[1607],simde_mm_xor_si64(c2[293],simde_mm_xor_si64(c2[1189],simde_mm_xor_si64(c2[92],simde_mm_xor_si64(c2[1192],simde_mm_xor_si64(c2[972],simde_mm_xor_si64(c2[1654],simde_mm_xor_si64(c2[1439],simde_mm_xor_si64(c2[1219],simde_mm_xor_si64(c2[1461],simde_mm_xor_si64(c2[1232],simde_mm_xor_si64(c2[139],simde_mm_xor_si64(c2[1678],simde_mm_xor_si64(c2[823],simde_mm_xor_si64(c2[159],simde_mm_xor_si64(c2[1698],simde_mm_xor_si64(c2[400],simde_mm_xor_si64(c2[1056],simde_mm_xor_si64(c2[183],simde_mm_xor_si64(c2[1722],simde_mm_xor_si64(c2[839],simde_mm_xor_si64(c2[427],simde_mm_xor_si64(c2[868],simde_mm_xor_si64(c2[205],c2[1744])))))))))))))))))))))))))))))))))))));
 
 //row: 14
-     d2[154]=_mm_xor_si64(c2[889],_mm_xor_si64(c2[669],_mm_xor_si64(c2[227],_mm_xor_si64(c2[3],_mm_xor_si64(c2[1320],_mm_xor_si64(c2[222],_mm_xor_si64(c2[1550],_mm_xor_si64(c2[1569],_mm_xor_si64(c2[1349],_mm_xor_si64(c2[907],_mm_xor_si64(c2[1572],_mm_xor_si64(c2[1130],_mm_xor_si64(c2[909],_mm_xor_si64(c2[687],_mm_xor_si64(c2[467],_mm_xor_si64(c2[683],_mm_xor_si64(c2[1366],_mm_xor_si64(c2[1146],_mm_xor_si64(c2[704],_mm_xor_si64(c2[1144],_mm_xor_si64(c2[933],_mm_xor_si64(c2[713],_mm_xor_si64(c2[727],_mm_xor_si64(c2[507],_mm_xor_si64(c2[76],_mm_xor_si64(c2[75],_mm_xor_si64(c2[1392],_mm_xor_si64(c2[509],_mm_xor_si64(c2[67],_mm_xor_si64(c2[1416],_mm_xor_si64(c2[974],_mm_xor_si64(c2[308],_mm_xor_si64(c2[1636],_mm_xor_si64(c2[1188],_mm_xor_si64(c2[977],_mm_xor_si64(c2[757],_mm_xor_si64(c2[111],_mm_xor_si64(c2[1439],_mm_xor_si64(c2[1435],_mm_xor_si64(c2[1213],_mm_xor_si64(c2[993],_mm_xor_si64(c2[138],_mm_xor_si64(c2[1677],_mm_xor_si64(c2[1235],_mm_xor_si64(c2[1459],_mm_xor_si64(c2[1017],_mm_xor_si64(c2[135],_mm_xor_si64(c2[1672],_mm_xor_si64(c2[1452],_mm_xor_si64(c2[359],_mm_xor_si64(c2[1039],_mm_xor_si64(c2[597],_mm_xor_si64(c2[155],_mm_xor_si64(c2[1703],_mm_xor_si64(c2[1483],_mm_xor_si64(c2[616],_mm_xor_si64(c2[185],_mm_xor_si64(c2[1283],_mm_xor_si64(c2[841],_mm_xor_si64(c2[179],_mm_xor_si64(c2[1716],_mm_xor_si64(c2[1496],_mm_xor_si64(c2[863],_mm_xor_si64(c2[643],_mm_xor_si64(c2[201],_mm_xor_si64(c2[1084],_mm_xor_si64(c2[642],_mm_xor_si64(c2[201],_mm_xor_si64(c2[1738],c2[1518])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+     d2[154]=simde_mm_xor_si64(c2[889],simde_mm_xor_si64(c2[669],simde_mm_xor_si64(c2[227],simde_mm_xor_si64(c2[3],simde_mm_xor_si64(c2[1320],simde_mm_xor_si64(c2[222],simde_mm_xor_si64(c2[1550],simde_mm_xor_si64(c2[1569],simde_mm_xor_si64(c2[1349],simde_mm_xor_si64(c2[907],simde_mm_xor_si64(c2[1572],simde_mm_xor_si64(c2[1130],simde_mm_xor_si64(c2[909],simde_mm_xor_si64(c2[687],simde_mm_xor_si64(c2[467],simde_mm_xor_si64(c2[683],simde_mm_xor_si64(c2[1366],simde_mm_xor_si64(c2[1146],simde_mm_xor_si64(c2[704],simde_mm_xor_si64(c2[1144],simde_mm_xor_si64(c2[933],simde_mm_xor_si64(c2[713],simde_mm_xor_si64(c2[727],simde_mm_xor_si64(c2[507],simde_mm_xor_si64(c2[76],simde_mm_xor_si64(c2[75],simde_mm_xor_si64(c2[1392],simde_mm_xor_si64(c2[509],simde_mm_xor_si64(c2[67],simde_mm_xor_si64(c2[1416],simde_mm_xor_si64(c2[974],simde_mm_xor_si64(c2[308],simde_mm_xor_si64(c2[1636],simde_mm_xor_si64(c2[1188],simde_mm_xor_si64(c2[977],simde_mm_xor_si64(c2[757],simde_mm_xor_si64(c2[111],simde_mm_xor_si64(c2[1439],simde_mm_xor_si64(c2[1435],simde_mm_xor_si64(c2[1213],simde_mm_xor_si64(c2[993],simde_mm_xor_si64(c2[138],simde_mm_xor_si64(c2[1677],simde_mm_xor_si64(c2[1235],simde_mm_xor_si64(c2[1459],simde_mm_xor_si64(c2[1017],simde_mm_xor_si64(c2[135],simde_mm_xor_si64(c2[1672],simde_mm_xor_si64(c2[1452],simde_mm_xor_si64(c2[359],simde_mm_xor_si64(c2[1039],simde_mm_xor_si64(c2[597],simde_mm_xor_si64(c2[155],simde_mm_xor_si64(c2[1703],simde_mm_xor_si64(c2[1483],simde_mm_xor_si64(c2[616],simde_mm_xor_si64(c2[185],simde_mm_xor_si64(c2[1283],simde_mm_xor_si64(c2[841],simde_mm_xor_si64(c2[179],simde_mm_xor_si64(c2[1716],simde_mm_xor_si64(c2[1496],simde_mm_xor_si64(c2[863],simde_mm_xor_si64(c2[643],simde_mm_xor_si64(c2[201],simde_mm_xor_si64(c2[1084],simde_mm_xor_si64(c2[642],simde_mm_xor_si64(c2[201],simde_mm_xor_si64(c2[1738],c2[1518])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
 
 //row: 15
-     d2[165]=_mm_xor_si64(c2[450],_mm_xor_si64(c2[882],_mm_xor_si64(c2[662],_mm_xor_si64(c2[1543],_mm_xor_si64(c2[7],_mm_xor_si64(c2[3],_mm_xor_si64(c2[226],_mm_xor_si64(c2[226],_mm_xor_si64(c2[1130],_mm_xor_si64(c2[1562],_mm_xor_si64(c2[1342],_mm_xor_si64(c2[1342],_mm_xor_si64(c2[1565],_mm_xor_si64(c2[690],_mm_xor_si64(c2[902],_mm_xor_si64(c2[927],_mm_xor_si64(c2[1370],_mm_xor_si64(c2[1150],_mm_xor_si64(c2[925],_mm_xor_si64(c2[1148],_mm_xor_si64(c2[288],_mm_xor_si64(c2[731],_mm_xor_si64(c2[511],_mm_xor_si64(c2[1615],_mm_xor_si64(c2[68],_mm_xor_si64(c2[290],_mm_xor_si64(c2[513],_mm_xor_si64(c2[1197],_mm_xor_si64(c2[1409],_mm_xor_si64(c2[89],_mm_xor_si64(c2[312],_mm_xor_si64(c2[969],_mm_xor_si64(c2[1192],_mm_xor_si64(c2[1651],_mm_xor_si64(c2[115],_mm_xor_si64(c2[1216],_mm_xor_si64(c2[1439],_mm_xor_si64(c2[1458],_mm_xor_si64(c2[142],_mm_xor_si64(c2[1681],_mm_xor_si64(c2[1240],_mm_xor_si64(c2[1452],_mm_xor_si64(c2[1675],_mm_xor_si64(c2[139],_mm_xor_si64(c2[820],_mm_xor_si64(c2[1043],_mm_xor_si64(c2[1695],_mm_xor_si64(c2[159],_mm_xor_si64(c2[397],_mm_xor_si64(c2[620],_mm_xor_si64(c2[1064],_mm_xor_si64(c2[1276],_mm_xor_si64(c2[1719],_mm_xor_si64(c2[183],_mm_xor_si64(c2[424],_mm_xor_si64(c2[867],_mm_xor_si64(c2[647],_mm_xor_si64(c2[865],_mm_xor_si64(c2[1088],_mm_xor_si64(c2[1741],c2[205]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+     d2[165]=simde_mm_xor_si64(c2[450],simde_mm_xor_si64(c2[882],simde_mm_xor_si64(c2[662],simde_mm_xor_si64(c2[1543],simde_mm_xor_si64(c2[7],simde_mm_xor_si64(c2[3],simde_mm_xor_si64(c2[226],simde_mm_xor_si64(c2[226],simde_mm_xor_si64(c2[1130],simde_mm_xor_si64(c2[1562],simde_mm_xor_si64(c2[1342],simde_mm_xor_si64(c2[1342],simde_mm_xor_si64(c2[1565],simde_mm_xor_si64(c2[690],simde_mm_xor_si64(c2[902],simde_mm_xor_si64(c2[927],simde_mm_xor_si64(c2[1370],simde_mm_xor_si64(c2[1150],simde_mm_xor_si64(c2[925],simde_mm_xor_si64(c2[1148],simde_mm_xor_si64(c2[288],simde_mm_xor_si64(c2[731],simde_mm_xor_si64(c2[511],simde_mm_xor_si64(c2[1615],simde_mm_xor_si64(c2[68],simde_mm_xor_si64(c2[290],simde_mm_xor_si64(c2[513],simde_mm_xor_si64(c2[1197],simde_mm_xor_si64(c2[1409],simde_mm_xor_si64(c2[89],simde_mm_xor_si64(c2[312],simde_mm_xor_si64(c2[969],simde_mm_xor_si64(c2[1192],simde_mm_xor_si64(c2[1651],simde_mm_xor_si64(c2[115],simde_mm_xor_si64(c2[1216],simde_mm_xor_si64(c2[1439],simde_mm_xor_si64(c2[1458],simde_mm_xor_si64(c2[142],simde_mm_xor_si64(c2[1681],simde_mm_xor_si64(c2[1240],simde_mm_xor_si64(c2[1452],simde_mm_xor_si64(c2[1675],simde_mm_xor_si64(c2[139],simde_mm_xor_si64(c2[820],simde_mm_xor_si64(c2[1043],simde_mm_xor_si64(c2[1695],simde_mm_xor_si64(c2[159],simde_mm_xor_si64(c2[397],simde_mm_xor_si64(c2[620],simde_mm_xor_si64(c2[1064],simde_mm_xor_si64(c2[1276],simde_mm_xor_si64(c2[1719],simde_mm_xor_si64(c2[183],simde_mm_xor_si64(c2[424],simde_mm_xor_si64(c2[867],simde_mm_xor_si64(c2[647],simde_mm_xor_si64(c2[865],simde_mm_xor_si64(c2[1088],simde_mm_xor_si64(c2[1741],c2[205]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
 
 //row: 16
-     d2[176]=_mm_xor_si64(c2[1110],_mm_xor_si64(c2[890],_mm_xor_si64(c2[227],_mm_xor_si64(c2[7],_mm_xor_si64(c2[224],_mm_xor_si64(c2[1320],_mm_xor_si64(c2[1100],_mm_xor_si64(c2[443],_mm_xor_si64(c2[1330],_mm_xor_si64(c2[31],_mm_xor_si64(c2[1570],_mm_xor_si64(c2[907],_mm_xor_si64(c2[687],_mm_xor_si64(c2[23],_mm_xor_si64(c2[910],_mm_xor_si64(c2[1130],_mm_xor_si64(c2[247],_mm_xor_si64(c2[1351],_mm_xor_si64(c2[1587],_mm_xor_si64(c2[1367],_mm_xor_si64(c2[704],_mm_xor_si64(c2[484],_mm_xor_si64(c2[1365],_mm_xor_si64(c2[493],_mm_xor_si64(c2[948],_mm_xor_si64(c2[728],_mm_xor_si64(c2[76],_mm_xor_si64(c2[1615],_mm_xor_si64(c2[296],_mm_xor_si64(c2[1392],_mm_xor_si64(c2[1172],_mm_xor_si64(c2[730],_mm_xor_si64(c2[1606],_mm_xor_si64(c2[1637],_mm_xor_si64(c2[974],_mm_xor_si64(c2[754],_mm_xor_si64(c2[529],_mm_xor_si64(c2[1416],_mm_xor_si64(c2[1409],_mm_xor_si64(c2[537],_mm_xor_si64(c2[332],_mm_xor_si64(c2[1439],_mm_xor_si64(c2[1219],_mm_xor_si64(c2[1656],_mm_xor_si64(c2[773],_mm_xor_si64(c2[359],_mm_xor_si64(c2[139],_mm_xor_si64(c2[1235],_mm_xor_si64(c2[1015],_mm_xor_si64(c2[1680],_mm_xor_si64(c2[1017],_mm_xor_si64(c2[797],_mm_xor_si64(c2[356],_mm_xor_si64(c2[1232],_mm_xor_si64(c2[1260],_mm_xor_si64(c2[597],_mm_xor_si64(c2[377],_mm_xor_si64(c2[376],_mm_xor_si64(c2[1263],_mm_xor_si64(c2[837],_mm_xor_si64(c2[185],_mm_xor_si64(c2[1724],_mm_xor_si64(c2[1504],_mm_xor_si64(c2[621],_mm_xor_si64(c2[400],_mm_xor_si64(c2[1276],_mm_xor_si64(c2[1084],_mm_xor_si64(c2[864],_mm_xor_si64(c2[201],_mm_xor_si64(c2[1740],_mm_xor_si64(c2[1305],_mm_xor_si64(c2[642],_mm_xor_si64(c2[422],_mm_xor_si64(c2[422],_mm_xor_si64(c2[1298],c2[1526])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+     d2[176]=simde_mm_xor_si64(c2[1110],simde_mm_xor_si64(c2[890],simde_mm_xor_si64(c2[227],simde_mm_xor_si64(c2[7],simde_mm_xor_si64(c2[224],simde_mm_xor_si64(c2[1320],simde_mm_xor_si64(c2[1100],simde_mm_xor_si64(c2[443],simde_mm_xor_si64(c2[1330],simde_mm_xor_si64(c2[31],simde_mm_xor_si64(c2[1570],simde_mm_xor_si64(c2[907],simde_mm_xor_si64(c2[687],simde_mm_xor_si64(c2[23],simde_mm_xor_si64(c2[910],simde_mm_xor_si64(c2[1130],simde_mm_xor_si64(c2[247],simde_mm_xor_si64(c2[1351],simde_mm_xor_si64(c2[1587],simde_mm_xor_si64(c2[1367],simde_mm_xor_si64(c2[704],simde_mm_xor_si64(c2[484],simde_mm_xor_si64(c2[1365],simde_mm_xor_si64(c2[493],simde_mm_xor_si64(c2[948],simde_mm_xor_si64(c2[728],simde_mm_xor_si64(c2[76],simde_mm_xor_si64(c2[1615],simde_mm_xor_si64(c2[296],simde_mm_xor_si64(c2[1392],simde_mm_xor_si64(c2[1172],simde_mm_xor_si64(c2[730],simde_mm_xor_si64(c2[1606],simde_mm_xor_si64(c2[1637],simde_mm_xor_si64(c2[974],simde_mm_xor_si64(c2[754],simde_mm_xor_si64(c2[529],simde_mm_xor_si64(c2[1416],simde_mm_xor_si64(c2[1409],simde_mm_xor_si64(c2[537],simde_mm_xor_si64(c2[332],simde_mm_xor_si64(c2[1439],simde_mm_xor_si64(c2[1219],simde_mm_xor_si64(c2[1656],simde_mm_xor_si64(c2[773],simde_mm_xor_si64(c2[359],simde_mm_xor_si64(c2[139],simde_mm_xor_si64(c2[1235],simde_mm_xor_si64(c2[1015],simde_mm_xor_si64(c2[1680],simde_mm_xor_si64(c2[1017],simde_mm_xor_si64(c2[797],simde_mm_xor_si64(c2[356],simde_mm_xor_si64(c2[1232],simde_mm_xor_si64(c2[1260],simde_mm_xor_si64(c2[597],simde_mm_xor_si64(c2[377],simde_mm_xor_si64(c2[376],simde_mm_xor_si64(c2[1263],simde_mm_xor_si64(c2[837],simde_mm_xor_si64(c2[185],simde_mm_xor_si64(c2[1724],simde_mm_xor_si64(c2[1504],simde_mm_xor_si64(c2[621],simde_mm_xor_si64(c2[400],simde_mm_xor_si64(c2[1276],simde_mm_xor_si64(c2[1084],simde_mm_xor_si64(c2[864],simde_mm_xor_si64(c2[201],simde_mm_xor_si64(c2[1740],simde_mm_xor_si64(c2[1305],simde_mm_xor_si64(c2[642],simde_mm_xor_si64(c2[422],simde_mm_xor_si64(c2[422],simde_mm_xor_si64(c2[1298],c2[1526])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
 
 //row: 17
-     d2[187]=_mm_xor_si64(c2[445],_mm_xor_si64(c2[225],_mm_xor_si64(c2[1546],_mm_xor_si64(c2[1326],_mm_xor_si64(c2[1329],_mm_xor_si64(c2[880],_mm_xor_si64(c2[660],_mm_xor_si64(c2[1548],_mm_xor_si64(c2[890],_mm_xor_si64(c2[1125],_mm_xor_si64(c2[905],_mm_xor_si64(c2[467],_mm_xor_si64(c2[247],_mm_xor_si64(c2[1128],_mm_xor_si64(c2[470],_mm_xor_si64(c2[465],_mm_xor_si64(c2[1566],_mm_xor_si64(c2[1347],_mm_xor_si64(c2[933],_mm_xor_si64(c2[713],_mm_xor_si64(c2[264],_mm_xor_si64(c2[44],_mm_xor_si64(c2[711],_mm_xor_si64(c2[53],_mm_xor_si64(c2[294],_mm_xor_si64(c2[74],_mm_xor_si64(c2[1395],_mm_xor_si64(c2[1175],_mm_xor_si64(c2[1390],_mm_xor_si64(c2[952],_mm_xor_si64(c2[732],_mm_xor_si64(c2[76],_mm_xor_si64(c2[1166],_mm_xor_si64(c2[972],_mm_xor_si64(c2[534],_mm_xor_si64(c2[314],_mm_xor_si64(c2[1634],_mm_xor_si64(c2[976],_mm_xor_si64(c2[755],_mm_xor_si64(c2[97],_mm_xor_si64(c2[1437],_mm_xor_si64(c2[999],_mm_xor_si64(c2[779],_mm_xor_si64(c2[991],_mm_xor_si64(c2[333],_mm_xor_si64(c2[1652],_mm_xor_si64(c2[1453],_mm_xor_si64(c2[1233],_mm_xor_si64(c2[795],_mm_xor_si64(c2[575],_mm_xor_si64(c2[1015],_mm_xor_si64(c2[577],_mm_xor_si64(c2[357],_mm_xor_si64(c2[1461],_mm_xor_si64(c2[792],_mm_xor_si64(c2[595],_mm_xor_si64(c2[157],_mm_xor_si64(c2[1696],_mm_xor_si64(c2[1481],_mm_xor_si64(c2[823],_mm_xor_si64(c2[183],_mm_xor_si64(c2[1504],_mm_xor_si64(c2[1284],_mm_xor_si64(c2[839],_mm_xor_si64(c2[181],_mm_xor_si64(c2[1505],_mm_xor_si64(c2[836],_mm_xor_si64(c2[419],_mm_xor_si64(c2[199],_mm_xor_si64(c2[1520],_mm_xor_si64(c2[1300],_mm_xor_si64(c2[640],_mm_xor_si64(c2[202],_mm_xor_si64(c2[1741],_mm_xor_si64(c2[1527],c2[858])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+     d2[187]=simde_mm_xor_si64(c2[445],simde_mm_xor_si64(c2[225],simde_mm_xor_si64(c2[1546],simde_mm_xor_si64(c2[1326],simde_mm_xor_si64(c2[1329],simde_mm_xor_si64(c2[880],simde_mm_xor_si64(c2[660],simde_mm_xor_si64(c2[1548],simde_mm_xor_si64(c2[890],simde_mm_xor_si64(c2[1125],simde_mm_xor_si64(c2[905],simde_mm_xor_si64(c2[467],simde_mm_xor_si64(c2[247],simde_mm_xor_si64(c2[1128],simde_mm_xor_si64(c2[470],simde_mm_xor_si64(c2[465],simde_mm_xor_si64(c2[1566],simde_mm_xor_si64(c2[1347],simde_mm_xor_si64(c2[933],simde_mm_xor_si64(c2[713],simde_mm_xor_si64(c2[264],simde_mm_xor_si64(c2[44],simde_mm_xor_si64(c2[711],simde_mm_xor_si64(c2[53],simde_mm_xor_si64(c2[294],simde_mm_xor_si64(c2[74],simde_mm_xor_si64(c2[1395],simde_mm_xor_si64(c2[1175],simde_mm_xor_si64(c2[1390],simde_mm_xor_si64(c2[952],simde_mm_xor_si64(c2[732],simde_mm_xor_si64(c2[76],simde_mm_xor_si64(c2[1166],simde_mm_xor_si64(c2[972],simde_mm_xor_si64(c2[534],simde_mm_xor_si64(c2[314],simde_mm_xor_si64(c2[1634],simde_mm_xor_si64(c2[976],simde_mm_xor_si64(c2[755],simde_mm_xor_si64(c2[97],simde_mm_xor_si64(c2[1437],simde_mm_xor_si64(c2[999],simde_mm_xor_si64(c2[779],simde_mm_xor_si64(c2[991],simde_mm_xor_si64(c2[333],simde_mm_xor_si64(c2[1652],simde_mm_xor_si64(c2[1453],simde_mm_xor_si64(c2[1233],simde_mm_xor_si64(c2[795],simde_mm_xor_si64(c2[575],simde_mm_xor_si64(c2[1015],simde_mm_xor_si64(c2[577],simde_mm_xor_si64(c2[357],simde_mm_xor_si64(c2[1461],simde_mm_xor_si64(c2[792],simde_mm_xor_si64(c2[595],simde_mm_xor_si64(c2[157],simde_mm_xor_si64(c2[1696],simde_mm_xor_si64(c2[1481],simde_mm_xor_si64(c2[823],simde_mm_xor_si64(c2[183],simde_mm_xor_si64(c2[1504],simde_mm_xor_si64(c2[1284],simde_mm_xor_si64(c2[839],simde_mm_xor_si64(c2[181],simde_mm_xor_si64(c2[1505],simde_mm_xor_si64(c2[836],simde_mm_xor_si64(c2[419],simde_mm_xor_si64(c2[199],simde_mm_xor_si64(c2[1520],simde_mm_xor_si64(c2[1300],simde_mm_xor_si64(c2[640],simde_mm_xor_si64(c2[202],simde_mm_xor_si64(c2[1741],simde_mm_xor_si64(c2[1527],c2[858])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
 
 //row: 18
-     d2[198]=_mm_xor_si64(c2[1110],_mm_xor_si64(c2[1675],c2[1476]));
+     d2[198]=simde_mm_xor_si64(c2[1110],simde_mm_xor_si64(c2[1675],c2[1476]));
 
 //row: 19
-     d2[209]=_mm_xor_si64(c2[884],_mm_xor_si64(c2[229],_mm_xor_si64(c2[448],_mm_xor_si64(c2[1101],_mm_xor_si64(c2[1564],_mm_xor_si64(c2[28],_mm_xor_si64(c2[1124],_mm_xor_si64(c2[1567],_mm_xor_si64(c2[1372],_mm_xor_si64(c2[1370],_mm_xor_si64(c2[733],_mm_xor_si64(c2[290],_mm_xor_si64(c2[735],_mm_xor_si64(c2[1631],_mm_xor_si64(c2[534],_mm_xor_si64(c2[1414],_mm_xor_si64(c2[337],_mm_xor_si64(c2[1650],_mm_xor_si64(c2[133],_mm_xor_si64(c2[1674],_mm_xor_si64(c2[361],_mm_xor_si64(c2[1254],_mm_xor_si64(c2[381],_mm_xor_si64(c2[842],_mm_xor_si64(c2[1498],_mm_xor_si64(c2[405],_mm_xor_si64(c2[858],_mm_xor_si64(c2[1299],c2[427]))))))))))))))))))))))))))));
+     d2[209]=simde_mm_xor_si64(c2[884],simde_mm_xor_si64(c2[229],simde_mm_xor_si64(c2[448],simde_mm_xor_si64(c2[1101],simde_mm_xor_si64(c2[1564],simde_mm_xor_si64(c2[28],simde_mm_xor_si64(c2[1124],simde_mm_xor_si64(c2[1567],simde_mm_xor_si64(c2[1372],simde_mm_xor_si64(c2[1370],simde_mm_xor_si64(c2[733],simde_mm_xor_si64(c2[290],simde_mm_xor_si64(c2[735],simde_mm_xor_si64(c2[1631],simde_mm_xor_si64(c2[534],simde_mm_xor_si64(c2[1414],simde_mm_xor_si64(c2[337],simde_mm_xor_si64(c2[1650],simde_mm_xor_si64(c2[133],simde_mm_xor_si64(c2[1674],simde_mm_xor_si64(c2[361],simde_mm_xor_si64(c2[1254],simde_mm_xor_si64(c2[381],simde_mm_xor_si64(c2[842],simde_mm_xor_si64(c2[1498],simde_mm_xor_si64(c2[405],simde_mm_xor_si64(c2[858],simde_mm_xor_si64(c2[1299],c2[427]))))))))))))))))))))))))))));
 
 //row: 20
-     d2[220]=_mm_xor_si64(c2[228],_mm_xor_si64(c2[8],_mm_xor_si64(c2[1101],_mm_xor_si64(c2[1320],_mm_xor_si64(c2[908],_mm_xor_si64(c2[688],_mm_xor_si64(c2[911],_mm_xor_si64(c2[248],_mm_xor_si64(c2[27],_mm_xor_si64(c2[705],_mm_xor_si64(c2[485],_mm_xor_si64(c2[494],_mm_xor_si64(c2[66],_mm_xor_si64(c2[1616],_mm_xor_si64(c2[1173],_mm_xor_si64(c2[1607],_mm_xor_si64(c2[755],_mm_xor_si64(c2[1417],_mm_xor_si64(c2[538],_mm_xor_si64(c2[974],_mm_xor_si64(c2[1220],_mm_xor_si64(c2[774],_mm_xor_si64(c2[1236],_mm_xor_si64(c2[1016],_mm_xor_si64(c2[798],_mm_xor_si64(c2[1233],_mm_xor_si64(c2[378],_mm_xor_si64(c2[1264],_mm_xor_si64(c2[1725],_mm_xor_si64(c2[622],_mm_xor_si64(c2[1277],_mm_xor_si64(c2[202],_mm_xor_si64(c2[1741],_mm_xor_si64(c2[423],c2[1299]))))))))))))))))))))))))))))))))));
+     d2[220]=simde_mm_xor_si64(c2[228],simde_mm_xor_si64(c2[8],simde_mm_xor_si64(c2[1101],simde_mm_xor_si64(c2[1320],simde_mm_xor_si64(c2[908],simde_mm_xor_si64(c2[688],simde_mm_xor_si64(c2[911],simde_mm_xor_si64(c2[248],simde_mm_xor_si64(c2[27],simde_mm_xor_si64(c2[705],simde_mm_xor_si64(c2[485],simde_mm_xor_si64(c2[494],simde_mm_xor_si64(c2[66],simde_mm_xor_si64(c2[1616],simde_mm_xor_si64(c2[1173],simde_mm_xor_si64(c2[1607],simde_mm_xor_si64(c2[755],simde_mm_xor_si64(c2[1417],simde_mm_xor_si64(c2[538],simde_mm_xor_si64(c2[974],simde_mm_xor_si64(c2[1220],simde_mm_xor_si64(c2[774],simde_mm_xor_si64(c2[1236],simde_mm_xor_si64(c2[1016],simde_mm_xor_si64(c2[798],simde_mm_xor_si64(c2[1233],simde_mm_xor_si64(c2[378],simde_mm_xor_si64(c2[1264],simde_mm_xor_si64(c2[1725],simde_mm_xor_si64(c2[622],simde_mm_xor_si64(c2[1277],simde_mm_xor_si64(c2[202],simde_mm_xor_si64(c2[1741],simde_mm_xor_si64(c2[423],c2[1299]))))))))))))))))))))))))))))))))));
 
 //row: 21
-     d2[231]=_mm_xor_si64(c2[662],_mm_xor_si64(c2[7],_mm_xor_si64(c2[226],_mm_xor_si64(c2[888],_mm_xor_si64(c2[1342],_mm_xor_si64(c2[1565],_mm_xor_si64(c2[1122],_mm_xor_si64(c2[902],_mm_xor_si64(c2[1150],_mm_xor_si64(c2[1368],_mm_xor_si64(c2[1148],_mm_xor_si64(c2[511],_mm_xor_si64(c2[68],_mm_xor_si64(c2[513],_mm_xor_si64(c2[1409],_mm_xor_si64(c2[312],_mm_xor_si64(c2[1412],_mm_xor_si64(c2[1192],_mm_xor_si64(c2[115],_mm_xor_si64(c2[1659],_mm_xor_si64(c2[1439],_mm_xor_si64(c2[1681],_mm_xor_si64(c2[1452],_mm_xor_si64(c2[359],_mm_xor_si64(c2[139],_mm_xor_si64(c2[1043],_mm_xor_si64(c2[379],_mm_xor_si64(c2[159],_mm_xor_si64(c2[620],_mm_xor_si64(c2[1276],_mm_xor_si64(c2[403],_mm_xor_si64(c2[183],_mm_xor_si64(c2[1505],_mm_xor_si64(c2[647],_mm_xor_si64(c2[1088],_mm_xor_si64(c2[425],c2[205]))))))))))))))))))))))))))))))))))));
+     d2[231]=simde_mm_xor_si64(c2[662],simde_mm_xor_si64(c2[7],simde_mm_xor_si64(c2[226],simde_mm_xor_si64(c2[888],simde_mm_xor_si64(c2[1342],simde_mm_xor_si64(c2[1565],simde_mm_xor_si64(c2[1122],simde_mm_xor_si64(c2[902],simde_mm_xor_si64(c2[1150],simde_mm_xor_si64(c2[1368],simde_mm_xor_si64(c2[1148],simde_mm_xor_si64(c2[511],simde_mm_xor_si64(c2[68],simde_mm_xor_si64(c2[513],simde_mm_xor_si64(c2[1409],simde_mm_xor_si64(c2[312],simde_mm_xor_si64(c2[1412],simde_mm_xor_si64(c2[1192],simde_mm_xor_si64(c2[115],simde_mm_xor_si64(c2[1659],simde_mm_xor_si64(c2[1439],simde_mm_xor_si64(c2[1681],simde_mm_xor_si64(c2[1452],simde_mm_xor_si64(c2[359],simde_mm_xor_si64(c2[139],simde_mm_xor_si64(c2[1043],simde_mm_xor_si64(c2[379],simde_mm_xor_si64(c2[159],simde_mm_xor_si64(c2[620],simde_mm_xor_si64(c2[1276],simde_mm_xor_si64(c2[403],simde_mm_xor_si64(c2[183],simde_mm_xor_si64(c2[1505],simde_mm_xor_si64(c2[647],simde_mm_xor_si64(c2[1088],simde_mm_xor_si64(c2[425],c2[205]))))))))))))))))))))))))))))))))))));
 
 //row: 22
-     d2[242]=_mm_xor_si64(c2[464],c2[713]);
+     d2[242]=simde_mm_xor_si64(c2[464],c2[713]);
 
 //row: 23
-     d2[253]=_mm_xor_si64(c2[1322],_mm_xor_si64(c2[951],c2[1437]));
+     d2[253]=simde_mm_xor_si64(c2[1322],simde_mm_xor_si64(c2[951],c2[1437]));
 
 //row: 24
-     d2[264]=_mm_xor_si64(c2[245],_mm_xor_si64(c2[926],c2[1305]));
+     d2[264]=simde_mm_xor_si64(c2[245],simde_mm_xor_si64(c2[926],c2[1305]));
 
 //row: 25
-     d2[275]=_mm_xor_si64(c2[9],c2[116]);
+     d2[275]=simde_mm_xor_si64(c2[9],c2[116]);
 
 //row: 26
-     d2[286]=_mm_xor_si64(c2[224],_mm_xor_si64(c2[4],_mm_xor_si64(c2[884],_mm_xor_si64(c2[1328],_mm_xor_si64(c2[1108],_mm_xor_si64(c2[229],_mm_xor_si64(c2[1327],_mm_xor_si64(c2[448],_mm_xor_si64(c2[904],_mm_xor_si64(c2[684],_mm_xor_si64(c2[1564],_mm_xor_si64(c2[907],_mm_xor_si64(c2[28],_mm_xor_si64(c2[244],_mm_xor_si64(c2[1344],_mm_xor_si64(c2[1124],_mm_xor_si64(c2[712],_mm_xor_si64(c2[492],_mm_xor_si64(c2[1372],_mm_xor_si64(c2[490],_mm_xor_si64(c2[1590],_mm_xor_si64(c2[1370],_mm_xor_si64(c2[928],_mm_xor_si64(c2[73],_mm_xor_si64(c2[1612],_mm_xor_si64(c2[733],_mm_xor_si64(c2[1389],_mm_xor_si64(c2[1169],_mm_xor_si64(c2[290],_mm_xor_si64(c2[1614],_mm_xor_si64(c2[735],_mm_xor_si64(c2[971],_mm_xor_si64(c2[751],_mm_xor_si64(c2[1631],_mm_xor_si64(c2[1413],_mm_xor_si64(c2[534],_mm_xor_si64(c2[534],_mm_xor_si64(c2[1634],_mm_xor_si64(c2[1414],_mm_xor_si64(c2[1436],_mm_xor_si64(c2[1216],_mm_xor_si64(c2[337],_mm_xor_si64(c2[770],_mm_xor_si64(c2[111],_mm_xor_si64(c2[1650],_mm_xor_si64(c2[1232],_mm_xor_si64(c2[1012],_mm_xor_si64(c2[133],_mm_xor_si64(c2[1014],_mm_xor_si64(c2[794],_mm_xor_si64(c2[1674],_mm_xor_si64(c2[1240],_mm_xor_si64(c2[581],_mm_xor_si64(c2[361],_mm_xor_si64(c2[594],_mm_xor_si64(c2[374],_mm_xor_si64(c2[1254],_mm_xor_si64(c2[1260],_mm_xor_si64(c2[601],_mm_xor_si64(c2[381],_mm_xor_si64(c2[1478],_mm_xor_si64(c2[182],_mm_xor_si64(c2[1721],_mm_xor_si64(c2[842],_mm_xor_si64(c2[618],_mm_xor_si64(c2[1498],_mm_xor_si64(c2[1284],_mm_xor_si64(c2[625],_mm_xor_si64(c2[405],_mm_xor_si64(c2[198],_mm_xor_si64(c2[1748],_mm_xor_si64(c2[858],_mm_xor_si64(c2[639],_mm_xor_si64(c2[419],_mm_xor_si64(c2[1299],_mm_xor_si64(c2[1306],_mm_xor_si64(c2[647],c2[427])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+     d2[286]=simde_mm_xor_si64(c2[224],simde_mm_xor_si64(c2[4],simde_mm_xor_si64(c2[884],simde_mm_xor_si64(c2[1328],simde_mm_xor_si64(c2[1108],simde_mm_xor_si64(c2[229],simde_mm_xor_si64(c2[1327],simde_mm_xor_si64(c2[448],simde_mm_xor_si64(c2[904],simde_mm_xor_si64(c2[684],simde_mm_xor_si64(c2[1564],simde_mm_xor_si64(c2[907],simde_mm_xor_si64(c2[28],simde_mm_xor_si64(c2[244],simde_mm_xor_si64(c2[1344],simde_mm_xor_si64(c2[1124],simde_mm_xor_si64(c2[712],simde_mm_xor_si64(c2[492],simde_mm_xor_si64(c2[1372],simde_mm_xor_si64(c2[490],simde_mm_xor_si64(c2[1590],simde_mm_xor_si64(c2[1370],simde_mm_xor_si64(c2[928],simde_mm_xor_si64(c2[73],simde_mm_xor_si64(c2[1612],simde_mm_xor_si64(c2[733],simde_mm_xor_si64(c2[1389],simde_mm_xor_si64(c2[1169],simde_mm_xor_si64(c2[290],simde_mm_xor_si64(c2[1614],simde_mm_xor_si64(c2[735],simde_mm_xor_si64(c2[971],simde_mm_xor_si64(c2[751],simde_mm_xor_si64(c2[1631],simde_mm_xor_si64(c2[1413],simde_mm_xor_si64(c2[534],simde_mm_xor_si64(c2[534],simde_mm_xor_si64(c2[1634],simde_mm_xor_si64(c2[1414],simde_mm_xor_si64(c2[1436],simde_mm_xor_si64(c2[1216],simde_mm_xor_si64(c2[337],simde_mm_xor_si64(c2[770],simde_mm_xor_si64(c2[111],simde_mm_xor_si64(c2[1650],simde_mm_xor_si64(c2[1232],simde_mm_xor_si64(c2[1012],simde_mm_xor_si64(c2[133],simde_mm_xor_si64(c2[1014],simde_mm_xor_si64(c2[794],simde_mm_xor_si64(c2[1674],simde_mm_xor_si64(c2[1240],simde_mm_xor_si64(c2[581],simde_mm_xor_si64(c2[361],simde_mm_xor_si64(c2[594],simde_mm_xor_si64(c2[374],simde_mm_xor_si64(c2[1254],simde_mm_xor_si64(c2[1260],simde_mm_xor_si64(c2[601],simde_mm_xor_si64(c2[381],simde_mm_xor_si64(c2[1478],simde_mm_xor_si64(c2[182],simde_mm_xor_si64(c2[1721],simde_mm_xor_si64(c2[842],simde_mm_xor_si64(c2[618],simde_mm_xor_si64(c2[1498],simde_mm_xor_si64(c2[1284],simde_mm_xor_si64(c2[625],simde_mm_xor_si64(c2[405],simde_mm_xor_si64(c2[198],simde_mm_xor_si64(c2[1748],simde_mm_xor_si64(c2[858],simde_mm_xor_si64(c2[639],simde_mm_xor_si64(c2[419],simde_mm_xor_si64(c2[1299],simde_mm_xor_si64(c2[1306],simde_mm_xor_si64(c2[647],c2[427])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
 
 //row: 27
-     d2[297]=_mm_xor_si64(c2[445],c2[134]);
+     d2[297]=simde_mm_xor_si64(c2[445],c2[134]);
 
 //row: 28
-     d2[308]=_mm_xor_si64(c2[30],_mm_xor_si64(c2[47],c2[1217]));
+     d2[308]=simde_mm_xor_si64(c2[30],simde_mm_xor_si64(c2[47],c2[1217]));
 
 //row: 29
-     d2[319]=_mm_xor_si64(c2[666],c2[317]);
+     d2[319]=simde_mm_xor_si64(c2[666],c2[317]);
 
 //row: 30
-     d2[330]=_mm_xor_si64(c2[934],_mm_xor_si64(c2[1220],_mm_xor_si64(c2[1254],c2[1301])));
+     d2[330]=simde_mm_xor_si64(c2[934],simde_mm_xor_si64(c2[1220],simde_mm_xor_si64(c2[1254],c2[1301])));
 
 //row: 31
-     d2[341]=_mm_xor_si64(c2[1544],_mm_xor_si64(c2[889],_mm_xor_si64(c2[1108],_mm_xor_si64(c2[465],_mm_xor_si64(c2[688],_mm_xor_si64(c2[245],_mm_xor_si64(c2[25],_mm_xor_si64(c2[1125],_mm_xor_si64(c2[273],_mm_xor_si64(c2[491],_mm_xor_si64(c2[271],_mm_xor_si64(c2[1393],_mm_xor_si64(c2[950],_mm_xor_si64(c2[1395],_mm_xor_si64(c2[532],_mm_xor_si64(c2[1194],_mm_xor_si64(c2[535],_mm_xor_si64(c2[315],_mm_xor_si64(c2[997],_mm_xor_si64(c2[771],_mm_xor_si64(c2[551],_mm_xor_si64(c2[793],_mm_xor_si64(c2[575],_mm_xor_si64(c2[1241],_mm_xor_si64(c2[1021],_mm_xor_si64(c2[155],_mm_xor_si64(c2[1261],_mm_xor_si64(c2[1041],_mm_xor_si64(c2[1502],_mm_xor_si64(c2[399],_mm_xor_si64(c2[1285],_mm_xor_si64(c2[1065],_mm_xor_si64(c2[1518],_mm_xor_si64(c2[200],_mm_xor_si64(c2[1307],c2[1087])))))))))))))))))))))))))))))))))));
+     d2[341]=simde_mm_xor_si64(c2[1544],simde_mm_xor_si64(c2[889],simde_mm_xor_si64(c2[1108],simde_mm_xor_si64(c2[465],simde_mm_xor_si64(c2[688],simde_mm_xor_si64(c2[245],simde_mm_xor_si64(c2[25],simde_mm_xor_si64(c2[1125],simde_mm_xor_si64(c2[273],simde_mm_xor_si64(c2[491],simde_mm_xor_si64(c2[271],simde_mm_xor_si64(c2[1393],simde_mm_xor_si64(c2[950],simde_mm_xor_si64(c2[1395],simde_mm_xor_si64(c2[532],simde_mm_xor_si64(c2[1194],simde_mm_xor_si64(c2[535],simde_mm_xor_si64(c2[315],simde_mm_xor_si64(c2[997],simde_mm_xor_si64(c2[771],simde_mm_xor_si64(c2[551],simde_mm_xor_si64(c2[793],simde_mm_xor_si64(c2[575],simde_mm_xor_si64(c2[1241],simde_mm_xor_si64(c2[1021],simde_mm_xor_si64(c2[155],simde_mm_xor_si64(c2[1261],simde_mm_xor_si64(c2[1041],simde_mm_xor_si64(c2[1502],simde_mm_xor_si64(c2[399],simde_mm_xor_si64(c2[1285],simde_mm_xor_si64(c2[1065],simde_mm_xor_si64(c2[1518],simde_mm_xor_si64(c2[200],simde_mm_xor_si64(c2[1307],c2[1087])))))))))))))))))))))))))))))))))));
 
 //row: 32
-     d2[352]=_mm_xor_si64(c2[444],_mm_xor_si64(c2[224],_mm_xor_si64(c2[1548],_mm_xor_si64(c2[1328],_mm_xor_si64(c2[1547],_mm_xor_si64(c2[883],_mm_xor_si64(c2[1124],_mm_xor_si64(c2[904],_mm_xor_si64(c2[1127],_mm_xor_si64(c2[464],_mm_xor_si64(c2[932],_mm_xor_si64(c2[712],_mm_xor_si64(c2[710],_mm_xor_si64(c2[293],_mm_xor_si64(c2[73],_mm_xor_si64(c2[1609],_mm_xor_si64(c2[1389],_mm_xor_si64(c2[75],_mm_xor_si64(c2[1191],_mm_xor_si64(c2[971],_mm_xor_si64(c2[1633],_mm_xor_si64(c2[754],_mm_xor_si64(c2[1656],_mm_xor_si64(c2[1436],_mm_xor_si64(c2[990],_mm_xor_si64(c2[339],_mm_xor_si64(c2[1452],_mm_xor_si64(c2[1232],_mm_xor_si64(c2[1234],_mm_xor_si64(c2[1014],_mm_xor_si64(c2[1460],_mm_xor_si64(c2[814],_mm_xor_si64(c2[594],_mm_xor_si64(c2[1480],_mm_xor_si64(c2[402],_mm_xor_si64(c2[182],_mm_xor_si64(c2[838],_mm_xor_si64(c2[1504],_mm_xor_si64(c2[418],_mm_xor_si64(c2[198],_mm_xor_si64(c2[859],_mm_xor_si64(c2[639],c2[1526]))))))))))))))))))))))))))))))))))))))))));
+     d2[352]=simde_mm_xor_si64(c2[444],simde_mm_xor_si64(c2[224],simde_mm_xor_si64(c2[1548],simde_mm_xor_si64(c2[1328],simde_mm_xor_si64(c2[1547],simde_mm_xor_si64(c2[883],simde_mm_xor_si64(c2[1124],simde_mm_xor_si64(c2[904],simde_mm_xor_si64(c2[1127],simde_mm_xor_si64(c2[464],simde_mm_xor_si64(c2[932],simde_mm_xor_si64(c2[712],simde_mm_xor_si64(c2[710],simde_mm_xor_si64(c2[293],simde_mm_xor_si64(c2[73],simde_mm_xor_si64(c2[1609],simde_mm_xor_si64(c2[1389],simde_mm_xor_si64(c2[75],simde_mm_xor_si64(c2[1191],simde_mm_xor_si64(c2[971],simde_mm_xor_si64(c2[1633],simde_mm_xor_si64(c2[754],simde_mm_xor_si64(c2[1656],simde_mm_xor_si64(c2[1436],simde_mm_xor_si64(c2[990],simde_mm_xor_si64(c2[339],simde_mm_xor_si64(c2[1452],simde_mm_xor_si64(c2[1232],simde_mm_xor_si64(c2[1234],simde_mm_xor_si64(c2[1014],simde_mm_xor_si64(c2[1460],simde_mm_xor_si64(c2[814],simde_mm_xor_si64(c2[594],simde_mm_xor_si64(c2[1480],simde_mm_xor_si64(c2[402],simde_mm_xor_si64(c2[182],simde_mm_xor_si64(c2[838],simde_mm_xor_si64(c2[1504],simde_mm_xor_si64(c2[418],simde_mm_xor_si64(c2[198],simde_mm_xor_si64(c2[859],simde_mm_xor_si64(c2[639],c2[1526]))))))))))))))))))))))))))))))))))))))))));
 
 //row: 33
-     d2[363]=_mm_xor_si64(c2[1],_mm_xor_si64(c2[1105],_mm_xor_si64(c2[1324],_mm_xor_si64(c2[692],_mm_xor_si64(c2[904],_mm_xor_si64(c2[252],_mm_xor_si64(c2[489],_mm_xor_si64(c2[487],_mm_xor_si64(c2[266],_mm_xor_si64(c2[1609],_mm_xor_si64(c2[1166],_mm_xor_si64(c2[1611],_mm_xor_si64(c2[748],_mm_xor_si64(c2[1410],_mm_xor_si64(c2[531],_mm_xor_si64(c2[1213],_mm_xor_si64(c2[778],_mm_xor_si64(c2[1020],_mm_xor_si64(c2[802],_mm_xor_si64(c2[1237],_mm_xor_si64(c2[382],_mm_xor_si64(c2[1257],_mm_xor_si64(c2[380],_mm_xor_si64(c2[1718],_mm_xor_si64(c2[626],_mm_xor_si64(c2[1281],_mm_xor_si64(c2[1745],_mm_xor_si64(c2[427],c2[1303]))))))))))))))))))))))))))));
+     d2[363]=simde_mm_xor_si64(c2[1],simde_mm_xor_si64(c2[1105],simde_mm_xor_si64(c2[1324],simde_mm_xor_si64(c2[692],simde_mm_xor_si64(c2[904],simde_mm_xor_si64(c2[252],simde_mm_xor_si64(c2[489],simde_mm_xor_si64(c2[487],simde_mm_xor_si64(c2[266],simde_mm_xor_si64(c2[1609],simde_mm_xor_si64(c2[1166],simde_mm_xor_si64(c2[1611],simde_mm_xor_si64(c2[748],simde_mm_xor_si64(c2[1410],simde_mm_xor_si64(c2[531],simde_mm_xor_si64(c2[1213],simde_mm_xor_si64(c2[778],simde_mm_xor_si64(c2[1020],simde_mm_xor_si64(c2[802],simde_mm_xor_si64(c2[1237],simde_mm_xor_si64(c2[382],simde_mm_xor_si64(c2[1257],simde_mm_xor_si64(c2[380],simde_mm_xor_si64(c2[1718],simde_mm_xor_si64(c2[626],simde_mm_xor_si64(c2[1281],simde_mm_xor_si64(c2[1745],simde_mm_xor_si64(c2[427],c2[1303]))))))))))))))))))))))))))));
 
 //row: 34
-     d2[374]=_mm_xor_si64(c2[1102],_mm_xor_si64(c2[882],_mm_xor_si64(c2[1106],_mm_xor_si64(c2[447],_mm_xor_si64(c2[227],_mm_xor_si64(c2[440],_mm_xor_si64(c2[446],_mm_xor_si64(c2[670],_mm_xor_si64(c2[661],_mm_xor_si64(c2[23],_mm_xor_si64(c2[1562],_mm_xor_si64(c2[27],_mm_xor_si64(c2[26],_mm_xor_si64(c2[250],_mm_xor_si64(c2[1122],_mm_xor_si64(c2[1566],_mm_xor_si64(c2[1346],_mm_xor_si64(c2[1590],_mm_xor_si64(c2[1370],_mm_xor_si64(c2[1594],_mm_xor_si64(c2[1368],_mm_xor_si64(c2[53],_mm_xor_si64(c2[1592],_mm_xor_si64(c2[951],_mm_xor_si64(c2[731],_mm_xor_si64(c2[955],_mm_xor_si64(c2[508],_mm_xor_si64(c2[288],_mm_xor_si64(c2[512],_mm_xor_si64(c2[733],_mm_xor_si64(c2[946],_mm_xor_si64(c2[90],_mm_xor_si64(c2[1629],_mm_xor_si64(c2[94],_mm_xor_si64(c2[532],_mm_xor_si64(c2[756],_mm_xor_si64(c2[1412],_mm_xor_si64(c2[97],_mm_xor_si64(c2[1636],_mm_xor_si64(c2[555],_mm_xor_si64(c2[335],_mm_xor_si64(c2[559],_mm_xor_si64(c2[1659],_mm_xor_si64(c2[333],_mm_xor_si64(c2[113],_mm_xor_si64(c2[362],_mm_xor_si64(c2[142],_mm_xor_si64(c2[355],_mm_xor_si64(c2[133],_mm_xor_si64(c2[1672],_mm_xor_si64(c2[137],_mm_xor_si64(c2[359],_mm_xor_si64(c2[792],_mm_xor_si64(c2[572],_mm_xor_si64(c2[1483],_mm_xor_si64(c2[1263],_mm_xor_si64(c2[1476],_mm_xor_si64(c2[379],_mm_xor_si64(c2[823],_mm_xor_si64(c2[603],_mm_xor_si64(c2[1060],_mm_xor_si64(c2[840],_mm_xor_si64(c2[1064],_mm_xor_si64(c2[1496],_mm_xor_si64(c2[1720],_mm_xor_si64(c2[403],_mm_xor_si64(c2[836],_mm_xor_si64(c2[616],_mm_xor_si64(c2[1087],_mm_xor_si64(c2[867],_mm_xor_si64(c2[1080],_mm_xor_si64(c2[1528],_mm_xor_si64(c2[1308],_mm_xor_si64(c2[1521],_mm_xor_si64(c2[425],_mm_xor_si64(c2[858],c2[638]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+     d2[374]=simde_mm_xor_si64(c2[1102],simde_mm_xor_si64(c2[882],simde_mm_xor_si64(c2[1106],simde_mm_xor_si64(c2[447],simde_mm_xor_si64(c2[227],simde_mm_xor_si64(c2[440],simde_mm_xor_si64(c2[446],simde_mm_xor_si64(c2[670],simde_mm_xor_si64(c2[661],simde_mm_xor_si64(c2[23],simde_mm_xor_si64(c2[1562],simde_mm_xor_si64(c2[27],simde_mm_xor_si64(c2[26],simde_mm_xor_si64(c2[250],simde_mm_xor_si64(c2[1122],simde_mm_xor_si64(c2[1566],simde_mm_xor_si64(c2[1346],simde_mm_xor_si64(c2[1590],simde_mm_xor_si64(c2[1370],simde_mm_xor_si64(c2[1594],simde_mm_xor_si64(c2[1368],simde_mm_xor_si64(c2[53],simde_mm_xor_si64(c2[1592],simde_mm_xor_si64(c2[951],simde_mm_xor_si64(c2[731],simde_mm_xor_si64(c2[955],simde_mm_xor_si64(c2[508],simde_mm_xor_si64(c2[288],simde_mm_xor_si64(c2[512],simde_mm_xor_si64(c2[733],simde_mm_xor_si64(c2[946],simde_mm_xor_si64(c2[90],simde_mm_xor_si64(c2[1629],simde_mm_xor_si64(c2[94],simde_mm_xor_si64(c2[532],simde_mm_xor_si64(c2[756],simde_mm_xor_si64(c2[1412],simde_mm_xor_si64(c2[97],simde_mm_xor_si64(c2[1636],simde_mm_xor_si64(c2[555],simde_mm_xor_si64(c2[335],simde_mm_xor_si64(c2[559],simde_mm_xor_si64(c2[1659],simde_mm_xor_si64(c2[333],simde_mm_xor_si64(c2[113],simde_mm_xor_si64(c2[362],simde_mm_xor_si64(c2[142],simde_mm_xor_si64(c2[355],simde_mm_xor_si64(c2[133],simde_mm_xor_si64(c2[1672],simde_mm_xor_si64(c2[137],simde_mm_xor_si64(c2[359],simde_mm_xor_si64(c2[792],simde_mm_xor_si64(c2[572],simde_mm_xor_si64(c2[1483],simde_mm_xor_si64(c2[1263],simde_mm_xor_si64(c2[1476],simde_mm_xor_si64(c2[379],simde_mm_xor_si64(c2[823],simde_mm_xor_si64(c2[603],simde_mm_xor_si64(c2[1060],simde_mm_xor_si64(c2[840],simde_mm_xor_si64(c2[1064],simde_mm_xor_si64(c2[1496],simde_mm_xor_si64(c2[1720],simde_mm_xor_si64(c2[403],simde_mm_xor_si64(c2[836],simde_mm_xor_si64(c2[616],simde_mm_xor_si64(c2[1087],simde_mm_xor_si64(c2[867],simde_mm_xor_si64(c2[1080],simde_mm_xor_si64(c2[1528],simde_mm_xor_si64(c2[1308],simde_mm_xor_si64(c2[1521],simde_mm_xor_si64(c2[425],simde_mm_xor_si64(c2[858],c2[638]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
 
 //row: 35
-     d2[385]=_mm_xor_si64(c2[8],_mm_xor_si64(c2[1547],_mm_xor_si64(c2[881],_mm_xor_si64(c2[1100],_mm_xor_si64(c2[688],_mm_xor_si64(c2[468],_mm_xor_si64(c2[691],_mm_xor_si64(c2[28],_mm_xor_si64(c2[1346],_mm_xor_si64(c2[485],_mm_xor_si64(c2[265],_mm_xor_si64(c2[274],_mm_xor_si64(c2[1616],_mm_xor_si64(c2[1396],_mm_xor_si64(c2[953],_mm_xor_si64(c2[1387],_mm_xor_si64(c2[535],_mm_xor_si64(c2[1197],_mm_xor_si64(c2[318],_mm_xor_si64(c2[1000],_mm_xor_si64(c2[554],_mm_xor_si64(c2[118],_mm_xor_si64(c2[1016],_mm_xor_si64(c2[796],_mm_xor_si64(c2[578],_mm_xor_si64(c2[1013],_mm_xor_si64(c2[158],_mm_xor_si64(c2[1044],_mm_xor_si64(c2[1505],_mm_xor_si64(c2[402],_mm_xor_si64(c2[1057],_mm_xor_si64(c2[1741],_mm_xor_si64(c2[1521],_mm_xor_si64(c2[203],c2[1079]))))))))))))))))))))))))))))))))));
+     d2[385]=simde_mm_xor_si64(c2[8],simde_mm_xor_si64(c2[1547],simde_mm_xor_si64(c2[881],simde_mm_xor_si64(c2[1100],simde_mm_xor_si64(c2[688],simde_mm_xor_si64(c2[468],simde_mm_xor_si64(c2[691],simde_mm_xor_si64(c2[28],simde_mm_xor_si64(c2[1346],simde_mm_xor_si64(c2[485],simde_mm_xor_si64(c2[265],simde_mm_xor_si64(c2[274],simde_mm_xor_si64(c2[1616],simde_mm_xor_si64(c2[1396],simde_mm_xor_si64(c2[953],simde_mm_xor_si64(c2[1387],simde_mm_xor_si64(c2[535],simde_mm_xor_si64(c2[1197],simde_mm_xor_si64(c2[318],simde_mm_xor_si64(c2[1000],simde_mm_xor_si64(c2[554],simde_mm_xor_si64(c2[118],simde_mm_xor_si64(c2[1016],simde_mm_xor_si64(c2[796],simde_mm_xor_si64(c2[578],simde_mm_xor_si64(c2[1013],simde_mm_xor_si64(c2[158],simde_mm_xor_si64(c2[1044],simde_mm_xor_si64(c2[1505],simde_mm_xor_si64(c2[402],simde_mm_xor_si64(c2[1057],simde_mm_xor_si64(c2[1741],simde_mm_xor_si64(c2[1521],simde_mm_xor_si64(c2[203],c2[1079]))))))))))))))))))))))))))))))))));
 
 //row: 36
-     d2[396]=_mm_xor_si64(c2[229],_mm_xor_si64(c2[492],c2[379]));
+     d2[396]=simde_mm_xor_si64(c2[229],simde_mm_xor_si64(c2[492],c2[379]));
 
 //row: 37
-     d2[407]=_mm_xor_si64(c2[447],_mm_xor_si64(c2[222],_mm_xor_si64(c2[1540],_mm_xor_si64(c2[1326],_mm_xor_si64(c2[0],_mm_xor_si64(c2[1545],_mm_xor_si64(c2[1127],_mm_xor_si64(c2[902],_mm_xor_si64(c2[1350],_mm_xor_si64(c2[1125],_mm_xor_si64(c2[687],_mm_xor_si64(c2[682],_mm_xor_si64(c2[462],_mm_xor_si64(c2[924],_mm_xor_si64(c2[710],_mm_xor_si64(c2[933],_mm_xor_si64(c2[928],_mm_xor_si64(c2[708],_mm_xor_si64(c2[296],_mm_xor_si64(c2[71],_mm_xor_si64(c2[1612],_mm_xor_si64(c2[1387],_mm_xor_si64(c2[287],_mm_xor_si64(c2[73],_mm_xor_si64(c2[1194],_mm_xor_si64(c2[969],_mm_xor_si64(c2[97],_mm_xor_si64(c2[1631],_mm_xor_si64(c2[977],_mm_xor_si64(c2[972],_mm_xor_si64(c2[752],_mm_xor_si64(c2[1659],_mm_xor_si64(c2[1434],_mm_xor_si64(c2[1213],_mm_xor_si64(c2[1219],_mm_xor_si64(c2[999],_mm_xor_si64(c2[1455],_mm_xor_si64(c2[1241],_mm_xor_si64(c2[1237],_mm_xor_si64(c2[1012],_mm_xor_si64(c2[1672],_mm_xor_si64(c2[1678],_mm_xor_si64(c2[1458],_mm_xor_si64(c2[817],_mm_xor_si64(c2[603],_mm_xor_si64(c2[1703],_mm_xor_si64(c2[1698],_mm_xor_si64(c2[1478],_mm_xor_si64(c2[405],_mm_xor_si64(c2[180],_mm_xor_si64(c2[1061],_mm_xor_si64(c2[836],_mm_xor_si64(c2[1716],_mm_xor_si64(c2[1722],_mm_xor_si64(c2[1502],_mm_xor_si64(c2[421],_mm_xor_si64(c2[207],_mm_xor_si64(c2[862],_mm_xor_si64(c2[648],_mm_xor_si64(c2[1738],_mm_xor_si64(c2[1744],c2[1524])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+     d2[407]=simde_mm_xor_si64(c2[447],simde_mm_xor_si64(c2[222],simde_mm_xor_si64(c2[1540],simde_mm_xor_si64(c2[1326],simde_mm_xor_si64(c2[0],simde_mm_xor_si64(c2[1545],simde_mm_xor_si64(c2[1127],simde_mm_xor_si64(c2[902],simde_mm_xor_si64(c2[1350],simde_mm_xor_si64(c2[1125],simde_mm_xor_si64(c2[687],simde_mm_xor_si64(c2[682],simde_mm_xor_si64(c2[462],simde_mm_xor_si64(c2[924],simde_mm_xor_si64(c2[710],simde_mm_xor_si64(c2[933],simde_mm_xor_si64(c2[928],simde_mm_xor_si64(c2[708],simde_mm_xor_si64(c2[296],simde_mm_xor_si64(c2[71],simde_mm_xor_si64(c2[1612],simde_mm_xor_si64(c2[1387],simde_mm_xor_si64(c2[287],simde_mm_xor_si64(c2[73],simde_mm_xor_si64(c2[1194],simde_mm_xor_si64(c2[969],simde_mm_xor_si64(c2[97],simde_mm_xor_si64(c2[1631],simde_mm_xor_si64(c2[977],simde_mm_xor_si64(c2[972],simde_mm_xor_si64(c2[752],simde_mm_xor_si64(c2[1659],simde_mm_xor_si64(c2[1434],simde_mm_xor_si64(c2[1213],simde_mm_xor_si64(c2[1219],simde_mm_xor_si64(c2[999],simde_mm_xor_si64(c2[1455],simde_mm_xor_si64(c2[1241],simde_mm_xor_si64(c2[1237],simde_mm_xor_si64(c2[1012],simde_mm_xor_si64(c2[1672],simde_mm_xor_si64(c2[1678],simde_mm_xor_si64(c2[1458],simde_mm_xor_si64(c2[817],simde_mm_xor_si64(c2[603],simde_mm_xor_si64(c2[1703],simde_mm_xor_si64(c2[1698],simde_mm_xor_si64(c2[1478],simde_mm_xor_si64(c2[405],simde_mm_xor_si64(c2[180],simde_mm_xor_si64(c2[1061],simde_mm_xor_si64(c2[836],simde_mm_xor_si64(c2[1716],simde_mm_xor_si64(c2[1722],simde_mm_xor_si64(c2[1502],simde_mm_xor_si64(c2[421],simde_mm_xor_si64(c2[207],simde_mm_xor_si64(c2[862],simde_mm_xor_si64(c2[648],simde_mm_xor_si64(c2[1738],simde_mm_xor_si64(c2[1744],c2[1524])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
 
 //row: 38
-     d2[418]=_mm_xor_si64(c2[1550],_mm_xor_si64(c2[1330],_mm_xor_si64(c2[664],_mm_xor_si64(c2[883],_mm_xor_si64(c2[471],_mm_xor_si64(c2[251],_mm_xor_si64(c2[463],_mm_xor_si64(c2[1570],_mm_xor_si64(c2[25],_mm_xor_si64(c2[268],_mm_xor_si64(c2[48],_mm_xor_si64(c2[46],_mm_xor_si64(c2[1388],_mm_xor_si64(c2[1168],_mm_xor_si64(c2[736],_mm_xor_si64(c2[1170],_mm_xor_si64(c2[318],_mm_xor_si64(c2[969],_mm_xor_si64(c2[90],_mm_xor_si64(c2[772],_mm_xor_si64(c2[337],_mm_xor_si64(c2[1650],_mm_xor_si64(c2[799],_mm_xor_si64(c2[579],_mm_xor_si64(c2[361],_mm_xor_si64(c2[796],_mm_xor_si64(c2[1700],_mm_xor_si64(c2[816],_mm_xor_si64(c2[1277],_mm_xor_si64(c2[185],_mm_xor_si64(c2[840],_mm_xor_si64(c2[1524],_mm_xor_si64(c2[1304],_mm_xor_si64(c2[1745],c2[862]))))))))))))))))))))))))))))))))));
+     d2[418]=simde_mm_xor_si64(c2[1550],simde_mm_xor_si64(c2[1330],simde_mm_xor_si64(c2[664],simde_mm_xor_si64(c2[883],simde_mm_xor_si64(c2[471],simde_mm_xor_si64(c2[251],simde_mm_xor_si64(c2[463],simde_mm_xor_si64(c2[1570],simde_mm_xor_si64(c2[25],simde_mm_xor_si64(c2[268],simde_mm_xor_si64(c2[48],simde_mm_xor_si64(c2[46],simde_mm_xor_si64(c2[1388],simde_mm_xor_si64(c2[1168],simde_mm_xor_si64(c2[736],simde_mm_xor_si64(c2[1170],simde_mm_xor_si64(c2[318],simde_mm_xor_si64(c2[969],simde_mm_xor_si64(c2[90],simde_mm_xor_si64(c2[772],simde_mm_xor_si64(c2[337],simde_mm_xor_si64(c2[1650],simde_mm_xor_si64(c2[799],simde_mm_xor_si64(c2[579],simde_mm_xor_si64(c2[361],simde_mm_xor_si64(c2[796],simde_mm_xor_si64(c2[1700],simde_mm_xor_si64(c2[816],simde_mm_xor_si64(c2[1277],simde_mm_xor_si64(c2[185],simde_mm_xor_si64(c2[840],simde_mm_xor_si64(c2[1524],simde_mm_xor_si64(c2[1304],simde_mm_xor_si64(c2[1745],c2[862]))))))))))))))))))))))))))))))))));
 
 //row: 39
-     d2[429]=_mm_xor_si64(c2[1110],_mm_xor_si64(c2[890],_mm_xor_si64(c2[444],_mm_xor_si64(c2[224],_mm_xor_si64(c2[443],_mm_xor_si64(c2[1102],_mm_xor_si64(c2[31],_mm_xor_si64(c2[1570],_mm_xor_si64(c2[23],_mm_xor_si64(c2[1130],_mm_xor_si64(c2[1587],_mm_xor_si64(c2[1367],_mm_xor_si64(c2[1365],_mm_xor_si64(c2[948],_mm_xor_si64(c2[728],_mm_xor_si64(c2[516],_mm_xor_si64(c2[296],_mm_xor_si64(c2[730],_mm_xor_si64(c2[98],_mm_xor_si64(c2[1637],_mm_xor_si64(c2[529],_mm_xor_si64(c2[1409],_mm_xor_si64(c2[552],_mm_xor_si64(c2[332],_mm_xor_si64(c2[1656],_mm_xor_si64(c2[359],_mm_xor_si64(c2[139],_mm_xor_si64(c2[141],_mm_xor_si64(c2[1680],_mm_xor_si64(c2[356],_mm_xor_si64(c2[1480],_mm_xor_si64(c2[1260],_mm_xor_si64(c2[376],_mm_xor_si64(c2[1474],_mm_xor_si64(c2[1057],_mm_xor_si64(c2[837],_mm_xor_si64(c2[1504],_mm_xor_si64(c2[400],_mm_xor_si64(c2[1084],_mm_xor_si64(c2[864],_mm_xor_si64(c2[1525],_mm_xor_si64(c2[1305],c2[422]))))))))))))))))))))))))))))))))))))))))));
+     d2[429]=simde_mm_xor_si64(c2[1110],simde_mm_xor_si64(c2[890],simde_mm_xor_si64(c2[444],simde_mm_xor_si64(c2[224],simde_mm_xor_si64(c2[443],simde_mm_xor_si64(c2[1102],simde_mm_xor_si64(c2[31],simde_mm_xor_si64(c2[1570],simde_mm_xor_si64(c2[23],simde_mm_xor_si64(c2[1130],simde_mm_xor_si64(c2[1587],simde_mm_xor_si64(c2[1367],simde_mm_xor_si64(c2[1365],simde_mm_xor_si64(c2[948],simde_mm_xor_si64(c2[728],simde_mm_xor_si64(c2[516],simde_mm_xor_si64(c2[296],simde_mm_xor_si64(c2[730],simde_mm_xor_si64(c2[98],simde_mm_xor_si64(c2[1637],simde_mm_xor_si64(c2[529],simde_mm_xor_si64(c2[1409],simde_mm_xor_si64(c2[552],simde_mm_xor_si64(c2[332],simde_mm_xor_si64(c2[1656],simde_mm_xor_si64(c2[359],simde_mm_xor_si64(c2[139],simde_mm_xor_si64(c2[141],simde_mm_xor_si64(c2[1680],simde_mm_xor_si64(c2[356],simde_mm_xor_si64(c2[1480],simde_mm_xor_si64(c2[1260],simde_mm_xor_si64(c2[376],simde_mm_xor_si64(c2[1474],simde_mm_xor_si64(c2[1057],simde_mm_xor_si64(c2[837],simde_mm_xor_si64(c2[1504],simde_mm_xor_si64(c2[400],simde_mm_xor_si64(c2[1084],simde_mm_xor_si64(c2[864],simde_mm_xor_si64(c2[1525],simde_mm_xor_si64(c2[1305],c2[422]))))))))))))))))))))))))))))))))))))))))));
 
 //row: 40
-     d2[440]=_mm_xor_si64(c2[1545],_mm_xor_si64(c2[1107],_mm_xor_si64(c2[890],_mm_xor_si64(c2[441],_mm_xor_si64(c2[1109],_mm_xor_si64(c2[660],_mm_xor_si64(c2[466],_mm_xor_si64(c2[28],_mm_xor_si64(c2[689],_mm_xor_si64(c2[251],_mm_xor_si64(c2[26],_mm_xor_si64(c2[1567],_mm_xor_si64(c2[1347],_mm_xor_si64(c2[274],_mm_xor_si64(c2[1584],_mm_xor_si64(c2[272],_mm_xor_si64(c2[54],_mm_xor_si64(c2[1593],_mm_xor_si64(c2[53],_mm_xor_si64(c2[1394],_mm_xor_si64(c2[956],_mm_xor_si64(c2[951],_mm_xor_si64(c2[513],_mm_xor_si64(c2[1396],_mm_xor_si64(c2[947],_mm_xor_si64(c2[533],_mm_xor_si64(c2[95],_mm_xor_si64(c2[1195],_mm_xor_si64(c2[757],_mm_xor_si64(c2[316],_mm_xor_si64(c2[98],_mm_xor_si64(c2[1637],_mm_xor_si64(c2[998],_mm_xor_si64(c2[560],_mm_xor_si64(c2[552],_mm_xor_si64(c2[334],_mm_xor_si64(c2[114],_mm_xor_si64(c2[794],_mm_xor_si64(c2[356],_mm_xor_si64(c2[576],_mm_xor_si64(c2[138],_mm_xor_si64(c2[1022],_mm_xor_si64(c2[793],_mm_xor_si64(c2[573],_mm_xor_si64(c2[156],_mm_xor_si64(c2[1477],_mm_xor_si64(c2[1042],_mm_xor_si64(c2[824],_mm_xor_si64(c2[604],_mm_xor_si64(c2[1503],_mm_xor_si64(c2[1065],_mm_xor_si64(c2[400],_mm_xor_si64(c2[1721],_mm_xor_si64(c2[1066],_mm_xor_si64(c2[837],_mm_xor_si64(c2[617],_mm_xor_si64(c2[1519],_mm_xor_si64(c2[1081],_mm_xor_si64(c2[201],_mm_xor_si64(c2[1522],_mm_xor_si64(c2[1088],_mm_xor_si64(c2[859],c2[639]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+     d2[440]=simde_mm_xor_si64(c2[1545],simde_mm_xor_si64(c2[1107],simde_mm_xor_si64(c2[890],simde_mm_xor_si64(c2[441],simde_mm_xor_si64(c2[1109],simde_mm_xor_si64(c2[660],simde_mm_xor_si64(c2[466],simde_mm_xor_si64(c2[28],simde_mm_xor_si64(c2[689],simde_mm_xor_si64(c2[251],simde_mm_xor_si64(c2[26],simde_mm_xor_si64(c2[1567],simde_mm_xor_si64(c2[1347],simde_mm_xor_si64(c2[274],simde_mm_xor_si64(c2[1584],simde_mm_xor_si64(c2[272],simde_mm_xor_si64(c2[54],simde_mm_xor_si64(c2[1593],simde_mm_xor_si64(c2[53],simde_mm_xor_si64(c2[1394],simde_mm_xor_si64(c2[956],simde_mm_xor_si64(c2[951],simde_mm_xor_si64(c2[513],simde_mm_xor_si64(c2[1396],simde_mm_xor_si64(c2[947],simde_mm_xor_si64(c2[533],simde_mm_xor_si64(c2[95],simde_mm_xor_si64(c2[1195],simde_mm_xor_si64(c2[757],simde_mm_xor_si64(c2[316],simde_mm_xor_si64(c2[98],simde_mm_xor_si64(c2[1637],simde_mm_xor_si64(c2[998],simde_mm_xor_si64(c2[560],simde_mm_xor_si64(c2[552],simde_mm_xor_si64(c2[334],simde_mm_xor_si64(c2[114],simde_mm_xor_si64(c2[794],simde_mm_xor_si64(c2[356],simde_mm_xor_si64(c2[576],simde_mm_xor_si64(c2[138],simde_mm_xor_si64(c2[1022],simde_mm_xor_si64(c2[793],simde_mm_xor_si64(c2[573],simde_mm_xor_si64(c2[156],simde_mm_xor_si64(c2[1477],simde_mm_xor_si64(c2[1042],simde_mm_xor_si64(c2[824],simde_mm_xor_si64(c2[604],simde_mm_xor_si64(c2[1503],simde_mm_xor_si64(c2[1065],simde_mm_xor_si64(c2[400],simde_mm_xor_si64(c2[1721],simde_mm_xor_si64(c2[1066],simde_mm_xor_si64(c2[837],simde_mm_xor_si64(c2[617],simde_mm_xor_si64(c2[1519],simde_mm_xor_si64(c2[1081],simde_mm_xor_si64(c2[201],simde_mm_xor_si64(c2[1522],simde_mm_xor_si64(c2[1088],simde_mm_xor_si64(c2[859],c2[639]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
 
 //row: 41
-     d2[451]=_mm_xor_si64(c2[889],_mm_xor_si64(c2[669],_mm_xor_si64(c2[3],_mm_xor_si64(c2[222],_mm_xor_si64(c2[1569],_mm_xor_si64(c2[1349],_mm_xor_si64(c2[1572],_mm_xor_si64(c2[909],_mm_xor_si64(c2[907],_mm_xor_si64(c2[1366],_mm_xor_si64(c2[1146],_mm_xor_si64(c2[1144],_mm_xor_si64(c2[727],_mm_xor_si64(c2[507],_mm_xor_si64(c2[75],_mm_xor_si64(c2[509],_mm_xor_si64(c2[1416],_mm_xor_si64(c2[308],_mm_xor_si64(c2[1188],_mm_xor_si64(c2[111],_mm_xor_si64(c2[1435],_mm_xor_si64(c2[1430],_mm_xor_si64(c2[138],_mm_xor_si64(c2[1677],_mm_xor_si64(c2[1459],_mm_xor_si64(c2[135],_mm_xor_si64(c2[1039],_mm_xor_si64(c2[155],_mm_xor_si64(c2[616],_mm_xor_si64(c2[1283],_mm_xor_si64(c2[179],_mm_xor_si64(c2[863],_mm_xor_si64(c2[643],_mm_xor_si64(c2[1084],c2[201]))))))))))))))))))))))))))))))))));
+     d2[451]=simde_mm_xor_si64(c2[889],simde_mm_xor_si64(c2[669],simde_mm_xor_si64(c2[3],simde_mm_xor_si64(c2[222],simde_mm_xor_si64(c2[1569],simde_mm_xor_si64(c2[1349],simde_mm_xor_si64(c2[1572],simde_mm_xor_si64(c2[909],simde_mm_xor_si64(c2[907],simde_mm_xor_si64(c2[1366],simde_mm_xor_si64(c2[1146],simde_mm_xor_si64(c2[1144],simde_mm_xor_si64(c2[727],simde_mm_xor_si64(c2[507],simde_mm_xor_si64(c2[75],simde_mm_xor_si64(c2[509],simde_mm_xor_si64(c2[1416],simde_mm_xor_si64(c2[308],simde_mm_xor_si64(c2[1188],simde_mm_xor_si64(c2[111],simde_mm_xor_si64(c2[1435],simde_mm_xor_si64(c2[1430],simde_mm_xor_si64(c2[138],simde_mm_xor_si64(c2[1677],simde_mm_xor_si64(c2[1459],simde_mm_xor_si64(c2[135],simde_mm_xor_si64(c2[1039],simde_mm_xor_si64(c2[155],simde_mm_xor_si64(c2[616],simde_mm_xor_si64(c2[1283],simde_mm_xor_si64(c2[179],simde_mm_xor_si64(c2[863],simde_mm_xor_si64(c2[643],simde_mm_xor_si64(c2[1084],c2[201]))))))))))))))))))))))))))))))))));
   }
 }
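
[Note on the pattern applied above: every generated parity row is rewritten by the same mechanical substitution, replacing the x86-only MMX intrinsic _mm_xor_si64 with SIMDE's portable spelling simde_mm_xor_si64, so the byte-level LDPC encoders compile unchanged on armv7l/aarch64, where the native intrinsic does not exist and SIMDE lowers the call to NEON or scalar code instead. A minimal standalone sketch of what one such call does is below; it is illustrative only, not part of the patch. It assumes SIMDE's MMX header at simde/x86/mmx.h and the SIMDE-provided simde__m64 type; the simde__m64i alias used in the generated files is assumed to be OAI's own typedef from PHY/sse_intrin.h rather than a SIMDE type.

#include <stdint.h>
#include <stdio.h>
#include <string.h>
#include <simde/x86/mmx.h>  /* provides simde__m64 and simde_mm_xor_si64 */

int main(void) {
  uint64_t a = 0x00ff00ff00ff00ffULL, b = 0x0f0f0f0f0f0f0f0fULL, r;
  simde__m64 va, vb, vr;
  /* simde__m64 is 8 bytes wide, so memcpy is a portable way to load/store it */
  memcpy(&va, &a, sizeof va);
  memcpy(&vb, &b, sizeof vb);
  vr = simde_mm_xor_si64(va, vb);  /* bitwise XOR, same semantics as _mm_xor_si64 */
  memcpy(&r, &vr, sizeof r);
  printf("%016llx\n", (unsigned long long)r);  /* prints 0ff00ff00ff00ff0 */
  return 0;
}

Because SIMDE selects the backend at compile time, no per-call-site #ifdef is needed: the same generated source builds natively on x86_64 and through emulation on ARM, which is why this commit can leave the row-by-row XOR structure of the encoder untouched.]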
diff --git a/openair1/PHY/CODING/nrLDPC_encoder/ldpc_BG2_Zc8_byte.c b/openair1/PHY/CODING/nrLDPC_encoder/ldpc_BG2_Zc8_byte.c
index 0a65a403346c399c5974bdfab84bf69d37776a81..f5517d000e0406f93908295d67830a6eaa2bb9d5 100644
--- a/openair1/PHY/CODING/nrLDPC_encoder/ldpc_BG2_Zc8_byte.c
+++ b/openair1/PHY/CODING/nrLDPC_encoder/ldpc_BG2_Zc8_byte.c
@@ -1,9 +1,9 @@
 #include "PHY/sse_intrin.h"
 // generated code for Zc=8, byte encoding
 static inline __attribute__ ((no_sanitize("address", "undefined"))) void ldpc_BG2_Zc8_byte(uint8_t *c,uint8_t *d) {
-  __m64i *csimd=(__m64i *)c,*dsimd=(__m64i *)d;
+  simde__m64i *csimd=(simde__m64i *)c,*dsimd=(simde__m64i *)d;
 
-  __m64i *c2,*d2;
+  simde__m64i *c2,*d2;
 
   int i2;
   for (i2=0; i2<1; i2++) {
@@ -11,129 +11,129 @@ static inline __attribute__ ((no_sanitize("address", "undefined"))) void ldpc_BG
      d2=&dsimd[i2];
 
 //row: 0
-     d2[0]=_mm_xor_si64(c2[0],_mm_xor_si64(c2[72],_mm_xor_si64(c2[0],_mm_xor_si64(c2[50],_mm_xor_si64(c2[14],_mm_xor_si64(c2[86],_mm_xor_si64(c2[40],_mm_xor_si64(c2[16],_mm_xor_si64(c2[18],_mm_xor_si64(c2[66],_mm_xor_si64(c2[42],_mm_xor_si64(c2[56],_mm_xor_si64(c2[44],_mm_xor_si64(c2[68],_mm_xor_si64(c2[58],_mm_xor_si64(c2[94],_mm_xor_si64(c2[60],_mm_xor_si64(c2[24],_mm_xor_si64(c2[12],_mm_xor_si64(c2[50],_mm_xor_si64(c2[74],_mm_xor_si64(c2[100],_mm_xor_si64(c2[100],_mm_xor_si64(c2[28],_mm_xor_si64(c2[66],_mm_xor_si64(c2[54],c2[102]))))))))))))))))))))))))));
+     d2[0]=simde_mm_xor_si64(c2[0],simde_mm_xor_si64(c2[72],simde_mm_xor_si64(c2[0],simde_mm_xor_si64(c2[50],simde_mm_xor_si64(c2[14],simde_mm_xor_si64(c2[86],simde_mm_xor_si64(c2[40],simde_mm_xor_si64(c2[16],simde_mm_xor_si64(c2[18],simde_mm_xor_si64(c2[66],simde_mm_xor_si64(c2[42],simde_mm_xor_si64(c2[56],simde_mm_xor_si64(c2[44],simde_mm_xor_si64(c2[68],simde_mm_xor_si64(c2[58],simde_mm_xor_si64(c2[94],simde_mm_xor_si64(c2[60],simde_mm_xor_si64(c2[24],simde_mm_xor_si64(c2[12],simde_mm_xor_si64(c2[50],simde_mm_xor_si64(c2[74],simde_mm_xor_si64(c2[100],simde_mm_xor_si64(c2[100],simde_mm_xor_si64(c2[28],simde_mm_xor_si64(c2[66],simde_mm_xor_si64(c2[54],c2[102]))))))))))))))))))))))))));
 
 //row: 1
-     d2[1]=_mm_xor_si64(c2[12],_mm_xor_si64(c2[0],_mm_xor_si64(c2[72],_mm_xor_si64(c2[0],_mm_xor_si64(c2[62],_mm_xor_si64(c2[50],_mm_xor_si64(c2[14],_mm_xor_si64(c2[86],_mm_xor_si64(c2[52],_mm_xor_si64(c2[40],_mm_xor_si64(c2[16],_mm_xor_si64(c2[30],_mm_xor_si64(c2[18],_mm_xor_si64(c2[66],_mm_xor_si64(c2[42],_mm_xor_si64(c2[56],_mm_xor_si64(c2[44],_mm_xor_si64(c2[68],_mm_xor_si64(c2[58],_mm_xor_si64(c2[94],_mm_xor_si64(c2[72],_mm_xor_si64(c2[60],_mm_xor_si64(c2[24],_mm_xor_si64(c2[12],_mm_xor_si64(c2[50],_mm_xor_si64(c2[74],_mm_xor_si64(c2[100],_mm_xor_si64(c2[100],_mm_xor_si64(c2[28],_mm_xor_si64(c2[78],_mm_xor_si64(c2[66],_mm_xor_si64(c2[54],c2[102]))))))))))))))))))))))))))))))));
+     d2[1]=simde_mm_xor_si64(c2[12],simde_mm_xor_si64(c2[0],simde_mm_xor_si64(c2[72],simde_mm_xor_si64(c2[0],simde_mm_xor_si64(c2[62],simde_mm_xor_si64(c2[50],simde_mm_xor_si64(c2[14],simde_mm_xor_si64(c2[86],simde_mm_xor_si64(c2[52],simde_mm_xor_si64(c2[40],simde_mm_xor_si64(c2[16],simde_mm_xor_si64(c2[30],simde_mm_xor_si64(c2[18],simde_mm_xor_si64(c2[66],simde_mm_xor_si64(c2[42],simde_mm_xor_si64(c2[56],simde_mm_xor_si64(c2[44],simde_mm_xor_si64(c2[68],simde_mm_xor_si64(c2[58],simde_mm_xor_si64(c2[94],simde_mm_xor_si64(c2[72],simde_mm_xor_si64(c2[60],simde_mm_xor_si64(c2[24],simde_mm_xor_si64(c2[12],simde_mm_xor_si64(c2[50],simde_mm_xor_si64(c2[74],simde_mm_xor_si64(c2[100],simde_mm_xor_si64(c2[100],simde_mm_xor_si64(c2[28],simde_mm_xor_si64(c2[78],simde_mm_xor_si64(c2[66],simde_mm_xor_si64(c2[54],c2[102]))))))))))))))))))))))))))))))));
 
 //row: 2
-     d2[2]=_mm_xor_si64(c2[12],_mm_xor_si64(c2[0],_mm_xor_si64(c2[84],_mm_xor_si64(c2[72],_mm_xor_si64(c2[0],_mm_xor_si64(c2[62],_mm_xor_si64(c2[50],_mm_xor_si64(c2[14],_mm_xor_si64(c2[86],_mm_xor_si64(c2[52],_mm_xor_si64(c2[40],_mm_xor_si64(c2[16],_mm_xor_si64(c2[30],_mm_xor_si64(c2[18],_mm_xor_si64(c2[78],_mm_xor_si64(c2[66],_mm_xor_si64(c2[42],_mm_xor_si64(c2[68],_mm_xor_si64(c2[56],_mm_xor_si64(c2[44],_mm_xor_si64(c2[68],_mm_xor_si64(c2[70],_mm_xor_si64(c2[58],_mm_xor_si64(c2[94],_mm_xor_si64(c2[72],_mm_xor_si64(c2[60],_mm_xor_si64(c2[36],_mm_xor_si64(c2[24],_mm_xor_si64(c2[12],_mm_xor_si64(c2[62],_mm_xor_si64(c2[50],_mm_xor_si64(c2[74],_mm_xor_si64(c2[16],_mm_xor_si64(c2[100],_mm_xor_si64(c2[100],_mm_xor_si64(c2[28],_mm_xor_si64(c2[78],_mm_xor_si64(c2[66],_mm_xor_si64(c2[66],_mm_xor_si64(c2[54],c2[102]))))))))))))))))))))))))))))))))))))))));
+     d2[2]=simde_mm_xor_si64(c2[12],simde_mm_xor_si64(c2[0],simde_mm_xor_si64(c2[84],simde_mm_xor_si64(c2[72],simde_mm_xor_si64(c2[0],simde_mm_xor_si64(c2[62],simde_mm_xor_si64(c2[50],simde_mm_xor_si64(c2[14],simde_mm_xor_si64(c2[86],simde_mm_xor_si64(c2[52],simde_mm_xor_si64(c2[40],simde_mm_xor_si64(c2[16],simde_mm_xor_si64(c2[30],simde_mm_xor_si64(c2[18],simde_mm_xor_si64(c2[78],simde_mm_xor_si64(c2[66],simde_mm_xor_si64(c2[42],simde_mm_xor_si64(c2[68],simde_mm_xor_si64(c2[56],simde_mm_xor_si64(c2[44],simde_mm_xor_si64(c2[68],simde_mm_xor_si64(c2[70],simde_mm_xor_si64(c2[58],simde_mm_xor_si64(c2[94],simde_mm_xor_si64(c2[72],simde_mm_xor_si64(c2[60],simde_mm_xor_si64(c2[36],simde_mm_xor_si64(c2[24],simde_mm_xor_si64(c2[12],simde_mm_xor_si64(c2[62],simde_mm_xor_si64(c2[50],simde_mm_xor_si64(c2[74],simde_mm_xor_si64(c2[16],simde_mm_xor_si64(c2[100],simde_mm_xor_si64(c2[100],simde_mm_xor_si64(c2[28],simde_mm_xor_si64(c2[78],simde_mm_xor_si64(c2[66],simde_mm_xor_si64(c2[66],simde_mm_xor_si64(c2[54],c2[102]))))))))))))))))))))))))))))))))))))))));
 
 //row: 3
-     d2[3]=_mm_xor_si64(c2[0],_mm_xor_si64(c2[72],_mm_xor_si64(c2[0],_mm_xor_si64(c2[50],_mm_xor_si64(c2[14],_mm_xor_si64(c2[2],_mm_xor_si64(c2[86],_mm_xor_si64(c2[40],_mm_xor_si64(c2[28],_mm_xor_si64(c2[16],_mm_xor_si64(c2[18],_mm_xor_si64(c2[66],_mm_xor_si64(c2[42],_mm_xor_si64(c2[56],_mm_xor_si64(c2[44],_mm_xor_si64(c2[80],_mm_xor_si64(c2[68],_mm_xor_si64(c2[58],_mm_xor_si64(c2[10],_mm_xor_si64(c2[94],_mm_xor_si64(c2[60],_mm_xor_si64(c2[24],_mm_xor_si64(c2[24],_mm_xor_si64(c2[12],_mm_xor_si64(c2[50],_mm_xor_si64(c2[86],_mm_xor_si64(c2[74],_mm_xor_si64(c2[100],_mm_xor_si64(c2[100],_mm_xor_si64(c2[40],_mm_xor_si64(c2[28],_mm_xor_si64(c2[66],_mm_xor_si64(c2[54],_mm_xor_si64(c2[18],c2[102]))))))))))))))))))))))))))))))))));
+     d2[3]=simde_mm_xor_si64(c2[0],simde_mm_xor_si64(c2[72],simde_mm_xor_si64(c2[0],simde_mm_xor_si64(c2[50],simde_mm_xor_si64(c2[14],simde_mm_xor_si64(c2[2],simde_mm_xor_si64(c2[86],simde_mm_xor_si64(c2[40],simde_mm_xor_si64(c2[28],simde_mm_xor_si64(c2[16],simde_mm_xor_si64(c2[18],simde_mm_xor_si64(c2[66],simde_mm_xor_si64(c2[42],simde_mm_xor_si64(c2[56],simde_mm_xor_si64(c2[44],simde_mm_xor_si64(c2[80],simde_mm_xor_si64(c2[68],simde_mm_xor_si64(c2[58],simde_mm_xor_si64(c2[10],simde_mm_xor_si64(c2[94],simde_mm_xor_si64(c2[60],simde_mm_xor_si64(c2[24],simde_mm_xor_si64(c2[24],simde_mm_xor_si64(c2[12],simde_mm_xor_si64(c2[50],simde_mm_xor_si64(c2[86],simde_mm_xor_si64(c2[74],simde_mm_xor_si64(c2[100],simde_mm_xor_si64(c2[100],simde_mm_xor_si64(c2[40],simde_mm_xor_si64(c2[28],simde_mm_xor_si64(c2[66],simde_mm_xor_si64(c2[54],simde_mm_xor_si64(c2[18],c2[102]))))))))))))))))))))))))))))))))));
 
 //row: 4
-     d2[4]=_mm_xor_si64(c2[0],_mm_xor_si64(c2[84],_mm_xor_si64(c2[60],_mm_xor_si64(c2[84],_mm_xor_si64(c2[36],_mm_xor_si64(c2[50],_mm_xor_si64(c2[38],_mm_xor_si64(c2[2],_mm_xor_si64(c2[74],_mm_xor_si64(c2[74],_mm_xor_si64(c2[40],_mm_xor_si64(c2[28],_mm_xor_si64(c2[4],_mm_xor_si64(c2[18],_mm_xor_si64(c2[6],_mm_xor_si64(c2[54],_mm_xor_si64(c2[30],_mm_xor_si64(c2[44],_mm_xor_si64(c2[32],_mm_xor_si64(c2[56],_mm_xor_si64(c2[46],_mm_xor_si64(c2[82],_mm_xor_si64(c2[60],_mm_xor_si64(c2[48],_mm_xor_si64(c2[12],_mm_xor_si64(c2[96],_mm_xor_si64(c2[38],_mm_xor_si64(c2[62],_mm_xor_si64(c2[88],_mm_xor_si64(c2[88],_mm_xor_si64(c2[16],_mm_xor_si64(c2[66],_mm_xor_si64(c2[54],_mm_xor_si64(c2[42],c2[90]))))))))))))))))))))))))))))))))));
+     d2[4]=simde_mm_xor_si64(c2[0],simde_mm_xor_si64(c2[84],simde_mm_xor_si64(c2[60],simde_mm_xor_si64(c2[84],simde_mm_xor_si64(c2[36],simde_mm_xor_si64(c2[50],simde_mm_xor_si64(c2[38],simde_mm_xor_si64(c2[2],simde_mm_xor_si64(c2[74],simde_mm_xor_si64(c2[74],simde_mm_xor_si64(c2[40],simde_mm_xor_si64(c2[28],simde_mm_xor_si64(c2[4],simde_mm_xor_si64(c2[18],simde_mm_xor_si64(c2[6],simde_mm_xor_si64(c2[54],simde_mm_xor_si64(c2[30],simde_mm_xor_si64(c2[44],simde_mm_xor_si64(c2[32],simde_mm_xor_si64(c2[56],simde_mm_xor_si64(c2[46],simde_mm_xor_si64(c2[82],simde_mm_xor_si64(c2[60],simde_mm_xor_si64(c2[48],simde_mm_xor_si64(c2[12],simde_mm_xor_si64(c2[96],simde_mm_xor_si64(c2[38],simde_mm_xor_si64(c2[62],simde_mm_xor_si64(c2[88],simde_mm_xor_si64(c2[88],simde_mm_xor_si64(c2[16],simde_mm_xor_si64(c2[66],simde_mm_xor_si64(c2[54],simde_mm_xor_si64(c2[42],c2[90]))))))))))))))))))))))))))))))))));
 
 //row: 5
-     d2[5]=_mm_xor_si64(c2[0],_mm_xor_si64(c2[84],_mm_xor_si64(c2[60],_mm_xor_si64(c2[84],_mm_xor_si64(c2[84],_mm_xor_si64(c2[50],_mm_xor_si64(c2[38],_mm_xor_si64(c2[2],_mm_xor_si64(c2[74],_mm_xor_si64(c2[14],_mm_xor_si64(c2[40],_mm_xor_si64(c2[28],_mm_xor_si64(c2[4],_mm_xor_si64(c2[18],_mm_xor_si64(c2[6],_mm_xor_si64(c2[54],_mm_xor_si64(c2[30],_mm_xor_si64(c2[44],_mm_xor_si64(c2[32],_mm_xor_si64(c2[56],_mm_xor_si64(c2[46],_mm_xor_si64(c2[82],_mm_xor_si64(c2[34],_mm_xor_si64(c2[60],_mm_xor_si64(c2[48],_mm_xor_si64(c2[12],_mm_xor_si64(c2[96],_mm_xor_si64(c2[38],_mm_xor_si64(c2[62],_mm_xor_si64(c2[98],_mm_xor_si64(c2[88],_mm_xor_si64(c2[88],_mm_xor_si64(c2[16],_mm_xor_si64(c2[66],_mm_xor_si64(c2[54],_mm_xor_si64(c2[42],c2[90]))))))))))))))))))))))))))))))))))));
+     d2[5]=simde_mm_xor_si64(c2[0],simde_mm_xor_si64(c2[84],simde_mm_xor_si64(c2[60],simde_mm_xor_si64(c2[84],simde_mm_xor_si64(c2[84],simde_mm_xor_si64(c2[50],simde_mm_xor_si64(c2[38],simde_mm_xor_si64(c2[2],simde_mm_xor_si64(c2[74],simde_mm_xor_si64(c2[14],simde_mm_xor_si64(c2[40],simde_mm_xor_si64(c2[28],simde_mm_xor_si64(c2[4],simde_mm_xor_si64(c2[18],simde_mm_xor_si64(c2[6],simde_mm_xor_si64(c2[54],simde_mm_xor_si64(c2[30],simde_mm_xor_si64(c2[44],simde_mm_xor_si64(c2[32],simde_mm_xor_si64(c2[56],simde_mm_xor_si64(c2[46],simde_mm_xor_si64(c2[82],simde_mm_xor_si64(c2[34],simde_mm_xor_si64(c2[60],simde_mm_xor_si64(c2[48],simde_mm_xor_si64(c2[12],simde_mm_xor_si64(c2[96],simde_mm_xor_si64(c2[38],simde_mm_xor_si64(c2[62],simde_mm_xor_si64(c2[98],simde_mm_xor_si64(c2[88],simde_mm_xor_si64(c2[88],simde_mm_xor_si64(c2[16],simde_mm_xor_si64(c2[66],simde_mm_xor_si64(c2[54],simde_mm_xor_si64(c2[42],c2[90]))))))))))))))))))))))))))))))))))));
 
 //row: 6
-     d2[6]=_mm_xor_si64(c2[84],_mm_xor_si64(c2[72],_mm_xor_si64(c2[48],_mm_xor_si64(c2[72],_mm_xor_si64(c2[36],_mm_xor_si64(c2[38],_mm_xor_si64(c2[26],_mm_xor_si64(c2[86],_mm_xor_si64(c2[62],_mm_xor_si64(c2[28],_mm_xor_si64(c2[16],_mm_xor_si64(c2[88],_mm_xor_si64(c2[6],_mm_xor_si64(c2[90],_mm_xor_si64(c2[42],_mm_xor_si64(c2[18],_mm_xor_si64(c2[32],_mm_xor_si64(c2[20],_mm_xor_si64(c2[44],_mm_xor_si64(c2[34],_mm_xor_si64(c2[70],_mm_xor_si64(c2[58],_mm_xor_si64(c2[48],_mm_xor_si64(c2[36],_mm_xor_si64(c2[96],_mm_xor_si64(c2[84],_mm_xor_si64(c2[26],_mm_xor_si64(c2[50],_mm_xor_si64(c2[74],_mm_xor_si64(c2[76],_mm_xor_si64(c2[76],_mm_xor_si64(c2[100],_mm_xor_si64(c2[54],_mm_xor_si64(c2[42],_mm_xor_si64(c2[30],_mm_xor_si64(c2[78],c2[66]))))))))))))))))))))))))))))))))))));
+     d2[6]=simde_mm_xor_si64(c2[84],simde_mm_xor_si64(c2[72],simde_mm_xor_si64(c2[48],simde_mm_xor_si64(c2[72],simde_mm_xor_si64(c2[36],simde_mm_xor_si64(c2[38],simde_mm_xor_si64(c2[26],simde_mm_xor_si64(c2[86],simde_mm_xor_si64(c2[62],simde_mm_xor_si64(c2[28],simde_mm_xor_si64(c2[16],simde_mm_xor_si64(c2[88],simde_mm_xor_si64(c2[6],simde_mm_xor_si64(c2[90],simde_mm_xor_si64(c2[42],simde_mm_xor_si64(c2[18],simde_mm_xor_si64(c2[32],simde_mm_xor_si64(c2[20],simde_mm_xor_si64(c2[44],simde_mm_xor_si64(c2[34],simde_mm_xor_si64(c2[70],simde_mm_xor_si64(c2[58],simde_mm_xor_si64(c2[48],simde_mm_xor_si64(c2[36],simde_mm_xor_si64(c2[96],simde_mm_xor_si64(c2[84],simde_mm_xor_si64(c2[26],simde_mm_xor_si64(c2[50],simde_mm_xor_si64(c2[74],simde_mm_xor_si64(c2[76],simde_mm_xor_si64(c2[76],simde_mm_xor_si64(c2[100],simde_mm_xor_si64(c2[54],simde_mm_xor_si64(c2[42],simde_mm_xor_si64(c2[30],simde_mm_xor_si64(c2[78],c2[66]))))))))))))))))))))))))))))))))))));
 
 //row: 7
-     d2[7]=_mm_xor_si64(c2[48],_mm_xor_si64(c2[36],_mm_xor_si64(c2[48],_mm_xor_si64(c2[12],_mm_xor_si64(c2[24],_mm_xor_si64(c2[36],_mm_xor_si64(c2[48],_mm_xor_si64(c2[2],_mm_xor_si64(c2[86],_mm_xor_si64(c2[2],_mm_xor_si64(c2[50],_mm_xor_si64(c2[62],_mm_xor_si64(c2[50],_mm_xor_si64(c2[26],_mm_xor_si64(c2[38],_mm_xor_si64(c2[14],_mm_xor_si64(c2[88],_mm_xor_si64(c2[76],_mm_xor_si64(c2[88],_mm_xor_si64(c2[76],_mm_xor_si64(c2[52],_mm_xor_si64(c2[64],_mm_xor_si64(c2[66],_mm_xor_si64(c2[54],_mm_xor_si64(c2[66],_mm_xor_si64(c2[6],_mm_xor_si64(c2[18],_mm_xor_si64(c2[78],_mm_xor_si64(c2[90],_mm_xor_si64(c2[92],_mm_xor_si64(c2[8],_mm_xor_si64(c2[80],_mm_xor_si64(c2[92],_mm_xor_si64(c2[32],_mm_xor_si64(c2[8],_mm_xor_si64(c2[20],_mm_xor_si64(c2[94],_mm_xor_si64(c2[10],_mm_xor_si64(c2[58],_mm_xor_si64(c2[34],_mm_xor_si64(c2[46],_mm_xor_si64(c2[46],_mm_xor_si64(c2[12],_mm_xor_si64(c2[96],_mm_xor_si64(c2[12],_mm_xor_si64(c2[60],_mm_xor_si64(c2[72],_mm_xor_si64(c2[72],_mm_xor_si64(c2[48],_mm_xor_si64(c2[60],_mm_xor_si64(c2[86],_mm_xor_si64(c2[98],_mm_xor_si64(c2[38],_mm_xor_si64(c2[14],_mm_xor_si64(c2[26],_mm_xor_si64(c2[62],_mm_xor_si64(c2[40],_mm_xor_si64(c2[52],_mm_xor_si64(c2[40],_mm_xor_si64(c2[52],_mm_xor_si64(c2[88],_mm_xor_si64(c2[64],_mm_xor_si64(c2[76],_mm_xor_si64(c2[18],_mm_xor_si64(c2[102],_mm_xor_si64(c2[18],_mm_xor_si64(c2[90],_mm_xor_si64(c2[102],_mm_xor_si64(c2[66],_mm_xor_si64(c2[42],c2[54]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+     d2[7]=simde_mm_xor_si64(c2[48],simde_mm_xor_si64(c2[36],simde_mm_xor_si64(c2[48],simde_mm_xor_si64(c2[12],simde_mm_xor_si64(c2[24],simde_mm_xor_si64(c2[36],simde_mm_xor_si64(c2[48],simde_mm_xor_si64(c2[2],simde_mm_xor_si64(c2[86],simde_mm_xor_si64(c2[2],simde_mm_xor_si64(c2[50],simde_mm_xor_si64(c2[62],simde_mm_xor_si64(c2[50],simde_mm_xor_si64(c2[26],simde_mm_xor_si64(c2[38],simde_mm_xor_si64(c2[14],simde_mm_xor_si64(c2[88],simde_mm_xor_si64(c2[76],simde_mm_xor_si64(c2[88],simde_mm_xor_si64(c2[76],simde_mm_xor_si64(c2[52],simde_mm_xor_si64(c2[64],simde_mm_xor_si64(c2[66],simde_mm_xor_si64(c2[54],simde_mm_xor_si64(c2[66],simde_mm_xor_si64(c2[6],simde_mm_xor_si64(c2[18],simde_mm_xor_si64(c2[78],simde_mm_xor_si64(c2[90],simde_mm_xor_si64(c2[92],simde_mm_xor_si64(c2[8],simde_mm_xor_si64(c2[80],simde_mm_xor_si64(c2[92],simde_mm_xor_si64(c2[32],simde_mm_xor_si64(c2[8],simde_mm_xor_si64(c2[20],simde_mm_xor_si64(c2[94],simde_mm_xor_si64(c2[10],simde_mm_xor_si64(c2[58],simde_mm_xor_si64(c2[34],simde_mm_xor_si64(c2[46],simde_mm_xor_si64(c2[46],simde_mm_xor_si64(c2[12],simde_mm_xor_si64(c2[96],simde_mm_xor_si64(c2[12],simde_mm_xor_si64(c2[60],simde_mm_xor_si64(c2[72],simde_mm_xor_si64(c2[72],simde_mm_xor_si64(c2[48],simde_mm_xor_si64(c2[60],simde_mm_xor_si64(c2[86],simde_mm_xor_si64(c2[98],simde_mm_xor_si64(c2[38],simde_mm_xor_si64(c2[14],simde_mm_xor_si64(c2[26],simde_mm_xor_si64(c2[62],simde_mm_xor_si64(c2[40],simde_mm_xor_si64(c2[52],simde_mm_xor_si64(c2[40],simde_mm_xor_si64(c2[52],simde_mm_xor_si64(c2[88],simde_mm_xor_si64(c2[64],simde_mm_xor_si64(c2[76],simde_mm_xor_si64(c2[18],simde_mm_xor_si64(c2[102],simde_mm_xor_si64(c2[18],simde_mm_xor_si64(c2[90],simde_mm_xor_si64(c2[102],simde_mm_xor_si64(c2[66],simde_mm_xor_si64(c2[42],c2[54]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
 
 //row: 8
-     d2[8]=_mm_xor_si64(c2[84],_mm_xor_si64(c2[72],_mm_xor_si64(c2[60],_mm_xor_si64(c2[48],_mm_xor_si64(c2[72],_mm_xor_si64(c2[72],_mm_xor_si64(c2[38],_mm_xor_si64(c2[26],_mm_xor_si64(c2[86],_mm_xor_si64(c2[62],_mm_xor_si64(c2[74],_mm_xor_si64(c2[28],_mm_xor_si64(c2[16],_mm_xor_si64(c2[88],_mm_xor_si64(c2[6],_mm_xor_si64(c2[90],_mm_xor_si64(c2[54],_mm_xor_si64(c2[42],_mm_xor_si64(c2[18],_mm_xor_si64(c2[44],_mm_xor_si64(c2[32],_mm_xor_si64(c2[20],_mm_xor_si64(c2[44],_mm_xor_si64(c2[46],_mm_xor_si64(c2[34],_mm_xor_si64(c2[70],_mm_xor_si64(c2[48],_mm_xor_si64(c2[36],_mm_xor_si64(c2[12],_mm_xor_si64(c2[96],_mm_xor_si64(c2[84],_mm_xor_si64(c2[38],_mm_xor_si64(c2[26],_mm_xor_si64(c2[50],_mm_xor_si64(c2[88],_mm_xor_si64(c2[76],_mm_xor_si64(c2[76],_mm_xor_si64(c2[100],_mm_xor_si64(c2[54],_mm_xor_si64(c2[42],_mm_xor_si64(c2[42],_mm_xor_si64(c2[30],c2[78]))))))))))))))))))))))))))))))))))))))))));
+     d2[8]=simde_mm_xor_si64(c2[84],simde_mm_xor_si64(c2[72],simde_mm_xor_si64(c2[60],simde_mm_xor_si64(c2[48],simde_mm_xor_si64(c2[72],simde_mm_xor_si64(c2[72],simde_mm_xor_si64(c2[38],simde_mm_xor_si64(c2[26],simde_mm_xor_si64(c2[86],simde_mm_xor_si64(c2[62],simde_mm_xor_si64(c2[74],simde_mm_xor_si64(c2[28],simde_mm_xor_si64(c2[16],simde_mm_xor_si64(c2[88],simde_mm_xor_si64(c2[6],simde_mm_xor_si64(c2[90],simde_mm_xor_si64(c2[54],simde_mm_xor_si64(c2[42],simde_mm_xor_si64(c2[18],simde_mm_xor_si64(c2[44],simde_mm_xor_si64(c2[32],simde_mm_xor_si64(c2[20],simde_mm_xor_si64(c2[44],simde_mm_xor_si64(c2[46],simde_mm_xor_si64(c2[34],simde_mm_xor_si64(c2[70],simde_mm_xor_si64(c2[48],simde_mm_xor_si64(c2[36],simde_mm_xor_si64(c2[12],simde_mm_xor_si64(c2[96],simde_mm_xor_si64(c2[84],simde_mm_xor_si64(c2[38],simde_mm_xor_si64(c2[26],simde_mm_xor_si64(c2[50],simde_mm_xor_si64(c2[88],simde_mm_xor_si64(c2[76],simde_mm_xor_si64(c2[76],simde_mm_xor_si64(c2[100],simde_mm_xor_si64(c2[54],simde_mm_xor_si64(c2[42],simde_mm_xor_si64(c2[42],simde_mm_xor_si64(c2[30],c2[78]))))))))))))))))))))))))))))))))))))))))));
 
 //row: 9
-     d2[9]=_mm_xor_si64(c2[0],_mm_xor_si64(c2[60],_mm_xor_si64(c2[84],_mm_xor_si64(c2[36],_mm_xor_si64(c2[60],_mm_xor_si64(c2[60],_mm_xor_si64(c2[84],_mm_xor_si64(c2[50],_mm_xor_si64(c2[14],_mm_xor_si64(c2[38],_mm_xor_si64(c2[74],_mm_xor_si64(c2[2],_mm_xor_si64(c2[50],_mm_xor_si64(c2[74],_mm_xor_si64(c2[38],_mm_xor_si64(c2[40],_mm_xor_si64(c2[4],_mm_xor_si64(c2[28],_mm_xor_si64(c2[76],_mm_xor_si64(c2[4],_mm_xor_si64(c2[18],_mm_xor_si64(c2[78],_mm_xor_si64(c2[6],_mm_xor_si64(c2[30],_mm_xor_si64(c2[54],_mm_xor_si64(c2[6],_mm_xor_si64(c2[30],_mm_xor_si64(c2[20],_mm_xor_si64(c2[44],_mm_xor_si64(c2[8],_mm_xor_si64(c2[32],_mm_xor_si64(c2[32],_mm_xor_si64(c2[56],_mm_xor_si64(c2[22],_mm_xor_si64(c2[46],_mm_xor_si64(c2[58],_mm_xor_si64(c2[82],_mm_xor_si64(c2[60],_mm_xor_si64(c2[24],_mm_xor_si64(c2[48],_mm_xor_si64(c2[84],_mm_xor_si64(c2[12],_mm_xor_si64(c2[72],_mm_xor_si64(c2[96],_mm_xor_si64(c2[14],_mm_xor_si64(c2[38],_mm_xor_si64(c2[38],_mm_xor_si64(c2[62],_mm_xor_si64(c2[64],_mm_xor_si64(c2[88],_mm_xor_si64(c2[64],_mm_xor_si64(c2[88],_mm_xor_si64(c2[88],_mm_xor_si64(c2[16],_mm_xor_si64(c2[76],_mm_xor_si64(c2[66],_mm_xor_si64(c2[30],_mm_xor_si64(c2[54],_mm_xor_si64(c2[18],_mm_xor_si64(c2[42],_mm_xor_si64(c2[66],c2[90])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+     d2[9]=simde_mm_xor_si64(c2[0],simde_mm_xor_si64(c2[60],simde_mm_xor_si64(c2[84],simde_mm_xor_si64(c2[36],simde_mm_xor_si64(c2[60],simde_mm_xor_si64(c2[60],simde_mm_xor_si64(c2[84],simde_mm_xor_si64(c2[50],simde_mm_xor_si64(c2[14],simde_mm_xor_si64(c2[38],simde_mm_xor_si64(c2[74],simde_mm_xor_si64(c2[2],simde_mm_xor_si64(c2[50],simde_mm_xor_si64(c2[74],simde_mm_xor_si64(c2[38],simde_mm_xor_si64(c2[40],simde_mm_xor_si64(c2[4],simde_mm_xor_si64(c2[28],simde_mm_xor_si64(c2[76],simde_mm_xor_si64(c2[4],simde_mm_xor_si64(c2[18],simde_mm_xor_si64(c2[78],simde_mm_xor_si64(c2[6],simde_mm_xor_si64(c2[30],simde_mm_xor_si64(c2[54],simde_mm_xor_si64(c2[6],simde_mm_xor_si64(c2[30],simde_mm_xor_si64(c2[20],simde_mm_xor_si64(c2[44],simde_mm_xor_si64(c2[8],simde_mm_xor_si64(c2[32],simde_mm_xor_si64(c2[32],simde_mm_xor_si64(c2[56],simde_mm_xor_si64(c2[22],simde_mm_xor_si64(c2[46],simde_mm_xor_si64(c2[58],simde_mm_xor_si64(c2[82],simde_mm_xor_si64(c2[60],simde_mm_xor_si64(c2[24],simde_mm_xor_si64(c2[48],simde_mm_xor_si64(c2[84],simde_mm_xor_si64(c2[12],simde_mm_xor_si64(c2[72],simde_mm_xor_si64(c2[96],simde_mm_xor_si64(c2[14],simde_mm_xor_si64(c2[38],simde_mm_xor_si64(c2[38],simde_mm_xor_si64(c2[62],simde_mm_xor_si64(c2[64],simde_mm_xor_si64(c2[88],simde_mm_xor_si64(c2[64],simde_mm_xor_si64(c2[88],simde_mm_xor_si64(c2[88],simde_mm_xor_si64(c2[16],simde_mm_xor_si64(c2[76],simde_mm_xor_si64(c2[66],simde_mm_xor_si64(c2[30],simde_mm_xor_si64(c2[54],simde_mm_xor_si64(c2[18],simde_mm_xor_si64(c2[42],simde_mm_xor_si64(c2[66],c2[90])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
 
 //row: 10
-     d2[10]=_mm_xor_si64(c2[36],_mm_xor_si64(c2[14],_mm_xor_si64(c2[12],c2[74])));
+     d2[10]=simde_mm_xor_si64(c2[36],simde_mm_xor_si64(c2[14],simde_mm_xor_si64(c2[12],c2[74])));
 
 //row: 11
-     d2[11]=_mm_xor_si64(c2[0],_mm_xor_si64(c2[72],_mm_xor_si64(c2[0],_mm_xor_si64(c2[36],_mm_xor_si64(c2[50],_mm_xor_si64(c2[14],_mm_xor_si64(c2[2],_mm_xor_si64(c2[86],_mm_xor_si64(c2[40],_mm_xor_si64(c2[28],_mm_xor_si64(c2[16],_mm_xor_si64(c2[18],_mm_xor_si64(c2[66],_mm_xor_si64(c2[42],_mm_xor_si64(c2[56],_mm_xor_si64(c2[44],_mm_xor_si64(c2[80],_mm_xor_si64(c2[68],_mm_xor_si64(c2[58],_mm_xor_si64(c2[10],_mm_xor_si64(c2[94],_mm_xor_si64(c2[60],_mm_xor_si64(c2[24],_mm_xor_si64(c2[24],_mm_xor_si64(c2[12],_mm_xor_si64(c2[50],_mm_xor_si64(c2[86],_mm_xor_si64(c2[74],_mm_xor_si64(c2[62],_mm_xor_si64(c2[100],_mm_xor_si64(c2[100],_mm_xor_si64(c2[40],_mm_xor_si64(c2[28],_mm_xor_si64(c2[66],_mm_xor_si64(c2[54],_mm_xor_si64(c2[18],_mm_xor_si64(c2[102],c2[42])))))))))))))))))))))))))))))))))))));
+     d2[11]=simde_mm_xor_si64(c2[0],simde_mm_xor_si64(c2[72],simde_mm_xor_si64(c2[0],simde_mm_xor_si64(c2[36],simde_mm_xor_si64(c2[50],simde_mm_xor_si64(c2[14],simde_mm_xor_si64(c2[2],simde_mm_xor_si64(c2[86],simde_mm_xor_si64(c2[40],simde_mm_xor_si64(c2[28],simde_mm_xor_si64(c2[16],simde_mm_xor_si64(c2[18],simde_mm_xor_si64(c2[66],simde_mm_xor_si64(c2[42],simde_mm_xor_si64(c2[56],simde_mm_xor_si64(c2[44],simde_mm_xor_si64(c2[80],simde_mm_xor_si64(c2[68],simde_mm_xor_si64(c2[58],simde_mm_xor_si64(c2[10],simde_mm_xor_si64(c2[94],simde_mm_xor_si64(c2[60],simde_mm_xor_si64(c2[24],simde_mm_xor_si64(c2[24],simde_mm_xor_si64(c2[12],simde_mm_xor_si64(c2[50],simde_mm_xor_si64(c2[86],simde_mm_xor_si64(c2[74],simde_mm_xor_si64(c2[62],simde_mm_xor_si64(c2[100],simde_mm_xor_si64(c2[100],simde_mm_xor_si64(c2[40],simde_mm_xor_si64(c2[28],simde_mm_xor_si64(c2[66],simde_mm_xor_si64(c2[54],simde_mm_xor_si64(c2[18],simde_mm_xor_si64(c2[102],c2[42])))))))))))))))))))))))))))))))))))));
 
 //row: 12
-     d2[12]=_mm_xor_si64(c2[84],_mm_xor_si64(c2[72],_mm_xor_si64(c2[48],_mm_xor_si64(c2[72],_mm_xor_si64(c2[38],_mm_xor_si64(c2[26],_mm_xor_si64(c2[86],_mm_xor_si64(c2[62],_mm_xor_si64(c2[86],_mm_xor_si64(c2[28],_mm_xor_si64(c2[16],_mm_xor_si64(c2[88],_mm_xor_si64(c2[6],_mm_xor_si64(c2[90],_mm_xor_si64(c2[42],_mm_xor_si64(c2[18],_mm_xor_si64(c2[90],_mm_xor_si64(c2[32],_mm_xor_si64(c2[20],_mm_xor_si64(c2[44],_mm_xor_si64(c2[34],_mm_xor_si64(c2[70],_mm_xor_si64(c2[48],_mm_xor_si64(c2[36],_mm_xor_si64(c2[96],_mm_xor_si64(c2[84],_mm_xor_si64(c2[26],_mm_xor_si64(c2[50],_mm_xor_si64(c2[76],_mm_xor_si64(c2[76],_mm_xor_si64(c2[100],_mm_xor_si64(c2[54],_mm_xor_si64(c2[42],_mm_xor_si64(c2[30],c2[78]))))))))))))))))))))))))))))))))));
+     d2[12]=simde_mm_xor_si64(c2[84],simde_mm_xor_si64(c2[72],simde_mm_xor_si64(c2[48],simde_mm_xor_si64(c2[72],simde_mm_xor_si64(c2[38],simde_mm_xor_si64(c2[26],simde_mm_xor_si64(c2[86],simde_mm_xor_si64(c2[62],simde_mm_xor_si64(c2[86],simde_mm_xor_si64(c2[28],simde_mm_xor_si64(c2[16],simde_mm_xor_si64(c2[88],simde_mm_xor_si64(c2[6],simde_mm_xor_si64(c2[90],simde_mm_xor_si64(c2[42],simde_mm_xor_si64(c2[18],simde_mm_xor_si64(c2[90],simde_mm_xor_si64(c2[32],simde_mm_xor_si64(c2[20],simde_mm_xor_si64(c2[44],simde_mm_xor_si64(c2[34],simde_mm_xor_si64(c2[70],simde_mm_xor_si64(c2[48],simde_mm_xor_si64(c2[36],simde_mm_xor_si64(c2[96],simde_mm_xor_si64(c2[84],simde_mm_xor_si64(c2[26],simde_mm_xor_si64(c2[50],simde_mm_xor_si64(c2[76],simde_mm_xor_si64(c2[76],simde_mm_xor_si64(c2[100],simde_mm_xor_si64(c2[54],simde_mm_xor_si64(c2[42],simde_mm_xor_si64(c2[30],c2[78]))))))))))))))))))))))))))))))))));
 
 //row: 13
-     d2[13]=_mm_xor_si64(c2[72],_mm_xor_si64(c2[48],_mm_xor_si64(c2[72],_mm_xor_si64(c2[36],_mm_xor_si64(c2[26],_mm_xor_si64(c2[86],_mm_xor_si64(c2[74],_mm_xor_si64(c2[62],_mm_xor_si64(c2[26],_mm_xor_si64(c2[16],_mm_xor_si64(c2[4],_mm_xor_si64(c2[88],_mm_xor_si64(c2[90],_mm_xor_si64(c2[42],_mm_xor_si64(c2[18],_mm_xor_si64(c2[32],_mm_xor_si64(c2[20],_mm_xor_si64(c2[56],_mm_xor_si64(c2[44],_mm_xor_si64(c2[34],_mm_xor_si64(c2[82],_mm_xor_si64(c2[70],_mm_xor_si64(c2[36],_mm_xor_si64(c2[96],_mm_xor_si64(c2[96],_mm_xor_si64(c2[84],_mm_xor_si64(c2[26],_mm_xor_si64(c2[62],_mm_xor_si64(c2[50],_mm_xor_si64(c2[76],_mm_xor_si64(c2[76],_mm_xor_si64(c2[16],_mm_xor_si64(c2[100],_mm_xor_si64(c2[88],_mm_xor_si64(c2[42],_mm_xor_si64(c2[30],_mm_xor_si64(c2[90],c2[78])))))))))))))))))))))))))))))))))))));
+     d2[13]=simde_mm_xor_si64(c2[72],simde_mm_xor_si64(c2[48],simde_mm_xor_si64(c2[72],simde_mm_xor_si64(c2[36],simde_mm_xor_si64(c2[26],simde_mm_xor_si64(c2[86],simde_mm_xor_si64(c2[74],simde_mm_xor_si64(c2[62],simde_mm_xor_si64(c2[26],simde_mm_xor_si64(c2[16],simde_mm_xor_si64(c2[4],simde_mm_xor_si64(c2[88],simde_mm_xor_si64(c2[90],simde_mm_xor_si64(c2[42],simde_mm_xor_si64(c2[18],simde_mm_xor_si64(c2[32],simde_mm_xor_si64(c2[20],simde_mm_xor_si64(c2[56],simde_mm_xor_si64(c2[44],simde_mm_xor_si64(c2[34],simde_mm_xor_si64(c2[82],simde_mm_xor_si64(c2[70],simde_mm_xor_si64(c2[36],simde_mm_xor_si64(c2[96],simde_mm_xor_si64(c2[96],simde_mm_xor_si64(c2[84],simde_mm_xor_si64(c2[26],simde_mm_xor_si64(c2[62],simde_mm_xor_si64(c2[50],simde_mm_xor_si64(c2[76],simde_mm_xor_si64(c2[76],simde_mm_xor_si64(c2[16],simde_mm_xor_si64(c2[100],simde_mm_xor_si64(c2[88],simde_mm_xor_si64(c2[42],simde_mm_xor_si64(c2[30],simde_mm_xor_si64(c2[90],c2[78])))))))))))))))))))))))))))))))))))));
 
 //row: 14
-     d2[14]=_mm_xor_si64(c2[48],_mm_xor_si64(c2[36],_mm_xor_si64(c2[0],_mm_xor_si64(c2[12],_mm_xor_si64(c2[72],_mm_xor_si64(c2[36],_mm_xor_si64(c2[0],_mm_xor_si64(c2[2],_mm_xor_si64(c2[86],_mm_xor_si64(c2[50],_mm_xor_si64(c2[50],_mm_xor_si64(c2[14],_mm_xor_si64(c2[2],_mm_xor_si64(c2[26],_mm_xor_si64(c2[86],_mm_xor_si64(c2[38],_mm_xor_si64(c2[88],_mm_xor_si64(c2[76],_mm_xor_si64(c2[40],_mm_xor_si64(c2[28],_mm_xor_si64(c2[52],_mm_xor_si64(c2[16],_mm_xor_si64(c2[66],_mm_xor_si64(c2[54],_mm_xor_si64(c2[18],_mm_xor_si64(c2[6],_mm_xor_si64(c2[66],_mm_xor_si64(c2[78],_mm_xor_si64(c2[42],_mm_xor_si64(c2[92],_mm_xor_si64(c2[56],_mm_xor_si64(c2[80],_mm_xor_si64(c2[44],_mm_xor_si64(c2[80],_mm_xor_si64(c2[8],_mm_xor_si64(c2[68],_mm_xor_si64(c2[94],_mm_xor_si64(c2[58],_mm_xor_si64(c2[10],_mm_xor_si64(c2[34],_mm_xor_si64(c2[94],_mm_xor_si64(c2[12],_mm_xor_si64(c2[96],_mm_xor_si64(c2[60],_mm_xor_si64(c2[60],_mm_xor_si64(c2[24],_mm_xor_si64(c2[24],_mm_xor_si64(c2[48],_mm_xor_si64(c2[12],_mm_xor_si64(c2[24],_mm_xor_si64(c2[86],_mm_xor_si64(c2[50],_mm_xor_si64(c2[86],_mm_xor_si64(c2[14],_mm_xor_si64(c2[74],_mm_xor_si64(c2[40],_mm_xor_si64(c2[100],_mm_xor_si64(c2[40],_mm_xor_si64(c2[100],_mm_xor_si64(c2[40],_mm_xor_si64(c2[64],_mm_xor_si64(c2[28],_mm_xor_si64(c2[18],_mm_xor_si64(c2[102],_mm_xor_si64(c2[66],_mm_xor_si64(c2[90],_mm_xor_si64(c2[54],_mm_xor_si64(c2[18],_mm_xor_si64(c2[42],c2[102])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+     d2[14]=simde_mm_xor_si64(c2[48],simde_mm_xor_si64(c2[36],simde_mm_xor_si64(c2[0],simde_mm_xor_si64(c2[12],simde_mm_xor_si64(c2[72],simde_mm_xor_si64(c2[36],simde_mm_xor_si64(c2[0],simde_mm_xor_si64(c2[2],simde_mm_xor_si64(c2[86],simde_mm_xor_si64(c2[50],simde_mm_xor_si64(c2[50],simde_mm_xor_si64(c2[14],simde_mm_xor_si64(c2[2],simde_mm_xor_si64(c2[26],simde_mm_xor_si64(c2[86],simde_mm_xor_si64(c2[38],simde_mm_xor_si64(c2[88],simde_mm_xor_si64(c2[76],simde_mm_xor_si64(c2[40],simde_mm_xor_si64(c2[28],simde_mm_xor_si64(c2[52],simde_mm_xor_si64(c2[16],simde_mm_xor_si64(c2[66],simde_mm_xor_si64(c2[54],simde_mm_xor_si64(c2[18],simde_mm_xor_si64(c2[6],simde_mm_xor_si64(c2[66],simde_mm_xor_si64(c2[78],simde_mm_xor_si64(c2[42],simde_mm_xor_si64(c2[92],simde_mm_xor_si64(c2[56],simde_mm_xor_si64(c2[80],simde_mm_xor_si64(c2[44],simde_mm_xor_si64(c2[80],simde_mm_xor_si64(c2[8],simde_mm_xor_si64(c2[68],simde_mm_xor_si64(c2[94],simde_mm_xor_si64(c2[58],simde_mm_xor_si64(c2[10],simde_mm_xor_si64(c2[34],simde_mm_xor_si64(c2[94],simde_mm_xor_si64(c2[12],simde_mm_xor_si64(c2[96],simde_mm_xor_si64(c2[60],simde_mm_xor_si64(c2[60],simde_mm_xor_si64(c2[24],simde_mm_xor_si64(c2[24],simde_mm_xor_si64(c2[48],simde_mm_xor_si64(c2[12],simde_mm_xor_si64(c2[24],simde_mm_xor_si64(c2[86],simde_mm_xor_si64(c2[50],simde_mm_xor_si64(c2[86],simde_mm_xor_si64(c2[14],simde_mm_xor_si64(c2[74],simde_mm_xor_si64(c2[40],simde_mm_xor_si64(c2[100],simde_mm_xor_si64(c2[40],simde_mm_xor_si64(c2[100],simde_mm_xor_si64(c2[40],simde_mm_xor_si64(c2[64],simde_mm_xor_si64(c2[28],simde_mm_xor_si64(c2[18],simde_mm_xor_si64(c2[102],simde_mm_xor_si64(c2[66],simde_mm_xor_si64(c2[90],simde_mm_xor_si64(c2[54],simde_mm_xor_si64(c2[18],simde_mm_xor_si64(c2[42],c2[102])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
 
 //row: 15
-     d2[15]=_mm_xor_si64(c2[72],_mm_xor_si64(c2[84],_mm_xor_si64(c2[60],_mm_xor_si64(c2[60],_mm_xor_si64(c2[36],_mm_xor_si64(c2[84],_mm_xor_si64(c2[60],_mm_xor_si64(c2[36],_mm_xor_si64(c2[26],_mm_xor_si64(c2[38],_mm_xor_si64(c2[14],_mm_xor_si64(c2[2],_mm_xor_si64(c2[74],_mm_xor_si64(c2[74],_mm_xor_si64(c2[50],_mm_xor_si64(c2[16],_mm_xor_si64(c2[28],_mm_xor_si64(c2[4],_mm_xor_si64(c2[4],_mm_xor_si64(c2[76],_mm_xor_si64(c2[90],_mm_xor_si64(c2[6],_mm_xor_si64(c2[78],_mm_xor_si64(c2[54],_mm_xor_si64(c2[30],_mm_xor_si64(c2[30],_mm_xor_si64(c2[6],_mm_xor_si64(c2[44],_mm_xor_si64(c2[20],_mm_xor_si64(c2[32],_mm_xor_si64(c2[8],_mm_xor_si64(c2[56],_mm_xor_si64(c2[32],_mm_xor_si64(c2[46],_mm_xor_si64(c2[22],_mm_xor_si64(c2[82],_mm_xor_si64(c2[58],_mm_xor_si64(c2[36],_mm_xor_si64(c2[48],_mm_xor_si64(c2[24],_mm_xor_si64(c2[12],_mm_xor_si64(c2[84],_mm_xor_si64(c2[96],_mm_xor_si64(c2[72],_mm_xor_si64(c2[38],_mm_xor_si64(c2[14],_mm_xor_si64(c2[62],_mm_xor_si64(c2[38],_mm_xor_si64(c2[88],_mm_xor_si64(c2[64],_mm_xor_si64(c2[88],_mm_xor_si64(c2[64],_mm_xor_si64(c2[16],_mm_xor_si64(c2[88],_mm_xor_si64(c2[42],_mm_xor_si64(c2[54],_mm_xor_si64(c2[30],_mm_xor_si64(c2[42],_mm_xor_si64(c2[18],_mm_xor_si64(c2[90],c2[66]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+     d2[15]=simde_mm_xor_si64(c2[72],simde_mm_xor_si64(c2[84],simde_mm_xor_si64(c2[60],simde_mm_xor_si64(c2[60],simde_mm_xor_si64(c2[36],simde_mm_xor_si64(c2[84],simde_mm_xor_si64(c2[60],simde_mm_xor_si64(c2[36],simde_mm_xor_si64(c2[26],simde_mm_xor_si64(c2[38],simde_mm_xor_si64(c2[14],simde_mm_xor_si64(c2[2],simde_mm_xor_si64(c2[74],simde_mm_xor_si64(c2[74],simde_mm_xor_si64(c2[50],simde_mm_xor_si64(c2[16],simde_mm_xor_si64(c2[28],simde_mm_xor_si64(c2[4],simde_mm_xor_si64(c2[4],simde_mm_xor_si64(c2[76],simde_mm_xor_si64(c2[90],simde_mm_xor_si64(c2[6],simde_mm_xor_si64(c2[78],simde_mm_xor_si64(c2[54],simde_mm_xor_si64(c2[30],simde_mm_xor_si64(c2[30],simde_mm_xor_si64(c2[6],simde_mm_xor_si64(c2[44],simde_mm_xor_si64(c2[20],simde_mm_xor_si64(c2[32],simde_mm_xor_si64(c2[8],simde_mm_xor_si64(c2[56],simde_mm_xor_si64(c2[32],simde_mm_xor_si64(c2[46],simde_mm_xor_si64(c2[22],simde_mm_xor_si64(c2[82],simde_mm_xor_si64(c2[58],simde_mm_xor_si64(c2[36],simde_mm_xor_si64(c2[48],simde_mm_xor_si64(c2[24],simde_mm_xor_si64(c2[12],simde_mm_xor_si64(c2[84],simde_mm_xor_si64(c2[96],simde_mm_xor_si64(c2[72],simde_mm_xor_si64(c2[38],simde_mm_xor_si64(c2[14],simde_mm_xor_si64(c2[62],simde_mm_xor_si64(c2[38],simde_mm_xor_si64(c2[88],simde_mm_xor_si64(c2[64],simde_mm_xor_si64(c2[88],simde_mm_xor_si64(c2[64],simde_mm_xor_si64(c2[16],simde_mm_xor_si64(c2[88],simde_mm_xor_si64(c2[42],simde_mm_xor_si64(c2[54],simde_mm_xor_si64(c2[30],simde_mm_xor_si64(c2[42],simde_mm_xor_si64(c2[18],simde_mm_xor_si64(c2[90],c2[66]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
 
 //row: 16
-     d2[16]=_mm_xor_si64(c2[12],_mm_xor_si64(c2[36],_mm_xor_si64(c2[0],_mm_xor_si64(c2[24],_mm_xor_si64(c2[12],_mm_xor_si64(c2[72],_mm_xor_si64(c2[0],_mm_xor_si64(c2[0],_mm_xor_si64(c2[24],_mm_xor_si64(c2[62],_mm_xor_si64(c2[86],_mm_xor_si64(c2[50],_mm_xor_si64(c2[74],_mm_xor_si64(c2[14],_mm_xor_si64(c2[38],_mm_xor_si64(c2[86],_mm_xor_si64(c2[14],_mm_xor_si64(c2[38],_mm_xor_si64(c2[52],_mm_xor_si64(c2[76],_mm_xor_si64(c2[40],_mm_xor_si64(c2[64],_mm_xor_si64(c2[16],_mm_xor_si64(c2[40],_mm_xor_si64(c2[30],_mm_xor_si64(c2[54],_mm_xor_si64(c2[18],_mm_xor_si64(c2[42],_mm_xor_si64(c2[6],_mm_xor_si64(c2[66],_mm_xor_si64(c2[90],_mm_xor_si64(c2[42],_mm_xor_si64(c2[66],_mm_xor_si64(c2[92],_mm_xor_si64(c2[56],_mm_xor_si64(c2[80],_mm_xor_si64(c2[44],_mm_xor_si64(c2[68],_mm_xor_si64(c2[68],_mm_xor_si64(c2[92],_mm_xor_si64(c2[94],_mm_xor_si64(c2[58],_mm_xor_si64(c2[82],_mm_xor_si64(c2[94],_mm_xor_si64(c2[22],_mm_xor_si64(c2[72],_mm_xor_si64(c2[96],_mm_xor_si64(c2[60],_mm_xor_si64(c2[84],_mm_xor_si64(c2[60],_mm_xor_si64(c2[24],_mm_xor_si64(c2[48],_mm_xor_si64(c2[12],_mm_xor_si64(c2[36],_mm_xor_si64(c2[86],_mm_xor_si64(c2[50],_mm_xor_si64(c2[74],_mm_xor_si64(c2[74],_mm_xor_si64(c2[98],_mm_xor_si64(c2[40],_mm_xor_si64(c2[100],_mm_xor_si64(c2[28],_mm_xor_si64(c2[100],_mm_xor_si64(c2[28],_mm_xor_si64(c2[28],_mm_xor_si64(c2[52],_mm_xor_si64(c2[78],_mm_xor_si64(c2[102],_mm_xor_si64(c2[66],_mm_xor_si64(c2[90],_mm_xor_si64(c2[90],_mm_xor_si64(c2[54],_mm_xor_si64(c2[78],_mm_xor_si64(c2[102],_mm_xor_si64(c2[30],c2[90])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+     d2[16]=simde_mm_xor_si64(c2[12],simde_mm_xor_si64(c2[36],simde_mm_xor_si64(c2[0],simde_mm_xor_si64(c2[24],simde_mm_xor_si64(c2[12],simde_mm_xor_si64(c2[72],simde_mm_xor_si64(c2[0],simde_mm_xor_si64(c2[0],simde_mm_xor_si64(c2[24],simde_mm_xor_si64(c2[62],simde_mm_xor_si64(c2[86],simde_mm_xor_si64(c2[50],simde_mm_xor_si64(c2[74],simde_mm_xor_si64(c2[14],simde_mm_xor_si64(c2[38],simde_mm_xor_si64(c2[86],simde_mm_xor_si64(c2[14],simde_mm_xor_si64(c2[38],simde_mm_xor_si64(c2[52],simde_mm_xor_si64(c2[76],simde_mm_xor_si64(c2[40],simde_mm_xor_si64(c2[64],simde_mm_xor_si64(c2[16],simde_mm_xor_si64(c2[40],simde_mm_xor_si64(c2[30],simde_mm_xor_si64(c2[54],simde_mm_xor_si64(c2[18],simde_mm_xor_si64(c2[42],simde_mm_xor_si64(c2[6],simde_mm_xor_si64(c2[66],simde_mm_xor_si64(c2[90],simde_mm_xor_si64(c2[42],simde_mm_xor_si64(c2[66],simde_mm_xor_si64(c2[92],simde_mm_xor_si64(c2[56],simde_mm_xor_si64(c2[80],simde_mm_xor_si64(c2[44],simde_mm_xor_si64(c2[68],simde_mm_xor_si64(c2[68],simde_mm_xor_si64(c2[92],simde_mm_xor_si64(c2[94],simde_mm_xor_si64(c2[58],simde_mm_xor_si64(c2[82],simde_mm_xor_si64(c2[94],simde_mm_xor_si64(c2[22],simde_mm_xor_si64(c2[72],simde_mm_xor_si64(c2[96],simde_mm_xor_si64(c2[60],simde_mm_xor_si64(c2[84],simde_mm_xor_si64(c2[60],simde_mm_xor_si64(c2[24],simde_mm_xor_si64(c2[48],simde_mm_xor_si64(c2[12],simde_mm_xor_si64(c2[36],simde_mm_xor_si64(c2[86],simde_mm_xor_si64(c2[50],simde_mm_xor_si64(c2[74],simde_mm_xor_si64(c2[74],simde_mm_xor_si64(c2[98],simde_mm_xor_si64(c2[40],simde_mm_xor_si64(c2[100],simde_mm_xor_si64(c2[28],simde_mm_xor_si64(c2[100],simde_mm_xor_si64(c2[28],simde_mm_xor_si64(c2[28],simde_mm_xor_si64(c2[52],simde_mm_xor_si64(c2[78],simde_mm_xor_si64(c2[102],simde_mm_xor_si64(c2[66],simde_mm_xor_si64(c2[90],simde_mm_xor_si64(c2[90],simde_mm_xor_si64(c2[54],simde_mm_xor_si64(c2[78],simde_mm_xor_si64(c2[102],simde_mm_xor_si64(c2[30],c2[90])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
 
 //row: 17
-     d2[17]=_mm_xor_si64(c2[36],_mm_xor_si64(c2[12],_mm_xor_si64(c2[24],_mm_xor_si64(c2[0],_mm_xor_si64(c2[84],_mm_xor_si64(c2[0],_mm_xor_si64(c2[72],_mm_xor_si64(c2[24],_mm_xor_si64(c2[0],_mm_xor_si64(c2[86],_mm_xor_si64(c2[62],_mm_xor_si64(c2[74],_mm_xor_si64(c2[50],_mm_xor_si64(c2[38],_mm_xor_si64(c2[14],_mm_xor_si64(c2[14],_mm_xor_si64(c2[86],_mm_xor_si64(c2[74],_mm_xor_si64(c2[76],_mm_xor_si64(c2[52],_mm_xor_si64(c2[64],_mm_xor_si64(c2[40],_mm_xor_si64(c2[40],_mm_xor_si64(c2[16],_mm_xor_si64(c2[54],_mm_xor_si64(c2[30],_mm_xor_si64(c2[42],_mm_xor_si64(c2[18],_mm_xor_si64(c2[78],_mm_xor_si64(c2[90],_mm_xor_si64(c2[66],_mm_xor_si64(c2[66],_mm_xor_si64(c2[42],_mm_xor_si64(c2[68],_mm_xor_si64(c2[80],_mm_xor_si64(c2[56],_mm_xor_si64(c2[68],_mm_xor_si64(c2[44],_mm_xor_si64(c2[92],_mm_xor_si64(c2[68],_mm_xor_si64(c2[70],_mm_xor_si64(c2[82],_mm_xor_si64(c2[58],_mm_xor_si64(c2[22],_mm_xor_si64(c2[94],_mm_xor_si64(c2[58],_mm_xor_si64(c2[96],_mm_xor_si64(c2[72],_mm_xor_si64(c2[84],_mm_xor_si64(c2[60],_mm_xor_si64(c2[36],_mm_xor_si64(c2[48],_mm_xor_si64(c2[24],_mm_xor_si64(c2[36],_mm_xor_si64(c2[12],_mm_xor_si64(c2[62],_mm_xor_si64(c2[74],_mm_xor_si64(c2[50],_mm_xor_si64(c2[98],_mm_xor_si64(c2[74],_mm_xor_si64(c2[16],_mm_xor_si64(c2[28],_mm_xor_si64(c2[100],_mm_xor_si64(c2[28],_mm_xor_si64(c2[100],_mm_xor_si64(c2[52],_mm_xor_si64(c2[28],_mm_xor_si64(c2[102],_mm_xor_si64(c2[78],_mm_xor_si64(c2[90],_mm_xor_si64(c2[66],_mm_xor_si64(c2[66],_mm_xor_si64(c2[78],_mm_xor_si64(c2[54],_mm_xor_si64(c2[30],c2[102])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+     d2[17]=simde_mm_xor_si64(c2[36],simde_mm_xor_si64(c2[12],simde_mm_xor_si64(c2[24],simde_mm_xor_si64(c2[0],simde_mm_xor_si64(c2[84],simde_mm_xor_si64(c2[0],simde_mm_xor_si64(c2[72],simde_mm_xor_si64(c2[24],simde_mm_xor_si64(c2[0],simde_mm_xor_si64(c2[86],simde_mm_xor_si64(c2[62],simde_mm_xor_si64(c2[74],simde_mm_xor_si64(c2[50],simde_mm_xor_si64(c2[38],simde_mm_xor_si64(c2[14],simde_mm_xor_si64(c2[14],simde_mm_xor_si64(c2[86],simde_mm_xor_si64(c2[74],simde_mm_xor_si64(c2[76],simde_mm_xor_si64(c2[52],simde_mm_xor_si64(c2[64],simde_mm_xor_si64(c2[40],simde_mm_xor_si64(c2[40],simde_mm_xor_si64(c2[16],simde_mm_xor_si64(c2[54],simde_mm_xor_si64(c2[30],simde_mm_xor_si64(c2[42],simde_mm_xor_si64(c2[18],simde_mm_xor_si64(c2[78],simde_mm_xor_si64(c2[90],simde_mm_xor_si64(c2[66],simde_mm_xor_si64(c2[66],simde_mm_xor_si64(c2[42],simde_mm_xor_si64(c2[68],simde_mm_xor_si64(c2[80],simde_mm_xor_si64(c2[56],simde_mm_xor_si64(c2[68],simde_mm_xor_si64(c2[44],simde_mm_xor_si64(c2[92],simde_mm_xor_si64(c2[68],simde_mm_xor_si64(c2[70],simde_mm_xor_si64(c2[82],simde_mm_xor_si64(c2[58],simde_mm_xor_si64(c2[22],simde_mm_xor_si64(c2[94],simde_mm_xor_si64(c2[58],simde_mm_xor_si64(c2[96],simde_mm_xor_si64(c2[72],simde_mm_xor_si64(c2[84],simde_mm_xor_si64(c2[60],simde_mm_xor_si64(c2[36],simde_mm_xor_si64(c2[48],simde_mm_xor_si64(c2[24],simde_mm_xor_si64(c2[36],simde_mm_xor_si64(c2[12],simde_mm_xor_si64(c2[62],simde_mm_xor_si64(c2[74],simde_mm_xor_si64(c2[50],simde_mm_xor_si64(c2[98],simde_mm_xor_si64(c2[74],simde_mm_xor_si64(c2[16],simde_mm_xor_si64(c2[28],simde_mm_xor_si64(c2[100],simde_mm_xor_si64(c2[28],simde_mm_xor_si64(c2[100],simde_mm_xor_si64(c2[52],simde_mm_xor_si64(c2[28],simde_mm_xor_si64(c2[102],simde_mm_xor_si64(c2[78],simde_mm_xor_si64(c2[90],simde_mm_xor_si64(c2[66],simde_mm_xor_si64(c2[66],simde_mm_xor_si64(c2[78],simde_mm_xor_si64(c2[54],simde_mm_xor_si64(c2[30],c2[102])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
 
 //row: 18
-     d2[18]=_mm_xor_si64(c2[48],_mm_xor_si64(c2[36],c2[38]));
+     d2[18]=simde_mm_xor_si64(c2[48],simde_mm_xor_si64(c2[36],c2[38]));
 
 //row: 19
-     d2[19]=_mm_xor_si64(c2[12],_mm_xor_si64(c2[84],_mm_xor_si64(c2[12],_mm_xor_si64(c2[84],_mm_xor_si64(c2[62],_mm_xor_si64(c2[26],_mm_xor_si64(c2[2],_mm_xor_si64(c2[50],_mm_xor_si64(c2[52],_mm_xor_si64(c2[28],_mm_xor_si64(c2[30],_mm_xor_si64(c2[78],_mm_xor_si64(c2[54],_mm_xor_si64(c2[68],_mm_xor_si64(c2[56],_mm_xor_si64(c2[80],_mm_xor_si64(c2[70],_mm_xor_si64(c2[10],_mm_xor_si64(c2[72],_mm_xor_si64(c2[36],_mm_xor_si64(c2[24],_mm_xor_si64(c2[62],_mm_xor_si64(c2[86],_mm_xor_si64(c2[16],_mm_xor_si64(c2[16],_mm_xor_si64(c2[40],_mm_xor_si64(c2[78],_mm_xor_si64(c2[66],c2[18]))))))))))))))))))))))))))));
+     d2[19]=simde_mm_xor_si64(c2[12],simde_mm_xor_si64(c2[84],simde_mm_xor_si64(c2[12],simde_mm_xor_si64(c2[84],simde_mm_xor_si64(c2[62],simde_mm_xor_si64(c2[26],simde_mm_xor_si64(c2[2],simde_mm_xor_si64(c2[50],simde_mm_xor_si64(c2[52],simde_mm_xor_si64(c2[28],simde_mm_xor_si64(c2[30],simde_mm_xor_si64(c2[78],simde_mm_xor_si64(c2[54],simde_mm_xor_si64(c2[68],simde_mm_xor_si64(c2[56],simde_mm_xor_si64(c2[80],simde_mm_xor_si64(c2[70],simde_mm_xor_si64(c2[10],simde_mm_xor_si64(c2[72],simde_mm_xor_si64(c2[36],simde_mm_xor_si64(c2[24],simde_mm_xor_si64(c2[62],simde_mm_xor_si64(c2[86],simde_mm_xor_si64(c2[16],simde_mm_xor_si64(c2[16],simde_mm_xor_si64(c2[40],simde_mm_xor_si64(c2[78],simde_mm_xor_si64(c2[66],c2[18]))))))))))))))))))))))))))));
 
 //row: 20
-     d2[20]=_mm_xor_si64(c2[72],_mm_xor_si64(c2[60],_mm_xor_si64(c2[36],_mm_xor_si64(c2[60],_mm_xor_si64(c2[26],_mm_xor_si64(c2[14],_mm_xor_si64(c2[74],_mm_xor_si64(c2[50],_mm_xor_si64(c2[26],_mm_xor_si64(c2[16],_mm_xor_si64(c2[4],_mm_xor_si64(c2[76],_mm_xor_si64(c2[90],_mm_xor_si64(c2[78],_mm_xor_si64(c2[30],_mm_xor_si64(c2[6],_mm_xor_si64(c2[20],_mm_xor_si64(c2[8],_mm_xor_si64(c2[32],_mm_xor_si64(c2[20],_mm_xor_si64(c2[22],_mm_xor_si64(c2[58],_mm_xor_si64(c2[36],_mm_xor_si64(c2[24],_mm_xor_si64(c2[84],_mm_xor_si64(c2[72],_mm_xor_si64(c2[14],_mm_xor_si64(c2[38],_mm_xor_si64(c2[64],_mm_xor_si64(c2[64],_mm_xor_si64(c2[88],_mm_xor_si64(c2[42],_mm_xor_si64(c2[30],_mm_xor_si64(c2[18],c2[66]))))))))))))))))))))))))))))))))));
+     d2[20]=simde_mm_xor_si64(c2[72],simde_mm_xor_si64(c2[60],simde_mm_xor_si64(c2[36],simde_mm_xor_si64(c2[60],simde_mm_xor_si64(c2[26],simde_mm_xor_si64(c2[14],simde_mm_xor_si64(c2[74],simde_mm_xor_si64(c2[50],simde_mm_xor_si64(c2[26],simde_mm_xor_si64(c2[16],simde_mm_xor_si64(c2[4],simde_mm_xor_si64(c2[76],simde_mm_xor_si64(c2[90],simde_mm_xor_si64(c2[78],simde_mm_xor_si64(c2[30],simde_mm_xor_si64(c2[6],simde_mm_xor_si64(c2[20],simde_mm_xor_si64(c2[8],simde_mm_xor_si64(c2[32],simde_mm_xor_si64(c2[20],simde_mm_xor_si64(c2[22],simde_mm_xor_si64(c2[58],simde_mm_xor_si64(c2[36],simde_mm_xor_si64(c2[24],simde_mm_xor_si64(c2[84],simde_mm_xor_si64(c2[72],simde_mm_xor_si64(c2[14],simde_mm_xor_si64(c2[38],simde_mm_xor_si64(c2[64],simde_mm_xor_si64(c2[64],simde_mm_xor_si64(c2[88],simde_mm_xor_si64(c2[42],simde_mm_xor_si64(c2[30],simde_mm_xor_si64(c2[18],c2[66]))))))))))))))))))))))))))))))))));
 
 //row: 21
-     d2[21]=_mm_xor_si64(c2[24],_mm_xor_si64(c2[0],_mm_xor_si64(c2[24],_mm_xor_si64(c2[48],_mm_xor_si64(c2[74],_mm_xor_si64(c2[38],_mm_xor_si64(c2[26],_mm_xor_si64(c2[14],_mm_xor_si64(c2[64],_mm_xor_si64(c2[52],_mm_xor_si64(c2[40],_mm_xor_si64(c2[42],_mm_xor_si64(c2[90],_mm_xor_si64(c2[66],_mm_xor_si64(c2[80],_mm_xor_si64(c2[68],_mm_xor_si64(c2[8],_mm_xor_si64(c2[92],_mm_xor_si64(c2[82],_mm_xor_si64(c2[34],_mm_xor_si64(c2[22],_mm_xor_si64(c2[84],_mm_xor_si64(c2[48],_mm_xor_si64(c2[48],_mm_xor_si64(c2[36],_mm_xor_si64(c2[74],_mm_xor_si64(c2[14],_mm_xor_si64(c2[98],_mm_xor_si64(c2[28],_mm_xor_si64(c2[28],_mm_xor_si64(c2[64],_mm_xor_si64(c2[52],_mm_xor_si64(c2[40],_mm_xor_si64(c2[90],_mm_xor_si64(c2[78],_mm_xor_si64(c2[42],c2[30]))))))))))))))))))))))))))))))))))));
+     d2[21]=simde_mm_xor_si64(c2[24],simde_mm_xor_si64(c2[0],simde_mm_xor_si64(c2[24],simde_mm_xor_si64(c2[48],simde_mm_xor_si64(c2[74],simde_mm_xor_si64(c2[38],simde_mm_xor_si64(c2[26],simde_mm_xor_si64(c2[14],simde_mm_xor_si64(c2[64],simde_mm_xor_si64(c2[52],simde_mm_xor_si64(c2[40],simde_mm_xor_si64(c2[42],simde_mm_xor_si64(c2[90],simde_mm_xor_si64(c2[66],simde_mm_xor_si64(c2[80],simde_mm_xor_si64(c2[68],simde_mm_xor_si64(c2[8],simde_mm_xor_si64(c2[92],simde_mm_xor_si64(c2[82],simde_mm_xor_si64(c2[34],simde_mm_xor_si64(c2[22],simde_mm_xor_si64(c2[84],simde_mm_xor_si64(c2[48],simde_mm_xor_si64(c2[48],simde_mm_xor_si64(c2[36],simde_mm_xor_si64(c2[74],simde_mm_xor_si64(c2[14],simde_mm_xor_si64(c2[98],simde_mm_xor_si64(c2[28],simde_mm_xor_si64(c2[28],simde_mm_xor_si64(c2[64],simde_mm_xor_si64(c2[52],simde_mm_xor_si64(c2[40],simde_mm_xor_si64(c2[90],simde_mm_xor_si64(c2[78],simde_mm_xor_si64(c2[42],c2[30]))))))))))))))))))))))))))))))))))));
 
 //row: 22
-     d2[22]=_mm_xor_si64(c2[74],c2[88]);
+     d2[22]=simde_mm_xor_si64(c2[74],c2[88]);
 
 //row: 23
-     d2[23]=_mm_xor_si64(c2[84],_mm_xor_si64(c2[42],c2[82]));
+     d2[23]=simde_mm_xor_si64(c2[84],simde_mm_xor_si64(c2[42],c2[82]));
 
 //row: 24
-     d2[24]=_mm_xor_si64(c2[74],_mm_xor_si64(c2[40],c2[18]));
+     d2[24]=simde_mm_xor_si64(c2[74],simde_mm_xor_si64(c2[40],c2[18]));
 
 //row: 25
-     d2[25]=_mm_xor_si64(c2[48],c2[58]);
+     d2[25]=simde_mm_xor_si64(c2[48],c2[58]);
 
 //row: 26
-     d2[26]=_mm_xor_si64(c2[12],_mm_xor_si64(c2[0],_mm_xor_si64(c2[24],_mm_xor_si64(c2[84],_mm_xor_si64(c2[72],_mm_xor_si64(c2[0],_mm_xor_si64(c2[0],_mm_xor_si64(c2[24],_mm_xor_si64(c2[62],_mm_xor_si64(c2[50],_mm_xor_si64(c2[74],_mm_xor_si64(c2[14],_mm_xor_si64(c2[38],_mm_xor_si64(c2[26],_mm_xor_si64(c2[86],_mm_xor_si64(c2[14],_mm_xor_si64(c2[52],_mm_xor_si64(c2[40],_mm_xor_si64(c2[64],_mm_xor_si64(c2[52],_mm_xor_si64(c2[16],_mm_xor_si64(c2[40],_mm_xor_si64(c2[64],_mm_xor_si64(c2[30],_mm_xor_si64(c2[18],_mm_xor_si64(c2[42],_mm_xor_si64(c2[78],_mm_xor_si64(c2[66],_mm_xor_si64(c2[90],_mm_xor_si64(c2[42],_mm_xor_si64(c2[66],_mm_xor_si64(c2[68],_mm_xor_si64(c2[56],_mm_xor_si64(c2[80],_mm_xor_si64(c2[44],_mm_xor_si64(c2[68],_mm_xor_si64(c2[8],_mm_xor_si64(c2[68],_mm_xor_si64(c2[92],_mm_xor_si64(c2[70],_mm_xor_si64(c2[58],_mm_xor_si64(c2[82],_mm_xor_si64(c2[34],_mm_xor_si64(c2[94],_mm_xor_si64(c2[22],_mm_xor_si64(c2[72],_mm_xor_si64(c2[60],_mm_xor_si64(c2[84],_mm_xor_si64(c2[36],_mm_xor_si64(c2[24],_mm_xor_si64(c2[48],_mm_xor_si64(c2[48],_mm_xor_si64(c2[12],_mm_xor_si64(c2[36],_mm_xor_si64(c2[62],_mm_xor_si64(c2[50],_mm_xor_si64(c2[74],_mm_xor_si64(c2[14],_mm_xor_si64(c2[74],_mm_xor_si64(c2[98],_mm_xor_si64(c2[98],_mm_xor_si64(c2[16],_mm_xor_si64(c2[100],_mm_xor_si64(c2[28],_mm_xor_si64(c2[100],_mm_xor_si64(c2[28],_mm_xor_si64(c2[64],_mm_xor_si64(c2[28],_mm_xor_si64(c2[52],_mm_xor_si64(c2[78],_mm_xor_si64(c2[66],_mm_xor_si64(c2[90],_mm_xor_si64(c2[66],_mm_xor_si64(c2[54],_mm_xor_si64(c2[78],_mm_xor_si64(c2[42],_mm_xor_si64(c2[102],c2[30])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+     d2[26]=simde_mm_xor_si64(c2[12],simde_mm_xor_si64(c2[0],simde_mm_xor_si64(c2[24],simde_mm_xor_si64(c2[84],simde_mm_xor_si64(c2[72],simde_mm_xor_si64(c2[0],simde_mm_xor_si64(c2[0],simde_mm_xor_si64(c2[24],simde_mm_xor_si64(c2[62],simde_mm_xor_si64(c2[50],simde_mm_xor_si64(c2[74],simde_mm_xor_si64(c2[14],simde_mm_xor_si64(c2[38],simde_mm_xor_si64(c2[26],simde_mm_xor_si64(c2[86],simde_mm_xor_si64(c2[14],simde_mm_xor_si64(c2[52],simde_mm_xor_si64(c2[40],simde_mm_xor_si64(c2[64],simde_mm_xor_si64(c2[52],simde_mm_xor_si64(c2[16],simde_mm_xor_si64(c2[40],simde_mm_xor_si64(c2[64],simde_mm_xor_si64(c2[30],simde_mm_xor_si64(c2[18],simde_mm_xor_si64(c2[42],simde_mm_xor_si64(c2[78],simde_mm_xor_si64(c2[66],simde_mm_xor_si64(c2[90],simde_mm_xor_si64(c2[42],simde_mm_xor_si64(c2[66],simde_mm_xor_si64(c2[68],simde_mm_xor_si64(c2[56],simde_mm_xor_si64(c2[80],simde_mm_xor_si64(c2[44],simde_mm_xor_si64(c2[68],simde_mm_xor_si64(c2[8],simde_mm_xor_si64(c2[68],simde_mm_xor_si64(c2[92],simde_mm_xor_si64(c2[70],simde_mm_xor_si64(c2[58],simde_mm_xor_si64(c2[82],simde_mm_xor_si64(c2[34],simde_mm_xor_si64(c2[94],simde_mm_xor_si64(c2[22],simde_mm_xor_si64(c2[72],simde_mm_xor_si64(c2[60],simde_mm_xor_si64(c2[84],simde_mm_xor_si64(c2[36],simde_mm_xor_si64(c2[24],simde_mm_xor_si64(c2[48],simde_mm_xor_si64(c2[48],simde_mm_xor_si64(c2[12],simde_mm_xor_si64(c2[36],simde_mm_xor_si64(c2[62],simde_mm_xor_si64(c2[50],simde_mm_xor_si64(c2[74],simde_mm_xor_si64(c2[14],simde_mm_xor_si64(c2[74],simde_mm_xor_si64(c2[98],simde_mm_xor_si64(c2[98],simde_mm_xor_si64(c2[16],simde_mm_xor_si64(c2[100],simde_mm_xor_si64(c2[28],simde_mm_xor_si64(c2[100],simde_mm_xor_si64(c2[28],simde_mm_xor_si64(c2[64],simde_mm_xor_si64(c2[28],simde_mm_xor_si64(c2[52],simde_mm_xor_si64(c2[78],simde_mm_xor_si64(c2[66],simde_mm_xor_si64(c2[90],simde_mm_xor_si64(c2[66],simde_mm_xor_si64(c2[54],simde_mm_xor_si64(c2[78],simde_mm_xor_si64(c2[42],simde_mm_xor_si64(c2[102],c2[30])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
 
 //row: 27
-     d2[27]=_mm_xor_si64(c2[0],c2[96]);
+     d2[27]=simde_mm_xor_si64(c2[0],c2[96]);
 
 //row: 28
-     d2[28]=_mm_xor_si64(c2[26],_mm_xor_si64(c2[64],c2[94]));
+     d2[28]=simde_mm_xor_si64(c2[26],simde_mm_xor_si64(c2[64],c2[94]));
 
 //row: 29
-     d2[29]=_mm_xor_si64(c2[24],c2[56]);
+     d2[29]=simde_mm_xor_si64(c2[24],c2[56]);
 
 //row: 30
-     d2[30]=_mm_xor_si64(c2[88],_mm_xor_si64(c2[10],_mm_xor_si64(c2[26],c2[66])));
+     d2[30]=simde_mm_xor_si64(c2[88],simde_mm_xor_si64(c2[10],simde_mm_xor_si64(c2[26],c2[66])));
 
 //row: 31
-     d2[31]=_mm_xor_si64(c2[12],_mm_xor_si64(c2[84],_mm_xor_si64(c2[12],_mm_xor_si64(c2[62],_mm_xor_si64(c2[26],_mm_xor_si64(c2[14],_mm_xor_si64(c2[2],_mm_xor_si64(c2[26],_mm_xor_si64(c2[52],_mm_xor_si64(c2[40],_mm_xor_si64(c2[28],_mm_xor_si64(c2[30],_mm_xor_si64(c2[78],_mm_xor_si64(c2[54],_mm_xor_si64(c2[68],_mm_xor_si64(c2[56],_mm_xor_si64(c2[92],_mm_xor_si64(c2[80],_mm_xor_si64(c2[70],_mm_xor_si64(c2[22],_mm_xor_si64(c2[10],_mm_xor_si64(c2[72],_mm_xor_si64(c2[36],_mm_xor_si64(c2[36],_mm_xor_si64(c2[24],_mm_xor_si64(c2[62],_mm_xor_si64(c2[98],_mm_xor_si64(c2[86],_mm_xor_si64(c2[16],_mm_xor_si64(c2[16],_mm_xor_si64(c2[52],_mm_xor_si64(c2[40],_mm_xor_si64(c2[78],_mm_xor_si64(c2[66],_mm_xor_si64(c2[30],c2[18])))))))))))))))))))))))))))))))))));
+     d2[31]=simde_mm_xor_si64(c2[12],simde_mm_xor_si64(c2[84],simde_mm_xor_si64(c2[12],simde_mm_xor_si64(c2[62],simde_mm_xor_si64(c2[26],simde_mm_xor_si64(c2[14],simde_mm_xor_si64(c2[2],simde_mm_xor_si64(c2[26],simde_mm_xor_si64(c2[52],simde_mm_xor_si64(c2[40],simde_mm_xor_si64(c2[28],simde_mm_xor_si64(c2[30],simde_mm_xor_si64(c2[78],simde_mm_xor_si64(c2[54],simde_mm_xor_si64(c2[68],simde_mm_xor_si64(c2[56],simde_mm_xor_si64(c2[92],simde_mm_xor_si64(c2[80],simde_mm_xor_si64(c2[70],simde_mm_xor_si64(c2[22],simde_mm_xor_si64(c2[10],simde_mm_xor_si64(c2[72],simde_mm_xor_si64(c2[36],simde_mm_xor_si64(c2[36],simde_mm_xor_si64(c2[24],simde_mm_xor_si64(c2[62],simde_mm_xor_si64(c2[98],simde_mm_xor_si64(c2[86],simde_mm_xor_si64(c2[16],simde_mm_xor_si64(c2[16],simde_mm_xor_si64(c2[52],simde_mm_xor_si64(c2[40],simde_mm_xor_si64(c2[78],simde_mm_xor_si64(c2[66],simde_mm_xor_si64(c2[30],c2[18])))))))))))))))))))))))))))))))))));
 
 //row: 32
-     d2[32]=_mm_xor_si64(c2[84],_mm_xor_si64(c2[72],_mm_xor_si64(c2[60],_mm_xor_si64(c2[48],_mm_xor_si64(c2[72],_mm_xor_si64(c2[24],_mm_xor_si64(c2[38],_mm_xor_si64(c2[26],_mm_xor_si64(c2[86],_mm_xor_si64(c2[62],_mm_xor_si64(c2[28],_mm_xor_si64(c2[16],_mm_xor_si64(c2[88],_mm_xor_si64(c2[6],_mm_xor_si64(c2[90],_mm_xor_si64(c2[54],_mm_xor_si64(c2[42],_mm_xor_si64(c2[18],_mm_xor_si64(c2[44],_mm_xor_si64(c2[32],_mm_xor_si64(c2[20],_mm_xor_si64(c2[44],_mm_xor_si64(c2[46],_mm_xor_si64(c2[34],_mm_xor_si64(c2[70],_mm_xor_si64(c2[58],_mm_xor_si64(c2[48],_mm_xor_si64(c2[36],_mm_xor_si64(c2[12],_mm_xor_si64(c2[96],_mm_xor_si64(c2[84],_mm_xor_si64(c2[38],_mm_xor_si64(c2[26],_mm_xor_si64(c2[50],_mm_xor_si64(c2[88],_mm_xor_si64(c2[76],_mm_xor_si64(c2[76],_mm_xor_si64(c2[100],_mm_xor_si64(c2[54],_mm_xor_si64(c2[42],_mm_xor_si64(c2[42],_mm_xor_si64(c2[30],c2[78]))))))))))))))))))))))))))))))))))))))))));
+     d2[32]=simde_mm_xor_si64(c2[84],simde_mm_xor_si64(c2[72],simde_mm_xor_si64(c2[60],simde_mm_xor_si64(c2[48],simde_mm_xor_si64(c2[72],simde_mm_xor_si64(c2[24],simde_mm_xor_si64(c2[38],simde_mm_xor_si64(c2[26],simde_mm_xor_si64(c2[86],simde_mm_xor_si64(c2[62],simde_mm_xor_si64(c2[28],simde_mm_xor_si64(c2[16],simde_mm_xor_si64(c2[88],simde_mm_xor_si64(c2[6],simde_mm_xor_si64(c2[90],simde_mm_xor_si64(c2[54],simde_mm_xor_si64(c2[42],simde_mm_xor_si64(c2[18],simde_mm_xor_si64(c2[44],simde_mm_xor_si64(c2[32],simde_mm_xor_si64(c2[20],simde_mm_xor_si64(c2[44],simde_mm_xor_si64(c2[46],simde_mm_xor_si64(c2[34],simde_mm_xor_si64(c2[70],simde_mm_xor_si64(c2[58],simde_mm_xor_si64(c2[48],simde_mm_xor_si64(c2[36],simde_mm_xor_si64(c2[12],simde_mm_xor_si64(c2[96],simde_mm_xor_si64(c2[84],simde_mm_xor_si64(c2[38],simde_mm_xor_si64(c2[26],simde_mm_xor_si64(c2[50],simde_mm_xor_si64(c2[88],simde_mm_xor_si64(c2[76],simde_mm_xor_si64(c2[76],simde_mm_xor_si64(c2[100],simde_mm_xor_si64(c2[54],simde_mm_xor_si64(c2[42],simde_mm_xor_si64(c2[42],simde_mm_xor_si64(c2[30],c2[78]))))))))))))))))))))))))))))))))))))))))));
 
 //row: 33
-     d2[33]=_mm_xor_si64(c2[36],_mm_xor_si64(c2[12],_mm_xor_si64(c2[36],_mm_xor_si64(c2[86],_mm_xor_si64(c2[50],_mm_xor_si64(c2[26],_mm_xor_si64(c2[76],_mm_xor_si64(c2[52],_mm_xor_si64(c2[52],_mm_xor_si64(c2[54],_mm_xor_si64(c2[6],_mm_xor_si64(c2[78],_mm_xor_si64(c2[92],_mm_xor_si64(c2[80],_mm_xor_si64(c2[8],_mm_xor_si64(c2[94],_mm_xor_si64(c2[34],_mm_xor_si64(c2[96],_mm_xor_si64(c2[60],_mm_xor_si64(c2[48],_mm_xor_si64(c2[86],_mm_xor_si64(c2[14],_mm_xor_si64(c2[62],_mm_xor_si64(c2[40],_mm_xor_si64(c2[40],_mm_xor_si64(c2[64],_mm_xor_si64(c2[102],_mm_xor_si64(c2[90],c2[42]))))))))))))))))))))))))))));
+     d2[33]=simde_mm_xor_si64(c2[36],simde_mm_xor_si64(c2[12],simde_mm_xor_si64(c2[36],simde_mm_xor_si64(c2[86],simde_mm_xor_si64(c2[50],simde_mm_xor_si64(c2[26],simde_mm_xor_si64(c2[76],simde_mm_xor_si64(c2[52],simde_mm_xor_si64(c2[52],simde_mm_xor_si64(c2[54],simde_mm_xor_si64(c2[6],simde_mm_xor_si64(c2[78],simde_mm_xor_si64(c2[92],simde_mm_xor_si64(c2[80],simde_mm_xor_si64(c2[8],simde_mm_xor_si64(c2[94],simde_mm_xor_si64(c2[34],simde_mm_xor_si64(c2[96],simde_mm_xor_si64(c2[60],simde_mm_xor_si64(c2[48],simde_mm_xor_si64(c2[86],simde_mm_xor_si64(c2[14],simde_mm_xor_si64(c2[62],simde_mm_xor_si64(c2[40],simde_mm_xor_si64(c2[40],simde_mm_xor_si64(c2[64],simde_mm_xor_si64(c2[102],simde_mm_xor_si64(c2[90],c2[42]))))))))))))))))))))))))))));
 
 //row: 34
-     d2[34]=_mm_xor_si64(c2[72],_mm_xor_si64(c2[60],_mm_xor_si64(c2[48],_mm_xor_si64(c2[48],_mm_xor_si64(c2[36],_mm_xor_si64(c2[24],_mm_xor_si64(c2[60],_mm_xor_si64(c2[48],_mm_xor_si64(c2[36],_mm_xor_si64(c2[26],_mm_xor_si64(c2[14],_mm_xor_si64(c2[2],_mm_xor_si64(c2[74],_mm_xor_si64(c2[62],_mm_xor_si64(c2[50],_mm_xor_si64(c2[50],_mm_xor_si64(c2[38],_mm_xor_si64(c2[16],_mm_xor_si64(c2[4],_mm_xor_si64(c2[88],_mm_xor_si64(c2[76],_mm_xor_si64(c2[76],_mm_xor_si64(c2[64],_mm_xor_si64(c2[90],_mm_xor_si64(c2[78],_mm_xor_si64(c2[66],_mm_xor_si64(c2[42],_mm_xor_si64(c2[30],_mm_xor_si64(c2[18],_mm_xor_si64(c2[6],_mm_xor_si64(c2[90],_mm_xor_si64(c2[32],_mm_xor_si64(c2[20],_mm_xor_si64(c2[8],_mm_xor_si64(c2[8],_mm_xor_si64(c2[92],_mm_xor_si64(c2[32],_mm_xor_si64(c2[32],_mm_xor_si64(c2[20],_mm_xor_si64(c2[34],_mm_xor_si64(c2[22],_mm_xor_si64(c2[10],_mm_xor_si64(c2[58],_mm_xor_si64(c2[58],_mm_xor_si64(c2[46],_mm_xor_si64(c2[36],_mm_xor_si64(c2[24],_mm_xor_si64(c2[12],_mm_xor_si64(c2[96],_mm_xor_si64(c2[84],_mm_xor_si64(c2[72],_mm_xor_si64(c2[72],_mm_xor_si64(c2[72],_mm_xor_si64(c2[60],_mm_xor_si64(c2[26],_mm_xor_si64(c2[14],_mm_xor_si64(c2[98],_mm_xor_si64(c2[38],_mm_xor_si64(c2[38],_mm_xor_si64(c2[26],_mm_xor_si64(c2[76],_mm_xor_si64(c2[64],_mm_xor_si64(c2[52],_mm_xor_si64(c2[64],_mm_xor_si64(c2[52],_mm_xor_si64(c2[88],_mm_xor_si64(c2[88],_mm_xor_si64(c2[76],_mm_xor_si64(c2[42],_mm_xor_si64(c2[30],_mm_xor_si64(c2[18],_mm_xor_si64(c2[30],_mm_xor_si64(c2[18],_mm_xor_si64(c2[102],_mm_xor_si64(c2[66],_mm_xor_si64(c2[66],c2[54]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+     d2[34]=simde_mm_xor_si64(c2[72],simde_mm_xor_si64(c2[60],simde_mm_xor_si64(c2[48],simde_mm_xor_si64(c2[48],simde_mm_xor_si64(c2[36],simde_mm_xor_si64(c2[24],simde_mm_xor_si64(c2[60],simde_mm_xor_si64(c2[48],simde_mm_xor_si64(c2[36],simde_mm_xor_si64(c2[26],simde_mm_xor_si64(c2[14],simde_mm_xor_si64(c2[2],simde_mm_xor_si64(c2[74],simde_mm_xor_si64(c2[62],simde_mm_xor_si64(c2[50],simde_mm_xor_si64(c2[50],simde_mm_xor_si64(c2[38],simde_mm_xor_si64(c2[16],simde_mm_xor_si64(c2[4],simde_mm_xor_si64(c2[88],simde_mm_xor_si64(c2[76],simde_mm_xor_si64(c2[76],simde_mm_xor_si64(c2[64],simde_mm_xor_si64(c2[90],simde_mm_xor_si64(c2[78],simde_mm_xor_si64(c2[66],simde_mm_xor_si64(c2[42],simde_mm_xor_si64(c2[30],simde_mm_xor_si64(c2[18],simde_mm_xor_si64(c2[6],simde_mm_xor_si64(c2[90],simde_mm_xor_si64(c2[32],simde_mm_xor_si64(c2[20],simde_mm_xor_si64(c2[8],simde_mm_xor_si64(c2[8],simde_mm_xor_si64(c2[92],simde_mm_xor_si64(c2[32],simde_mm_xor_si64(c2[32],simde_mm_xor_si64(c2[20],simde_mm_xor_si64(c2[34],simde_mm_xor_si64(c2[22],simde_mm_xor_si64(c2[10],simde_mm_xor_si64(c2[58],simde_mm_xor_si64(c2[58],simde_mm_xor_si64(c2[46],simde_mm_xor_si64(c2[36],simde_mm_xor_si64(c2[24],simde_mm_xor_si64(c2[12],simde_mm_xor_si64(c2[96],simde_mm_xor_si64(c2[84],simde_mm_xor_si64(c2[72],simde_mm_xor_si64(c2[72],simde_mm_xor_si64(c2[72],simde_mm_xor_si64(c2[60],simde_mm_xor_si64(c2[26],simde_mm_xor_si64(c2[14],simde_mm_xor_si64(c2[98],simde_mm_xor_si64(c2[38],simde_mm_xor_si64(c2[38],simde_mm_xor_si64(c2[26],simde_mm_xor_si64(c2[76],simde_mm_xor_si64(c2[64],simde_mm_xor_si64(c2[52],simde_mm_xor_si64(c2[64],simde_mm_xor_si64(c2[52],simde_mm_xor_si64(c2[88],simde_mm_xor_si64(c2[88],simde_mm_xor_si64(c2[76],simde_mm_xor_si64(c2[42],simde_mm_xor_si64(c2[30],simde_mm_xor_si64(c2[18],simde_mm_xor_si64(c2[30],simde_mm_xor_si64(c2[18],simde_mm_xor_si64(c2[102],simde_mm_xor_si64(c2[66],simde_mm_xor_si64(c2[66],c2[54]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
 
 //row: 35
-     d2[35]=_mm_xor_si64(c2[0],_mm_xor_si64(c2[84],_mm_xor_si64(c2[60],_mm_xor_si64(c2[84],_mm_xor_si64(c2[50],_mm_xor_si64(c2[38],_mm_xor_si64(c2[2],_mm_xor_si64(c2[74],_mm_xor_si64(c2[14],_mm_xor_si64(c2[40],_mm_xor_si64(c2[28],_mm_xor_si64(c2[4],_mm_xor_si64(c2[18],_mm_xor_si64(c2[6],_mm_xor_si64(c2[54],_mm_xor_si64(c2[30],_mm_xor_si64(c2[44],_mm_xor_si64(c2[32],_mm_xor_si64(c2[56],_mm_xor_si64(c2[46],_mm_xor_si64(c2[82],_mm_xor_si64(c2[10],_mm_xor_si64(c2[60],_mm_xor_si64(c2[48],_mm_xor_si64(c2[12],_mm_xor_si64(c2[96],_mm_xor_si64(c2[38],_mm_xor_si64(c2[62],_mm_xor_si64(c2[88],_mm_xor_si64(c2[88],_mm_xor_si64(c2[16],_mm_xor_si64(c2[66],_mm_xor_si64(c2[54],_mm_xor_si64(c2[42],c2[90]))))))))))))))))))))))))))))))))));
+     d2[35]=simde_mm_xor_si64(c2[0],simde_mm_xor_si64(c2[84],simde_mm_xor_si64(c2[60],simde_mm_xor_si64(c2[84],simde_mm_xor_si64(c2[50],simde_mm_xor_si64(c2[38],simde_mm_xor_si64(c2[2],simde_mm_xor_si64(c2[74],simde_mm_xor_si64(c2[14],simde_mm_xor_si64(c2[40],simde_mm_xor_si64(c2[28],simde_mm_xor_si64(c2[4],simde_mm_xor_si64(c2[18],simde_mm_xor_si64(c2[6],simde_mm_xor_si64(c2[54],simde_mm_xor_si64(c2[30],simde_mm_xor_si64(c2[44],simde_mm_xor_si64(c2[32],simde_mm_xor_si64(c2[56],simde_mm_xor_si64(c2[46],simde_mm_xor_si64(c2[82],simde_mm_xor_si64(c2[10],simde_mm_xor_si64(c2[60],simde_mm_xor_si64(c2[48],simde_mm_xor_si64(c2[12],simde_mm_xor_si64(c2[96],simde_mm_xor_si64(c2[38],simde_mm_xor_si64(c2[62],simde_mm_xor_si64(c2[88],simde_mm_xor_si64(c2[88],simde_mm_xor_si64(c2[16],simde_mm_xor_si64(c2[66],simde_mm_xor_si64(c2[54],simde_mm_xor_si64(c2[42],c2[90]))))))))))))))))))))))))))))))))));
 
 //row: 36
-     d2[36]=_mm_xor_si64(c2[48],_mm_xor_si64(c2[76],c2[38]));
+     d2[36]=simde_mm_xor_si64(c2[48],simde_mm_xor_si64(c2[76],c2[38]));
 
 //row: 37
-     d2[37]=_mm_xor_si64(c2[36],_mm_xor_si64(c2[84],_mm_xor_si64(c2[12],_mm_xor_si64(c2[60],_mm_xor_si64(c2[36],_mm_xor_si64(c2[84],_mm_xor_si64(c2[86],_mm_xor_si64(c2[38],_mm_xor_si64(c2[50],_mm_xor_si64(c2[2],_mm_xor_si64(c2[86],_mm_xor_si64(c2[26],_mm_xor_si64(c2[74],_mm_xor_si64(c2[76],_mm_xor_si64(c2[28],_mm_xor_si64(c2[16],_mm_xor_si64(c2[52],_mm_xor_si64(c2[4],_mm_xor_si64(c2[54],_mm_xor_si64(c2[6],_mm_xor_si64(c2[6],_mm_xor_si64(c2[54],_mm_xor_si64(c2[78],_mm_xor_si64(c2[30],_mm_xor_si64(c2[92],_mm_xor_si64(c2[44],_mm_xor_si64(c2[80],_mm_xor_si64(c2[32],_mm_xor_si64(c2[68],_mm_xor_si64(c2[8],_mm_xor_si64(c2[56],_mm_xor_si64(c2[94],_mm_xor_si64(c2[46],_mm_xor_si64(c2[94],_mm_xor_si64(c2[34],_mm_xor_si64(c2[82],_mm_xor_si64(c2[96],_mm_xor_si64(c2[48],_mm_xor_si64(c2[60],_mm_xor_si64(c2[12],_mm_xor_si64(c2[12],_mm_xor_si64(c2[48],_mm_xor_si64(c2[96],_mm_xor_si64(c2[86],_mm_xor_si64(c2[38],_mm_xor_si64(c2[74],_mm_xor_si64(c2[14],_mm_xor_si64(c2[62],_mm_xor_si64(c2[40],_mm_xor_si64(c2[88],_mm_xor_si64(c2[40],_mm_xor_si64(c2[88],_mm_xor_si64(c2[28],_mm_xor_si64(c2[64],_mm_xor_si64(c2[16],_mm_xor_si64(c2[102],_mm_xor_si64(c2[54],_mm_xor_si64(c2[90],_mm_xor_si64(c2[42],_mm_xor_si64(c2[102],_mm_xor_si64(c2[42],c2[90])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+     d2[37]=simde_mm_xor_si64(c2[36],simde_mm_xor_si64(c2[84],simde_mm_xor_si64(c2[12],simde_mm_xor_si64(c2[60],simde_mm_xor_si64(c2[36],simde_mm_xor_si64(c2[84],simde_mm_xor_si64(c2[86],simde_mm_xor_si64(c2[38],simde_mm_xor_si64(c2[50],simde_mm_xor_si64(c2[2],simde_mm_xor_si64(c2[86],simde_mm_xor_si64(c2[26],simde_mm_xor_si64(c2[74],simde_mm_xor_si64(c2[76],simde_mm_xor_si64(c2[28],simde_mm_xor_si64(c2[16],simde_mm_xor_si64(c2[52],simde_mm_xor_si64(c2[4],simde_mm_xor_si64(c2[54],simde_mm_xor_si64(c2[6],simde_mm_xor_si64(c2[6],simde_mm_xor_si64(c2[54],simde_mm_xor_si64(c2[78],simde_mm_xor_si64(c2[30],simde_mm_xor_si64(c2[92],simde_mm_xor_si64(c2[44],simde_mm_xor_si64(c2[80],simde_mm_xor_si64(c2[32],simde_mm_xor_si64(c2[68],simde_mm_xor_si64(c2[8],simde_mm_xor_si64(c2[56],simde_mm_xor_si64(c2[94],simde_mm_xor_si64(c2[46],simde_mm_xor_si64(c2[94],simde_mm_xor_si64(c2[34],simde_mm_xor_si64(c2[82],simde_mm_xor_si64(c2[96],simde_mm_xor_si64(c2[48],simde_mm_xor_si64(c2[60],simde_mm_xor_si64(c2[12],simde_mm_xor_si64(c2[12],simde_mm_xor_si64(c2[48],simde_mm_xor_si64(c2[96],simde_mm_xor_si64(c2[86],simde_mm_xor_si64(c2[38],simde_mm_xor_si64(c2[74],simde_mm_xor_si64(c2[14],simde_mm_xor_si64(c2[62],simde_mm_xor_si64(c2[40],simde_mm_xor_si64(c2[88],simde_mm_xor_si64(c2[40],simde_mm_xor_si64(c2[88],simde_mm_xor_si64(c2[28],simde_mm_xor_si64(c2[64],simde_mm_xor_si64(c2[16],simde_mm_xor_si64(c2[102],simde_mm_xor_si64(c2[54],simde_mm_xor_si64(c2[90],simde_mm_xor_si64(c2[42],simde_mm_xor_si64(c2[102],simde_mm_xor_si64(c2[42],c2[90])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
 
 //row: 38
-     d2[38]=_mm_xor_si64(c2[84],_mm_xor_si64(c2[72],_mm_xor_si64(c2[48],_mm_xor_si64(c2[72],_mm_xor_si64(c2[38],_mm_xor_si64(c2[26],_mm_xor_si64(c2[86],_mm_xor_si64(c2[62],_mm_xor_si64(c2[86],_mm_xor_si64(c2[28],_mm_xor_si64(c2[16],_mm_xor_si64(c2[88],_mm_xor_si64(c2[6],_mm_xor_si64(c2[90],_mm_xor_si64(c2[42],_mm_xor_si64(c2[18],_mm_xor_si64(c2[32],_mm_xor_si64(c2[20],_mm_xor_si64(c2[44],_mm_xor_si64(c2[34],_mm_xor_si64(c2[70],_mm_xor_si64(c2[34],_mm_xor_si64(c2[48],_mm_xor_si64(c2[36],_mm_xor_si64(c2[96],_mm_xor_si64(c2[84],_mm_xor_si64(c2[26],_mm_xor_si64(c2[50],_mm_xor_si64(c2[76],_mm_xor_si64(c2[76],_mm_xor_si64(c2[100],_mm_xor_si64(c2[54],_mm_xor_si64(c2[42],_mm_xor_si64(c2[30],c2[78]))))))))))))))))))))))))))))))))));
+     d2[38]=simde_mm_xor_si64(c2[84],simde_mm_xor_si64(c2[72],simde_mm_xor_si64(c2[48],simde_mm_xor_si64(c2[72],simde_mm_xor_si64(c2[38],simde_mm_xor_si64(c2[26],simde_mm_xor_si64(c2[86],simde_mm_xor_si64(c2[62],simde_mm_xor_si64(c2[86],simde_mm_xor_si64(c2[28],simde_mm_xor_si64(c2[16],simde_mm_xor_si64(c2[88],simde_mm_xor_si64(c2[6],simde_mm_xor_si64(c2[90],simde_mm_xor_si64(c2[42],simde_mm_xor_si64(c2[18],simde_mm_xor_si64(c2[32],simde_mm_xor_si64(c2[20],simde_mm_xor_si64(c2[44],simde_mm_xor_si64(c2[34],simde_mm_xor_si64(c2[70],simde_mm_xor_si64(c2[34],simde_mm_xor_si64(c2[48],simde_mm_xor_si64(c2[36],simde_mm_xor_si64(c2[96],simde_mm_xor_si64(c2[84],simde_mm_xor_si64(c2[26],simde_mm_xor_si64(c2[50],simde_mm_xor_si64(c2[76],simde_mm_xor_si64(c2[76],simde_mm_xor_si64(c2[100],simde_mm_xor_si64(c2[54],simde_mm_xor_si64(c2[42],simde_mm_xor_si64(c2[30],c2[78]))))))))))))))))))))))))))))))))));
 
 //row: 39
-     d2[39]=_mm_xor_si64(c2[36],_mm_xor_si64(c2[24],_mm_xor_si64(c2[12],_mm_xor_si64(c2[0],_mm_xor_si64(c2[24],_mm_xor_si64(c2[84],_mm_xor_si64(c2[86],_mm_xor_si64(c2[74],_mm_xor_si64(c2[38],_mm_xor_si64(c2[14],_mm_xor_si64(c2[76],_mm_xor_si64(c2[64],_mm_xor_si64(c2[40],_mm_xor_si64(c2[54],_mm_xor_si64(c2[42],_mm_xor_si64(c2[6],_mm_xor_si64(c2[90],_mm_xor_si64(c2[66],_mm_xor_si64(c2[92],_mm_xor_si64(c2[80],_mm_xor_si64(c2[68],_mm_xor_si64(c2[92],_mm_xor_si64(c2[94],_mm_xor_si64(c2[82],_mm_xor_si64(c2[22],_mm_xor_si64(c2[96],_mm_xor_si64(c2[84],_mm_xor_si64(c2[60],_mm_xor_si64(c2[48],_mm_xor_si64(c2[36],_mm_xor_si64(c2[86],_mm_xor_si64(c2[74],_mm_xor_si64(c2[98],_mm_xor_si64(c2[62],_mm_xor_si64(c2[40],_mm_xor_si64(c2[28],_mm_xor_si64(c2[28],_mm_xor_si64(c2[52],_mm_xor_si64(c2[102],_mm_xor_si64(c2[90],_mm_xor_si64(c2[90],_mm_xor_si64(c2[78],c2[30]))))))))))))))))))))))))))))))))))))))))));
+     d2[39]=simde_mm_xor_si64(c2[36],simde_mm_xor_si64(c2[24],simde_mm_xor_si64(c2[12],simde_mm_xor_si64(c2[0],simde_mm_xor_si64(c2[24],simde_mm_xor_si64(c2[84],simde_mm_xor_si64(c2[86],simde_mm_xor_si64(c2[74],simde_mm_xor_si64(c2[38],simde_mm_xor_si64(c2[14],simde_mm_xor_si64(c2[76],simde_mm_xor_si64(c2[64],simde_mm_xor_si64(c2[40],simde_mm_xor_si64(c2[54],simde_mm_xor_si64(c2[42],simde_mm_xor_si64(c2[6],simde_mm_xor_si64(c2[90],simde_mm_xor_si64(c2[66],simde_mm_xor_si64(c2[92],simde_mm_xor_si64(c2[80],simde_mm_xor_si64(c2[68],simde_mm_xor_si64(c2[92],simde_mm_xor_si64(c2[94],simde_mm_xor_si64(c2[82],simde_mm_xor_si64(c2[22],simde_mm_xor_si64(c2[96],simde_mm_xor_si64(c2[84],simde_mm_xor_si64(c2[60],simde_mm_xor_si64(c2[48],simde_mm_xor_si64(c2[36],simde_mm_xor_si64(c2[86],simde_mm_xor_si64(c2[74],simde_mm_xor_si64(c2[98],simde_mm_xor_si64(c2[62],simde_mm_xor_si64(c2[40],simde_mm_xor_si64(c2[28],simde_mm_xor_si64(c2[28],simde_mm_xor_si64(c2[52],simde_mm_xor_si64(c2[102],simde_mm_xor_si64(c2[90],simde_mm_xor_si64(c2[90],simde_mm_xor_si64(c2[78],c2[30]))))))))))))))))))))))))))))))))))))))))));
 
 //row: 40
-     d2[40]=_mm_xor_si64(c2[36],_mm_xor_si64(c2[0],_mm_xor_si64(c2[12],_mm_xor_si64(c2[72],_mm_xor_si64(c2[36],_mm_xor_si64(c2[0],_mm_xor_si64(c2[86],_mm_xor_si64(c2[50],_mm_xor_si64(c2[50],_mm_xor_si64(c2[14],_mm_xor_si64(c2[2],_mm_xor_si64(c2[26],_mm_xor_si64(c2[86],_mm_xor_si64(c2[76],_mm_xor_si64(c2[40],_mm_xor_si64(c2[28],_mm_xor_si64(c2[52],_mm_xor_si64(c2[16],_mm_xor_si64(c2[4],_mm_xor_si64(c2[54],_mm_xor_si64(c2[18],_mm_xor_si64(c2[6],_mm_xor_si64(c2[66],_mm_xor_si64(c2[78],_mm_xor_si64(c2[42],_mm_xor_si64(c2[92],_mm_xor_si64(c2[56],_mm_xor_si64(c2[80],_mm_xor_si64(c2[44],_mm_xor_si64(c2[80],_mm_xor_si64(c2[8],_mm_xor_si64(c2[68],_mm_xor_si64(c2[94],_mm_xor_si64(c2[58],_mm_xor_si64(c2[10],_mm_xor_si64(c2[34],_mm_xor_si64(c2[94],_mm_xor_si64(c2[96],_mm_xor_si64(c2[60],_mm_xor_si64(c2[60],_mm_xor_si64(c2[24],_mm_xor_si64(c2[24],_mm_xor_si64(c2[48],_mm_xor_si64(c2[12],_mm_xor_si64(c2[86],_mm_xor_si64(c2[50],_mm_xor_si64(c2[86],_mm_xor_si64(c2[14],_mm_xor_si64(c2[74],_mm_xor_si64(c2[40],_mm_xor_si64(c2[100],_mm_xor_si64(c2[40],_mm_xor_si64(c2[100],_mm_xor_si64(c2[40],_mm_xor_si64(c2[64],_mm_xor_si64(c2[28],_mm_xor_si64(c2[102],_mm_xor_si64(c2[66],_mm_xor_si64(c2[90],_mm_xor_si64(c2[54],_mm_xor_si64(c2[18],_mm_xor_si64(c2[42],c2[102]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+     d2[40]=simde_mm_xor_si64(c2[36],simde_mm_xor_si64(c2[0],simde_mm_xor_si64(c2[12],simde_mm_xor_si64(c2[72],simde_mm_xor_si64(c2[36],simde_mm_xor_si64(c2[0],simde_mm_xor_si64(c2[86],simde_mm_xor_si64(c2[50],simde_mm_xor_si64(c2[50],simde_mm_xor_si64(c2[14],simde_mm_xor_si64(c2[2],simde_mm_xor_si64(c2[26],simde_mm_xor_si64(c2[86],simde_mm_xor_si64(c2[76],simde_mm_xor_si64(c2[40],simde_mm_xor_si64(c2[28],simde_mm_xor_si64(c2[52],simde_mm_xor_si64(c2[16],simde_mm_xor_si64(c2[4],simde_mm_xor_si64(c2[54],simde_mm_xor_si64(c2[18],simde_mm_xor_si64(c2[6],simde_mm_xor_si64(c2[66],simde_mm_xor_si64(c2[78],simde_mm_xor_si64(c2[42],simde_mm_xor_si64(c2[92],simde_mm_xor_si64(c2[56],simde_mm_xor_si64(c2[80],simde_mm_xor_si64(c2[44],simde_mm_xor_si64(c2[80],simde_mm_xor_si64(c2[8],simde_mm_xor_si64(c2[68],simde_mm_xor_si64(c2[94],simde_mm_xor_si64(c2[58],simde_mm_xor_si64(c2[10],simde_mm_xor_si64(c2[34],simde_mm_xor_si64(c2[94],simde_mm_xor_si64(c2[96],simde_mm_xor_si64(c2[60],simde_mm_xor_si64(c2[60],simde_mm_xor_si64(c2[24],simde_mm_xor_si64(c2[24],simde_mm_xor_si64(c2[48],simde_mm_xor_si64(c2[12],simde_mm_xor_si64(c2[86],simde_mm_xor_si64(c2[50],simde_mm_xor_si64(c2[86],simde_mm_xor_si64(c2[14],simde_mm_xor_si64(c2[74],simde_mm_xor_si64(c2[40],simde_mm_xor_si64(c2[100],simde_mm_xor_si64(c2[40],simde_mm_xor_si64(c2[100],simde_mm_xor_si64(c2[40],simde_mm_xor_si64(c2[64],simde_mm_xor_si64(c2[28],simde_mm_xor_si64(c2[102],simde_mm_xor_si64(c2[66],simde_mm_xor_si64(c2[90],simde_mm_xor_si64(c2[54],simde_mm_xor_si64(c2[18],simde_mm_xor_si64(c2[42],c2[102]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
 
 //row: 41
-     d2[41]=_mm_xor_si64(c2[84],_mm_xor_si64(c2[72],_mm_xor_si64(c2[48],_mm_xor_si64(c2[72],_mm_xor_si64(c2[38],_mm_xor_si64(c2[26],_mm_xor_si64(c2[86],_mm_xor_si64(c2[62],_mm_xor_si64(c2[14],_mm_xor_si64(c2[28],_mm_xor_si64(c2[16],_mm_xor_si64(c2[88],_mm_xor_si64(c2[6],_mm_xor_si64(c2[90],_mm_xor_si64(c2[42],_mm_xor_si64(c2[18],_mm_xor_si64(c2[32],_mm_xor_si64(c2[20],_mm_xor_si64(c2[44],_mm_xor_si64(c2[34],_mm_xor_si64(c2[70],_mm_xor_si64(c2[70],_mm_xor_si64(c2[48],_mm_xor_si64(c2[36],_mm_xor_si64(c2[96],_mm_xor_si64(c2[84],_mm_xor_si64(c2[26],_mm_xor_si64(c2[50],_mm_xor_si64(c2[76],_mm_xor_si64(c2[76],_mm_xor_si64(c2[100],_mm_xor_si64(c2[54],_mm_xor_si64(c2[42],_mm_xor_si64(c2[30],c2[78]))))))))))))))))))))))))))))))))));
+     d2[41]=simde_mm_xor_si64(c2[84],simde_mm_xor_si64(c2[72],simde_mm_xor_si64(c2[48],simde_mm_xor_si64(c2[72],simde_mm_xor_si64(c2[38],simde_mm_xor_si64(c2[26],simde_mm_xor_si64(c2[86],simde_mm_xor_si64(c2[62],simde_mm_xor_si64(c2[14],simde_mm_xor_si64(c2[28],simde_mm_xor_si64(c2[16],simde_mm_xor_si64(c2[88],simde_mm_xor_si64(c2[6],simde_mm_xor_si64(c2[90],simde_mm_xor_si64(c2[42],simde_mm_xor_si64(c2[18],simde_mm_xor_si64(c2[32],simde_mm_xor_si64(c2[20],simde_mm_xor_si64(c2[44],simde_mm_xor_si64(c2[34],simde_mm_xor_si64(c2[70],simde_mm_xor_si64(c2[70],simde_mm_xor_si64(c2[48],simde_mm_xor_si64(c2[36],simde_mm_xor_si64(c2[96],simde_mm_xor_si64(c2[84],simde_mm_xor_si64(c2[26],simde_mm_xor_si64(c2[50],simde_mm_xor_si64(c2[76],simde_mm_xor_si64(c2[76],simde_mm_xor_si64(c2[100],simde_mm_xor_si64(c2[54],simde_mm_xor_si64(c2[42],simde_mm_xor_si64(c2[30],c2[78]))))))))))))))))))))))))))))))))));
   }
 }
diff --git a/openair1/PHY/CODING/nrLDPC_encoder/ldpc_BG2_Zc96_byte.c b/openair1/PHY/CODING/nrLDPC_encoder/ldpc_BG2_Zc96_byte.c
index 5b40d902a2ee47cb79efe6db7ca6c28a733e03a1..5c47cf0de3f0edc8faf1d8341687a5c6a541c819 100644
--- a/openair1/PHY/CODING/nrLDPC_encoder/ldpc_BG2_Zc96_byte.c
+++ b/openair1/PHY/CODING/nrLDPC_encoder/ldpc_BG2_Zc96_byte.c
@@ -1,9 +1,10 @@
+#ifdef __AVX2__
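+// Without AVX2 this file is compiled out entirely; the 128-bit SIMDE variant in
+// ldpc_BG2_Zc96_byte_128.c (added below) supplies the same function instead.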
 #include "PHY/sse_intrin.h"
 // generated code for Zc=96, byte encoding
 static inline __attribute__ ((no_sanitize("address", "undefined"))) void ldpc_BG2_Zc96_byte(uint8_t *c,uint8_t *d) {
-  __m256i *csimd=(__m256i *)c,*dsimd=(__m256i *)d;
+  simde__m256i *csimd=(simde__m256i *)c,*dsimd=(simde__m256i *)d;
 
-  __m256i *c2,*d2;
+  simde__m256i *c2,*d2;
 
   int i2;
   for (i2=0; i2<3; i2++) {
@@ -137,3 +138,4 @@ static inline __attribute__ ((no_sanitize("address", "undefined"))) void ldpc_BG
      d2[123]=simde_mm256_xor_si256(c2[601],simde_mm256_xor_si256(c2[541],simde_mm256_xor_si256(c2[1322],simde_mm256_xor_si256(c2[1202],simde_mm256_xor_si256(c2[1747],simde_mm256_xor_si256(c2[1687],simde_mm256_xor_si256(c2[788],simde_mm256_xor_si256(c2[186],simde_mm256_xor_si256(c2[1147],simde_mm256_xor_si256(c2[133],simde_mm256_xor_si256(c2[73],simde_mm256_xor_si256(c2[613],simde_mm256_xor_si256(c2[1818],simde_mm256_xor_si256(c2[1758],simde_mm256_xor_si256(c2[1880],simde_mm256_xor_si256(c2[980],simde_mm256_xor_si256(c2[684],simde_mm256_xor_si256(c2[566],simde_mm256_xor_si256(c2[746],simde_mm256_xor_si256(c2[1411],simde_mm256_xor_si256(c2[211],simde_mm256_xor_si256(c2[450],simde_mm256_xor_si256(c2[217],simde_mm256_xor_si256(c2[157],simde_mm256_xor_si256(c2[1598],simde_mm256_xor_si256(c2[1358],simde_mm256_xor_si256(c2[1363],simde_mm256_xor_si256(c2[1064],simde_mm256_xor_si256(c2[1249],simde_mm256_xor_si256(c2[830],simde_mm256_xor_si256(c2[1430],simde_mm256_xor_si256(c2[535],simde_mm256_xor_si256(c2[475],simde_mm256_xor_si256(c2[1855],c2[1315]))))))))))))))))))))))))))))))))));
   }
 }
+#endif
diff --git a/openair1/PHY/CODING/nrLDPC_encoder/ldpc_BG2_Zc96_byte_128.c b/openair1/PHY/CODING/nrLDPC_encoder/ldpc_BG2_Zc96_byte_128.c
new file mode 100644
index 0000000000000000000000000000000000000000..72425752811f0ad238a64b475caea79612cbbd61
--- /dev/null
+++ b/openair1/PHY/CODING/nrLDPC_encoder/ldpc_BG2_Zc96_byte_128.c
@@ -0,0 +1,141 @@
+#ifndef __AVX2__
+#include "PHY/sse_intrin.h"
+// generated code for Zc=96, byte encoding
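+// 128-bit fallback for builds without AVX2; SIMDE lowers the simde_mm_* calls to
+// native SSE or NEON where available. A Zc=96-byte column spans 96/16 = 6 vectors,
+// hence the i2<6 loop and the stride-6 d2[] row indices below.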
+static inline __attribute__ ((no_sanitize("address", "undefined"))) void ldpc_BG2_Zc96_byte(uint8_t *c,uint8_t *d) {
+  simde__m128i *csimd=(simde__m128i *)c,*dsimd=(simde__m128i *)d;
+
+  simde__m128i *c2,*d2;
+
+  int i2;
+  for (i2=0; i2<6; i2++) {
+     c2=&csimd[i2];
+     d2=&dsimd[i2];
+
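+// Each row below is the GF(2) sum (vector XOR) of the input columns that the BG2
+// base graph ties to that parity row; the c2[] indices address the doubled column
+// buffer prepared by encode_parity_check_part_optim().
+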
+//row: 0
+     d2[0]=simde_mm_xor_si128(c2[1564],simde_mm_xor_si128(c2[1201],simde_mm_xor_si128(c2[961],simde_mm_xor_si128(c2[12],simde_mm_xor_si128(c2[133],simde_mm_xor_si128(c2[854],simde_mm_xor_si128(c2[628],simde_mm_xor_si128(c2[1708],simde_mm_xor_si128(c2[160],simde_mm_xor_si128(c2[398],simde_mm_xor_si128(c2[517],simde_mm_xor_si128(c2[1850],simde_mm_xor_si128(c2[1608],simde_mm_xor_si128(c2[49],simde_mm_xor_si128(c2[1385],simde_mm_xor_si128(c2[904],simde_mm_xor_si128(c2[796],simde_mm_xor_si128(c2[1753],simde_mm_xor_si128(c2[1273],simde_mm_xor_si128(c2[1289],simde_mm_xor_si128(c2[685],simde_mm_xor_si128(c2[1061],simde_mm_xor_si128(c2[217],simde_mm_xor_si128(c2[1417],simde_mm_xor_si128(c2[1432],simde_mm_xor_si128(c2[348],c2[1193]))))))))))))))))))))))))));
+
+//row: 1
+     d2[6]=simde_mm_xor_si128(c2[1684],simde_mm_xor_si128(c2[1564],simde_mm_xor_si128(c2[1201],simde_mm_xor_si128(c2[961],simde_mm_xor_si128(c2[132],simde_mm_xor_si128(c2[12],simde_mm_xor_si128(c2[133],simde_mm_xor_si128(c2[854],simde_mm_xor_si128(c2[748],simde_mm_xor_si128(c2[628],simde_mm_xor_si128(c2[1708],simde_mm_xor_si128(c2[280],simde_mm_xor_si128(c2[160],simde_mm_xor_si128(c2[398],simde_mm_xor_si128(c2[517],simde_mm_xor_si128(c2[1850],simde_mm_xor_si128(c2[1608],simde_mm_xor_si128(c2[49],simde_mm_xor_si128(c2[1385],simde_mm_xor_si128(c2[904],simde_mm_xor_si128(c2[916],simde_mm_xor_si128(c2[796],simde_mm_xor_si128(c2[1753],simde_mm_xor_si128(c2[1273],simde_mm_xor_si128(c2[1289],simde_mm_xor_si128(c2[685],simde_mm_xor_si128(c2[1061],simde_mm_xor_si128(c2[217],simde_mm_xor_si128(c2[1417],simde_mm_xor_si128(c2[1552],simde_mm_xor_si128(c2[1432],simde_mm_xor_si128(c2[348],c2[1193]))))))))))))))))))))))))))))))));
+
+//row: 2
+     d2[12]=simde_mm_xor_si128(c2[1684],simde_mm_xor_si128(c2[1564],simde_mm_xor_si128(c2[1321],simde_mm_xor_si128(c2[1201],simde_mm_xor_si128(c2[961],simde_mm_xor_si128(c2[132],simde_mm_xor_si128(c2[12],simde_mm_xor_si128(c2[133],simde_mm_xor_si128(c2[854],simde_mm_xor_si128(c2[748],simde_mm_xor_si128(c2[628],simde_mm_xor_si128(c2[1708],simde_mm_xor_si128(c2[280],simde_mm_xor_si128(c2[160],simde_mm_xor_si128(c2[518],simde_mm_xor_si128(c2[398],simde_mm_xor_si128(c2[517],simde_mm_xor_si128(c2[51],simde_mm_xor_si128(c2[1850],simde_mm_xor_si128(c2[1608],simde_mm_xor_si128(c2[49],simde_mm_xor_si128(c2[1505],simde_mm_xor_si128(c2[1385],simde_mm_xor_si128(c2[904],simde_mm_xor_si128(c2[916],simde_mm_xor_si128(c2[796],simde_mm_xor_si128(c2[1873],simde_mm_xor_si128(c2[1753],simde_mm_xor_si128(c2[1273],simde_mm_xor_si128(c2[1409],simde_mm_xor_si128(c2[1289],simde_mm_xor_si128(c2[685],simde_mm_xor_si128(c2[1181],simde_mm_xor_si128(c2[1061],simde_mm_xor_si128(c2[217],simde_mm_xor_si128(c2[1417],simde_mm_xor_si128(c2[1552],simde_mm_xor_si128(c2[1432],simde_mm_xor_si128(c2[468],simde_mm_xor_si128(c2[348],c2[1193]))))))))))))))))))))))))))))))))))))))));
+
+//row: 3
+     d2[18]=simde_mm_xor_si128(c2[1564],simde_mm_xor_si128(c2[1201],simde_mm_xor_si128(c2[961],simde_mm_xor_si128(c2[12],simde_mm_xor_si128(c2[133],simde_mm_xor_si128(c2[974],simde_mm_xor_si128(c2[854],simde_mm_xor_si128(c2[628],simde_mm_xor_si128(c2[1828],simde_mm_xor_si128(c2[1708],simde_mm_xor_si128(c2[160],simde_mm_xor_si128(c2[398],simde_mm_xor_si128(c2[517],simde_mm_xor_si128(c2[1850],simde_mm_xor_si128(c2[1608],simde_mm_xor_si128(c2[169],simde_mm_xor_si128(c2[49],simde_mm_xor_si128(c2[1385],simde_mm_xor_si128(c2[1024],simde_mm_xor_si128(c2[904],simde_mm_xor_si128(c2[796],simde_mm_xor_si128(c2[1753],simde_mm_xor_si128(c2[1393],simde_mm_xor_si128(c2[1273],simde_mm_xor_si128(c2[1289],simde_mm_xor_si128(c2[805],simde_mm_xor_si128(c2[685],simde_mm_xor_si128(c2[1061],simde_mm_xor_si128(c2[217],simde_mm_xor_si128(c2[1537],simde_mm_xor_si128(c2[1417],simde_mm_xor_si128(c2[1432],simde_mm_xor_si128(c2[348],simde_mm_xor_si128(c2[1313],c2[1193]))))))))))))))))))))))))))))))))));
+
+//row: 4
+     d2[24]=simde_mm_xor_si128(c2[1320],simde_mm_xor_si128(c2[1200],simde_mm_xor_si128(c2[843],simde_mm_xor_si128(c2[603],simde_mm_xor_si128(c2[964],simde_mm_xor_si128(c2[1693],simde_mm_xor_si128(c2[1573],simde_mm_xor_si128(c2[1694],simde_mm_xor_si128(c2[496],simde_mm_xor_si128(c2[1216],simde_mm_xor_si128(c2[384],simde_mm_xor_si128(c2[264],simde_mm_xor_si128(c2[1344],simde_mm_xor_si128(c2[1841],simde_mm_xor_si128(c2[1721],simde_mm_xor_si128(c2[40],simde_mm_xor_si128(c2[159],simde_mm_xor_si128(c2[1492],simde_mm_xor_si128(c2[1250],simde_mm_xor_si128(c2[1610],simde_mm_xor_si128(c2[1021],simde_mm_xor_si128(c2[540],simde_mm_xor_si128(c2[552],simde_mm_xor_si128(c2[432],simde_mm_xor_si128(c2[1395],simde_mm_xor_si128(c2[915],simde_mm_xor_si128(c2[925],simde_mm_xor_si128(c2[327],simde_mm_xor_si128(c2[697],simde_mm_xor_si128(c2[1778],simde_mm_xor_si128(c2[1059],simde_mm_xor_si128(c2[1188],simde_mm_xor_si128(c2[1068],simde_mm_xor_si128(c2[1909],c2[829]))))))))))))))))))))))))))))))))));
+
+//row: 5
+     d2[30]=simde_mm_xor_si128(c2[1681],simde_mm_xor_si128(c2[1561],simde_mm_xor_si128(c2[1204],simde_mm_xor_si128(c2[964],simde_mm_xor_si128(c2[1200],simde_mm_xor_si128(c2[135],simde_mm_xor_si128(c2[15],simde_mm_xor_si128(c2[136],simde_mm_xor_si128(c2[857],simde_mm_xor_si128(c2[1454],simde_mm_xor_si128(c2[745],simde_mm_xor_si128(c2[625],simde_mm_xor_si128(c2[1705],simde_mm_xor_si128(c2[277],simde_mm_xor_si128(c2[157],simde_mm_xor_si128(c2[401],simde_mm_xor_si128(c2[520],simde_mm_xor_si128(c2[1853],simde_mm_xor_si128(c2[1611],simde_mm_xor_si128(c2[52],simde_mm_xor_si128(c2[1382],simde_mm_xor_si128(c2[901],simde_mm_xor_si128(c2[1141],simde_mm_xor_si128(c2[913],simde_mm_xor_si128(c2[793],simde_mm_xor_si128(c2[1756],simde_mm_xor_si128(c2[1276],simde_mm_xor_si128(c2[1286],simde_mm_xor_si128(c2[688],simde_mm_xor_si128(c2[89],simde_mm_xor_si128(c2[1058],simde_mm_xor_si128(c2[220],simde_mm_xor_si128(c2[1420],simde_mm_xor_si128(c2[1549],simde_mm_xor_si128(c2[1429],simde_mm_xor_si128(c2[351],c2[1190]))))))))))))))))))))))))))))))))))));
+
+//row: 6
+     d2[36]=simde_mm_xor_si128(c2[724],simde_mm_xor_si128(c2[604],simde_mm_xor_si128(c2[241],simde_mm_xor_si128(c2[1],simde_mm_xor_si128(c2[122],simde_mm_xor_si128(c2[1097],simde_mm_xor_si128(c2[977],simde_mm_xor_si128(c2[1092],simde_mm_xor_si128(c2[1813],simde_mm_xor_si128(c2[1707],simde_mm_xor_si128(c2[1587],simde_mm_xor_si128(c2[748],simde_mm_xor_si128(c2[1239],simde_mm_xor_si128(c2[1119],simde_mm_xor_si128(c2[1357],simde_mm_xor_si128(c2[1476],simde_mm_xor_si128(c2[890],simde_mm_xor_si128(c2[648],simde_mm_xor_si128(c2[1008],simde_mm_xor_si128(c2[425],simde_mm_xor_si128(c2[1863],simde_mm_xor_si128(c2[1505],simde_mm_xor_si128(c2[1875],simde_mm_xor_si128(c2[1755],simde_mm_xor_si128(c2[793],simde_mm_xor_si128(c2[313],simde_mm_xor_si128(c2[329],simde_mm_xor_si128(c2[1644],simde_mm_xor_si128(c2[564],simde_mm_xor_si128(c2[101],simde_mm_xor_si128(c2[1176],simde_mm_xor_si128(c2[457],simde_mm_xor_si128(c2[592],simde_mm_xor_si128(c2[472],simde_mm_xor_si128(c2[1313],simde_mm_xor_si128(c2[233],c2[231]))))))))))))))))))))))))))))))))))));
+
+//row: 7
+     d2[42]=simde_mm_xor_si128(c2[485],simde_mm_xor_si128(c2[365],simde_mm_xor_si128(c2[1441],simde_mm_xor_si128(c2[2],simde_mm_xor_si128(c2[1084],simde_mm_xor_si128(c2[1681],simde_mm_xor_si128(c2[844],simde_mm_xor_si128(c2[852],simde_mm_xor_si128(c2[732],simde_mm_xor_si128(c2[1814],simde_mm_xor_si128(c2[853],simde_mm_xor_si128(c2[16],simde_mm_xor_si128(c2[1574],simde_mm_xor_si128(c2[857],simde_mm_xor_si128(c2[737],simde_mm_xor_si128(c2[17],simde_mm_xor_si128(c2[1468],simde_mm_xor_si128(c2[1348],simde_mm_xor_si128(c2[505],simde_mm_xor_si128(c2[509],simde_mm_xor_si128(c2[1705],simde_mm_xor_si128(c2[1585],simde_mm_xor_si128(c2[1000],simde_mm_xor_si128(c2[880],simde_mm_xor_si128(c2[37],simde_mm_xor_si128(c2[1118],simde_mm_xor_si128(c2[281],simde_mm_xor_si128(c2[1237],simde_mm_xor_si128(c2[400],simde_mm_xor_si128(c2[651],simde_mm_xor_si128(c2[1733],simde_mm_xor_si128(c2[409],simde_mm_xor_si128(c2[1491],simde_mm_xor_si128(c2[769],simde_mm_xor_si128(c2[52],simde_mm_xor_si128(c2[1851],simde_mm_xor_si128(c2[180],simde_mm_xor_si128(c2[1262],simde_mm_xor_si128(c2[1624],simde_mm_xor_si128(c2[901],simde_mm_xor_si128(c2[781],simde_mm_xor_si128(c2[1265],simde_mm_xor_si128(c2[1636],simde_mm_xor_si128(c2[1516],simde_mm_xor_si128(c2[673],simde_mm_xor_si128(c2[554],simde_mm_xor_si128(c2[1636],simde_mm_xor_si128(c2[74],simde_mm_xor_si128(c2[1276],simde_mm_xor_si128(c2[1156],simde_mm_xor_si128(c2[84],simde_mm_xor_si128(c2[1166],simde_mm_xor_si128(c2[1405],simde_mm_xor_si128(c2[688],simde_mm_xor_si128(c2[568],simde_mm_xor_si128(c2[85],simde_mm_xor_si128(c2[1781],simde_mm_xor_si128(c2[938],simde_mm_xor_si128(c2[937],simde_mm_xor_si128(c2[100],simde_mm_xor_si128(c2[218],simde_mm_xor_si128(c2[1420],simde_mm_xor_si128(c2[1300],simde_mm_xor_si128(c2[353],simde_mm_xor_si128(c2[233],simde_mm_xor_si128(c2[1309],simde_mm_xor_si128(c2[1068],simde_mm_xor_si128(c2[231],simde_mm_xor_si128(c2[1913],simde_mm_xor_si128(c2[1190],c2[1070]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 8
+     d2[48]=simde_mm_xor_si128(c2[722],simde_mm_xor_si128(c2[602],simde_mm_xor_si128(c2[365],simde_mm_xor_si128(c2[245],simde_mm_xor_si128(c2[5],simde_mm_xor_si128(c2[721],simde_mm_xor_si128(c2[1095],simde_mm_xor_si128(c2[975],simde_mm_xor_si128(c2[1096],simde_mm_xor_si128(c2[1817],simde_mm_xor_si128(c2[736],simde_mm_xor_si128(c2[1705],simde_mm_xor_si128(c2[1585],simde_mm_xor_si128(c2[746],simde_mm_xor_si128(c2[1237],simde_mm_xor_si128(c2[1117],simde_mm_xor_si128(c2[1481],simde_mm_xor_si128(c2[1361],simde_mm_xor_si128(c2[1480],simde_mm_xor_si128(c2[1008],simde_mm_xor_si128(c2[888],simde_mm_xor_si128(c2[652],simde_mm_xor_si128(c2[1012],simde_mm_xor_si128(c2[543],simde_mm_xor_si128(c2[423],simde_mm_xor_si128(c2[1861],simde_mm_xor_si128(c2[1873],simde_mm_xor_si128(c2[1753],simde_mm_xor_si128(c2[917],simde_mm_xor_si128(c2[797],simde_mm_xor_si128(c2[317],simde_mm_xor_si128(c2[447],simde_mm_xor_si128(c2[327],simde_mm_xor_si128(c2[1648],simde_mm_xor_si128(c2[219],simde_mm_xor_si128(c2[99],simde_mm_xor_si128(c2[1180],simde_mm_xor_si128(c2[461],simde_mm_xor_si128(c2[590],simde_mm_xor_si128(c2[470],simde_mm_xor_si128(c2[1431],simde_mm_xor_si128(c2[1311],c2[231]))))))))))))))))))))))))))))))))))))))))));
+
+//row: 9
+     d2[54]=simde_mm_xor_si128(c2[724],simde_mm_xor_si128(c2[4],simde_mm_xor_si128(c2[1803],simde_mm_xor_si128(c2[361],simde_mm_xor_si128(c2[1440],simde_mm_xor_si128(c2[121],simde_mm_xor_si128(c2[1200],simde_mm_xor_si128(c2[1097],simde_mm_xor_si128(c2[377],simde_mm_xor_si128(c2[257],simde_mm_xor_si128(c2[1212],simde_mm_xor_si128(c2[372],simde_mm_xor_si128(c2[14],simde_mm_xor_si128(c2[1093],simde_mm_xor_si128(c2[1453],simde_mm_xor_si128(c2[1707],simde_mm_xor_si128(c2[987],simde_mm_xor_si128(c2[867],simde_mm_xor_si128(c2[868],simde_mm_xor_si128(c2[28],simde_mm_xor_si128(c2[1239],simde_mm_xor_si128(c2[519],simde_mm_xor_si128(c2[399],simde_mm_xor_si128(c2[1477],simde_mm_xor_si128(c2[637],simde_mm_xor_si128(c2[1596],simde_mm_xor_si128(c2[756],simde_mm_xor_si128(c2[1010],simde_mm_xor_si128(c2[170],simde_mm_xor_si128(c2[768],simde_mm_xor_si128(c2[1853],simde_mm_xor_si128(c2[1128],simde_mm_xor_si128(c2[288],simde_mm_xor_si128(c2[545],simde_mm_xor_si128(c2[1624],simde_mm_xor_si128(c2[64],simde_mm_xor_si128(c2[1143],simde_mm_xor_si128(c2[1875],simde_mm_xor_si128(c2[1155],simde_mm_xor_si128(c2[1035],simde_mm_xor_si128(c2[913],simde_mm_xor_si128(c2[73],simde_mm_xor_si128(c2[433],simde_mm_xor_si128(c2[1512],simde_mm_xor_si128(c2[449],simde_mm_xor_si128(c2[1528],simde_mm_xor_si128(c2[1764],simde_mm_xor_si128(c2[924],simde_mm_xor_si128(c2[221],simde_mm_xor_si128(c2[1300],simde_mm_xor_si128(c2[1296],simde_mm_xor_si128(c2[456],simde_mm_xor_si128(c2[577],simde_mm_xor_si128(c2[1656],simde_mm_xor_si128(c2[578],simde_mm_xor_si128(c2[592],simde_mm_xor_si128(c2[1791],simde_mm_xor_si128(c2[1671],simde_mm_xor_si128(c2[1433],simde_mm_xor_si128(c2[593],simde_mm_xor_si128(c2[353],c2[1432])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 10
+     d2[60]=simde_mm_xor_si128(c2[1323],simde_mm_xor_si128(c2[972],simde_mm_xor_si128(c2[793],c2[567])));
+
+//row: 11
+     d2[66]=simde_mm_xor_si128(c2[842],simde_mm_xor_si128(c2[485],simde_mm_xor_si128(c2[245],simde_mm_xor_si128(c2[2],simde_mm_xor_si128(c2[1215],simde_mm_xor_si128(c2[1336],simde_mm_xor_si128(c2[252],simde_mm_xor_si128(c2[132],simde_mm_xor_si128(c2[1825],simde_mm_xor_si128(c2[1106],simde_mm_xor_si128(c2[986],simde_mm_xor_si128(c2[1357],simde_mm_xor_si128(c2[1601],simde_mm_xor_si128(c2[1720],simde_mm_xor_si128(c2[1128],simde_mm_xor_si128(c2[892],simde_mm_xor_si128(c2[1372],simde_mm_xor_si128(c2[1252],simde_mm_xor_si128(c2[663],simde_mm_xor_si128(c2[302],simde_mm_xor_si128(c2[182],simde_mm_xor_si128(c2[74],simde_mm_xor_si128(c2[1037],simde_mm_xor_si128(c2[677],simde_mm_xor_si128(c2[557],simde_mm_xor_si128(c2[567],simde_mm_xor_si128(c2[89],simde_mm_xor_si128(c2[1888],simde_mm_xor_si128(c2[1529],simde_mm_xor_si128(c2[339],simde_mm_xor_si128(c2[1420],simde_mm_xor_si128(c2[821],simde_mm_xor_si128(c2[701],simde_mm_xor_si128(c2[710],simde_mm_xor_si128(c2[1551],simde_mm_xor_si128(c2[591],simde_mm_xor_si128(c2[471],c2[1792])))))))))))))))))))))))))))))))))))));
+
+//row: 12
+     d2[72]=simde_mm_xor_si128(c2[1085],simde_mm_xor_si128(c2[965],simde_mm_xor_si128(c2[602],simde_mm_xor_si128(c2[362],simde_mm_xor_si128(c2[1452],simde_mm_xor_si128(c2[1332],simde_mm_xor_si128(c2[1453],simde_mm_xor_si128(c2[255],simde_mm_xor_si128(c2[854],simde_mm_xor_si128(c2[149],simde_mm_xor_si128(c2[29],simde_mm_xor_si128(c2[1109],simde_mm_xor_si128(c2[1600],simde_mm_xor_si128(c2[1480],simde_mm_xor_si128(c2[1718],simde_mm_xor_si128(c2[1837],simde_mm_xor_si128(c2[1601],simde_mm_xor_si128(c2[1251],simde_mm_xor_si128(c2[1009],simde_mm_xor_si128(c2[1369],simde_mm_xor_si128(c2[780],simde_mm_xor_si128(c2[305],simde_mm_xor_si128(c2[317],simde_mm_xor_si128(c2[197],simde_mm_xor_si128(c2[1154],simde_mm_xor_si128(c2[674],simde_mm_xor_si128(c2[684],simde_mm_xor_si128(c2[86],simde_mm_xor_si128(c2[456],simde_mm_xor_si128(c2[1537],simde_mm_xor_si128(c2[818],simde_mm_xor_si128(c2[953],simde_mm_xor_si128(c2[833],simde_mm_xor_si128(c2[1668],c2[588]))))))))))))))))))))))))))))))))));
+
+//row: 13
+     d2[78]=simde_mm_xor_si128(c2[363],simde_mm_xor_si128(c2[0],simde_mm_xor_si128(c2[1685],simde_mm_xor_si128(c2[123],simde_mm_xor_si128(c2[736],simde_mm_xor_si128(c2[857],simde_mm_xor_si128(c2[1692],simde_mm_xor_si128(c2[1572],simde_mm_xor_si128(c2[1573],simde_mm_xor_si128(c2[1346],simde_mm_xor_si128(c2[627],simde_mm_xor_si128(c2[507],simde_mm_xor_si128(c2[878],simde_mm_xor_si128(c2[1116],simde_mm_xor_si128(c2[1241],simde_mm_xor_si128(c2[649],simde_mm_xor_si128(c2[413],simde_mm_xor_si128(c2[893],simde_mm_xor_si128(c2[773],simde_mm_xor_si128(c2[184],simde_mm_xor_si128(c2[1742],simde_mm_xor_si128(c2[1622],simde_mm_xor_si128(c2[1514],simde_mm_xor_si128(c2[552],simde_mm_xor_si128(c2[192],simde_mm_xor_si128(c2[72],simde_mm_xor_si128(c2[88],simde_mm_xor_si128(c2[1529],simde_mm_xor_si128(c2[1409],simde_mm_xor_si128(c2[1779],simde_mm_xor_si128(c2[941],simde_mm_xor_si128(c2[336],simde_mm_xor_si128(c2[216],simde_mm_xor_si128(c2[458],simde_mm_xor_si128(c2[231],simde_mm_xor_si128(c2[1072],simde_mm_xor_si128(c2[112],c2[1911])))))))))))))))))))))))))))))))))))));
+
+//row: 14
+     d2[84]=simde_mm_xor_si128(c2[360],simde_mm_xor_si128(c2[240],simde_mm_xor_si128(c2[3],simde_mm_xor_si128(c2[1802],simde_mm_xor_si128(c2[1565],simde_mm_xor_si128(c2[1562],simde_mm_xor_si128(c2[1325],simde_mm_xor_si128(c2[733],simde_mm_xor_si128(c2[613],simde_mm_xor_si128(c2[376],simde_mm_xor_si128(c2[734],simde_mm_xor_si128(c2[497],simde_mm_xor_si128(c2[1455],simde_mm_xor_si128(c2[1332],simde_mm_xor_si128(c2[1212],simde_mm_xor_si128(c2[373],simde_mm_xor_si128(c2[1349],simde_mm_xor_si128(c2[1229],simde_mm_xor_si128(c2[986],simde_mm_xor_si128(c2[384],simde_mm_xor_si128(c2[267],simde_mm_xor_si128(c2[147],simde_mm_xor_si128(c2[881],simde_mm_xor_si128(c2[761],simde_mm_xor_si128(c2[518],simde_mm_xor_si128(c2[999],simde_mm_xor_si128(c2[756],simde_mm_xor_si128(c2[1118],simde_mm_xor_si128(c2[881],simde_mm_xor_si128(c2[532],simde_mm_xor_si128(c2[289],simde_mm_xor_si128(c2[290],simde_mm_xor_si128(c2[53],simde_mm_xor_si128(c2[650],simde_mm_xor_si128(c2[533],simde_mm_xor_si128(c2[413],simde_mm_xor_si128(c2[61],simde_mm_xor_si128(c2[1743],simde_mm_xor_si128(c2[1505],simde_mm_xor_si128(c2[1382],simde_mm_xor_si128(c2[1262],simde_mm_xor_si128(c2[1517],simde_mm_xor_si128(c2[1397],simde_mm_xor_si128(c2[1154],simde_mm_xor_si128(c2[435],simde_mm_xor_si128(c2[192],simde_mm_xor_si128(c2[1874],simde_mm_xor_si128(c2[1757],simde_mm_xor_si128(c2[1637],simde_mm_xor_si128(c2[793],simde_mm_xor_si128(c2[1884],simde_mm_xor_si128(c2[1647],simde_mm_xor_si128(c2[1286],simde_mm_xor_si128(c2[1169],simde_mm_xor_si128(c2[1049],simde_mm_xor_si128(c2[1656],simde_mm_xor_si128(c2[1419],simde_mm_xor_si128(c2[818],simde_mm_xor_si128(c2[581],simde_mm_xor_si128(c2[99],simde_mm_xor_si128(c2[1901],simde_mm_xor_si128(c2[1781],simde_mm_xor_si128(c2[228],simde_mm_xor_si128(c2[108],simde_mm_xor_si128(c2[1790],simde_mm_xor_si128(c2[949],simde_mm_xor_si128(c2[712],simde_mm_xor_si128(c2[1788],simde_mm_xor_si128(c2[1671],c2[1551])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 15
+     d2[90]=simde_mm_xor_si128(c2[1442],simde_mm_xor_si128(c2[1803],simde_mm_xor_si128(c2[1683],simde_mm_xor_si128(c2[1085],simde_mm_xor_si128(c2[1320],simde_mm_xor_si128(c2[845],simde_mm_xor_si128(c2[1080],simde_mm_xor_si128(c2[484],simde_mm_xor_si128(c2[1815],simde_mm_xor_si128(c2[257],simde_mm_xor_si128(c2[137],simde_mm_xor_si128(c2[17],simde_mm_xor_si128(c2[252],simde_mm_xor_si128(c2[732],simde_mm_xor_si128(c2[973],simde_mm_xor_si128(c2[506],simde_mm_xor_si128(c2[867],simde_mm_xor_si128(c2[747],simde_mm_xor_si128(c2[1586],simde_mm_xor_si128(c2[1827],simde_mm_xor_si128(c2[38],simde_mm_xor_si128(c2[399],simde_mm_xor_si128(c2[279],simde_mm_xor_si128(c2[276],simde_mm_xor_si128(c2[517],simde_mm_xor_si128(c2[401],simde_mm_xor_si128(c2[636],simde_mm_xor_si128(c2[1728],simde_mm_xor_si128(c2[50],simde_mm_xor_si128(c2[1492],simde_mm_xor_si128(c2[1733],simde_mm_xor_si128(c2[1852],simde_mm_xor_si128(c2[168],simde_mm_xor_si128(c2[1263],simde_mm_xor_si128(c2[1504],simde_mm_xor_si128(c2[782],simde_mm_xor_si128(c2[1023],simde_mm_xor_si128(c2[674],simde_mm_xor_si128(c2[1035],simde_mm_xor_si128(c2[915],simde_mm_xor_si128(c2[1637],simde_mm_xor_si128(c2[1872],simde_mm_xor_si128(c2[1157],simde_mm_xor_si128(c2[1392],simde_mm_xor_si128(c2[1167],simde_mm_xor_si128(c2[1408],simde_mm_xor_si128(c2[569],simde_mm_xor_si128(c2[804],simde_mm_xor_si128(c2[939],simde_mm_xor_si128(c2[1180],simde_mm_xor_si128(c2[101],simde_mm_xor_si128(c2[336],simde_mm_xor_si128(c2[1301],simde_mm_xor_si128(c2[1536],simde_mm_xor_si128(c2[1310],simde_mm_xor_si128(c2[1671],simde_mm_xor_si128(c2[1551],simde_mm_xor_si128(c2[232],simde_mm_xor_si128(c2[473],simde_mm_xor_si128(c2[1071],c2[1312]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 16
+     d2[96]=simde_mm_xor_si128(c2[601],simde_mm_xor_si128(c2[481],simde_mm_xor_si128(c2[1682],simde_mm_xor_si128(c2[1562],simde_mm_xor_si128(c2[124],simde_mm_xor_si128(c2[1325],simde_mm_xor_si128(c2[1205],simde_mm_xor_si128(c2[1803],simde_mm_xor_si128(c2[965],simde_mm_xor_si128(c2[974],simde_mm_xor_si128(c2[854],simde_mm_xor_si128(c2[136],simde_mm_xor_si128(c2[16],simde_mm_xor_si128(c2[975],simde_mm_xor_si128(c2[137],simde_mm_xor_si128(c2[1696],simde_mm_xor_si128(c2[852],simde_mm_xor_si128(c2[857],simde_mm_xor_si128(c2[1584],simde_mm_xor_si128(c2[1464],simde_mm_xor_si128(c2[746],simde_mm_xor_si128(c2[626],simde_mm_xor_si128(c2[625],simde_mm_xor_si128(c2[1706],simde_mm_xor_si128(c2[1116],simde_mm_xor_si128(c2[996],simde_mm_xor_si128(c2[278],simde_mm_xor_si128(c2[158],simde_mm_xor_si128(c2[1240],simde_mm_xor_si128(c2[516],simde_mm_xor_si128(c2[396],simde_mm_xor_si128(c2[1359],simde_mm_xor_si128(c2[521],simde_mm_xor_si128(c2[773],simde_mm_xor_si128(c2[49],simde_mm_xor_si128(c2[1848],simde_mm_xor_si128(c2[531],simde_mm_xor_si128(c2[1612],simde_mm_xor_si128(c2[891],simde_mm_xor_si128(c2[53],simde_mm_xor_si128(c2[302],simde_mm_xor_si128(c2[1503],simde_mm_xor_si128(c2[1383],simde_mm_xor_si128(c2[1740],simde_mm_xor_si128(c2[902],simde_mm_xor_si128(c2[1752],simde_mm_xor_si128(c2[1632],simde_mm_xor_si128(c2[914],simde_mm_xor_si128(c2[794],simde_mm_xor_si128(c2[676],simde_mm_xor_si128(c2[1877],simde_mm_xor_si128(c2[1757],simde_mm_xor_si128(c2[196],simde_mm_xor_si128(c2[1277],simde_mm_xor_si128(c2[206],simde_mm_xor_si128(c2[1407],simde_mm_xor_si128(c2[1287],simde_mm_xor_si128(c2[1527],simde_mm_xor_si128(c2[689],simde_mm_xor_si128(c2[1897],simde_mm_xor_si128(c2[1179],simde_mm_xor_si128(c2[1059],simde_mm_xor_si128(c2[1059],simde_mm_xor_si128(c2[221],simde_mm_xor_si128(c2[340],simde_mm_xor_si128(c2[1421],simde_mm_xor_si128(c2[469],simde_mm_xor_si128(c2[349],simde_mm_xor_si128(c2[1550],simde_mm_xor_si128(c2[1430],simde_mm_xor_si128(c2[1190],simde_mm_xor_si128(c2[472],simde_mm_xor_si128(c2[352],simde_mm_xor_si128(c2[110],simde_mm_xor_si128(c2[1191],c2[233])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 17
+     d2[102]=simde_mm_xor_si128(c2[845],simde_mm_xor_si128(c2[725],simde_mm_xor_si128(c2[485],simde_mm_xor_si128(c2[365],simde_mm_xor_si128(c2[362],simde_mm_xor_si128(c2[122],simde_mm_xor_si128(c2[2],simde_mm_xor_si128(c2[122],simde_mm_xor_si128(c2[1681],simde_mm_xor_si128(c2[1212],simde_mm_xor_si128(c2[1092],simde_mm_xor_si128(c2[852],simde_mm_xor_si128(c2[732],simde_mm_xor_si128(c2[1213],simde_mm_xor_si128(c2[853],simde_mm_xor_si128(c2[15],simde_mm_xor_si128(c2[1574],simde_mm_xor_si128(c2[1695],simde_mm_xor_si128(c2[1828],simde_mm_xor_si128(c2[1708],simde_mm_xor_si128(c2[1468],simde_mm_xor_si128(c2[1348],simde_mm_xor_si128(c2[869],simde_mm_xor_si128(c2[509],simde_mm_xor_si128(c2[1360],simde_mm_xor_si128(c2[1240],simde_mm_xor_si128(c2[1000],simde_mm_xor_si128(c2[880],simde_mm_xor_si128(c2[1478],simde_mm_xor_si128(c2[1238],simde_mm_xor_si128(c2[1118],simde_mm_xor_si128(c2[1597],simde_mm_xor_si128(c2[1237],simde_mm_xor_si128(c2[1011],simde_mm_xor_si128(c2[771],simde_mm_xor_si128(c2[651],simde_mm_xor_si128(c2[769],simde_mm_xor_si128(c2[409],simde_mm_xor_si128(c2[1129],simde_mm_xor_si128(c2[769],simde_mm_xor_si128(c2[540],simde_mm_xor_si128(c2[300],simde_mm_xor_si128(c2[180],simde_mm_xor_si128(c2[65],simde_mm_xor_si128(c2[1624],simde_mm_xor_si128(c2[901],simde_mm_xor_si128(c2[77],simde_mm_xor_si128(c2[1876],simde_mm_xor_si128(c2[1636],simde_mm_xor_si128(c2[1516],simde_mm_xor_si128(c2[914],simde_mm_xor_si128(c2[674],simde_mm_xor_si128(c2[554],simde_mm_xor_si128(c2[434],simde_mm_xor_si128(c2[74],simde_mm_xor_si128(c2[444],simde_mm_xor_si128(c2[204],simde_mm_xor_si128(c2[84],simde_mm_xor_si128(c2[1765],simde_mm_xor_si128(c2[1405],simde_mm_xor_si128(c2[216],simde_mm_xor_si128(c2[1901],simde_mm_xor_si128(c2[1781],simde_mm_xor_si128(c2[1297],simde_mm_xor_si128(c2[937],simde_mm_xor_si128(c2[578],simde_mm_xor_si128(c2[218],simde_mm_xor_si128(c2[713],simde_mm_xor_si128(c2[593],simde_mm_xor_si128(c2[353],simde_mm_xor_si128(c2[233],simde_mm_xor_si128(c2[1428],simde_mm_xor_si128(c2[1188],simde_mm_xor_si128(c2[1068],simde_mm_xor_si128(c2[348],c2[1913])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 18
+     d2[108]=simde_mm_xor_si128(c2[1205],simde_mm_xor_si128(c2[792],c2[1766]));
+
+//row: 19
+     d2[114]=simde_mm_xor_si128(c2[1082],simde_mm_xor_si128(c2[725],simde_mm_xor_si128(c2[485],simde_mm_xor_si128(c2[1203],simde_mm_xor_si128(c2[1455],simde_mm_xor_si128(c2[1576],simde_mm_xor_si128(c2[372],simde_mm_xor_si128(c2[1214],simde_mm_xor_si128(c2[146],simde_mm_xor_si128(c2[1226],simde_mm_xor_si128(c2[1597],simde_mm_xor_si128(c2[1841],simde_mm_xor_si128(c2[41],simde_mm_xor_si128(c2[1368],simde_mm_xor_si128(c2[1132],simde_mm_xor_si128(c2[1492],simde_mm_xor_si128(c2[903],simde_mm_xor_si128(c2[422],simde_mm_xor_si128(c2[314],simde_mm_xor_si128(c2[1277],simde_mm_xor_si128(c2[797],simde_mm_xor_si128(c2[807],simde_mm_xor_si128(c2[209],simde_mm_xor_si128(c2[579],simde_mm_xor_si128(c2[1660],simde_mm_xor_si128(c2[941],simde_mm_xor_si128(c2[950],simde_mm_xor_si128(c2[1791],c2[711]))))))))))))))))))))))))))));
+
+//row: 20
+     d2[120]=simde_mm_xor_si128(c2[842],simde_mm_xor_si128(c2[722],simde_mm_xor_si128(c2[365],simde_mm_xor_si128(c2[125],simde_mm_xor_si128(c2[1215],simde_mm_xor_si128(c2[1095],simde_mm_xor_si128(c2[1216],simde_mm_xor_si128(c2[12],simde_mm_xor_si128(c2[1456],simde_mm_xor_si128(c2[1825],simde_mm_xor_si128(c2[1705],simde_mm_xor_si128(c2[866],simde_mm_xor_si128(c2[1357],simde_mm_xor_si128(c2[1237],simde_mm_xor_si128(c2[1481],simde_mm_xor_si128(c2[1600],simde_mm_xor_si128(c2[1008],simde_mm_xor_si128(c2[772],simde_mm_xor_si128(c2[1132],simde_mm_xor_si128(c2[1611],simde_mm_xor_si128(c2[543],simde_mm_xor_si128(c2[62],simde_mm_xor_si128(c2[74],simde_mm_xor_si128(c2[1873],simde_mm_xor_si128(c2[917],simde_mm_xor_si128(c2[437],simde_mm_xor_si128(c2[447],simde_mm_xor_si128(c2[1768],simde_mm_xor_si128(c2[219],simde_mm_xor_si128(c2[1300],simde_mm_xor_si128(c2[581],simde_mm_xor_si128(c2[710],simde_mm_xor_si128(c2[590],simde_mm_xor_si128(c2[1431],c2[351]))))))))))))))))))))))))))))))))));
+
+//row: 21
+     d2[126]=simde_mm_xor_si128(c2[3],simde_mm_xor_si128(c2[1565],simde_mm_xor_si128(c2[1325],simde_mm_xor_si128(c2[1563],simde_mm_xor_si128(c2[376],simde_mm_xor_si128(c2[497],simde_mm_xor_si128(c2[1332],simde_mm_xor_si128(c2[1212],simde_mm_xor_si128(c2[986],simde_mm_xor_si128(c2[267],simde_mm_xor_si128(c2[147],simde_mm_xor_si128(c2[518],simde_mm_xor_si128(c2[756],simde_mm_xor_si128(c2[881],simde_mm_xor_si128(c2[289],simde_mm_xor_si128(c2[53],simde_mm_xor_si128(c2[533],simde_mm_xor_si128(c2[413],simde_mm_xor_si128(c2[1743],simde_mm_xor_si128(c2[1382],simde_mm_xor_si128(c2[1262],simde_mm_xor_si128(c2[1154],simde_mm_xor_si128(c2[192],simde_mm_xor_si128(c2[1757],simde_mm_xor_si128(c2[1637],simde_mm_xor_si128(c2[1647],simde_mm_xor_si128(c2[1169],simde_mm_xor_si128(c2[1049],simde_mm_xor_si128(c2[1419],simde_mm_xor_si128(c2[581],simde_mm_xor_si128(c2[1901],simde_mm_xor_si128(c2[1781],simde_mm_xor_si128(c2[1900],simde_mm_xor_si128(c2[1790],simde_mm_xor_si128(c2[712],simde_mm_xor_si128(c2[1671],c2[1551]))))))))))))))))))))))))))))))))))));
+
+//row: 22
+     d2[132]=simde_mm_xor_si128(c2[493],c2[507]);
+
+//row: 23
+     d2[138]=simde_mm_xor_si128(c2[1200],simde_mm_xor_si128(c2[761],c2[1865]));
+
+//row: 24
+     d2[144]=simde_mm_xor_si128(c2[737],simde_mm_xor_si128(c2[1107],c2[112]));
+
+//row: 25
+     d2[150]=simde_mm_xor_si128(c2[1562],c2[661]);
+
+//row: 26
+     d2[156]=simde_mm_xor_si128(c2[602],simde_mm_xor_si128(c2[482],simde_mm_xor_si128(c2[244],simde_mm_xor_si128(c2[245],simde_mm_xor_si128(c2[125],simde_mm_xor_si128(c2[1800],simde_mm_xor_si128(c2[1804],simde_mm_xor_si128(c2[1560],simde_mm_xor_si128(c2[975],simde_mm_xor_si128(c2[855],simde_mm_xor_si128(c2[617],simde_mm_xor_si128(c2[976],simde_mm_xor_si128(c2[732],simde_mm_xor_si128(c2[1697],simde_mm_xor_si128(c2[1573],simde_mm_xor_si128(c2[1453],simde_mm_xor_si128(c2[1585],simde_mm_xor_si128(c2[1465],simde_mm_xor_si128(c2[1227],simde_mm_xor_si128(c2[626],simde_mm_xor_si128(c2[508],simde_mm_xor_si128(c2[388],simde_mm_xor_si128(c2[388],simde_mm_xor_si128(c2[1117],simde_mm_xor_si128(c2[997],simde_mm_xor_si128(c2[759],simde_mm_xor_si128(c2[1361],simde_mm_xor_si128(c2[1241],simde_mm_xor_si128(c2[997],simde_mm_xor_si128(c2[1360],simde_mm_xor_si128(c2[1116],simde_mm_xor_si128(c2[888],simde_mm_xor_si128(c2[768],simde_mm_xor_si128(c2[530],simde_mm_xor_si128(c2[532],simde_mm_xor_si128(c2[288],simde_mm_xor_si128(c2[892],simde_mm_xor_si128(c2[768],simde_mm_xor_si128(c2[648],simde_mm_xor_si128(c2[423],simde_mm_xor_si128(c2[303],simde_mm_xor_si128(c2[65],simde_mm_xor_si128(c2[1741],simde_mm_xor_si128(c2[1623],simde_mm_xor_si128(c2[1503],simde_mm_xor_si128(c2[1753],simde_mm_xor_si128(c2[1633],simde_mm_xor_si128(c2[1395],simde_mm_xor_si128(c2[797],simde_mm_xor_si128(c2[677],simde_mm_xor_si128(c2[433],simde_mm_xor_si128(c2[197],simde_mm_xor_si128(c2[73],simde_mm_xor_si128(c2[1872],simde_mm_xor_si128(c2[327],simde_mm_xor_si128(c2[207],simde_mm_xor_si128(c2[1888],simde_mm_xor_si128(c2[1528],simde_mm_xor_si128(c2[1404],simde_mm_xor_si128(c2[1284],simde_mm_xor_si128(c2[1166],simde_mm_xor_si128(c2[99],simde_mm_xor_si128(c2[1898],simde_mm_xor_si128(c2[1660],simde_mm_xor_si128(c2[1060],simde_mm_xor_si128(c2[816],simde_mm_xor_si128(c2[341],simde_mm_xor_si128(c2[217],simde_mm_xor_si128(c2[97],simde_mm_xor_si128(c2[470],simde_mm_xor_si128(c2[350],simde_mm_xor_si128(c2[112],simde_mm_xor_si128(c2[1311],simde_mm_xor_si128(c2[1191],simde_mm_xor_si128(c2[953],simde_mm_xor_si128(c2[111],simde_mm_xor_si128(c2[1912],c2[1792])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 27
+     d2[162]=simde_mm_xor_si128(c2[840],c2[315]);
+
+//row: 28
+     d2[168]=simde_mm_xor_si128(c2[736],simde_mm_xor_si128(c2[1824],c2[1024]));
+
+//row: 29
+     d2[174]=simde_mm_xor_si128(c2[1680],c2[169]);
+
+//row: 30
+     d2[180]=simde_mm_xor_si128(c2[985],simde_mm_xor_si128(c2[1263],simde_mm_xor_si128(c2[567],c2[1071])));
+
+//row: 31
+     d2[186]=simde_mm_xor_si128(c2[843],simde_mm_xor_si128(c2[480],simde_mm_xor_si128(c2[240],simde_mm_xor_si128(c2[1216],simde_mm_xor_si128(c2[1337],simde_mm_xor_si128(c2[253],simde_mm_xor_si128(c2[133],simde_mm_xor_si128(c2[372],simde_mm_xor_si128(c2[1826],simde_mm_xor_si128(c2[1107],simde_mm_xor_si128(c2[987],simde_mm_xor_si128(c2[1358],simde_mm_xor_si128(c2[1596],simde_mm_xor_si128(c2[1721],simde_mm_xor_si128(c2[1129],simde_mm_xor_si128(c2[893],simde_mm_xor_si128(c2[1373],simde_mm_xor_si128(c2[1253],simde_mm_xor_si128(c2[664],simde_mm_xor_si128(c2[303],simde_mm_xor_si128(c2[183],simde_mm_xor_si128(c2[75],simde_mm_xor_si128(c2[1032],simde_mm_xor_si128(c2[672],simde_mm_xor_si128(c2[552],simde_mm_xor_si128(c2[568],simde_mm_xor_si128(c2[84],simde_mm_xor_si128(c2[1889],simde_mm_xor_si128(c2[340],simde_mm_xor_si128(c2[1421],simde_mm_xor_si128(c2[816],simde_mm_xor_si128(c2[696],simde_mm_xor_si128(c2[711],simde_mm_xor_si128(c2[1552],simde_mm_xor_si128(c2[592],c2[472])))))))))))))))))))))))))))))))))));
+
+//row: 32
+     d2[192]=simde_mm_xor_si128(c2[1805],simde_mm_xor_si128(c2[1685],simde_mm_xor_si128(c2[1442],simde_mm_xor_si128(c2[1322],simde_mm_xor_si128(c2[1082],simde_mm_xor_si128(c2[485],simde_mm_xor_si128(c2[253],simde_mm_xor_si128(c2[133],simde_mm_xor_si128(c2[254],simde_mm_xor_si128(c2[975],simde_mm_xor_si128(c2[869],simde_mm_xor_si128(c2[749],simde_mm_xor_si128(c2[1829],simde_mm_xor_si128(c2[401],simde_mm_xor_si128(c2[281],simde_mm_xor_si128(c2[639],simde_mm_xor_si128(c2[519],simde_mm_xor_si128(c2[638],simde_mm_xor_si128(c2[172],simde_mm_xor_si128(c2[52],simde_mm_xor_si128(c2[1729],simde_mm_xor_si128(c2[170],simde_mm_xor_si128(c2[1620],simde_mm_xor_si128(c2[1500],simde_mm_xor_si128(c2[1025],simde_mm_xor_si128(c2[1020],simde_mm_xor_si128(c2[1037],simde_mm_xor_si128(c2[917],simde_mm_xor_si128(c2[75],simde_mm_xor_si128(c2[1874],simde_mm_xor_si128(c2[1394],simde_mm_xor_si128(c2[1524],simde_mm_xor_si128(c2[1404],simde_mm_xor_si128(c2[806],simde_mm_xor_si128(c2[1296],simde_mm_xor_si128(c2[1176],simde_mm_xor_si128(c2[338],simde_mm_xor_si128(c2[1538],simde_mm_xor_si128(c2[1673],simde_mm_xor_si128(c2[1553],simde_mm_xor_si128(c2[589],simde_mm_xor_si128(c2[469],c2[1308]))))))))))))))))))))))))))))))))))))))))));
+
+//row: 33
+     d2[198]=simde_mm_xor_si128(c2[1080],simde_mm_xor_si128(c2[723],simde_mm_xor_si128(c2[483],simde_mm_xor_si128(c2[1453],simde_mm_xor_si128(c2[1574],simde_mm_xor_si128(c2[376],simde_mm_xor_si128(c2[144],simde_mm_xor_si128(c2[1224],simde_mm_xor_si128(c2[628],simde_mm_xor_si128(c2[1601],simde_mm_xor_si128(c2[1839],simde_mm_xor_si128(c2[39],simde_mm_xor_si128(c2[1372],simde_mm_xor_si128(c2[1130],simde_mm_xor_si128(c2[1490],simde_mm_xor_si128(c2[901],simde_mm_xor_si128(c2[420],simde_mm_xor_si128(c2[312],simde_mm_xor_si128(c2[1275],simde_mm_xor_si128(c2[795],simde_mm_xor_si128(c2[805],simde_mm_xor_si128(c2[207],simde_mm_xor_si128(c2[449],simde_mm_xor_si128(c2[577],simde_mm_xor_si128(c2[1658],simde_mm_xor_si128(c2[939],simde_mm_xor_si128(c2[948],simde_mm_xor_si128(c2[1789],c2[709]))))))))))))))))))))))))))));
+
+//row: 34
+     d2[204]=simde_mm_xor_si128(c2[1803],simde_mm_xor_si128(c2[1683],simde_mm_xor_si128(c2[1085],simde_mm_xor_si128(c2[1440],simde_mm_xor_si128(c2[1320],simde_mm_xor_si128(c2[722],simde_mm_xor_si128(c2[1080],simde_mm_xor_si128(c2[482],simde_mm_xor_si128(c2[1564],simde_mm_xor_si128(c2[257],simde_mm_xor_si128(c2[137],simde_mm_xor_si128(c2[1452],simde_mm_xor_si128(c2[252],simde_mm_xor_si128(c2[1573],simde_mm_xor_si128(c2[973],simde_mm_xor_si128(c2[495],simde_mm_xor_si128(c2[375],simde_mm_xor_si128(c2[867],simde_mm_xor_si128(c2[747],simde_mm_xor_si128(c2[149],simde_mm_xor_si128(c2[1827],simde_mm_xor_si128(c2[1349],simde_mm_xor_si128(c2[1229],simde_mm_xor_si128(c2[399],simde_mm_xor_si128(c2[279],simde_mm_xor_si128(c2[1600],simde_mm_xor_si128(c2[637],simde_mm_xor_si128(c2[517],simde_mm_xor_si128(c2[1838],simde_mm_xor_si128(c2[636],simde_mm_xor_si128(c2[38],simde_mm_xor_si128(c2[170],simde_mm_xor_si128(c2[50],simde_mm_xor_si128(c2[1371],simde_mm_xor_si128(c2[1733],simde_mm_xor_si128(c2[1129],simde_mm_xor_si128(c2[168],simde_mm_xor_si128(c2[1609],simde_mm_xor_si128(c2[1489],simde_mm_xor_si128(c2[1624],simde_mm_xor_si128(c2[1504],simde_mm_xor_si128(c2[900],simde_mm_xor_si128(c2[1023],simde_mm_xor_si128(c2[545],simde_mm_xor_si128(c2[425],simde_mm_xor_si128(c2[1035],simde_mm_xor_si128(c2[915],simde_mm_xor_si128(c2[317],simde_mm_xor_si128(c2[73],simde_mm_xor_si128(c2[1872],simde_mm_xor_si128(c2[1274],simde_mm_xor_si128(c2[1392],simde_mm_xor_si128(c2[914],simde_mm_xor_si128(c2[794],simde_mm_xor_si128(c2[1528],simde_mm_xor_si128(c2[1408],simde_mm_xor_si128(c2[804],simde_mm_xor_si128(c2[804],simde_mm_xor_si128(c2[326],simde_mm_xor_si128(c2[206],simde_mm_xor_si128(c2[1300],simde_mm_xor_si128(c2[1180],simde_mm_xor_si128(c2[576],simde_mm_xor_si128(c2[336],simde_mm_xor_si128(c2[1657],simde_mm_xor_si128(c2[1536],simde_mm_xor_si128(c2[1058],simde_mm_xor_si128(c2[938],simde_mm_xor_si128(c2[1671],simde_mm_xor_si128(c2[1551],simde_mm_xor_si128(c2[953],simde_mm_xor_si128(c2[593],simde_mm_xor_si128(c2[473],simde_mm_xor_si128(c2[1788],simde_mm_xor_si128(c2[1312],simde_mm_xor_si128(c2[828],c2[708]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 35
+     d2[210]=simde_mm_xor_si128(c2[0],simde_mm_xor_si128(c2[1805],simde_mm_xor_si128(c2[1442],simde_mm_xor_si128(c2[1202],simde_mm_xor_si128(c2[373],simde_mm_xor_si128(c2[253],simde_mm_xor_si128(c2[374],simde_mm_xor_si128(c2[1095],simde_mm_xor_si128(c2[1576],simde_mm_xor_si128(c2[989],simde_mm_xor_si128(c2[869],simde_mm_xor_si128(c2[24],simde_mm_xor_si128(c2[521],simde_mm_xor_si128(c2[401],simde_mm_xor_si128(c2[639],simde_mm_xor_si128(c2[758],simde_mm_xor_si128(c2[172],simde_mm_xor_si128(c2[1849],simde_mm_xor_si128(c2[290],simde_mm_xor_si128(c2[1620],simde_mm_xor_si128(c2[1145],simde_mm_xor_si128(c2[1025],simde_mm_xor_si128(c2[1157],simde_mm_xor_si128(c2[1037],simde_mm_xor_si128(c2[75],simde_mm_xor_si128(c2[1514],simde_mm_xor_si128(c2[1524],simde_mm_xor_si128(c2[926],simde_mm_xor_si128(c2[1296],simde_mm_xor_si128(c2[458],simde_mm_xor_si128(c2[1658],simde_mm_xor_si128(c2[1793],simde_mm_xor_si128(c2[1673],simde_mm_xor_si128(c2[589],c2[1428]))))))))))))))))))))))))))))))))));
+
+//row: 36
+     d2[216]=simde_mm_xor_si128(c2[1081],simde_mm_xor_si128(c2[867],c2[1288]));
+
+//row: 37
+     d2[222]=simde_mm_xor_si128(c2[241],simde_mm_xor_si128(c2[1440],simde_mm_xor_si128(c2[1803],simde_mm_xor_si128(c2[1083],simde_mm_xor_si128(c2[1563],simde_mm_xor_si128(c2[843],simde_mm_xor_si128(c2[614],simde_mm_xor_si128(c2[1813],simde_mm_xor_si128(c2[735],simde_mm_xor_si128(c2[15],simde_mm_xor_si128(c2[1456],simde_mm_xor_si128(c2[856],simde_mm_xor_si128(c2[736],simde_mm_xor_si128(c2[1224],simde_mm_xor_si128(c2[504],simde_mm_xor_si128(c2[385],simde_mm_xor_si128(c2[1704],simde_mm_xor_si128(c2[1584],simde_mm_xor_si128(c2[756],simde_mm_xor_si128(c2[36],simde_mm_xor_si128(c2[1000],simde_mm_xor_si128(c2[280],simde_mm_xor_si128(c2[1119],simde_mm_xor_si128(c2[399],simde_mm_xor_si128(c2[533],simde_mm_xor_si128(c2[1732],simde_mm_xor_si128(c2[291],simde_mm_xor_si128(c2[1490],simde_mm_xor_si128(c2[651],simde_mm_xor_si128(c2[51],simde_mm_xor_si128(c2[1850],simde_mm_xor_si128(c2[62],simde_mm_xor_si128(c2[1261],simde_mm_xor_si128(c2[1500],simde_mm_xor_si128(c2[900],simde_mm_xor_si128(c2[780],simde_mm_xor_si128(c2[1392],simde_mm_xor_si128(c2[672],simde_mm_xor_si128(c2[436],simde_mm_xor_si128(c2[1635],simde_mm_xor_si128(c2[1875],simde_mm_xor_si128(c2[1275],simde_mm_xor_si128(c2[1155],simde_mm_xor_si128(c2[1885],simde_mm_xor_si128(c2[1165],simde_mm_xor_si128(c2[1287],simde_mm_xor_si128(c2[687],simde_mm_xor_si128(c2[567],simde_mm_xor_si128(c2[1657],simde_mm_xor_si128(c2[937],simde_mm_xor_si128(c2[819],simde_mm_xor_si128(c2[99],simde_mm_xor_si128(c2[100],simde_mm_xor_si128(c2[1419],simde_mm_xor_si128(c2[1299],simde_mm_xor_si128(c2[109],simde_mm_xor_si128(c2[1308],simde_mm_xor_si128(c2[950],simde_mm_xor_si128(c2[230],simde_mm_xor_si128(c2[1789],simde_mm_xor_si128(c2[1189],c2[1069])))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 38
+     d2[228]=simde_mm_xor_si128(c2[1444],simde_mm_xor_si128(c2[1324],simde_mm_xor_si128(c2[961],simde_mm_xor_si128(c2[721],simde_mm_xor_si128(c2[1817],simde_mm_xor_si128(c2[1697],simde_mm_xor_si128(c2[1812],simde_mm_xor_si128(c2[614],simde_mm_xor_si128(c2[497],simde_mm_xor_si128(c2[508],simde_mm_xor_si128(c2[388],simde_mm_xor_si128(c2[1468],simde_mm_xor_si128(c2[40],simde_mm_xor_si128(c2[1839],simde_mm_xor_si128(c2[158],simde_mm_xor_si128(c2[277],simde_mm_xor_si128(c2[1610],simde_mm_xor_si128(c2[1368],simde_mm_xor_si128(c2[1728],simde_mm_xor_si128(c2[1145],simde_mm_xor_si128(c2[664],simde_mm_xor_si128(c2[903],simde_mm_xor_si128(c2[676],simde_mm_xor_si128(c2[556],simde_mm_xor_si128(c2[1513],simde_mm_xor_si128(c2[1033],simde_mm_xor_si128(c2[1049],simde_mm_xor_si128(c2[445],simde_mm_xor_si128(c2[821],simde_mm_xor_si128(c2[1896],simde_mm_xor_si128(c2[1177],simde_mm_xor_si128(c2[1312],simde_mm_xor_si128(c2[1192],simde_mm_xor_si128(c2[108],c2[953]))))))))))))))))))))))))))))))))));
+
+//row: 39
+     d2[234]=simde_mm_xor_si128(c2[842],simde_mm_xor_si128(c2[722],simde_mm_xor_si128(c2[485],simde_mm_xor_si128(c2[365],simde_mm_xor_si128(c2[125],simde_mm_xor_si128(c2[1565],simde_mm_xor_si128(c2[1215],simde_mm_xor_si128(c2[1095],simde_mm_xor_si128(c2[1216],simde_mm_xor_si128(c2[12],simde_mm_xor_si128(c2[1825],simde_mm_xor_si128(c2[1705],simde_mm_xor_si128(c2[866],simde_mm_xor_si128(c2[1357],simde_mm_xor_si128(c2[1237],simde_mm_xor_si128(c2[1601],simde_mm_xor_si128(c2[1481],simde_mm_xor_si128(c2[1600],simde_mm_xor_si128(c2[1128],simde_mm_xor_si128(c2[1008],simde_mm_xor_si128(c2[772],simde_mm_xor_si128(c2[1132],simde_mm_xor_si128(c2[663],simde_mm_xor_si128(c2[543],simde_mm_xor_si128(c2[62],simde_mm_xor_si128(c2[74],simde_mm_xor_si128(c2[1873],simde_mm_xor_si128(c2[1037],simde_mm_xor_si128(c2[917],simde_mm_xor_si128(c2[437],simde_mm_xor_si128(c2[567],simde_mm_xor_si128(c2[447],simde_mm_xor_si128(c2[1768],simde_mm_xor_si128(c2[566],simde_mm_xor_si128(c2[339],simde_mm_xor_si128(c2[219],simde_mm_xor_si128(c2[1300],simde_mm_xor_si128(c2[581],simde_mm_xor_si128(c2[710],simde_mm_xor_si128(c2[590],simde_mm_xor_si128(c2[1551],simde_mm_xor_si128(c2[1431],c2[351]))))))))))))))))))))))))))))))))))))))))));
+
+//row: 40
+     d2[240]=simde_mm_xor_si128(c2[965],simde_mm_xor_si128(c2[3],simde_mm_xor_si128(c2[602],simde_mm_xor_si128(c2[1565],simde_mm_xor_si128(c2[362],simde_mm_xor_si128(c2[1325],simde_mm_xor_si128(c2[1332],simde_mm_xor_si128(c2[376],simde_mm_xor_si128(c2[1453],simde_mm_xor_si128(c2[497],simde_mm_xor_si128(c2[255],simde_mm_xor_si128(c2[1332],simde_mm_xor_si128(c2[1212],simde_mm_xor_si128(c2[29],simde_mm_xor_si128(c2[986],simde_mm_xor_si128(c2[1109],simde_mm_xor_si128(c2[267],simde_mm_xor_si128(c2[147],simde_mm_xor_si128(c2[864],simde_mm_xor_si128(c2[1480],simde_mm_xor_si128(c2[518],simde_mm_xor_si128(c2[1718],simde_mm_xor_si128(c2[756],simde_mm_xor_si128(c2[1837],simde_mm_xor_si128(c2[881],simde_mm_xor_si128(c2[1251],simde_mm_xor_si128(c2[289],simde_mm_xor_si128(c2[1009],simde_mm_xor_si128(c2[53],simde_mm_xor_si128(c2[1369],simde_mm_xor_si128(c2[533],simde_mm_xor_si128(c2[413],simde_mm_xor_si128(c2[780],simde_mm_xor_si128(c2[1743],simde_mm_xor_si128(c2[305],simde_mm_xor_si128(c2[1382],simde_mm_xor_si128(c2[1262],simde_mm_xor_si128(c2[197],simde_mm_xor_si128(c2[1154],simde_mm_xor_si128(c2[1154],simde_mm_xor_si128(c2[192],simde_mm_xor_si128(c2[674],simde_mm_xor_si128(c2[1757],simde_mm_xor_si128(c2[1637],simde_mm_xor_si128(c2[684],simde_mm_xor_si128(c2[1647],simde_mm_xor_si128(c2[86],simde_mm_xor_si128(c2[1169],simde_mm_xor_si128(c2[1049],simde_mm_xor_si128(c2[456],simde_mm_xor_si128(c2[1419],simde_mm_xor_si128(c2[1537],simde_mm_xor_si128(c2[581],simde_mm_xor_si128(c2[818],simde_mm_xor_si128(c2[1901],simde_mm_xor_si128(c2[1781],simde_mm_xor_si128(c2[833],simde_mm_xor_si128(c2[1790],simde_mm_xor_si128(c2[1668],simde_mm_xor_si128(c2[712],simde_mm_xor_si128(c2[588],simde_mm_xor_si128(c2[1671],c2[1551]))))))))))))))))))))))))))))))))))))))))))))))))))))))))))))));
+
+//row: 41
+     d2[246]=simde_mm_xor_si128(c2[1202],simde_mm_xor_si128(c2[1082],simde_mm_xor_si128(c2[725],simde_mm_xor_si128(c2[485],simde_mm_xor_si128(c2[1575],simde_mm_xor_si128(c2[1455],simde_mm_xor_si128(c2[1576],simde_mm_xor_si128(c2[372],simde_mm_xor_si128(c2[375],simde_mm_xor_si128(c2[266],simde_mm_xor_si128(c2[146],simde_mm_xor_si128(c2[1226],simde_mm_xor_si128(c2[1717],simde_mm_xor_si128(c2[1597],simde_mm_xor_si128(c2[1841],simde_mm_xor_si128(c2[41],simde_mm_xor_si128(c2[1368],simde_mm_xor_si128(c2[1132],simde_mm_xor_si128(c2[1492],simde_mm_xor_si128(c2[903],simde_mm_xor_si128(c2[422],simde_mm_xor_si128(c2[900],simde_mm_xor_si128(c2[434],simde_mm_xor_si128(c2[314],simde_mm_xor_si128(c2[1277],simde_mm_xor_si128(c2[797],simde_mm_xor_si128(c2[807],simde_mm_xor_si128(c2[209],simde_mm_xor_si128(c2[579],simde_mm_xor_si128(c2[1660],simde_mm_xor_si128(c2[941],simde_mm_xor_si128(c2[1070],simde_mm_xor_si128(c2[950],simde_mm_xor_si128(c2[1791],c2[711]))))))))))))))))))))))))))))))))));
+  }
+}
+#endif
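
Every generated row above has the same shape: one 128-bit output word computed as a chain of XORs over input words whose indices were baked in by the generator. A loop-form equivalent of a single row (the name xor_row and the idx[] array are illustrative, not part of the generated files):

#include <simde/x86/sse2.h>

/* Fold one generated parity row: acc = c2[idx[0]] ^ c2[idx[1]] ^ ... ^ c2[idx[n-1]] */
static simde__m128i xor_row(const simde__m128i *c2, const int *idx, int n)
{
  simde__m128i acc = c2[idx[0]];
  for (int i = 1; i < n; i++)
    acc = simde_mm_xor_si128(acc, c2[idx[i]]);
  return acc;
}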
diff --git a/openair1/PHY/CODING/nrLDPC_encoder/ldpc_encode_parity_check.c b/openair1/PHY/CODING/nrLDPC_encoder/ldpc_encode_parity_check.c
index d36398a13a71f833725325c65c6b949cce6da005..f4632c61d7169a871a90f5d170321e0ffe15f1d6 100644
--- a/openair1/PHY/CODING/nrLDPC_encoder/ldpc_encode_parity_check.c
+++ b/openair1/PHY/CODING/nrLDPC_encoder/ldpc_encode_parity_check.c
@@ -38,32 +38,49 @@
 
 
 #include "ldpc384_byte.c"
+#include "ldpc384_byte_128.c"
 #include "ldpc352_byte.c"
+#include "ldpc352_byte_128.c"
 #include "ldpc320_byte.c"
+#include "ldpc320_byte_128.c"
 #include "ldpc288_byte.c"
+#include "ldpc288_byte_128.c"
 #include "ldpc256_byte.c"
+#include "ldpc256_byte_128.c"
 #include "ldpc240_byte.c"
 #include "ldpc224_byte.c"
+#include "ldpc224_byte_128.c"
 #include "ldpc208_byte.c"
 #include "ldpc192_byte.c"
+#include "ldpc192_byte_128.c"
 #include "ldpc176_byte.c"
 #include "ldpc_BG2_Zc384_byte.c"
+#include "ldpc_BG2_Zc384_byte_128.c"
 #include "ldpc_BG2_Zc352_byte.c"
+#include "ldpc_BG2_Zc352_byte_128.c"
 #include "ldpc_BG2_Zc320_byte.c"
+#include "ldpc_BG2_Zc320_byte_128.c"
 #include "ldpc_BG2_Zc288_byte.c"
+#include "ldpc_BG2_Zc288_byte_128.c"
 #include "ldpc_BG2_Zc256_byte.c"
+#include "ldpc_BG2_Zc256_byte_128.c"
 #include "ldpc_BG2_Zc240_byte.c"
 #include "ldpc_BG2_Zc224_byte.c"
+#include "ldpc_BG2_Zc224_byte_128.c"
 #include "ldpc_BG2_Zc208_byte.c"
 #include "ldpc_BG2_Zc192_byte.c"
+#include "ldpc_BG2_Zc192_byte_128.c"
 #include "ldpc_BG2_Zc176_byte.c"
+#include "ldpc_BG2_Zc160_byte_128.c"
 #include "ldpc_BG2_Zc160_byte.c"
 #include "ldpc_BG2_Zc144_byte.c"
 #include "ldpc_BG2_Zc128_byte.c"
+#include "ldpc_BG2_Zc128_byte_128.c"
 #include "ldpc_BG2_Zc120_byte.c"
 #include "ldpc_BG2_Zc112_byte.c"
 #include "ldpc_BG2_Zc104_byte.c"
 #include "ldpc_BG2_Zc96_byte.c"
+#include "ldpc_BG2_Zc96_byte_128.c"
 #include "ldpc_BG2_Zc88_byte.c"
 #include "ldpc_BG2_Zc80_byte.c"
 #include "ldpc_BG2_Zc72_byte.c"
@@ -73,7 +90,6 @@
 static void encode_parity_check_part_optim(uint8_t *cc,uint8_t *d, short BG,short Zc,short Kb, int simd_size, int ncols)
 {
   unsigned char c[2*22*Zc*simd_size] __attribute__((aligned(32)));      //double size matrix of c
-  
   for (int i1=0; i1 < ncols; i1++)   {
     memcpy(&c[2*i1*Zc], &cc[i1*Zc], Zc*sizeof(unsigned char));
     memcpy(&c[(2*i1+1)*Zc], &cc[i1*Zc], Zc*sizeof(unsigned char));
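
The doubled c buffer is what lets the generated rows index with plain offsets: each Zc-wide column is stored twice back-to-back, so a cyclic shift by s becomes a straight read of Zc consecutive bytes. A minimal sketch of the idea (cyclic_read is illustrative):

#include <stdint.h>
#include <string.h>

/* col2 holds one column twice (2*Zc bytes); reading Zc bytes at offset s
 * yields the column cyclically rotated by s, with no modulo in the loop. */
static void cyclic_read(const uint8_t *col2, uint8_t *out, int Zc, int s)
{
  memcpy(out, &col2[s], Zc * sizeof(uint8_t)); /* out[i] = col[(s + i) % Zc] */
}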
diff --git a/openair1/PHY/CODING/nrLDPC_encoder/ldpc_encoder.c b/openair1/PHY/CODING/nrLDPC_encoder/ldpc_encoder.c
index 90475831ff490280f40a5e3456d9827fed3bd04f..1e1b3c156f79dc79e2012f1b29fd1dca46ca60ad 100644
--- a/openair1/PHY/CODING/nrLDPC_encoder/ldpc_encoder.c
+++ b/openair1/PHY/CODING/nrLDPC_encoder/ldpc_encoder.c
@@ -41,11 +41,11 @@
 #include "ldpc_generate_coefficient.c"
 
 
-int ldpc_encoder_orig(unsigned char *test_input,unsigned char *channel_input,int Zc,int Kb,short block_length, short BG,unsigned char gen_code)
+int ldpc_encoder_orig(uint8_t *test_input,uint8_t *channel_input,int Zc,int Kb,short block_length, short BG,uint8_t gen_code)
 {
-  unsigned char c[22*384]; //padded input, unpacked, max size
-  unsigned char d[68*384]; //coded output, unpacked, max size
-  unsigned char channel_temp,temp;
+  uint8_t c[22*384]; //padded input, unpacked, max size
+  uint8_t d[68*384]; //coded output, unpacked, max size
+  uint8_t channel_temp,temp;
   short *Gen_shift_values, *no_shift_values, *pointer_shift_values;
 
   short nrows = 46;//parity check bits
@@ -103,8 +103,8 @@ int ldpc_encoder_orig(unsigned char *test_input,unsigned char *channel_input,int
   //printf("%d\n",no_punctured_columns);
   //printf("%d\n",removed_bit);
   // unpack input
-  memset(c,0,sizeof(unsigned char) * ncols * Zc);
-  memset(d,0,sizeof(unsigned char) * nrows * Zc);
+  memset(c,0,sizeof(uint8_t) * ncols * Zc);
+  memset(d,0,sizeof(uint8_t) * nrows * Zc);
 
   for (i=0; i<block_length; i++)
   {
@@ -115,7 +115,7 @@ int ldpc_encoder_orig(unsigned char *test_input,unsigned char *channel_input,int
 
   // parity check part
 
-  if (gen_code==1)
+  if (gen_code>=1)
   {
     char fname[100];
     sprintf(fname,"ldpc_BG%d_Zc%d_byte.c",BG,Zc);
@@ -136,24 +136,24 @@ int ldpc_encoder_orig(unsigned char *test_input,unsigned char *channel_input,int
     fprintf(fd,"#include \"PHY/sse_intrin.h\"\n");
     fprintf(fd2,"#include \"PHY/sse_intrin.h\"\n");
 
-    if ((Zc&31)==0) {
+    if (gen_code == 1 && (Zc&31)==0) {
       shift=5; // AVX2 - 256-bit SIMD
       mask=31;
-      strcpy(data_type,"__m256i");
+      strcpy(data_type,"simde__m256i");
       strcpy(xor_command,"simde_mm256_xor_si256");
     }
     else if ((Zc&15)==0) {
       shift=4; // SSE4 - 128-bit SIMD
       mask=15;
-      strcpy(data_type,"__m128i");
-      strcpy(xor_command,"_mm_xor_si128");
+      strcpy(data_type,"simde__m128i");
+      strcpy(xor_command,"simde_mm_xor_si128");
 
     }
     else if ((Zc&7)==0) {
       shift=3; // MMX  - 64-bit SIMD
       mask=7;
-      strcpy(data_type,"__m64");
-      strcpy(xor_command,"_mm_xor_si64"); 
+      strcpy(data_type,"simde__m64");
+      strcpy(xor_command,"simde_mm_xor_si64"); 
     }
     else {
       shift=0;                 // no SIMD
@@ -209,10 +209,10 @@ int ldpc_encoder_orig(unsigned char *test_input,unsigned char *channel_input,int
 	          
 	      var=(int)((i3*Zc + (Gen_shift_values[ pointer_shift_values[temp_prime]+i4 ]+1)%Zc)/Zc);
 	      int index =var*2*Zc + (i3*Zc + (Gen_shift_values[ pointer_shift_values[temp_prime]+i4 ]+1)%Zc) % Zc;
-	      
-	      indlist[nind] = ((index&mask)*((2*Zc)>>shift)*Kb)+(index>>shift);
-	      indlist2[nind++] = ((index&(mask>>1))*((2*Zc)>>(shift-1))*Kb)+(index>>(shift-1));
-	      
+	      printf("var %d, i3 %d, i4 %d, index %d, shift %d, Zc %d, pointer_shift_values[%d] %d gen_shift_value %d\n",var,i3,i4,index,shift,Zc,temp_prime,pointer_shift_values[temp_prime],Gen_shift_values[pointer_shift_values[temp_prime]]);
+	      indlist[nind] = ((index&mask)*((2*Zc*ncols)>>shift)/* *Kb */)+(index>>shift);
+	      printf("indlist[%d] %d, index&mask %d, index>>shift %d\n",nind,indlist[nind],index&mask,index>>shift);
+	      indlist2[nind++] = ((index&(mask>>1))*((2*Zc*ncols)>>(shift-1))*Kb)+(index>>(shift-1));
 	    }
 	  
 
@@ -244,7 +244,7 @@ int ldpc_encoder_orig(unsigned char *test_input,unsigned char *channel_input,int
       for (i5=0; i5 < Kb; i5++)
       {
         temp = c[i5*Zc];
-        memmove(&c[i5*Zc], &c[i5*Zc+1], (Zc-1)*sizeof(unsigned char));
+        memmove(&c[i5*Zc], &c[i5*Zc+1], (Zc-1)*sizeof(uint8_t));
         c[i5*Zc+Zc-1] = temp;
       }
 
@@ -270,13 +270,13 @@ int ldpc_encoder_orig(unsigned char *test_input,unsigned char *channel_input,int
   }
 
   // information part and puncture columns
-  memcpy(&channel_input[0], &c[2*Zc], (block_length-2*Zc)*sizeof(unsigned char));
-  memcpy(&channel_input[block_length-2*Zc], &d[0], ((nrows-no_punctured_columns) * Zc-removed_bit)*sizeof(unsigned char));
-  //memcpy(channel_input,c,Kb*Zc*sizeof(unsigned char));
+  memcpy(&channel_input[0], &c[2*Zc], (block_length-2*Zc)*sizeof(uint8_t));
+  memcpy(&channel_input[block_length-2*Zc], &d[0], ((nrows-no_punctured_columns) * Zc-removed_bit)*sizeof(uint8_t));
+  //memcpy(channel_input,c,Kb*Zc*sizeof(uint8_t));
   return 0;
 }
 
 
-int nrLDPC_encod(unsigned char **test_input,unsigned char **channel_input,int Zc,int Kb,short block_length, short BG, encoder_implemparams_t *impp) {
+int nrLDPC_encod(uint8_t **test_input,uint8_t **channel_input,int Zc,int Kb,short block_length, short BG, encoder_implemparams_t *impp) {
   return ldpc_encoder_orig(test_input[0],channel_input[0],Zc,Kb,block_length,BG,impp->gen_code);
 }
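
The generator now picks the SIMD width from the divisibility of Zc, and gen_code selects which flavour of file to emit: gen_code == 1 keeps the 256-bit path when Zc is a multiple of 32, while any other value >= 1 falls through to the 128-bit or narrower cases. A hypothetical helper mirroring that selection (pick_simd is not in the patch; the no-SIMD fallback values are illustrative):

/* shift = log2(bytes per vector); mask = (1 << shift) - 1, as in the code above. */
static void pick_simd(int Zc, int gen_code, int *shift, int *mask, const char **type)
{
  if (gen_code == 1 && (Zc & 31) == 0) { *shift = 5; *mask = 31; *type = "simde__m256i"; }
  else if ((Zc & 15) == 0)             { *shift = 4; *mask = 15; *type = "simde__m128i"; }
  else if ((Zc & 7) == 0)              { *shift = 3; *mask = 7;  *type = "simde__m64"; }
  else                                 { *shift = 0; *mask = 0;  *type = "uint8_t"; }
}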
diff --git a/openair1/PHY/CODING/nrLDPC_encoder/ldpc_encoder2.c b/openair1/PHY/CODING/nrLDPC_encoder/ldpc_encoder2.c
index 28fcc7f04fca57d785e32ac618ec7ffeb4ff5263..1cdb75f45917cd648c7504e80c37b7c2288cc8fb 100644
--- a/openair1/PHY/CODING/nrLDPC_encoder/ldpc_encoder2.c
+++ b/openair1/PHY/CODING/nrLDPC_encoder/ldpc_encoder2.c
@@ -50,6 +50,13 @@
 #include "ldpc208_byte.c"
 #include "ldpc192_byte.c"
 #include "ldpc176_byte.c"
+#include "ldpc384_byte_128.c"
+#include "ldpc352_byte_128.c"
+#include "ldpc320_byte_128.c"
+#include "ldpc288_byte_128.c"
+#include "ldpc256_byte_128.c"
+#include "ldpc224_byte_128.c"
+#include "ldpc192_byte_128.c"
 #include "ldpc_BG2_Zc384_byte.c"
 #include "ldpc_BG2_Zc352_byte.c"
 #include "ldpc_BG2_Zc320_byte.c"
@@ -304,11 +311,11 @@ int ldpc_encoder_optim_8seg(unsigned char **test_input,unsigned char **channel_i
   char temp;
   int simd_size;
 
-  __m256i shufmask = simde_mm256_set_epi64x(0x0303030303030303, 0x0202020202020202,0x0101010101010101, 0x0000000000000000);
-  __m256i andmask  = simde_mm256_set1_epi64x(0x0102040810204080);  // every 8 bits -> 8 bytes, pattern repeats.
-  __m256i zero256   = simde_mm256_setzero_si256();
-  __m256i masks[8];
-  register __m256i c256;
+  simde__m256i shufmask = simde_mm256_set_epi64x(0x0303030303030303, 0x0202020202020202, 0x0101010101010101, 0x0000000000000000);
+  simde__m256i andmask = simde_mm256_set1_epi64x(0x0102040810204080); // every 8 bits -> 8 bytes, pattern repeats.
+  simde__m256i zero256 = simde_mm256_setzero_si256();
+  simde__m256i masks[8];
+  register simde__m256i c256;
   masks[0] = simde_mm256_set1_epi8(0x1);
   masks[1] = simde_mm256_set1_epi8(0x2);
   masks[2] = simde_mm256_set1_epi8(0x4);
@@ -378,7 +385,7 @@ int ldpc_encoder_optim_8seg(unsigned char **test_input,unsigned char **channel_i
     for (j=1; j<n_segments; j++) {
       c256 = simde_mm256_or_si256(simde_mm256_and_si256(simde_mm256_cmpeq_epi8(simde_mm256_andnot_si256(simde_mm256_shuffle_epi8(simde_mm256_set1_epi32(((uint32_t*)test_input[j])[i]), shufmask),andmask),zero256),masks[j]),c256);
     }
-    ((__m256i *)c)[i] = c256;
+    ((simde__m256i *)c)[i] = c256;
   }
 
   for (i=(block_length>>5)<<5;i<block_length;i++) {
@@ -433,17 +440,19 @@ int ldpc_encoder_optim_8seg(unsigned char **test_input,unsigned char **channel_i
     //AssertFatal(((block_length-(2*Zc))&31) == 0,"block_length-(2*Zc) needs to be a multiple of 32 for now\n");
     uint32_t l1 = (block_length-(2*Zc))>>5;
     uint32_t l2 = ((nrows-no_punctured_columns) * Zc-removed_bit)>>5;
-    __m256i *c256p = (__m256i *)&c[2*Zc];
-    __m256i *d256p = (__m256i *)&d[0];
+    simde__m256i *c256p = (simde__m256i *)&c[2 * Zc];
+    simde__m256i *d256p = (simde__m256i *)&d[0];
     //  if (((block_length-(2*Zc))&31)>0) l1++;
     
     for (i=0;i<l1;i++)
-      for (j=0;j<n_segments;j++) ((__m256i *)channel_input[j])[i] = simde_mm256_and_si256(simde_mm256_srai_epi16(c256p[i],j),masks[0]);
-    
+      for (j = 0; j < n_segments; j++)
+        ((simde__m256i *)channel_input[j])[i] = simde_mm256_and_si256(simde_mm256_srai_epi16(c256p[i], j), masks[0]);
+
     //  if ((((nrows-no_punctured_columns) * Zc-removed_bit)&31)>0) l2++;
     
     for (i1=0;i1<l2;i1++,i++)
-      for (j=0;j<n_segments;j++) ((__m256i *)channel_input[j])[i] = simde_mm256_and_si256(simde_mm256_srai_epi16(d256p[i1],j),masks[0]);
+      for (j = 0; j < n_segments; j++)
+        ((simde__m256i *)channel_input[j])[i] = simde_mm256_and_si256(simde_mm256_srai_epi16(d256p[i1], j), masks[0]);
   }
   else {
 #ifdef DEBUG_LDPC
@@ -480,11 +489,11 @@ int ldpc_encoder_optim_8seg_multi(unsigned char **test_input,unsigned char **cha
   //printf("macro_segment: %d\n", macro_segment);
   //printf("macro_segment_end: %d\n", macro_segment_end );
 
-  __m256i shufmask = simde_mm256_set_epi64x(0x0303030303030303, 0x0202020202020202,0x0101010101010101, 0x0000000000000000);
-  __m256i andmask  = simde_mm256_set1_epi64x(0x0102040810204080);  // every 8 bits -> 8 bytes, pattern repeats.
-  __m256i zero256   = simde_mm256_setzero_si256();
-  __m256i masks[8];
-  register __m256i c256;
+  simde__m256i shufmask = simde_mm256_set_epi64x(0x0303030303030303, 0x0202020202020202, 0x0101010101010101, 0x0000000000000000);
+  simde__m256i andmask = simde_mm256_set1_epi64x(0x0102040810204080); // every 8 bits -> 8 bytes, pattern repeats.
+  simde__m256i zero256 = simde_mm256_setzero_si256();
+  simde__m256i masks[8];
+  register simde__m256i c256;
   masks[0] = simde_mm256_set1_epi8(0x1);
   masks[1] = simde_mm256_set1_epi8(0x2);
   masks[2] = simde_mm256_set1_epi8(0x4);
@@ -550,7 +559,7 @@ int ldpc_encoder_optim_8seg_multi(unsigned char **test_input,unsigned char **cha
     for (j=macro_segment+1; j < macro_segment_end; j++) {
       c256 = simde_mm256_or_si256(simde_mm256_and_si256(simde_mm256_cmpeq_epi8(simde_mm256_andnot_si256(simde_mm256_shuffle_epi8(simde_mm256_set1_epi32(((uint32_t*)test_input[j])[i]), shufmask),andmask),zero256),masks[j-macro_segment]),c256);
     }
-    ((__m256i *)c)[i] = c256;
+    ((simde__m256i *)c)[i] = c256;
   }
 
   for (i=(block_length>>5)<<5;i<block_length;i++) {
@@ -606,20 +615,25 @@ int ldpc_encoder_optim_8seg_multi(unsigned char **test_input,unsigned char **cha
     //AssertFatal(((block_length-(2*Zc))&31) == 0,"block_length-(2*Zc) needs to be a multiple of 32 for now\n");
     uint32_t l1 = (block_length-(2*Zc))>>5;
     uint32_t l2 = ((nrows-no_punctured_columns) * Zc-removed_bit)>>5;
-    __m256i *c256p = (__m256i *)&c[2*Zc];
-    __m256i *d256p = (__m256i *)&d[0];
+    simde__m256i *c256p = (simde__m256i *)&c[2 * Zc];
+    simde__m256i *d256p = (simde__m256i *)&d[0];
     //  if (((block_length-(2*Zc))&31)>0) l1++;
 
     for (i=0;i<l1;i++)
-      //for (j=0;j<n_segments;j++) ((__m256i *)channel_input[j])[i] = simde_mm256_and_si256(simde_mm256_srai_epi16(c256p[i],j),masks[0]);
-    	for (j=macro_segment; j < macro_segment_end; j++) ((__m256i *)channel_input[j])[i] = simde_mm256_and_si256(simde_mm256_srai_epi16(c256p[i],j-macro_segment),masks[0]);
-
+      // for (j=0;j<n_segments;j++) ((simde__m256i *)channel_input[j])[i] =
+      // simde_mm256_and_si256(simde_mm256_srai_epi16(c256p[i],j),masks[0]);
+      for (j = macro_segment; j < macro_segment_end; j++)
+        ((simde__m256i *)channel_input[j])[i] =
+            simde_mm256_and_si256(simde_mm256_srai_epi16(c256p[i], j - macro_segment), masks[0]);
 
     //  if ((((nrows-no_punctured_columns) * Zc-removed_bit)&31)>0) l2++;
 
     for (i1=0;i1<l2;i1++,i++)
-      //for (j=0;j<n_segments;j++) ((__m256i *)channel_input[j])[i] = simde_mm256_and_si256(simde_mm256_srai_epi16(d256p[i1],j),masks[0]);
-    	for (j=macro_segment; j < macro_segment_end; j++)  ((__m256i *)channel_input[j])[i] = simde_mm256_and_si256(simde_mm256_srai_epi16(d256p[i1],j-macro_segment),masks[0]);
+      // for (j=0;j<n_segments;j++) ((simde__m256i *)channel_input[j])[i] =
+      // simde_mm256_and_si256(simde_mm256_srai_epi16(d256p[i1],j),masks[0]);
+      for (j = macro_segment; j < macro_segment_end; j++)
+        ((simde__m256i *)channel_input[j])[i] =
+            simde_mm256_and_si256(simde_mm256_srai_epi16(d256p[i1], j - macro_segment), masks[0]);
   }
   else {
 #ifdef DEBUG_LDPC
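
The unpacking loop above is worth a second look now that the types are SIMDE: simde_mm256_set1_epi32 replicates four input bytes across the vector, simde_mm256_shuffle_epi8 (which operates within each 128-bit lane) spreads each byte into eight consecutive byte lanes, and the andnot/cmpeq pair against the 0x0102040810204080 pattern turns every bit into a 0x00/0xFF byte before masks[j] tags the segment. A standalone sketch (unpack32_to_bytes is illustrative):

#include <stdint.h>
#include <simde/x86/avx2.h>

/* Expand the 32 bits of w into 32 bytes: lane 8*k+b is 1 when bit (7-b) of
 * byte k of w is set (MSB first within each byte), else 0. */
static simde__m256i unpack32_to_bytes(uint32_t w)
{
  simde__m256i shufmask = simde_mm256_set_epi64x(0x0303030303030303, 0x0202020202020202,
                                                 0x0101010101010101, 0x0000000000000000);
  simde__m256i andmask = simde_mm256_set1_epi64x(0x0102040810204080); /* one bit per byte lane */
  simde__m256i bytes = simde_mm256_shuffle_epi8(simde_mm256_set1_epi32((int)w), shufmask);
  simde__m256i isset = simde_mm256_cmpeq_epi8(simde_mm256_andnot_si256(bytes, andmask),
                                              simde_mm256_setzero_si256());
  return simde_mm256_and_si256(isset, simde_mm256_set1_epi8(0x1));
}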
diff --git a/openair1/PHY/CODING/nrLDPC_encoder/ldpc_encoder_optim.c b/openair1/PHY/CODING/nrLDPC_encoder/ldpc_encoder_optim.c
index d05ced18c67965ede543f32b06cc72cb67113406..9a235d6a58d8cbc97e903a0f1670fdfa7b3b0922 100644
--- a/openair1/PHY/CODING/nrLDPC_encoder/ldpc_encoder_optim.c
+++ b/openair1/PHY/CODING/nrLDPC_encoder/ldpc_encoder_optim.c
@@ -99,7 +99,7 @@ int nrLDPC_encod(unsigned char **test_input,unsigned char **channel_input,int Zc
 
   if(impp->tinput != NULL) stop_meas(impp->tinput);
 
-  if ((BG==1 && Zc>176) || (BG==2 && Zc>64)) { 
+  if ((BG==1 && Zc>=176) || (BG==2 && Zc>=64)) { 
     // extend matrix
     if(impp->tprep != NULL) start_meas(impp->tprep);
     if(impp->tprep != NULL) stop_meas(impp->tprep);
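
The comparison change is not cosmetic: with >=, the boundary lifting sizes Zc == 176 (BG1) and Zc == 64 (BG2) now take the matrix-extension path instead of the fallback. As a plain predicate (illustrative):

int use_extended_matrix_path(int BG, int Zc)
{
  return (BG == 1 && Zc >= 176) || (BG == 2 && Zc >= 64); /* now true at Zc==176 / Zc==64 */
}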
diff --git a/openair1/PHY/CODING/nrLDPC_encoder/ldpc_encoder_optim8seg.c b/openair1/PHY/CODING/nrLDPC_encoder/ldpc_encoder_optim8seg.c
index ad71674349c1adabd4c6b48a3d4886aa85ba1621..6ec1c05342e2bdec11463f3384a63a9183ffc4fb 100644
--- a/openair1/PHY/CODING/nrLDPC_encoder/ldpc_encoder_optim8seg.c
+++ b/openair1/PHY/CODING/nrLDPC_encoder/ldpc_encoder_optim8seg.c
@@ -51,11 +51,11 @@ int nrLDPC_encod(unsigned char **test_input,unsigned char **channel_input,int Zc
   char temp;
   int simd_size;
 
-  __m256i shufmask = simde_mm256_set_epi64x(0x0303030303030303, 0x0202020202020202,0x0101010101010101, 0x0000000000000000);
-  __m256i andmask  = simde_mm256_set1_epi64x(0x0102040810204080);  // every 8 bits -> 8 bytes, pattern repeats.
-  __m256i zero256   = simde_mm256_setzero_si256();
-  __m256i masks[8];
-  register __m256i c256;
+  simde__m256i shufmask = simde_mm256_set_epi64x(0x0303030303030303, 0x0202020202020202,0x0101010101010101, 0x0000000000000000);
+  simde__m256i andmask  = simde_mm256_set1_epi64x(0x0102040810204080);  // every 8 bits -> 8 bytes, pattern repeats.
+  simde__m256i zero256   = simde_mm256_setzero_si256();
+  simde__m256i masks[8];
+  register simde__m256i c256;
   masks[0] = simde_mm256_set1_epi8(0x1);
   masks[1] = simde_mm256_set1_epi8(0x2);
   masks[2] = simde_mm256_set1_epi8(0x4);
@@ -124,7 +124,7 @@ int nrLDPC_encod(unsigned char **test_input,unsigned char **channel_input,int Zc
     for (j=1; j<impp->n_segments; j++) {
       c256 = simde_mm256_or_si256(simde_mm256_and_si256(simde_mm256_cmpeq_epi8(simde_mm256_andnot_si256(simde_mm256_shuffle_epi8(simde_mm256_set1_epi32(((uint32_t*)test_input[j])[i]), shufmask),andmask),zero256),masks[j]),c256);
     }
-    ((__m256i *)c)[i] = c256;
+    ((simde__m256i *)c)[i] = c256;
   }
 
   for (i=(block_length>>5)<<5;i<block_length;i++) {
@@ -139,7 +139,7 @@ int nrLDPC_encod(unsigned char **test_input,unsigned char **channel_input,int Zc
 
   if(impp->tinput != NULL) stop_meas(impp->tinput);
 
-  if ((BG==1 && Zc>176) || (BG==2 && Zc>64)) { 
+  if ((BG==1 && Zc>=176) || (BG==2 && Zc>=64)) { 
     // extend matrix
     if(impp->tprep != NULL) start_meas(impp->tprep);
     if(impp->tprep != NULL) stop_meas(impp->tprep);
@@ -165,17 +165,17 @@ int nrLDPC_encod(unsigned char **test_input,unsigned char **channel_input,int Zc
     //AssertFatal(((block_length-(2*Zc))&31) == 0,"block_length-(2*Zc) needs to be a multiple of 32 for now\n");
     uint32_t l1 = (block_length-(2*Zc))>>5;
     uint32_t l2 = ((nrows-no_punctured_columns) * Zc-removed_bit)>>5;
-    __m256i *c256p = (__m256i *)&c[2*Zc];
-    __m256i *d256p = (__m256i *)&d[0];
+    simde__m256i *c256p = (simde__m256i *)&c[2*Zc];
+    simde__m256i *d256p = (simde__m256i *)&d[0];
     //  if (((block_length-(2*Zc))&31)>0) l1++;
     
     for (i=0;i<l1;i++)
-      for (j=0;j<impp->n_segments;j++) ((__m256i *)channel_input[j])[i] = simde_mm256_and_si256(simde_mm256_srai_epi16(c256p[i],j),masks[0]);
+      for (j=0;j<impp->n_segments;j++) ((simde__m256i *)channel_input[j])[i] = simde_mm256_and_si256(simde_mm256_srai_epi16(c256p[i],j),masks[0]);
     
     //  if ((((nrows-no_punctured_columns) * Zc-removed_bit)&31)>0) l2++;
     
     for (i1=0;i1<l2;i1++,i++)
-      for (j=0;j<impp->n_segments;j++) ((__m256i *)channel_input[j])[i] = simde_mm256_and_si256(simde_mm256_srai_epi16(d256p[i1],j),masks[0]);
+      for (j=0;j<impp->n_segments;j++) ((simde__m256i *)channel_input[j])[i] = simde_mm256_and_si256(simde_mm256_srai_epi16(d256p[i1],j),masks[0]);
   }
   else {
 #ifdef DEBUG_LDPC
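
The demux loops above pull segment j back out of the packed bytes: every byte of c/d holds one bit per segment (masks[j] = 1 << j), so shifting right by j and keeping bit 0 yields segment j as 0/1 bytes. simde_mm256_srai_epi16 shifts 16-bit lanes, but since only bit 0 of each byte survives the final AND and j < 8, no cross-byte contamination reaches the result. A minimal sketch (extract_segment is illustrative):

#include <simde/x86/avx2.h>

/* Return segment j of 'packed' as a vector of 0x00/0x01 bytes. */
static simde__m256i extract_segment(simde__m256i packed, int j)
{
  return simde_mm256_and_si256(simde_mm256_srai_epi16(packed, j),
                               simde_mm256_set1_epi8(0x1));
}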
diff --git a/openair1/PHY/CODING/nrLDPC_encoder/ldpc_encoder_optim8segmulti.c b/openair1/PHY/CODING/nrLDPC_encoder/ldpc_encoder_optim8segmulti.c
index 5df688bc5853021b7cae7176fa62bf09f5663146..8a9df739bf27b48ab82749af8b4d47051151f413 100644
--- a/openair1/PHY/CODING/nrLDPC_encoder/ldpc_encoder_optim8segmulti.c
+++ b/openair1/PHY/CODING/nrLDPC_encoder/ldpc_encoder_optim8segmulti.c
@@ -62,11 +62,11 @@ int nrLDPC_encod(unsigned char **input,unsigned char **output,int Zc,int Kb,shor
   ///printf("macro_segment: %d\n", macro_segment);
   ///printf("macro_segment_end: %d\n", macro_segment_end );
 
-  __m256i shufmask = simde_mm256_set_epi64x(0x0303030303030303, 0x0202020202020202,0x0101010101010101, 0x0000000000000000);
-  __m256i andmask  = simde_mm256_set1_epi64x(0x0102040810204080);  // every 8 bits -> 8 bytes, pattern repeats.
-  __m256i zero256   = simde_mm256_setzero_si256();
-  __m256i masks[8];
-  register __m256i c256;
+  simde__m256i shufmask = simde_mm256_set_epi64x(0x0303030303030303, 0x0202020202020202,0x0101010101010101, 0x0000000000000000);
+  simde__m256i andmask  = simde_mm256_set1_epi64x(0x0102040810204080);  // every 8 bits -> 8 bytes, pattern repeats.
+  simde__m256i zero256   = simde_mm256_setzero_si256();
+  simde__m256i masks[8];
+  register simde__m256i c256;
   masks[0] = simde_mm256_set1_epi8(0x1);
   masks[1] = simde_mm256_set1_epi8(0x2);
   masks[2] = simde_mm256_set1_epi8(0x4);
@@ -97,11 +97,10 @@ int nrLDPC_encod(unsigned char **input,unsigned char **output,int Zc,int Kb,shor
   LOG_D(PHY,"ldpc_encoder_optim_8seg: PDU (seg 0) %x %x %x %x\n",input[0][0],input[0][1],input[0][2],input[0][3]);
 #endif
 
-  AssertFatal(Zc>0,"no valid Zc found for block length %d\n",block_length);
-
+  AssertFatal(Zc > 0, "no valid Zc found for block length %d\n", block_length);
   if ((Zc&31) > 0) simd_size = 16;
-  else          simd_size = 32;
-
+  else
+    simd_size = 32;
   unsigned char cc[22*Zc] __attribute__((aligned(32))); //padded input, unpacked, max size
   unsigned char dd[46*Zc] __attribute__((aligned(32))); //coded parity part output, unpacked, max size
 
@@ -132,7 +131,7 @@ int nrLDPC_encod(unsigned char **input,unsigned char **output,int Zc,int Kb,shor
     for (int j=macro_segment+1; j < macro_segment_end; j++) {    
       c256 = simde_mm256_or_si256(simde_mm256_and_si256(simde_mm256_cmpeq_epi8(simde_mm256_andnot_si256(simde_mm256_shuffle_epi8(simde_mm256_set1_epi32(((uint32_t*)input[j])[i]), shufmask),andmask),zero256),masks[j-macro_segment]),c256);
     }
-    ((__m256i *)cc)[i] = c256;
+    ((simde__m256i *)cc)[i] = c256;
   }
 
   for (int i=(block_length>>5)<<5;i<block_length;i++) {
@@ -148,7 +147,7 @@ int nrLDPC_encod(unsigned char **input,unsigned char **output,int Zc,int Kb,shor
 
   if(impp->tinput != NULL) stop_meas(impp->tinput);
 
-  if ((BG==1 && Zc>176) || (BG==2 && Zc>64)) {
+  if ((BG==1 && Zc>=176) || (BG==2 && Zc>=64)) {
     // extend matrix
     if(impp->tprep != NULL) start_meas(impp->tprep);
     if(impp->tprep != NULL) stop_meas(impp->tprep);
@@ -174,20 +173,20 @@ int nrLDPC_encod(unsigned char **input,unsigned char **output,int Zc,int Kb,shor
     //AssertFatal(((block_length-(2*Zc))&31) == 0,"block_length-(2*Zc) needs to be a multiple of 32 for now\n");
     uint32_t l1 = (block_length-(2*Zc))>>5;
     uint32_t l2 = ((nrows-no_punctured_columns) * Zc-removed_bit)>>5;
-    __m256i *c256p = (__m256i *)&cc[2*Zc];
-    __m256i *d256p = (__m256i *)&dd[0];
+    simde__m256i *c256p = (simde__m256i *)&cc[2*Zc];
+    simde__m256i *d256p = (simde__m256i *)&dd[0];
     //  if (((block_length-(2*Zc))&31)>0) l1++;
 
     for (int i=0;i<l1;i++)
-      //for (j=0;j<n_segments;j++) ((__m256i *)output[j])[i] = simde_mm256_and_si256(simde_mm256_srai_epi16(c256p[i],j),masks[0]);
-    	for (int j=macro_segment; j < macro_segment_end; j++) ((__m256i *)output[j])[i] = simde_mm256_and_si256(simde_mm256_srai_epi16(c256p[i],j-macro_segment),masks[0]);
+      //for (j=0;j<n_segments;j++) ((simde__m256i *)output[j])[i] = simde_mm256_and_si256(simde_mm256_srai_epi16(c256p[i],j),masks[0]);
+    	for (int j=macro_segment; j < macro_segment_end; j++) ((simde__m256i *)output[j])[i] = simde_mm256_and_si256(simde_mm256_srai_epi16(c256p[i],j-macro_segment),masks[0]);
 
 
     //  if ((((nrows-no_punctured_columns) * Zc-removed_bit)&31)>0) l2++;
 
     for (int i1=0, i=l1;i1<l2;i1++,i++)
-      //for (j=0;j<n_segments;j++) ((__m256i *)output[j])[i] = simde_mm256_and_si256(simde_mm256_srai_epi16(d256p[i1],j),masks[0]);
-    	for (int j=macro_segment; j < macro_segment_end; j++)  ((__m256i *)output[j])[i] = simde_mm256_and_si256(simde_mm256_srai_epi16(d256p[i1],j-macro_segment),masks[0]);
+      //for (j=0;j<n_segments;j++) ((simde__m256i *)output[j])[i] = simde_mm256_and_si256(simde_mm256_srai_epi16(d256p[i1],j),masks[0]);
+    	for (int j=macro_segment; j < macro_segment_end; j++)  ((simde__m256i *)output[j])[i] = simde_mm256_and_si256(simde_mm256_srai_epi16(d256p[i1],j-macro_segment),masks[0]);
   }
   else {
 #ifdef DEBUG_LDPC
diff --git a/openair1/PHY/CODING/nrPolar_tools/nr_polar_decoding_tools.c b/openair1/PHY/CODING/nrPolar_tools/nr_polar_decoding_tools.c
index 7fd8d351ffd18ff6631914008e72a788a4c0c602..6ae6c46498cd197122799e9bcb6c3df502912d2d 100644
--- a/openair1/PHY/CODING/nrPolar_tools/nr_polar_decoding_tools.c
+++ b/openair1/PHY/CODING/nrPolar_tools/nr_polar_decoding_tools.c
@@ -233,18 +233,6 @@ void build_decoder_tree(t_nrPolar_params *polarParams)
 #endif
 }
 
-#if defined(__arm__) || defined(__aarch64__)
-// translate 1-1 SIMD functions from SSE to NEON
-#define __m128i int16x8_t
-#define __m64 int8x8_t
-#define _mm_abs_epi16(a) vabsq_s16(a)
-#define _mm_min_epi16(a,b) vminq_s16(a,b)
-#define _mm_subs_epi16(a,b) vsubq_s16(a,b)
-#define _mm_abs_pi16(a) vabs_s16(a)
-#define _mm_min_pi16(a,b) vmin_s16(a,b)
-#define _mm_subs_pi16(a,b) vsub_s16(a,b)
-#endif
-
 void applyFtoleft(const t_nrPolar_params *pp, decoder_node_t *node) {
   int16_t *alpha_v=node->alpha;
   int16_t *alpha_l=node->left->alpha;
@@ -263,36 +251,36 @@ void applyFtoleft(const t_nrPolar_params *pp, decoder_node_t *node) {
   if (node->left->all_frozen == 0) {
     int avx2mod = (node->Nv/2)&15;
     if (avx2mod == 0) {
-      __m256i a256,b256,absa256,absb256,minabs256;
+      simde__m256i a256,b256,absa256,absb256,minabs256;
       int avx2len = node->Nv/2/16;
 
       //      printf("avx2len %d\n",avx2len);
       for (int i=0;i<avx2len;i++) {
-	a256       =((__m256i*)alpha_v)[i];
-	b256       =((__m256i*)alpha_v)[i+avx2len];
+	a256       =((simde__m256i*)alpha_v)[i];
+	b256       =((simde__m256i*)alpha_v)[i+avx2len];
 	absa256    =simde_mm256_abs_epi16(a256);
 	absb256    =simde_mm256_abs_epi16(b256);
 	minabs256  =simde_mm256_min_epi16(absa256,absb256);
-	((__m256i*)alpha_l)[i] =simde_mm256_sign_epi16(minabs256,simde_mm256_sign_epi16(a256,b256));
+	((simde__m256i*)alpha_l)[i] =simde_mm256_sign_epi16(minabs256,simde_mm256_sign_epi16(a256,b256));
       }
     }
     else if (avx2mod == 8) {
-      __m128i a128,b128,absa128,absb128,minabs128;
-      a128       =*((__m128i*)alpha_v);
-      b128       =((__m128i*)alpha_v)[1];
-      absa128    =_mm_abs_epi16(a128);
-      absb128    =_mm_abs_epi16(b128);
-      minabs128  =_mm_min_epi16(absa128,absb128);
-      *((__m128i*)alpha_l) =_mm_sign_epi16(minabs128,_mm_sign_epi16(a128,b128));
+      simde__m128i a128,b128,absa128,absb128,minabs128;
+      a128       =*((simde__m128i*)alpha_v);
+      b128       =((simde__m128i*)alpha_v)[1];
+      absa128    =simde_mm_abs_epi16(a128);
+      absb128    =simde_mm_abs_epi16(b128);
+      minabs128  =simde_mm_min_epi16(absa128,absb128);
+      *((simde__m128i*)alpha_l) =simde_mm_sign_epi16(minabs128,simde_mm_sign_epi16(a128,b128));
     }
     else if (avx2mod == 4) {
-      __m64 a64,b64,absa64,absb64,minabs64;
-      a64       =*((__m64*)alpha_v);
-      b64       =((__m64*)alpha_v)[1];
-      absa64    =_mm_abs_pi16(a64);
-      absb64    =_mm_abs_pi16(b64);
-      minabs64  =_mm_min_pi16(absa64,absb64);
-      *((__m64*)alpha_l) =_mm_sign_pi16(minabs64,_mm_sign_pi16(a64,b64));
+      simde__m64 a64,b64,absa64,absb64,minabs64;
+      a64       =*((simde__m64*)alpha_v);
+      b64       =((simde__m64*)alpha_v)[1];
+      absa64    =simde_mm_abs_pi16(a64);
+      absb64    =simde_mm_abs_pi16(b64);
+      minabs64  =simde_mm_min_pi16(absa64,absb64);
+      *((simde__m64*)alpha_l) =simde_mm_sign_pi16(minabs64,simde_mm_sign_pi16(a64,b64));
     }
     else
     { // equivalent scalar code to above, activated only on non x86/ARM architectures
@@ -338,17 +326,17 @@ void applyGtoright(const t_nrPolar_params *pp,decoder_node_t *node) {
       int avx2len = node->Nv/2/16;
       
       for (int i=0;i<avx2len;i++) {
-	((__m256i *)alpha_r)[i] = 
-	  simde_mm256_subs_epi16(((__m256i *)alpha_v)[i+avx2len],
-			    simde_mm256_sign_epi16(((__m256i *)alpha_v)[i],
-					      ((__m256i *)betal)[i]));	
+	((simde__m256i *)alpha_r)[i] = 
+	  simde_mm256_subs_epi16(((simde__m256i *)alpha_v)[i+avx2len],
+			    simde_mm256_sign_epi16(((simde__m256i *)alpha_v)[i],
+					      ((simde__m256i *)betal)[i]));	
       }
     }
     else if (avx2mod == 8) {
-      ((__m128i *)alpha_r)[0] = _mm_subs_epi16(((__m128i *)alpha_v)[1],_mm_sign_epi16(((__m128i *)alpha_v)[0],((__m128i *)betal)[0]));	
+      ((simde__m128i *)alpha_r)[0] = simde_mm_subs_epi16(((simde__m128i *)alpha_v)[1],simde_mm_sign_epi16(((simde__m128i *)alpha_v)[0],((simde__m128i *)betal)[0]));	
     }
     else if (avx2mod == 4) {
-      ((__m64 *)alpha_r)[0] = _mm_subs_pi16(((__m64 *)alpha_v)[1],_mm_sign_pi16(((__m64 *)alpha_v)[0],((__m64 *)betal)[0]));	
+      ((simde__m64 *)alpha_r)[0] = simde_mm_subs_pi16(((simde__m64 *)alpha_v)[1],simde_mm_sign_pi16(((simde__m64 *)alpha_v)[0],((simde__m64 *)betal)[0]));	
     }
     else
       {
@@ -386,21 +374,21 @@ void computeBeta(const t_nrPolar_params *pp,decoder_node_t *node) {
 #endif
   if (node->left->all_frozen==0) { // if left node is not aggregation of frozen bits
     int avx2mod = (node->Nv/2)&15;
-    register __m256i allones=*((__m256i*)all1);
+    register simde__m256i allones=*((simde__m256i*)all1);
     if (avx2mod == 0) {
       int avx2len = node->Nv/2/16;
       for (int i=0;i<avx2len;i++) {
-	((__m256i*)betav)[i] = simde_mm256_or_si256(simde_mm256_cmpeq_epi16(((__m256i*)betar)[i],
-								  ((__m256i*)betal)[i]),allones);
+	((simde__m256i*)betav)[i] = simde_mm256_or_si256(simde_mm256_cmpeq_epi16(((simde__m256i*)betar)[i],
+								  ((simde__m256i*)betal)[i]),allones);
       }
     }
     else if (avx2mod == 8) {
-      ((__m128i*)betav)[0] = _mm_or_si128(_mm_cmpeq_epi16(((__m128i*)betar)[0],
-							  ((__m128i*)betal)[0]),*((__m128i*)all1));
+      ((simde__m128i*)betav)[0] = simde_mm_or_si128(simde_mm_cmpeq_epi16(((simde__m128i*)betar)[0],
+							  ((simde__m128i*)betal)[0]),*((simde__m128i*)all1));
     }
     else if (avx2mod == 4) {
-      ((__m64*)betav)[0] = _mm_or_si64(_mm_cmpeq_pi16(((__m64*)betar)[0],
-						      ((__m64*)betal)[0]),*((__m64*)all1));
+      ((simde__m64*)betav)[0] = simde_mm_or_si64(simde_mm_cmpeq_pi16(((simde__m64*)betar)[0],
+						      ((simde__m64*)betal)[0]),*((simde__m64*)all1));
     }
     else
       {
@@ -428,4 +416,3 @@ void generic_polar_decoder(const t_nrPolar_params *pp,decoder_node_t *node) {
   computeBeta(pp, node);
 
 } 
-
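
With the SSE-to-NEON macro shims gone, the f-kernel is expressed once in SIMDE for all three widths: abs/min compute min(|a|,|b|), and the nested sign operations re-apply the product of the two signs (sign(x, y) yields x, -x, or 0 according to the sign of y). A scalar reference for what each lane computes (polar_f is illustrative):

#include <stdint.h>
#include <stdlib.h>

/* Min-sum f-function: f(a,b) = sign(a) * sign(b) * min(|a|, |b|). */
static int16_t polar_f(int16_t a, int16_t b)
{
  int16_t m = (int16_t)(abs(a) < abs(b) ? abs(a) : abs(b));
  return (int16_t)(((a < 0) != (b < 0)) ? -m : m);
}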
diff --git a/openair1/PHY/CODING/nrPolar_tools/nr_polar_kernal_operation.c b/openair1/PHY/CODING/nrPolar_tools/nr_polar_kernal_operation.c
index c826a523df792308c2f821cb4dc0de733d8c40c7..81e7bb99f778b42ec84b338e3fb692d9bc9e8699 100644
--- a/openair1/PHY/CODING/nrPolar_tools/nr_polar_kernal_operation.c
+++ b/openair1/PHY/CODING/nrPolar_tools/nr_polar_kernal_operation.c
@@ -11,8 +11,8 @@ void nr_polar_kernal_operation(uint8_t *u, uint8_t *d, uint16_t N)
 	
 	uint32_t i,j;
 
-	__m256i A,B,C,D,E,U,zerosOnly, OUT;
-	__m256i inc;
+	simde__m256i A,B,C,D,E,U,zerosOnly, OUT;
+	simde__m256i inc;
 	uint32_t dTest[8];
 	uint32_t uArray[8];
 	uint32_t k;	
@@ -21,7 +21,7 @@ void nr_polar_kernal_operation(uint8_t *u, uint8_t *d, uint16_t N)
 	//initialisation
 	for(k=0; k<8; k++)
 		incArray[k]=k;
-	inc=simde_mm256_loadu_si256((__m256i const*)incArray); // 0, 1, ..., 7 to increase
+	inc=simde_mm256_loadu_si256((simde__m256i const*)incArray); // 0, 1, ..., 7 to increase
 	
 	zerosOnly=simde_mm256_setzero_si256(); // for comparison
 
@@ -38,7 +38,7 @@ void nr_polar_kernal_operation(uint8_t *u, uint8_t *d, uint16_t N)
 			A=simde_mm256_sub_epi32(A, B); //(j-i), (j-(i+1)), ... (j-(i+7))  
 			
 			U=simde_mm256_set1_epi32((int)u[j]);
-			simde_mm256_storeu_si256((__m256i*)uArray, U); //u(j) ... u(j) for the maskload
+			simde_mm256_storeu_si256((simde__m256i*)uArray, U); //u(j) ... u(j) for the maskload
 
 			C=simde_mm256_and_si256(A, B); //(j-i)&i -> If zero, then XOR with the u(j)
 			D=simde_mm256_cmpeq_epi32(C, zerosOnly); // compare with zero and use the result as mask
@@ -47,7 +47,7 @@ void nr_polar_kernal_operation(uint8_t *u, uint8_t *d, uint16_t N)
 			OUT=simde_mm256_xor_si256(OUT, E); //32 bit x 8
 
 		}
-		simde_mm256_storeu_si256((__m256i*)dTest, OUT);
+		simde_mm256_storeu_si256((simde__m256i*)dTest, OUT);
 
 		for(k=0; k<8; k++) // Conversion from 32 bits to 8 bits
                 {	
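
The vector loop above evaluates eight output indices at a time: reading the masks off the code, output bit i XOR-accumulates every input bit u[j] for which ((j - i) & i) == 0, i.e. the polar kernel d = u * F^(xn). A scalar reference under that reading (polar_kernel_scalar is illustrative, and the index condition is inferred from the vector code, not stated in the patch):

#include <stdint.h>

static void polar_kernel_scalar(const uint8_t *u, uint8_t *d, uint16_t N)
{
  for (uint32_t i = 0; i < N; i++) {
    uint8_t acc = 0;
    for (uint32_t j = 0; j < N; j++) {
      int32_t diff = (int32_t)j - (int32_t)i; /* matches the epi32 subtract above */
      if ((diff & (int32_t)i) == 0)
        acc ^= u[j] & 1;
    }
    d[i] = acc;
  }
}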
diff --git a/openair1/PHY/CODING/nrSmallBlock/decodeSmallBlock.c b/openair1/PHY/CODING/nrSmallBlock/decodeSmallBlock.c
index 53b45894bd510b80e8978d2d821e456a8b8b9d78..db619bffcb5b31518ab0152f56687de0a2946406 100644
--- a/openair1/PHY/CODING/nrSmallBlock/decodeSmallBlock.c
+++ b/openair1/PHY/CODING/nrSmallBlock/decodeSmallBlock.c
@@ -54,9 +54,9 @@ uint16_t decodeSmallBlock(int8_t *in, uint8_t len){
 				Rhat[j] += in[k] * hadamard32InterleavedTransposed[j][k];
 
 		for (int i = 0; i < NR_SMALL_BLOCK_CODED_BITS; i += 16) {
-			__m256i a15_a0 = simde_mm256_loadu_si256((__m256i*)&Rhat[i]);
+			simde__m256i a15_a0 = simde_mm256_loadu_si256((simde__m256i*)&Rhat[i]);
 			a15_a0 = simde_mm256_abs_epi16(a15_a0);
-			simde_mm256_storeu_si256((__m256i*)(&Rhatabs[i]), a15_a0);
+			simde_mm256_storeu_si256((simde__m256i*)(&Rhatabs[i]), a15_a0);
 		}
 		maxVal = Rhatabs[0];
 		for (int k = 1; k < jmax; ++k){
@@ -82,23 +82,20 @@ uint16_t decodeSmallBlock(int8_t *in, uint8_t len){
 #if !defined(__AVX512F__)
 		int8_t DmatrixElement[NR_SMALL_BLOCK_CODED_BITS] = {0};
 #endif		
-		__m256i _in_256 = simde_mm256_loadu_si256 ((__m256i*)&in[0]);
-		__m256i _maskD_256, _Dmatrixj_256, _maskH_256, _DmatrixElement_256;
+		simde__m256i _in_256 = simde_mm256_loadu_si256 ((simde__m256i*)&in[0]);
+		simde__m256i _maskD_256, _Dmatrixj_256, _maskH_256, _DmatrixElement_256;
 		for (int j = 0; j < ( 1<<(len-6) ); ++j) {
-			_maskD_256 = simde_mm256_loadu_si256 ((__m256i*)(&maskD[j][0]));
+			_maskD_256 = simde_mm256_loadu_si256 ((simde__m256i*)(&maskD[j][0]));
 			_Dmatrixj_256 = simde_mm256_sign_epi8 (_in_256, _maskD_256);
 			for (int k = 0; k < NR_SMALL_BLOCK_CODED_BITS; ++k) {
-				_maskH_256 = simde_mm256_loadu_si256 ((__m256i*)(&hadamard32InterleavedTransposed[k][0]));
+				_maskH_256 = simde_mm256_loadu_si256 ((simde__m256i*)(&hadamard32InterleavedTransposed[k][0]));
 				_DmatrixElement_256 = simde_mm256_sign_epi8 (_Dmatrixj_256, _maskH_256);
 #if defined(__AVX512F__)
-			    DmatrixElementVal = _mm512_reduce_add_epi32 (
-			    		            _mm512_add_epi32(
-			    				    _mm512_cvtepi8_epi32 (simde_mm256_extracti128_si256 (_DmatrixElement_256, 1)),
-								    _mm512_cvtepi8_epi32 (simde_mm256_castsi256_si128 (_DmatrixElement_256))
-			    		            				)
-															);
+        DmatrixElementVal = simde_mm512_reduce_add_epi32(
+            simde_mm512_add_epi32(simde_mm512_cvtepi8_epi32(simde_mm256_extracti128_si256(_DmatrixElement_256, 1)),
+                                  simde_mm512_cvtepi8_epi32(simde_mm256_castsi256_si128(_DmatrixElement_256))));
 #else
-				simde_mm256_storeu_si256((__m256i*)(&DmatrixElement[0]), _DmatrixElement_256);
+				simde_mm256_storeu_si256((simde__m256i*)(&DmatrixElement[0]), _DmatrixElement_256);
 				for (int i = 0; i < NR_SMALL_BLOCK_CODED_BITS; ++i)
 					DmatrixElementVal += DmatrixElement[i];
 #endif
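
The AVX-512 branch now spells the native _mm512_* calls in SIMDE form (simde_mm512_reduce_add_epi32 and friends). What the widen-and-reduce chain computes is simply the signed sum of the 32 byte products, exactly what the #else branch does with a scalar loop; a standalone check (sum_epi8 is illustrative):

#include <stdint.h>
#include <simde/x86/avx2.h>

/* Sum the 32 signed bytes of v -- the value the cvtepi8_epi32/add/reduce
 * chain above produces. */
static int32_t sum_epi8(simde__m256i v)
{
  int8_t b[32];
  simde_mm256_storeu_si256((simde__m256i *)b, v);
  int32_t s = 0;
  for (int i = 0; i < 32; i++)
    s += b[i];
  return s;
}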
diff --git a/openair1/PHY/CODING/nr_rate_matching.c b/openair1/PHY/CODING/nr_rate_matching.c
index bdcbaef77639c3383f086df4eabfbc912927e1dc..5a6d2885a5e9086c4ea2f02236c49d48181748cb 100644
--- a/openair1/PHY/CODING/nr_rate_matching.c
+++ b/openair1/PHY/CODING/nr_rate_matching.c
@@ -42,18 +42,18 @@ void nr_interleaving_ldpc(uint32_t E, uint8_t Qm, uint8_t *e,uint8_t *f)
   uint8_t *e0,*e1,*e2,*e3,*e4,*e5,*e6,*e7;
   uint8_t *fp;
 #if 0 //def __WASAVX2__
-  __m256i tmp0,tmp1,tmp2,tmp0b,tmp1b,tmp3,tmp4,tmp5;
-  __m256i *e0_256,*e1_256,*e2_256,*e3_256,*e4_256,*e5_256,*e6_256,*e7_256;
+  simde__m256i tmp0,tmp1,tmp2,tmp0b,tmp1b,tmp3,tmp4,tmp5;
+  simde__m256i *e0_256,*e1_256,*e2_256,*e3_256,*e4_256,*e5_256,*e6_256,*e7_256;
 
-  __m256i *f_256=(__m256i *)f;
+  simde__m256i *f_256=(simde__m256i *)f;
 
   uint8_t *fp2;
   switch(Qm) {
   case 2:
     e0=e;
     e1=e0+EQm;
-    e0_256=(__m256i *)e0;
-    e1_256=(__m256i *)e1;
+    e0_256=(simde__m256i *)e0;
+    e1_256=(simde__m256i *)e1;
     for (int k=0,j=0;j<EQm>>5;j++,k+=2) {
       f_256[k]   = simde_mm256_unpacklo_epi8(e0_256[j],e1_256[j]);
       f_256[k+1] = simde_mm256_unpackhi_epi8(e0_256[j],e1_256[j]); 
@@ -64,10 +64,10 @@ void nr_interleaving_ldpc(uint32_t E, uint8_t Qm, uint8_t *e,uint8_t *f)
     e1=e0+EQm;
     e2=e1+EQm;
     e3=e2+EQm;
-    e0_256=(__m256i *)e0;
-    e1_256=(__m256i *)e1;
-    e2_256=(__m256i *)e2;
-    e3_256=(__m256i *)e3;
+    e0_256=(simde__m256i *)e0;
+    e1_256=(simde__m256i *)e1;
+    e2_256=(simde__m256i *)e2;
+    e3_256=(simde__m256i *)e3;
     for (int k=0,j=0;j<EQm>>5;j++,k+=4) {
       tmp0   = simde_mm256_unpacklo_epi8(e0_256[j],e1_256[j]); // e0(i) e1(i) e0(i+1) e1(i+1) .... e0(i+15) e1(i+15)
       tmp1   = simde_mm256_unpacklo_epi8(e2_256[j],e3_256[j]); // e2(i) e3(i) e2(i+1) e3(i+1) .... e2(i+15) e3(i+15)
@@ -86,12 +86,12 @@ void nr_interleaving_ldpc(uint32_t E, uint8_t Qm, uint8_t *e,uint8_t *f)
     e3=e2+EQm;
     e4=e3+EQm;
     e5=e4+EQm;
-    e0_256=(__m256i *)e0;
-    e1_256=(__m256i *)e1;
-    e2_256=(__m256i *)e2;
-    e3_256=(__m256i *)e3;
-    e4_256=(__m256i *)e4;
-    e5_256=(__m256i *)e5;
+    e0_256=(simde__m256i *)e0;
+    e1_256=(simde__m256i *)e1;
+    e2_256=(simde__m256i *)e2;
+    e3_256=(simde__m256i *)e3;
+    e4_256=(simde__m256i *)e4;
+    e5_256=(simde__m256i *)e5;
 
     for (int j=0,k=0;j<EQm>>5;j++,k+=192) {
       fp  = f+k;
@@ -186,14 +186,14 @@ void nr_interleaving_ldpc(uint32_t E, uint8_t Qm, uint8_t *e,uint8_t *f)
     e6=e5+EQm;
     e7=e6+EQm;
 
-    e0_256=(__m256i *)e0;
-    e1_256=(__m256i *)e1;
-    e2_256=(__m256i *)e2;
-    e3_256=(__m256i *)e3;
-    e4_256=(__m256i *)e4;
-    e5_256=(__m256i *)e5;
-    e6_256=(__m256i *)e6;
-    e7_256=(__m256i *)e7;
+    e0_256=(simde__m256i *)e0;
+    e1_256=(simde__m256i *)e1;
+    e2_256=(simde__m256i *)e2;
+    e3_256=(simde__m256i *)e3;
+    e4_256=(simde__m256i *)e4;
+    e5_256=(simde__m256i *)e5;
+    e6_256=(simde__m256i *)e6;
+    e7_256=(simde__m256i *)e7;
     for (int k=0,j=0;j<EQm>>5;j++,k+=8) {
       tmp0   = simde_mm256_unpacklo_epi8(e0_256[j],e1_256[j]); // e0(i) e1(i) e0(i+1) e1(i+1) .... e0(i+15) e1(i+15)
       tmp1   = simde_mm256_unpacklo_epi8(e2_256[j],e3_256[j]); // e2(i) e3(i) e2(i+1) e3(i+1) .... e2(i+15) e3(i+15)
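
This interleaver block sits under #if 0, but the type conversion keeps it consistent with the rest of the port. For reference, the Qm = 2 target layout is a plain byte interleave of the two streams; note that 256-bit unpacklo/unpackhi interleave within each 128-bit lane, so the vector version reproduces this ordering per lane rather than across the full 32 bytes. A scalar statement of the layout (interleave_qm2 is illustrative):

#include <stdint.h>

/* f = e0[0], e1[0], e0[1], e1[1], ... for EQm bytes per stream. */
static void interleave_qm2(const uint8_t *e0, const uint8_t *e1,
                           uint8_t *f, uint32_t EQm)
{
  for (uint32_t i = 0; i < EQm; i++) {
    f[2 * i]     = e0[i];
    f[2 * i + 1] = e1[i];
  }
}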
diff --git a/openair1/PHY/CODING/viterbi.c b/openair1/PHY/CODING/viterbi.c
index 6d900d452dfdf47d5280887dad0ae53f1ec340a5..cf04de3f328741a5dca56a528611bddeda0a2509 100644
--- a/openair1/PHY/CODING/viterbi.c
+++ b/openair1/PHY/CODING/viterbi.c
@@ -182,29 +182,15 @@ void phy_generate_viterbi_tables(void)
 
 void phy_viterbi_dot11_sse2(char *y,unsigned char *decoded_bytes,unsigned short n,int offset, int traceback )
 {
+  simde__m128i TB[4 * 4095 * 8]; // 4 simde__m128i per input bit (64 states, 8-bits per state = 16-way), 4095 is largest packet size
+                                 // in bytes, 8 bits/byte
 
-#if defined(__x86_64__) || defined(__i386__)
-  __m128i  TB[4*4095*8]; // 4 __m128i per input bit (64 states, 8-bits per state = 16-way), 4095 is largest packet size in bytes, 8 bits/byte
+  simde__m128i metrics0_15, metrics16_31, metrics32_47, metrics48_63, even0_30a, even0_30b, even32_62a, even32_62b, odd1_31a,
+      odd1_31b, odd33_63a, odd33_63b, TBeven0_30, TBeven32_62, TBodd1_31, TBodd33_63;
 
-  __m128i metrics0_15,metrics16_31,metrics32_47,metrics48_63,even0_30a,even0_30b,even32_62a,even32_62b,odd1_31a,odd1_31b,odd33_63a,odd33_63b,TBeven0_30,TBeven32_62,TBodd1_31,TBodd33_63;
+  simde__m128i min_state, min_state2;
 
-  __m128i min_state,min_state2;
-
-
-  __m128i *m0_ptr,*m1_ptr,*TB_ptr = &TB[offset<<2];
-
-#elif defined(__arm__) || defined(__aarch64__)
-  uint8x16x2_t TB[2*4095*8];  // 2 int8x16_t per input bit, 8 bits / byte, 4095 is largest packet size in bytes
-
-  uint8x16_t even0_30a,even0_30b,even32_62a,even32_62b,odd1_31a,odd1_31b,odd33_63a,odd33_63b,TBeven0_30,TBeven32_62,TBodd1_31,TBodd33_63;
-  uint8x16x2_t metrics0_31,metrics32_63;
-
-  uint8x16_t min_state;
-
-  uint8x16_t *m0_ptr,*m1_ptr;
-  uint8x16x2_t *TB_ptr = &TB[offset<<1];
-
-#endif
+  simde__m128i *m0_ptr, *m1_ptr, *TB_ptr = &TB[offset << 2];
 
   char *in = y;
   unsigned char prev_state0;
@@ -214,29 +200,15 @@ void phy_viterbi_dot11_sse2(char *y,unsigned char *decoded_bytes,unsigned short
   short position;
 
   //  printf("offset %d, TB_ptr %p\n",offset,TB_ptr);
-#if defined(__x86_64__) || defined(__i386__)
   if (offset == 0) {
     // set initial metrics
 
-    metrics0_15 = _mm_cvtsi32_si128(INIT0);
-    metrics16_31 = _mm_setzero_si128();
-    metrics32_47 = _mm_setzero_si128();
-    metrics48_63 = _mm_setzero_si128();
+    metrics0_15 = simde_mm_cvtsi32_si128(INIT0);
+    metrics16_31 = simde_mm_setzero_si128();
+    metrics32_47 = simde_mm_setzero_si128();
+    metrics48_63 = simde_mm_setzero_si128();
   }
 
-#elif defined(__arm__) || defined(__aarch64__)
-  if (offset == 0) {
-    // set initial metrics
-
-    metrics0_31.val[0]  = vdupq_n_u8(0); metrics0_31.val[0] = vsetq_lane_u8(INIT0,metrics0_31.val[0],0);
-    metrics0_31.val[1]  = vdupq_n_u8(0);
-    metrics32_63.val[0] = vdupq_n_u8(0);
-    metrics32_63.val[1] = vdupq_n_u8(0);
-  }
-
-
-#endif
-
   for (position=offset; position<(offset+n); position++) {
 
     //printf("%d : (%d,%d)\n",position,in[0],in[1]);
@@ -244,43 +216,41 @@ void phy_viterbi_dot11_sse2(char *y,unsigned char *decoded_bytes,unsigned short
     // get branch metric offsets for the 64 states
     table_offset = (in[0]+8 + ((in[1]+8)<<4))<<6;
 
-#if defined(__x86_64__) || defined(__i386__)
-    m0_ptr = (__m128i *)&m0_table[table_offset];
-    m1_ptr = (__m128i *)&m1_table[table_offset];
-
+    m0_ptr = (simde__m128i *)&m0_table[table_offset];
+    m1_ptr = (simde__m128i *)&m1_table[table_offset];
 
     // even states
-    even0_30a  = _mm_adds_epu8(metrics0_15,m0_ptr[0]);
-    even32_62a = _mm_adds_epu8(metrics16_31,m0_ptr[1]);
-    even0_30b  = _mm_adds_epu8(metrics32_47,m0_ptr[2]);
-    even32_62b = _mm_adds_epu8(metrics48_63,m0_ptr[3]);
+    even0_30a = simde_mm_adds_epu8(metrics0_15, m0_ptr[0]);
+    even32_62a = simde_mm_adds_epu8(metrics16_31, m0_ptr[1]);
+    even0_30b = simde_mm_adds_epu8(metrics32_47, m0_ptr[2]);
+    even32_62b = simde_mm_adds_epu8(metrics48_63, m0_ptr[3]);
 
     // odd states
-    odd1_31a   = _mm_adds_epu8(metrics0_15,m1_ptr[0]);
-    odd33_63a  = _mm_adds_epu8(metrics16_31,m1_ptr[1]);
-    odd1_31b   = _mm_adds_epu8(metrics32_47,m1_ptr[2]);
-    odd33_63b  = _mm_adds_epu8(metrics48_63,m1_ptr[3]);
+    odd1_31a = simde_mm_adds_epu8(metrics0_15, m1_ptr[0]);
+    odd33_63a = simde_mm_adds_epu8(metrics16_31, m1_ptr[1]);
+    odd1_31b = simde_mm_adds_epu8(metrics32_47, m1_ptr[2]);
+    odd33_63b = simde_mm_adds_epu8(metrics48_63, m1_ptr[3]);
     // select maxima
-    even0_30a  = _mm_max_epu8(even0_30a,even0_30b);
-    even32_62a = _mm_max_epu8(even32_62a,even32_62b);
-    odd1_31a   = _mm_max_epu8(odd1_31a,odd1_31b);
-    odd33_63a  = _mm_max_epu8(odd33_63a,odd33_63b);
+    even0_30a = simde_mm_max_epu8(even0_30a, even0_30b);
+    even32_62a = simde_mm_max_epu8(even32_62a, even32_62b);
+    odd1_31a = simde_mm_max_epu8(odd1_31a, odd1_31b);
+    odd33_63a = simde_mm_max_epu8(odd33_63a, odd33_63b);
 
     // Traceback information
-    TBeven0_30  = _mm_cmpeq_epi8(even0_30a,even0_30b);
-    TBeven32_62 = _mm_cmpeq_epi8(even32_62a,even32_62b);
-    TBodd1_31   = _mm_cmpeq_epi8(odd1_31a,odd1_31b);
-    TBodd33_63  = _mm_cmpeq_epi8(odd33_63a,odd33_63b);
+    TBeven0_30 = simde_mm_cmpeq_epi8(even0_30a, even0_30b);
+    TBeven32_62 = simde_mm_cmpeq_epi8(even32_62a, even32_62b);
+    TBodd1_31 = simde_mm_cmpeq_epi8(odd1_31a, odd1_31b);
+    TBodd33_63 = simde_mm_cmpeq_epi8(odd33_63a, odd33_63b);
 
-    metrics0_15        = _mm_unpacklo_epi8(even0_30a ,odd1_31a);
-    metrics16_31       = _mm_unpackhi_epi8(even0_30a ,odd1_31a);
-    metrics32_47       = _mm_unpacklo_epi8(even32_62a,odd33_63a);
-    metrics48_63       = _mm_unpackhi_epi8(even32_62a,odd33_63a);
+    metrics0_15 = simde_mm_unpacklo_epi8(even0_30a, odd1_31a);
+    metrics16_31 = simde_mm_unpackhi_epi8(even0_30a, odd1_31a);
+    metrics32_47 = simde_mm_unpacklo_epi8(even32_62a, odd33_63a);
+    metrics48_63 = simde_mm_unpackhi_epi8(even32_62a, odd33_63a);
 
-    TB_ptr[0] = _mm_unpacklo_epi8(TBeven0_30,TBodd1_31);
-    TB_ptr[1] = _mm_unpackhi_epi8(TBeven0_30,TBodd1_31);
-    TB_ptr[2] = _mm_unpacklo_epi8(TBeven32_62,TBodd33_63);
-    TB_ptr[3] = _mm_unpackhi_epi8(TBeven32_62,TBodd33_63);
+    TB_ptr[0] = simde_mm_unpacklo_epi8(TBeven0_30, TBodd1_31);
+    TB_ptr[1] = simde_mm_unpackhi_epi8(TBeven0_30, TBodd1_31);
+    TB_ptr[2] = simde_mm_unpacklo_epi8(TBeven32_62, TBodd33_63);
+    TB_ptr[3] = simde_mm_unpackhi_epi8(TBeven32_62, TBodd33_63);
 
     in+=2;
     TB_ptr += 4;
@@ -289,96 +259,34 @@ void phy_viterbi_dot11_sse2(char *y,unsigned char *decoded_bytes,unsigned short
     /****************************************************
     USE SSSE instruction phminpos!!!!!!!
     ****************************************************/
-    min_state =_mm_min_epu8(metrics0_15,metrics16_31);
-    min_state =_mm_min_epu8(min_state,metrics32_47);
-    min_state =_mm_min_epu8(min_state,metrics48_63);
-
+    min_state = simde_mm_min_epu8(metrics0_15, metrics16_31);
+    min_state = simde_mm_min_epu8(min_state, metrics32_47);
+    min_state = simde_mm_min_epu8(min_state, metrics48_63);
 
     min_state2 = min_state;
-    min_state  = _mm_unpacklo_epi8(min_state,min_state);
-    min_state2 = _mm_unpackhi_epi8(min_state2,min_state2);
-    min_state  = _mm_min_epu8(min_state,min_state2);
+    min_state = simde_mm_unpacklo_epi8(min_state, min_state);
+    min_state2 = simde_mm_unpackhi_epi8(min_state2, min_state2);
+    min_state = simde_mm_min_epu8(min_state, min_state2);
 
     min_state2 = min_state;
-    min_state  = _mm_unpacklo_epi8(min_state,min_state);
-    min_state2 = _mm_unpackhi_epi8(min_state2,min_state2);
-    min_state  = _mm_min_epu8(min_state,min_state2);
+    min_state = simde_mm_unpacklo_epi8(min_state, min_state);
+    min_state2 = simde_mm_unpackhi_epi8(min_state2, min_state2);
+    min_state = simde_mm_min_epu8(min_state, min_state2);
 
     min_state2 = min_state;
-    min_state  = _mm_unpacklo_epi8(min_state,min_state);
-    min_state2 = _mm_unpackhi_epi8(min_state2,min_state2);
-    min_state  = _mm_min_epu8(min_state,min_state2);
+    min_state = simde_mm_unpacklo_epi8(min_state, min_state);
+    min_state2 = simde_mm_unpackhi_epi8(min_state2, min_state2);
+    min_state = simde_mm_min_epu8(min_state, min_state2);
 
     min_state2 = min_state;
-    min_state  = _mm_unpacklo_epi8(min_state,min_state);
-    min_state2 = _mm_unpackhi_epi8(min_state2,min_state2);
-    min_state  = _mm_min_epu8(min_state,min_state2);
-
-    metrics0_15  = _mm_subs_epu8(metrics0_15,min_state);
-    metrics16_31 = _mm_subs_epu8(metrics16_31,min_state);
-    metrics32_47 = _mm_subs_epu8(metrics32_47,min_state);
-    metrics48_63 = _mm_subs_epu8(metrics48_63,min_state);
-#elif defined(__arm__) || defined(__aarch64__)
-    m0_ptr = (uint8x16_t *)&m0_table[table_offset];
-    m1_ptr = (uint8x16_t *)&m1_table[table_offset];
-
-
-    // even states
-    even0_30a  = vqaddq_u8(metrics0_31.val[0],m0_ptr[0]);
-    even32_62a = vqaddq_u8(metrics0_31.val[1],m0_ptr[1]);
-    even0_30b  = vqaddq_u8(metrics32_63.val[0],m0_ptr[2]);
-    even32_62b = vqaddq_u8(metrics32_63.val[1],m0_ptr[3]);
-
-    // odd states
-    odd1_31a   = vqaddq_u8(metrics0_31.val[0],m1_ptr[0]);
-    odd33_63a  = vqaddq_u8(metrics0_31.val[1],m1_ptr[1]);
-    odd1_31b   = vqaddq_u8(metrics32_63.val[0],m1_ptr[2]);
-    odd33_63b  = vqaddq_u8(metrics32_63.val[1],m1_ptr[3]);
-    // select maxima
-    even0_30a  = vmaxq_u8(even0_30a,even0_30b);
-    even32_62a = vmaxq_u8(even32_62a,even32_62b);
-    odd1_31a   = vmaxq_u8(odd1_31a,odd1_31b);
-    odd33_63a  = vmaxq_u8(odd33_63a,odd33_63b);
-
-    // Traceback information
-    TBeven0_30  = vceqq_u8(even0_30a,even0_30b);
-    TBeven32_62 = vceqq_u8(even32_62a,even32_62b);
-    TBodd1_31   = vceqq_u8(odd1_31a,odd1_31b);
-    TBodd33_63  = vceqq_u8(odd33_63a,odd33_63b);
-
-    metrics0_31  = vzipq_u8(even0_30a,odd1_31a);
-    metrics32_63 = vzipq_u8(even32_62a,odd33_63a);
-
-    TB_ptr[0] = vzipq_u8(TBeven0_30,TBodd1_31);
-    TB_ptr[1] = vzipq_u8(TBeven32_62,TBodd33_63);
-
-    in+=2;
-    TB_ptr += 2;
-
-    // rescale by subtracting minimum
-    /****************************************************
-    USE SSSE instruction phminpos!!!!!!!
-    ****************************************************/
-    min_state =vminq_u8(metrics0_31.val[0],metrics0_31.val[1]);
-    min_state =vminq_u8(min_state,metrics32_63.val[0]);
-    min_state =vminq_u8(min_state,metrics32_63.val[1]);
-    // here we have 16 maximum metrics from the 64 states
-    uint8x8_t min_state2 = vpmin_u8(((uint8x8_t*)&min_state)[0],((uint8x8_t*)&min_state)[0]);
-    // now the 8 maximum in min_state2
-    min_state2 = vpmin_u8(min_state2,min_state2);
-    // now the 4 maximum in min_state2, repeated twice
-    min_state2 = vpmin_u8(min_state2,min_state2);
-    // now the 2 maximum in min_state2, repeated 4 times
-    min_state2 = vpmin_u8(min_state2,min_state2);
-    // now the 1 maximum in min_state2, repeated 8 times
-    min_state  = vcombine_u8(min_state2,min_state2);
-    // now the 1 maximum in min_state, repeated 16 times
-    metrics0_31.val[0]  = vqsubq_u8(metrics0_31.val[0],min_state);
-    metrics0_31.val[1]  = vqsubq_u8(metrics0_31.val[1],min_state);
-    metrics32_63.val[0] = vqsubq_u8(metrics32_63.val[0],min_state);
-    metrics32_63.val[1] = vqsubq_u8(metrics32_63.val[1],min_state);
-
-#endif
+    min_state = simde_mm_unpacklo_epi8(min_state, min_state);
+    min_state2 = simde_mm_unpackhi_epi8(min_state2, min_state2);
+    min_state = simde_mm_min_epu8(min_state, min_state2);
+
+    metrics0_15 = simde_mm_subs_epu8(metrics0_15, min_state);
+    metrics16_31 = simde_mm_subs_epu8(metrics16_31, min_state);
+    metrics32_47 = simde_mm_subs_epu8(metrics32_47, min_state);
+    metrics48_63 = simde_mm_subs_epu8(metrics48_63, min_state);
   }
 
   // Traceback
@@ -405,9 +313,7 @@ void phy_viterbi_dot11_sse2(char *y,unsigned char *decoded_bytes,unsigned short
     }
   }
 
-#if defined(__x86_64) || defined(__i386__)
-  _mm_empty();
-#endif
+  simde_mm_empty();
 }
 
 #ifdef TEST_DEBUG
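
The unpack/min ladder that replaces the removed NEON branch is a horizontal minimum: each of the four rounds pairs up duplicated lanes and takes the byte-wise min, so after four rounds every byte of min_state holds the global minimum, which is then subtracted to rescale the path metrics. (The phminpos the old comment wishes for is SSE4.1's phminposuw and operates on 16-bit lanes, so it would not drop in directly for bytes.) A scalar equivalent of the ladder (broadcast_min_epu8 is illustrative):

#include <stdint.h>
#include <simde/x86/sse2.h>

/* Return a vector whose 16 bytes all equal the minimum byte of v. */
static simde__m128i broadcast_min_epu8(simde__m128i v)
{
  uint8_t b[16];
  simde_mm_storeu_si128((simde__m128i *)b, v);
  uint8_t m = b[0];
  for (int i = 1; i < 16; i++)
    if (b[i] < m)
      m = b[i];
  return simde_mm_set1_epi8((char)m);
}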
diff --git a/openair1/PHY/CODING/viterbi_lte.c b/openair1/PHY/CODING/viterbi_lte.c
index ecb5f2271c3c9f4d09bb164af08a6842de0b0992..d3fd71bc13e504108ab26d3acc250aa07ba97f5a 100644
--- a/openair1/PHY/CODING/viterbi_lte.c
+++ b/openair1/PHY/CODING/viterbi_lte.c
@@ -97,7 +97,7 @@ void phy_generate_viterbi_tables_lte( void )
 //#define DEBUG_VITERBI
 
 #ifdef DEBUG_VITERBI
-void print_bytes(char *s,__m128i *x)
+void print_bytes(char *s, simde__m128i *x)
 {
 
   uint8_t *tempb = (uint8_t *)x;
@@ -109,7 +109,7 @@ void print_bytes(char *s,__m128i *x)
 }
 
 /*
-void print_shorts(__m128i x,char *s) {
+void print_shorts(simde__m128i x,char *s) {
 
   int16_t *tempb = (int16_t *)&x;
 
@@ -125,30 +125,14 @@ void print_shorts(__m128i x,char *s) {
 
 void phy_viterbi_lte_sse2(int8_t *y,uint8_t *decoded_bytes,uint16_t n)
 {
+  simde__m128i TB[4 * 8192];
+  simde__m128i *m0_ptr, *m1_ptr, *TB_ptr = &TB[0];
 
+  simde__m128i metrics0_15, metrics16_31, metrics32_47, metrics48_63, even0_30a, even0_30b, even32_62a, even32_62b, odd1_31a,
+      odd1_31b, odd33_63a, odd33_63b, TBeven0_30, TBeven32_62, TBodd1_31, TBodd33_63;
 
-#if defined(__x86_64__) || defined(__i386__)
-  __m128i  TB[4*8192];
-  __m128i *m0_ptr,*m1_ptr,*TB_ptr = &TB[0];
-  
-  __m128i metrics0_15,metrics16_31,metrics32_47,metrics48_63,even0_30a,even0_30b,even32_62a,even32_62b,odd1_31a,odd1_31b,odd33_63a,odd33_63b,TBeven0_30,TBeven32_62,TBodd1_31,
-    TBodd33_63;
-  
-  __m128i min_state,min_state2;
+  simde__m128i min_state, min_state2;
 
-#elif defined(__arm__) || defined(__aarch64__)
-  uint8x16x2_t TB[2*8192];  // 2 int8x16_t per input bit, 8 bits / byte, 8192 is largest packet size in bits
-
-  uint8x16_t even0_30a,even0_30b,even32_62a,even32_62b,odd1_31a,odd1_31b,odd33_63a,odd33_63b,TBeven0_30,TBeven32_62,TBodd1_31,TBodd33_63;
-  uint8x16x2_t metrics0_31,metrics32_63;
-
-  uint8x16_t min_state;
-
-  uint8x16_t *m0_ptr,*m1_ptr;
-  uint8x16x2_t *TB_ptr = &TB[0];
-
-
-#endif
   int8_t *in = y;
   uint8_t prev_state0,maxm,s;
   static uint8_t *TB_ptr2;
@@ -159,18 +143,10 @@ void phy_viterbi_lte_sse2(int8_t *y,uint8_t *decoded_bytes,uint16_t n)
   // set initial metrics
   //debug_msg("Doing viterbi\n");
 
-#if defined(__x86_64__) || defined(__i386__)
-
-  metrics0_15  = _mm_setzero_si128();
-  metrics16_31 = _mm_setzero_si128();
-  metrics32_47 = _mm_setzero_si128();
-  metrics48_63 = _mm_setzero_si128();
-#elif defined(__arm__) || defined(__aarch64__)
-    metrics0_31.val[0]  = vdupq_n_u8(0); 
-    metrics0_31.val[1]  = vdupq_n_u8(0);
-    metrics32_63.val[0] = vdupq_n_u8(0);
-    metrics32_63.val[1] = vdupq_n_u8(0);
-#endif
+  metrics0_15 = simde_mm_setzero_si128();
+  metrics16_31 = simde_mm_setzero_si128();
+  metrics32_47 = simde_mm_setzero_si128();
+  metrics48_63 = simde_mm_setzero_si128();
 
   for (iter=0; iter<2; iter++) {
     in = y;
@@ -182,47 +158,44 @@ void phy_viterbi_lte_sse2(int8_t *y,uint8_t *decoded_bytes,uint16_t n)
       // get branch metric offsets for the 64 states
       table_offset = (in[0]+8 + ((in[1]+8)<<4) + ((in[2]+8)<<8))<<6;
 
-#if defined(__x86_64__) || defined(__i386__)
-      m0_ptr = (__m128i *)&m0_table[table_offset];
-      m1_ptr = (__m128i *)&m1_table[table_offset];
+      m0_ptr = (simde__m128i *)&m0_table[table_offset];
+      m1_ptr = (simde__m128i *)&m1_table[table_offset];
 
       // even states
-      even0_30a  = _mm_adds_epu8(metrics0_15,m0_ptr[0]);
-      even32_62a = _mm_adds_epu8(metrics16_31,m0_ptr[1]);
-      even0_30b  = _mm_adds_epu8(metrics32_47,m0_ptr[2]);
-      even32_62b = _mm_adds_epu8(metrics48_63,m0_ptr[3]);
-
+      even0_30a = simde_mm_adds_epu8(metrics0_15, m0_ptr[0]);
+      even32_62a = simde_mm_adds_epu8(metrics16_31, m0_ptr[1]);
+      even0_30b = simde_mm_adds_epu8(metrics32_47, m0_ptr[2]);
+      even32_62b = simde_mm_adds_epu8(metrics48_63, m0_ptr[3]);
 
       // odd states
-      odd1_31a   = _mm_adds_epu8(metrics0_15,m1_ptr[0]);
-      odd33_63a  = _mm_adds_epu8(metrics16_31,m1_ptr[1]);
-      odd1_31b   = _mm_adds_epu8(metrics32_47,m1_ptr[2]);
-      odd33_63b  = _mm_adds_epu8(metrics48_63,m1_ptr[3]);
+      odd1_31a = simde_mm_adds_epu8(metrics0_15, m1_ptr[0]);
+      odd33_63a = simde_mm_adds_epu8(metrics16_31, m1_ptr[1]);
+      odd1_31b = simde_mm_adds_epu8(metrics32_47, m1_ptr[2]);
+      odd33_63b = simde_mm_adds_epu8(metrics48_63, m1_ptr[3]);
 
       // select maxima
 
-      even0_30a  = _mm_max_epu8(even0_30a,even0_30b);
-      even32_62a = _mm_max_epu8(even32_62a,even32_62b);
-      odd1_31a   = _mm_max_epu8(odd1_31a,odd1_31b);
-      odd33_63a  = _mm_max_epu8(odd33_63a,odd33_63b);
+      even0_30a = simde_mm_max_epu8(even0_30a, even0_30b);
+      even32_62a = simde_mm_max_epu8(even32_62a, even32_62b);
+      odd1_31a = simde_mm_max_epu8(odd1_31a, odd1_31b);
+      odd33_63a = simde_mm_max_epu8(odd33_63a, odd33_63b);
 
       // Traceback information
 
-      TBeven0_30  = _mm_cmpeq_epi8(even0_30a,even0_30b);
-      TBeven32_62 = _mm_cmpeq_epi8(even32_62a,even32_62b);
-      TBodd1_31   = _mm_cmpeq_epi8(odd1_31a,odd1_31b);
-      TBodd33_63  = _mm_cmpeq_epi8(odd33_63a,odd33_63b);
+      TBeven0_30 = simde_mm_cmpeq_epi8(even0_30a, even0_30b);
+      TBeven32_62 = simde_mm_cmpeq_epi8(even32_62a, even32_62b);
+      TBodd1_31 = simde_mm_cmpeq_epi8(odd1_31a, odd1_31b);
+      TBodd33_63 = simde_mm_cmpeq_epi8(odd33_63a, odd33_63b);
 
-      metrics0_15        = _mm_unpacklo_epi8(even0_30a ,odd1_31a);
-      metrics16_31       = _mm_unpackhi_epi8(even0_30a ,odd1_31a);
-      metrics32_47       = _mm_unpacklo_epi8(even32_62a,odd33_63a);
-      metrics48_63       = _mm_unpackhi_epi8(even32_62a,odd33_63a);
-
-      TB_ptr[0]  = _mm_unpacklo_epi8(TBeven0_30,TBodd1_31);
-      TB_ptr[1] = _mm_unpackhi_epi8(TBeven0_30,TBodd1_31);
-      TB_ptr[2] = _mm_unpacklo_epi8(TBeven32_62,TBodd33_63);
-      TB_ptr[3] = _mm_unpackhi_epi8(TBeven32_62,TBodd33_63);
+      metrics0_15 = simde_mm_unpacklo_epi8(even0_30a, odd1_31a);
+      metrics16_31 = simde_mm_unpackhi_epi8(even0_30a, odd1_31a);
+      metrics32_47 = simde_mm_unpacklo_epi8(even32_62a, odd33_63a);
+      metrics48_63 = simde_mm_unpackhi_epi8(even32_62a, odd33_63a);
 
+      TB_ptr[0] = simde_mm_unpacklo_epi8(TBeven0_30, TBodd1_31);
+      TB_ptr[1] = simde_mm_unpackhi_epi8(TBeven0_30, TBodd1_31);
+      TB_ptr[2] = simde_mm_unpacklo_epi8(TBeven32_62, TBodd33_63);
+      TB_ptr[3] = simde_mm_unpackhi_epi8(TBeven32_62, TBodd33_63);
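+      // cmpeq marks (per byte) whether the second add-compare-select candidate won; the interleaved masks drive the traceback.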
 
       in+=3;
       TB_ptr += 4;
@@ -231,94 +204,34 @@ void phy_viterbi_lte_sse2(int8_t *y,uint8_t *decoded_bytes,uint16_t n)
       /****************************************************
       Could use the SSE4.1 phminposuw instruction here
       ****************************************************/
-      min_state =_mm_min_epu8(metrics0_15,metrics16_31);
-      min_state =_mm_min_epu8(min_state,metrics32_47);
-      min_state =_mm_min_epu8(min_state,metrics48_63);
+      min_state = simde_mm_min_epu8(metrics0_15, metrics16_31);
+      min_state = simde_mm_min_epu8(min_state, metrics32_47);
+      min_state = simde_mm_min_epu8(min_state, metrics48_63);
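+      // The unpack/min ladder below reduces the 16 byte lanes to their minimum, replicated across the whole register.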
 
       min_state2 = min_state;
-      min_state  = _mm_unpacklo_epi8(min_state,min_state);
-      min_state2 = _mm_unpackhi_epi8(min_state2,min_state2);
-      min_state  = _mm_min_epu8(min_state,min_state2);
+      min_state = simde_mm_unpacklo_epi8(min_state, min_state);
+      min_state2 = simde_mm_unpackhi_epi8(min_state2, min_state2);
+      min_state = simde_mm_min_epu8(min_state, min_state2);
 
       min_state2 = min_state;
-      min_state  = _mm_unpacklo_epi8(min_state,min_state);
-      min_state2 = _mm_unpackhi_epi8(min_state2,min_state2);
-      min_state  = _mm_min_epu8(min_state,min_state2);
+      min_state = simde_mm_unpacklo_epi8(min_state, min_state);
+      min_state2 = simde_mm_unpackhi_epi8(min_state2, min_state2);
+      min_state = simde_mm_min_epu8(min_state, min_state2);
 
       min_state2 = min_state;
-      min_state  = _mm_unpacklo_epi8(min_state,min_state);
-      min_state2 = _mm_unpackhi_epi8(min_state2,min_state2);
-      min_state  = _mm_min_epu8(min_state,min_state2);
+      min_state = simde_mm_unpacklo_epi8(min_state, min_state);
+      min_state2 = simde_mm_unpackhi_epi8(min_state2, min_state2);
+      min_state = simde_mm_min_epu8(min_state, min_state2);
 
       min_state2 = min_state;
-      min_state  = _mm_unpacklo_epi8(min_state,min_state);
-      min_state2 = _mm_unpackhi_epi8(min_state2,min_state2);
-      min_state  = _mm_min_epu8(min_state,min_state2);
-
-      metrics0_15  = _mm_subs_epu8(metrics0_15,min_state);
-      metrics16_31 = _mm_subs_epu8(metrics16_31,min_state);
-      metrics32_47 = _mm_subs_epu8(metrics32_47,min_state);
-      metrics48_63 = _mm_subs_epu8(metrics48_63,min_state);
-#elif defined(__arm__) || defined(__aarch64__)
-    m0_ptr = (uint8x16_t *)&m0_table[table_offset];
-    m1_ptr = (uint8x16_t *)&m1_table[table_offset];
-
-
-    // even states
-    even0_30a  = vqaddq_u8(metrics0_31.val[0],m0_ptr[0]);
-    even32_62a = vqaddq_u8(metrics0_31.val[1],m0_ptr[1]);
-    even0_30b  = vqaddq_u8(metrics32_63.val[0],m0_ptr[2]);
-    even32_62b = vqaddq_u8(metrics32_63.val[1],m0_ptr[3]);
-
-    // odd states
-    odd1_31a   = vqaddq_u8(metrics0_31.val[0],m1_ptr[0]);
-    odd33_63a  = vqaddq_u8(metrics0_31.val[1],m1_ptr[1]);
-    odd1_31b   = vqaddq_u8(metrics32_63.val[0],m1_ptr[2]);
-    odd33_63b  = vqaddq_u8(metrics32_63.val[1],m1_ptr[3]);
-    // select maxima
-    even0_30a  = vmaxq_u8(even0_30a,even0_30b);
-    even32_62a = vmaxq_u8(even32_62a,even32_62b);
-    odd1_31a   = vmaxq_u8(odd1_31a,odd1_31b);
-    odd33_63a  = vmaxq_u8(odd33_63a,odd33_63b);
-
-    // Traceback information
-    TBeven0_30  = vceqq_u8(even0_30a,even0_30b);
-    TBeven32_62 = vceqq_u8(even32_62a,even32_62b);
-    TBodd1_31   = vceqq_u8(odd1_31a,odd1_31b);
-    TBodd33_63  = vceqq_u8(odd33_63a,odd33_63b);
-
-    metrics0_31  = vzipq_u8(even0_30a,odd1_31a);
-    metrics32_63 = vzipq_u8(even32_62a,odd33_63a);
-
-    TB_ptr[0] = vzipq_u8(TBeven0_30,TBodd1_31);
-    TB_ptr[1] = vzipq_u8(TBeven32_62,TBodd33_63);
-
-    in+=2;
-    TB_ptr += 2;
-
-    // rescale by subtracting minimum
-    /****************************************************
-    USE SSSE instruction phminpos!!!!!!!
-    ****************************************************/
-    min_state =vminq_u8(metrics0_31.val[0],metrics0_31.val[1]);
-    min_state =vminq_u8(min_state,metrics32_63.val[0]);
-    min_state =vminq_u8(min_state,metrics32_63.val[1]);
-    // here we have 16 maximum metrics from the 64 states
-    uint8x8_t min_state2 = vpmin_u8(((uint8x8_t*)&min_state)[0],((uint8x8_t*)&min_state)[0]);
-    // now the 8 maximum in min_state2
-    min_state2 = vpmin_u8(min_state2,min_state2);
-    // now the 4 maximum in min_state2, repeated twice
-    min_state2 = vpmin_u8(min_state2,min_state2);
-    // now the 2 maximum in min_state2, repeated 4 times
-    min_state2 = vpmin_u8(min_state2,min_state2);
-    // now the 1 maximum in min_state2, repeated 8 times
-    min_state  = vcombine_u8(min_state2,min_state2);
-    // now the 1 maximum in min_state, repeated 16 times
-    metrics0_31.val[0]  = vqsubq_u8(metrics0_31.val[0],min_state);
-    metrics0_31.val[1]  = vqsubq_u8(metrics0_31.val[1],min_state);
-    metrics32_63.val[0] = vqsubq_u8(metrics32_63.val[0],min_state);
-    metrics32_63.val[1] = vqsubq_u8(metrics32_63.val[1],min_state);
-#endif
+      min_state = simde_mm_unpacklo_epi8(min_state, min_state);
+      min_state2 = simde_mm_unpackhi_epi8(min_state2, min_state2);
+      min_state = simde_mm_min_epu8(min_state, min_state2);
+
+      metrics0_15 = simde_mm_subs_epu8(metrics0_15, min_state);
+      metrics16_31 = simde_mm_subs_epu8(metrics16_31, min_state);
+      metrics32_47 = simde_mm_subs_epu8(metrics32_47, min_state);
+      metrics48_63 = simde_mm_subs_epu8(metrics48_63, min_state);
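+      // Subtracting the common minimum rescales the path metrics so the saturating unsigned adds keep their dynamic range.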
     }
 
   } // iteration
@@ -327,7 +240,6 @@ void phy_viterbi_lte_sse2(int8_t *y,uint8_t *decoded_bytes,uint16_t n)
   prev_state0 = 0;
   maxm = 0;
 
-#if defined(__x86_64__) || defined(__i386__)
   for (s=0; s<16; s++)
     if (((uint8_t *)&metrics0_15)[s] > maxm) {
       maxm = ((uint8_t *)&metrics0_15)[s];
@@ -352,33 +264,6 @@ void phy_viterbi_lte_sse2(int8_t *y,uint8_t *decoded_bytes,uint16_t n)
       prev_state0 = s+48;
     }
 
-
-#elif defined(__arm__) || defined(__aarch64__)
-  for (s=0; s<16; s++)
-    if (((uint8_t *)&metrics0_31.val[0])[s] > maxm) {
-      maxm = ((uint8_t *)&metrics0_31.val[0])[s];
-      prev_state0 = s;
-    }
-
-  for (s=0; s<16; s++)
-    if (((uint8_t *)&metrics0_31.val[1])[s] > maxm) {
-      maxm = ((uint8_t *)&metrics0_31.val[1])[s];
-      prev_state0 = s+16;
-    }
-
-  for (s=0; s<16; s++)
-    if (((uint8_t *)&metrics32_63.val[0])[s] > maxm) {
-      maxm = ((uint8_t *)&metrics32_63.val[0])[s];
-      prev_state0 = s+32;
-    }
-
-  for (s=0; s<16; s++)
-    if (((uint8_t *)&metrics32_63.val[1])[s] > maxm) {
-      maxm = ((uint8_t *)&metrics32_63.val[1])[s];
-      prev_state0 = s+48;
-    }
-#endif
-
   TB_ptr2 = (uint8_t *)&TB[(n-1)*4];
 
   for (position = n-1 ; position>-1; position--) {
@@ -394,11 +279,8 @@ void phy_viterbi_lte_sse2(int8_t *y,uint8_t *decoded_bytes,uint16_t n)
     TB_ptr2-=64;
   }
 
-
-#if defined(__x86_64__) || defined(__i386__)
-  _mm_empty();
-  _m_empty();
-#endif
+  simde_mm_empty();
+  simde_m_empty();
 }
 
 #ifdef TEST_DEBUG
diff --git a/openair1/PHY/LTE_ESTIMATION/freq_equalization.c b/openair1/PHY/LTE_ESTIMATION/freq_equalization.c
index e864e1cfc7ccd7f6e721ae0b4f48763b13c3227a..2b42d68fd0a1c99b45c5b18a990904b0dabd649f 100644
--- a/openair1/PHY/LTE_ESTIMATION/freq_equalization.c
+++ b/openair1/PHY/LTE_ESTIMATION/freq_equalization.c
@@ -23,8 +23,8 @@
 #include "PHY/sse_intrin.h"
 #include "PHY/LTE_ESTIMATION/lte_estimation.h"
 
-// This is 4096/(1:4096) in __m128i format
-__m128i inv_ch[4096];/* = {512,512,512,512,512,512,512,512,
+// This is 4096/(1:4096) in simde__m128i format
+simde__m128i inv_ch[4096];/* = {512,512,512,512,512,512,512,512,
                          256,256,256,256,256,256,256,256,
                          170,170,170,170,170,170,170,170,
                          128,128,128,128,128,128,128,128,
@@ -283,7 +283,7 @@ __m128i inv_ch[4096];/* = {512,512,512,512,512,512,512,512,
                         };*/
 
 void init_fde() {
-  for (int i=1;i<4096;i++) inv_ch[i] = _mm_set1_epi16(4096/i);
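+  // Each entry broadcasts 4096/i (integer division) into all eight int16 lanes: a fixed-point reciprocal table.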
+  for (int i=1;i<4096;i++) inv_ch[i] = simde_mm_set1_epi16(4096/i);
 }
 
 void freq_equalization(LTE_DL_FRAME_PARMS *frame_parms,
@@ -296,17 +296,10 @@ void freq_equalization(LTE_DL_FRAME_PARMS *frame_parms,
 {
   uint16_t re;
   int16_t amp;
-#if defined(__x86_64__) || defined(__i386__)
-  __m128i *ul_ch_mag128,*ul_ch_magb128,*rxdataF_comp128;
-  rxdataF_comp128   = (__m128i *)&rxdataF_comp[0][symbol*frame_parms->N_RB_DL*12];
-  ul_ch_mag128      = (__m128i *)&ul_ch_mag[0][symbol*frame_parms->N_RB_DL*12];
-  ul_ch_magb128      = (__m128i *)&ul_ch_magb[0][symbol*frame_parms->N_RB_DL*12];
-#elif defined(__arm__) || defined(__aarch64__)
-  int16x8_t *ul_ch_mag128,*ul_ch_magb128,*rxdataF_comp128;
-  rxdataF_comp128   = (int16x8_t*)&rxdataF_comp[0][symbol*frame_parms->N_RB_DL*12];
-  ul_ch_mag128      = (int16x8_t*)&ul_ch_mag[0][symbol*frame_parms->N_RB_DL*12];
-  ul_ch_magb128     = (int16x8_t*)&ul_ch_magb[0][symbol*frame_parms->N_RB_DL*12];
-#endif
+  simde__m128i *ul_ch_mag128,*ul_ch_magb128,*rxdataF_comp128;
+  rxdataF_comp128   = (simde__m128i *)&rxdataF_comp[0][symbol*frame_parms->N_RB_DL*12];
+  ul_ch_mag128      = (simde__m128i *)&ul_ch_mag[0][symbol*frame_parms->N_RB_DL*12];
+  ul_ch_magb128      = (simde__m128i *)&ul_ch_magb[0][symbol*frame_parms->N_RB_DL*12];
 
   AssertFatal(symbol<frame_parms->symbols_per_tti,"symbol %d >= %d\n",
 	      symbol,frame_parms->symbols_per_tti);
@@ -320,26 +313,16 @@ void freq_equalization(LTE_DL_FRAME_PARMS *frame_parms,
     if (amp>4095)
       amp=4095;
 
-    //printf("freq_eq: symbol %d re %d => mag %d,amp %d,inv %d, prod %d (%d,%d)\n",symbol,re,*((int16_t*)(&ul_ch_mag128[re])),amp,_mm_extract_epi16(inv_ch[amp],0),(*((int16_t*)(&ul_ch_mag128[re]))*_mm_extract_epi16(inv_ch[amp],0))>>3,*(int16_t*)&(rxdataF_comp128[re]),*(1+(int16_t*)&(rxdataF_comp128[re])));
-#if defined(__x86_64__) || defined(__i386__)
-    rxdataF_comp128[re] = _mm_srai_epi16(_mm_mullo_epi16(rxdataF_comp128[re],inv_ch[amp]),3);
+    // printf("freq_eq: symbol %d re %d => mag %d,amp %d,inv %d, prod %d
+    // (%d,%d)\n",symbol,re,*((int16_t*)(&ul_ch_mag128[re])),amp,simde_mm_extract_epi16(inv_ch[amp],0),(*((int16_t*)(&ul_ch_mag128[re]))*simde_mm_extract_epi16(inv_ch[amp],0))>>3,*(int16_t*)&(rxdataF_comp128[re]),*(1+(int16_t*)&(rxdataF_comp128[re])));
+    rxdataF_comp128[re] = simde_mm_srai_epi16(simde_mm_mullo_epi16(rxdataF_comp128[re],inv_ch[amp]),3);
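+    // rxdataF * (4096/amp) >> 3 ~= rxdataF * 512 / amp, matching the 512-scaled magnitude references set below.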
 
     if (Qm==4)
-      ul_ch_mag128[re]  = _mm_set1_epi16(324);  // this is 512*2/sqrt(10)
+      ul_ch_mag128[re]  = simde_mm_set1_epi16(324);  // this is 512*2/sqrt(10)
     else {
-      ul_ch_mag128[re]  = _mm_set1_epi16(316);  // this is 512*4/sqrt(42)
-      ul_ch_magb128[re] = _mm_set1_epi16(158);  // this is 512*2/sqrt(42)
+      ul_ch_mag128[re]  = simde_mm_set1_epi16(316);  // this is 512*4/sqrt(42)
+      ul_ch_magb128[re] = simde_mm_set1_epi16(158);  // this is 512*2/sqrt(42)
     }
-#elif defined(__arm__) || defined(__aarch64__)
-    rxdataF_comp128[re] = vmulq_s16(rxdataF_comp128[re],inv_ch[amp]);
-
-    if (Qm==4)
-      ul_ch_mag128[re]  = vdupq_n_s16(324);  // this is 512*2/sqrt(10)
-    else {
-      ul_ch_mag128[re]  = vdupq_n_s16(316);  // this is 512*4/sqrt(42)
-      ul_ch_magb128[re] = vdupq_n_s16(158);  // this is 512*2/sqrt(42)
-    }
-#endif
     //            printf("(%d,%d)\n",*(int16_t*)&(rxdataF_comp128[re]),*(1+(int16_t*)&(rxdataF_comp128[re])));
 
   }
diff --git a/openair1/PHY/LTE_ESTIMATION/lte_est_freq_offset.c b/openair1/PHY/LTE_ESTIMATION/lte_est_freq_offset.c
index 20c6ae2c0d312428e3d3694749b5cfa64b55ee66..ce221d1fb4ed269c3821b4aba9bcae4067d33152 100644
--- a/openair1/PHY/LTE_ESTIMATION/lte_est_freq_offset.c
+++ b/openair1/PHY/LTE_ESTIMATION/lte_est_freq_offset.c
@@ -28,68 +28,30 @@
 #include "PHY/defs_eNB.h"
 //#define DEBUG_PHY
 
-#if defined(__x86_64__) || defined(__i386__)
-__m128i avg128F;
-#elif defined(__arm__) || defined(__aarch64__)
-int32x4_t avg128F;
-#endif
-
 //compute average channel_level on each (TX,RX) antenna pair
 int dl_channel_level(c16_t *dl_ch, LTE_DL_FRAME_PARMS *frame_parms)
 {
-
   int16_t rb;
-#if defined(__x86_64__) || defined(__i386__)
-  __m128i *dl_ch128;
-#elif defined(__arm__) || defined(__aarch64__)
-  int16x4_t *dl_ch128;
-#endif
+  simde__m128i *dl_ch128;
   int avg;
 
   //clear average level
-#if defined(__x86_64__) || defined(__i386__)
-  avg128F = _mm_setzero_si128();
-  dl_ch128=(__m128i *)dl_ch;
+  simde__m128i avg128F = simde_mm_setzero_si128();
+  dl_ch128 = (simde__m128i *)dl_ch;
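+  // simde_mm_madd_epi16(x,x) pairwise-sums re^2 + im^2, i.e. |h|^2 for four complex int16 samples per call.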
 
   for (rb=0; rb<frame_parms->N_RB_DL; rb++) {
-
-    avg128F = _mm_add_epi32(avg128F,_mm_madd_epi16(dl_ch128[0],dl_ch128[0]));
-    avg128F = _mm_add_epi32(avg128F,_mm_madd_epi16(dl_ch128[1],dl_ch128[1]));
-    avg128F = _mm_add_epi32(avg128F,_mm_madd_epi16(dl_ch128[2],dl_ch128[2]));
+    avg128F = simde_mm_add_epi32(avg128F, simde_mm_madd_epi16(dl_ch128[0], dl_ch128[0]));
+    avg128F = simde_mm_add_epi32(avg128F, simde_mm_madd_epi16(dl_ch128[1], dl_ch128[1]));
+    avg128F = simde_mm_add_epi32(avg128F, simde_mm_madd_epi16(dl_ch128[2], dl_ch128[2]));
 
     dl_ch128+=3;
-
   }
-#elif defined(__arm__) || defined(__aarch64__)
-  avg128F = vdupq_n_s32(0);
-  dl_ch128=(int16x4_t *)dl_ch;
-
-  for (rb=0; rb<frame_parms->N_RB_DL; rb++) {
-
-       avg128F = vqaddq_s32(avg128F,vmull_s16(dl_ch128[0],dl_ch128[0]));
-       avg128F = vqaddq_s32(avg128F,vmull_s16(dl_ch128[1],dl_ch128[1]));
-       avg128F = vqaddq_s32(avg128F,vmull_s16(dl_ch128[2],dl_ch128[2]));
-       avg128F = vqaddq_s32(avg128F,vmull_s16(dl_ch128[3],dl_ch128[3]));
-       avg128F = vqaddq_s32(avg128F,vmull_s16(dl_ch128[4],dl_ch128[4]));
-       avg128F = vqaddq_s32(avg128F,vmull_s16(dl_ch128[5],dl_ch128[5]));
-       dl_ch128+=6;
-
-
-  }
-
-
-#endif
   DevAssert( frame_parms->N_RB_DL );
   avg = (((int*)&avg128F)[0] +
          ((int*)&avg128F)[1] +
          ((int*)&avg128F)[2] +
          ((int*)&avg128F)[3])/(frame_parms->N_RB_DL*12);
 
-
-#if defined(__x86_64__) || defined(__i386__)
-  _mm_empty();
-  _m_empty();
-#endif
   return(avg);
 }
 
diff --git a/openair1/PHY/LTE_ESTIMATION/lte_sync_timefreq.c b/openair1/PHY/LTE_ESTIMATION/lte_sync_timefreq.c
index 780c3955cf92081a3b7cbca75e851a6fc98225ac..bfda00607f8beef42dbbc65164a5b3684ceb5611 100644
--- a/openair1/PHY/LTE_ESTIMATION/lte_sync_timefreq.c
+++ b/openair1/PHY/LTE_ESTIMATION/lte_sync_timefreq.c
@@ -40,27 +40,24 @@
 #include "PHY/phy_extern_ue.h"
 #include "PHY/phy_extern.h"
 
-#if defined(__x86_64__) || defined(__i386__)
 #include "pss6144.h"
-extern void print_shorts(char*,__m128i*);
-#endif
+extern void print_shorts(char *, simde__m128i *);
 
 void lte_sync_timefreq(PHY_VARS_UE *ue,int band,unsigned int DL_freq)
 {
-#if defined(__x86_64__) || defined(__i386__)
   UE_SCAN_INFO_t *scan_info = &ue->scan_info[band];
   int16_t spectrum[12288] __attribute__((aligned(32)));
   int16_t spectrum_p5ms[12288] __attribute__((aligned(32)));
   int i,f,band_idx;
-  __m128i autocorr0[256/4],autocorr1[256/4],autocorr2[256/4];
-  __m128i autocorr0_t[256/4],autocorr1_t[256/4],autocorr2_t[256/4];
-  __m128i tmp_t[256/4];
+  simde__m128i autocorr0[256 / 4], autocorr1[256 / 4], autocorr2[256 / 4];
+  simde__m128i autocorr0_t[256 / 4], autocorr1_t[256 / 4], autocorr2_t[256 / 4];
+  simde__m128i tmp_t[256 / 4];
   int32_t *rxp;
   int16_t *sp;
-  __m128i *sp2 = NULL;
-  __m128i s;
+  simde__m128i *sp2 = NULL;
+  simde__m128i s;
   int re,re256;
-  __m128i mmtmp00,mmtmp01,mmtmp02,mmtmp10,mmtmp11,mmtmp12;
+  simde__m128i mmtmp00, mmtmp01, mmtmp02, mmtmp10, mmtmp11, mmtmp12;
   int maxcorr[3],minamp,pos=0,pssind;
   int16_t *pss6144_0 = NULL, *pss6144_1 = NULL, *pss6144_2 = NULL;
 
@@ -104,28 +101,28 @@ void lte_sync_timefreq(PHY_VARS_UE *ue,int band,unsigned int DL_freq)
             pss6144_0 = &pss6144_0_0[0];
             pss6144_1 = &pss6144_1_0[0];
             pss6144_2 = &pss6144_2_0[0];
-            sp2 = (f<0) ? (__m128i*)&sp[12288+(f<<1)] : (__m128i*)&sp[(f<<1)];
+            sp2 = (f < 0) ? (simde__m128i *)&sp[12288 + (f << 1)] : (simde__m128i *)&sp[(f << 1)];
             break;
 
           case 1:
             pss6144_0 = &pss6144_0_1[0];
             pss6144_1 = &pss6144_1_1[0];
             pss6144_2 = &pss6144_2_1[0];
-            sp2 = (f<0) ? (__m128i*)&sp[12286+(f<<1)] : (__m128i*)&sp[-2+(f<<1)];
+            sp2 = (f < 0) ? (simde__m128i *)&sp[12286 + (f << 1)] : (simde__m128i *)&sp[-2 + (f << 1)];
             break;
 
           case 2:
             pss6144_0 = &pss6144_0_2[0];
             pss6144_1 = &pss6144_1_2[0];
             pss6144_2 = &pss6144_2_2[0];
-            sp2 = (f<0) ? (__m128i*)&sp[12284+(f<<1)] : (__m128i*)&sp[-4+(f<<1)];
+            sp2 = (f < 0) ? (simde__m128i *)&sp[12284 + (f << 1)] : (simde__m128i *)&sp[-4 + (f << 1)];
             break;
 
           case 3:
             pss6144_0 = &pss6144_0_3[0];
             pss6144_1 = &pss6144_1_3[0];
             pss6144_2 = &pss6144_2_3[0];
-            sp2 = (f<0) ? (__m128i*)&sp[12282+(f<<1)] : (__m128i*)&sp[-6+(f<<1)];
+            sp2 = (f < 0) ? (simde__m128i *)&sp[12282 + (f << 1)] : (simde__m128i *)&sp[-6 + (f << 1)];
             break;
           }
 
@@ -134,20 +131,23 @@ void lte_sync_timefreq(PHY_VARS_UE *ue,int band,unsigned int DL_freq)
           for (re = 0; re<256/4; re++) {  // loop over 256 points of upsampled PSS
             //      printf("f %d, re %d\n",f,re);
             s = sp2[re];
-            mmtmp00 = _mm_srai_epi32(_mm_madd_epi16(((__m128i*)pss6144_0)[re],s),15);
-            mmtmp01 = _mm_srai_epi32(_mm_madd_epi16(((__m128i*)pss6144_1)[re],s),15);
-            mmtmp02 = _mm_srai_epi32(_mm_madd_epi16(((__m128i*)pss6144_2)[re],s),15);
-
-            s = _mm_shufflelo_epi16(s,_MM_SHUFFLE(2,3,0,1));
-            s = _mm_shufflehi_epi16(s,_MM_SHUFFLE(2,3,0,1));
-            s = _mm_sign_epi16(s,*(__m128i*)&conjugate[0]);
-            mmtmp10 = _mm_srai_epi32(_mm_madd_epi16(((__m128i*)pss6144_0)[re],s),15);
-            mmtmp11 = _mm_srai_epi32(_mm_madd_epi16(((__m128i*)pss6144_1)[re],s),15);
-            mmtmp12 = _mm_srai_epi32(_mm_madd_epi16(((__m128i*)pss6144_2)[re],s),15);
-
-            autocorr0[re256] = _mm_packs_epi32(_mm_unpacklo_epi32(mmtmp00,mmtmp10),_mm_unpackhi_epi32(mmtmp00,mmtmp10));
-            autocorr1[re256] = _mm_packs_epi32(_mm_unpacklo_epi32(mmtmp01,mmtmp11),_mm_unpackhi_epi32(mmtmp01,mmtmp11));
-            autocorr2[re256] = _mm_packs_epi32(_mm_unpacklo_epi32(mmtmp02,mmtmp12),_mm_unpackhi_epi32(mmtmp02,mmtmp12));
+            mmtmp00 = simde_mm_srai_epi32(simde_mm_madd_epi16(((simde__m128i *)pss6144_0)[re], s), 15);
+            mmtmp01 = simde_mm_srai_epi32(simde_mm_madd_epi16(((simde__m128i *)pss6144_1)[re], s), 15);
+            mmtmp02 = simde_mm_srai_epi32(simde_mm_madd_epi16(((simde__m128i *)pss6144_2)[re], s), 15);
+
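+            // The madd above produced the real part of the PSS correlation; the re/im swap, conjugate sign mask and madd below produce the imaginary part.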
+            s = simde_mm_shufflelo_epi16(s, SIMDE_MM_SHUFFLE(2, 3, 0, 1));
+            s = simde_mm_shufflehi_epi16(s, SIMDE_MM_SHUFFLE(2, 3, 0, 1));
+            s = simde_mm_sign_epi16(s, *(simde__m128i *)&conjugate[0]);
+            mmtmp10 = simde_mm_srai_epi32(simde_mm_madd_epi16(((simde__m128i *)pss6144_0)[re], s), 15);
+            mmtmp11 = simde_mm_srai_epi32(simde_mm_madd_epi16(((simde__m128i *)pss6144_1)[re], s), 15);
+            mmtmp12 = simde_mm_srai_epi32(simde_mm_madd_epi16(((simde__m128i *)pss6144_2)[re], s), 15);
+
+            autocorr0[re256] =
+                simde_mm_packs_epi32(simde_mm_unpacklo_epi32(mmtmp00, mmtmp10), simde_mm_unpackhi_epi32(mmtmp00, mmtmp10));
+            autocorr1[re256] =
+                simde_mm_packs_epi32(simde_mm_unpacklo_epi32(mmtmp01, mmtmp11), simde_mm_unpackhi_epi32(mmtmp01, mmtmp11));
+            autocorr2[re256] =
+                simde_mm_packs_epi32(simde_mm_unpacklo_epi32(mmtmp02, mmtmp12), simde_mm_unpackhi_epi32(mmtmp02, mmtmp12));
 
             re256 = (re256+1)&0x3f;
           }
@@ -160,28 +160,28 @@ void lte_sync_timefreq(PHY_VARS_UE *ue,int band,unsigned int DL_freq)
             pss6144_0 = &pss6144_0_0[0];
             pss6144_1 = &pss6144_1_0[0];
             pss6144_2 = &pss6144_2_0[0];
-            sp2 = (__m128i*)&sp[12288+(f<<1)];
+            sp2 = (simde__m128i *)&sp[12288 + (f << 1)];
             break;
 
           case 1:
             pss6144_0 = &pss6144_0_1[0];
             pss6144_1 = &pss6144_1_1[0];
             pss6144_2 = &pss6144_2_1[0];
-            sp2 = (__m128i*)&sp[12286+(f<<1)];
+            sp2 = (simde__m128i *)&sp[12286 + (f << 1)];
             break;
 
           case 2:
             pss6144_0 = &pss6144_0_2[0];
             pss6144_1 = &pss6144_1_2[0];
             pss6144_2 = &pss6144_2_2[0];
-            sp2 = (__m128i*)&sp[12284+(f<<1)];
+            sp2 = (simde__m128i *)&sp[12284 + (f << 1)];
             break;
 
           case 3:
             pss6144_0 = &pss6144_0_3[0];
             pss6144_1 = &pss6144_1_3[0];
             pss6144_2 = &pss6144_2_3[0];
-            sp2 = (__m128i*)&sp[12282+(f<<1)];
+            sp2 = (simde__m128i *)&sp[12282 + (f << 1)];
             break;
           }
 
@@ -191,22 +191,25 @@ void lte_sync_timefreq(PHY_VARS_UE *ue,int band,unsigned int DL_freq)
             s = sp2[re];
             /*            printf("re %d, %p\n",re,&sp2[re]);
                   print_shorts("s",&s);
-                  print_shorts("pss",&((__m128i*)pss6144_0)[re]);*/
-
-            mmtmp00 = _mm_srai_epi32(_mm_madd_epi16(((__m128i*)pss6144_0)[re],s),15);
-            mmtmp01 = _mm_srai_epi32(_mm_madd_epi16(((__m128i*)pss6144_1)[re],s),15);
-            mmtmp02 = _mm_srai_epi32(_mm_madd_epi16(((__m128i*)pss6144_2)[re],s),15);
-
-            s = _mm_shufflelo_epi16(s,_MM_SHUFFLE(2,3,0,1));
-            s = _mm_shufflehi_epi16(s,_MM_SHUFFLE(2,3,0,1));
-            s = _mm_sign_epi16(s,*(__m128i*)&conjugate[0]);
-            mmtmp10 = _mm_srai_epi32(_mm_madd_epi16(((__m128i*)pss6144_0)[re],s),15);
-            mmtmp11 = _mm_srai_epi32(_mm_madd_epi16(((__m128i*)pss6144_1)[re],s),15);
-            mmtmp12 = _mm_srai_epi32(_mm_madd_epi16(((__m128i*)pss6144_2)[re],s),15);
-
-            autocorr0[re256] = _mm_packs_epi32(_mm_unpacklo_epi32(mmtmp00,mmtmp10),_mm_unpackhi_epi32(mmtmp00,mmtmp10));
-            autocorr1[re256] = _mm_packs_epi32(_mm_unpacklo_epi32(mmtmp01,mmtmp11),_mm_unpackhi_epi32(mmtmp01,mmtmp11));
-            autocorr2[re256] = _mm_packs_epi32(_mm_unpacklo_epi32(mmtmp02,mmtmp12),_mm_unpackhi_epi32(mmtmp02,mmtmp12));
+                  print_shorts("pss",&((simde__m128i*)pss6144_0)[re]);*/
+
+            mmtmp00 = simde_mm_srai_epi32(simde_mm_madd_epi16(((simde__m128i *)pss6144_0)[re], s), 15);
+            mmtmp01 = simde_mm_srai_epi32(simde_mm_madd_epi16(((simde__m128i *)pss6144_1)[re], s), 15);
+            mmtmp02 = simde_mm_srai_epi32(simde_mm_madd_epi16(((simde__m128i *)pss6144_2)[re], s), 15);
+
+            s = simde_mm_shufflelo_epi16(s, SIMDE_MM_SHUFFLE(2, 3, 0, 1));
+            s = simde_mm_shufflehi_epi16(s, SIMDE_MM_SHUFFLE(2, 3, 0, 1));
+            s = simde_mm_sign_epi16(s, *(simde__m128i *)&conjugate[0]);
+            mmtmp10 = simde_mm_srai_epi32(simde_mm_madd_epi16(((simde__m128i *)pss6144_0)[re], s), 15);
+            mmtmp11 = simde_mm_srai_epi32(simde_mm_madd_epi16(((simde__m128i *)pss6144_1)[re], s), 15);
+            mmtmp12 = simde_mm_srai_epi32(simde_mm_madd_epi16(((simde__m128i *)pss6144_2)[re], s), 15);
+
+            autocorr0[re256] =
+                simde_mm_packs_epi32(simde_mm_unpacklo_epi32(mmtmp00, mmtmp10), simde_mm_unpackhi_epi32(mmtmp00, mmtmp10));
+            autocorr1[re256] =
+                simde_mm_packs_epi32(simde_mm_unpacklo_epi32(mmtmp01, mmtmp11), simde_mm_unpackhi_epi32(mmtmp01, mmtmp11));
+            autocorr2[re256] =
+                simde_mm_packs_epi32(simde_mm_unpacklo_epi32(mmtmp02, mmtmp12), simde_mm_unpackhi_epi32(mmtmp02, mmtmp12));
 
             re256 = (re256+1)&0x3f;
           }
@@ -214,7 +217,7 @@ void lte_sync_timefreq(PHY_VARS_UE *ue,int band,unsigned int DL_freq)
           // This is the +ve frequencies
 
           // align filters to 128-bit
-          sp2 = (__m128i*)&sp[0];
+          sp2 = (simde__m128i *)&sp[0];
 
           switch (f&3) {
           case 0:
@@ -246,21 +249,24 @@ void lte_sync_timefreq(PHY_VARS_UE *ue,int band,unsigned int DL_freq)
             s = sp2[re];
             /*            printf("re %d %p\n",re,&sp2[re]);
                   print_shorts("s",&s);
-                  print_shorts("pss",&((__m128i*)pss6144_0)[re]);*/
-            mmtmp00 = _mm_srai_epi32(_mm_madd_epi16(((__m128i*)pss6144_0)[re],s),15);
-            mmtmp01 = _mm_srai_epi32(_mm_madd_epi16(((__m128i*)pss6144_1)[re],s),15);
-            mmtmp02 = _mm_srai_epi32(_mm_madd_epi16(((__m128i*)pss6144_2)[re],s),15);
-
-            s = _mm_shufflelo_epi16(s,_MM_SHUFFLE(2,3,0,1));
-            s = _mm_shufflehi_epi16(s,_MM_SHUFFLE(2,3,0,1));
-            s = _mm_sign_epi16(s,*(__m128i*)&conjugate[0]);
-            mmtmp10 = _mm_srai_epi32(_mm_madd_epi16(((__m128i*)pss6144_0)[re],s),15);
-            mmtmp11 = _mm_srai_epi32(_mm_madd_epi16(((__m128i*)pss6144_1)[re],s),15);
-            mmtmp12 = _mm_srai_epi32(_mm_madd_epi16(((__m128i*)pss6144_2)[re],s),15);
-
-            autocorr0[re256] = _mm_packs_epi32(_mm_unpacklo_epi32(mmtmp00,mmtmp10),_mm_unpackhi_epi32(mmtmp00,mmtmp10));
-            autocorr1[re256] = _mm_packs_epi32(_mm_unpacklo_epi32(mmtmp01,mmtmp11),_mm_unpackhi_epi32(mmtmp01,mmtmp11));
-            autocorr2[re256] = _mm_packs_epi32(_mm_unpacklo_epi32(mmtmp02,mmtmp12),_mm_unpackhi_epi32(mmtmp02,mmtmp12));
+                  print_shorts("pss",&((simde__m128i*)pss6144_0)[re]);*/
+            mmtmp00 = simde_mm_srai_epi32(simde_mm_madd_epi16(((simde__m128i *)pss6144_0)[re], s), 15);
+            mmtmp01 = simde_mm_srai_epi32(simde_mm_madd_epi16(((simde__m128i *)pss6144_1)[re], s), 15);
+            mmtmp02 = simde_mm_srai_epi32(simde_mm_madd_epi16(((simde__m128i *)pss6144_2)[re], s), 15);
+
+            s = simde_mm_shufflelo_epi16(s, SIMDE_MM_SHUFFLE(2, 3, 0, 1));
+            s = simde_mm_shufflehi_epi16(s, SIMDE_MM_SHUFFLE(2, 3, 0, 1));
+            s = simde_mm_sign_epi16(s, *(simde__m128i *)&conjugate[0]);
+            mmtmp10 = simde_mm_srai_epi32(simde_mm_madd_epi16(((simde__m128i *)pss6144_0)[re], s), 15);
+            mmtmp11 = simde_mm_srai_epi32(simde_mm_madd_epi16(((simde__m128i *)pss6144_1)[re], s), 15);
+            mmtmp12 = simde_mm_srai_epi32(simde_mm_madd_epi16(((simde__m128i *)pss6144_2)[re], s), 15);
+
+            autocorr0[re256] =
+                simde_mm_packs_epi32(simde_mm_unpacklo_epi32(mmtmp00, mmtmp10), simde_mm_unpackhi_epi32(mmtmp00, mmtmp10));
+            autocorr1[re256] =
+                simde_mm_packs_epi32(simde_mm_unpacklo_epi32(mmtmp01, mmtmp11), simde_mm_unpackhi_epi32(mmtmp01, mmtmp11));
+            autocorr2[re256] =
+                simde_mm_packs_epi32(simde_mm_unpacklo_epi32(mmtmp02, mmtmp12), simde_mm_unpackhi_epi32(mmtmp02, mmtmp12));
 
             re256 = (re256+1)&0x3f;
           }
@@ -284,18 +290,17 @@ void lte_sync_timefreq(PHY_VARS_UE *ue,int band,unsigned int DL_freq)
         memset((void*)autocorr2_t,0,256*4);
 
         for (re=0; re<(256/4); re++)
-          autocorr0_t[re] = _mm_add_epi32(autocorr0_t[re],_mm_madd_epi16(tmp_t[re],tmp_t[re]));
+          autocorr0_t[re] = simde_mm_add_epi32(autocorr0_t[re], simde_mm_madd_epi16(tmp_t[re], tmp_t[re]));
 
         idft(IDFT_256,(int16_t*)autocorr1,(int16_t*)tmp_t,1);
 
         for (re=0; re<(256/4); re++)
-          autocorr1_t[re] = _mm_add_epi32(autocorr1_t[re],_mm_madd_epi16(tmp_t[re],tmp_t[re]));
+          autocorr1_t[re] = simde_mm_add_epi32(autocorr1_t[re], simde_mm_madd_epi16(tmp_t[re], tmp_t[re]));
 
         idft(IDFT_256,(int16_t*)autocorr2,(int16_t*)tmp_t,1);
 
         for (re=0; re<(256/4); re++)
-          autocorr2_t[re] = _mm_add_epi32(autocorr2_t[re],_mm_madd_epi16(tmp_t[re],tmp_t[re]));
-
+          autocorr2_t[re] = simde_mm_add_epi32(autocorr2_t[re], simde_mm_madd_epi16(tmp_t[re], tmp_t[re]));
 
         //compute max correlation over time window
         maxcorr[0] = 0;
@@ -351,7 +356,4 @@ void lte_sync_timefreq(PHY_VARS_UE *ue,int band,unsigned int DL_freq)
 
   for (band_idx=0; band_idx<10; band_idx++)
     printf("pss 2: level %d dB, freq %u\n", dB_fixed(scan_info->amp[2][band_idx]),scan_info->freq_offset_Hz[2][band_idx]);
-
-#endif
 }
-
diff --git a/openair1/PHY/LTE_ESTIMATION/lte_ue_measurements.c b/openair1/PHY/LTE_ESTIMATION/lte_ue_measurements.c
index 68d7b87984344d62c272bb155122bfab171dcb0b..453b8a3e7954c2ce672fec9a64700a96421313bf 100644
--- a/openair1/PHY/LTE_ESTIMATION/lte_ue_measurements.c
+++ b/openair1/PHY/LTE_ESTIMATION/lte_ue_measurements.c
@@ -461,26 +461,26 @@ void conjch0_mult_ch1(int *ch0,
 {
   //This function is used to compute multiplications in Hhermitian * H matrix
   unsigned short rb;
-  __m128i *dl_ch0_128,*dl_ch1_128, *ch0conj_ch1_128, mmtmpD0,mmtmpD1,mmtmpD2,mmtmpD3;
+  simde__m128i *dl_ch0_128,*dl_ch1_128, *ch0conj_ch1_128, mmtmpD0,mmtmpD1,mmtmpD2,mmtmpD3;
 
-  dl_ch0_128 = (__m128i *)ch0;
-  dl_ch1_128 = (__m128i *)ch1;
+  dl_ch0_128 = (simde__m128i *)ch0;
+  dl_ch1_128 = (simde__m128i *)ch1;
 
-  ch0conj_ch1_128 = (__m128i *)ch0conj_ch1;
+  ch0conj_ch1_128 = (simde__m128i *)ch0conj_ch1;
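+  // conj(ch0) * ch1, four complex samples per 128-bit vector: madd yields the real part, shuffle/sign/madd the imaginary part.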
 
   for (rb=0; rb<3*nb_rb; rb++) {
 
-    mmtmpD0 = _mm_madd_epi16(dl_ch0_128[0],dl_ch1_128[0]);
-    mmtmpD1 = _mm_shufflelo_epi16(dl_ch0_128[0],_MM_SHUFFLE(2,3,0,1));
-    mmtmpD1 = _mm_shufflehi_epi16(mmtmpD1,_MM_SHUFFLE(2,3,0,1));
-    mmtmpD1 = _mm_sign_epi16(mmtmpD1,*(__m128i*)&conjugate[0]);
-    mmtmpD1 = _mm_madd_epi16(mmtmpD1,dl_ch1_128[0]);
-    mmtmpD0 = _mm_srai_epi32(mmtmpD0,output_shift0);
-    mmtmpD1 = _mm_srai_epi32(mmtmpD1,output_shift0);
-    mmtmpD2 = _mm_unpacklo_epi32(mmtmpD0,mmtmpD1);
-    mmtmpD3 = _mm_unpackhi_epi32(mmtmpD0,mmtmpD1);
+    mmtmpD0 = simde_mm_madd_epi16(dl_ch0_128[0],dl_ch1_128[0]);
+    mmtmpD1 = simde_mm_shufflelo_epi16(dl_ch0_128[0], SIMDE_MM_SHUFFLE(2,3,0,1));
+    mmtmpD1 = simde_mm_shufflehi_epi16(mmtmpD1, SIMDE_MM_SHUFFLE(2,3,0,1));
+    mmtmpD1 = simde_mm_sign_epi16(mmtmpD1,*(simde__m128i*)&conjugate[0]);
+    mmtmpD1 = simde_mm_madd_epi16(mmtmpD1,dl_ch1_128[0]);
+    mmtmpD0 = simde_mm_srai_epi32(mmtmpD0,output_shift0);
+    mmtmpD1 = simde_mm_srai_epi32(mmtmpD1,output_shift0);
+    mmtmpD2 = simde_mm_unpacklo_epi32(mmtmpD0,mmtmpD1);
+    mmtmpD3 = simde_mm_unpackhi_epi32(mmtmpD0,mmtmpD1);
 
-    ch0conj_ch1_128[0] = _mm_packs_epi32(mmtmpD2,mmtmpD3);
+    ch0conj_ch1_128[0] = simde_mm_packs_epi32(mmtmpD2,mmtmpD3);
 
 #ifdef DEBUG_RANK_EST
     printf("\n Computing conjugates \n");
@@ -493,8 +493,8 @@ void conjch0_mult_ch1(int *ch0,
     dl_ch1_128+=1;
     ch0conj_ch1_128+=1;
   }
-  _mm_empty();
-  _m_empty();
+  simde_mm_empty();
+  simde_m_empty();
 }
 
 void construct_HhH_elements(int *ch0conj_ch0, //00_00
@@ -512,29 +512,30 @@ void construct_HhH_elements(int *ch0conj_ch0, //00_00
                             unsigned short nb_rb)
 {
   unsigned short rb;
-  __m128i *ch0conj_ch0_128, *ch1conj_ch1_128, *ch2conj_ch2_128, *ch3conj_ch3_128;
-  __m128i *ch0conj_ch1_128, *ch1conj_ch0_128, *ch2conj_ch3_128, *ch3conj_ch2_128;
-  __m128i *after_mf_00_128, *after_mf_01_128, *after_mf_10_128, *after_mf_11_128;
-
-  ch0conj_ch0_128 = (__m128i *)ch0conj_ch0;
-  ch1conj_ch1_128 = (__m128i *)ch1conj_ch1;
-  ch2conj_ch2_128 = (__m128i *)ch2conj_ch2;
-  ch3conj_ch3_128 = (__m128i *)ch3conj_ch3;
-  ch0conj_ch1_128 = (__m128i *)ch0conj_ch1;
-  ch1conj_ch0_128 = (__m128i *)ch1conj_ch0;
-  ch2conj_ch3_128 = (__m128i *)ch2conj_ch3;
-  ch3conj_ch2_128 = (__m128i *)ch3conj_ch2;
-  after_mf_00_128 = (__m128i *)after_mf_00;
-  after_mf_01_128 = (__m128i *)after_mf_01;
-  after_mf_10_128 = (__m128i *)after_mf_10;
-  after_mf_11_128 = (__m128i *)after_mf_11;
+  simde__m128i *ch0conj_ch0_128, *ch1conj_ch1_128, *ch2conj_ch2_128, *ch3conj_ch3_128;
+  simde__m128i *ch0conj_ch1_128, *ch1conj_ch0_128, *ch2conj_ch3_128, *ch3conj_ch2_128;
+  simde__m128i *after_mf_00_128, *after_mf_01_128, *after_mf_10_128, *after_mf_11_128;
+
+  ch0conj_ch0_128 = (simde__m128i *)ch0conj_ch0;
+  ch1conj_ch1_128 = (simde__m128i *)ch1conj_ch1;
+  ch2conj_ch2_128 = (simde__m128i *)ch2conj_ch2;
+  ch3conj_ch3_128 = (simde__m128i *)ch3conj_ch3;
+  ch0conj_ch1_128 = (simde__m128i *)ch0conj_ch1;
+  ch1conj_ch0_128 = (simde__m128i *)ch1conj_ch0;
+  ch2conj_ch3_128 = (simde__m128i *)ch2conj_ch3;
+  ch3conj_ch2_128 = (simde__m128i *)ch3conj_ch2;
+  after_mf_00_128 = (simde__m128i *)after_mf_00;
+  after_mf_01_128 = (simde__m128i *)after_mf_01;
+  after_mf_10_128 = (simde__m128i *)after_mf_10;
+  after_mf_11_128 = (simde__m128i *)after_mf_11;
 
   for (rb=0; rb<3*nb_rb; rb++) {
-
-    after_mf_00_128[0] =_mm_adds_epi16(ch0conj_ch0_128[0],ch3conj_ch3_128[0]);// _mm_adds_epi32(ch0conj_ch0_128[0], ch3conj_ch3_128[0]); //00_00 + 10_10
-    after_mf_11_128[0] =_mm_adds_epi16(ch1conj_ch1_128[0], ch2conj_ch2_128[0]); //01_01 + 11_11
-    after_mf_01_128[0] =_mm_adds_epi16(ch0conj_ch1_128[0], ch2conj_ch3_128[0]);//00_01 + 10_11
-    after_mf_10_128[0] =_mm_adds_epi16(ch1conj_ch0_128[0], ch3conj_ch2_128[0]);//01_00 + 11_10
+    after_mf_00_128[0] =
+        simde_mm_adds_epi16(ch0conj_ch0_128[0],
+                            ch3conj_ch3_128[0]); // simde_mm_adds_epi32(ch0conj_ch0_128[0], ch3conj_ch3_128[0]); //00_00 + 10_10
+    after_mf_11_128[0] = simde_mm_adds_epi16(ch1conj_ch1_128[0], ch2conj_ch2_128[0]); //01_01 + 11_11
+    after_mf_01_128[0] = simde_mm_adds_epi16(ch0conj_ch1_128[0], ch2conj_ch3_128[0]); //00_01 + 10_11
+    after_mf_10_128[0] = simde_mm_adds_epi16(ch1conj_ch0_128[0], ch3conj_ch2_128[0]); //01_00 + 11_10
 
 #ifdef DEBUG_RANK_EST
     printf(" \n construct_HhH_elements \n");
@@ -566,8 +567,8 @@ void construct_HhH_elements(int *ch0conj_ch0, //00_00
     after_mf_10_128+=1;
     after_mf_11_128+=1;
   }
-  _mm_empty();
-  _m_empty();
+  simde_mm_empty();
+  simde_m_empty();
 }
 
 
@@ -576,14 +577,14 @@ void squared_matrix_element(int32_t *Hh_h_00,
                             unsigned short nb_rb)
 {
    unsigned short rb;
-  __m128i *Hh_h_00_128,*Hh_h_00_sq_128;
+  simde__m128i *Hh_h_00_128,*Hh_h_00_sq_128;
 
-  Hh_h_00_128 = (__m128i *)Hh_h_00;
-  Hh_h_00_sq_128 = (__m128i *)Hh_h_00_sq;
+  Hh_h_00_128 = (simde__m128i *)Hh_h_00;
+  Hh_h_00_sq_128 = (simde__m128i *)Hh_h_00_sq;
 
   for (rb=0; rb<3*nb_rb; rb++) {
 
-    Hh_h_00_sq_128[0] = _mm_madd_epi16(Hh_h_00_128[0],Hh_h_00_128[0]);
+    Hh_h_00_sq_128[0] = simde_mm_madd_epi16(Hh_h_00_128[0],Hh_h_00_128[0]);
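+    // madd(x,x) = re^2 + im^2: the squared magnitude of each complex matrix element.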
 
 #ifdef DEBUG_RANK_EST
     printf("\n Computing squared_matrix_element \n");
@@ -594,8 +595,8 @@ void squared_matrix_element(int32_t *Hh_h_00,
     Hh_h_00_sq_128+=1;
     Hh_h_00_128+=1;
   }
-  _mm_empty();
-  _m_empty();
+  simde_mm_empty();
+  simde_m_empty();
 }
 
 
@@ -609,23 +610,23 @@ void det_HhH(int32_t *after_mf_00,
 
 {
   unsigned short rb;
-  __m128i *after_mf_00_128,*after_mf_01_128, *after_mf_11_128, ad_re_128, bc_re_128;
+  simde__m128i *after_mf_00_128, *after_mf_01_128, *after_mf_11_128, ad_re_128, bc_re_128;
   //__m128i *after_mf_10_128; // the variable is only written, but leave it here for "symmetry" in the algorithm
-  __m128i *det_fin_128, det_128;
+  simde__m128i *det_fin_128, det_128;
 
-  after_mf_00_128 = (__m128i *)after_mf_00;
-  after_mf_01_128 = (__m128i *)after_mf_01;
+  after_mf_00_128 = (simde__m128i *)after_mf_00;
+  after_mf_01_128 = (simde__m128i *)after_mf_01;
   //after_mf_10_128 = (__m128i *)after_mf_10;
-  after_mf_11_128 = (__m128i *)after_mf_11;
+  after_mf_11_128 = (simde__m128i *)after_mf_11;
 
-  det_fin_128 = (__m128i *)det_fin;
+  det_fin_128 = (simde__m128i *)det_fin;
 
   for (rb=0; rb<3*nb_rb; rb++) {
 
-    ad_re_128 = _mm_madd_epi16(after_mf_00_128[0],after_mf_11_128[0]);
-    bc_re_128 = _mm_madd_epi16(after_mf_01_128[0],after_mf_01_128[0]);
-    det_128 = _mm_sub_epi32(ad_re_128, bc_re_128);
-    det_fin_128[0] = _mm_abs_epi32(det_128);
+    ad_re_128 = simde_mm_madd_epi16(after_mf_00_128[0],after_mf_11_128[0]);
+    bc_re_128 = simde_mm_madd_epi16(after_mf_01_128[0],after_mf_01_128[0]);
+    det_128 = simde_mm_sub_epi32(ad_re_128, bc_re_128);
+    det_fin_128[0] = simde_mm_abs_epi32(det_128);
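+    // 2x2 determinant of H^H*H: madd(a,d) minus |b|^2, with abs() so the result is a usable positive denominator.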
 
 #ifdef DEBUG_RANK_EST
     printf("\n Computing denominator \n");
@@ -636,7 +637,7 @@ void det_HhH(int32_t *after_mf_00,
     print_ints("ad_re_128:",(int32_t*)&ad_re_128);
     print_ints("bc_re_128:",(int32_t*)&bc_re_128);
     print_ints("det_fin_128:",(int32_t*)&det_fin_128[0]);
 #endif
 
     det_fin_128+=1;
     after_mf_00_128+=1;
@@ -644,8 +645,8 @@ void det_HhH(int32_t *after_mf_00,
     //after_mf_10_128+=1;
     after_mf_11_128+=1;
   }
-  _mm_empty();
-  _m_empty();
+  simde_mm_empty();
+  simde_m_empty();
 }
 
 void numer(int32_t *Hh_h_00_sq,
@@ -657,21 +658,21 @@ void numer(int32_t *Hh_h_00_sq,
 
 {
   unsigned short rb;
-  __m128i *h_h_00_sq_128, *h_h_01_sq_128, *h_h_10_sq_128, *h_h_11_sq_128;
-  __m128i *num_fin_128, sq_a_plus_sq_d_128, sq_b_plus_sq_c_128;
+  simde__m128i *h_h_00_sq_128, *h_h_01_sq_128, *h_h_10_sq_128, *h_h_11_sq_128;
+  simde__m128i *num_fin_128, sq_a_plus_sq_d_128, sq_b_plus_sq_c_128;
 
-  h_h_00_sq_128 = (__m128i *)Hh_h_00_sq;
-  h_h_01_sq_128 = (__m128i *)Hh_h_01_sq;
-  h_h_10_sq_128 = (__m128i *)Hh_h_10_sq;
-  h_h_11_sq_128 = (__m128i *)Hh_h_11_sq;
+  h_h_00_sq_128 = (simde__m128i *)Hh_h_00_sq;
+  h_h_01_sq_128 = (simde__m128i *)Hh_h_01_sq;
+  h_h_10_sq_128 = (simde__m128i *)Hh_h_10_sq;
+  h_h_11_sq_128 = (simde__m128i *)Hh_h_11_sq;
 
-  num_fin_128 = (__m128i *)num_fin;
+  num_fin_128 = (simde__m128i *)num_fin;
 
   for (rb=0; rb<3*nb_rb; rb++) {
 
-    sq_a_plus_sq_d_128 = _mm_add_epi32(h_h_00_sq_128[0],h_h_11_sq_128[0]);
-    sq_b_plus_sq_c_128 = _mm_add_epi32(h_h_01_sq_128[0],h_h_10_sq_128[0]);
-    num_fin_128[0] = _mm_add_epi32(sq_a_plus_sq_d_128, sq_b_plus_sq_c_128);
+    sq_a_plus_sq_d_128 = simde_mm_add_epi32(h_h_00_sq_128[0],h_h_11_sq_128[0]);
+    sq_b_plus_sq_c_128 = simde_mm_add_epi32(h_h_01_sq_128[0],h_h_10_sq_128[0]);
+    num_fin_128[0] = simde_mm_add_epi32(sq_a_plus_sq_d_128, sq_b_plus_sq_c_128);
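+    // Numerator: sum of the four squared matrix elements, (a^2 + d^2) + (b^2 + c^2).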
 
 #ifdef DEBUG_RANK_EST
     printf("\n Computing numerator \n");
@@ -690,8 +691,8 @@ void numer(int32_t *Hh_h_00_sq,
     h_h_10_sq_128+=1;
     h_h_11_sq_128+=1;
   }
-  _mm_empty();
-  _m_empty();
+  simde_mm_empty();
+  simde_m_empty();
 }
 
 void dlsch_channel_level_TM34_meas(int *ch00,
@@ -703,39 +704,38 @@ void dlsch_channel_level_TM34_meas(int *ch00,
                                    unsigned short nb_rb)
 {
 
-#if defined(__x86_64__)||defined(__i386__)
 
   short rb;
   unsigned char nre=12;
-  __m128i *ch00_128, *ch01_128, *ch10_128, *ch11_128;
-  __m128i avg_0_row0_128D, avg_1_row0_128D, avg_0_row1_128D, avg_1_row1_128D;
-  __m128i ch00_128_tmp, ch01_128_tmp, ch10_128_tmp, ch11_128_tmp;
+  simde__m128i *ch00_128, *ch01_128, *ch10_128, *ch11_128;
+  simde__m128i avg_0_row0_128D, avg_1_row0_128D, avg_0_row1_128D, avg_1_row1_128D;
+  simde__m128i ch00_128_tmp, ch01_128_tmp, ch10_128_tmp, ch11_128_tmp;
 
   avg_0[0] = 0;
   avg_0[1] = 0;
   avg_1[0] = 0;
   avg_1[1] = 0;
 
-  ch00_128 = (__m128i *)ch00;
-  ch01_128 = (__m128i *)ch01;
-  ch10_128 = (__m128i *)ch10;
-  ch11_128 = (__m128i *)ch11;
+  ch00_128 = (simde__m128i *)ch00;
+  ch01_128 = (simde__m128i *)ch01;
+  ch10_128 = (simde__m128i *)ch10;
+  ch11_128 = (simde__m128i *)ch11;
 
-  avg_0_row0_128D = _mm_setzero_si128();
-  avg_1_row0_128D = _mm_setzero_si128();
-  avg_0_row1_128D = _mm_setzero_si128();
-  avg_1_row1_128D = _mm_setzero_si128();
+  avg_0_row0_128D = simde_mm_setzero_si128();
+  avg_1_row0_128D = simde_mm_setzero_si128();
+  avg_0_row1_128D = simde_mm_setzero_si128();
+  avg_1_row1_128D = simde_mm_setzero_si128();
 
   for (rb=0; rb<3*nb_rb; rb++) {
-    ch00_128_tmp = _mm_load_si128(&ch00_128[0]);
-    ch01_128_tmp = _mm_load_si128(&ch01_128[0]);
-    ch10_128_tmp = _mm_load_si128(&ch10_128[0]);
-    ch11_128_tmp = _mm_load_si128(&ch11_128[0]);
+    ch00_128_tmp = simde_mm_load_si128(&ch00_128[0]);
+    ch01_128_tmp = simde_mm_load_si128(&ch01_128[0]);
+    ch10_128_tmp = simde_mm_load_si128(&ch10_128[0]);
+    ch11_128_tmp = simde_mm_load_si128(&ch11_128[0]);
 
-    avg_0_row0_128D = _mm_add_epi32(avg_0_row0_128D,_mm_madd_epi16(ch00_128_tmp,ch00_128_tmp));
-    avg_1_row0_128D = _mm_add_epi32(avg_1_row0_128D,_mm_madd_epi16(ch01_128_tmp,ch01_128_tmp));
-    avg_0_row1_128D = _mm_add_epi32(avg_0_row1_128D,_mm_madd_epi16(ch10_128_tmp,ch10_128_tmp));
-    avg_1_row1_128D = _mm_add_epi32(avg_1_row1_128D,_mm_madd_epi16(ch11_128_tmp,ch11_128_tmp));
+    avg_0_row0_128D = simde_mm_add_epi32(avg_0_row0_128D,simde_mm_madd_epi16(ch00_128_tmp,ch00_128_tmp));
+    avg_1_row0_128D = simde_mm_add_epi32(avg_1_row0_128D,simde_mm_madd_epi16(ch01_128_tmp,ch01_128_tmp));
+    avg_0_row1_128D = simde_mm_add_epi32(avg_0_row1_128D,simde_mm_madd_epi16(ch10_128_tmp,ch10_128_tmp));
+    avg_1_row1_128D = simde_mm_add_epi32(avg_1_row1_128D,simde_mm_madd_epi16(ch11_128_tmp,ch11_128_tmp));
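+    // Accumulate per-antenna-pair channel energy: madd(h,h) sums re^2 + im^2 over four complex samples at a time.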
 
     ch00_128+=1;
     ch01_128+=1;
@@ -768,12 +768,9 @@ void dlsch_channel_level_TM34_meas(int *ch00,
   avg_0[0] = min (avg_0[0], avg_1[0]);
   avg_1[0] = avg_0[0];
 
-  _mm_empty();
-  _m_empty();
-
-#elif defined(__arm__) || defined(__aarch64__)
+  simde_mm_empty();
+  simde_m_empty();
 
-#endif
 }
 
 uint8_t rank_estimation_tm3_tm4 (int *dl_ch_estimates_00, // please respect the order of channel estimates
@@ -1019,11 +1016,7 @@ void lte_ue_measurements(PHY_VARS_UE *ue,
   //int rx_power[NUMBER_OF_CONNECTED_eNB_MAX];
   int i;
   unsigned int limit,subband;
-#if defined(__x86_64__) || defined(__i386__)
-  __m128i *dl_ch0_128,*dl_ch1_128;
-#elif defined(__arm__) || defined(__aarch64__)
-  int16x8_t *dl_ch0_128, *dl_ch1_128;
-#endif
+  simde__m128i *dl_ch0_128, *dl_ch1_128;
   int *dl_ch0=NULL,*dl_ch1=NULL;
 
   LTE_DL_FRAME_PARMS *frame_parms = &ue->frame_parms;
@@ -1215,31 +1208,17 @@ void lte_ue_measurements(PHY_VARS_UE *ue,
         //printf("aarx=%d", aarx);
         // skip the first 4 RE due to interpolation filter length of 5 (not possible to skip 5 due to 128i alignment, must be multiple of 128bit)
 
-#if defined(__x86_64__) || defined(__i386__)
-       __m128i pmi128_re,pmi128_im,mmtmpPMI0,mmtmpPMI1 /* ,mmtmpPMI2,mmtmpPMI3 */ ;
-
-        dl_ch0_128    = (__m128i *)&ue->common_vars.common_vars_rx_data_per_thread[ue->current_thread_id[subframe]].dl_ch_estimates[eNB_id][aarx][4];
-        dl_ch1_128    = (__m128i *)&ue->common_vars.common_vars_rx_data_per_thread[ue->current_thread_id[subframe]].dl_ch_estimates[eNB_id][2+aarx][4];
-#elif defined(__arm__) || defined(__aarch64__)
-        int32x4_t pmi128_re,pmi128_im,mmtmpPMI0,mmtmpPMI1,mmtmpPMI0b,mmtmpPMI1b;
+        simde__m128i pmi128_re,pmi128_im,mmtmpPMI0,mmtmpPMI1 /* ,mmtmpPMI2,mmtmpPMI3 */ ;
 
-        dl_ch0_128    = (int16x8_t *)&ue->common_vars.common_vars_rx_data_per_thread[ue->current_thread_id[subframe]].dl_ch_estimates[eNB_id][aarx][4];
-        dl_ch1_128    = (int16x8_t *)&ue->common_vars.common_vars_rx_data_per_thread[ue->current_thread_id[subframe]].dl_ch_estimates[eNB_id][2+aarx][4];
-
-#endif
+        dl_ch0_128    = (simde__m128i *)&ue->common_vars.common_vars_rx_data_per_thread[ue->current_thread_id[subframe]].dl_ch_estimates[eNB_id][aarx][4];
+        dl_ch1_128    = (simde__m128i *)&ue->common_vars.common_vars_rx_data_per_thread[ue->current_thread_id[subframe]].dl_ch_estimates[eNB_id][2+aarx][4];
         for (subband=0; subband<nb_subbands; subband++) {
 
 
           // pmi
-#if defined(__x86_64__) || defined(__i386__)
-
-          pmi128_re = _mm_xor_si128(pmi128_re,pmi128_re);
-          pmi128_im = _mm_xor_si128(pmi128_im,pmi128_im);
-#elif defined(__arm__) || defined(__aarch64__)
 
-          pmi128_re = vdupq_n_s32(0);
-          pmi128_im = vdupq_n_s32(0);
-#endif
+          pmi128_re = simde_mm_xor_si128(pmi128_re,pmi128_re);
+          pmi128_im = simde_mm_xor_si128(pmi128_im,pmi128_im);
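+          // XOR-with-self zeroes the accumulators (equivalent to simde_mm_setzero_si128()).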
           // limit is the number of groups of 4 REs in a subband (12 = 4 RBs, 3 = 1 RB)
           // for 5 MHz channelization, there are 7 subbands, 6 of size 4 RBs and 1 of size 1 RB
           if ((N_RB_DL==6) || (subband<(nb_subbands-1)))
@@ -1249,61 +1228,49 @@ void lte_ue_measurements(PHY_VARS_UE *ue,
 
           for (i=0; i<limit; i++) {
 
-#if defined(__x86_64__) || defined(__i386__)
-              mmtmpPMI0 = _mm_xor_si128(mmtmpPMI0,mmtmpPMI0);
-              mmtmpPMI1 = _mm_xor_si128(mmtmpPMI1,mmtmpPMI1);
+              mmtmpPMI0 = simde_mm_xor_si128(mmtmpPMI0,mmtmpPMI0);
+              mmtmpPMI1 = simde_mm_xor_si128(mmtmpPMI1,mmtmpPMI1);
 
             // For each RE in subband perform ch0 * conj(ch1)
             // multiply by conjugated channel
                 //  print_ints("ch0",&dl_ch0_128[0]);
                 //  print_ints("ch1",&dl_ch1_128[0]);
 
-            mmtmpPMI0 = _mm_madd_epi16(dl_ch0_128[0],dl_ch1_128[0]);
+            mmtmpPMI0 = simde_mm_madd_epi16(dl_ch0_128[0],dl_ch1_128[0]);
                  //  print_ints("re",&mmtmpPMI0);
-            mmtmpPMI1 = _mm_shufflelo_epi16(dl_ch1_128[0],_MM_SHUFFLE(2,3,0,1));
+            mmtmpPMI1 = simde_mm_shufflelo_epi16(dl_ch1_128[0], SIMDE_MM_SHUFFLE(2,3,0,1));
               //  print_ints("_mm_shufflelo_epi16",&mmtmpPMI1);
-            mmtmpPMI1 = _mm_shufflehi_epi16(mmtmpPMI1,_MM_SHUFFLE(2,3,0,1));
+            mmtmpPMI1 = simde_mm_shufflehi_epi16(mmtmpPMI1, SIMDE_MM_SHUFFLE(2,3,0,1));
                 //  print_ints("_mm_shufflehi_epi16",&mmtmpPMI1);
-            mmtmpPMI1 = _mm_sign_epi16(mmtmpPMI1,*(__m128i*)&conjugate[0]);
+            mmtmpPMI1 = simde_mm_sign_epi16(mmtmpPMI1,*(simde__m128i*)&conjugate[0]);
                //  print_ints("_mm_sign_epi16",&mmtmpPMI1);
-            mmtmpPMI1 = _mm_madd_epi16(mmtmpPMI1,dl_ch0_128[0]);
+            mmtmpPMI1 = simde_mm_madd_epi16(mmtmpPMI1,dl_ch0_128[0]);
                //   print_ints("mm_madd_epi16",&mmtmpPMI1);
             // mmtmpPMI1 contains imag part of 4 consecutive outputs (32-bit)
-            pmi128_re = _mm_add_epi32(pmi128_re,mmtmpPMI0);
+            pmi128_re = simde_mm_add_epi32(pmi128_re,mmtmpPMI0);
              //   print_ints(" pmi128_re 0",&pmi128_re);
-            pmi128_im = _mm_add_epi32(pmi128_im,mmtmpPMI1);
+            pmi128_im = simde_mm_add_epi32(pmi128_im,mmtmpPMI1);
                //   print_ints(" pmi128_im 0 ",&pmi128_im);
 
-          /*  mmtmpPMI0 = _mm_xor_si128(mmtmpPMI0,mmtmpPMI0);
-            mmtmpPMI1 = _mm_xor_si128(mmtmpPMI1,mmtmpPMI1);
-
-            mmtmpPMI0 = _mm_madd_epi16(dl_ch0_128[1],dl_ch1_128[1]);
-                 //  print_ints("re",&mmtmpPMI0);
-            mmtmpPMI1 = _mm_shufflelo_epi16(dl_ch1_128[1],_MM_SHUFFLE(2,3,0,1));
-              //  print_ints("_mm_shufflelo_epi16",&mmtmpPMI1);
-            mmtmpPMI1 = _mm_shufflehi_epi16(mmtmpPMI1,_MM_SHUFFLE(2,3,0,1));
-                //  print_ints("_mm_shufflehi_epi16",&mmtmpPMI1);
-            mmtmpPMI1 = _mm_sign_epi16(mmtmpPMI1,*(__m128i*)&conjugate);
-               //  print_ints("_mm_sign_epi16",&mmtmpPMI1);
-            mmtmpPMI1 = _mm_madd_epi16(mmtmpPMI1,dl_ch0_128[1]);
-               //   print_ints("mm_madd_epi16",&mmtmpPMI1);
-            // mmtmpPMI1 contains imag part of 4 consecutive outputs (32-bit)
-            pmi128_re = _mm_add_epi32(pmi128_re,mmtmpPMI0);
-                //  print_ints(" pmi128_re 1",&pmi128_re);
-            pmi128_im = _mm_add_epi32(pmi128_im,mmtmpPMI1);
-            //print_ints(" pmi128_im 1 ",&pmi128_im);*/
+            /*  mmtmpPMI0 = simde_mm_xor_si128(mmtmpPMI0,mmtmpPMI0);
+              mmtmpPMI1 = simde_mm_xor_si128(mmtmpPMI1,mmtmpPMI1);
+
+              mmtmpPMI0 = simde_mm_madd_epi16(dl_ch0_128[1],dl_ch1_128[1]);
+                   //  print_ints("re",&mmtmpPMI0);
+              mmtmpPMI1 = simde_mm_shufflelo_epi16(dl_ch1_128[1], SIMDE_MM_SHUFFLE(2,3,0,1));
+                //  print_ints("_mm_shufflelo_epi16",&mmtmpPMI1);
+              mmtmpPMI1 = simde_mm_shufflehi_epi16(mmtmpPMI1, SIMDE_MM_SHUFFLE(2,3,0,1));
+                  //  print_ints("_mm_shufflehi_epi16",&mmtmpPMI1);
+              mmtmpPMI1 = simde_mm_sign_epi16(mmtmpPMI1,*(simde__m128i*)&conjugate);
+                 //  print_ints("_mm_sign_epi16",&mmtmpPMI1);
+              mmtmpPMI1 = simde_mm_madd_epi16(mmtmpPMI1,dl_ch0_128[1]);
+                 //   print_ints("mm_madd_epi16",&mmtmpPMI1);
+              // mmtmpPMI1 contains imag part of 4 consecutive outputs (32-bit)
+              pmi128_re = simde_mm_add_epi32(pmi128_re,mmtmpPMI0);
+                  //  print_ints(" pmi128_re 1",&pmi128_re);
+              pmi128_im = simde_mm_add_epi32(pmi128_im,mmtmpPMI1);
+              //print_ints(" pmi128_im 1 ",&pmi128_im);*/
 
-#elif defined(__arm__) || defined(__aarch64__)
-
-            mmtmpPMI0 = vmull_s16(((int16x4_t*)dl_ch0_128)[0], ((int16x4_t*)dl_ch1_128)[0]);
-            mmtmpPMI1 = vmull_s16(((int16x4_t*)dl_ch0_128)[1], ((int16x4_t*)dl_ch1_128)[1]);
-            pmi128_re = vqaddq_s32(pmi128_re,vcombine_s32(vpadd_s32(vget_low_s32(mmtmpPMI0),vget_high_s32(mmtmpPMI0)),vpadd_s32(vget_low_s32(mmtmpPMI1),vget_high_s32(mmtmpPMI1))));
-
-            mmtmpPMI0b = vmull_s16(vrev32_s16(vmul_s16(((int16x4_t*)dl_ch0_128)[0],*(int16x4_t*)conjugate)), ((int16x4_t*)dl_ch1_128)[0]);
-            mmtmpPMI1b = vmull_s16(vrev32_s16(vmul_s16(((int16x4_t*)dl_ch0_128)[1],*(int16x4_t*)conjugate)), ((int16x4_t*)dl_ch1_128)[1]);
-            pmi128_im = vqaddq_s32(pmi128_im,vcombine_s32(vpadd_s32(vget_low_s32(mmtmpPMI0b),vget_high_s32(mmtmpPMI0b)),vpadd_s32(vget_low_s32(mmtmpPMI1b),vget_high_s32(mmtmpPMI1b))));
-
-#endif
             dl_ch0_128++;
             dl_ch1_128++;
           }
@@ -1372,10 +1339,8 @@ void lte_ue_measurements(PHY_VARS_UE *ue,
     // printf("in lte_ue_measurements: selected rx_antenna[eNB_id==0]:%u\n", ue->measurements.selected_rx_antennas[eNB_id][i]);
   }  // eNB_id loop
 
-#if defined(__x86_64__) || defined(__i386__)
-  _mm_empty();
-  _m_empty();
-#endif
+  simde_mm_empty();
+  simde_m_empty();
 }
 
 
diff --git a/openair1/PHY/LTE_ESTIMATION/lte_ul_channel_estimation.c b/openair1/PHY/LTE_ESTIMATION/lte_ul_channel_estimation.c
index 04852c99617a3b845e38d8f7099774798ee279ac..e081a9d8692d3ba0ea076e2f109669185d7b6cbe 100644
--- a/openair1/PHY/LTE_ESTIMATION/lte_ul_channel_estimation.c
+++ b/openair1/PHY/LTE_ESTIMATION/lte_ul_channel_estimation.c
@@ -98,15 +98,10 @@ int32_t lte_ul_channel_estimation(LTE_DL_FRAME_PARMS *frame_parms,
   uint32_t v=frame_parms->pusch_config_common.ul_ReferenceSignalsPUSCH.seqhop[Ns+(subframe<<1)];
   int symbol_offset,i;
   //debug_msg("lte_ul_channel_estimation: cyclic shift %d\n",cyclicShift);
   const int16_t alpha_re[12] = {32767, 28377, 16383, 0, -16384, -28378, -32768, -28378, -16384, -1, 16383, 28377};
   const int16_t alpha_im[12] = {0, 16383, 28377, 32767, 28377, 16383, 0, -16384, -28378, -32768, -28378, -16384};
-#if defined(__x86_64__) || defined(__i386__)
-  __m128i *rxdataF128,*ul_ref128,*ul_ch128;
-  __m128i mmtmpU0,mmtmpU1,mmtmpU2,mmtmpU3;
-#elif defined(__arm__) || defined(__aarch64__)
-  int16x8_t *rxdataF128,*ul_ref128,*ul_ch128;
-  int32x4_t mmtmp0,mmtmp1,mmtmp_re,mmtmp_im;
-#endif
+  simde__m128i *rxdataF128, *ul_ref128, *ul_ch128;
+  simde__m128i mmtmpU0, mmtmpU1, mmtmpU2, mmtmpU3;
   int32_t temp_in_ifft_0[2048*2] __attribute__((aligned(32)));
 
   if (ulsch->ue_type > 0) harq_pid = 0;
@@ -142,96 +137,51 @@ int32_t lte_ul_channel_estimation(LTE_DL_FRAME_PARMS *frame_parms,
   if (l == (3 - frame_parms->Ncp)) {
     symbol_offset = frame_parms->N_RB_UL*12*(l+((7-frame_parms->Ncp)*(Ns&1)));
 
-    for (aa=0; aa<nb_antennas_rx; aa++) {
-#if defined(__x86_64__) || defined(__i386__)
-      rxdataF128 = (__m128i *)&rxdataF_ext[aa][symbol_offset];
-      ul_ch128   = (__m128i *)&ul_ch_estimates[aa][symbol_offset];
-      ul_ref128  = (__m128i *)ul_ref_sigs_rx[u][v][Msc_RS_idx];
-#elif defined(__arm__) || defined(__aarch64__)
-      rxdataF128 = (int16x8_t *)&rxdataF_ext[aa][symbol_offset];
-      ul_ch128   = (int16x8_t *)&ul_ch_estimates[aa][symbol_offset];
-      ul_ref128  = (int16x8_t *)ul_ref_sigs_rx[u][v][Msc_RS_idx];
-#endif
+    for (aa = 0; aa < nb_antennas_rx; aa++) {
+      rxdataF128 = (simde__m128i *)&rxdataF_ext[aa][symbol_offset];
+      ul_ch128 = (simde__m128i *)&ul_ch_estimates[aa][symbol_offset];
+      ul_ref128 = (simde__m128i *)ul_ref_sigs_rx[u][v][Msc_RS_idx];
 
-      for (i=0; i<Msc_RS/12; i++) {
-#if defined(__x86_64__) || defined(__i386__)
+      for (i = 0; i < Msc_RS / 12; i++) {
         // multiply by conjugated channel
-        mmtmpU0 = _mm_madd_epi16(ul_ref128[0],rxdataF128[0]);
+        mmtmpU0 = simde_mm_madd_epi16(ul_ref128[0], rxdataF128[0]);
         // mmtmpU0 contains real part of 4 consecutive outputs (32-bit)
-        mmtmpU1 = _mm_shufflelo_epi16(ul_ref128[0],_MM_SHUFFLE(2,3,0,1));
-        mmtmpU1 = _mm_shufflehi_epi16(mmtmpU1,_MM_SHUFFLE(2,3,0,1));
-        mmtmpU1 = _mm_sign_epi16(mmtmpU1,*(__m128i *)&conjugate[0]);
-        mmtmpU1 = _mm_madd_epi16(mmtmpU1,rxdataF128[0]);
+        mmtmpU1 = simde_mm_shufflelo_epi16(ul_ref128[0], SIMDE_MM_SHUFFLE(2, 3, 0, 1));
+        mmtmpU1 = simde_mm_shufflehi_epi16(mmtmpU1, SIMDE_MM_SHUFFLE(2, 3, 0, 1));
+        mmtmpU1 = simde_mm_sign_epi16(mmtmpU1, *(simde__m128i *)&conjugate[0]);
+        mmtmpU1 = simde_mm_madd_epi16(mmtmpU1, rxdataF128[0]);
         // mmtmpU1 contains imag part of 4 consecutive outputs (32-bit)
-        mmtmpU0 = _mm_srai_epi32(mmtmpU0,15);
-        mmtmpU1 = _mm_srai_epi32(mmtmpU1,15);
-        mmtmpU2 = _mm_unpacklo_epi32(mmtmpU0,mmtmpU1);
-        mmtmpU3 = _mm_unpackhi_epi32(mmtmpU0,mmtmpU1);
-        ul_ch128[0] = _mm_packs_epi32(mmtmpU2,mmtmpU3);
+        mmtmpU0 = simde_mm_srai_epi32(mmtmpU0, 15);
+        mmtmpU1 = simde_mm_srai_epi32(mmtmpU1, 15);
+        mmtmpU2 = simde_mm_unpacklo_epi32(mmtmpU0, mmtmpU1);
+        mmtmpU3 = simde_mm_unpackhi_epi32(mmtmpU0, mmtmpU1);
+        ul_ch128[0] = simde_mm_packs_epi32(mmtmpU2, mmtmpU3);
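+        // >>15 removes the Q15 scaling of the reference sequence; unpack + pack re-interleaves real/imag into int16 pairs.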
         //  printf("rb %d ch: %d %d\n",i,((int16_t*)ul_ch128)[0],((int16_t*)ul_ch128)[1]);
         // multiply by conjugated channel
-        mmtmpU0 = _mm_madd_epi16(ul_ref128[1],rxdataF128[1]);
+        mmtmpU0 = simde_mm_madd_epi16(ul_ref128[1], rxdataF128[1]);
         // mmtmpU0 contains real part of 4 consecutive outputs (32-bit)
-        mmtmpU1 = _mm_shufflelo_epi16(ul_ref128[1],_MM_SHUFFLE(2,3,0,1));
-        mmtmpU1 = _mm_shufflehi_epi16(mmtmpU1,_MM_SHUFFLE(2,3,0,1));
-        mmtmpU1 = _mm_sign_epi16(mmtmpU1,*(__m128i *)conjugate);
-        mmtmpU1 = _mm_madd_epi16(mmtmpU1,rxdataF128[1]);
+        mmtmpU1 = simde_mm_shufflelo_epi16(ul_ref128[1], SIMDE_MM_SHUFFLE(2, 3, 0, 1));
+        mmtmpU1 = simde_mm_shufflehi_epi16(mmtmpU1, SIMDE_MM_SHUFFLE(2, 3, 0, 1));
+        mmtmpU1 = simde_mm_sign_epi16(mmtmpU1, *(simde__m128i *)conjugate);
+        mmtmpU1 = simde_mm_madd_epi16(mmtmpU1, rxdataF128[1]);
         // mmtmpU1 contains imag part of 4 consecutive outputs (32-bit)
-        mmtmpU0 = _mm_srai_epi32(mmtmpU0,15);
-        mmtmpU1 = _mm_srai_epi32(mmtmpU1,15);
-        mmtmpU2 = _mm_unpacklo_epi32(mmtmpU0,mmtmpU1);
-        mmtmpU3 = _mm_unpackhi_epi32(mmtmpU0,mmtmpU1);
-        ul_ch128[1] = _mm_packs_epi32(mmtmpU2,mmtmpU3);
-        mmtmpU0 = _mm_madd_epi16(ul_ref128[2],rxdataF128[2]);
+        mmtmpU0 = simde_mm_srai_epi32(mmtmpU0, 15);
+        mmtmpU1 = simde_mm_srai_epi32(mmtmpU1, 15);
+        mmtmpU2 = simde_mm_unpacklo_epi32(mmtmpU0, mmtmpU1);
+        mmtmpU3 = simde_mm_unpackhi_epi32(mmtmpU0, mmtmpU1);
+        ul_ch128[1] = simde_mm_packs_epi32(mmtmpU2, mmtmpU3);
+        mmtmpU0 = simde_mm_madd_epi16(ul_ref128[2], rxdataF128[2]);
         // mmtmpU0 contains real part of 4 consecutive outputs (32-bit)
-        mmtmpU1 = _mm_shufflelo_epi16(ul_ref128[2],_MM_SHUFFLE(2,3,0,1));
-        mmtmpU1 = _mm_shufflehi_epi16(mmtmpU1,_MM_SHUFFLE(2,3,0,1));
-        mmtmpU1 = _mm_sign_epi16(mmtmpU1,*(__m128i *)conjugate);
-        mmtmpU1 = _mm_madd_epi16(mmtmpU1,rxdataF128[2]);
+        mmtmpU1 = simde_mm_shufflelo_epi16(ul_ref128[2], SIMDE_MM_SHUFFLE(2, 3, 0, 1));
+        mmtmpU1 = simde_mm_shufflehi_epi16(mmtmpU1, SIMDE_MM_SHUFFLE(2, 3, 0, 1));
+        mmtmpU1 = simde_mm_sign_epi16(mmtmpU1, *(simde__m128i *)conjugate);
+        mmtmpU1 = simde_mm_madd_epi16(mmtmpU1, rxdataF128[2]);
         // mmtmpU1 contains imag part of 4 consecutive outputs (32-bit)
-        mmtmpU0 = _mm_srai_epi32(mmtmpU0,15);
-        mmtmpU1 = _mm_srai_epi32(mmtmpU1,15);
-        mmtmpU2 = _mm_unpacklo_epi32(mmtmpU0,mmtmpU1);
-        mmtmpU3 = _mm_unpackhi_epi32(mmtmpU0,mmtmpU1);
-        ul_ch128[2] = _mm_packs_epi32(mmtmpU2,mmtmpU3);
-#elif defined(__arm__) || defined(__aarch64__)
-        mmtmp0 = vmull_s16(((int16x4_t *)ul_ref128)[0],((int16x4_t *)rxdataF128)[0]);
-        mmtmp1 = vmull_s16(((int16x4_t *)ul_ref128)[1],((int16x4_t *)rxdataF128)[1]);
-        mmtmp_re = vcombine_s32(vpadd_s32(vget_low_s32(mmtmp0),vget_high_s32(mmtmp0)),
-                                vpadd_s32(vget_low_s32(mmtmp1),vget_high_s32(mmtmp1)));
-        mmtmp0 = vmull_s16(vrev32_s16(vmul_s16(((int16x4_t *)ul_ref128)[0],*(int16x4_t *)conjugate)), ((int16x4_t *)rxdataF128)[0]);
-        mmtmp1 = vmull_s16(vrev32_s16(vmul_s16(((int16x4_t *)ul_ref128)[1],*(int16x4_t *)conjugate)), ((int16x4_t *)rxdataF128)[1]);
-        mmtmp_im = vcombine_s32(vpadd_s32(vget_low_s32(mmtmp0),vget_high_s32(mmtmp0)),
-                                vpadd_s32(vget_low_s32(mmtmp1),vget_high_s32(mmtmp1)));
-        ul_ch128[0] = vcombine_s16(vmovn_s32(mmtmp_re),vmovn_s32(mmtmp_im));
-        ul_ch128++;
-        ul_ref128++;
-        rxdataF128++;
-        mmtmp0 = vmull_s16(((int16x4_t *)ul_ref128)[0],((int16x4_t *)rxdataF128)[0]);
-        mmtmp1 = vmull_s16(((int16x4_t *)ul_ref128)[1],((int16x4_t *)rxdataF128)[1]);
-        mmtmp_re = vcombine_s32(vpadd_s32(vget_low_s32(mmtmp0),vget_high_s32(mmtmp0)),
-                                vpadd_s32(vget_low_s32(mmtmp1),vget_high_s32(mmtmp1)));
-        mmtmp0 = vmull_s16(vrev32_s16(vmul_s16(((int16x4_t *)ul_ref128)[0],*(int16x4_t *)conjugate)), ((int16x4_t *)rxdataF128)[0]);
-        mmtmp1 = vmull_s16(vrev32_s16(vmul_s16(((int16x4_t *)ul_ref128)[1],*(int16x4_t *)conjugate)), ((int16x4_t *)rxdataF128)[1]);
-        mmtmp_im = vcombine_s32(vpadd_s32(vget_low_s32(mmtmp0),vget_high_s32(mmtmp0)),
-                                vpadd_s32(vget_low_s32(mmtmp1),vget_high_s32(mmtmp1)));
-        ul_ch128[0] = vcombine_s16(vmovn_s32(mmtmp_re),vmovn_s32(mmtmp_im));
-        ul_ch128++;
-        ul_ref128++;
-        rxdataF128++;
-        mmtmp0 = vmull_s16(((int16x4_t *)ul_ref128)[0],((int16x4_t *)rxdataF128)[0]);
-        mmtmp1 = vmull_s16(((int16x4_t *)ul_ref128)[1],((int16x4_t *)rxdataF128)[1]);
-        mmtmp_re = vcombine_s32(vpadd_s32(vget_low_s32(mmtmp0),vget_high_s32(mmtmp0)),
-                                vpadd_s32(vget_low_s32(mmtmp1),vget_high_s32(mmtmp1)));
-        mmtmp0 = vmull_s16(vrev32_s16(vmul_s16(((int16x4_t *)ul_ref128)[0],*(int16x4_t *)conjugate)), ((int16x4_t *)rxdataF128)[0]);
-        mmtmp1 = vmull_s16(vrev32_s16(vmul_s16(((int16x4_t *)ul_ref128)[1],*(int16x4_t *)conjugate)), ((int16x4_t *)rxdataF128)[1]);
-        mmtmp_im = vcombine_s32(vpadd_s32(vget_low_s32(mmtmp0),vget_high_s32(mmtmp0)),
-                                vpadd_s32(vget_low_s32(mmtmp1),vget_high_s32(mmtmp1)));
-        ul_ch128[0] = vcombine_s16(vmovn_s32(mmtmp_re),vmovn_s32(mmtmp_im));
-        ul_ch128++;
-        ul_ref128++;
-        rxdataF128++;
-#endif
+        mmtmpU0 = simde_mm_srai_epi32(mmtmpU0, 15);
+        mmtmpU1 = simde_mm_srai_epi32(mmtmpU1, 15);
+        mmtmpU2 = simde_mm_unpacklo_epi32(mmtmpU0, mmtmpU1);
+        mmtmpU3 = simde_mm_unpackhi_epi32(mmtmpU0, mmtmpU1);
+        ul_ch128[2] = simde_mm_packs_epi32(mmtmpU2, mmtmpU3);
         ul_ch128+=3;
         ul_ref128+=3;
         rxdataF128+=3;
@@ -401,7 +351,7 @@ int32_t lte_ul_channel_estimation(LTE_DL_FRAME_PARMS *frame_parms,
         //    multadd_complex_vector_real_scalar((int16_t*) ul_ch1,SCALE,(int16_t*) ul_ch1,1,Msc_RS);
         //    multadd_complex_vector_real_scalar((int16_t*) ul_ch2,SCALE,(int16_t*) ul_ch2,1,Msc_RS);
       } //if (Ns&1)
-    } //for(aa=...
+    } // for(aa=...
   } //if(l==...
 
   return(0);
@@ -434,15 +384,10 @@ int32_t lte_ul_channel_estimation_RRU(LTE_DL_FRAME_PARMS *frame_parms,
   int32_t tmp_estimates[N_rb_alloc*12] __attribute__((aligned(16)));
   int symbol_offset,i;
   //debug_msg("lte_ul_channel_estimation_RRU: cyclic shift %d\n",cyclicShift);
-  const int16_t alpha_re[12] = {32767, 28377, 16383, 0, -16384, -28378, -32768, -28378, -16384, -1, 16383, 28377};
+  const int16_t alpha_re[12] = {32767, 28377, 16383,      0, -16384, -28378, -32768, -28378, -16384,     -1, 16383, 28377};
   const int16_t alpha_im[12] = {0, 16383, 28377, 32767, 28377, 16383, 0, -16384, -28378, -32768, -28378, -16384};
-#if defined(__x86_64__) || defined(__i386__)
-  __m128i *rxdataF128,*ul_ref128,*ul_ch128;
-  __m128i mmtmpU0,mmtmpU1,mmtmpU2,mmtmpU3;
-#elif defined(__arm__) || defined(__aarch64__)
-  int16x8_t *rxdataF128,*ul_ref128,*ul_ch128;
-  int32x4_t mmtmp0,mmtmp1,mmtmp_re,mmtmp_im;
-#endif
+  simde__m128i *rxdataF128, *ul_ref128, *ul_ch128;
+  simde__m128i mmtmpU0, mmtmpU1, mmtmpU2, mmtmpU3;
   int32_t temp_in_ifft_0[2048*2] __attribute__((aligned(32)));
   AssertFatal(l==pilot_pos1 || l==pilot_pos2,"%d is not a valid symbol for DMRS, should be %d or %d\n",
               l,pilot_pos1,pilot_pos2);
@@ -470,96 +415,51 @@ int32_t lte_ul_channel_estimation_RRU(LTE_DL_FRAME_PARMS *frame_parms,
 #endif
   symbol_offset = frame_parms->N_RB_UL*12*l;
 
-  for (aa=0; aa<nb_antennas_rx; aa++) {
-#if defined(__x86_64__) || defined(__i386__)
-    rxdataF128 = (__m128i *)&rxdataF_ext[aa][symbol_offset];
-    ul_ch128   = (__m128i *)&ul_ch_estimates[aa][symbol_offset];
-    ul_ref128  = (__m128i *)ul_ref_sigs_rx[u][v][Msc_RS_idx];
-#elif defined(__arm__) || defined(__aarch64__)
-    rxdataF128 = (int16x8_t *)&rxdataF_ext[aa][symbol_offset];
-    ul_ch128   = (int16x8_t *)&ul_ch_estimates[aa][symbol_offset];
-    ul_ref128  = (int16x8_t *)ul_ref_sigs_rx[u][v][Msc_RS_idx];
-#endif
+  for (aa = 0; aa < nb_antennas_rx; aa++) {
+    rxdataF128 = (simde__m128i *)&rxdataF_ext[aa][symbol_offset];
+    ul_ch128 = (simde__m128i *)&ul_ch_estimates[aa][symbol_offset];
+    ul_ref128 = (simde__m128i *)ul_ref_sigs_rx[u][v][Msc_RS_idx];
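+    // One RB is 12 complex 16-bit samples, i.e. three 128-bit vectors, which
+    // is why all three pointers advance by 3 per loop iteration below.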
 
-    for (i=0; i<Msc_RS/12; i++) {
-#if defined(__x86_64__) || defined(__i386__)
+    for (i = 0; i < Msc_RS / 12; i++) {
       // multiply by conjugated channel
-      mmtmpU0 = _mm_madd_epi16(ul_ref128[0],rxdataF128[0]);
+      mmtmpU0 = simde_mm_madd_epi16(ul_ref128[0], rxdataF128[0]);
       // mmtmpU0 contains real part of 4 consecutive outputs (32-bit)
-      mmtmpU1 = _mm_shufflelo_epi16(ul_ref128[0],_MM_SHUFFLE(2,3,0,1));
-      mmtmpU1 = _mm_shufflehi_epi16(mmtmpU1,_MM_SHUFFLE(2,3,0,1));
-      mmtmpU1 = _mm_sign_epi16(mmtmpU1,*(__m128i *)&conjugate[0]);
-      mmtmpU1 = _mm_madd_epi16(mmtmpU1,rxdataF128[0]);
+      mmtmpU1 = simde_mm_shufflelo_epi16(ul_ref128[0], SIMDE_MM_SHUFFLE(2, 3, 0, 1));
+      mmtmpU1 = simde_mm_shufflehi_epi16(mmtmpU1, SIMDE_MM_SHUFFLE(2, 3, 0, 1));
+      mmtmpU1 = simde_mm_sign_epi16(mmtmpU1, *(simde__m128i *)&conjugate[0]);
+      mmtmpU1 = simde_mm_madd_epi16(mmtmpU1, rxdataF128[0]);
       // mmtmpU1 contains imag part of 4 consecutive outputs (32-bit)
-      mmtmpU0 = _mm_srai_epi32(mmtmpU0,15);
-      mmtmpU1 = _mm_srai_epi32(mmtmpU1,15);
-      mmtmpU2 = _mm_unpacklo_epi32(mmtmpU0,mmtmpU1);
-      mmtmpU3 = _mm_unpackhi_epi32(mmtmpU0,mmtmpU1);
-      ul_ch128[0] = _mm_packs_epi32(mmtmpU2,mmtmpU3);
+      mmtmpU0 = simde_mm_srai_epi32(mmtmpU0, 15);
+      mmtmpU1 = simde_mm_srai_epi32(mmtmpU1, 15);
+      mmtmpU2 = simde_mm_unpacklo_epi32(mmtmpU0, mmtmpU1);
+      mmtmpU3 = simde_mm_unpackhi_epi32(mmtmpU0, mmtmpU1);
+      ul_ch128[0] = simde_mm_packs_epi32(mmtmpU2, mmtmpU3);
       //      printf("rb %d ch: %d %d\n",i,((int16_t*)ul_ch128)[0],((int16_t*)ul_ch128)[1]);
       // multiply by conjugated channel
-      mmtmpU0 = _mm_madd_epi16(ul_ref128[1],rxdataF128[1]);
+      mmtmpU0 = simde_mm_madd_epi16(ul_ref128[1], rxdataF128[1]);
       // mmtmpU0 contains real part of 4 consecutive outputs (32-bit)
-      mmtmpU1 = _mm_shufflelo_epi16(ul_ref128[1],_MM_SHUFFLE(2,3,0,1));
-      mmtmpU1 = _mm_shufflehi_epi16(mmtmpU1,_MM_SHUFFLE(2,3,0,1));
-      mmtmpU1 = _mm_sign_epi16(mmtmpU1,*(__m128i *)conjugate);
-      mmtmpU1 = _mm_madd_epi16(mmtmpU1,rxdataF128[1]);
+      mmtmpU1 = simde_mm_shufflelo_epi16(ul_ref128[1], SIMDE_MM_SHUFFLE(2, 3, 0, 1));
+      mmtmpU1 = simde_mm_shufflehi_epi16(mmtmpU1, SIMDE_MM_SHUFFLE(2, 3, 0, 1));
+      mmtmpU1 = simde_mm_sign_epi16(mmtmpU1, *(simde__m128i *)conjugate);
+      mmtmpU1 = simde_mm_madd_epi16(mmtmpU1, rxdataF128[1]);
       // mmtmpU1 contains imag part of 4 consecutive outputs (32-bit)
-      mmtmpU0 = _mm_srai_epi32(mmtmpU0,15);
-      mmtmpU1 = _mm_srai_epi32(mmtmpU1,15);
-      mmtmpU2 = _mm_unpacklo_epi32(mmtmpU0,mmtmpU1);
-      mmtmpU3 = _mm_unpackhi_epi32(mmtmpU0,mmtmpU1);
-      ul_ch128[1] = _mm_packs_epi32(mmtmpU2,mmtmpU3);
-      mmtmpU0 = _mm_madd_epi16(ul_ref128[2],rxdataF128[2]);
+      mmtmpU0 = simde_mm_srai_epi32(mmtmpU0, 15);
+      mmtmpU1 = simde_mm_srai_epi32(mmtmpU1, 15);
+      mmtmpU2 = simde_mm_unpacklo_epi32(mmtmpU0, mmtmpU1);
+      mmtmpU3 = simde_mm_unpackhi_epi32(mmtmpU0, mmtmpU1);
+      ul_ch128[1] = simde_mm_packs_epi32(mmtmpU2, mmtmpU3);
+      mmtmpU0 = simde_mm_madd_epi16(ul_ref128[2], rxdataF128[2]);
       // mmtmpU0 contains real part of 4 consecutive outputs (32-bit)
-      mmtmpU1 = _mm_shufflelo_epi16(ul_ref128[2],_MM_SHUFFLE(2,3,0,1));
-      mmtmpU1 = _mm_shufflehi_epi16(mmtmpU1,_MM_SHUFFLE(2,3,0,1));
-      mmtmpU1 = _mm_sign_epi16(mmtmpU1,*(__m128i *)conjugate);
-      mmtmpU1 = _mm_madd_epi16(mmtmpU1,rxdataF128[2]);
+      mmtmpU1 = simde_mm_shufflelo_epi16(ul_ref128[2], SIMDE_MM_SHUFFLE(2, 3, 0, 1));
+      mmtmpU1 = simde_mm_shufflehi_epi16(mmtmpU1, SIMDE_MM_SHUFFLE(2, 3, 0, 1));
+      mmtmpU1 = simde_mm_sign_epi16(mmtmpU1, *(simde__m128i *)conjugate);
+      mmtmpU1 = simde_mm_madd_epi16(mmtmpU1, rxdataF128[2]);
       // mmtmpU1 contains imag part of 4 consecutive outputs (32-bit)
-      mmtmpU0 = _mm_srai_epi32(mmtmpU0,15);
-      mmtmpU1 = _mm_srai_epi32(mmtmpU1,15);
-      mmtmpU2 = _mm_unpacklo_epi32(mmtmpU0,mmtmpU1);
-      mmtmpU3 = _mm_unpackhi_epi32(mmtmpU0,mmtmpU1);
-      ul_ch128[2] = _mm_packs_epi32(mmtmpU2,mmtmpU3);
-#elif defined(__arm__) || defined(__aarch64__)
-      mmtmp0 = vmull_s16(((int16x4_t *)ul_ref128)[0],((int16x4_t *)rxdataF128)[0]);
-      mmtmp1 = vmull_s16(((int16x4_t *)ul_ref128)[1],((int16x4_t *)rxdataF128)[1]);
-      mmtmp_re = vcombine_s32(vpadd_s32(vget_low_s32(mmtmp0),vget_high_s32(mmtmp0)),
-                              vpadd_s32(vget_low_s32(mmtmp1),vget_high_s32(mmtmp1)));
-      mmtmp0 = vmull_s16(vrev32_s16(vmul_s16(((int16x4_t *)ul_ref128)[0],*(int16x4_t *)conjugate)), ((int16x4_t *)rxdataF128)[0]);
-      mmtmp1 = vmull_s16(vrev32_s16(vmul_s16(((int16x4_t *)ul_ref128)[1],*(int16x4_t *)conjugate)), ((int16x4_t *)rxdataF128)[1]);
-      mmtmp_im = vcombine_s32(vpadd_s32(vget_low_s32(mmtmp0),vget_high_s32(mmtmp0)),
-                              vpadd_s32(vget_low_s32(mmtmp1),vget_high_s32(mmtmp1)));
-      ul_ch128[0] = vcombine_s16(vmovn_s32(mmtmp_re),vmovn_s32(mmtmp_im));
-      ul_ch128++;
-      ul_ref128++;
-      rxdataF128++;
-      mmtmp0 = vmull_s16(((int16x4_t *)ul_ref128)[0],((int16x4_t *)rxdataF128)[0]);
-      mmtmp1 = vmull_s16(((int16x4_t *)ul_ref128)[1],((int16x4_t *)rxdataF128)[1]);
-      mmtmp_re = vcombine_s32(vpadd_s32(vget_low_s32(mmtmp0),vget_high_s32(mmtmp0)),
-                              vpadd_s32(vget_low_s32(mmtmp1),vget_high_s32(mmtmp1)));
-      mmtmp0 = vmull_s16(vrev32_s16(vmul_s16(((int16x4_t *)ul_ref128)[0],*(int16x4_t *)conjugate)), ((int16x4_t *)rxdataF128)[0]);
-      mmtmp1 = vmull_s16(vrev32_s16(vmul_s16(((int16x4_t *)ul_ref128)[1],*(int16x4_t *)conjugate)), ((int16x4_t *)rxdataF128)[1]);
-      mmtmp_im = vcombine_s32(vpadd_s32(vget_low_s32(mmtmp0),vget_high_s32(mmtmp0)),
-                              vpadd_s32(vget_low_s32(mmtmp1),vget_high_s32(mmtmp1)));
-      ul_ch128[0] = vcombine_s16(vmovn_s32(mmtmp_re),vmovn_s32(mmtmp_im));
-      ul_ch128++;
-      ul_ref128++;
-      rxdataF128++;
-      mmtmp0 = vmull_s16(((int16x4_t *)ul_ref128)[0],((int16x4_t *)rxdataF128)[0]);
-      mmtmp1 = vmull_s16(((int16x4_t *)ul_ref128)[1],((int16x4_t *)rxdataF128)[1]);
-      mmtmp_re = vcombine_s32(vpadd_s32(vget_low_s32(mmtmp0),vget_high_s32(mmtmp0)),
-                              vpadd_s32(vget_low_s32(mmtmp1),vget_high_s32(mmtmp1)));
-      mmtmp0 = vmull_s16(vrev32_s16(vmul_s16(((int16x4_t *)ul_ref128)[0],*(int16x4_t *)conjugate)), ((int16x4_t *)rxdataF128)[0]);
-      mmtmp1 = vmull_s16(vrev32_s16(vmul_s16(((int16x4_t *)ul_ref128)[1],*(int16x4_t *)conjugate)), ((int16x4_t *)rxdataF128)[1]);
-      mmtmp_im = vcombine_s32(vpadd_s32(vget_low_s32(mmtmp0),vget_high_s32(mmtmp0)),
-                              vpadd_s32(vget_low_s32(mmtmp1),vget_high_s32(mmtmp1)));
-      ul_ch128[0] = vcombine_s16(vmovn_s32(mmtmp_re),vmovn_s32(mmtmp_im));
-      ul_ch128++;
-      ul_ref128++;
-      rxdataF128++;
-#endif
+      mmtmpU0 = simde_mm_srai_epi32(mmtmpU0, 15);
+      mmtmpU1 = simde_mm_srai_epi32(mmtmpU1, 15);
+      mmtmpU2 = simde_mm_unpacklo_epi32(mmtmpU0, mmtmpU1);
+      mmtmpU3 = simde_mm_unpackhi_epi32(mmtmpU0, mmtmpU1);
+      ul_ch128[2] = simde_mm_packs_epi32(mmtmpU2, mmtmpU3);
       ul_ch128+=3;
       ul_ref128+=3;
       rxdataF128+=3;
@@ -725,7 +625,7 @@ int32_t lte_ul_channel_estimation_RRU(LTE_DL_FRAME_PARMS *frame_parms,
                     N_rb_alloc);
       LOG_D(PHY,"delta_phase = %d\n",delta_phase);
     }
-  } //for(aa=...
+  } // for(aa=...
 
   return(0);
 }
@@ -801,40 +701,38 @@ int32_t lte_srs_channel_estimation(LTE_DL_FRAME_PARMS *frame_parms,
   return(0);
 }
 
-int16_t lte_ul_freq_offset_estimation(LTE_DL_FRAME_PARMS *frame_parms,
-                                      int32_t *ul_ch_estimates,
-                                      uint16_t nb_rb) {
-#if defined(__x86_64__) || defined(__i386__)
+int16_t lte_ul_freq_offset_estimation(LTE_DL_FRAME_PARMS *frame_parms, int32_t *ul_ch_estimates, uint16_t nb_rb)
+{
   int k, rb;
   int a_idx = 64;
   uint8_t conj_flag = 0;
   uint8_t output_shift;
   int pilot_pos1 = 3 - frame_parms->Ncp;
   int pilot_pos2 = 10 - 2*frame_parms->Ncp;
-  __m128i *ul_ch1 = (__m128i *)&ul_ch_estimates[pilot_pos1*frame_parms->N_RB_UL*12];
-  __m128i *ul_ch2 = (__m128i *)&ul_ch_estimates[pilot_pos2*frame_parms->N_RB_UL*12];
+  simde__m128i *ul_ch1 = (simde__m128i *)&ul_ch_estimates[pilot_pos1 * frame_parms->N_RB_UL * 12];
+  simde__m128i *ul_ch2 = (simde__m128i *)&ul_ch_estimates[pilot_pos2 * frame_parms->N_RB_UL * 12];
   int32_t avg[2];
   int16_t Ravg[2];
   Ravg[0]=0;
   Ravg[1]=0;
   int16_t iv, rv, phase_idx = 0;
-  __m128i R[3], mmtmpD0,mmtmpD1,mmtmpD2,mmtmpD3;
-  __m128 avg128U1, avg128U2;
+  simde__m128i R[3], mmtmpD0, mmtmpD1, mmtmpD2, mmtmpD3;
+  simde__m128 avg128U1, avg128U2;
 
   // round(tan((pi/4)*[1:1:N]/N)*pow2(15))
   int16_t alpha[128] = {201, 402, 603, 804, 1006, 1207, 1408, 1610, 1811, 2013, 2215, 2417, 2619, 2822, 3024, 3227, 3431, 3634, 3838, 4042, 4246, 4450, 4655, 4861, 5066, 5272, 5479, 5686, 5893, 6101, 6309, 6518, 6727, 6937, 7147, 7358, 7570, 7782, 7995, 8208, 8422, 8637, 8852, 9068, 9285, 9503, 9721, 9940, 10160, 10381, 10603, 10825, 11049, 11273, 11498, 11725, 11952, 12180, 12410, 12640, 12872, 13104, 13338, 13573, 13809, 14046, 14285, 14525, 14766, 15009, 15253, 15498, 15745, 15993, 16243, 16494, 16747, 17001, 17257, 17515, 17774, 18035, 18298, 18563, 18829, 19098, 19368, 19640, 19915, 20191, 20470, 20750, 21033, 21318, 21605, 21895, 22187, 22481, 22778, 23078, 23380, 23685, 23992, 24302, 24615, 24931, 25250, 25572, 25897, 26226, 26557, 26892, 27230, 27572, 27917, 28266, 28618, 28975, 29335, 29699, 30067, 30440, 30817, 31198, 31583, 31973, 32368, 32767};
   // compute log2_maxh (output_shift)
-  avg128U1 = _mm_setzero_ps();
-  avg128U2 = _mm_setzero_ps();
+  avg128U1 = simde_mm_setzero_ps();
+  avg128U2 = simde_mm_setzero_ps();
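+  // The squared magnitudes are accumulated in single-precision floats:
+  // madd(h,h) can already reach the int32 limit for full-scale inputs, so an
+  // integer accumulator could overflow across many resource blocks.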
 
   for (rb=0; rb<nb_rb; rb++) {
-    avg128U1 = _mm_add_ps(avg128U1,_mm_cvtepi32_ps(_mm_madd_epi16(ul_ch1[0],ul_ch1[0])));
-    avg128U1 = _mm_add_ps(avg128U1,_mm_cvtepi32_ps(_mm_madd_epi16(ul_ch1[1],ul_ch1[1])));
-    avg128U1 = _mm_add_ps(avg128U1,_mm_cvtepi32_ps(_mm_madd_epi16(ul_ch1[2],ul_ch1[2])));
+    avg128U1 = simde_mm_add_ps(avg128U1, simde_mm_cvtepi32_ps(simde_mm_madd_epi16(ul_ch1[0], ul_ch1[0])));
+    avg128U1 = simde_mm_add_ps(avg128U1, simde_mm_cvtepi32_ps(simde_mm_madd_epi16(ul_ch1[1], ul_ch1[1])));
+    avg128U1 = simde_mm_add_ps(avg128U1, simde_mm_cvtepi32_ps(simde_mm_madd_epi16(ul_ch1[2], ul_ch1[2])));
 
-    avg128U2 = _mm_add_ps(avg128U2,_mm_cvtepi32_ps(_mm_madd_epi16(ul_ch2[0],ul_ch2[0])));
-    avg128U2 = _mm_add_ps(avg128U2,_mm_cvtepi32_ps(_mm_madd_epi16(ul_ch2[1],ul_ch2[1])));
-    avg128U2 = _mm_add_ps(avg128U2,_mm_cvtepi32_ps(_mm_madd_epi16(ul_ch2[2],ul_ch2[2])));
+    avg128U2 = simde_mm_add_ps(avg128U2, simde_mm_cvtepi32_ps(simde_mm_madd_epi16(ul_ch2[0], ul_ch2[0])));
+    avg128U2 = simde_mm_add_ps(avg128U2, simde_mm_cvtepi32_ps(simde_mm_madd_epi16(ul_ch2[1], ul_ch2[1])));
+    avg128U2 = simde_mm_add_ps(avg128U2, simde_mm_cvtepi32_ps(simde_mm_madd_epi16(ul_ch2[2], ul_ch2[2])));
 
     ul_ch1+=3;
     ul_ch2+=3;
@@ -856,43 +754,43 @@ int16_t lte_ul_freq_offset_estimation(LTE_DL_FRAME_PARMS *frame_parms,
   output_shift = cmax(0,avg[1]-10);
   //output_shift  = (log2_approx(avg[0])/2)+ log2_approx(frame_parms->nb_antennas_rx-1)+1;
   //    msg("avg= %d, shift = %d\n",avg[0],output_shift);
-  ul_ch1 = (__m128i *)&ul_ch_estimates[pilot_pos1*frame_parms->N_RB_UL*12];
-  ul_ch2 = (__m128i *)&ul_ch_estimates[pilot_pos2*frame_parms->N_RB_UL*12];
+  ul_ch1 = (simde__m128i *)&ul_ch_estimates[pilot_pos1 * frame_parms->N_RB_UL * 12];
+  ul_ch2 = (simde__m128i *)&ul_ch_estimates[pilot_pos2 * frame_parms->N_RB_UL * 12];
 
   // correlate and average the 2 channel estimates ul_ch1*ul_ch2
   for (rb=0; rb<nb_rb; rb++) {
-    mmtmpD0 = _mm_madd_epi16(ul_ch1[0],ul_ch2[0]);
-    mmtmpD1 = _mm_shufflelo_epi16(ul_ch1[0],_MM_SHUFFLE(2,3,0,1));
-    mmtmpD1 = _mm_shufflehi_epi16(mmtmpD1,_MM_SHUFFLE(2,3,0,1));
-    mmtmpD1 = _mm_sign_epi16(mmtmpD1,*(__m128i *)&conjugate);
-    mmtmpD1 = _mm_madd_epi16(mmtmpD1,ul_ch2[0]);
-    mmtmpD0 = _mm_srai_epi32(mmtmpD0,output_shift);
-    mmtmpD1 = _mm_srai_epi32(mmtmpD1,output_shift);
-    mmtmpD2 = _mm_unpacklo_epi32(mmtmpD0,mmtmpD1);
-    mmtmpD3 = _mm_unpackhi_epi32(mmtmpD0,mmtmpD1);
-    R[0] = _mm_packs_epi32(mmtmpD2,mmtmpD3);
-    mmtmpD0 = _mm_madd_epi16(ul_ch1[1],ul_ch2[1]);
-    mmtmpD1 = _mm_shufflelo_epi16(ul_ch1[1],_MM_SHUFFLE(2,3,0,1));
-    mmtmpD1 = _mm_shufflehi_epi16(mmtmpD1,_MM_SHUFFLE(2,3,0,1));
-    mmtmpD1 = _mm_sign_epi16(mmtmpD1,*(__m128i *)&conjugate);
-    mmtmpD1 = _mm_madd_epi16(mmtmpD1,ul_ch2[1]);
-    mmtmpD0 = _mm_srai_epi32(mmtmpD0,output_shift);
-    mmtmpD1 = _mm_srai_epi32(mmtmpD1,output_shift);
-    mmtmpD2 = _mm_unpacklo_epi32(mmtmpD0,mmtmpD1);
-    mmtmpD3 = _mm_unpackhi_epi32(mmtmpD0,mmtmpD1);
-    R[1] = _mm_packs_epi32(mmtmpD2,mmtmpD3);
-    mmtmpD0 = _mm_madd_epi16(ul_ch1[2],ul_ch2[2]);
-    mmtmpD1 = _mm_shufflelo_epi16(ul_ch1[2],_MM_SHUFFLE(2,3,0,1));
-    mmtmpD1 = _mm_shufflehi_epi16(mmtmpD1,_MM_SHUFFLE(2,3,0,1));
-    mmtmpD1 = _mm_sign_epi16(mmtmpD1,*(__m128i *)&conjugate);
-    mmtmpD1 = _mm_madd_epi16(mmtmpD1,ul_ch2[2]);
-    mmtmpD0 = _mm_srai_epi32(mmtmpD0,output_shift);
-    mmtmpD1 = _mm_srai_epi32(mmtmpD1,output_shift);
-    mmtmpD2 = _mm_unpacklo_epi32(mmtmpD0,mmtmpD1);
-    mmtmpD3 = _mm_unpackhi_epi32(mmtmpD0,mmtmpD1);
-    R[2] = _mm_packs_epi32(mmtmpD2,mmtmpD3);
-    R[0] = _mm_add_epi16(_mm_srai_epi16(R[0],1),_mm_srai_epi16(R[1],1));
-    R[0] = _mm_add_epi16(_mm_srai_epi16(R[0],1),_mm_srai_epi16(R[2],1));
+    mmtmpD0 = simde_mm_madd_epi16(ul_ch1[0], ul_ch2[0]);
+    mmtmpD1 = simde_mm_shufflelo_epi16(ul_ch1[0], SIMDE_MM_SHUFFLE(2, 3, 0, 1));
+    mmtmpD1 = simde_mm_shufflehi_epi16(mmtmpD1, SIMDE_MM_SHUFFLE(2, 3, 0, 1));
+    mmtmpD1 = simde_mm_sign_epi16(mmtmpD1, *(simde__m128i *)&conjugate);
+    mmtmpD1 = simde_mm_madd_epi16(mmtmpD1, ul_ch2[0]);
+    mmtmpD0 = simde_mm_srai_epi32(mmtmpD0, output_shift);
+    mmtmpD1 = simde_mm_srai_epi32(mmtmpD1, output_shift);
+    mmtmpD2 = simde_mm_unpacklo_epi32(mmtmpD0, mmtmpD1);
+    mmtmpD3 = simde_mm_unpackhi_epi32(mmtmpD0, mmtmpD1);
+    R[0] = simde_mm_packs_epi32(mmtmpD2, mmtmpD3);
+    mmtmpD0 = simde_mm_madd_epi16(ul_ch1[1], ul_ch2[1]);
+    mmtmpD1 = simde_mm_shufflelo_epi16(ul_ch1[1], SIMDE_MM_SHUFFLE(2, 3, 0, 1));
+    mmtmpD1 = simde_mm_shufflehi_epi16(mmtmpD1, SIMDE_MM_SHUFFLE(2, 3, 0, 1));
+    mmtmpD1 = simde_mm_sign_epi16(mmtmpD1, *(simde__m128i *)&conjugate);
+    mmtmpD1 = simde_mm_madd_epi16(mmtmpD1, ul_ch2[1]);
+    mmtmpD0 = simde_mm_srai_epi32(mmtmpD0, output_shift);
+    mmtmpD1 = simde_mm_srai_epi32(mmtmpD1, output_shift);
+    mmtmpD2 = simde_mm_unpacklo_epi32(mmtmpD0, mmtmpD1);
+    mmtmpD3 = simde_mm_unpackhi_epi32(mmtmpD0, mmtmpD1);
+    R[1] = simde_mm_packs_epi32(mmtmpD2, mmtmpD3);
+    mmtmpD0 = simde_mm_madd_epi16(ul_ch1[2], ul_ch2[2]);
+    mmtmpD1 = simde_mm_shufflelo_epi16(ul_ch1[2], SIMDE_MM_SHUFFLE(2, 3, 0, 1));
+    mmtmpD1 = simde_mm_shufflehi_epi16(mmtmpD1, SIMDE_MM_SHUFFLE(2, 3, 0, 1));
+    mmtmpD1 = simde_mm_sign_epi16(mmtmpD1, *(simde__m128i *)&conjugate);
+    mmtmpD1 = simde_mm_madd_epi16(mmtmpD1, ul_ch2[2]);
+    mmtmpD0 = simde_mm_srai_epi32(mmtmpD0, output_shift);
+    mmtmpD1 = simde_mm_srai_epi32(mmtmpD1, output_shift);
+    mmtmpD2 = simde_mm_unpacklo_epi32(mmtmpD0, mmtmpD1);
+    mmtmpD3 = simde_mm_unpackhi_epi32(mmtmpD0, mmtmpD1);
+    R[2] = simde_mm_packs_epi32(mmtmpD2, mmtmpD3);
+    R[0] = simde_mm_add_epi16(simde_mm_srai_epi16(R[0], 1), simde_mm_srai_epi16(R[1], 1));
+    R[0] = simde_mm_add_epi16(simde_mm_srai_epi16(R[0], 1), simde_mm_srai_epi16(R[2], 1));
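+    // Halving each term before the adds keeps the running average of
+    // R[0..2] from saturating the signed 16-bit lanes.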
     Ravg[0] += (((short *)&R)[0] +
                 ((short *)&R)[2] +
                 ((short *)&R)[4] +
@@ -932,8 +830,5 @@ int16_t lte_ul_freq_offset_estimation(LTE_DL_FRAME_PARMS *frame_parms,
   if (Ravg[1]<0)
     phase_idx = -phase_idx;
 
-  return(phase_idx);
-#elif defined(__arm__) || defined(__aarch64__)
-  return(0);
-#endif
+  return (phase_idx);
 }
diff --git a/openair1/PHY/LTE_TRANSPORT/dlsch_scrambling.c b/openair1/PHY/LTE_TRANSPORT/dlsch_scrambling.c
index e1fd9b505b09daf25115e3cd516b7278c58a5947..8bdceceed817a929d9b7f4e474fe7616e6c02630 100644
--- a/openair1/PHY/LTE_TRANSPORT/dlsch_scrambling.c
+++ b/openair1/PHY/LTE_TRANSPORT/dlsch_scrambling.c
@@ -154,8 +154,8 @@ void dlsch_scrambling(LTE_DL_FRAME_PARMS *frame_parms,
     e[30] = (e[30]) ^ ((s>>30)&1);
     e[31] = (e[31]) ^ ((s>>31)&1);
     // This is not faster for some unknown reason
-    //    ((__m128i *)e)[0] = _mm_xor_si128(((__m128i *)e)[0],((__m128i *)scrambling_lut)[s&65535]);
-    //    ((__m128i *)e)[1] = _mm_xor_si128(((__m128i *)e)[1],((__m128i *)scrambling_lut)[s>>16]);
+    //    ((simde__m128i *)e)[0] = simde_mm_xor_si128(((simde__m128i *)e)[0],((simde__m128i *)scrambling_lut)[s&65535]);
+    //    ((simde__m128i *)e)[1] = simde_mm_xor_si128(((simde__m128i *)e)[1],((simde__m128i *)scrambling_lut)[s>>16]);
     s = lte_gold_generic(&x1, &x2, 0);
     e += 32;
   }
diff --git a/openair1/PHY/LTE_TRANSPORT/ulsch_decoding.c b/openair1/PHY/LTE_TRANSPORT/ulsch_decoding.c
index fd682e917c66f666b68072f87f51fb9a7f3d9b65..b9331a283e7694d46b0c171c5068fd95142ff717 100644
--- a/openair1/PHY/LTE_TRANSPORT/ulsch_decoding.c
+++ b/openair1/PHY/LTE_TRANSPORT/ulsch_decoding.c
@@ -620,16 +620,8 @@ unsigned int  ulsch_decoding(PHY_VARS_eNB *eNB,
       cseq[i2++] = (int16_t)((((s>>j)&1)<<1)-1);
     }
     */
-#if defined(__x86_64__) || defined(__i386__)
-    ((__m256i *)cseq)[i2++] = ((__m256i *)unscrambling_lut)[s&65535];
-    ((__m256i *)cseq)[i2++] = ((__m256i *)unscrambling_lut)[(s>>16)&65535];
-#elif defined(__arm__) || defined(__aarch64__)
-    ((int16x8_t *)cseq)[i2++] = ((int16x8_t *)unscrambling_lut)[(s&65535)<<1];
-    ((int16x8_t *)cseq)[i2++] = ((int16x8_t *)unscrambling_lut)[1+((s&65535)<<1)];
-    s>>=16;
-    ((int16x8_t *)cseq)[i2++] = ((int16x8_t *)unscrambling_lut)[(s&65535)<<1];
-    ((int16x8_t *)cseq)[i2++] = ((int16x8_t *)unscrambling_lut)[1+((s&65535)<<1)];
-#endif
+    ((simde__m256i *)cseq)[i2++] = ((simde__m256i *)unscrambling_lut)[s & 65535];
+    ((simde__m256i *)cseq)[i2++] = ((simde__m256i *)unscrambling_lut)[(s >> 16) & 65535];
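+    // Each 256-bit LUT entry expands 16 Gold-sequence bits into sixteen
+    // +/-1 int16 values, so the two lookups descramble 32 bits per pass.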
     s = lte_gold_unscram(&x1, &x2, 0);
   }
 
@@ -727,7 +719,7 @@ unsigned int  ulsch_decoding(PHY_VARS_eNB *eNB,
                 i2=(i2+(Cmux<<2)-3);
           */
           // slightly more optimized version (equivalent to above) for 16QAM to improve computational performance
-          *(__m64 *)&y[i2] = _mm_sign_pi16(*(__m64 *)&ulsch_llr[i],*(__m64 *)&cseq[i]);
+          *(simde__m64 *)&y[i2] = simde_mm_sign_pi16(*(simde__m64 *)&ulsch_llr[i],*(simde__m64 *)&cseq[i]);
           i+=4;
           i2+=(Cmux<<2);
         }
@@ -961,13 +953,8 @@ unsigned int  ulsch_decoding(PHY_VARS_eNB *eNB,
     }
 
     /* To be improved according to alignment of j2
-    #if defined(__x86_64__)||defined(__i386__)
     for (iprime=0; iprime<G;iprime+=16,j2+=16)
-      *((__m256i *)&ulsch_harq->e[iprime]) = *((__m256i *)&y[j2]);
-    #elif defined(__arm__) || defined(__aarch64__)
-    for (iprime=0; iprime<G;iprime+=8,j2+=8)
-      *((int16x8_t *)&ulsch_harq->e[iprime]) = *((int16x8_t *)&y[j2]);
-    #endif
+      *((simde__m256i *)&ulsch_harq->e[iprime]) = *((simde__m256i *)&y[j2]);
     */
     int16_t *yp,*ep;
 
diff --git a/openair1/PHY/LTE_TRANSPORT/ulsch_demodulation.c b/openair1/PHY/LTE_TRANSPORT/ulsch_demodulation.c
index aab4f5cb19569452c15fec92d0620b8c0a7ce5d6..33425766ab1fcdb5c779e5ae5f9f20b35761609a 100644
--- a/openair1/PHY/LTE_TRANSPORT/ulsch_demodulation.c
+++ b/openair1/PHY/LTE_TRANSPORT/ulsch_demodulation.c
@@ -50,13 +50,8 @@ static const short conjugate2[8]__attribute__((aligned(16))) = {1,-1,1,-1,1,-1,1,-1};
 
 
 void lte_idft(LTE_DL_FRAME_PARMS *frame_parms,uint32_t *z, uint16_t Msc_PUSCH) {
-#if defined(__x86_64__) || defined(__i386__)
-  __m128i idft_in128[3][1200],idft_out128[3][1200];
-  __m128i norm128;
-#elif defined(__arm__) || defined(__aarch64__)
-  int16x8_t idft_in128[3][1200],idft_out128[3][1200];
-  int16x8_t norm128;
-#endif
+  simde__m128i idft_in128[3][1200],idft_out128[3][1200];
+  simde__m128i norm128;
   int16_t *idft_in0=(int16_t *)idft_in128[0],*idft_out0=(int16_t *)idft_out128[0];
   int16_t *idft_in1=(int16_t *)idft_in128[1],*idft_out1=(int16_t *)idft_out128[1];
   int16_t *idft_in2=(int16_t *)idft_in128[2],*idft_out2=(int16_t *)idft_out128[2];
@@ -98,41 +93,21 @@ void lte_idft(LTE_DL_FRAME_PARMS *frame_parms,uint32_t *z, uint16_t Msc_PUSCH) {
 
   // conjugate input
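+  // Conjugating the input here and the output further below turns the
+  // forward DFTs into inverse DFTs, using IDFT(x) = conj(DFT(conj(x)))
+  // up to scaling.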
   for (i=0; i<(Msc_PUSCH>>2); i++) {
-#if defined(__x86_64__)||defined(__i386__)
-    * &(((__m128i *)z0)[i])=_mm_sign_epi16( *&(((__m128i *)z0)[i]),*(__m128i *)&conjugate2[0]);
-    * &(((__m128i *)z1)[i])=_mm_sign_epi16( *&(((__m128i *)z1)[i]),*(__m128i *)&conjugate2[0]);
-    * &(((__m128i *)z2)[i])=_mm_sign_epi16( *&(((__m128i *)z2)[i]),*(__m128i *)&conjugate2[0]);
-    * &(((__m128i *)z3)[i])=_mm_sign_epi16( *&(((__m128i *)z3)[i]),*(__m128i *)&conjugate2[0]);
-    * &(((__m128i *)z4)[i])=_mm_sign_epi16( *&(((__m128i *)z4)[i]),*(__m128i *)&conjugate2[0]);
-    * &(((__m128i *)z5)[i])=_mm_sign_epi16( *&(((__m128i *)z5)[i]),*(__m128i *)&conjugate2[0]);
-    * &(((__m128i *)z6)[i])=_mm_sign_epi16( *&(((__m128i *)z6)[i]),*(__m128i *)&conjugate2[0]);
-    * &(((__m128i *)z7)[i])=_mm_sign_epi16( *&(((__m128i *)z7)[i]),*(__m128i *)&conjugate2[0]);
-    * &(((__m128i *)z8)[i])=_mm_sign_epi16( *&(((__m128i *)z8)[i]),*(__m128i *)&conjugate2[0]);
-    * &(((__m128i *)z9)[i])=_mm_sign_epi16( *&(((__m128i *)z9)[i]),*(__m128i *)&conjugate2[0]);
+    ((simde__m128i *)z0)[i]=simde_mm_sign_epi16(((simde__m128i *)z0)[i],*(simde__m128i *)&conjugate2[0]);
+    ((simde__m128i *)z1)[i]=simde_mm_sign_epi16(((simde__m128i *)z1)[i],*(simde__m128i *)&conjugate2[0]);
+    ((simde__m128i *)z2)[i]=simde_mm_sign_epi16(((simde__m128i *)z2)[i],*(simde__m128i *)&conjugate2[0]);
+    ((simde__m128i *)z3)[i]=simde_mm_sign_epi16(((simde__m128i *)z3)[i],*(simde__m128i *)&conjugate2[0]);
+    ((simde__m128i *)z4)[i]=simde_mm_sign_epi16(((simde__m128i *)z4)[i],*(simde__m128i *)&conjugate2[0]);
+    ((simde__m128i *)z5)[i]=simde_mm_sign_epi16(((simde__m128i *)z5)[i],*(simde__m128i *)&conjugate2[0]);
+    ((simde__m128i *)z6)[i]=simde_mm_sign_epi16(((simde__m128i *)z6)[i],*(simde__m128i *)&conjugate2[0]);
+    ((simde__m128i *)z7)[i]=simde_mm_sign_epi16(((simde__m128i *)z7)[i],*(simde__m128i *)&conjugate2[0]);
+    ((simde__m128i *)z8)[i]=simde_mm_sign_epi16(((simde__m128i *)z8)[i],*(simde__m128i *)&conjugate2[0]);
+    ((simde__m128i *)z9)[i]=simde_mm_sign_epi16(((simde__m128i *)z9)[i],*(simde__m128i *)&conjugate2[0]);
 
     if (frame_parms->Ncp==NORMAL) {
-      * &(((__m128i *)z10)[i])=_mm_sign_epi16( *&(((__m128i *)z10)[i]),*(__m128i *)&conjugate2[0]);
-      * &(((__m128i *)z11)[i])=_mm_sign_epi16( *&(((__m128i *)z11)[i]),*(__m128i *)&conjugate2[0]);
+      ((simde__m128i *)z10)[i]=simde_mm_sign_epi16(((simde__m128i *)z10)[i],*(simde__m128i *)&conjugate2[0]);
+      ((simde__m128i *)z11)[i]=simde_mm_sign_epi16(((simde__m128i *)z11)[i],*(simde__m128i *)&conjugate2[0]);
     }
-
-#elif defined(__arm__) || defined(__aarch64__)
-    * &(((int16x8_t *)z0)[i])=vmulq_s16( *&(((int16x8_t *)z0)[i]),*(int16x8_t *)&conjugate2[0]);
-    * &(((int16x8_t *)z1)[i])=vmulq_s16( *&(((int16x8_t *)z1)[i]),*(int16x8_t *)&conjugate2[0]);
-    * &(((int16x8_t *)z2)[i])=vmulq_s16( *&(((int16x8_t *)z2)[i]),*(int16x8_t *)&conjugate2[0]);
-    * &(((int16x8_t *)z3)[i])=vmulq_s16( *&(((int16x8_t *)z3)[i]),*(int16x8_t *)&conjugate2[0]);
-    * &(((int16x8_t *)z4)[i])=vmulq_s16( *&(((int16x8_t *)z4)[i]),*(int16x8_t *)&conjugate2[0]);
-    * &(((int16x8_t *)z5)[i])=vmulq_s16( *&(((int16x8_t *)z5)[i]),*(int16x8_t *)&conjugate2[0]);
-    * &(((int16x8_t *)z6)[i])=vmulq_s16( *&(((int16x8_t *)z6)[i]),*(int16x8_t *)&conjugate2[0]);
-    * &(((int16x8_t *)z7)[i])=vmulq_s16( *&(((int16x8_t *)z7)[i]),*(int16x8_t *)&conjugate2[0]);
-    * &(((int16x8_t *)z8)[i])=vmulq_s16( *&(((int16x8_t *)z8)[i]),*(int16x8_t *)&conjugate2[0]);
-    * &(((int16x8_t *)z9)[i])=vmulq_s16( *&(((int16x8_t *)z9)[i]),*(int16x8_t *)&conjugate2[0]);
-
-    if (frame_parms->Ncp==NORMAL) {
-      * &(((int16x8_t *)z10)[i])=vmulq_s16( *&(((int16x8_t *)z10)[i]),*(int16x8_t *)&conjugate2[0]);
-      * &(((int16x8_t *)z11)[i])=vmulq_s16( *&(((int16x8_t *)z11)[i]),*(int16x8_t *)&conjugate2[0]);
-    }
-
-#endif
   }
 
   for (i=0,ip=0; i<Msc_PUSCH; i++,ip+=4) {
@@ -158,22 +133,12 @@ void lte_idft(LTE_DL_FRAME_PARMS *frame_parms,uint32_t *z, uint16_t Msc_PUSCH) {
       dft(DFT_12,(int16_t *)idft_in0,(int16_t *)idft_out0,0);
       dft(DFT_12,(int16_t *)idft_in1,(int16_t *)idft_out1,0);
       dft(DFT_12,(int16_t *)idft_in2,(int16_t *)idft_out2,0);
-#if defined(__x86_64__)||defined(__i386__)
-      norm128 = _mm_set1_epi16(9459);
-#elif defined(__arm__) || defined(__aarch64__)
-      norm128 = vdupq_n_s16(9459);
-#endif
+      norm128 = simde_mm_set1_epi16(9459);
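+      // 9459 = round(32768/sqrt(12)); mulhi followed by slli(.,1) computes
+      // (x*9459)>>15, i.e. the 1/sqrt(12) normalisation of the 12-point DFT.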
 
       for (i=0; i<12; i++) {
-#if defined(__x86_64__)||defined(__i386__)
-        ((__m128i *)idft_out0)[i] = _mm_slli_epi16(_mm_mulhi_epi16(((__m128i *)idft_out0)[i],norm128),1);
-        ((__m128i *)idft_out1)[i] = _mm_slli_epi16(_mm_mulhi_epi16(((__m128i *)idft_out1)[i],norm128),1);
-        ((__m128i *)idft_out2)[i] = _mm_slli_epi16(_mm_mulhi_epi16(((__m128i *)idft_out2)[i],norm128),1);
-#elif defined(__arm__) || defined(__aarch64__)
-        ((int16x8_t *)idft_out0)[i] = vqdmulhq_s16(((int16x8_t *)idft_out0)[i],norm128);
-        ((int16x8_t *)idft_out1)[i] = vqdmulhq_s16(((int16x8_t *)idft_out1)[i],norm128);
-        ((int16x8_t *)idft_out2)[i] = vqdmulhq_s16(((int16x8_t *)idft_out2)[i],norm128);
-#endif
+        ((simde__m128i *)idft_out0)[i] = simde_mm_slli_epi16(simde_mm_mulhi_epi16(((simde__m128i *)idft_out0)[i],norm128),1);
+        ((simde__m128i *)idft_out1)[i] = simde_mm_slli_epi16(simde_mm_mulhi_epi16(((simde__m128i *)idft_out1)[i],norm128),1);
+        ((simde__m128i *)idft_out2)[i] = simde_mm_slli_epi16(simde_mm_mulhi_epi16(((simde__m128i *)idft_out2)[i],norm128),1);
       }
 
       break;
@@ -411,47 +376,24 @@ void lte_idft(LTE_DL_FRAME_PARMS *frame_parms,uint32_t *z, uint16_t Msc_PUSCH) {
 
   // conjugate output
   for (i=0; i<(Msc_PUSCH>>2); i++) {
-#if defined(__x86_64__) || defined(__i386__)
-    ((__m128i *)z0)[i]=_mm_sign_epi16(((__m128i *)z0)[i],*(__m128i *)&conjugate2[0]);
-    ((__m128i *)z1)[i]=_mm_sign_epi16(((__m128i *)z1)[i],*(__m128i *)&conjugate2[0]);
-    ((__m128i *)z2)[i]=_mm_sign_epi16(((__m128i *)z2)[i],*(__m128i *)&conjugate2[0]);
-    ((__m128i *)z3)[i]=_mm_sign_epi16(((__m128i *)z3)[i],*(__m128i *)&conjugate2[0]);
-    ((__m128i *)z4)[i]=_mm_sign_epi16(((__m128i *)z4)[i],*(__m128i *)&conjugate2[0]);
-    ((__m128i *)z5)[i]=_mm_sign_epi16(((__m128i *)z5)[i],*(__m128i *)&conjugate2[0]);
-    ((__m128i *)z6)[i]=_mm_sign_epi16(((__m128i *)z6)[i],*(__m128i *)&conjugate2[0]);
-    ((__m128i *)z7)[i]=_mm_sign_epi16(((__m128i *)z7)[i],*(__m128i *)&conjugate2[0]);
-    ((__m128i *)z8)[i]=_mm_sign_epi16(((__m128i *)z8)[i],*(__m128i *)&conjugate2[0]);
-    ((__m128i *)z9)[i]=_mm_sign_epi16(((__m128i *)z9)[i],*(__m128i *)&conjugate2[0]);
-
+    ((simde__m128i *)z0)[i]=simde_mm_sign_epi16(((simde__m128i *)z0)[i],*(simde__m128i *)&conjugate2[0]);
+    ((simde__m128i *)z1)[i]=simde_mm_sign_epi16(((simde__m128i *)z1)[i],*(simde__m128i *)&conjugate2[0]);
+    ((simde__m128i *)z2)[i]=simde_mm_sign_epi16(((simde__m128i *)z2)[i],*(simde__m128i *)&conjugate2[0]);
+    ((simde__m128i *)z3)[i]=simde_mm_sign_epi16(((simde__m128i *)z3)[i],*(simde__m128i *)&conjugate2[0]);
+    ((simde__m128i *)z4)[i]=simde_mm_sign_epi16(((simde__m128i *)z4)[i],*(simde__m128i *)&conjugate2[0]);
+    ((simde__m128i *)z5)[i]=simde_mm_sign_epi16(((simde__m128i *)z5)[i],*(simde__m128i *)&conjugate2[0]);
+    ((simde__m128i *)z6)[i]=simde_mm_sign_epi16(((simde__m128i *)z6)[i],*(simde__m128i *)&conjugate2[0]);
+    ((simde__m128i *)z7)[i]=simde_mm_sign_epi16(((simde__m128i *)z7)[i],*(simde__m128i *)&conjugate2[0]);
+    ((simde__m128i *)z8)[i]=simde_mm_sign_epi16(((simde__m128i *)z8)[i],*(simde__m128i *)&conjugate2[0]);
+    ((simde__m128i *)z9)[i]=simde_mm_sign_epi16(((simde__m128i *)z9)[i],*(simde__m128i *)&conjugate2[0]);
     if (frame_parms->Ncp==NORMAL) {
-      ((__m128i *)z10)[i]=_mm_sign_epi16(((__m128i *)z10)[i],*(__m128i *)&conjugate2[0]);
-      ((__m128i *)z11)[i]=_mm_sign_epi16(((__m128i *)z11)[i],*(__m128i *)&conjugate2[0]);
+      ((simde__m128i *)z10)[i]=simde_mm_sign_epi16(((simde__m128i *)z10)[i],*(simde__m128i *)&conjugate2[0]);
+      ((simde__m128i *)z11)[i]=simde_mm_sign_epi16(((simde__m128i *)z11)[i],*(simde__m128i *)&conjugate2[0]);
     }
-
-#elif defined(__arm__) || defined(__aarch64__)
-    * &(((int16x8_t *)z0)[i])=vmulq_s16( *&(((int16x8_t *)z0)[i]),*(int16x8_t *)&conjugate2[0]);
-    * &(((int16x8_t *)z1)[i])=vmulq_s16( *&(((int16x8_t *)z1)[i]),*(int16x8_t *)&conjugate2[0]);
-    * &(((int16x8_t *)z2)[i])=vmulq_s16( *&(((int16x8_t *)z2)[i]),*(int16x8_t *)&conjugate2[0]);
-    * &(((int16x8_t *)z3)[i])=vmulq_s16( *&(((int16x8_t *)z3)[i]),*(int16x8_t *)&conjugate2[0]);
-    * &(((int16x8_t *)z4)[i])=vmulq_s16( *&(((int16x8_t *)z4)[i]),*(int16x8_t *)&conjugate2[0]);
-    * &(((int16x8_t *)z5)[i])=vmulq_s16( *&(((int16x8_t *)z5)[i]),*(int16x8_t *)&conjugate2[0]);
-    * &(((int16x8_t *)z6)[i])=vmulq_s16( *&(((int16x8_t *)z6)[i]),*(int16x8_t *)&conjugate2[0]);
-    * &(((int16x8_t *)z7)[i])=vmulq_s16( *&(((int16x8_t *)z7)[i]),*(int16x8_t *)&conjugate2[0]);
-    * &(((int16x8_t *)z8)[i])=vmulq_s16( *&(((int16x8_t *)z8)[i]),*(int16x8_t *)&conjugate2[0]);
-    * &(((int16x8_t *)z9)[i])=vmulq_s16( *&(((int16x8_t *)z9)[i]),*(int16x8_t *)&conjugate2[0]);
-
-    if (frame_parms->Ncp==NORMAL) {
-      * &(((int16x8_t *)z10)[i])=vmulq_s16( *&(((int16x8_t *)z10)[i]),*(int16x8_t *)&conjugate2[0]);
-      * &(((int16x8_t *)z11)[i])=vmulq_s16( *&(((int16x8_t *)z11)[i]),*(int16x8_t *)&conjugate2[0]);
-    }
-
-#endif
   }
 
-#if defined(__x86_64__) || defined(__i386__)
-  _mm_empty();
-  _m_empty();
-#endif
+  simde_mm_empty();
+  simde_m_empty();
 }
 
 
@@ -465,13 +407,8 @@ int32_t ulsch_qpsk_llr(LTE_DL_FRAME_PARMS *frame_parms,
                        uint8_t symbol,
                        uint16_t nb_rb,
                        int16_t **llrp) {
-#if defined(__x86_64__) || defined(__i386__)
-  __m128i *rxF=(__m128i *)&rxdataF_comp[0][(symbol*frame_parms->N_RB_DL*12)];
-  __m128i **llrp128 = (__m128i **)llrp;
-#elif defined(__arm__) || defined(__aarch64__)
-  int16x8_t *rxF= (int16x8_t *)&rxdataF_comp[0][(symbol*frame_parms->N_RB_DL*12)];
-  int16x8_t **llrp128 = (int16x8_t **)llrp;
-#endif
+  simde__m128i *rxF=(simde__m128i *)&rxdataF_comp[0][(symbol*frame_parms->N_RB_DL*12)];
+  simde__m128i **llrp128 = (simde__m128i **)llrp;
   int i;
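+  // For QPSK the compensated symbols are (up to scaling) the soft bits
+  // themselves, so the loop below just streams 128-bit blocks out to llrp.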
 
   for (i=0; i<(nb_rb*3); i++) {
@@ -480,10 +417,8 @@ int32_t ulsch_qpsk_llr(LTE_DL_FRAME_PARMS *frame_parms,
     (*llrp128)++;
   }
 
-#if defined(__x86_64__) || defined(__i386__)
-  _mm_empty();
-  _m_empty();
-#endif
+  simde_mm_empty();
+  simde_m_empty();
   return(0);
 }
 
@@ -495,56 +430,24 @@ void ulsch_16qam_llr(LTE_DL_FRAME_PARMS *frame_parms,
                      uint16_t nb_rb,
                      int16_t **llrp) {
   int i;
-#if defined(__x86_64__) || defined(__i386__)
-  __m128i *rxF=(__m128i *)&rxdataF_comp[0][(symbol*frame_parms->N_RB_DL*12)];
-  __m128i *ch_mag;
-  __m128i mmtmpU0;
-  __m128i **llrp128=(__m128i **)llrp;
-  ch_mag =(__m128i *)&ul_ch_mag[0][(symbol*frame_parms->N_RB_DL*12)];
-#elif defined(__arm__) || defined(__aarch64__)
-  int16x8_t *rxF=(int16x8_t *)&rxdataF_comp[0][(symbol*frame_parms->N_RB_DL*12)];
-  int16x8_t *ch_mag;
-  int16x8_t xmm0;
-  int16_t **llrp16=llrp;
-  ch_mag =(int16x8_t *)&ul_ch_mag[0][(symbol*frame_parms->N_RB_DL*12)];
-#endif
+  simde__m128i *rxF=(simde__m128i *)&rxdataF_comp[0][(symbol*frame_parms->N_RB_DL*12)];
+  simde__m128i *ch_mag;
+  simde__m128i mmtmpU0;
+  simde__m128i **llrp128=(simde__m128i **)llrp;
+  ch_mag =(simde__m128i *)&ul_ch_mag[0][(symbol*frame_parms->N_RB_DL*12)];
 
   for (i=0; i<(nb_rb*3); i++) {
-#if defined(__x86_64__) || defined(__i386__)
-    mmtmpU0 = _mm_abs_epi16(rxF[i]);
-    mmtmpU0 = _mm_subs_epi16(ch_mag[i],mmtmpU0);
-    (*llrp128)[0] = _mm_unpacklo_epi32(rxF[i],mmtmpU0);
-    (*llrp128)[1] = _mm_unpackhi_epi32(rxF[i],mmtmpU0);
+    mmtmpU0 = simde_mm_abs_epi16(rxF[i]);
+    mmtmpU0 = simde_mm_subs_epi16(ch_mag[i],mmtmpU0);
+    (*llrp128)[0] = simde_mm_unpacklo_epi32(rxF[i],mmtmpU0);
+    (*llrp128)[1] = simde_mm_unpackhi_epi32(rxF[i],mmtmpU0);
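+    // ch_mag - |y| is the LLR of the inner bit pair of Gray-mapped 16QAM;
+    // unpacklo/hi interleave it with the outer-bit LLRs (the symbols), so
+    // four 16-bit LLRs per complex symbol come out in bit order.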
     (*llrp128)+=2;
-#elif defined(__arm__) || defined(__aarch64__)
-    xmm0 = vabsq_s16(rxF[i]);
-    xmm0 = vqsubq_s16(ch_mag[i],xmm0);
-    (*llrp16)[0] = vgetq_lane_s16(rxF[i],0);
-    (*llrp16)[1] = vgetq_lane_s16(xmm0,0);
-    (*llrp16)[2] = vgetq_lane_s16(rxF[i],1);
-    (*llrp16)[3] = vgetq_lane_s16(xmm0,1);
-    (*llrp16)[4] = vgetq_lane_s16(rxF[i],2);
-    (*llrp16)[5] = vgetq_lane_s16(xmm0,2);
-    (*llrp16)[6] = vgetq_lane_s16(rxF[i],2);
-    (*llrp16)[7] = vgetq_lane_s16(xmm0,3);
-    (*llrp16)[8] = vgetq_lane_s16(rxF[i],4);
-    (*llrp16)[9] = vgetq_lane_s16(xmm0,4);
-    (*llrp16)[10] = vgetq_lane_s16(rxF[i],5);
-    (*llrp16)[11] = vgetq_lane_s16(xmm0,5);
-    (*llrp16)[12] = vgetq_lane_s16(rxF[i],6);
-    (*llrp16)[13] = vgetq_lane_s16(xmm0,6);
-    (*llrp16)[14] = vgetq_lane_s16(rxF[i],7);
-    (*llrp16)[15] = vgetq_lane_s16(xmm0,7);
-    (*llrp16)+=16;
-#endif
     //    print_bytes("rxF[i]",&rxF[i]);
     //    print_bytes("rxF[i+1]",&rxF[i+1]);
   }
 
-#if defined(__x86_64__) || defined(__i386__)
-  _mm_empty();
-  _m_empty();
-#endif
+  simde_mm_empty();
+  simde_m_empty();
 }
 
 void ulsch_64qam_llr(LTE_DL_FRAME_PARMS *frame_parms,
@@ -557,67 +460,38 @@ void ulsch_64qam_llr(LTE_DL_FRAME_PARMS *frame_parms,
                      int16_t **llrp) {
   int i;
   int32_t **llrp32=(int32_t **)llrp;
-#if defined(__x86_64__) || defined(__i386)
-  __m128i *rxF=(__m128i *)&rxdataF_comp[0][(symbol*frame_parms->N_RB_DL*12)];
-  __m128i *ch_mag,*ch_magb;
-  __m128i mmtmpU1,mmtmpU2;
-  ch_mag =(__m128i *)&ul_ch_mag[0][(symbol*frame_parms->N_RB_DL*12)];
-  ch_magb =(__m128i *)&ul_ch_magb[0][(symbol*frame_parms->N_RB_DL*12)];
-#elif defined(__arm__) || defined(__aarch64__)
-  int16x8_t *rxF=(int16x8_t *)&rxdataF_comp[0][(symbol*frame_parms->N_RB_DL*12)];
-  int16x8_t *ch_mag,*ch_magb;
-  int16x8_t mmtmpU1,mmtmpU2;
-  ch_mag =(int16x8_t *)&ul_ch_mag[0][(symbol*frame_parms->N_RB_DL*12)];
-  ch_magb =(int16x8_t *)&ul_ch_magb[0][(symbol*frame_parms->N_RB_DL*12)];
-#endif
+  simde__m128i *rxF=(simde__m128i *)&rxdataF_comp[0][(symbol*frame_parms->N_RB_DL*12)];
+  simde__m128i *ch_mag,*ch_magb;
+  simde__m128i mmtmpU1,mmtmpU2;
+  ch_mag =(simde__m128i *)&ul_ch_mag[0][(symbol*frame_parms->N_RB_DL*12)];
+  ch_magb =(simde__m128i *)&ul_ch_magb[0][(symbol*frame_parms->N_RB_DL*12)];
 
   if(LOG_DEBUGFLAG(DEBUG_ULSCH)) {
-    LOG_UI(PHY,"symbol %d: mag %d, magb %d\n",symbol,_mm_extract_epi16(ch_mag[0],0),_mm_extract_epi16(ch_magb[0],0));
+    LOG_UI(PHY,"symbol %d: mag %d, magb %d\n",symbol,simde_mm_extract_epi16(ch_mag[0],0),simde_mm_extract_epi16(ch_magb[0],0));
   }
 
   for (i=0; i<(nb_rb*3); i++) {
-#if defined(__x86_64__) || defined(__i386__)
-    mmtmpU1 = _mm_abs_epi16(rxF[i]);
-    mmtmpU1  = _mm_subs_epi16(ch_mag[i],mmtmpU1);
-    mmtmpU2 = _mm_abs_epi16(mmtmpU1);
-    mmtmpU2 = _mm_subs_epi16(ch_magb[i],mmtmpU2);
-    (*llrp32)[0]  = _mm_extract_epi32(rxF[i],0);
-    (*llrp32)[1]  = _mm_extract_epi32(mmtmpU1,0);
-    (*llrp32)[2]  = _mm_extract_epi32(mmtmpU2,0);
-    (*llrp32)[3]  = _mm_extract_epi32(rxF[i],1);
-    (*llrp32)[4]  = _mm_extract_epi32(mmtmpU1,1);
-    (*llrp32)[5]  = _mm_extract_epi32(mmtmpU2,1);
-    (*llrp32)[6]  = _mm_extract_epi32(rxF[i],2);
-    (*llrp32)[7]  = _mm_extract_epi32(mmtmpU1,2);
-    (*llrp32)[8]  = _mm_extract_epi32(mmtmpU2,2);
-    (*llrp32)[9]  = _mm_extract_epi32(rxF[i],3);
-    (*llrp32)[10] = _mm_extract_epi32(mmtmpU1,3);
-    (*llrp32)[11] = _mm_extract_epi32(mmtmpU2,3);
-#elif defined(__arm__) || defined(__aarch64__)
-    mmtmpU1 = vabsq_s16(rxF[i]);
-    mmtmpU1 = vqsubq_s16(ch_mag[i],mmtmpU1);
-    mmtmpU2 = vabsq_s16(mmtmpU1);
-    mmtmpU2 = vqsubq_s16(ch_magb[i],mmtmpU2);
-    (*llrp32)[0]  = vgetq_lane_s32((int32x4_t)rxF[i],0);
-    (*llrp32)[1]  = vgetq_lane_s32((int32x4_t)mmtmpU1,0);
-    (*llrp32)[2]  = vgetq_lane_s32((int32x4_t)mmtmpU2,0);
-    (*llrp32)[3]  = vgetq_lane_s32((int32x4_t)rxF[i],1);
-    (*llrp32)[4]  = vgetq_lane_s32((int32x4_t)mmtmpU1,1);
-    (*llrp32)[5]  = vgetq_lane_s32((int32x4_t)mmtmpU2,1);
-    (*llrp32)[6]  = vgetq_lane_s32((int32x4_t)rxF[i],2);
-    (*llrp32)[7]  = vgetq_lane_s32((int32x4_t)mmtmpU1,2);
-    (*llrp32)[8]  = vgetq_lane_s32((int32x4_t)mmtmpU2,2);
-    (*llrp32)[9]  = vgetq_lane_s32((int32x4_t)rxF[i],3);
-    (*llrp32)[10] = vgetq_lane_s32((int32x4_t)mmtmpU1,3);
-    (*llrp32)[11] = vgetq_lane_s32((int32x4_t)mmtmpU2,3);
-#endif
+    mmtmpU1 = simde_mm_abs_epi16(rxF[i]);
+    mmtmpU1  = simde_mm_subs_epi16(ch_mag[i],mmtmpU1);
+    mmtmpU2 = simde_mm_abs_epi16(mmtmpU1);
+    mmtmpU2 = simde_mm_subs_epi16(ch_magb[i],mmtmpU2);
+    (*llrp32)[0]  = simde_mm_extract_epi32(rxF[i],0);
+    (*llrp32)[1]  = simde_mm_extract_epi32(mmtmpU1,0);
+    (*llrp32)[2]  = simde_mm_extract_epi32(mmtmpU2,0);
+    (*llrp32)[3]  = simde_mm_extract_epi32(rxF[i],1);
+    (*llrp32)[4]  = simde_mm_extract_epi32(mmtmpU1,1);
+    (*llrp32)[5]  = simde_mm_extract_epi32(mmtmpU2,1);
+    (*llrp32)[6]  = simde_mm_extract_epi32(rxF[i],2);
+    (*llrp32)[7]  = simde_mm_extract_epi32(mmtmpU1,2);
+    (*llrp32)[8]  = simde_mm_extract_epi32(mmtmpU2,2);
+    (*llrp32)[9]  = simde_mm_extract_epi32(rxF[i],3);
+    (*llrp32)[10] = simde_mm_extract_epi32(mmtmpU1,3);
+    (*llrp32)[11] = simde_mm_extract_epi32(mmtmpU2,3);
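+    // Per complex symbol three 32-bit words are emitted: the symbol itself
+    // (outer bits), ch_mag - |y| (middle bits) and ch_magb - |ch_mag - |y||
+    // (inner bits), giving the six soft bits of a 64QAM symbol.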
     (*llrp32)+=12;
   }
 
-#if defined(__x86_64__) || defined(__i386__)
-  _mm_empty();
-  _m_empty();
-#endif
+  simde_mm_empty();
+  simde_m_empty();
 }
 
 void ulsch_detection_mrc(LTE_DL_FRAME_PARMS *frame_parms,
@@ -626,83 +500,56 @@ void ulsch_detection_mrc(LTE_DL_FRAME_PARMS *frame_parms,
                          int32_t **ul_ch_magb,
                          uint8_t symbol,
                          uint16_t nb_rb) {
-#if defined(__x86_64__) || defined(__i386__)
-  __m128i *rxdataF_comp128_0=NULL,*ul_ch_mag128_0=NULL,*ul_ch_mag128_0b=NULL;
-  __m128i *rxdataF_comp128_1=NULL,*ul_ch_mag128_1=NULL,*ul_ch_mag128_1b=NULL;
-  __m128i *rxdataF_comp128_2=NULL,*ul_ch_mag128_2=NULL,*ul_ch_mag128_2b=NULL;
-  __m128i *rxdataF_comp128_3=NULL,*ul_ch_mag128_3=NULL,*ul_ch_mag128_3b=NULL;
-#elif defined(__arm__) || defined(__aarch64__)
-  int16x8_t *rxdataF_comp128_0,*ul_ch_mag128_0,*ul_ch_mag128_0b;
-  int16x8_t *rxdataF_comp128_1,*ul_ch_mag128_1,*ul_ch_mag128_1b;
-  int16x8_t *rxdataF_comp128_2,*ul_ch_mag128_2,*ul_ch_mag128_2b;
-  int16x8_t *rxdataF_comp128_3,*ul_ch_mag128_3,*ul_ch_mag128_3b;
-#endif
+  simde__m128i *rxdataF_comp128_0=NULL,*ul_ch_mag128_0=NULL,*ul_ch_mag128_0b=NULL;
+  simde__m128i *rxdataF_comp128_1=NULL,*ul_ch_mag128_1=NULL,*ul_ch_mag128_1b=NULL;
+  simde__m128i *rxdataF_comp128_2=NULL,*ul_ch_mag128_2=NULL,*ul_ch_mag128_2b=NULL;
+  simde__m128i *rxdataF_comp128_3=NULL,*ul_ch_mag128_3=NULL,*ul_ch_mag128_3b=NULL;
   int32_t i;
 
   if (frame_parms->nb_antennas_rx>1) {
-#if defined(__x86_64__) || defined(__i386__)
-    rxdataF_comp128_0   = (__m128i *)&rxdataF_comp[0][symbol*frame_parms->N_RB_DL*12];
-    rxdataF_comp128_1   = (__m128i *)&rxdataF_comp[1][symbol*frame_parms->N_RB_DL*12];
-    ul_ch_mag128_0      = (__m128i *)&ul_ch_mag[0][symbol*frame_parms->N_RB_DL*12];
-    ul_ch_mag128_1      = (__m128i *)&ul_ch_mag[1][symbol*frame_parms->N_RB_DL*12];
-    ul_ch_mag128_0b     = (__m128i *)&ul_ch_magb[0][symbol*frame_parms->N_RB_DL*12];
-    ul_ch_mag128_1b     = (__m128i *)&ul_ch_magb[1][symbol*frame_parms->N_RB_DL*12];
+    rxdataF_comp128_0   = (simde__m128i *)&rxdataF_comp[0][symbol*frame_parms->N_RB_DL*12];
+    rxdataF_comp128_1   = (simde__m128i *)&rxdataF_comp[1][symbol*frame_parms->N_RB_DL*12];
+    ul_ch_mag128_0      = (simde__m128i *)&ul_ch_mag[0][symbol*frame_parms->N_RB_DL*12];
+    ul_ch_mag128_1      = (simde__m128i *)&ul_ch_mag[1][symbol*frame_parms->N_RB_DL*12];
+    ul_ch_mag128_0b     = (simde__m128i *)&ul_ch_magb[0][symbol*frame_parms->N_RB_DL*12];
+    ul_ch_mag128_1b     = (simde__m128i *)&ul_ch_magb[1][symbol*frame_parms->N_RB_DL*12];
     if (frame_parms->nb_antennas_rx>2) { 
-      rxdataF_comp128_2   = (__m128i *)&rxdataF_comp[2][symbol*frame_parms->N_RB_DL*12];
-      ul_ch_mag128_2      = (__m128i *)&ul_ch_mag[2][symbol*frame_parms->N_RB_DL*12];
-      ul_ch_mag128_2b     = (__m128i *)&ul_ch_magb[2][symbol*frame_parms->N_RB_DL*12];
+      rxdataF_comp128_2   = (simde__m128i *)&rxdataF_comp[2][symbol*frame_parms->N_RB_DL*12];
+      ul_ch_mag128_2      = (simde__m128i *)&ul_ch_mag[2][symbol*frame_parms->N_RB_DL*12];
+      ul_ch_mag128_2b     = (simde__m128i *)&ul_ch_magb[2][symbol*frame_parms->N_RB_DL*12];
     }
     if (frame_parms->nb_antennas_rx>3) { 
-      rxdataF_comp128_3   = (__m128i *)&rxdataF_comp[3][symbol*frame_parms->N_RB_DL*12];
-      ul_ch_mag128_3      = (__m128i *)&ul_ch_mag[3][symbol*frame_parms->N_RB_DL*12];
-      ul_ch_mag128_3b     = (__m128i *)&ul_ch_magb[3][symbol*frame_parms->N_RB_DL*12];
+      rxdataF_comp128_3   = (simde__m128i *)&rxdataF_comp[3][symbol*frame_parms->N_RB_DL*12];
+      ul_ch_mag128_3      = (simde__m128i *)&ul_ch_mag[3][symbol*frame_parms->N_RB_DL*12];
+      ul_ch_mag128_3b     = (simde__m128i *)&ul_ch_magb[3][symbol*frame_parms->N_RB_DL*12];
     }
 
     // MRC on each re of rb, both on MF output and magnitude (for 16QAM/64QAM llr computation)
     if (frame_parms->nb_antennas_rx==2) 
       for (i=0; i<nb_rb*3; i++) {
-        rxdataF_comp128_0[i] = _mm_srai_epi16(_mm_adds_epi16(rxdataF_comp128_0[i],rxdataF_comp128_1[i]),1);
-        ul_ch_mag128_0[i]    = _mm_srai_epi16(_mm_adds_epi16(ul_ch_mag128_0[i],ul_ch_mag128_1[i]),1);
-        ul_ch_mag128_0b[i]   = _mm_srai_epi16(_mm_adds_epi16(ul_ch_mag128_0b[i],ul_ch_mag128_1b[i]),1);
-        rxdataF_comp128_0[i] = _mm_add_epi16(rxdataF_comp128_0[i],(*(__m128i *)&jitterc[0]));
+        rxdataF_comp128_0[i] = simde_mm_srai_epi16(simde_mm_adds_epi16(rxdataF_comp128_0[i],rxdataF_comp128_1[i]),1);
+        ul_ch_mag128_0[i]    = simde_mm_srai_epi16(simde_mm_adds_epi16(ul_ch_mag128_0[i],ul_ch_mag128_1[i]),1);
+        ul_ch_mag128_0b[i]   = simde_mm_srai_epi16(simde_mm_adds_epi16(ul_ch_mag128_0b[i],ul_ch_mag128_1b[i]),1);
+        rxdataF_comp128_0[i] = simde_mm_add_epi16(rxdataF_comp128_0[i],(*(simde__m128i *)&jitterc[0]));
       }
     if (frame_parms->nb_antennas_rx==3)
       for (i=0; i<nb_rb*3; i++) {
-        rxdataF_comp128_0[i] = _mm_srai_epi16(_mm_adds_epi16(rxdataF_comp128_0[i],_mm_adds_epi16(rxdataF_comp128_1[i],rxdataF_comp128_2[i])),1);
-        ul_ch_mag128_0[i]    = _mm_srai_epi16(_mm_adds_epi16(ul_ch_mag128_0[i],_mm_adds_epi16(ul_ch_mag128_1[i],ul_ch_mag128_2[i])),1);
-        ul_ch_mag128_0b[i]   = _mm_srai_epi16(_mm_adds_epi16(ul_ch_mag128_0b[i],_mm_adds_epi16(ul_ch_mag128_1b[i],ul_ch_mag128_2b[i])),1);
-        rxdataF_comp128_0[i] = _mm_add_epi16(rxdataF_comp128_0[i],(*(__m128i *)&jitterc[0]));
+        rxdataF_comp128_0[i] = simde_mm_srai_epi16(simde_mm_adds_epi16(rxdataF_comp128_0[i],simde_mm_adds_epi16(rxdataF_comp128_1[i],rxdataF_comp128_2[i])),1);
+        ul_ch_mag128_0[i]    = simde_mm_srai_epi16(simde_mm_adds_epi16(ul_ch_mag128_0[i],simde_mm_adds_epi16(ul_ch_mag128_1[i],ul_ch_mag128_2[i])),1);
+        ul_ch_mag128_0b[i]   = simde_mm_srai_epi16(simde_mm_adds_epi16(ul_ch_mag128_0b[i],simde_mm_adds_epi16(ul_ch_mag128_1b[i],ul_ch_mag128_2b[i])),1);
+        rxdataF_comp128_0[i] = simde_mm_add_epi16(rxdataF_comp128_0[i],(*(simde__m128i *)&jitterc[0]));
       }
      if (frame_parms->nb_antennas_rx==4)
       for (i=0; i<nb_rb*3; i++) {
-        rxdataF_comp128_0[i] = _mm_srai_epi16(_mm_adds_epi16(rxdataF_comp128_0[i],_mm_adds_epi16(rxdataF_comp128_1[i],_mm_adds_epi16(rxdataF_comp128_2[i],rxdataF_comp128_3[i]))),2);
-        ul_ch_mag128_0[i]    = _mm_srai_epi16(_mm_adds_epi16(ul_ch_mag128_0[i],_mm_adds_epi16(ul_ch_mag128_1[i],_mm_adds_epi16(ul_ch_mag128_2[i],ul_ch_mag128_3[i]))),2);
-        ul_ch_mag128_0b[i]   = _mm_srai_epi16(_mm_adds_epi16(ul_ch_mag128_0b[i],_mm_adds_epi16(ul_ch_mag128_1b[i],_mm_adds_epi16(ul_ch_mag128_2b[i],ul_ch_mag128_3b[i]))),2);
-        rxdataF_comp128_0[i] = _mm_add_epi16(rxdataF_comp128_0[i],(*(__m128i *)&jitterc[0]));
+        rxdataF_comp128_0[i] = simde_mm_srai_epi16(simde_mm_adds_epi16(rxdataF_comp128_0[i],simde_mm_adds_epi16(rxdataF_comp128_1[i],simde_mm_adds_epi16(rxdataF_comp128_2[i],rxdataF_comp128_3[i]))),2);
+        ul_ch_mag128_0[i]    = simde_mm_srai_epi16(simde_mm_adds_epi16(ul_ch_mag128_0[i],simde_mm_adds_epi16(ul_ch_mag128_1[i],simde_mm_adds_epi16(ul_ch_mag128_2[i],ul_ch_mag128_3[i]))),2);
+        ul_ch_mag128_0b[i]   = simde_mm_srai_epi16(simde_mm_adds_epi16(ul_ch_mag128_0b[i],simde_mm_adds_epi16(ul_ch_mag128_1b[i],simde_mm_adds_epi16(ul_ch_mag128_2b[i],ul_ch_mag128_3b[i]))),2);
+        rxdataF_comp128_0[i] = simde_mm_add_epi16(rxdataF_comp128_0[i],(*(simde__m128i *)&jitterc[0]));
       }
-#elif defined(__arm__) || defined(__aarch64__)
-    rxdataF_comp128_0   = (int16x8_t *)&rxdataF_comp[0][symbol*frame_parms->N_RB_DL*12];
-    rxdataF_comp128_1   = (int16x8_t *)&rxdataF_comp[1][symbol*frame_parms->N_RB_DL*12];
-    ul_ch_mag128_0      = (int16x8_t *)&ul_ch_mag[0][symbol*frame_parms->N_RB_DL*12];
-    ul_ch_mag128_1      = (int16x8_t *)&ul_ch_mag[1][symbol*frame_parms->N_RB_DL*12];
-    ul_ch_mag128_0b     = (int16x8_t *)&ul_ch_magb[0][symbol*frame_parms->N_RB_DL*12];
-    ul_ch_mag128_1b     = (int16x8_t *)&ul_ch_magb[1][symbol*frame_parms->N_RB_DL*12];
-
-    // MRC on each re of rb, both on MF output and magnitude (for 16QAM/64QAM llr computation)
-    for (i=0; i<nb_rb*3; i++) {
-      rxdataF_comp128_0[i] = vhaddq_s16(rxdataF_comp128_0[i],rxdataF_comp128_1[i]);
-      ul_ch_mag128_0[i]    = vhaddq_s16(ul_ch_mag128_0[i],ul_ch_mag128_1[i]);
-      ul_ch_mag128_0b[i]   = vhaddq_s16(ul_ch_mag128_0b[i],ul_ch_mag128_1b[i]);
-      rxdataF_comp128_0[i] = vqaddq_s16(rxdataF_comp128_0[i],(*(int16x8_t *)&jitterc[0]));
-    }
-
-#endif
   }
 
-#if defined(__x86_64__) || defined(__i386__)
-  _mm_empty();
-  _m_empty();
-#endif
+  simde_mm_empty();
+  simde_m_empty();
 }
 
 void ulsch_extract_rbs_single(int32_t **rxdataF,
@@ -778,86 +625,50 @@ void ulsch_channel_compensation(int32_t **rxdataF_ext,
                                 uint16_t nb_rb,
                                 uint8_t output_shift) {
   uint16_t rb;
-#if defined(__x86_64__) || defined(__i386__)
-  __m128i *ul_ch128,*ul_ch_mag128,*rxdataF128,*rxdataF_comp128;
-  uint8_t aarx;//,symbol_mod;
-  __m128i mmtmpU0,mmtmpU1,mmtmpU2,mmtmpU3;
 
-#elif defined(__arm__) || defined(__aarch64__)
-  int16x4_t *ul_ch128,*rxdataF128;
-  int16x8_t *ul_ch_mag128,*rxdataF_comp128;
+  simde__m128i *ul_ch128, *ul_ch_mag128, *rxdataF128, *rxdataF_comp128;
   uint8_t aarx;//,symbol_mod;
-  int32x4_t mmtmpU0,mmtmpU1,mmtmpU0b,mmtmpU1b;
-  int16_t conj[4]__attribute__((aligned(16))) = {1,-1,1,-1};
-  int32x4_t output_shift128 = vmovq_n_s32(-(int32_t)output_shift);
-
-#endif
+  simde__m128i mmtmpU0,mmtmpU1,mmtmpU2,mmtmpU3;
 
   for (aarx=0; aarx<frame_parms->nb_antennas_rx; aarx++) {
-#if defined(__x86_64__) || defined(__i386__)
-    ul_ch128          = (__m128i *)&ul_ch_estimates_ext[aarx][symbol*frame_parms->N_RB_DL*12];
-    ul_ch_mag128      = (__m128i *)&ul_ch_mag[aarx][symbol*frame_parms->N_RB_DL*12];
-    rxdataF128        = (__m128i *)&rxdataF_ext[aarx][symbol*frame_parms->N_RB_DL*12];
-    rxdataF_comp128   = (__m128i *)&rxdataF_comp[aarx][symbol*frame_parms->N_RB_DL*12];
-#elif defined(__arm__) || defined(__aarch64__)
-    ul_ch128          = (int16x4_t *)&ul_ch_estimates_ext[aarx][symbol*frame_parms->N_RB_DL*12];
-    ul_ch_mag128      = (int16x8_t *)&ul_ch_mag[aarx][symbol*frame_parms->N_RB_DL*12];
-    rxdataF128        = (int16x4_t *)&rxdataF_ext[aarx][symbol*frame_parms->N_RB_DL*12];
-    rxdataF_comp128   = (int16x8_t *)&rxdataF_comp[aarx][symbol*frame_parms->N_RB_DL*12];
-#endif
+    ul_ch128          = (simde__m128i *)&ul_ch_estimates_ext[aarx][symbol*frame_parms->N_RB_DL*12];
+    ul_ch_mag128      = (simde__m128i *)&ul_ch_mag[aarx][symbol*frame_parms->N_RB_DL*12];
+    rxdataF128        = (simde__m128i *)&rxdataF_ext[aarx][symbol*frame_parms->N_RB_DL*12];
+    rxdataF_comp128   = (simde__m128i *)&rxdataF_comp[aarx][symbol*frame_parms->N_RB_DL*12];
 
     for (rb=0; rb<nb_rb; rb++) {
       // just compute channel magnitude without scaling, this is done after equalization for SC-FDMA
-#if defined(__x86_64__) || defined(__i386__)
-      mmtmpU0 = _mm_madd_epi16(ul_ch128[0],ul_ch128[0]);
-      mmtmpU0 = _mm_srai_epi32(mmtmpU0,output_shift);
-      mmtmpU1 = _mm_madd_epi16(ul_ch128[1],ul_ch128[1]);
-      mmtmpU1 = _mm_srai_epi32(mmtmpU1,output_shift);
-      mmtmpU0 = _mm_packs_epi32(mmtmpU0,mmtmpU1);
-      ul_ch_mag128[0] = _mm_unpacklo_epi16(mmtmpU0,mmtmpU0);
-      ul_ch_mag128[1] = _mm_unpackhi_epi16(mmtmpU0,mmtmpU0);
-      mmtmpU0 = _mm_madd_epi16(ul_ch128[2],ul_ch128[2]);
-      mmtmpU0 = _mm_srai_epi32(mmtmpU0,output_shift);
-      mmtmpU1 = _mm_packs_epi32(mmtmpU0,mmtmpU0);
-      ul_ch_mag128[2] = _mm_unpacklo_epi16(mmtmpU1,mmtmpU1);
+      mmtmpU0 = simde_mm_madd_epi16(ul_ch128[0],ul_ch128[0]);
+      mmtmpU0 = simde_mm_srai_epi32(mmtmpU0,output_shift);
+      mmtmpU1 = simde_mm_madd_epi16(ul_ch128[1],ul_ch128[1]);
+      mmtmpU1 = simde_mm_srai_epi32(mmtmpU1,output_shift);
+      mmtmpU0 = simde_mm_packs_epi32(mmtmpU0,mmtmpU1);
+      ul_ch_mag128[0] = simde_mm_unpacklo_epi16(mmtmpU0,mmtmpU0);
+      ul_ch_mag128[1] = simde_mm_unpackhi_epi16(mmtmpU0,mmtmpU0);
+      mmtmpU0 = simde_mm_madd_epi16(ul_ch128[2],ul_ch128[2]);
+      mmtmpU0 = simde_mm_srai_epi32(mmtmpU0,output_shift);
+      mmtmpU1 = simde_mm_packs_epi32(mmtmpU0,mmtmpU0);
+      ul_ch_mag128[2] = simde_mm_unpacklo_epi16(mmtmpU1,mmtmpU1);
       //LOG_I(PHY,"comp: ant %d symbol %d rb %d => %d,%d,%d (output_shift %d)\n",aarx,symbol,rb,*((int16_t *)&ul_ch_mag128[0]),*((int16_t *)&ul_ch_mag128[1]),*((int16_t *)&ul_ch_mag128[2]),output_shift);
-#elif defined(__arm__) || defined(__aarch64__)
-      mmtmpU0 = vmull_s16(ul_ch128[0], ul_ch128[0]);
-      mmtmpU0 = vqshlq_s32(vqaddq_s32(mmtmpU0,vrev64q_s32(mmtmpU0)),-output_shift128);
-      mmtmpU1 = vmull_s16(ul_ch128[1], ul_ch128[1]);
-      mmtmpU1 = vqshlq_s32(vqaddq_s32(mmtmpU1,vrev64q_s32(mmtmpU1)),-output_shift128);
-      ul_ch_mag128[0] = vcombine_s16(vmovn_s32(mmtmpU0),vmovn_s32(mmtmpU1));
-      mmtmpU0 = vmull_s16(ul_ch128[2], ul_ch128[2]);
-      mmtmpU0 = vqshlq_s32(vqaddq_s32(mmtmpU0,vrev64q_s32(mmtmpU0)),-output_shift128);
-      mmtmpU1 = vmull_s16(ul_ch128[3], ul_ch128[3]);
-      mmtmpU1 = vqshlq_s32(vqaddq_s32(mmtmpU1,vrev64q_s32(mmtmpU1)),-output_shift128);
-      ul_ch_mag128[1] = vcombine_s16(vmovn_s32(mmtmpU0),vmovn_s32(mmtmpU1));
-      mmtmpU0 = vmull_s16(ul_ch128[4], ul_ch128[4]);
-      mmtmpU0 = vqshlq_s32(vqaddq_s32(mmtmpU0,vrev64q_s32(mmtmpU0)),-output_shift128);
-      mmtmpU1 = vmull_s16(ul_ch128[5], ul_ch128[5]);
-      mmtmpU1 = vqshlq_s32(vqaddq_s32(mmtmpU1,vrev64q_s32(mmtmpU1)),-output_shift128);
-      ul_ch_mag128[2] = vcombine_s16(vmovn_s32(mmtmpU0),vmovn_s32(mmtmpU1));
-#endif
-#if defined(__x86_64__) || defined(__i386__)
       // multiply by conjugated channel
-      mmtmpU0 = _mm_madd_epi16(ul_ch128[0],rxdataF128[0]);
+      mmtmpU0 = simde_mm_madd_epi16(ul_ch128[0],rxdataF128[0]);
       //        print_ints("re",&mmtmpU0);
       // mmtmpU0 contains real part of 4 consecutive outputs (32-bit)
-      mmtmpU1 = _mm_shufflelo_epi16(ul_ch128[0],_MM_SHUFFLE(2,3,0,1));
-      mmtmpU1 = _mm_shufflehi_epi16(mmtmpU1,_MM_SHUFFLE(2,3,0,1));
-      mmtmpU1 = _mm_sign_epi16(mmtmpU1,*(__m128i *)&conjugate[0]);
-      mmtmpU1 = _mm_madd_epi16(mmtmpU1,rxdataF128[0]);
+      mmtmpU1 = simde_mm_shufflelo_epi16(ul_ch128[0],SIMDE_MM_SHUFFLE(2,3,0,1));
+      mmtmpU1 = simde_mm_shufflehi_epi16(mmtmpU1,SIMDE_MM_SHUFFLE(2,3,0,1));
+      mmtmpU1 = simde_mm_sign_epi16(mmtmpU1,*(simde__m128i *)&conjugate[0]);
+      mmtmpU1 = simde_mm_madd_epi16(mmtmpU1,rxdataF128[0]);
       //      print_ints("im",&mmtmpU1);
       // mmtmpU1 contains imag part of 4 consecutive outputs (32-bit)
-      mmtmpU0 = _mm_srai_epi32(mmtmpU0,output_shift);
+      mmtmpU0 = simde_mm_srai_epi32(mmtmpU0,output_shift);
       //  print_ints("re(shift)",&mmtmpU0);
-      mmtmpU1 = _mm_srai_epi32(mmtmpU1,output_shift);
+      mmtmpU1 = simde_mm_srai_epi32(mmtmpU1,output_shift);
       //  print_ints("im(shift)",&mmtmpU1);
-      mmtmpU2 = _mm_unpacklo_epi32(mmtmpU0,mmtmpU1);
-      mmtmpU3 = _mm_unpackhi_epi32(mmtmpU0,mmtmpU1);
+      mmtmpU2 = simde_mm_unpacklo_epi32(mmtmpU0,mmtmpU1);
+      mmtmpU3 = simde_mm_unpackhi_epi32(mmtmpU0,mmtmpU1);
       //        print_ints("c0",&mmtmpU2);
       //  print_ints("c1",&mmtmpU3);
-      rxdataF_comp128[0] = _mm_packs_epi32(mmtmpU2,mmtmpU3);
+      rxdataF_comp128[0] = simde_mm_packs_epi32(mmtmpU2,mmtmpU3);
       /*
               LOG_I(PHY,"Antenna %d:",aarx); 
               print_shorts("rx:",&rxdataF128[0]);
@@ -865,18 +676,18 @@ void ulsch_channel_compensation(int32_t **rxdataF_ext,
               print_shorts("pack:",&rxdataF_comp128[0]);
       */
       // multiply by conjugated channel
-      mmtmpU0 = _mm_madd_epi16(ul_ch128[1],rxdataF128[1]);
+      mmtmpU0 = simde_mm_madd_epi16(ul_ch128[1],rxdataF128[1]);
       // mmtmpU0 contains real part of 4 consecutive outputs (32-bit)
-      mmtmpU1 = _mm_shufflelo_epi16(ul_ch128[1],_MM_SHUFFLE(2,3,0,1));
-      mmtmpU1 = _mm_shufflehi_epi16(mmtmpU1,_MM_SHUFFLE(2,3,0,1));
-      mmtmpU1 = _mm_sign_epi16(mmtmpU1,*(__m128i *)conjugate);
-      mmtmpU1 = _mm_madd_epi16(mmtmpU1,rxdataF128[1]);
+      mmtmpU1 = simde_mm_shufflelo_epi16(ul_ch128[1],SIMDE_MM_SHUFFLE(2,3,0,1));
+      mmtmpU1 = simde_mm_shufflehi_epi16(mmtmpU1,SIMDE_MM_SHUFFLE(2,3,0,1));
+      mmtmpU1 = simde_mm_sign_epi16(mmtmpU1,*(simde__m128i *)conjugate);
+      mmtmpU1 = simde_mm_madd_epi16(mmtmpU1,rxdataF128[1]);
       // mmtmpU1 contains imag part of 4 consecutive outputs (32-bit)
-      mmtmpU0 = _mm_srai_epi32(mmtmpU0,output_shift);
-      mmtmpU1 = _mm_srai_epi32(mmtmpU1,output_shift);
-      mmtmpU2 = _mm_unpacklo_epi32(mmtmpU0,mmtmpU1);
-      mmtmpU3 = _mm_unpackhi_epi32(mmtmpU0,mmtmpU1);
-      rxdataF_comp128[1] = _mm_packs_epi32(mmtmpU2,mmtmpU3);
+      mmtmpU0 = simde_mm_srai_epi32(mmtmpU0,output_shift);
+      mmtmpU1 = simde_mm_srai_epi32(mmtmpU1,output_shift);
+      mmtmpU2 = simde_mm_unpacklo_epi32(mmtmpU0,mmtmpU1);
+      mmtmpU3 = simde_mm_unpackhi_epi32(mmtmpU0,mmtmpU1);
+      rxdataF_comp128[1] = simde_mm_packs_epi32(mmtmpU2,mmtmpU3);
       /*
         LOG_I(PHY,"Antenna %d:",aarx);
               print_shorts("rx:",&rxdataF128[1]);
@@ -884,18 +695,18 @@ void ulsch_channel_compensation(int32_t **rxdataF_ext,
               print_shorts("pack:",&rxdataF_comp128[1]);
       */
       //       multiply by conjugated channel
-      mmtmpU0 = _mm_madd_epi16(ul_ch128[2],rxdataF128[2]);
+      mmtmpU0 = simde_mm_madd_epi16(ul_ch128[2],rxdataF128[2]);
       // mmtmpU0 contains real part of 4 consecutive outputs (32-bit)
-      mmtmpU1 = _mm_shufflelo_epi16(ul_ch128[2],_MM_SHUFFLE(2,3,0,1));
-      mmtmpU1 = _mm_shufflehi_epi16(mmtmpU1,_MM_SHUFFLE(2,3,0,1));
-      mmtmpU1 = _mm_sign_epi16(mmtmpU1,*(__m128i *)conjugate);
-      mmtmpU1 = _mm_madd_epi16(mmtmpU1,rxdataF128[2]);
+      mmtmpU1 = simde_mm_shufflelo_epi16(ul_ch128[2],SIMDE_MM_SHUFFLE(2,3,0,1));
+      mmtmpU1 = simde_mm_shufflehi_epi16(mmtmpU1,SIMDE_MM_SHUFFLE(2,3,0,1));
+      mmtmpU1 = simde_mm_sign_epi16(mmtmpU1,*(simde__m128i *)conjugate);
+      mmtmpU1 = simde_mm_madd_epi16(mmtmpU1,rxdataF128[2]);
       // mmtmpU1 contains imag part of 4 consecutive outputs (32-bit)
-      mmtmpU0 = _mm_srai_epi32(mmtmpU0,output_shift);
-      mmtmpU1 = _mm_srai_epi32(mmtmpU1,output_shift);
-      mmtmpU2 = _mm_unpacklo_epi32(mmtmpU0,mmtmpU1);
-      mmtmpU3 = _mm_unpackhi_epi32(mmtmpU0,mmtmpU1);
-      rxdataF_comp128[2] = _mm_packs_epi32(mmtmpU2,mmtmpU3);
+      mmtmpU0 = simde_mm_srai_epi32(mmtmpU0,output_shift);
+      mmtmpU1 = simde_mm_srai_epi32(mmtmpU1,output_shift);
+      mmtmpU2 = simde_mm_unpacklo_epi32(mmtmpU0,mmtmpU1);
+      mmtmpU3 = simde_mm_unpackhi_epi32(mmtmpU0,mmtmpU1);
+      rxdataF_comp128[2] = simde_mm_packs_epi32(mmtmpU2,mmtmpU3);
       /*
               LOG_I(PHY,"Antenna %d:",aarx);
               print_shorts("rx:",&rxdataF128[2]);
@@ -903,71 +714,19 @@ void ulsch_channel_compensation(int32_t **rxdataF_ext,
               print_shorts("pack:",&rxdataF_comp128[2]);
       */
       // Add a jitter to compensate for the saturation in "packs" resulting in a bias on the DC after IDFT
-      rxdataF_comp128[0] = _mm_add_epi16(rxdataF_comp128[0],(*(__m128i *)&jitter[0]));
-      rxdataF_comp128[1] = _mm_add_epi16(rxdataF_comp128[1],(*(__m128i *)&jitter[0]));
-      rxdataF_comp128[2] = _mm_add_epi16(rxdataF_comp128[2],(*(__m128i *)&jitter[0]));
+      rxdataF_comp128[0] = simde_mm_add_epi16(rxdataF_comp128[0],(*(simde__m128i *)&jitter[0]));
+      rxdataF_comp128[1] = simde_mm_add_epi16(rxdataF_comp128[1],(*(simde__m128i *)&jitter[0]));
+      rxdataF_comp128[2] = simde_mm_add_epi16(rxdataF_comp128[2],(*(simde__m128i *)&jitter[0]));
 
       ul_ch128+=3;
       ul_ch_mag128+=3;
       rxdataF128+=3;
       rxdataF_comp128+=3;
-#elif defined(__arm__) || defined(__aarch64__)
-      mmtmpU0 = vmull_s16(ul_ch128[0], rxdataF128[0]);
-      //mmtmpU0 = [Re(ch[0])Re(rx[0]) Im(ch[0])Im(ch[0]) Re(ch[1])Re(rx[1]) Im(ch[1])Im(ch[1])]
-      mmtmpU1 = vmull_s16(ul_ch128[1], rxdataF128[1]);
-      //mmtmpU1 = [Re(ch[2])Re(rx[2]) Im(ch[2])Im(ch[2]) Re(ch[3])Re(rx[3]) Im(ch[3])Im(ch[3])]
-      mmtmpU0 = vcombine_s32(vpadd_s32(vget_low_s32(mmtmpU0),vget_high_s32(mmtmpU0)),
-                             vpadd_s32(vget_low_s32(mmtmpU1),vget_high_s32(mmtmpU1)));
-      //mmtmpU0 = [Re(ch[0])Re(rx[0])+Im(ch[0])Im(ch[0]) Re(ch[1])Re(rx[1])+Im(ch[1])Im(ch[1]) Re(ch[2])Re(rx[2])+Im(ch[2])Im(ch[2]) Re(ch[3])Re(rx[3])+Im(ch[3])Im(ch[3])]
-      mmtmpU0b = vmull_s16(vrev32_s16(vmul_s16(ul_ch128[0],*(int16x4_t *)conj)), rxdataF128[0]);
-      //mmtmpU0 = [-Im(ch[0])Re(rx[0]) Re(ch[0])Im(rx[0]) -Im(ch[1])Re(rx[1]) Re(ch[1])Im(rx[1])]
-      mmtmpU1b = vmull_s16(vrev32_s16(vmul_s16(ul_ch128[1],*(int16x4_t *)conj)), rxdataF128[1]);
-      //mmtmpU0 = [-Im(ch[2])Re(rx[2]) Re(ch[2])Im(rx[2]) -Im(ch[3])Re(rx[3]) Re(ch[3])Im(rx[3])]
-      mmtmpU1 = vcombine_s32(vpadd_s32(vget_low_s32(mmtmpU0b),vget_high_s32(mmtmpU0b)),
-                             vpadd_s32(vget_low_s32(mmtmpU1b),vget_high_s32(mmtmpU1b)));
-      //mmtmpU1 = [-Im(ch[0])Re(rx[0])+Re(ch[0])Im(rx[0]) -Im(ch[1])Re(rx[1])+Re(ch[1])Im(rx[1]) -Im(ch[2])Re(rx[2])+Re(ch[2])Im(rx[2]) -Im(ch[3])Re(rx[3])+Re(ch[3])Im(rx[3])]
-      mmtmpU0 = vqshlq_s32(mmtmpU0,-output_shift128);
-      mmtmpU1 = vqshlq_s32(mmtmpU1,-output_shift128);
-      rxdataF_comp128[0] = vcombine_s16(vmovn_s32(mmtmpU0),vmovn_s32(mmtmpU1));
-      mmtmpU0 = vmull_s16(ul_ch128[2], rxdataF128[2]);
-      mmtmpU1 = vmull_s16(ul_ch128[3], rxdataF128[3]);
-      mmtmpU0 = vcombine_s32(vpadd_s32(vget_low_s32(mmtmpU0),vget_high_s32(mmtmpU0)),
-                             vpadd_s32(vget_low_s32(mmtmpU1),vget_high_s32(mmtmpU1)));
-      mmtmpU0b = vmull_s16(vrev32_s16(vmul_s16(ul_ch128[2],*(int16x4_t *)conj)), rxdataF128[2]);
-      mmtmpU1b = vmull_s16(vrev32_s16(vmul_s16(ul_ch128[3],*(int16x4_t *)conj)), rxdataF128[3]);
-      mmtmpU1 = vcombine_s32(vpadd_s32(vget_low_s32(mmtmpU0b),vget_high_s32(mmtmpU0b)),
-                             vpadd_s32(vget_low_s32(mmtmpU1b),vget_high_s32(mmtmpU1b)));
-      mmtmpU0 = vqshlq_s32(mmtmpU0,-output_shift128);
-      mmtmpU1 = vqshlq_s32(mmtmpU1,-output_shift128);
-      rxdataF_comp128[1] = vcombine_s16(vmovn_s32(mmtmpU0),vmovn_s32(mmtmpU1));
-      mmtmpU0 = vmull_s16(ul_ch128[4], rxdataF128[4]);
-      mmtmpU1 = vmull_s16(ul_ch128[5], rxdataF128[5]);
-      mmtmpU0 = vcombine_s32(vpadd_s32(vget_low_s32(mmtmpU0),vget_high_s32(mmtmpU0)),
-                             vpadd_s32(vget_low_s32(mmtmpU1),vget_high_s32(mmtmpU1)));
-      mmtmpU0b = vmull_s16(vrev32_s16(vmul_s16(ul_ch128[4],*(int16x4_t *)conj)), rxdataF128[4]);
-      mmtmpU1b = vmull_s16(vrev32_s16(vmul_s16(ul_ch128[5],*(int16x4_t *)conj)), rxdataF128[5]);
-      mmtmpU1 = vcombine_s32(vpadd_s32(vget_low_s32(mmtmpU0b),vget_high_s32(mmtmpU0b)),
-                             vpadd_s32(vget_low_s32(mmtmpU1b),vget_high_s32(mmtmpU1b)));
-      mmtmpU0 = vqshlq_s32(mmtmpU0,-output_shift128);
-      mmtmpU1 = vqshlq_s32(mmtmpU1,-output_shift128);
-      rxdataF_comp128[2] = vcombine_s16(vmovn_s32(mmtmpU0),vmovn_s32(mmtmpU1));
-      // Add a jitter to compensate for the saturation in "packs" resulting in a bias on the DC after IDFT
-      rxdataF_comp128[0] = vqaddq_s16(rxdataF_comp128[0],(*(int16x8_t*)&jitter[0]));
-      rxdataF_comp128[1] = vqaddq_s16(rxdataF_comp128[1],(*(int16x8_t*)&jitter[0]));
-      rxdataF_comp128[2] = vqaddq_s16(rxdataF_comp128[2],(*(int16x8_t*)&jitter[0]));
-      
-      ul_ch128+=6;
-      ul_ch_mag128+=3;
-      rxdataF128+=6;
-      rxdataF_comp128+=3;
-#endif
     }
   }
 
-#if defined(__x86_64__) || defined(__i386__)
-  _mm_empty();
-  _m_empty();
-#endif
+  simde_mm_empty();
+  simde_m_empty();
 }
 
 void ulsch_channel_level(int32_t **drs_ch_estimates_ext,
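The kernel above multiplies by the conjugated channel with a fixed three-step idiom: simde_mm_madd_epi16(ch, rx) pairs re*re + im*im to form the real part; the lo/hi shuffles with SIMDE_MM_SHUFFLE(2,3,0,1) swap each (re,im) pair and simde_mm_sign_epi16 against {-1,1,...} negates the imaginary halves, so a second madd yields re*im' - im*re'; simde_mm_packs_epi32 saturates the shifted 32-bit results back to int16 (the jitter added afterwards compensates the DC bias that this saturation introduces). A self-contained sketch under the same assumptions (cmul_conj4 and conj_sign are hypothetical names, not OAI identifiers):

#include <simde/x86/ssse3.h>

static const int16_t conj_sign[8] __attribute__((aligned(16))) = {-1, 1, -1, 1, -1, 1, -1, 1};

/* out[k] = conj(ch[k]) * rx[k] for 4 complex int16 samples, scaled by >> shift */
static simde__m128i cmul_conj4(simde__m128i ch, simde__m128i rx, int shift)
{
  /* real part: re*re' + im*im' per complex sample, 32-bit */
  simde__m128i re = simde_mm_madd_epi16(ch, rx);
  /* swap re/im within each complex and negate the im copy: [a b] -> [-b a] */
  simde__m128i t = simde_mm_shufflelo_epi16(ch, SIMDE_MM_SHUFFLE(2, 3, 0, 1));
  t = simde_mm_shufflehi_epi16(t, SIMDE_MM_SHUFFLE(2, 3, 0, 1));
  t = simde_mm_sign_epi16(t, *(const simde__m128i *)conj_sign);
  /* imaginary part: re*im' - im*re' */
  simde__m128i im = simde_mm_madd_epi16(t, rx);
  re = simde_mm_srai_epi32(re, shift);
  im = simde_mm_srai_epi32(im, shift);
  /* interleave back to [re im re im ...] and saturate to int16 */
  return simde_mm_packs_epi32(simde_mm_unpacklo_epi32(re, im),
                              simde_mm_unpackhi_epi32(re, im));
}
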
@@ -976,42 +735,21 @@ void ulsch_channel_level(int32_t **drs_ch_estimates_ext,
                          uint16_t nb_rb) {
   int16_t rb;
   uint8_t aarx;
-#if defined(__x86_64__) || defined(__i386__)
-  __m128i *ul_ch128;
-  __m128 avg128U;
-#elif defined(__arm__) || defined(__aarch64__)
-  int32x4_t avg128U;
-  int16x4_t *ul_ch128;
-#endif
+  simde__m128i *ul_ch128;
+  simde__m128 avg128U;
 
   for (aarx=0; aarx<frame_parms->nb_antennas_rx; aarx++) {
     //clear average level
-#if defined(__x86_64__) || defined(__i386__)
-    avg128U = _mm_setzero_ps();
-    ul_ch128=(__m128i *)drs_ch_estimates_ext[aarx];
+    avg128U = simde_mm_setzero_ps();
+    ul_ch128=(simde__m128i *)drs_ch_estimates_ext[aarx];
 
     for (rb=0; rb<nb_rb; rb++) {
-      avg128U = _mm_add_ps(avg128U,_mm_cvtepi32_ps(_mm_madd_epi16(ul_ch128[0],ul_ch128[0])));
-      avg128U = _mm_add_ps(avg128U,_mm_cvtepi32_ps(_mm_madd_epi16(ul_ch128[1],ul_ch128[1])));
-      avg128U = _mm_add_ps(avg128U,_mm_cvtepi32_ps(_mm_madd_epi16(ul_ch128[2],ul_ch128[2])));
+      avg128U = simde_mm_add_ps(avg128U,simde_mm_cvtepi32_ps(simde_mm_madd_epi16(ul_ch128[0],ul_ch128[0])));
+      avg128U = simde_mm_add_ps(avg128U,simde_mm_cvtepi32_ps(simde_mm_madd_epi16(ul_ch128[1],ul_ch128[1])));
+      avg128U = simde_mm_add_ps(avg128U,simde_mm_cvtepi32_ps(simde_mm_madd_epi16(ul_ch128[2],ul_ch128[2])));
       ul_ch128+=3;
     }
 
-#elif defined(__arm__) || defined(__aarch64__)
-    avg128U = vdupq_n_s32(0);
-    ul_ch128=(int16x4_t *)drs_ch_estimates_ext[aarx];
-
-    for (rb=0; rb<nb_rb; rb++) {
-      avg128U = vqaddq_s32(avg128U,vmull_s16(ul_ch128[0],ul_ch128[0]));
-      avg128U = vqaddq_s32(avg128U,vmull_s16(ul_ch128[1],ul_ch128[1]));
-      avg128U = vqaddq_s32(avg128U,vmull_s16(ul_ch128[2],ul_ch128[2]));
-      avg128U = vqaddq_s32(avg128U,vmull_s16(ul_ch128[3],ul_ch128[3]));
-      avg128U = vqaddq_s32(avg128U,vmull_s16(ul_ch128[4],ul_ch128[4]));
-      avg128U = vqaddq_s32(avg128U,vmull_s16(ul_ch128[5],ul_ch128[5]));
-      ul_ch128+=6;
-    }
-
-#endif
     DevAssert( nb_rb );
     avg[aarx] = (int)((((float *)&avg128U)[0] +
                        ((float *)&avg128U)[1] +
@@ -1019,10 +757,8 @@ void ulsch_channel_level(int32_t **drs_ch_estimates_ext,
                        ((float *)&avg128U)[3])/(float)(nb_rb*12));
   }
 
-#if defined(__x86_64__) || defined(__i386__)
-  _mm_empty();
-  _m_empty();
-#endif
+  simde_mm_empty();
+  simde_m_empty();
 }
 
 int ulsch_power_LUT[750];
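ulsch_channel_level above accumulates re^2 + im^2 per subcarrier with madd, converts each partial sum to float so the accumulator cannot overflow int32 over many resource blocks, and finally averages the four lanes. A condensed sketch under the same assumptions (channel_level is a hypothetical helper):

#include <simde/x86/sse2.h>

/* mean of re^2 + im^2 over n complex int16 samples (n a multiple of 4) */
static float channel_level(const int16_t *ch, int n)
{
  simde__m128 acc = simde_mm_setzero_ps();
  for (int i = 0; i < 2 * n; i += 8) {
    simde__m128i v = simde_mm_loadu_si128((const simde__m128i *)&ch[i]);
    /* madd gives re*re + im*im per complex sample, as 32-bit ints */
    acc = simde_mm_add_ps(acc, simde_mm_cvtepi32_ps(simde_mm_madd_epi16(v, v)));
  }
  float lanes[4];
  simde_mm_storeu_ps(lanes, acc);
  return (lanes[0] + lanes[1] + lanes[2] + lanes[3]) / (float)n;
}
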
diff --git a/openair1/PHY/LTE_UE_TRANSPORT/dci_ue.c b/openair1/PHY/LTE_UE_TRANSPORT/dci_ue.c
index c4ab60602d2d3dfea4f13973359657e67396ae72..381dfc0cdff6b1897865baa3d16c47e00dffb986 100644
--- a/openair1/PHY/LTE_UE_TRANSPORT/dci_ue.c
+++ b/openair1/PHY/LTE_UE_TRANSPORT/dci_ue.c
@@ -353,40 +353,18 @@ void pdcch_channel_level(int32_t **dl_ch_estimates_ext,
 
   int16_t rb;
   uint8_t aatx,aarx;
-#if defined(__x86_64__) || defined(__i386__)
-  __m128i *dl_ch128;
-  __m128i avg128P;
-#elif defined(__arm__) || defined(__aarch64__)
-  int16x8_t *dl_ch128;
-  int32x4_t *avg128P;
-#else
-  int16_t *dl_ch128;
-  int32_t *avg128P;
-#error Unsupported CPU architecture, cannot build __FILE__
-#endif
+  simde__m128i *dl_ch128;
+  simde__m128i avg128P;
   for (aatx=0; aatx<frame_parms->nb_antenna_ports_eNB; aatx++)
     for (aarx=0; aarx<frame_parms->nb_antennas_rx; aarx++) {
       //clear average level
-#if defined(__x86_64__) || defined(__i386__)
-      avg128P = _mm_setzero_si128();
-      dl_ch128=(__m128i *)&dl_ch_estimates_ext[(aatx<<1)+aarx][0];
-#elif defined(__arm__) || defined(__aarch64__)
-     dl_ch128=&dl_ch_estimates_ext[(aatx<<1)+aarx][0];
-#error __arm__ or __aarch64__ not yet implemented, cannot build __FILE__
-#else
-      dl_ch128=&dl_ch_estimates_ext[(aatx<<1)+aarx][0];
-#error Unsupported CPU architecture, cannot build __FILE__
-#endif
+      avg128P = simde_mm_setzero_si128();
+      dl_ch128=(simde__m128i *)&dl_ch_estimates_ext[(aatx<<1)+aarx][0];
       for (rb=0; rb<nb_rb; rb++) {
 
-#if defined(__x86_64__) || defined(__i386__)
-        avg128P = _mm_add_epi32(avg128P,_mm_madd_epi16(dl_ch128[0],dl_ch128[0]));
-        avg128P = _mm_add_epi32(avg128P,_mm_madd_epi16(dl_ch128[1],dl_ch128[1]));
-        avg128P = _mm_add_epi32(avg128P,_mm_madd_epi16(dl_ch128[2],dl_ch128[2]));
-#elif defined(__arm__) || defined(__aarch64__)
-#else
-#error Unsupported CPU architecture, cannot build __FILE__
-#endif
+        avg128P = simde_mm_add_epi32(avg128P,simde_mm_madd_epi16(dl_ch128[0],dl_ch128[0]));
+        avg128P = simde_mm_add_epi32(avg128P,simde_mm_madd_epi16(dl_ch128[1],dl_ch128[1]));
+        avg128P = simde_mm_add_epi32(avg128P,simde_mm_madd_epi16(dl_ch128[2],dl_ch128[2]));
         dl_ch128+=3;
         /*
           if (rb==0) {
@@ -406,10 +384,8 @@ void pdcch_channel_level(int32_t **dl_ch_estimates_ext,
       //      printf("Channel level : %d\n",avg[(aatx<<1)+aarx]);
     }
 
-#if defined(__x86_64__) || defined(__i386__)
-  _mm_empty();
-  _m_empty();
-#endif
+  simde_mm_empty();
+  simde_m_empty();
 
 }
 
@@ -425,82 +401,40 @@ void pdcch_detection_mrc_i(LTE_DL_FRAME_PARMS *frame_parms,
 
   uint8_t aatx;
 
-#if defined(__x86_64__) || defined(__i386__)
-  __m128i *rxdataF_comp128_0,*rxdataF_comp128_1,*rxdataF_comp128_i0,*rxdataF_comp128_i1,*rho128_0,*rho128_1,*rho128_i0,*rho128_i1;
-#elif defined(__arm__) || defined(__aarch64__)
-  int16x8_t *rxdataF_comp128_0,*rxdataF_comp128_1,*rxdataF_comp128_i0,*rxdataF_comp128_i1,*rho128_0,*rho128_1,*rho128_i0,*rho128_i1;
-#else
-#error Unsupported CPU architecture, cannot build __FILE__
-#endif
+  simde__m128i *rxdataF_comp128_0,*rxdataF_comp128_1,*rxdataF_comp128_i0,*rxdataF_comp128_i1,*rho128_0,*rho128_1,*rho128_i0,*rho128_i1;
   int32_t i;
 
   if (frame_parms->nb_antennas_rx>1) {
     for (aatx=0; aatx<frame_parms->nb_antenna_ports_eNB; aatx++) {
       //if (frame_parms->mode1_flag && (aatx>0)) break;
 
-#if defined(__x86_64__) || defined(__i386__)
-      rxdataF_comp128_0   = (__m128i *)&rxdataF_comp[(aatx<<1)][symbol*frame_parms->N_RB_DL*12];
-      rxdataF_comp128_1   = (__m128i *)&rxdataF_comp[(aatx<<1)+1][symbol*frame_parms->N_RB_DL*12];
-#elif defined(__arm__) || defined(__aarch64__)
-      rxdataF_comp128_0   = (int16x8_t *)&rxdataF_comp[(aatx<<1)][symbol*frame_parms->N_RB_DL*12];
-      rxdataF_comp128_1   = (int16x8_t *)&rxdataF_comp[(aatx<<1)+1][symbol*frame_parms->N_RB_DL*12];
-#else
-#error Unsupported CPU architecture, cannot build __FILE__
-#endif
+      rxdataF_comp128_0   = (simde__m128i *)&rxdataF_comp[(aatx<<1)][symbol*frame_parms->N_RB_DL*12];
+      rxdataF_comp128_1   = (simde__m128i *)&rxdataF_comp[(aatx<<1)+1][symbol*frame_parms->N_RB_DL*12];
       // MRC on each re of rb on MF output
       for (i=0; i<frame_parms->N_RB_DL*3; i++) {
-#if defined(__x86_64__) || defined(__i386__)
-        rxdataF_comp128_0[i] = _mm_adds_epi16(_mm_srai_epi16(rxdataF_comp128_0[i],1),_mm_srai_epi16(rxdataF_comp128_1[i],1));
-#elif defined(__arm__) || defined(__aarch64__)
-        rxdataF_comp128_0[i] = vhaddq_s16(rxdataF_comp128_0[i],rxdataF_comp128_1[i]);
-#endif
+        rxdataF_comp128_0[i] = simde_mm_adds_epi16(simde_mm_srai_epi16(rxdataF_comp128_0[i],1),simde_mm_srai_epi16(rxdataF_comp128_1[i],1));
       }
     }
 
-#if defined(__x86_64__) || defined(__i386__)
-    rho128_0 = (__m128i *) &rho[0][symbol*frame_parms->N_RB_DL*12];
-    rho128_1 = (__m128i *) &rho[1][symbol*frame_parms->N_RB_DL*12];
-#elif defined(__arm__) || defined(__aarch64__)
-    rho128_0 = (int16x8_t *) &rho[0][symbol*frame_parms->N_RB_DL*12];
-    rho128_1 = (int16x8_t *) &rho[1][symbol*frame_parms->N_RB_DL*12];
-#endif
+    rho128_0 = (simde__m128i *) &rho[0][symbol*frame_parms->N_RB_DL*12];
+    rho128_1 = (simde__m128i *) &rho[1][symbol*frame_parms->N_RB_DL*12];
     for (i=0; i<frame_parms->N_RB_DL*3; i++) {
-#if defined(__x86_64__) || defined(__i386__)
-      rho128_0[i] = _mm_adds_epi16(_mm_srai_epi16(rho128_0[i],1),_mm_srai_epi16(rho128_1[i],1));
-#elif defined(__arm__) || defined(__aarch64__)
-      rho128_0[i] = vhaddq_s16(rho128_0[i],rho128_1[i]);
-#endif
+      rho128_0[i] = simde_mm_adds_epi16(simde_mm_srai_epi16(rho128_0[i],1),simde_mm_srai_epi16(rho128_1[i],1));
     }
 
-#if defined(__x86_64__) || defined(__i386__)
-    rho128_i0 = (__m128i *) &rho_i[0][symbol*frame_parms->N_RB_DL*12];
-    rho128_i1 = (__m128i *) &rho_i[1][symbol*frame_parms->N_RB_DL*12];
-    rxdataF_comp128_i0   = (__m128i *)&rxdataF_comp_i[0][symbol*frame_parms->N_RB_DL*12];
-    rxdataF_comp128_i1   = (__m128i *)&rxdataF_comp_i[1][symbol*frame_parms->N_RB_DL*12];
-#elif defined(__arm__) || defined(__aarch64__)
-    rho128_i0 = (int16x8_t*) &rho_i[0][symbol*frame_parms->N_RB_DL*12];
-    rho128_i1 = (int16x8_t*) &rho_i[1][symbol*frame_parms->N_RB_DL*12];
-    rxdataF_comp128_i0   = (int16x8_t *)&rxdataF_comp_i[0][symbol*frame_parms->N_RB_DL*12];
-    rxdataF_comp128_i1   = (int16x8_t *)&rxdataF_comp_i[1][symbol*frame_parms->N_RB_DL*12];
-
-#endif
+    rho128_i0 = (simde__m128i *) &rho_i[0][symbol*frame_parms->N_RB_DL*12];
+    rho128_i1 = (simde__m128i *) &rho_i[1][symbol*frame_parms->N_RB_DL*12];
+    rxdataF_comp128_i0   = (simde__m128i *)&rxdataF_comp_i[0][symbol*frame_parms->N_RB_DL*12];
+    rxdataF_comp128_i1   = (simde__m128i *)&rxdataF_comp_i[1][symbol*frame_parms->N_RB_DL*12];
     // MRC on each re of rb on MF and rho
     for (i=0; i<frame_parms->N_RB_DL*3; i++) {
-#if defined(__x86_64__) || defined(__i386__)
-      rxdataF_comp128_i0[i] = _mm_adds_epi16(_mm_srai_epi16(rxdataF_comp128_i0[i],1),_mm_srai_epi16(rxdataF_comp128_i1[i],1));
-      rho128_i0[i]          = _mm_adds_epi16(_mm_srai_epi16(rho128_i0[i],1),_mm_srai_epi16(rho128_i1[i],1));
-#elif defined(__arm__) || defined(__aarch64__)
-      rxdataF_comp128_i0[i] = vhaddq_s16(rxdataF_comp128_i0[i],rxdataF_comp128_i1[i]);
-      rho128_i0[i]          = vhaddq_s16(rho128_i0[i],rho128_i1[i]);
-
-#endif
+      rxdataF_comp128_i0[i] = simde_mm_adds_epi16(simde_mm_srai_epi16(rxdataF_comp128_i0[i],1),simde_mm_srai_epi16(rxdataF_comp128_i1[i],1));
+      rho128_i0[i]          = simde_mm_adds_epi16(simde_mm_srai_epi16(rho128_i0[i],1),simde_mm_srai_epi16(rho128_i1[i],1));
     }
   }
 
-#if defined(__x86_64__) || defined(__i386__)
-  _mm_empty();
-  _m_empty();
-#endif
+  simde_mm_empty();
+  simde_m_empty();
 }
 
 
@@ -962,13 +896,9 @@ void pdcch_channel_compensation(int32_t **rxdataF_ext,
 
   uint16_t rb;
 
-#if defined(__x86_64__) || defined(__i386__)
-  __m128i *dl_ch128,*rxdataF128,*rxdataF_comp128;
-  __m128i *dl_ch128_2, *rho128;
-  __m128i mmtmpPD0,mmtmpPD1,mmtmpPD2,mmtmpPD3;
-#elif defined(__arm__) || defined(__aarch64__)
-
-#endif
+  simde__m128i *dl_ch128,*rxdataF128,*rxdataF_comp128;
+  simde__m128i *dl_ch128_2, *rho128;
+  simde__m128i mmtmpPD0,mmtmpPD1,mmtmpPD2,mmtmpPD3;
   uint8_t aatx,aarx,pilots=0;
 
 
@@ -986,75 +916,69 @@ void pdcch_channel_compensation(int32_t **rxdataF_ext,
 
     for (aarx=0; aarx<frame_parms->nb_antennas_rx; aarx++) {
 
-#if defined(__x86_64__) || defined(__i386__)
-      dl_ch128          = (__m128i *)&dl_ch_estimates_ext[(aatx<<1)+aarx][symbol*frame_parms->N_RB_DL*12];
-      rxdataF128        = (__m128i *)&rxdataF_ext[aarx][symbol*frame_parms->N_RB_DL*12];
-      rxdataF_comp128   = (__m128i *)&rxdataF_comp[(aatx<<1)+aarx][symbol*frame_parms->N_RB_DL*12];
-
-#elif defined(__arm__) || defined(__aarch64__)
-
-#endif
+      dl_ch128          = (simde__m128i *)&dl_ch_estimates_ext[(aatx<<1)+aarx][symbol*frame_parms->N_RB_DL*12];
+      rxdataF128        = (simde__m128i *)&rxdataF_ext[aarx][symbol*frame_parms->N_RB_DL*12];
+      rxdataF_comp128   = (simde__m128i *)&rxdataF_comp[(aatx<<1)+aarx][symbol*frame_parms->N_RB_DL*12];
 
       for (rb=0; rb<frame_parms->N_RB_DL; rb++) {
 
-#if defined(__x86_64__) || defined(__i386__)
         // multiply by conjugated channel
-        mmtmpPD0 = _mm_madd_epi16(dl_ch128[0],rxdataF128[0]);
+        mmtmpPD0 = simde_mm_madd_epi16(dl_ch128[0],rxdataF128[0]);
         //  print_ints("re",&mmtmpPD0);
 
         // mmtmpPD0 contains real part of 4 consecutive outputs (32-bit)
-        mmtmpPD1 = _mm_shufflelo_epi16(dl_ch128[0],_MM_SHUFFLE(2,3,0,1));
-        mmtmpPD1 = _mm_shufflehi_epi16(mmtmpPD1,_MM_SHUFFLE(2,3,0,1));
-        mmtmpPD1 = _mm_sign_epi16(mmtmpPD1,*(__m128i*)&conjugate[0]);
+        mmtmpPD1 = simde_mm_shufflelo_epi16(dl_ch128[0], SIMDE_MM_SHUFFLE(2,3,0,1));
+        mmtmpPD1 = simde_mm_shufflehi_epi16(mmtmpPD1, SIMDE_MM_SHUFFLE(2,3,0,1));
+        mmtmpPD1 = simde_mm_sign_epi16(mmtmpPD1,*(simde__m128i*)&conjugate[0]);
         //  print_ints("im",&mmtmpPD1);
-        mmtmpPD1 = _mm_madd_epi16(mmtmpPD1,rxdataF128[0]);
+        mmtmpPD1 = simde_mm_madd_epi16(mmtmpPD1,rxdataF128[0]);
         // mmtmpPD1 contains imag part of 4 consecutive outputs (32-bit)
-        mmtmpPD0 = _mm_srai_epi32(mmtmpPD0,output_shift);
+        mmtmpPD0 = simde_mm_srai_epi32(mmtmpPD0,output_shift);
         //  print_ints("re(shift)",&mmtmpPD0);
-        mmtmpPD1 = _mm_srai_epi32(mmtmpPD1,output_shift);
+        mmtmpPD1 = simde_mm_srai_epi32(mmtmpPD1,output_shift);
         //  print_ints("im(shift)",&mmtmpPD1);
-        mmtmpPD2 = _mm_unpacklo_epi32(mmtmpPD0,mmtmpPD1);
-        mmtmpPD3 = _mm_unpackhi_epi32(mmtmpPD0,mmtmpPD1);
+        mmtmpPD2 = simde_mm_unpacklo_epi32(mmtmpPD0,mmtmpPD1);
+        mmtmpPD3 = simde_mm_unpackhi_epi32(mmtmpPD0,mmtmpPD1);
         //        print_ints("c0",&mmtmpPD2);
         //  print_ints("c1",&mmtmpPD3);
-        rxdataF_comp128[0] = _mm_packs_epi32(mmtmpPD2,mmtmpPD3);
+        rxdataF_comp128[0] = simde_mm_packs_epi32(mmtmpPD2,mmtmpPD3);
         //  print_shorts("rx:",rxdataF128);
         //  print_shorts("ch:",dl_ch128);
         //  print_shorts("pack:",rxdataF_comp128);
 
         // multiply by conjugated channel
-        mmtmpPD0 = _mm_madd_epi16(dl_ch128[1],rxdataF128[1]);
+        mmtmpPD0 = simde_mm_madd_epi16(dl_ch128[1],rxdataF128[1]);
         // mmtmpPD0 contains real part of 4 consecutive outputs (32-bit)
-        mmtmpPD1 = _mm_shufflelo_epi16(dl_ch128[1],_MM_SHUFFLE(2,3,0,1));
-        mmtmpPD1 = _mm_shufflehi_epi16(mmtmpPD1,_MM_SHUFFLE(2,3,0,1));
-        mmtmpPD1 = _mm_sign_epi16(mmtmpPD1,*(__m128i*)conjugate);
-        mmtmpPD1 = _mm_madd_epi16(mmtmpPD1,rxdataF128[1]);
+        mmtmpPD1 = simde_mm_shufflelo_epi16(dl_ch128[1], SIMDE_MM_SHUFFLE(2,3,0,1));
+        mmtmpPD1 = simde_mm_shufflehi_epi16(mmtmpPD1, SIMDE_MM_SHUFFLE(2,3,0,1));
+        mmtmpPD1 = simde_mm_sign_epi16(mmtmpPD1,*(simde__m128i*)conjugate);
+        mmtmpPD1 = simde_mm_madd_epi16(mmtmpPD1,rxdataF128[1]);
         // mmtmpPD1 contains imag part of 4 consecutive outputs (32-bit)
-        mmtmpPD0 = _mm_srai_epi32(mmtmpPD0,output_shift);
-        mmtmpPD1 = _mm_srai_epi32(mmtmpPD1,output_shift);
-        mmtmpPD2 = _mm_unpacklo_epi32(mmtmpPD0,mmtmpPD1);
-        mmtmpPD3 = _mm_unpackhi_epi32(mmtmpPD0,mmtmpPD1);
+        mmtmpPD0 = simde_mm_srai_epi32(mmtmpPD0,output_shift);
+        mmtmpPD1 = simde_mm_srai_epi32(mmtmpPD1,output_shift);
+        mmtmpPD2 = simde_mm_unpacklo_epi32(mmtmpPD0,mmtmpPD1);
+        mmtmpPD3 = simde_mm_unpackhi_epi32(mmtmpPD0,mmtmpPD1);
 
-        rxdataF_comp128[1] = _mm_packs_epi32(mmtmpPD2,mmtmpPD3);
+        rxdataF_comp128[1] = simde_mm_packs_epi32(mmtmpPD2,mmtmpPD3);
 
         //  print_shorts("rx:",rxdataF128+1);
         //  print_shorts("ch:",dl_ch128+1);
         //  print_shorts("pack:",rxdataF_comp128+1);
         // multiply by conjugated channel
         if (pilots == 0) {
-          mmtmpPD0 = _mm_madd_epi16(dl_ch128[2],rxdataF128[2]);
+          mmtmpPD0 = simde_mm_madd_epi16(dl_ch128[2],rxdataF128[2]);
           // mmtmpPD0 contains real part of 4 consecutive outputs (32-bit)
-          mmtmpPD1 = _mm_shufflelo_epi16(dl_ch128[2],_MM_SHUFFLE(2,3,0,1));
-          mmtmpPD1 = _mm_shufflehi_epi16(mmtmpPD1,_MM_SHUFFLE(2,3,0,1));
-          mmtmpPD1 = _mm_sign_epi16(mmtmpPD1,*(__m128i*)conjugate);
-          mmtmpPD1 = _mm_madd_epi16(mmtmpPD1,rxdataF128[2]);
+          mmtmpPD1 = simde_mm_shufflelo_epi16(dl_ch128[2], SIMDE_MM_SHUFFLE(2,3,0,1));
+          mmtmpPD1 = simde_mm_shufflehi_epi16(mmtmpPD1, SIMDE_MM_SHUFFLE(2,3,0,1));
+          mmtmpPD1 = simde_mm_sign_epi16(mmtmpPD1,*(simde__m128i*)conjugate);
+          mmtmpPD1 = simde_mm_madd_epi16(mmtmpPD1,rxdataF128[2]);
           // mmtmpPD1 contains imag part of 4 consecutive outputs (32-bit)
-          mmtmpPD0 = _mm_srai_epi32(mmtmpPD0,output_shift);
-          mmtmpPD1 = _mm_srai_epi32(mmtmpPD1,output_shift);
-          mmtmpPD2 = _mm_unpacklo_epi32(mmtmpPD0,mmtmpPD1);
-          mmtmpPD3 = _mm_unpackhi_epi32(mmtmpPD0,mmtmpPD1);
+          mmtmpPD0 = simde_mm_srai_epi32(mmtmpPD0,output_shift);
+          mmtmpPD1 = simde_mm_srai_epi32(mmtmpPD1,output_shift);
+          mmtmpPD2 = simde_mm_unpacklo_epi32(mmtmpPD0,mmtmpPD1);
+          mmtmpPD3 = simde_mm_unpackhi_epi32(mmtmpPD0,mmtmpPD1);
 
-          rxdataF_comp128[2] = _mm_packs_epi32(mmtmpPD2,mmtmpPD3);
+          rxdataF_comp128[2] = simde_mm_packs_epi32(mmtmpPD2,mmtmpPD3);
         }
 
         //  print_shorts("rx:",rxdataF128+2);
@@ -1070,9 +994,6 @@ void pdcch_channel_compensation(int32_t **rxdataF_ext,
           rxdataF128+=2;
           rxdataF_comp128+=2;
         }
-#elif defined(__arm__) || defined(__aarch64__)
-
-#endif
       }
     }
   }
@@ -1082,74 +1003,69 @@ void pdcch_channel_compensation(int32_t **rxdataF_ext,
 
     for (aarx=0; aarx<frame_parms->nb_antennas_rx; aarx++) {
 
-#if defined(__x86_64__) || defined(__i386__)
-      rho128        = (__m128i *)&rho[aarx][symbol*frame_parms->N_RB_DL*12];
-      dl_ch128      = (__m128i *)&dl_ch_estimates_ext[aarx][symbol*frame_parms->N_RB_DL*12];
-      dl_ch128_2    = (__m128i *)&dl_ch_estimates_ext[2+aarx][symbol*frame_parms->N_RB_DL*12];
-
-#elif defined(__arm__) || defined(__aarch64__)
+      rho128        = (simde__m128i *)&rho[aarx][symbol*frame_parms->N_RB_DL*12];
+      dl_ch128      = (simde__m128i *)&dl_ch_estimates_ext[aarx][symbol*frame_parms->N_RB_DL*12];
+      dl_ch128_2    = (simde__m128i *)&dl_ch_estimates_ext[2+aarx][symbol*frame_parms->N_RB_DL*12];
 
-#endif
       for (rb=0; rb<frame_parms->N_RB_DL; rb++) {
-#if defined(__x86_64__) || defined(__i386__)
 
         // multiply by conjugated channel
-        mmtmpPD0 = _mm_madd_epi16(dl_ch128[0],dl_ch128_2[0]);
+        mmtmpPD0 = simde_mm_madd_epi16(dl_ch128[0],dl_ch128_2[0]);
         //  print_ints("re",&mmtmpD0);
 
         // mmtmpD0 contains real part of 4 consecutive outputs (32-bit)
-        mmtmpPD1 = _mm_shufflelo_epi16(dl_ch128[0],_MM_SHUFFLE(2,3,0,1));
-        mmtmpPD1 = _mm_shufflehi_epi16(mmtmpPD1,_MM_SHUFFLE(2,3,0,1));
-        mmtmpPD1 = _mm_sign_epi16(mmtmpPD1,*(__m128i*)&conjugate[0]);
+        mmtmpPD1 = simde_mm_shufflelo_epi16(dl_ch128[0], SIMDE_MM_SHUFFLE(2,3,0,1));
+        mmtmpPD1 = simde_mm_shufflehi_epi16(mmtmpPD1, SIMDE_MM_SHUFFLE(2,3,0,1));
+        mmtmpPD1 = simde_mm_sign_epi16(mmtmpPD1,*(simde__m128i*)&conjugate[0]);
         //  print_ints("im",&mmtmpPD1);
-        mmtmpPD1 = _mm_madd_epi16(mmtmpPD1,dl_ch128_2[0]);
+        mmtmpPD1 = simde_mm_madd_epi16(mmtmpPD1,dl_ch128_2[0]);
         // mmtmpPD1 contains imag part of 4 consecutive outputs (32-bit)
-        mmtmpPD0 = _mm_srai_epi32(mmtmpPD0,output_shift);
+        mmtmpPD0 = simde_mm_srai_epi32(mmtmpPD0,output_shift);
         //  print_ints("re(shift)",&mmtmpD0);
-        mmtmpPD1 = _mm_srai_epi32(mmtmpPD1,output_shift);
+        mmtmpPD1 = simde_mm_srai_epi32(mmtmpPD1,output_shift);
         //  print_ints("im(shift)",&mmtmpD1);
-        mmtmpPD2 = _mm_unpacklo_epi32(mmtmpPD0,mmtmpPD1);
-        mmtmpPD3 = _mm_unpackhi_epi32(mmtmpPD0,mmtmpPD1);
+        mmtmpPD2 = simde_mm_unpacklo_epi32(mmtmpPD0,mmtmpPD1);
+        mmtmpPD3 = simde_mm_unpackhi_epi32(mmtmpPD0,mmtmpPD1);
         //        print_ints("c0",&mmtmpPD2);
         //  print_ints("c1",&mmtmpPD3);
-        rho128[0] = _mm_packs_epi32(mmtmpPD2,mmtmpPD3);
+        rho128[0] = simde_mm_packs_epi32(mmtmpPD2,mmtmpPD3);
 
         //print_shorts("rx:",dl_ch128_2);
         //print_shorts("ch:",dl_ch128);
         //print_shorts("pack:",rho128);
 
         // multiply by conjugated channel
-        mmtmpPD0 = _mm_madd_epi16(dl_ch128[1],dl_ch128_2[1]);
+        mmtmpPD0 = simde_mm_madd_epi16(dl_ch128[1],dl_ch128_2[1]);
         // mmtmpD0 contains real part of 4 consecutive outputs (32-bit)
-        mmtmpPD1 = _mm_shufflelo_epi16(dl_ch128[1],_MM_SHUFFLE(2,3,0,1));
-        mmtmpPD1 = _mm_shufflehi_epi16(mmtmpPD1,_MM_SHUFFLE(2,3,0,1));
-        mmtmpPD1 = _mm_sign_epi16(mmtmpPD1,*(__m128i*)conjugate);
-        mmtmpPD1 = _mm_madd_epi16(mmtmpPD1,dl_ch128_2[1]);
+        mmtmpPD1 = simde_mm_shufflelo_epi16(dl_ch128[1], SIMDE_MM_SHUFFLE(2,3,0,1));
+        mmtmpPD1 = simde_mm_shufflehi_epi16(mmtmpPD1, SIMDE_MM_SHUFFLE(2,3,0,1));
+        mmtmpPD1 = simde_mm_sign_epi16(mmtmpPD1,*(simde__m128i*)conjugate);
+        mmtmpPD1 = simde_mm_madd_epi16(mmtmpPD1,dl_ch128_2[1]);
         // mmtmpD1 contains imag part of 4 consecutive outputs (32-bit)
-        mmtmpPD0 = _mm_srai_epi32(mmtmpPD0,output_shift);
-        mmtmpPD1 = _mm_srai_epi32(mmtmpPD1,output_shift);
-        mmtmpPD2 = _mm_unpacklo_epi32(mmtmpPD0,mmtmpPD1);
-        mmtmpPD3 = _mm_unpackhi_epi32(mmtmpPD0,mmtmpPD1);
+        mmtmpPD0 = simde_mm_srai_epi32(mmtmpPD0,output_shift);
+        mmtmpPD1 = simde_mm_srai_epi32(mmtmpPD1,output_shift);
+        mmtmpPD2 = simde_mm_unpacklo_epi32(mmtmpPD0,mmtmpPD1);
+        mmtmpPD3 = simde_mm_unpackhi_epi32(mmtmpPD0,mmtmpPD1);
 
 
-        rho128[1] =_mm_packs_epi32(mmtmpPD2,mmtmpPD3);
+        rho128[1] = simde_mm_packs_epi32(mmtmpPD2,mmtmpPD3);
         //print_shorts("rx:",dl_ch128_2+1);
         //print_shorts("ch:",dl_ch128+1);
         //print_shorts("pack:",rho128+1);
         // multiply by conjugated channel
-        mmtmpPD0 = _mm_madd_epi16(dl_ch128[2],dl_ch128_2[2]);
+        mmtmpPD0 = simde_mm_madd_epi16(dl_ch128[2],dl_ch128_2[2]);
         // mmtmpPD0 contains real part of 4 consecutive outputs (32-bit)
-        mmtmpPD1 = _mm_shufflelo_epi16(dl_ch128[2],_MM_SHUFFLE(2,3,0,1));
-        mmtmpPD1 = _mm_shufflehi_epi16(mmtmpPD1,_MM_SHUFFLE(2,3,0,1));
-        mmtmpPD1 = _mm_sign_epi16(mmtmpPD1,*(__m128i*)conjugate);
-        mmtmpPD1 = _mm_madd_epi16(mmtmpPD1,dl_ch128_2[2]);
+        mmtmpPD1 = simde_mm_shufflelo_epi16(dl_ch128[2], SIMDE_MM_SHUFFLE(2,3,0,1));
+        mmtmpPD1 = simde_mm_shufflehi_epi16(mmtmpPD1, SIMDE_MM_SHUFFLE(2,3,0,1));
+        mmtmpPD1 = simde_mm_sign_epi16(mmtmpPD1,*(simde__m128i*)conjugate);
+        mmtmpPD1 = simde_mm_madd_epi16(mmtmpPD1,dl_ch128_2[2]);
         // mmtmpPD1 contains imag part of 4 consecutive outputs (32-bit)
-        mmtmpPD0 = _mm_srai_epi32(mmtmpPD0,output_shift);
-        mmtmpPD1 = _mm_srai_epi32(mmtmpPD1,output_shift);
-        mmtmpPD2 = _mm_unpacklo_epi32(mmtmpPD0,mmtmpPD1);
-        mmtmpPD3 = _mm_unpackhi_epi32(mmtmpPD0,mmtmpPD1);
+        mmtmpPD0 = simde_mm_srai_epi32(mmtmpPD0,output_shift);
+        mmtmpPD1 = simde_mm_srai_epi32(mmtmpPD1,output_shift);
+        mmtmpPD2 = simde_mm_unpacklo_epi32(mmtmpPD0,mmtmpPD1);
+        mmtmpPD3 = simde_mm_unpackhi_epi32(mmtmpPD0,mmtmpPD1);
 
-        rho128[2] = _mm_packs_epi32(mmtmpPD2,mmtmpPD3);
+        rho128[2] = simde_mm_packs_epi32(mmtmpPD2,mmtmpPD3);
         //print_shorts("rx:",dl_ch128_2+2);
         //print_shorts("ch:",dl_ch128+2);
         //print_shorts("pack:",rho128+2);
@@ -1157,20 +1073,13 @@ void pdcch_channel_compensation(int32_t **rxdataF_ext,
         dl_ch128+=3;
         dl_ch128_2+=3;
         rho128+=3;
-
-#elif defined(__arm_)
-
-
-#endif
       }
     }
 
   }
 
-#if defined(__x86_64__) || defined(__i386__)
-  _mm_empty();
-  _m_empty();
-#endif
+  simde_mm_empty();
+  simde_m_empty();
 }
 
 void pdcch_detection_mrc(LTE_DL_FRAME_PARMS *frame_parms,
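The rho blocks in pdcch_channel_compensation above are the same conjugate-multiply kernel, applied to two channel estimates rather than to channel and received signal. With a helper along the lines of the hypothetical cmul_conj4 sketched earlier, each quarter-RB would reduce to:

  for (int k = 0; k < 3; k++)
    rho128[k] = cmul_conj4(dl_ch128[k], dl_ch128_2[k], output_shift);
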
@@ -1180,37 +1089,22 @@ void pdcch_detection_mrc(LTE_DL_FRAME_PARMS *frame_parms,
 
   uint8_t aatx;
 
-#if defined(__x86_64__) || defined(__i386__)
-  __m128i *rxdataF_comp128_0,*rxdataF_comp128_1;
-#elif defined(__arm__) || defined(__aarch64__)
- int16x8_t *rxdataF_comp128_0,*rxdataF_comp128_1;
-#endif
+  simde__m128i *rxdataF_comp128_0,*rxdataF_comp128_1;
   int32_t i;
 
   if (frame_parms->nb_antennas_rx>1) {
     for (aatx=0; aatx<frame_parms->nb_antenna_ports_eNB; aatx++) {
-#if defined(__x86_64__) || defined(__i386__)
-      rxdataF_comp128_0   = (__m128i *)&rxdataF_comp[(aatx<<1)][symbol*frame_parms->N_RB_DL*12];
-      rxdataF_comp128_1   = (__m128i *)&rxdataF_comp[(aatx<<1)+1][symbol*frame_parms->N_RB_DL*12];
-#elif defined(__arm__) || defined(__aarch64__)
-      rxdataF_comp128_0   = (int16x8_t *)&rxdataF_comp[(aatx<<1)][symbol*frame_parms->N_RB_DL*12];
-      rxdataF_comp128_1   = (int16x8_t *)&rxdataF_comp[(aatx<<1)+1][symbol*frame_parms->N_RB_DL*12];
-#endif
+      rxdataF_comp128_0   = (simde__m128i *)&rxdataF_comp[(aatx<<1)][symbol*frame_parms->N_RB_DL*12];
+      rxdataF_comp128_1   = (simde__m128i *)&rxdataF_comp[(aatx<<1)+1][symbol*frame_parms->N_RB_DL*12];
       // MRC on each re of rb
       for (i=0; i<frame_parms->N_RB_DL*3; i++) {
-#if defined(__x86_64__) || defined(__i386__)
-        rxdataF_comp128_0[i] = _mm_adds_epi16(_mm_srai_epi16(rxdataF_comp128_0[i],1),_mm_srai_epi16(rxdataF_comp128_1[i],1));
-#elif defined(__arm__) || defined(__aarch64__)
-        rxdataF_comp128_0[i] = vhaddq_s16(rxdataF_comp128_0[i],rxdataF_comp128_1[i]);
-#endif
+        rxdataF_comp128_0[i] = simde_mm_adds_epi16(simde_mm_srai_epi16(rxdataF_comp128_0[i],1),simde_mm_srai_epi16(rxdataF_comp128_1[i],1));
       }
     }
   }
 
-#if defined(__x86_64__) || defined(__i386__)
-  _mm_empty();
-  _m_empty();
-#endif
+  simde_mm_empty();
+  simde_m_empty();
 
 }
 
diff --git a/openair1/PHY/LTE_UE_TRANSPORT/dlsch_demodulation.c b/openair1/PHY/LTE_UE_TRANSPORT/dlsch_demodulation.c
index 416a5dbf39065668b187f839759577d9d82e7236..2e29fd41d1d02650aa4c8d76c18e3b6e70c22afb 100644
--- a/openair1/PHY/LTE_UE_TRANSPORT/dlsch_demodulation.c
+++ b/openair1/PHY/LTE_UE_TRANSPORT/dlsch_demodulation.c
@@ -1260,11 +1260,10 @@ void dlsch_channel_compensation(int **rxdataF_ext,
                                 unsigned short nb_rb,
                                 unsigned char output_shift,
                                 PHY_MEASUREMENTS *measurements) {
-#if defined(__i386) || defined(__x86_64)
   unsigned short rb;
   unsigned char aatx,aarx,symbol_mod,pilots=0;
-  __m128i *dl_ch128,*dl_ch128_2,*dl_ch_mag128,*dl_ch_mag128b,*rxdataF128,*rxdataF_comp128,*rho128;
-  __m128i mmtmpD0,mmtmpD1,mmtmpD2,mmtmpD3,QAM_amp128={0};
+  simde__m128i *dl_ch128,*dl_ch128_2,*dl_ch_mag128,*dl_ch_mag128b,*rxdataF128,*rxdataF_comp128,*rho128;
+  simde__m128i mmtmpD0,mmtmpD1,mmtmpD2,mmtmpD3,QAM_amp128={0};
   symbol_mod = (symbol>=(7-frame_parms->Ncp)) ? symbol-(7-frame_parms->Ncp) : symbol;
 
   if ((symbol_mod == 0) || (symbol_mod == (4-frame_parms->Ncp))) {
@@ -1275,13 +1274,13 @@ void dlsch_channel_compensation(int **rxdataF_ext,
   }
 
   for (aatx=0; aatx<frame_parms->nb_antenna_ports_eNB; aatx++) {
-    __m128i QAM_amp128b = _mm_setzero_si128();
+    simde__m128i QAM_amp128b = simde_mm_setzero_si128();
 
     if (mod_order == 4) {
-      QAM_amp128 = _mm_set1_epi16(QAM16_n1);  // 2/sqrt(10)
+      QAM_amp128 = simde_mm_set1_epi16(QAM16_n1);  // 2/sqrt(10)
     } else if (mod_order == 6) {
-      QAM_amp128  = _mm_set1_epi16(QAM64_n1); //
-      QAM_amp128b = _mm_set1_epi16(QAM64_n2);
+      QAM_amp128  = simde_mm_set1_epi16(QAM64_n1); //
+      QAM_amp128b = simde_mm_set1_epi16(QAM64_n2);
     }
 
     for (aarx=0; aarx<frame_parms->nb_antennas_rx; aarx++) {
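The QAM amplitudes set up just above (QAM16_n1, annotated as 2/sqrt(10), and QAM64_n1/QAM64_n2 for 64-QAM) are Q15 constants applied to the channel magnitudes further down via mulhi followed by a left shift of 1, i.e. a fixed-point multiply of approximately (mag*amp) >> 15. A sketch, assuming SIMDE (scale_q15 is a hypothetical helper):

#include <simde/x86/sse2.h>

static simde__m128i scale_q15(simde__m128i mag, int16_t amp_q15)
{
  simde__m128i amp = simde_mm_set1_epi16(amp_q15);
  /* mulhi keeps the top 16 bits of the 32-bit product: (mag*amp) >> 16;
   * the extra left shift by 1 restores Q15 scaling, ~ (mag*amp) >> 15 */
  simde__m128i r = simde_mm_mulhi_epi16(mag, amp);
  return simde_mm_slli_epi16(r, 1);
}
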
@@ -1290,107 +1289,107 @@ void dlsch_channel_compensation(int **rxdataF_ext,
        * Elena's commit.
        */
       int x = frame_parms->nb_antennas_rx > 1 ? frame_parms->nb_antennas_rx : 2;
-      dl_ch128          = (__m128i *)&dl_ch_estimates_ext[aatx*x + aarx][symbol*frame_parms->N_RB_DL*12];
+      dl_ch128          = (simde__m128i *)&dl_ch_estimates_ext[aatx*x + aarx][symbol*frame_parms->N_RB_DL*12];
       //print_shorts("dl_ch128[0]=",&dl_ch128[0]);*/
-      dl_ch_mag128      = (__m128i *)&dl_ch_mag[aatx*x + aarx][symbol*frame_parms->N_RB_DL*12];
-      dl_ch_mag128b     = (__m128i *)&dl_ch_magb[aatx*x + aarx][symbol*frame_parms->N_RB_DL*12];
-      rxdataF128        = (__m128i *)&rxdataF_ext[aarx][symbol*frame_parms->N_RB_DL*12];
-      rxdataF_comp128   = (__m128i *)&rxdataF_comp[aatx*x + aarx][symbol*frame_parms->N_RB_DL*12];
+      dl_ch_mag128      = (simde__m128i *)&dl_ch_mag[aatx*x + aarx][symbol*frame_parms->N_RB_DL*12];
+      dl_ch_mag128b     = (simde__m128i *)&dl_ch_magb[aatx*x + aarx][symbol*frame_parms->N_RB_DL*12];
+      rxdataF128        = (simde__m128i *)&rxdataF_ext[aarx][symbol*frame_parms->N_RB_DL*12];
+      rxdataF_comp128   = (simde__m128i *)&rxdataF_comp[aatx*x + aarx][symbol*frame_parms->N_RB_DL*12];
 
       for (rb=0; rb<nb_rb; rb++) {
         if (mod_order>2) {
           // get channel amplitude if not QPSK
-          mmtmpD0 = _mm_madd_epi16(dl_ch128[0],dl_ch128[0]);
-          mmtmpD0 = _mm_srai_epi32(mmtmpD0,output_shift);
-          mmtmpD1 = _mm_madd_epi16(dl_ch128[1],dl_ch128[1]);
-          mmtmpD1 = _mm_srai_epi32(mmtmpD1,output_shift);
-          mmtmpD0 = _mm_packs_epi32(mmtmpD0,mmtmpD1);
+          mmtmpD0 = simde_mm_madd_epi16(dl_ch128[0],dl_ch128[0]);
+          mmtmpD0 = simde_mm_srai_epi32(mmtmpD0,output_shift);
+          mmtmpD1 = simde_mm_madd_epi16(dl_ch128[1],dl_ch128[1]);
+          mmtmpD1 = simde_mm_srai_epi32(mmtmpD1,output_shift);
+          mmtmpD0 = simde_mm_packs_epi32(mmtmpD0,mmtmpD1);
           // store channel magnitude here in a new field of dlsch
-          dl_ch_mag128[0] = _mm_unpacklo_epi16(mmtmpD0,mmtmpD0);
+          dl_ch_mag128[0] = simde_mm_unpacklo_epi16(mmtmpD0,mmtmpD0);
           dl_ch_mag128b[0] = dl_ch_mag128[0];
-          dl_ch_mag128[0] = _mm_mulhi_epi16(dl_ch_mag128[0],QAM_amp128);
-          dl_ch_mag128[0] = _mm_slli_epi16(dl_ch_mag128[0],1);
+          dl_ch_mag128[0] = simde_mm_mulhi_epi16(dl_ch_mag128[0],QAM_amp128);
+          dl_ch_mag128[0] = simde_mm_slli_epi16(dl_ch_mag128[0],1);
           //print_ints("Re(ch):",(int16_t*)&mmtmpD0);
           //print_shorts("QAM_amp:",(int16_t*)&QAM_amp128);
           //print_shorts("mag:",(int16_t*)&dl_ch_mag128[0]);
-          dl_ch_mag128[1] = _mm_unpackhi_epi16(mmtmpD0,mmtmpD0);
+          dl_ch_mag128[1] = simde_mm_unpackhi_epi16(mmtmpD0,mmtmpD0);
           dl_ch_mag128b[1] = dl_ch_mag128[1];
-          dl_ch_mag128[1] = _mm_mulhi_epi16(dl_ch_mag128[1],QAM_amp128);
-          dl_ch_mag128[1] = _mm_slli_epi16(dl_ch_mag128[1],1);
+          dl_ch_mag128[1] = simde_mm_mulhi_epi16(dl_ch_mag128[1],QAM_amp128);
+          dl_ch_mag128[1] = simde_mm_slli_epi16(dl_ch_mag128[1],1);
 
           if (pilots==0) {
-            mmtmpD0 = _mm_madd_epi16(dl_ch128[2],dl_ch128[2]);
-            mmtmpD0 = _mm_srai_epi32(mmtmpD0,output_shift);
-            mmtmpD1 = _mm_packs_epi32(mmtmpD0,mmtmpD0);
-            dl_ch_mag128[2] = _mm_unpacklo_epi16(mmtmpD1,mmtmpD1);
+            mmtmpD0 = simde_mm_madd_epi16(dl_ch128[2],dl_ch128[2]);
+            mmtmpD0 = simde_mm_srai_epi32(mmtmpD0,output_shift);
+            mmtmpD1 = simde_mm_packs_epi32(mmtmpD0,mmtmpD0);
+            dl_ch_mag128[2] = simde_mm_unpacklo_epi16(mmtmpD1,mmtmpD1);
             dl_ch_mag128b[2] = dl_ch_mag128[2];
-            dl_ch_mag128[2] = _mm_mulhi_epi16(dl_ch_mag128[2],QAM_amp128);
-            dl_ch_mag128[2] = _mm_slli_epi16(dl_ch_mag128[2],1);
+            dl_ch_mag128[2] = simde_mm_mulhi_epi16(dl_ch_mag128[2],QAM_amp128);
+            dl_ch_mag128[2] = simde_mm_slli_epi16(dl_ch_mag128[2],1);
           }
 
-          dl_ch_mag128b[0] = _mm_mulhi_epi16(dl_ch_mag128b[0],QAM_amp128b);
-          dl_ch_mag128b[0] = _mm_slli_epi16(dl_ch_mag128b[0],1);
-          dl_ch_mag128b[1] = _mm_mulhi_epi16(dl_ch_mag128b[1],QAM_amp128b);
-          dl_ch_mag128b[1] = _mm_slli_epi16(dl_ch_mag128b[1],1);
+          dl_ch_mag128b[0] = simde_mm_mulhi_epi16(dl_ch_mag128b[0],QAM_amp128b);
+          dl_ch_mag128b[0] = simde_mm_slli_epi16(dl_ch_mag128b[0],1);
+          dl_ch_mag128b[1] = simde_mm_mulhi_epi16(dl_ch_mag128b[1],QAM_amp128b);
+          dl_ch_mag128b[1] = simde_mm_slli_epi16(dl_ch_mag128b[1],1);
 
           if (pilots==0) {
-            dl_ch_mag128b[2] = _mm_mulhi_epi16(dl_ch_mag128b[2],QAM_amp128b);
-            dl_ch_mag128b[2] = _mm_slli_epi16(dl_ch_mag128b[2],1);
+            dl_ch_mag128b[2] = simde_mm_mulhi_epi16(dl_ch_mag128b[2],QAM_amp128b);
+            dl_ch_mag128b[2] = simde_mm_slli_epi16(dl_ch_mag128b[2],1);
           }
         }
 
         // multiply by conjugated channel
-        mmtmpD0 = _mm_madd_epi16(dl_ch128[0],rxdataF128[0]);
+        mmtmpD0 = simde_mm_madd_epi16(dl_ch128[0],rxdataF128[0]);
         // mmtmpD0 contains real part of 4 consecutive outputs (32-bit)
-        mmtmpD1 = _mm_shufflelo_epi16(dl_ch128[0],_MM_SHUFFLE(2,3,0,1));
-        mmtmpD1 = _mm_shufflehi_epi16(mmtmpD1,_MM_SHUFFLE(2,3,0,1));
-        mmtmpD1 = _mm_sign_epi16(mmtmpD1,*(__m128i *)&conjugate[0]);
+        mmtmpD1 = simde_mm_shufflelo_epi16(dl_ch128[0], SIMDE_MM_SHUFFLE(2,3,0,1));
+        mmtmpD1 = simde_mm_shufflehi_epi16(mmtmpD1, SIMDE_MM_SHUFFLE(2,3,0,1));
+        mmtmpD1 = simde_mm_sign_epi16(mmtmpD1,*(simde__m128i *)&conjugate[0]);
         //  print_ints("im",&mmtmpD1);
-        mmtmpD1 = _mm_madd_epi16(mmtmpD1,rxdataF128[0]);
+        mmtmpD1 = simde_mm_madd_epi16(mmtmpD1,rxdataF128[0]);
         // mmtmpD1 contains imag part of 4 consecutive outputs (32-bit)
-        mmtmpD0 = _mm_srai_epi32(mmtmpD0,output_shift);
+        mmtmpD0 = simde_mm_srai_epi32(mmtmpD0,output_shift);
         //  print_ints("re(shift)",&mmtmpD0);
-        mmtmpD1 = _mm_srai_epi32(mmtmpD1,output_shift);
+        mmtmpD1 = simde_mm_srai_epi32(mmtmpD1,output_shift);
         //  print_ints("im(shift)",&mmtmpD1);
-        mmtmpD2 = _mm_unpacklo_epi32(mmtmpD0,mmtmpD1);
-        mmtmpD3 = _mm_unpackhi_epi32(mmtmpD0,mmtmpD1);
+        mmtmpD2 = simde_mm_unpacklo_epi32(mmtmpD0,mmtmpD1);
+        mmtmpD3 = simde_mm_unpackhi_epi32(mmtmpD0,mmtmpD1);
         //        print_ints("c0",&mmtmpD2);
         //  print_ints("c1",&mmtmpD3);
-        rxdataF_comp128[0] = _mm_packs_epi32(mmtmpD2,mmtmpD3);
+        rxdataF_comp128[0] = simde_mm_packs_epi32(mmtmpD2,mmtmpD3);
         //  print_shorts("rx:",rxdataF128);
         //  print_shorts("ch:",dl_ch128);
         //  print_shorts("pack:",rxdataF_comp128);
         // multiply by conjugated channel
-        mmtmpD0 = _mm_madd_epi16(dl_ch128[1],rxdataF128[1]);
+        mmtmpD0 = simde_mm_madd_epi16(dl_ch128[1],rxdataF128[1]);
         // mmtmpD0 contains real part of 4 consecutive outputs (32-bit)
-        mmtmpD1 = _mm_shufflelo_epi16(dl_ch128[1],_MM_SHUFFLE(2,3,0,1));
-        mmtmpD1 = _mm_shufflehi_epi16(mmtmpD1,_MM_SHUFFLE(2,3,0,1));
-        mmtmpD1 = _mm_sign_epi16(mmtmpD1,*(__m128i *)conjugate);
-        mmtmpD1 = _mm_madd_epi16(mmtmpD1,rxdataF128[1]);
+        mmtmpD1 = simde_mm_shufflelo_epi16(dl_ch128[1], SIMDE_MM_SHUFFLE(2,3,0,1));
+        mmtmpD1 = simde_mm_shufflehi_epi16(mmtmpD1, SIMDE_MM_SHUFFLE(2,3,0,1));
+        mmtmpD1 = simde_mm_sign_epi16(mmtmpD1,*(simde__m128i *)conjugate);
+        mmtmpD1 = simde_mm_madd_epi16(mmtmpD1,rxdataF128[1]);
         // mmtmpD1 contains imag part of 4 consecutive outputs (32-bit)
-        mmtmpD0 = _mm_srai_epi32(mmtmpD0,output_shift);
-        mmtmpD1 = _mm_srai_epi32(mmtmpD1,output_shift);
-        mmtmpD2 = _mm_unpacklo_epi32(mmtmpD0,mmtmpD1);
-        mmtmpD3 = _mm_unpackhi_epi32(mmtmpD0,mmtmpD1);
-        rxdataF_comp128[1] = _mm_packs_epi32(mmtmpD2,mmtmpD3);
+        mmtmpD0 = simde_mm_srai_epi32(mmtmpD0,output_shift);
+        mmtmpD1 = simde_mm_srai_epi32(mmtmpD1,output_shift);
+        mmtmpD2 = simde_mm_unpacklo_epi32(mmtmpD0,mmtmpD1);
+        mmtmpD3 = simde_mm_unpackhi_epi32(mmtmpD0,mmtmpD1);
+        rxdataF_comp128[1] = simde_mm_packs_epi32(mmtmpD2,mmtmpD3);
         //  print_shorts("rx:",rxdataF128+1);
         //  print_shorts("ch:",dl_ch128+1);
         //  print_shorts("pack:",rxdataF_comp128+1);
 
         if (pilots==0) {
           // multiply by conjugated channel
-          mmtmpD0 = _mm_madd_epi16(dl_ch128[2],rxdataF128[2]);
+          mmtmpD0 = simde_mm_madd_epi16(dl_ch128[2],rxdataF128[2]);
           // mmtmpD0 contains real part of 4 consecutive outputs (32-bit)
-          mmtmpD1 = _mm_shufflelo_epi16(dl_ch128[2],_MM_SHUFFLE(2,3,0,1));
-          mmtmpD1 = _mm_shufflehi_epi16(mmtmpD1,_MM_SHUFFLE(2,3,0,1));
-          mmtmpD1 = _mm_sign_epi16(mmtmpD1,*(__m128i *)conjugate);
-          mmtmpD1 = _mm_madd_epi16(mmtmpD1,rxdataF128[2]);
+          mmtmpD1 = simde_mm_shufflelo_epi16(dl_ch128[2], SIMDE_MM_SHUFFLE(2,3,0,1));
+          mmtmpD1 = simde_mm_shufflehi_epi16(mmtmpD1, SIMDE_MM_SHUFFLE(2,3,0,1));
+          mmtmpD1 = simde_mm_sign_epi16(mmtmpD1,*(simde__m128i *)conjugate);
+          mmtmpD1 = simde_mm_madd_epi16(mmtmpD1,rxdataF128[2]);
           // mmtmpD1 contains imag part of 4 consecutive outputs (32-bit)
-          mmtmpD0 = _mm_srai_epi32(mmtmpD0,output_shift);
-          mmtmpD1 = _mm_srai_epi32(mmtmpD1,output_shift);
-          mmtmpD2 = _mm_unpacklo_epi32(mmtmpD0,mmtmpD1);
-          mmtmpD3 = _mm_unpackhi_epi32(mmtmpD0,mmtmpD1);
-          rxdataF_comp128[2] = _mm_packs_epi32(mmtmpD2,mmtmpD3);
+          mmtmpD0 = simde_mm_srai_epi32(mmtmpD0,output_shift);
+          mmtmpD1 = simde_mm_srai_epi32(mmtmpD1,output_shift);
+          mmtmpD2 = simde_mm_unpacklo_epi32(mmtmpD0,mmtmpD1);
+          mmtmpD3 = simde_mm_unpackhi_epi32(mmtmpD0,mmtmpD1);
+          rxdataF_comp128[2] = simde_mm_packs_epi32(mmtmpD2,mmtmpD3);
           //  print_shorts("rx:",rxdataF128+2);
           //  print_shorts("ch:",dl_ch128+2);
           // print_shorts("pack:",rxdataF_comp128+2);
@@ -1412,62 +1411,62 @@ void dlsch_channel_compensation(int **rxdataF_ext,
 
   if (rho) {
     for (aarx=0; aarx<frame_parms->nb_antennas_rx; aarx++) {
-      rho128        = (__m128i *)&rho[aarx][symbol*frame_parms->N_RB_DL*12];
-      dl_ch128      = (__m128i *)&dl_ch_estimates_ext[aarx][symbol*frame_parms->N_RB_DL*12];
-      dl_ch128_2    = (__m128i *)&dl_ch_estimates_ext[2+aarx][symbol*frame_parms->N_RB_DL*12];
+      rho128        = (simde__m128i *)&rho[aarx][symbol*frame_parms->N_RB_DL*12];
+      dl_ch128      = (simde__m128i *)&dl_ch_estimates_ext[aarx][symbol*frame_parms->N_RB_DL*12];
+      dl_ch128_2    = (simde__m128i *)&dl_ch_estimates_ext[2+aarx][symbol*frame_parms->N_RB_DL*12];
 
       for (rb=0; rb<nb_rb; rb++) {
         // multiply by conjugated channel
-        mmtmpD0 = _mm_madd_epi16(dl_ch128[0],dl_ch128_2[0]);
+        mmtmpD0 = simde_mm_madd_epi16(dl_ch128[0],dl_ch128_2[0]);
         //  print_ints("re",&mmtmpD0);
         // mmtmpD0 contains real part of 4 consecutive outputs (32-bit)
-        mmtmpD1 = _mm_shufflelo_epi16(dl_ch128[0],_MM_SHUFFLE(2,3,0,1));
-        mmtmpD1 = _mm_shufflehi_epi16(mmtmpD1,_MM_SHUFFLE(2,3,0,1));
-        mmtmpD1 = _mm_sign_epi16(mmtmpD1,*(__m128i *)&conjugate[0]);
+        mmtmpD1 = simde_mm_shufflelo_epi16(dl_ch128[0], SIMDE_MM_SHUFFLE(2,3,0,1));
+        mmtmpD1 = simde_mm_shufflehi_epi16(mmtmpD1, SIMDE_MM_SHUFFLE(2,3,0,1));
+        mmtmpD1 = simde_mm_sign_epi16(mmtmpD1,*(simde__m128i *)&conjugate[0]);
         //  print_ints("im",&mmtmpD1);
-        mmtmpD1 = _mm_madd_epi16(mmtmpD1,dl_ch128_2[0]);
+        mmtmpD1 = simde_mm_madd_epi16(mmtmpD1,dl_ch128_2[0]);
         // mmtmpD1 contains imag part of 4 consecutive outputs (32-bit)
-        mmtmpD0 = _mm_srai_epi32(mmtmpD0,output_shift);
+        mmtmpD0 = simde_mm_srai_epi32(mmtmpD0,output_shift);
         //  print_ints("re(shift)",&mmtmpD0);
-        mmtmpD1 = _mm_srai_epi32(mmtmpD1,output_shift);
+        mmtmpD1 = simde_mm_srai_epi32(mmtmpD1,output_shift);
         //  print_ints("im(shift)",&mmtmpD1);
-        mmtmpD2 = _mm_unpacklo_epi32(mmtmpD0,mmtmpD1);
-        mmtmpD3 = _mm_unpackhi_epi32(mmtmpD0,mmtmpD1);
+        mmtmpD2 = simde_mm_unpacklo_epi32(mmtmpD0,mmtmpD1);
+        mmtmpD3 = simde_mm_unpackhi_epi32(mmtmpD0,mmtmpD1);
         //        print_ints("c0",&mmtmpD2);
         //  print_ints("c1",&mmtmpD3);
-        rho128[0] = _mm_packs_epi32(mmtmpD2,mmtmpD3);
+        rho128[0] = simde_mm_packs_epi32(mmtmpD2,mmtmpD3);
         //print_shorts("rx:",dl_ch128_2);
         //print_shorts("ch:",dl_ch128);
         //print_shorts("pack:",rho128);
         // multiply by conjugated channel
-        mmtmpD0 = _mm_madd_epi16(dl_ch128[1],dl_ch128_2[1]);
+        mmtmpD0 = simde_mm_madd_epi16(dl_ch128[1],dl_ch128_2[1]);
         // mmtmpD0 contains real part of 4 consecutive outputs (32-bit)
-        mmtmpD1 = _mm_shufflelo_epi16(dl_ch128[1],_MM_SHUFFLE(2,3,0,1));
-        mmtmpD1 = _mm_shufflehi_epi16(mmtmpD1,_MM_SHUFFLE(2,3,0,1));
-        mmtmpD1 = _mm_sign_epi16(mmtmpD1,*(__m128i *)conjugate);
-        mmtmpD1 = _mm_madd_epi16(mmtmpD1,dl_ch128_2[1]);
+        mmtmpD1 = simde_mm_shufflelo_epi16(dl_ch128[1], SIMDE_MM_SHUFFLE(2,3,0,1));
+        mmtmpD1 = simde_mm_shufflehi_epi16(mmtmpD1, SIMDE_MM_SHUFFLE(2,3,0,1));
+        mmtmpD1 = simde_mm_sign_epi16(mmtmpD1,*(simde__m128i *)conjugate);
+        mmtmpD1 = simde_mm_madd_epi16(mmtmpD1,dl_ch128_2[1]);
         // mmtmpD1 contains imag part of 4 consecutive outputs (32-bit)
-        mmtmpD0 = _mm_srai_epi32(mmtmpD0,output_shift);
-        mmtmpD1 = _mm_srai_epi32(mmtmpD1,output_shift);
-        mmtmpD2 = _mm_unpacklo_epi32(mmtmpD0,mmtmpD1);
-        mmtmpD3 = _mm_unpackhi_epi32(mmtmpD0,mmtmpD1);
-        rho128[1] =_mm_packs_epi32(mmtmpD2,mmtmpD3);
+        mmtmpD0 = simde_mm_srai_epi32(mmtmpD0,output_shift);
+        mmtmpD1 = simde_mm_srai_epi32(mmtmpD1,output_shift);
+        mmtmpD2 = simde_mm_unpacklo_epi32(mmtmpD0,mmtmpD1);
+        mmtmpD3 = simde_mm_unpackhi_epi32(mmtmpD0,mmtmpD1);
+        rho128[1] = simde_mm_packs_epi32(mmtmpD2,mmtmpD3);
         //print_shorts("rx:",dl_ch128_2+1);
         //print_shorts("ch:",dl_ch128+1);
         //print_shorts("pack:",rho128+1);
         // multiply by conjugated channel
-        mmtmpD0 = _mm_madd_epi16(dl_ch128[2],dl_ch128_2[2]);
+        mmtmpD0 = simde_mm_madd_epi16(dl_ch128[2],dl_ch128_2[2]);
         // mmtmpD0 contains real part of 4 consecutive outputs (32-bit)
-        mmtmpD1 = _mm_shufflelo_epi16(dl_ch128[2],_MM_SHUFFLE(2,3,0,1));
-        mmtmpD1 = _mm_shufflehi_epi16(mmtmpD1,_MM_SHUFFLE(2,3,0,1));
-        mmtmpD1 = _mm_sign_epi16(mmtmpD1,*(__m128i *)conjugate);
-        mmtmpD1 = _mm_madd_epi16(mmtmpD1,dl_ch128_2[2]);
+        mmtmpD1 = simde_mm_shufflelo_epi16(dl_ch128[2], SIMDE_MM_SHUFFLE(2,3,0,1));
+        mmtmpD1 = simde_mm_shufflehi_epi16(mmtmpD1, SIMDE_MM_SHUFFLE(2,3,0,1));
+        mmtmpD1 = simde_mm_sign_epi16(mmtmpD1,*(simde__m128i *)conjugate);
+        mmtmpD1 = simde_mm_madd_epi16(mmtmpD1,dl_ch128_2[2]);
         // mmtmpD1 contains imag part of 4 consecutive outputs (32-bit)
-        mmtmpD0 = _mm_srai_epi32(mmtmpD0,output_shift);
-        mmtmpD1 = _mm_srai_epi32(mmtmpD1,output_shift);
-        mmtmpD2 = _mm_unpacklo_epi32(mmtmpD0,mmtmpD1);
-        mmtmpD3 = _mm_unpackhi_epi32(mmtmpD0,mmtmpD1);
-        rho128[2] = _mm_packs_epi32(mmtmpD2,mmtmpD3);
+        mmtmpD0 = simde_mm_srai_epi32(mmtmpD0,output_shift);
+        mmtmpD1 = simde_mm_srai_epi32(mmtmpD1,output_shift);
+        mmtmpD2 = simde_mm_unpacklo_epi32(mmtmpD0,mmtmpD1);
+        mmtmpD3 = simde_mm_unpackhi_epi32(mmtmpD0,mmtmpD1);
+        rho128[2] = simde_mm_packs_epi32(mmtmpD2,mmtmpD3);
         //print_shorts("rx:",dl_ch128_2+2);
         //print_shorts("ch:",dl_ch128+2);
         //print_shorts("pack:",rho128+2);
@@ -1482,191 +1481,8 @@ void dlsch_channel_compensation(int **rxdataF_ext,
     }
   }
 
-  _mm_empty();
-  _m_empty();
-#elif defined(__arm__) || defined(__aarch64__)
-  unsigned short rb;
-  unsigned char aatx,aarx,symbol_mod,pilots=0;
-  int16x4_t *dl_ch128,*dl_ch128_2,*rxdataF128;
-  int32x4_t mmtmpD0,mmtmpD1,mmtmpD0b,mmtmpD1b;
-  int16x8_t *dl_ch_mag128,*dl_ch_mag128b,mmtmpD2,mmtmpD3,mmtmpD4;
-  int16x8_t QAM_amp128,QAM_amp128b;
-  int16x4x2_t *rxdataF_comp128,*rho128;
-  int16_t conj[4]__attribute__((aligned(16))) = {1,-1,1,-1};
-  int32x4_t output_shift128 = vmovq_n_s32(-(int32_t)output_shift);
-  symbol_mod = (symbol>=(7-frame_parms->Ncp)) ? symbol-(7-frame_parms->Ncp) : symbol;
-
-  if ((symbol_mod == 0) || (symbol_mod == (4-frame_parms->Ncp))) {
-    if (frame_parms->nb_antenna_ports_eNB==1) { // 10 out of 12 so don't reduce size
-      nb_rb=1+(5*nb_rb/6);
-    } else {
-      pilots=1;
-    }
-  }
-
-  for (aatx=0; aatx<frame_parms->nb_antenna_ports_eNB; aatx++) {
-    if (mod_order == 4) {
-      QAM_amp128  = vmovq_n_s16(QAM16_n1);  // 2/sqrt(10)
-      QAM_amp128b = vmovq_n_s16(0);
-    } else if (mod_order == 6) {
-      QAM_amp128  = vmovq_n_s16(QAM64_n1); //
-      QAM_amp128b = vmovq_n_s16(QAM64_n2);
-    }
-
-    //    printf("comp: rxdataF_comp %p, symbol %d\n",rxdataF_comp[0],symbol);
-
-    for (aarx=0; aarx<frame_parms->nb_antennas_rx; aarx++) {
-      dl_ch128          = (int16x4_t *)&dl_ch_estimates_ext[aatx*frame_parms->nb_antennas_rx + aarx][symbol*frame_parms->N_RB_DL*12];
-      dl_ch_mag128      = (int16x8_t *)&dl_ch_mag[aatx*frame_parms->nb_antennas_rx + aarx][symbol*frame_parms->N_RB_DL*12];
-      dl_ch_mag128b     = (int16x8_t *)&dl_ch_magb[aatx*frame_parms->nb_antennas_rx + aarx][symbol*frame_parms->N_RB_DL*12];
-      rxdataF128        = (int16x4_t *)&rxdataF_ext[aarx][symbol*frame_parms->N_RB_DL*12];
-      rxdataF_comp128   = (int16x4x2_t *)&rxdataF_comp[aatx*frame_parms->nb_antennas_rx + aarx][symbol*frame_parms->N_RB_DL*12];
-
-      for (rb=0; rb<nb_rb; rb++) {
-        if (mod_order>2) {
-          // get channel amplitude if not QPSK
-          mmtmpD0 = vmull_s16(dl_ch128[0], dl_ch128[0]);
-          // mmtmpD0 = [ch0*ch0,ch1*ch1,ch2*ch2,ch3*ch3];
-          mmtmpD0 = vqshlq_s32(vqaddq_s32(mmtmpD0,vrev64q_s32(mmtmpD0)),output_shift128);
-          // mmtmpD0 = [ch0*ch0 + ch1*ch1,ch0*ch0 + ch1*ch1,ch2*ch2 + ch3*ch3,ch2*ch2 + ch3*ch3]>>output_shift128 on 32-bits
-          mmtmpD1 = vmull_s16(dl_ch128[1], dl_ch128[1]);
-          mmtmpD1 = vqshlq_s32(vqaddq_s32(mmtmpD1,vrev64q_s32(mmtmpD1)),output_shift128);
-          mmtmpD2 = vcombine_s16(vmovn_s32(mmtmpD0),vmovn_s32(mmtmpD1));
-          // mmtmpD2 = [ch0*ch0 + ch1*ch1,ch0*ch0 + ch1*ch1,ch2*ch2 + ch3*ch3,ch2*ch2 + ch3*ch3,ch4*ch4 + ch5*ch5,ch4*ch4 + ch5*ch5,ch6*ch6 + ch7*ch7,ch6*ch6 + ch7*ch7]>>output_shift128 on 16-bits
-          mmtmpD0 = vmull_s16(dl_ch128[2], dl_ch128[2]);
-          mmtmpD0 = vqshlq_s32(vqaddq_s32(mmtmpD0,vrev64q_s32(mmtmpD0)),output_shift128);
-          mmtmpD1 = vmull_s16(dl_ch128[3], dl_ch128[3]);
-          mmtmpD1 = vqshlq_s32(vqaddq_s32(mmtmpD1,vrev64q_s32(mmtmpD1)),output_shift128);
-          mmtmpD3 = vcombine_s16(vmovn_s32(mmtmpD0),vmovn_s32(mmtmpD1));
-
-          if (pilots==0) {
-            mmtmpD0 = vmull_s16(dl_ch128[4], dl_ch128[4]);
-            mmtmpD0 = vqshlq_s32(vqaddq_s32(mmtmpD0,vrev64q_s32(mmtmpD0)),output_shift128);
-            mmtmpD1 = vmull_s16(dl_ch128[5], dl_ch128[5]);
-            mmtmpD1 = vqshlq_s32(vqaddq_s32(mmtmpD1,vrev64q_s32(mmtmpD1)),output_shift128);
-            mmtmpD4 = vcombine_s16(vmovn_s32(mmtmpD0),vmovn_s32(mmtmpD1));
-          }
-
-          dl_ch_mag128b[0] = vqdmulhq_s16(mmtmpD2,QAM_amp128b);
-          dl_ch_mag128b[1] = vqdmulhq_s16(mmtmpD3,QAM_amp128b);
-          dl_ch_mag128[0] = vqdmulhq_s16(mmtmpD2,QAM_amp128);
-          dl_ch_mag128[1] = vqdmulhq_s16(mmtmpD3,QAM_amp128);
-
-          if (pilots==0) {
-            dl_ch_mag128b[2] = vqdmulhq_s16(mmtmpD4,QAM_amp128b);
-            dl_ch_mag128[2]  = vqdmulhq_s16(mmtmpD4,QAM_amp128);
-          }
-        }
-
-        mmtmpD0 = vmull_s16(dl_ch128[0], rxdataF128[0]);
-        //mmtmpD0 = [Re(ch[0])Re(rx[0]) Im(ch[0])Im(ch[0]) Re(ch[1])Re(rx[1]) Im(ch[1])Im(ch[1])]
-        mmtmpD1 = vmull_s16(dl_ch128[1], rxdataF128[1]);
-        //mmtmpD1 = [Re(ch[2])Re(rx[2]) Im(ch[2])Im(ch[2]) Re(ch[3])Re(rx[3]) Im(ch[3])Im(ch[3])]
-        mmtmpD0 = vcombine_s32(vpadd_s32(vget_low_s32(mmtmpD0),vget_high_s32(mmtmpD0)),
-                               vpadd_s32(vget_low_s32(mmtmpD1),vget_high_s32(mmtmpD1)));
-        //mmtmpD0 = [Re(ch[0])Re(rx[0])+Im(ch[0])Im(ch[0]) Re(ch[1])Re(rx[1])+Im(ch[1])Im(ch[1]) Re(ch[2])Re(rx[2])+Im(ch[2])Im(ch[2]) Re(ch[3])Re(rx[3])+Im(ch[3])Im(ch[3])]
-        mmtmpD0b = vmull_s16(vrev32_s16(vmul_s16(dl_ch128[0],*(int16x4_t *)conj)), rxdataF128[0]);
-        //mmtmpD0 = [-Im(ch[0])Re(rx[0]) Re(ch[0])Im(rx[0]) -Im(ch[1])Re(rx[1]) Re(ch[1])Im(rx[1])]
-        mmtmpD1b = vmull_s16(vrev32_s16(vmul_s16(dl_ch128[1],*(int16x4_t *)conj)), rxdataF128[1]);
-        //mmtmpD0 = [-Im(ch[2])Re(rx[2]) Re(ch[2])Im(rx[2]) -Im(ch[3])Re(rx[3]) Re(ch[3])Im(rx[3])]
-        mmtmpD1 = vcombine_s32(vpadd_s32(vget_low_s32(mmtmpD0b),vget_high_s32(mmtmpD0b)),
-                               vpadd_s32(vget_low_s32(mmtmpD1b),vget_high_s32(mmtmpD1b)));
-        //mmtmpD1 = [-Im(ch[0])Re(rx[0])+Re(ch[0])Im(rx[0]) -Im(ch[1])Re(rx[1])+Re(ch[1])Im(rx[1]) -Im(ch[2])Re(rx[2])+Re(ch[2])Im(rx[2]) -Im(ch[3])Re(rx[3])+Re(ch[3])Im(rx[3])]
-        mmtmpD0 = vqshlq_s32(mmtmpD0,output_shift128);
-        mmtmpD1 = vqshlq_s32(mmtmpD1,output_shift128);
-        rxdataF_comp128[0] = vzip_s16(vmovn_s32(mmtmpD0),vmovn_s32(mmtmpD1));
-        mmtmpD0 = vmull_s16(dl_ch128[2], rxdataF128[2]);
-        mmtmpD1 = vmull_s16(dl_ch128[3], rxdataF128[3]);
-        mmtmpD0 = vcombine_s32(vpadd_s32(vget_low_s32(mmtmpD0),vget_high_s32(mmtmpD0)),
-                               vpadd_s32(vget_low_s32(mmtmpD1),vget_high_s32(mmtmpD1)));
-        mmtmpD0b = vmull_s16(vrev32_s16(vmul_s16(dl_ch128[2],*(int16x4_t *)conj)), rxdataF128[2]);
-        mmtmpD1b = vmull_s16(vrev32_s16(vmul_s16(dl_ch128[3],*(int16x4_t *)conj)), rxdataF128[3]);
-        mmtmpD1 = vcombine_s32(vpadd_s32(vget_low_s32(mmtmpD0b),vget_high_s32(mmtmpD0b)),
-                               vpadd_s32(vget_low_s32(mmtmpD1b),vget_high_s32(mmtmpD1b)));
-        mmtmpD0 = vqshlq_s32(mmtmpD0,output_shift128);
-        mmtmpD1 = vqshlq_s32(mmtmpD1,output_shift128);
-        rxdataF_comp128[1] = vzip_s16(vmovn_s32(mmtmpD0),vmovn_s32(mmtmpD1));
-
-        if (pilots==0) {
-          mmtmpD0 = vmull_s16(dl_ch128[4], rxdataF128[4]);
-          mmtmpD1 = vmull_s16(dl_ch128[5], rxdataF128[5]);
-          mmtmpD0 = vcombine_s32(vpadd_s32(vget_low_s32(mmtmpD0),vget_high_s32(mmtmpD0)),
-                                 vpadd_s32(vget_low_s32(mmtmpD1),vget_high_s32(mmtmpD1)));
-          mmtmpD0b = vmull_s16(vrev32_s16(vmul_s16(dl_ch128[4],*(int16x4_t *)conj)), rxdataF128[4]);
-          mmtmpD1b = vmull_s16(vrev32_s16(vmul_s16(dl_ch128[5],*(int16x4_t *)conj)), rxdataF128[5]);
-          mmtmpD1 = vcombine_s32(vpadd_s32(vget_low_s32(mmtmpD0b),vget_high_s32(mmtmpD0b)),
-                                 vpadd_s32(vget_low_s32(mmtmpD1b),vget_high_s32(mmtmpD1b)));
-          mmtmpD0 = vqshlq_s32(mmtmpD0,output_shift128);
-          mmtmpD1 = vqshlq_s32(mmtmpD1,output_shift128);
-          rxdataF_comp128[2] = vzip_s16(vmovn_s32(mmtmpD0),vmovn_s32(mmtmpD1));
-          dl_ch128+=6;
-          dl_ch_mag128+=3;
-          dl_ch_mag128b+=3;
-          rxdataF128+=6;
-          rxdataF_comp128+=3;
-        } else { // we have a smaller PDSCH in symbols with pilots so skip last group of 4 REs and increment less
-          dl_ch128+=4;
-          dl_ch_mag128+=2;
-          dl_ch_mag128b+=2;
-          rxdataF128+=4;
-          rxdataF_comp128+=2;
-        }
-      }
-    }
-  }
-
-  if (rho) {
-    for (aarx=0; aarx<frame_parms->nb_antennas_rx; aarx++) {
-      rho128        = (int16x4x2_t *)&rho[aarx][symbol*frame_parms->N_RB_DL*12];
-      dl_ch128      = (int16x4_t *)&dl_ch_estimates_ext[aarx][symbol*frame_parms->N_RB_DL*12];
-      dl_ch128_2    = (int16x4_t *)&dl_ch_estimates_ext[2+aarx][symbol*frame_parms->N_RB_DL*12];
-
-      for (rb=0; rb<nb_rb; rb++) {
-        mmtmpD0 = vmull_s16(dl_ch128[0], dl_ch128_2[0]);
-        mmtmpD1 = vmull_s16(dl_ch128[1], dl_ch128_2[1]);
-        mmtmpD0 = vcombine_s32(vpadd_s32(vget_low_s32(mmtmpD0),vget_high_s32(mmtmpD0)),
-                               vpadd_s32(vget_low_s32(mmtmpD1),vget_high_s32(mmtmpD1)));
-        mmtmpD0b = vmull_s16(vrev32_s16(vmul_s16(dl_ch128[0],*(int16x4_t *)conj)), dl_ch128_2[0]);
-        mmtmpD1b = vmull_s16(vrev32_s16(vmul_s16(dl_ch128[1],*(int16x4_t *)conj)), dl_ch128_2[1]);
-        mmtmpD1 = vcombine_s32(vpadd_s32(vget_low_s32(mmtmpD0b),vget_high_s32(mmtmpD0b)),
-                               vpadd_s32(vget_low_s32(mmtmpD1b),vget_high_s32(mmtmpD1b)));
-        mmtmpD0 = vqshlq_s32(mmtmpD0,output_shift128);
-        mmtmpD1 = vqshlq_s32(mmtmpD1,output_shift128);
-        rho128[0] = vzip_s16(vmovn_s32(mmtmpD0),vmovn_s32(mmtmpD1));
-        mmtmpD0 = vmull_s16(dl_ch128[2], dl_ch128_2[2]);
-        mmtmpD1 = vmull_s16(dl_ch128[3], dl_ch128_2[3]);
-        mmtmpD0 = vcombine_s32(vpadd_s32(vget_low_s32(mmtmpD0),vget_high_s32(mmtmpD0)),
-                               vpadd_s32(vget_low_s32(mmtmpD1),vget_high_s32(mmtmpD1)));
-        mmtmpD0b = vmull_s16(vrev32_s16(vmul_s16(dl_ch128[2],*(int16x4_t *)conj)), dl_ch128_2[2]);
-        mmtmpD1b = vmull_s16(vrev32_s16(vmul_s16(dl_ch128[3],*(int16x4_t *)conj)), dl_ch128_2[3]);
-        mmtmpD1 = vcombine_s32(vpadd_s32(vget_low_s32(mmtmpD0b),vget_high_s32(mmtmpD0b)),
-                               vpadd_s32(vget_low_s32(mmtmpD1b),vget_high_s32(mmtmpD1b)));
-        mmtmpD0 = vqshlq_s32(mmtmpD0,output_shift128);
-        mmtmpD1 = vqshlq_s32(mmtmpD1,output_shift128);
-        rho128[1] = vzip_s16(vmovn_s32(mmtmpD0),vmovn_s32(mmtmpD1));
-        mmtmpD0 = vmull_s16(dl_ch128[0], dl_ch128_2[0]);
-        mmtmpD1 = vmull_s16(dl_ch128[1], dl_ch128_2[1]);
-        mmtmpD0 = vcombine_s32(vpadd_s32(vget_low_s32(mmtmpD0),vget_high_s32(mmtmpD0)),
-                               vpadd_s32(vget_low_s32(mmtmpD1),vget_high_s32(mmtmpD1)));
-        mmtmpD0b = vmull_s16(vrev32_s16(vmul_s16(dl_ch128[4],*(int16x4_t *)conj)), dl_ch128_2[4]);
-        mmtmpD1b = vmull_s16(vrev32_s16(vmul_s16(dl_ch128[5],*(int16x4_t *)conj)), dl_ch128_2[5]);
-        mmtmpD1 = vcombine_s32(vpadd_s32(vget_low_s32(mmtmpD0b),vget_high_s32(mmtmpD0b)),
-                               vpadd_s32(vget_low_s32(mmtmpD1b),vget_high_s32(mmtmpD1b)));
-        mmtmpD0 = vqshlq_s32(mmtmpD0,output_shift128);
-        mmtmpD1 = vqshlq_s32(mmtmpD1,output_shift128);
-        rho128[2] = vzip_s16(vmovn_s32(mmtmpD0),vmovn_s32(mmtmpD1));
-        dl_ch128+=6;
-        dl_ch128_2+=6;
-        rho128+=3;
-      }
-
-      if (first_symbol_flag==1) {
-        measurements->rx_correlation[0][aarx] = signal_energy(&rho[aarx][symbol*frame_parms->N_RB_DL*12],rb*12);
-      }
-    }
-  }
-
-#endif
+  simde_mm_empty();
+  simde_m_empty();
 }
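Note: with the architecture guards removed, this function is now a single SIMDE code path. A minimal portability sketch, assuming SIMDE's usual per-extension header layout (simde/x86/ssse3.h pulls in its SSE2 dependencies; OAI may reach it through a wrapper header instead). On aarch64 these calls lower to NEON, and simde_mm_empty()/simde_m_empty() are effectively no-ops:

    #include <simde/x86/ssse3.h>  /* assumed include path */

    /* One compensated vector: z = saturate((conj(h)*y) >> shift), 4 complex REs.
       conj_signs is the {-1,1,...} mask, exactly as in the code above. */
    static simde__m128i comp_one(simde__m128i h, simde__m128i y,
                                 simde__m128i conj_signs, int shift) {
      simde__m128i re = simde_mm_madd_epi16(h, y);            /* real parts, 32-bit */
      simde__m128i t  = simde_mm_shufflelo_epi16(h, SIMDE_MM_SHUFFLE(2,3,0,1));
      t = simde_mm_shufflehi_epi16(t, SIMDE_MM_SHUFFLE(2,3,0,1));
      t = simde_mm_sign_epi16(t, conj_signs);
      simde__m128i im = simde_mm_madd_epi16(t, y);            /* imag parts, 32-bit */
      re = simde_mm_srai_epi32(re, shift);
      im = simde_mm_srai_epi32(im, shift);
      return simde_mm_packs_epi32(simde_mm_unpacklo_epi32(re, im),
                                  simde_mm_unpackhi_epi32(re, im));
    }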
 
 void dlsch_channel_compensation_core(int **rxdataF_ext,
@@ -1686,19 +1502,19 @@ void dlsch_channel_compensation_core(int **rxdataF_ext,
   unsigned short ii;
   int length_mod8 = 0;
   int length2;
-  __m128i *dl_ch128,*dl_ch_mag128,*dl_ch_mag128b, *dl_ch128_2, *rxdataF128,*rxdataF_comp128,*rho128;
-  __m128i mmtmpD0,mmtmpD1,mmtmpD2,mmtmpD3,QAM_amp128={0};
+  simde__m128i *dl_ch128,*dl_ch_mag128,*dl_ch_mag128b, *dl_ch128_2, *rxdataF128,*rxdataF_comp128,*rho128;
+  simde__m128i mmtmpD0,mmtmpD1,mmtmpD2,mmtmpD3,QAM_amp128={0};
   int aatx = 0, aarx = 0;
 
   for (aatx=0; aatx<n_tx; aatx++) {
-    __m128i QAM_amp128b={0};
+    simde__m128i QAM_amp128b={0};
 
     if (mod_order == 4) {
-      QAM_amp128 = _mm_set1_epi16(QAM16_n1);  // 2/sqrt(10)
-      QAM_amp128b = _mm_setzero_si128();
+      QAM_amp128 = simde_mm_set1_epi16(QAM16_n1);  // 2/sqrt(10)
+      QAM_amp128b = simde_mm_setzero_si128();
     } else if (mod_order == 6) {
-      QAM_amp128  = _mm_set1_epi16(QAM64_n1); //
-      QAM_amp128b = _mm_set1_epi16(QAM64_n2);
+      QAM_amp128  = simde_mm_set1_epi16(QAM64_n1);  // 4/sqrt(42)
+      QAM_amp128b = simde_mm_set1_epi16(QAM64_n2);  // 2/sqrt(42)
     }
 
     for (aarx=0; aarx<n_rx; aarx++) {
@@ -1707,11 +1523,11 @@ void dlsch_channel_compensation_core(int **rxdataF_ext,
        * Elena's commit.
        */
       int x = n_rx > 1 ? n_rx : 2;
-      dl_ch128          = (__m128i *)&dl_ch_estimates_ext[aatx*x + aarx][start_point];
-      dl_ch_mag128      = (__m128i *)&dl_ch_mag[aatx*x + aarx][start_point];
-      dl_ch_mag128b     = (__m128i *)&dl_ch_magb[aatx*x + aarx][start_point];
-      rxdataF128        = (__m128i *)&rxdataF_ext[aarx][start_point];
-      rxdataF_comp128   = (__m128i *)&rxdataF_comp[aatx*x + aarx][start_point];
+      dl_ch128          = (simde__m128i *)&dl_ch_estimates_ext[aatx*x + aarx][start_point];
+      dl_ch_mag128      = (simde__m128i *)&dl_ch_mag[aatx*x + aarx][start_point];
+      dl_ch_mag128b     = (simde__m128i *)&dl_ch_magb[aatx*x + aarx][start_point];
+      rxdataF128        = (simde__m128i *)&rxdataF_ext[aarx][start_point];
+      rxdataF_comp128   = (simde__m128i *)&rxdataF_comp[aatx*x + aarx][start_point];
       length_mod8 = length&7;
 
       if (length_mod8 == 0) {
@@ -1720,63 +1536,63 @@ void dlsch_channel_compensation_core(int **rxdataF_ext,
         for (ii=0; ii<length2; ++ii) {
           if (mod_order>2) {
             // get channel amplitude if not QPSK
-            mmtmpD0 = _mm_madd_epi16(dl_ch128[0],dl_ch128[0]);
-            mmtmpD0 = _mm_srai_epi32(mmtmpD0,output_shift);
-            mmtmpD1 = _mm_madd_epi16(dl_ch128[1],dl_ch128[1]);
-            mmtmpD1 = _mm_srai_epi32(mmtmpD1,output_shift);
-            mmtmpD0 = _mm_packs_epi32(mmtmpD0,mmtmpD1);
+            mmtmpD0 = simde_mm_madd_epi16(dl_ch128[0],dl_ch128[0]);
+            mmtmpD0 = simde_mm_srai_epi32(mmtmpD0,output_shift);
+            mmtmpD1 = simde_mm_madd_epi16(dl_ch128[1],dl_ch128[1]);
+            mmtmpD1 = simde_mm_srai_epi32(mmtmpD1,output_shift);
+            mmtmpD0 = simde_mm_packs_epi32(mmtmpD0,mmtmpD1);
             // store channel magnitude here in a new field of dlsch
-            dl_ch_mag128[0] = _mm_unpacklo_epi16(mmtmpD0,mmtmpD0);
+            dl_ch_mag128[0] = simde_mm_unpacklo_epi16(mmtmpD0,mmtmpD0);
             dl_ch_mag128b[0] = dl_ch_mag128[0];
-            dl_ch_mag128[0] = _mm_mulhi_epi16(dl_ch_mag128[0],QAM_amp128);
-            dl_ch_mag128[0] = _mm_slli_epi16(dl_ch_mag128[0],1);
+            dl_ch_mag128[0] = simde_mm_mulhi_epi16(dl_ch_mag128[0],QAM_amp128);
+            dl_ch_mag128[0] = simde_mm_slli_epi16(dl_ch_mag128[0],1);
             //print_ints("Re(ch):",(int16_t*)&mmtmpD0);
             //print_shorts("QAM_amp:",(int16_t*)&QAM_amp128);
             //print_shorts("mag:",(int16_t*)&dl_ch_mag128[0]);
-            dl_ch_mag128[1] = _mm_unpackhi_epi16(mmtmpD0,mmtmpD0);
+            dl_ch_mag128[1] = simde_mm_unpackhi_epi16(mmtmpD0,mmtmpD0);
             dl_ch_mag128b[1] = dl_ch_mag128[1];
-            dl_ch_mag128[1] = _mm_mulhi_epi16(dl_ch_mag128[1],QAM_amp128);
-            dl_ch_mag128[1] = _mm_slli_epi16(dl_ch_mag128[1],1);
-            dl_ch_mag128b[0] = _mm_mulhi_epi16(dl_ch_mag128b[0],QAM_amp128b);
-            dl_ch_mag128b[0] = _mm_slli_epi16(dl_ch_mag128b[0],1);
-            dl_ch_mag128b[1] = _mm_mulhi_epi16(dl_ch_mag128b[1],QAM_amp128b);
-            dl_ch_mag128b[1] = _mm_slli_epi16(dl_ch_mag128b[1],1);
+            dl_ch_mag128[1] = simde_mm_mulhi_epi16(dl_ch_mag128[1],QAM_amp128);
+            dl_ch_mag128[1] = simde_mm_slli_epi16(dl_ch_mag128[1],1);
+            dl_ch_mag128b[0] = simde_mm_mulhi_epi16(dl_ch_mag128b[0],QAM_amp128b);
+            dl_ch_mag128b[0] = simde_mm_slli_epi16(dl_ch_mag128b[0],1);
+            dl_ch_mag128b[1] = simde_mm_mulhi_epi16(dl_ch_mag128b[1],QAM_amp128b);
+            dl_ch_mag128b[1] = simde_mm_slli_epi16(dl_ch_mag128b[1],1);
           }
 
           // multiply by conjugated channel
-          mmtmpD0 = _mm_madd_epi16(dl_ch128[0],rxdataF128[0]);
+          mmtmpD0 = simde_mm_madd_epi16(dl_ch128[0],rxdataF128[0]);
           // mmtmpD0 contains real part of 4 consecutive outputs (32-bit)
-          mmtmpD1 = _mm_shufflelo_epi16(dl_ch128[0],_MM_SHUFFLE(2,3,0,1));
-          mmtmpD1 = _mm_shufflehi_epi16(mmtmpD1,_MM_SHUFFLE(2,3,0,1));
-          mmtmpD1 = _mm_sign_epi16(mmtmpD1,*(__m128i *)&conjugate[0]);
+          mmtmpD1 = simde_mm_shufflelo_epi16(dl_ch128[0], SIMDE_MM_SHUFFLE(2,3,0,1));
+          mmtmpD1 = simde_mm_shufflehi_epi16(mmtmpD1, SIMDE_MM_SHUFFLE(2,3,0,1));
+          mmtmpD1 = simde_mm_sign_epi16(mmtmpD1,*(simde__m128i *)&conjugate[0]);
           //  print_ints("im",&mmtmpD1);
-          mmtmpD1 = _mm_madd_epi16(mmtmpD1,rxdataF128[0]);
+          mmtmpD1 = simde_mm_madd_epi16(mmtmpD1,rxdataF128[0]);
           // mmtmpD1 contains imag part of 4 consecutive outputs (32-bit)
-          mmtmpD0 = _mm_srai_epi32(mmtmpD0,output_shift);
+          mmtmpD0 = simde_mm_srai_epi32(mmtmpD0,output_shift);
           //  print_ints("re(shift)",&mmtmpD0);
-          mmtmpD1 = _mm_srai_epi32(mmtmpD1,output_shift);
+          mmtmpD1 = simde_mm_srai_epi32(mmtmpD1,output_shift);
           //  print_ints("im(shift)",&mmtmpD1);
-          mmtmpD2 = _mm_unpacklo_epi32(mmtmpD0,mmtmpD1);
-          mmtmpD3 = _mm_unpackhi_epi32(mmtmpD0,mmtmpD1);
+          mmtmpD2 = simde_mm_unpacklo_epi32(mmtmpD0,mmtmpD1);
+          mmtmpD3 = simde_mm_unpackhi_epi32(mmtmpD0,mmtmpD1);
           //        print_ints("c0",&mmtmpD2);
           //  print_ints("c1",&mmtmpD3);
-          rxdataF_comp128[0] = _mm_packs_epi32(mmtmpD2,mmtmpD3);
+          rxdataF_comp128[0] = simde_mm_packs_epi32(mmtmpD2,mmtmpD3);
           //  print_shorts("rx:",rxdataF128);
           //  print_shorts("ch:",dl_ch128);
           //  print_shorts("pack:",rxdataF_comp128);
           // multiply by conjugated channel
-          mmtmpD0 = _mm_madd_epi16(dl_ch128[1],rxdataF128[1]);
+          mmtmpD0 = simde_mm_madd_epi16(dl_ch128[1],rxdataF128[1]);
           // mmtmpD0 contains real part of 4 consecutive outputs (32-bit)
-          mmtmpD1 = _mm_shufflelo_epi16(dl_ch128[1],_MM_SHUFFLE(2,3,0,1));
-          mmtmpD1 = _mm_shufflehi_epi16(mmtmpD1,_MM_SHUFFLE(2,3,0,1));
-          mmtmpD1 = _mm_sign_epi16(mmtmpD1,*(__m128i *)conjugate);
-          mmtmpD1 = _mm_madd_epi16(mmtmpD1,rxdataF128[1]);
+          mmtmpD1 = simde_mm_shufflelo_epi16(dl_ch128[1], SIMDE_MM_SHUFFLE(2,3,0,1));
+          mmtmpD1 = simde_mm_shufflehi_epi16(mmtmpD1, SIMDE_MM_SHUFFLE(2,3,0,1));
+          mmtmpD1 = simde_mm_sign_epi16(mmtmpD1,*(simde__m128i *)conjugate);
+          mmtmpD1 = simde_mm_madd_epi16(mmtmpD1,rxdataF128[1]);
           // mmtmpD1 contains imag part of 4 consecutive outputs (32-bit)
-          mmtmpD0 = _mm_srai_epi32(mmtmpD0,output_shift);
-          mmtmpD1 = _mm_srai_epi32(mmtmpD1,output_shift);
-          mmtmpD2 = _mm_unpacklo_epi32(mmtmpD0,mmtmpD1);
-          mmtmpD3 = _mm_unpackhi_epi32(mmtmpD0,mmtmpD1);
-          rxdataF_comp128[1] = _mm_packs_epi32(mmtmpD2,mmtmpD3);
+          mmtmpD0 = simde_mm_srai_epi32(mmtmpD0,output_shift);
+          mmtmpD1 = simde_mm_srai_epi32(mmtmpD1,output_shift);
+          mmtmpD2 = simde_mm_unpacklo_epi32(mmtmpD0,mmtmpD1);
+          mmtmpD3 = simde_mm_unpackhi_epi32(mmtmpD0,mmtmpD1);
+          rxdataF_comp128[1] = simde_mm_packs_epi32(mmtmpD2,mmtmpD3);
           //  print_shorts("rx:",rxdataF128+1);
           //  print_shorts("ch:",dl_ch128+1);
           //print_shorts("pack:",rxdataF_comp128+1);
@@ -1796,49 +1612,49 @@ void dlsch_channel_compensation_core(int **rxdataF_ext,
   /* This part of the code makes sense only for processing in 2x2 blocks */
   if (rho) {
     for (aarx=0; aarx<n_rx; aarx++) {
-      rho128        = (__m128i *)&rho[aarx][start_point];
-      dl_ch128      = (__m128i *)&dl_ch_estimates_ext[aarx][start_point];
-      dl_ch128_2    = (__m128i *)&dl_ch_estimates_ext[2+aarx][start_point];
+      rho128        = (simde__m128i *)&rho[aarx][start_point];
+      dl_ch128      = (simde__m128i *)&dl_ch_estimates_ext[aarx][start_point];
+      dl_ch128_2    = (simde__m128i *)&dl_ch_estimates_ext[2+aarx][start_point];
 
       if (length_mod8 == 0) {
         length2 = length>>3;
 
         for (ii=0; ii<length2; ++ii) {
           // multiply by conjugated channel
-          mmtmpD0 = _mm_madd_epi16(dl_ch128[0],dl_ch128_2[0]);
+          mmtmpD0 = simde_mm_madd_epi16(dl_ch128[0],dl_ch128_2[0]);
           //  print_ints("re",&mmtmpD0);
           // mmtmpD0 contains real part of 4 consecutive outputs (32-bit)
-          mmtmpD1 = _mm_shufflelo_epi16(dl_ch128[0],_MM_SHUFFLE(2,3,0,1));
-          mmtmpD1 = _mm_shufflehi_epi16(mmtmpD1,_MM_SHUFFLE(2,3,0,1));
-          mmtmpD1 = _mm_sign_epi16(mmtmpD1,*(__m128i *)&conjugate[0]);
+          mmtmpD1 = simde_mm_shufflelo_epi16(dl_ch128[0], SIMDE_MM_SHUFFLE(2,3,0,1));
+          mmtmpD1 = simde_mm_shufflehi_epi16(mmtmpD1, SIMDE_MM_SHUFFLE(2,3,0,1));
+          mmtmpD1 = simde_mm_sign_epi16(mmtmpD1,*(simde__m128i *)&conjugate[0]);
           //  print_ints("im",&mmtmpD1);
-          mmtmpD1 = _mm_madd_epi16(mmtmpD1,dl_ch128_2[0]);
+          mmtmpD1 = simde_mm_madd_epi16(mmtmpD1,dl_ch128_2[0]);
           // mmtmpD1 contains imag part of 4 consecutive outputs (32-bit)
-          mmtmpD0 = _mm_srai_epi32(mmtmpD0,output_shift);
+          mmtmpD0 = simde_mm_srai_epi32(mmtmpD0,output_shift);
           //  print_ints("re(shift)",&mmtmpD0);
-          mmtmpD1 = _mm_srai_epi32(mmtmpD1,output_shift);
+          mmtmpD1 = simde_mm_srai_epi32(mmtmpD1,output_shift);
           //  print_ints("im(shift)",&mmtmpD1);
-          mmtmpD2 = _mm_unpacklo_epi32(mmtmpD0,mmtmpD1);
-          mmtmpD3 = _mm_unpackhi_epi32(mmtmpD0,mmtmpD1);
+          mmtmpD2 = simde_mm_unpacklo_epi32(mmtmpD0,mmtmpD1);
+          mmtmpD3 = simde_mm_unpackhi_epi32(mmtmpD0,mmtmpD1);
           //        print_ints("c0",&mmtmpD2);
           //  print_ints("c1",&mmtmpD3);
-          rho128[0] = _mm_packs_epi32(mmtmpD2,mmtmpD3);
+          rho128[0] = simde_mm_packs_epi32(mmtmpD2,mmtmpD3);
           //print_shorts("rx:",dl_ch128_2);
           //print_shorts("ch:",dl_ch128);
           //print_shorts("pack:",rho128);
           // multiply by conjugated channel
-          mmtmpD0 = _mm_madd_epi16(dl_ch128[1],dl_ch128_2[1]);
+          mmtmpD0 = simde_mm_madd_epi16(dl_ch128[1],dl_ch128_2[1]);
           // mmtmpD0 contains real part of 4 consecutive outputs (32-bit)
-          mmtmpD1 = _mm_shufflelo_epi16(dl_ch128[1],_MM_SHUFFLE(2,3,0,1));
-          mmtmpD1 = _mm_shufflehi_epi16(mmtmpD1,_MM_SHUFFLE(2,3,0,1));
-          mmtmpD1 = _mm_sign_epi16(mmtmpD1,*(__m128i *)conjugate);
-          mmtmpD1 = _mm_madd_epi16(mmtmpD1,dl_ch128_2[1]);
+          mmtmpD1 = simde_mm_shufflelo_epi16(dl_ch128[1], SIMDE_MM_SHUFFLE(2,3,0,1));
+          mmtmpD1 = simde_mm_shufflehi_epi16(mmtmpD1, SIMDE_MM_SHUFFLE(2,3,0,1));
+          mmtmpD1 = simde_mm_sign_epi16(mmtmpD1,*(simde__m128i *)conjugate);
+          mmtmpD1 = simde_mm_madd_epi16(mmtmpD1,dl_ch128_2[1]);
           // mmtmpD1 contains imag part of 4 consecutive outputs (32-bit)
-          mmtmpD0 = _mm_srai_epi32(mmtmpD0,output_shift);
-          mmtmpD1 = _mm_srai_epi32(mmtmpD1,output_shift);
-          mmtmpD2 = _mm_unpacklo_epi32(mmtmpD0,mmtmpD1);
-          mmtmpD3 = _mm_unpackhi_epi32(mmtmpD0,mmtmpD1);
-          rho128[1] =_mm_packs_epi32(mmtmpD2,mmtmpD3);
+          mmtmpD0 = simde_mm_srai_epi32(mmtmpD0,output_shift);
+          mmtmpD1 = simde_mm_srai_epi32(mmtmpD1,output_shift);
+          mmtmpD2 = simde_mm_unpacklo_epi32(mmtmpD0,mmtmpD1);
+          mmtmpD3 = simde_mm_unpackhi_epi32(mmtmpD0,mmtmpD1);
+          rho128[1] = simde_mm_packs_epi32(mmtmpD2,mmtmpD3);
           dl_ch128+=2;
           dl_ch128_2+=2;
           rho128+=2;
@@ -1850,137 +1666,94 @@ void dlsch_channel_compensation_core(int **rxdataF_ext,
     }
   }
 
-  _mm_empty();
-  _m_empty();
+  simde_mm_empty();
+  simde_m_empty();
 }
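Note: the mulhi/slli pair used throughout this file for the dl_ch_mag scaling is a Q15 fixed-point multiply: mulhi_epi16 keeps the high 16 bits of the 32-bit product, i.e. (a*b) >> 16, and the shift left by 1 makes it (a*b) >> 15 with the lowest bit zeroed. A scalar equivalent with worked numbers (the Q15 value 20724 for 2/sqrt(10) is illustrative, not necessarily the exact QAM16_n1 macro):

    #include <stdint.h>

    /* Scalar meaning of mulhi_epi16 + slli_epi16(., 1). */
    static int16_t q15_mulhi_slli(int16_t a, int16_t b) {
      return (int16_t)((((int32_t)a * b) >> 16) << 1);  /* ~ (a*b)>>15, LSB = 0 */
    }

    /* a = 8192 (0.25 in Q15), b = 20724 (~0.6325 = 2/sqrt(10) in Q15):
       (8192 * 20724) >> 16 = 2590, doubled = 5180, i.e. ~0.1581 = 0.25 * 0.6325 */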
 
-#if defined(__x86_64__) || defined(__i386__)
 
-void prec2A_TM56_128(unsigned char pmi,__m128i *ch0,__m128i *ch1) {
-  __m128i amp;
-  amp = _mm_set1_epi16(ONE_OVER_SQRT2_Q15);
+void prec2A_TM56_128(unsigned char pmi,simde__m128i *ch0,simde__m128i *ch1) {
+  simde__m128i amp;
+  amp = simde_mm_set1_epi16(ONE_OVER_SQRT2_Q15);
 
   switch (pmi) {
     case 0 :   // +1 +1
       //    print_shorts("phase 0 :ch0",ch0);
       //    print_shorts("phase 0 :ch1",ch1);
-      ch0[0] = _mm_adds_epi16(ch0[0],ch1[0]);
+      ch0[0] = simde_mm_adds_epi16(ch0[0],ch1[0]);
       break;
 
     case 1 :   // +1 -1
       //    print_shorts("phase 1 :ch0",ch0);
       //    print_shorts("phase 1 :ch1",ch1);
-      ch0[0] = _mm_subs_epi16(ch0[0],ch1[0]);
+      ch0[0] = simde_mm_subs_epi16(ch0[0],ch1[0]);
       //    print_shorts("phase 1 :ch0-ch1",ch0);
       break;
 
     case 2 :   // +1 +j
-      ch1[0] = _mm_sign_epi16(ch1[0],*(__m128i *)&conjugate[0]);
-      ch1[0] = _mm_shufflelo_epi16(ch1[0],_MM_SHUFFLE(2,3,0,1));
-      ch1[0] = _mm_shufflehi_epi16(ch1[0],_MM_SHUFFLE(2,3,0,1));
-      ch0[0] = _mm_subs_epi16(ch0[0],ch1[0]);
+      ch1[0] = simde_mm_sign_epi16(ch1[0],*(simde__m128i *)&conjugate[0]);
+      ch1[0] = simde_mm_shufflelo_epi16(ch1[0], SIMDE_MM_SHUFFLE(2,3,0,1));
+      ch1[0] = simde_mm_shufflehi_epi16(ch1[0], SIMDE_MM_SHUFFLE(2,3,0,1));
+      ch0[0] = simde_mm_subs_epi16(ch0[0],ch1[0]);
       break;
 
     case 3 :   // +1 -j
-      ch1[0] = _mm_sign_epi16(ch1[0],*(__m128i *)&conjugate[0]);
-      ch1[0] = _mm_shufflelo_epi16(ch1[0],_MM_SHUFFLE(2,3,0,1));
-      ch1[0] = _mm_shufflehi_epi16(ch1[0],_MM_SHUFFLE(2,3,0,1));
-      ch0[0] = _mm_adds_epi16(ch0[0],ch1[0]);
+      ch1[0] = simde_mm_sign_epi16(ch1[0],*(simde__m128i *)&conjugate[0]);
+      ch1[0] = simde_mm_shufflelo_epi16(ch1[0], SIMDE_MM_SHUFFLE(2,3,0,1));
+      ch1[0] = simde_mm_shufflehi_epi16(ch1[0], SIMDE_MM_SHUFFLE(2,3,0,1));
+      ch0[0] = simde_mm_adds_epi16(ch0[0],ch1[0]);
       break;
   }
 
-  ch0[0] = _mm_mulhi_epi16(ch0[0],amp);
-  ch0[0] = _mm_slli_epi16(ch0[0],1);
-  _mm_empty();
-  _m_empty();
+  ch0[0] = simde_mm_mulhi_epi16(ch0[0],amp);
+  ch0[0] = simde_mm_slli_epi16(ch0[0],1);
+  simde_mm_empty();
+  simde_m_empty();
 }
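Note: the case 2/3 sign conventions above are easy to misread: with `conjugate` = {-1,1,...}, sign() maps ch1 = (a,b) to (-a,b), and the shuffle swap then yields (b,-a), which is exactly -j*ch1. Case 2's subtraction therefore computes ch0 - (-j*ch1) = ch0 + j*ch1, and case 3's addition computes ch0 - j*ch1. A scalar check of case 2, ignoring the adds/subs saturation:

    #include <stdint.h>

    /* TM5/6 case 2 (+1 +j), one complex lane. */
    static void prec_case2(int16_t *c0re, int16_t *c0im,
                           int16_t c1re, int16_t c1im) {
      int16_t tre =  c1im;   /* Re(-j*ch1) */
      int16_t tim = -c1re;   /* Im(-j*ch1) */
      *c0re -= tre;          /* = c0re - c1im = Re(ch0 + j*ch1) */
      *c0im -= tim;          /* = c0im + c1re = Im(ch0 + j*ch1) */
    }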
-#elif defined(__arm__) || defined(__aarch64__)
-void prec2A_TM56_128(unsigned char pmi,__m128i *ch0,__m128i *ch1) {
-  // sqrt(2) is already taken into account in computation sqrt_rho_a, sqrt_rho_b,
-  //so removed it
-
-  //__m128i amp;
-  //amp = _mm_set1_epi16(ONE_OVER_SQRT2_Q15);
-  switch (pmi) {
-    case 0 :   // +1 +1
-      //    print_shorts("phase 0 :ch0",ch0);
-      //    print_shorts("phase 0 :ch1",ch1);
-      ch0[0] = _mm_adds_epi16(ch0[0],ch1[0]);
-      break;
-
-    case 1 :   // +1 -1
-      //    print_shorts("phase 1 :ch0",ch0);
-      //    print_shorts("phase 1 :ch1",ch1);
-      ch0[0] = _mm_subs_epi16(ch0[0],ch1[0]);
-      //    print_shorts("phase 1 :ch0-ch1",ch0);
-      break;
-
-    case 2 :   // +1 +j
-      ch1[0] = _mm_sign_epi16(ch1[0],*(__m128i *)&conjugate[0]);
-      ch1[0] = _mm_shufflelo_epi16(ch1[0],_MM_SHUFFLE(2,3,0,1));
-      ch1[0] = _mm_shufflehi_epi16(ch1[0],_MM_SHUFFLE(2,3,0,1));
-      ch0[0] = _mm_subs_epi16(ch0[0],ch1[0]);
-      break;   // +1 -j
-
-    case 3 :
-      ch1[0] = _mm_sign_epi16(ch1[0],*(__m128i *)&conjugate[0]);
-      ch1[0] = _mm_shufflelo_epi16(ch1[0],_MM_SHUFFLE(2,3,0,1));
-      ch1[0] = _mm_shufflehi_epi16(ch1[0],_MM_SHUFFLE(2,3,0,1));
-      ch0[0] = _mm_adds_epi16(ch0[0],ch1[0]);
-      break;
-  }
-
-  //ch0[0] = _mm_mulhi_epi16(ch0[0],amp);
-  //ch0[0] = _mm_slli_epi16(ch0[0],1);
-  _mm_empty();
-  _m_empty();
-}
-#endif
 // precoding is stream 0 .5(1,1)  .5(1,-1) .5(1,1)  .5(1,-1)
 //              stream 1 .5(1,-1) .5(1,1)  .5(1,-1) .5(1,1)
 // store "precoded" channel for stream 0 in ch0, stream 1 in ch1
 
 short TM3_prec[8]__attribute__((aligned(16))) = {1,1,-1,-1,1,1,-1,-1} ;
 
-void prec2A_TM3_128(__m128i *ch0,__m128i *ch1) {
-  __m128i amp = _mm_set1_epi16(ONE_OVER_SQRT2_Q15);
-  __m128i tmp0,tmp1;
-  //_mm_mulhi_epi16
+void prec2A_TM3_128(simde__m128i *ch0,simde__m128i *ch1) {
+  simde__m128i amp = simde_mm_set1_epi16(ONE_OVER_SQRT2_Q15);
+  simde__m128i tmp0,tmp1;
+  //simde_mm_mulhi_epi16
   //  print_shorts("prec2A_TM3 ch0 (before):",ch0);
   //  print_shorts("prec2A_TM3 ch1 (before):",ch1);
   tmp0 = ch0[0];
-  tmp1  = _mm_sign_epi16(ch1[0],((__m128i *)&TM3_prec)[0]);
-  //  print_shorts("prec2A_TM3 ch1*s (mid):",(__m128i*)TM3_prec);
-  ch0[0] = _mm_adds_epi16(ch0[0],tmp1);
-  ch1[0] = _mm_subs_epi16(tmp0,tmp1);
-  ch0[0] = _mm_mulhi_epi16(ch0[0],amp);
-  ch0[0] = _mm_slli_epi16(ch0[0],1);
-  ch1[0] = _mm_mulhi_epi16(ch1[0],amp);
-  ch1[0] = _mm_slli_epi16(ch1[0],1);
+  tmp1  = simde_mm_sign_epi16(ch1[0],((simde__m128i *)&TM3_prec)[0]);
+  //  print_shorts("prec2A_TM3 ch1*s (mid):",(simde__m128i *)TM3_prec);
+  ch0[0] = simde_mm_adds_epi16(ch0[0],tmp1);
+  ch1[0] = simde_mm_subs_epi16(tmp0,tmp1);
+  ch0[0] = simde_mm_mulhi_epi16(ch0[0],amp);
+  ch0[0] = simde_mm_slli_epi16(ch0[0],1);
+  ch1[0] = simde_mm_mulhi_epi16(ch1[0],amp);
+  ch1[0] = simde_mm_slli_epi16(ch1[0],1);
   //  print_shorts("prec2A_TM3 ch0 (mid):",&tmp0);
   //  print_shorts("prec2A_TM3 ch1 (mid):",ch1);
-  //ch0[0] = _mm_mulhi_epi16(ch0[0],amp);
-  //ch0[0] = _mm_slli_epi16(ch0[0],1);
-  //ch1[0] = _mm_mulhi_epi16(ch1[0],amp);
-  //ch1[0] = _mm_slli_epi16(ch1[0],1);
-  //ch0[0] = _mm_srai_epi16(ch0[0],1);
-  //ch1[0] = _mm_srai_epi16(ch1[0],1);
+  //ch0[0] = simde_mm_mulhi_epi16(ch0[0],amp);
+  //ch0[0] = simde_mm_slli_epi16(ch0[0],1);
+  //ch1[0] = simde_mm_mulhi_epi16(ch1[0],amp);
+  //ch1[0] = simde_mm_slli_epi16(ch1[0],1);
+  //ch0[0] = simde_mm_srai_epi16(ch0[0],1);
+  //ch1[0] = simde_mm_srai_epi16(ch1[0],1);
   //  print_shorts("prec2A_TM3 ch0 (after):",ch0);
   //  print_shorts("prec2A_TM3 ch1 (after):",ch1);
-  _mm_empty();
-  _m_empty();
+  simde_mm_empty();
+  simde_m_empty();
 }
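Note: TM3_prec = {1,1,-1,-1,...} over int16 lanes, so it flips the sign of ch1 on every other complex RE. The block above therefore builds the two large-delay-CDD effective channels, scaled in Q15 by the same mulhi/slli multiply:

    /* s(k) = +1,-1,+1,-1,... over complex REs (the TM3_prec pattern):
       h_stream0(k) = (ch0(k) + s(k)*ch1(k)) * ONE_OVER_SQRT2_Q15
       h_stream1(k) = (ch0(k) - s(k)*ch1(k)) * ONE_OVER_SQRT2_Q15 */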
 
 // pmi = 0 => stream 0 (1,1), stream 1 (1,-1)
 // pmi = 1 => stream 0 (1,j), stream 1 (1,-j)
 
-void prec2A_TM4_128(int pmi,__m128i *ch0,__m128i *ch1) {
+void prec2A_TM4_128(int pmi,simde__m128i *ch0,simde__m128i *ch1) {
   // sqrt(2) is already taken into account in the computation of sqrt_rho_a and sqrt_rho_b,
   // so the divide by 2 is replaced by a divide by sqrt(2).
   // printf ("demod pmi=%d\n", pmi);
-  __m128i amp;
-  amp = _mm_set1_epi16(ONE_OVER_SQRT2_Q15);
-  __m128i tmp0,tmp1;
+  simde__m128i amp;
+  amp = simde_mm_set1_epi16(ONE_OVER_SQRT2_Q15);
+  simde__m128i tmp0,tmp1;
 
   // print_shorts("prec2A_TM4 ch0 (before):",ch0);
   // print_shorts("prec2A_TM4 ch1 (before):",ch1);
@@ -1988,29 +1761,29 @@ void prec2A_TM4_128(int pmi,__m128i *ch0,__m128i *ch1) {
   if (pmi == 0) { //[1 1;1 -1]
     tmp0 = ch0[0];
     tmp1 = ch1[0];
-    ch0[0] = _mm_adds_epi16(tmp0,tmp1);
-    ch1[0] = _mm_subs_epi16(tmp0,tmp1);
+    ch0[0] = simde_mm_adds_epi16(tmp0,tmp1);
+    ch1[0] = simde_mm_subs_epi16(tmp0,tmp1);
   } else { //ch0+j*ch1 ch0-j*ch1
     tmp0 = ch0[0];
-    tmp1   = _mm_sign_epi16(ch1[0],*(__m128i *)&conjugate[0]);
-    tmp1   = _mm_shufflelo_epi16(tmp1,_MM_SHUFFLE(2,3,0,1));
-    tmp1   = _mm_shufflehi_epi16(tmp1,_MM_SHUFFLE(2,3,0,1));
-    ch0[0] = _mm_subs_epi16(tmp0,tmp1);
-    ch1[0] = _mm_add_epi16(tmp0,tmp1);
+    tmp1   = simde_mm_sign_epi16(ch1[0],*(simde__m128i *)&conjugate[0]);
+    tmp1   = simde_mm_shufflelo_epi16(tmp1, SIMDE_MM_SHUFFLE(2,3,0,1));
+    tmp1   = simde_mm_shufflehi_epi16(tmp1, SIMDE_MM_SHUFFLE(2,3,0,1));
+    ch0[0] = simde_mm_subs_epi16(tmp0,tmp1);
+    ch1[0] = simde_mm_add_epi16(tmp0,tmp1);
   }
 
   //print_shorts("prec2A_TM4 ch0 (middle):",ch0);
   //print_shorts("prec2A_TM4 ch1 (middle):",ch1);
-  ch0[0] = _mm_mulhi_epi16(ch0[0],amp);
-  ch0[0] = _mm_slli_epi16(ch0[0],1);
-  ch1[0] = _mm_mulhi_epi16(ch1[0],amp);
-  ch1[0] = _mm_slli_epi16(ch1[0],1);
-  // ch0[0] = _mm_srai_epi16(ch0[0],1); //divide by 2
-  // ch1[0] = _mm_srai_epi16(ch1[0],1); //divide by 2
+  ch0[0] = simde_mm_mulhi_epi16(ch0[0],amp);
+  ch0[0] = simde_mm_slli_epi16(ch0[0],1);
+  ch1[0] = simde_mm_mulhi_epi16(ch1[0],amp);
+  ch1[0] = simde_mm_slli_epi16(ch1[0],1);
+  // ch0[0] = simde_mm_srai_epi16(ch0[0],1); //divide by 2
+  // ch1[0] = simde_mm_srai_epi16(ch1[0],1); //divide by 2
   //print_shorts("prec2A_TM4 ch0 (end):",ch0);
   //print_shorts("prec2A_TM4 ch1 (end):",ch1);
-  _mm_empty();
-  _m_empty();
+  simde_mm_empty();
+  simde_m_empty();
   // print_shorts("prec2A_TM4 ch0 (end):",ch0);
   //print_shorts("prec2A_TM4 ch1 (end):",ch1);
 }
@@ -2029,34 +1802,33 @@ void dlsch_channel_compensation_TM56(int **rxdataF_ext,
                                      unsigned short nb_rb,
                                      unsigned char output_shift,
                                      unsigned char dl_power_off) {
-#if defined(__x86_64__) || defined(__i386__)
   unsigned short rb,Nre;
-  __m128i *dl_ch0_128,*dl_ch1_128,*dl_ch_mag128,*dl_ch_mag128b,*rxdataF128,*rxdataF_comp128;
+  simde__m128i *dl_ch0_128,*dl_ch1_128,*dl_ch_mag128,*dl_ch_mag128b,*rxdataF128,*rxdataF_comp128;
   unsigned char aarx=0,symbol_mod,pilots=0;
   int precoded_signal_strength=0;
-  __m128i mmtmpD0,mmtmpD1,mmtmpD2,mmtmpD3,QAM_amp128={0};
+  simde__m128i mmtmpD0,mmtmpD1,mmtmpD2,mmtmpD3,QAM_amp128={0};
   symbol_mod = (symbol>=(7-frame_parms->Ncp)) ? symbol-(7-frame_parms->Ncp) : symbol;
 
   if ((symbol_mod == 0) || (symbol_mod == (4-frame_parms->Ncp)))
     pilots=1;
 
   //printf("comp prec: symbol %d, pilots %d\n",symbol, pilots);
-  __m128i QAM_amp128b = _mm_setzero_si128();
+  simde__m128i QAM_amp128b = simde_mm_setzero_si128();
 
   if (mod_order == 4) {
-    QAM_amp128 = _mm_set1_epi16(QAM16_n1);
+    QAM_amp128 = simde_mm_set1_epi16(QAM16_n1);
   } else if (mod_order == 6) {
-    QAM_amp128  = _mm_set1_epi16(QAM64_n1);
-    QAM_amp128b = _mm_set1_epi16(QAM64_n2);
+    QAM_amp128  = simde_mm_set1_epi16(QAM64_n1);
+    QAM_amp128b = simde_mm_set1_epi16(QAM64_n2);
   }
 
   for (aarx=0; aarx<frame_parms->nb_antennas_rx; aarx++) {
-    dl_ch0_128          = (__m128i *)&dl_ch_estimates_ext[aarx][symbol*frame_parms->N_RB_DL*12];
-    dl_ch1_128          = (__m128i *)&dl_ch_estimates_ext[2+aarx][symbol*frame_parms->N_RB_DL*12];
-    dl_ch_mag128      = (__m128i *)&dl_ch_mag[aarx][symbol*frame_parms->N_RB_DL*12];
-    dl_ch_mag128b     = (__m128i *)&dl_ch_magb[aarx][symbol*frame_parms->N_RB_DL*12];
-    rxdataF128        = (__m128i *)&rxdataF_ext[aarx][symbol*frame_parms->N_RB_DL*12];
-    rxdataF_comp128   = (__m128i *)&rxdataF_comp[aarx][symbol*frame_parms->N_RB_DL*12];
+    dl_ch0_128          = (simde__m128i *)&dl_ch_estimates_ext[aarx][symbol*frame_parms->N_RB_DL*12];
+    dl_ch1_128          = (simde__m128i *)&dl_ch_estimates_ext[2+aarx][symbol*frame_parms->N_RB_DL*12];
+    dl_ch_mag128      = (simde__m128i *)&dl_ch_mag[aarx][symbol*frame_parms->N_RB_DL*12];
+    dl_ch_mag128b     = (simde__m128i *)&dl_ch_magb[aarx][symbol*frame_parms->N_RB_DL*12];
+    rxdataF128        = (simde__m128i *)&rxdataF_ext[aarx][symbol*frame_parms->N_RB_DL*12];
+    rxdataF_comp128   = (simde__m128i *)&rxdataF_comp[aarx][symbol*frame_parms->N_RB_DL*12];
 
     for (rb=0; rb<nb_rb; rb++) {
       // combine TX channels using precoder from pmi
@@ -2072,97 +1844,97 @@ void dlsch_channel_compensation_TM56(int **rxdataF_ext,
 
       if (mod_order>2) {
         // get channel amplitude if not QPSK
-        mmtmpD0 = _mm_madd_epi16(dl_ch0_128[0],dl_ch0_128[0]);
-        mmtmpD0 = _mm_srai_epi32(mmtmpD0,output_shift);
-        mmtmpD1 = _mm_madd_epi16(dl_ch0_128[1],dl_ch0_128[1]);
-        mmtmpD1 = _mm_srai_epi32(mmtmpD1,output_shift);
-        mmtmpD0 = _mm_packs_epi32(mmtmpD0,mmtmpD1);
-        dl_ch_mag128[0] = _mm_unpacklo_epi16(mmtmpD0,mmtmpD0);
+        mmtmpD0 = simde_mm_madd_epi16(dl_ch0_128[0],dl_ch0_128[0]);
+        mmtmpD0 = simde_mm_srai_epi32(mmtmpD0,output_shift);
+        mmtmpD1 = simde_mm_madd_epi16(dl_ch0_128[1],dl_ch0_128[1]);
+        mmtmpD1 = simde_mm_srai_epi32(mmtmpD1,output_shift);
+        mmtmpD0 = simde_mm_packs_epi32(mmtmpD0,mmtmpD1);
+        dl_ch_mag128[0] = simde_mm_unpacklo_epi16(mmtmpD0,mmtmpD0);
         dl_ch_mag128b[0] = dl_ch_mag128[0];
-        dl_ch_mag128[0] = _mm_mulhi_epi16(dl_ch_mag128[0],QAM_amp128);
-        dl_ch_mag128[0] = _mm_slli_epi16(dl_ch_mag128[0],1);
+        dl_ch_mag128[0] = simde_mm_mulhi_epi16(dl_ch_mag128[0],QAM_amp128);
+        dl_ch_mag128[0] = simde_mm_slli_epi16(dl_ch_mag128[0],1);
         //print_shorts("dl_ch_mag128[0]=",&dl_ch_mag128[0]);
         //print_shorts("dl_ch_mag128[0]=",&dl_ch_mag128[0]);
-        dl_ch_mag128[1] = _mm_unpackhi_epi16(mmtmpD0,mmtmpD0);
+        dl_ch_mag128[1] = simde_mm_unpackhi_epi16(mmtmpD0,mmtmpD0);
         dl_ch_mag128b[1] = dl_ch_mag128[1];
-        dl_ch_mag128[1] = _mm_mulhi_epi16(dl_ch_mag128[1],QAM_amp128);
-        dl_ch_mag128[1] = _mm_slli_epi16(dl_ch_mag128[1],1);
+        dl_ch_mag128[1] = simde_mm_mulhi_epi16(dl_ch_mag128[1],QAM_amp128);
+        dl_ch_mag128[1] = simde_mm_slli_epi16(dl_ch_mag128[1],1);
 
         if (pilots==0) {
-          mmtmpD0 = _mm_madd_epi16(dl_ch0_128[2],dl_ch0_128[2]);
-          mmtmpD0 = _mm_srai_epi32(mmtmpD0,output_shift);
-          mmtmpD1 = _mm_packs_epi32(mmtmpD0,mmtmpD0);
-          dl_ch_mag128[2] = _mm_unpacklo_epi16(mmtmpD1,mmtmpD1);
+          mmtmpD0 = simde_mm_madd_epi16(dl_ch0_128[2],dl_ch0_128[2]);
+          mmtmpD0 = simde_mm_srai_epi32(mmtmpD0,output_shift);
+          mmtmpD1 = simde_mm_packs_epi32(mmtmpD0,mmtmpD0);
+          dl_ch_mag128[2] = simde_mm_unpacklo_epi16(mmtmpD1,mmtmpD1);
           dl_ch_mag128b[2] = dl_ch_mag128[2];
-          dl_ch_mag128[2] = _mm_mulhi_epi16(dl_ch_mag128[2],QAM_amp128);
-          dl_ch_mag128[2] = _mm_slli_epi16(dl_ch_mag128[2],1);
+          dl_ch_mag128[2] = simde_mm_mulhi_epi16(dl_ch_mag128[2],QAM_amp128);
+          dl_ch_mag128[2] = simde_mm_slli_epi16(dl_ch_mag128[2],1);
         }
 
-        dl_ch_mag128b[0] = _mm_mulhi_epi16(dl_ch_mag128b[0],QAM_amp128b);
-        dl_ch_mag128b[0] = _mm_slli_epi16(dl_ch_mag128b[0],1);
+        dl_ch_mag128b[0] = simde_mm_mulhi_epi16(dl_ch_mag128b[0],QAM_amp128b);
+        dl_ch_mag128b[0] = simde_mm_slli_epi16(dl_ch_mag128b[0],1);
         //print_shorts("dl_ch_mag128b[0]=",&dl_ch_mag128b[0]);
-        dl_ch_mag128b[1] = _mm_mulhi_epi16(dl_ch_mag128b[1],QAM_amp128b);
-        dl_ch_mag128b[1] = _mm_slli_epi16(dl_ch_mag128b[1],1);
+        dl_ch_mag128b[1] = simde_mm_mulhi_epi16(dl_ch_mag128b[1],QAM_amp128b);
+        dl_ch_mag128b[1] = simde_mm_slli_epi16(dl_ch_mag128b[1],1);
 
         if (pilots==0) {
-          dl_ch_mag128b[2] = _mm_mulhi_epi16(dl_ch_mag128b[2],QAM_amp128b);
-          dl_ch_mag128b[2] = _mm_slli_epi16(dl_ch_mag128b[2],1);
+          dl_ch_mag128b[2] = simde_mm_mulhi_epi16(dl_ch_mag128b[2],QAM_amp128b);
+          dl_ch_mag128b[2] = simde_mm_slli_epi16(dl_ch_mag128b[2],1);
         }
       }
 
       // MF multiply by conjugated channel
-      mmtmpD0 = _mm_madd_epi16(dl_ch0_128[0],rxdataF128[0]);
+      mmtmpD0 = simde_mm_madd_epi16(dl_ch0_128[0],rxdataF128[0]);
       //        print_ints("re",&mmtmpD0);
       // mmtmpD0 contains real part of 4 consecutive outputs (32-bit)
-      mmtmpD1 = _mm_shufflelo_epi16(dl_ch0_128[0],_MM_SHUFFLE(2,3,0,1));
-      mmtmpD1 = _mm_shufflehi_epi16(mmtmpD1,_MM_SHUFFLE(2,3,0,1));
-      mmtmpD1 = _mm_sign_epi16(mmtmpD1,*(__m128i *)&conjugate[0]);
+      mmtmpD1 = simde_mm_shufflelo_epi16(dl_ch0_128[0], SIMDE_MM_SHUFFLE(2,3,0,1));
+      mmtmpD1 = simde_mm_shufflehi_epi16(mmtmpD1, SIMDE_MM_SHUFFLE(2,3,0,1));
+      mmtmpD1 = simde_mm_sign_epi16(mmtmpD1,*(simde__m128i *)&conjugate[0]);
       //        print_ints("im",&mmtmpD1);
-      mmtmpD1 = _mm_madd_epi16(mmtmpD1,rxdataF128[0]);
+      mmtmpD1 = simde_mm_madd_epi16(mmtmpD1,rxdataF128[0]);
       // mmtmpD1 contains imag part of 4 consecutive outputs (32-bit)
-      mmtmpD0 = _mm_srai_epi32(mmtmpD0,output_shift);
+      mmtmpD0 = simde_mm_srai_epi32(mmtmpD0,output_shift);
       //        print_ints("re(shift)",&mmtmpD0);
-      mmtmpD1 = _mm_srai_epi32(mmtmpD1,output_shift);
+      mmtmpD1 = simde_mm_srai_epi32(mmtmpD1,output_shift);
       //        print_ints("im(shift)",&mmtmpD1);
-      mmtmpD2 = _mm_unpacklo_epi32(mmtmpD0,mmtmpD1);
-      mmtmpD3 = _mm_unpackhi_epi32(mmtmpD0,mmtmpD1);
+      mmtmpD2 = simde_mm_unpacklo_epi32(mmtmpD0,mmtmpD1);
+      mmtmpD3 = simde_mm_unpackhi_epi32(mmtmpD0,mmtmpD1);
       //        print_ints("c0",&mmtmpD2);
       //        print_ints("c1",&mmtmpD3);
-      rxdataF_comp128[0] = _mm_packs_epi32(mmtmpD2,mmtmpD3);
+      rxdataF_comp128[0] = simde_mm_packs_epi32(mmtmpD2,mmtmpD3);
       //        print_shorts("rx:",rxdataF128);
       //        print_shorts("ch:",dl_ch128);
       //        print_shorts("pack:",rxdataF_comp128);
       // multiply by conjugated channel
-      mmtmpD0 = _mm_madd_epi16(dl_ch0_128[1],rxdataF128[1]);
+      mmtmpD0 = simde_mm_madd_epi16(dl_ch0_128[1],rxdataF128[1]);
       // mmtmpD0 contains real part of 4 consecutive outputs (32-bit)
-      mmtmpD1 = _mm_shufflelo_epi16(dl_ch0_128[1],_MM_SHUFFLE(2,3,0,1));
-      mmtmpD1 = _mm_shufflehi_epi16(mmtmpD1,_MM_SHUFFLE(2,3,0,1));
-      mmtmpD1 = _mm_sign_epi16(mmtmpD1,*(__m128i *)conjugate);
-      mmtmpD1 = _mm_madd_epi16(mmtmpD1,rxdataF128[1]);
+      mmtmpD1 = simde_mm_shufflelo_epi16(dl_ch0_128[1], SIMDE_MM_SHUFFLE(2,3,0,1));
+      mmtmpD1 = simde_mm_shufflehi_epi16(mmtmpD1, SIMDE_MM_SHUFFLE(2,3,0,1));
+      mmtmpD1 = simde_mm_sign_epi16(mmtmpD1,*(simde__m128i *)conjugate);
+      mmtmpD1 = simde_mm_madd_epi16(mmtmpD1,rxdataF128[1]);
       // mmtmpD1 contains imag part of 4 consecutive outputs (32-bit)
-      mmtmpD0 = _mm_srai_epi32(mmtmpD0,output_shift);
-      mmtmpD1 = _mm_srai_epi32(mmtmpD1,output_shift);
-      mmtmpD2 = _mm_unpacklo_epi32(mmtmpD0,mmtmpD1);
-      mmtmpD3 = _mm_unpackhi_epi32(mmtmpD0,mmtmpD1);
-      rxdataF_comp128[1] = _mm_packs_epi32(mmtmpD2,mmtmpD3);
+      mmtmpD0 = simde_mm_srai_epi32(mmtmpD0,output_shift);
+      mmtmpD1 = simde_mm_srai_epi32(mmtmpD1,output_shift);
+      mmtmpD2 = simde_mm_unpacklo_epi32(mmtmpD0,mmtmpD1);
+      mmtmpD3 = simde_mm_unpackhi_epi32(mmtmpD0,mmtmpD1);
+      rxdataF_comp128[1] = simde_mm_packs_epi32(mmtmpD2,mmtmpD3);
       //  print_shorts("rx:",rxdataF128+1);
       //  print_shorts("ch:",dl_ch128+1);
       //  print_shorts("pack:",rxdataF_comp128+1);
 
       if (pilots==0) {
         // multiply by conjugated channel
-        mmtmpD0 = _mm_madd_epi16(dl_ch0_128[2],rxdataF128[2]);
+        mmtmpD0 = simde_mm_madd_epi16(dl_ch0_128[2],rxdataF128[2]);
         // mmtmpD0 contains real part of 4 consecutive outputs (32-bit)
-        mmtmpD1 = _mm_shufflelo_epi16(dl_ch0_128[2],_MM_SHUFFLE(2,3,0,1));
-        mmtmpD1 = _mm_shufflehi_epi16(mmtmpD1,_MM_SHUFFLE(2,3,0,1));
-        mmtmpD1 = _mm_sign_epi16(mmtmpD1,*(__m128i *)conjugate);
-        mmtmpD1 = _mm_madd_epi16(mmtmpD1,rxdataF128[2]);
+        mmtmpD1 = simde_mm_shufflelo_epi16(dl_ch0_128[2], SIMDE_MM_SHUFFLE(2,3,0,1));
+        mmtmpD1 = simde_mm_shufflehi_epi16(mmtmpD1, SIMDE_MM_SHUFFLE(2,3,0,1));
+        mmtmpD1 = simde_mm_sign_epi16(mmtmpD1,*(simde__m128i *)conjugate);
+        mmtmpD1 = simde_mm_madd_epi16(mmtmpD1,rxdataF128[2]);
         // mmtmpD1 contains imag part of 4 consecutive outputs (32-bit)
-        mmtmpD0 = _mm_srai_epi32(mmtmpD0,output_shift);
-        mmtmpD1 = _mm_srai_epi32(mmtmpD1,output_shift);
-        mmtmpD2 = _mm_unpacklo_epi32(mmtmpD0,mmtmpD1);
-        mmtmpD3 = _mm_unpackhi_epi32(mmtmpD0,mmtmpD1);
-        rxdataF_comp128[2] = _mm_packs_epi32(mmtmpD2,mmtmpD3);
+        mmtmpD0 = simde_mm_srai_epi32(mmtmpD0,output_shift);
+        mmtmpD1 = simde_mm_srai_epi32(mmtmpD1,output_shift);
+        mmtmpD2 = simde_mm_unpacklo_epi32(mmtmpD0,mmtmpD1);
+        mmtmpD3 = simde_mm_unpackhi_epi32(mmtmpD0,mmtmpD1);
+        rxdataF_comp128[2] = simde_mm_packs_epi32(mmtmpD2,mmtmpD3);
         //  print_shorts("rx:",rxdataF128+2);
         //  print_shorts("ch:",dl_ch128+2);
         //        print_shorts("pack:",rxdataF_comp128+2);
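Note on the pilots branch above: with two eNB antenna ports, the cell-specific reference signals occupy 4 of the 12 REs per RB in pilot-bearing symbols, leaving 8 PDSCH REs. Hence only two 4-RE vectors are processed there, the pointers advance by 2 instead of 3, and the signal-strength average below uses Nre = 8 instead of 12 (nb_rb*Nre REs in total).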
@@ -2190,162 +1962,7 @@ void dlsch_channel_compensation_TM56(int **rxdataF_ext,
   measurements->precoded_cqi_dB[eNB_id][0] = dB_fixed2(precoded_signal_strength,measurements->n0_power_tot);
   //printf("eNB_id %d, symbol %d: precoded CQI %d dB\n",eNB_id,symbol,
   //   measurements->precoded_cqi_dB[eNB_id][0]);
-#elif defined(__arm__) || defined(__aarch64__)
-  uint32_t rb,Nre;
-  uint32_t aarx,symbol_mod,pilots=0;
-  int16x4_t *dl_ch0_128,*dl_ch1_128,*rxdataF128;
-  int16x8_t *dl_ch0_128b,*dl_ch1_128b;
-  int32x4_t mmtmpD0,mmtmpD1,mmtmpD0b,mmtmpD1b;
-  int16x8_t *dl_ch_mag128,*dl_ch_mag128b,mmtmpD2,mmtmpD3,mmtmpD4,*rxdataF_comp128;
-  int16x8_t QAM_amp128,QAM_amp128b;
-  int16_t conj[4]__attribute__((aligned(16))) = {1,-1,1,-1};
-  int32x4_t output_shift128 = vmovq_n_s32(-(int32_t)output_shift);
-  int32_t precoded_signal_strength=0;
-  symbol_mod = (symbol>=(7-frame_parms->Ncp)) ? symbol-(7-frame_parms->Ncp) : symbol;
-
-  if ((symbol_mod == 0) || (symbol_mod == (4-frame_parms->Ncp))) {
-    if (frame_parms->nb_antenna_ports_eNB==1) { // 10 out of 12 so don't reduce size
-      nb_rb=1+(5*nb_rb/6);
-    } else {
-      pilots=1;
-    }
-  }
 
-  if (mod_order == 4) {
-    QAM_amp128  = vmovq_n_s16(QAM16_n1);  // 2/sqrt(10)
-    QAM_amp128b = vmovq_n_s16(0);
-  } else if (mod_order == 6) {
-    QAM_amp128  = vmovq_n_s16(QAM64_n1); //
-    QAM_amp128b = vmovq_n_s16(QAM64_n2);
-  }
-
-  //    printf("comp: rxdataF_comp %p, symbol %d\n",rxdataF_comp[0],symbol);
-
-  for (aarx=0; aarx<frame_parms->nb_antennas_rx; aarx++) {
-    dl_ch0_128          = (int16x4_t *)&dl_ch_estimates_ext[aarx][symbol*frame_parms->N_RB_DL*12];
-    dl_ch1_128          = (int16x4_t *)&dl_ch_estimates_ext[2+aarx][symbol*frame_parms->N_RB_DL*12];
-    dl_ch0_128b         = (int16x8_t *)&dl_ch_estimates_ext[aarx][symbol*frame_parms->N_RB_DL*12];
-    dl_ch1_128b         = (int16x8_t *)&dl_ch_estimates_ext[2+aarx][symbol*frame_parms->N_RB_DL*12];
-    dl_ch_mag128        = (int16x8_t *)&dl_ch_mag[aarx][symbol*frame_parms->N_RB_DL*12];
-    dl_ch_mag128b       = (int16x8_t *)&dl_ch_magb[aarx][symbol*frame_parms->N_RB_DL*12];
-    rxdataF128          = (int16x4_t *)&rxdataF_ext[aarx][symbol*frame_parms->N_RB_DL*12];
-    rxdataF_comp128     = (int16x8_t *)&rxdataF_comp[aarx][symbol*frame_parms->N_RB_DL*12];
-
-    for (rb=0; rb<nb_rb; rb++) {
-#ifdef DEBUG_DLSCH_DEMOD
-      printf("mode 6 prec: rb %u, pmi->%u\n",rb,pmi_ext[rb]);
-#endif
-      prec2A_TM56_128(pmi_ext[rb],&dl_ch0_128b[0],&dl_ch1_128b[0]);
-      prec2A_TM56_128(pmi_ext[rb],&dl_ch0_128b[1],&dl_ch1_128b[1]);
-
-      if (pilots==0) {
-        prec2A_TM56_128(pmi_ext[rb],&dl_ch0_128b[2],&dl_ch1_128b[2]);
-      }
-
-      if (mod_order>2) {
-        // get channel amplitude if not QPSK
-        mmtmpD0 = vmull_s16(dl_ch0_128[0], dl_ch0_128[0]);
-        // mmtmpD0 = [ch0*ch0,ch1*ch1,ch2*ch2,ch3*ch3];
-        mmtmpD0 = vqshlq_s32(vqaddq_s32(mmtmpD0,vrev64q_s32(mmtmpD0)),output_shift128);
-        // mmtmpD0 = [ch0*ch0 + ch1*ch1,ch0*ch0 + ch1*ch1,ch2*ch2 + ch3*ch3,ch2*ch2 + ch3*ch3]>>output_shift128 on 32-bits
-        mmtmpD1 = vmull_s16(dl_ch0_128[1], dl_ch0_128[1]);
-        mmtmpD1 = vqshlq_s32(vqaddq_s32(mmtmpD1,vrev64q_s32(mmtmpD1)),output_shift128);
-        mmtmpD2 = vcombine_s16(vmovn_s32(mmtmpD0),vmovn_s32(mmtmpD1));
-        // mmtmpD2 = [ch0*ch0 + ch1*ch1,ch0*ch0 + ch1*ch1,ch2*ch2 + ch3*ch3,ch2*ch2 + ch3*ch3,ch4*ch4 + ch5*ch5,ch4*ch4 + ch5*ch5,ch6*ch6 + ch7*ch7,ch6*ch6 + ch7*ch7]>>output_shift128 on 16-bits
-        mmtmpD0 = vmull_s16(dl_ch0_128[2], dl_ch0_128[2]);
-        mmtmpD0 = vqshlq_s32(vqaddq_s32(mmtmpD0,vrev64q_s32(mmtmpD0)),output_shift128);
-        mmtmpD1 = vmull_s16(dl_ch0_128[3], dl_ch0_128[3]);
-        mmtmpD1 = vqshlq_s32(vqaddq_s32(mmtmpD1,vrev64q_s32(mmtmpD1)),output_shift128);
-        mmtmpD3 = vcombine_s16(vmovn_s32(mmtmpD0),vmovn_s32(mmtmpD1));
-
-        if (pilots==0) {
-          mmtmpD0 = vmull_s16(dl_ch0_128[4], dl_ch0_128[4]);
-          mmtmpD0 = vqshlq_s32(vqaddq_s32(mmtmpD0,vrev64q_s32(mmtmpD0)),output_shift128);
-          mmtmpD1 = vmull_s16(dl_ch0_128[5], dl_ch0_128[5]);
-          mmtmpD1 = vqshlq_s32(vqaddq_s32(mmtmpD1,vrev64q_s32(mmtmpD1)),output_shift128);
-          mmtmpD4 = vcombine_s16(vmovn_s32(mmtmpD0),vmovn_s32(mmtmpD1));
-        }
-
-        dl_ch_mag128b[0] = vqdmulhq_s16(mmtmpD2,QAM_amp128b);
-        dl_ch_mag128b[1] = vqdmulhq_s16(mmtmpD3,QAM_amp128b);
-        dl_ch_mag128[0] = vqdmulhq_s16(mmtmpD2,QAM_amp128);
-        dl_ch_mag128[1] = vqdmulhq_s16(mmtmpD3,QAM_amp128);
-
-        if (pilots==0) {
-          dl_ch_mag128b[2] = vqdmulhq_s16(mmtmpD4,QAM_amp128b);
-          dl_ch_mag128[2]  = vqdmulhq_s16(mmtmpD4,QAM_amp128);
-        }
-      }
-
-      mmtmpD0 = vmull_s16(dl_ch0_128[0], rxdataF128[0]);
-      //mmtmpD0 = [Re(ch[0])Re(rx[0]) Im(ch[0])Im(ch[0]) Re(ch[1])Re(rx[1]) Im(ch[1])Im(ch[1])]
-      mmtmpD1 = vmull_s16(dl_ch0_128[1], rxdataF128[1]);
-      //mmtmpD1 = [Re(ch[2])Re(rx[2]) Im(ch[2])Im(ch[2]) Re(ch[3])Re(rx[3]) Im(ch[3])Im(ch[3])]
-      mmtmpD0 = vcombine_s32(vpadd_s32(vget_low_s32(mmtmpD0),vget_high_s32(mmtmpD0)),
-                             vpadd_s32(vget_low_s32(mmtmpD1),vget_high_s32(mmtmpD1)));
-      //mmtmpD0 = [Re(ch[0])Re(rx[0])+Im(ch[0])Im(ch[0]) Re(ch[1])Re(rx[1])+Im(ch[1])Im(ch[1]) Re(ch[2])Re(rx[2])+Im(ch[2])Im(ch[2]) Re(ch[3])Re(rx[3])+Im(ch[3])Im(ch[3])]
-      mmtmpD0b = vmull_s16(vrev32_s16(vmul_s16(dl_ch0_128[0],*(int16x4_t *)conj)), rxdataF128[0]);
-      //mmtmpD0 = [-Im(ch[0])Re(rx[0]) Re(ch[0])Im(rx[0]) -Im(ch[1])Re(rx[1]) Re(ch[1])Im(rx[1])]
-      mmtmpD1b = vmull_s16(vrev32_s16(vmul_s16(dl_ch0_128[1],*(int16x4_t *)conj)), rxdataF128[1]);
-      //mmtmpD0 = [-Im(ch[2])Re(rx[2]) Re(ch[2])Im(rx[2]) -Im(ch[3])Re(rx[3]) Re(ch[3])Im(rx[3])]
-      mmtmpD1 = vcombine_s32(vpadd_s32(vget_low_s32(mmtmpD0b),vget_high_s32(mmtmpD0b)),
-                             vpadd_s32(vget_low_s32(mmtmpD1b),vget_high_s32(mmtmpD1b)));
-      //mmtmpD1 = [-Im(ch[0])Re(rx[0])+Re(ch[0])Im(rx[0]) -Im(ch[1])Re(rx[1])+Re(ch[1])Im(rx[1]) -Im(ch[2])Re(rx[2])+Re(ch[2])Im(rx[2]) -Im(ch[3])Re(rx[3])+Re(ch[3])Im(rx[3])]
-      mmtmpD0 = vqshlq_s32(mmtmpD0,output_shift128);
-      mmtmpD1 = vqshlq_s32(mmtmpD1,output_shift128);
-      rxdataF_comp128[0] = vcombine_s16(vmovn_s32(mmtmpD0),vmovn_s32(mmtmpD1));
-      mmtmpD0 = vmull_s16(dl_ch0_128[2], rxdataF128[2]);
-      mmtmpD1 = vmull_s16(dl_ch0_128[3], rxdataF128[3]);
-      mmtmpD0 = vcombine_s32(vpadd_s32(vget_low_s32(mmtmpD0),vget_high_s32(mmtmpD0)),
-                             vpadd_s32(vget_low_s32(mmtmpD1),vget_high_s32(mmtmpD1)));
-      mmtmpD0b = vmull_s16(vrev32_s16(vmul_s16(dl_ch0_128[2],*(int16x4_t *)conj)), rxdataF128[2]);
-      mmtmpD1b = vmull_s16(vrev32_s16(vmul_s16(dl_ch0_128[3],*(int16x4_t *)conj)), rxdataF128[3]);
-      mmtmpD1 = vcombine_s32(vpadd_s32(vget_low_s32(mmtmpD0b),vget_high_s32(mmtmpD0b)),
-                             vpadd_s32(vget_low_s32(mmtmpD1b),vget_high_s32(mmtmpD1b)));
-      mmtmpD0 = vqshlq_s32(mmtmpD0,output_shift128);
-      mmtmpD1 = vqshlq_s32(mmtmpD1,output_shift128);
-      rxdataF_comp128[1] = vcombine_s16(vmovn_s32(mmtmpD0),vmovn_s32(mmtmpD1));
-
-      if (pilots==0) {
-        mmtmpD0 = vmull_s16(dl_ch0_128[4], rxdataF128[4]);
-        mmtmpD1 = vmull_s16(dl_ch0_128[5], rxdataF128[5]);
-        mmtmpD0 = vcombine_s32(vpadd_s32(vget_low_s32(mmtmpD0),vget_high_s32(mmtmpD0)),
-                               vpadd_s32(vget_low_s32(mmtmpD1),vget_high_s32(mmtmpD1)));
-        mmtmpD0b = vmull_s16(vrev32_s16(vmul_s16(dl_ch0_128[4],*(int16x4_t *)conj)), rxdataF128[4]);
-        mmtmpD1b = vmull_s16(vrev32_s16(vmul_s16(dl_ch0_128[5],*(int16x4_t *)conj)), rxdataF128[5]);
-        mmtmpD1 = vcombine_s32(vpadd_s32(vget_low_s32(mmtmpD0b),vget_high_s32(mmtmpD0b)),
-                               vpadd_s32(vget_low_s32(mmtmpD1b),vget_high_s32(mmtmpD1b)));
-        mmtmpD0 = vqshlq_s32(mmtmpD0,output_shift128);
-        mmtmpD1 = vqshlq_s32(mmtmpD1,output_shift128);
-        rxdataF_comp128[2] = vcombine_s16(vmovn_s32(mmtmpD0),vmovn_s32(mmtmpD1));
-        dl_ch0_128+=6;
-        dl_ch1_128+=6;
-        dl_ch_mag128+=3;
-        dl_ch_mag128b+=3;
-        rxdataF128+=6;
-        rxdataF_comp128+=3;
-      } else { // we have a smaller PDSCH in symbols with pilots so skip last group of 4 REs and increment less
-        dl_ch0_128+=4;
-        dl_ch1_128+=4;
-        dl_ch_mag128+=2;
-        dl_ch_mag128b+=2;
-        rxdataF128+=4;
-        rxdataF_comp128+=2;
-      }
-    }
-
-    Nre = (pilots==0) ? 12 : 8;
-    precoded_signal_strength += ((signal_energy_nodc(&dl_ch_estimates_ext[aarx][symbol*frame_parms->N_RB_DL*Nre],
-                                  (nb_rb*Nre))) - (measurements->n0_power[aarx]));
-    // rx_antennas
-  }
-
-  measurements->precoded_cqi_dB[eNB_id][0] = dB_fixed2(precoded_signal_strength,measurements->n0_power_tot);
-  //printf("eNB_id %d, symbol %d: precoded CQI %d dB\n",eNB_id,symbol,
-  //     measurements->precoded_cqi_dB[eNB_id][0]);
-#endif
-  _mm_empty();
-  _m_empty();
 }
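Note: the CQI update just above reduces to a fixed-point SNR in dB. Assuming dB_fixed2(x,y) is OAI's 10*log10(x/y) helper, the stored value is

    precoded_cqi_dB[eNB_id][0] = 10*log10(precoded_signal_strength / n0_power_tot)

with precoded_signal_strength accumulated per RX antenna as signal_energy_nodc() of the precoded channel estimate minus the per-antenna noise floor n0_power[aarx].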
 
 void precode_channel_est(int32_t **dl_ch_estimates_ext,
@@ -2355,7 +1972,7 @@ void precode_channel_est(int32_t **dl_ch_estimates_ext,
                          unsigned short nb_rb,
                          MIMO_mode_t mimo_mode) {
   unsigned short rb;
-  __m128i *dl_ch0_128,*dl_ch1_128;
+  simde__m128i *dl_ch0_128,*dl_ch1_128;
   unsigned char aarx=0,symbol_mod,pilots=0;
   unsigned char *pmi_ext = pdsch_vars->pmi_ext;
   symbol_mod = (symbol>=(7-frame_parms->Ncp)) ? symbol-(7-frame_parms->Ncp) : symbol;
@@ -2364,8 +1981,8 @@ void precode_channel_est(int32_t **dl_ch_estimates_ext,
     pilots=1;
 
   for (aarx=0; aarx<frame_parms->nb_antennas_rx; aarx++) {
-    dl_ch0_128          = (__m128i *)&dl_ch_estimates_ext[aarx][symbol*frame_parms->N_RB_DL*12]; // this is h11
-    dl_ch1_128          = (__m128i *)&dl_ch_estimates_ext[2+aarx][symbol*frame_parms->N_RB_DL*12]; // this is h12
+    dl_ch0_128          = (simde__m128i *)&dl_ch_estimates_ext[aarx][symbol*frame_parms->N_RB_DL*12]; // this is h11
+    dl_ch1_128          = (simde__m128i *)&dl_ch_estimates_ext[2+aarx][symbol*frame_parms->N_RB_DL*12]; // this is h12
 
     for (rb=0; rb<nb_rb; rb++) {
       if (mimo_mode==LARGE_CDD) {
@@ -2426,9 +2043,8 @@ void dlsch_channel_compensation_TM34(LTE_DL_FRAME_PARMS *frame_parms,
                                      unsigned short mmse_flag,
                                      unsigned char output_shift0,
                                      unsigned char output_shift1) {
-#if defined(__x86_64__) || defined(__i386__)
   unsigned short rb,Nre;
-  __m128i *dl_ch0_128,*dl_ch1_128,*dl_ch_mag0_128,*dl_ch_mag1_128,*dl_ch_mag0_128b,*dl_ch_mag1_128b,*rxdataF128,*rxdataF_comp0_128,*rxdataF_comp1_128;
+  simde__m128i *dl_ch0_128,*dl_ch1_128,*dl_ch_mag0_128,*dl_ch_mag1_128,*dl_ch_mag0_128b,*dl_ch_mag1_128b,*rxdataF128,*rxdataF_comp0_128,*rxdataF_comp1_128;
   unsigned char aarx=0,symbol_mod,pilots=0;
   int precoded_signal_strength0=0,precoded_signal_strength1=0;
   int rx_power_correction;
@@ -2441,7 +2057,7 @@ void dlsch_channel_compensation_TM34(LTE_DL_FRAME_PARMS *frame_parms,
   int **rxdataF_comp0         = pdsch_vars->rxdataF_comp0;
   int **rxdataF_comp1         = pdsch_vars->rxdataF_comp1[harq_pid][round];
   unsigned char *pmi_ext      = pdsch_vars->pmi_ext;
-  __m128i mmtmpD0,mmtmpD1,mmtmpD2,mmtmpD3,QAM_amp0_128={0},QAM_amp1_128={0};
+  simde__m128i mmtmpD0,mmtmpD1,mmtmpD2,mmtmpD3,QAM_amp0_128={0},QAM_amp1_128={0};
   symbol_mod = (symbol>=(7-frame_parms->Ncp)) ? symbol-(7-frame_parms->Ncp) : symbol;
 
   if ((symbol_mod == 0) || (symbol_mod == (4-frame_parms->Ncp)))
@@ -2449,34 +2065,34 @@ void dlsch_channel_compensation_TM34(LTE_DL_FRAME_PARMS *frame_parms,
 
   rx_power_correction = 1;
   // printf("comp prec: symbol %d, pilots %d\n",symbol, pilots);
-  __m128i  QAM_amp0_128b = _mm_setzero_si128();
+  simde__m128i  QAM_amp0_128b = simde_mm_setzero_si128();
 
   if (mod_order0 == 4) {
-    QAM_amp0_128  = _mm_set1_epi16(QAM16_n1);
+    QAM_amp0_128  = simde_mm_set1_epi16(QAM16_n1);
   } else if (mod_order0 == 6) {
-    QAM_amp0_128  = _mm_set1_epi16(QAM64_n1);
-    QAM_amp0_128b = _mm_set1_epi16(QAM64_n2);
+    QAM_amp0_128  = simde_mm_set1_epi16(QAM64_n1);
+    QAM_amp0_128b = simde_mm_set1_epi16(QAM64_n2);
   }
 
-  __m128i  QAM_amp1_128b = _mm_setzero_si128();
+  simde__m128i  QAM_amp1_128b = simde_mm_setzero_si128();
 
   if (mod_order1 == 4) {
-    QAM_amp1_128  = _mm_set1_epi16(QAM16_n1);
+    QAM_amp1_128  = simde_mm_set1_epi16(QAM16_n1);
   } else if (mod_order1 == 6) {
-    QAM_amp1_128  = _mm_set1_epi16(QAM64_n1);
-    QAM_amp1_128b = _mm_set1_epi16(QAM64_n2);
+    QAM_amp1_128  = simde_mm_set1_epi16(QAM64_n1);
+    QAM_amp1_128b = simde_mm_set1_epi16(QAM64_n2);
   }
 
   for (aarx=0; aarx<frame_parms->nb_antennas_rx; aarx++) {
-    dl_ch0_128          = (__m128i *)&dl_ch_estimates_ext[aarx][symbol*frame_parms->N_RB_DL*12]; // this is h11
-    dl_ch1_128          = (__m128i *)&dl_ch_estimates_ext[2+aarx][symbol*frame_parms->N_RB_DL*12]; // this is h12
-    dl_ch_mag0_128      = (__m128i *)&dl_ch_mag0[aarx][symbol*frame_parms->N_RB_DL*12]; //responsible for x1
-    dl_ch_mag0_128b     = (__m128i *)&dl_ch_magb0[aarx][symbol*frame_parms->N_RB_DL*12];//responsible for x1
-    dl_ch_mag1_128      = (__m128i *)&dl_ch_mag1[aarx][symbol*frame_parms->N_RB_DL*12];   //responsible for x2. always coming from tx2
-    dl_ch_mag1_128b     = (__m128i *)&dl_ch_magb1[aarx][symbol*frame_parms->N_RB_DL*12];  //responsible for x2. always coming from tx2
-    rxdataF128          = (__m128i *)&rxdataF_ext[aarx][symbol*frame_parms->N_RB_DL*12]; //received signal on antenna of interest h11*x1+h12*x2
-    rxdataF_comp0_128   = (__m128i *)&rxdataF_comp0[aarx][symbol*frame_parms->N_RB_DL*12]; //result of multipl with MF x1 on antenna of interest
-    rxdataF_comp1_128   = (__m128i *)&rxdataF_comp1[aarx][symbol*frame_parms->N_RB_DL*12]; //result of multipl with MF x2 on antenna of interest
+    dl_ch0_128          = (simde__m128i *)&dl_ch_estimates_ext[aarx][symbol*frame_parms->N_RB_DL*12]; // this is h11
+    dl_ch1_128          = (simde__m128i *)&dl_ch_estimates_ext[2+aarx][symbol*frame_parms->N_RB_DL*12]; // this is h12
+    dl_ch_mag0_128      = (simde__m128i *)&dl_ch_mag0[aarx][symbol*frame_parms->N_RB_DL*12]; //responsible for x1
+    dl_ch_mag0_128b     = (simde__m128i *)&dl_ch_magb0[aarx][symbol*frame_parms->N_RB_DL*12];//responsible for x1
+    dl_ch_mag1_128      = (simde__m128i *)&dl_ch_mag1[aarx][symbol*frame_parms->N_RB_DL*12];   //responsible for x2. always coming from tx2
+    dl_ch_mag1_128b     = (simde__m128i *)&dl_ch_magb1[aarx][symbol*frame_parms->N_RB_DL*12];  //responsible for x2. always coming from tx2
+    rxdataF128          = (simde__m128i *)&rxdataF_ext[aarx][symbol*frame_parms->N_RB_DL*12]; //received signal on antenna of interest h11*x1+h12*x2
+    rxdataF_comp0_128   = (simde__m128i *)&rxdataF_comp0[aarx][symbol*frame_parms->N_RB_DL*12]; //result of multiplication with MF x1 on antenna of interest
+    rxdataF_comp1_128   = (simde__m128i *)&rxdataF_comp1[aarx][symbol*frame_parms->N_RB_DL*12]; //result of multiplication with MF x2 on antenna of interest
 
     for (rb=0; rb<nb_rb; rb++) {
       if (mmse_flag == 0) {
@@ -2517,137 +2133,137 @@ void dlsch_channel_compensation_TM34(LTE_DL_FRAME_PARMS *frame_parms,
 
       if (mod_order0>2) {
         // get channel amplitude if not QPSK
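+        // |h|^2 per RE: madd(h,h) yields re^2+im^2 as 32-bit pairs, srai renormalizes by
+        // output_shift0, packs saturates back to 16 bits, and unpacklo/unpackhi duplicate
+        // each magnitude for the I and Q LLR lanes before scaling by the QAM amplitude
+        // (mulhi followed by slli(1) is a Q15 multiply).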
-        mmtmpD0 = _mm_madd_epi16(dl_ch0_128[0],dl_ch0_128[0]);
-        mmtmpD0 = _mm_srai_epi32(mmtmpD0,output_shift0);
-        mmtmpD1 = _mm_madd_epi16(dl_ch0_128[1],dl_ch0_128[1]);
-        mmtmpD1 = _mm_srai_epi32(mmtmpD1,output_shift0);
-        mmtmpD0 = _mm_packs_epi32(mmtmpD0,mmtmpD1);
-        dl_ch_mag0_128[0] = _mm_unpacklo_epi16(mmtmpD0,mmtmpD0);
+        mmtmpD0 = simde_mm_madd_epi16(dl_ch0_128[0],dl_ch0_128[0]);
+        mmtmpD0 = simde_mm_srai_epi32(mmtmpD0,output_shift0);
+        mmtmpD1 = simde_mm_madd_epi16(dl_ch0_128[1],dl_ch0_128[1]);
+        mmtmpD1 = simde_mm_srai_epi32(mmtmpD1,output_shift0);
+        mmtmpD0 = simde_mm_packs_epi32(mmtmpD0,mmtmpD1);
+        dl_ch_mag0_128[0] = simde_mm_unpacklo_epi16(mmtmpD0,mmtmpD0);
         dl_ch_mag0_128b[0] = dl_ch_mag0_128[0];
-        dl_ch_mag0_128[0] = _mm_mulhi_epi16(dl_ch_mag0_128[0],QAM_amp0_128);
-        dl_ch_mag0_128[0] = _mm_slli_epi16(dl_ch_mag0_128[0],1);
+        dl_ch_mag0_128[0] = simde_mm_mulhi_epi16(dl_ch_mag0_128[0],QAM_amp0_128);
+        dl_ch_mag0_128[0] = simde_mm_slli_epi16(dl_ch_mag0_128[0],1);
         //  print_shorts("dl_ch_mag0_128[0]=",&dl_ch_mag0_128[0]);
-        dl_ch_mag0_128[1] = _mm_unpackhi_epi16(mmtmpD0,mmtmpD0);
+        dl_ch_mag0_128[1] = simde_mm_unpackhi_epi16(mmtmpD0,mmtmpD0);
         dl_ch_mag0_128b[1] = dl_ch_mag0_128[1];
-        dl_ch_mag0_128[1] = _mm_mulhi_epi16(dl_ch_mag0_128[1],QAM_amp0_128);
-        dl_ch_mag0_128[1] = _mm_slli_epi16(dl_ch_mag0_128[1],1);
+        dl_ch_mag0_128[1] = simde_mm_mulhi_epi16(dl_ch_mag0_128[1],QAM_amp0_128);
+        dl_ch_mag0_128[1] = simde_mm_slli_epi16(dl_ch_mag0_128[1],1);
 
         if (pilots==0) {
-          mmtmpD0 = _mm_madd_epi16(dl_ch0_128[2],dl_ch0_128[2]);
-          mmtmpD0 = _mm_srai_epi32(mmtmpD0,output_shift0);
-          mmtmpD1 = _mm_packs_epi32(mmtmpD0,mmtmpD0);
-          dl_ch_mag0_128[2] = _mm_unpacklo_epi16(mmtmpD1,mmtmpD1);
+          mmtmpD0 = simde_mm_madd_epi16(dl_ch0_128[2],dl_ch0_128[2]);
+          mmtmpD0 = simde_mm_srai_epi32(mmtmpD0,output_shift0);
+          mmtmpD1 = simde_mm_packs_epi32(mmtmpD0,mmtmpD0);
+          dl_ch_mag0_128[2] = simde_mm_unpacklo_epi16(mmtmpD1,mmtmpD1);
           dl_ch_mag0_128b[2] = dl_ch_mag0_128[2];
-          dl_ch_mag0_128[2] = _mm_mulhi_epi16(dl_ch_mag0_128[2],QAM_amp0_128);
-          dl_ch_mag0_128[2] = _mm_slli_epi16(dl_ch_mag0_128[2],1);
+          dl_ch_mag0_128[2] = simde_mm_mulhi_epi16(dl_ch_mag0_128[2],QAM_amp0_128);
+          dl_ch_mag0_128[2] = simde_mm_slli_epi16(dl_ch_mag0_128[2],1);
         }
 
-        dl_ch_mag0_128b[0] = _mm_mulhi_epi16(dl_ch_mag0_128b[0],QAM_amp0_128b);
-        dl_ch_mag0_128b[0] = _mm_slli_epi16(dl_ch_mag0_128b[0],1);
+        dl_ch_mag0_128b[0] = simde_mm_mulhi_epi16(dl_ch_mag0_128b[0],QAM_amp0_128b);
+        dl_ch_mag0_128b[0] = simde_mm_slli_epi16(dl_ch_mag0_128b[0],1);
         // print_shorts("dl_ch_mag0_128b[0]=",&dl_ch_mag0_128b[0]);
-        dl_ch_mag0_128b[1] = _mm_mulhi_epi16(dl_ch_mag0_128b[1],QAM_amp0_128b);
-        dl_ch_mag0_128b[1] = _mm_slli_epi16(dl_ch_mag0_128b[1],1);
+        dl_ch_mag0_128b[1] = simde_mm_mulhi_epi16(dl_ch_mag0_128b[1],QAM_amp0_128b);
+        dl_ch_mag0_128b[1] = simde_mm_slli_epi16(dl_ch_mag0_128b[1],1);
 
         if (pilots==0) {
-          dl_ch_mag0_128b[2] = _mm_mulhi_epi16(dl_ch_mag0_128b[2],QAM_amp0_128b);
-          dl_ch_mag0_128b[2] = _mm_slli_epi16(dl_ch_mag0_128b[2],1);
+          dl_ch_mag0_128b[2] = simde_mm_mulhi_epi16(dl_ch_mag0_128b[2],QAM_amp0_128b);
+          dl_ch_mag0_128b[2] = simde_mm_slli_epi16(dl_ch_mag0_128b[2],1);
         }
       }
 
       if (mod_order1>2) {
         // get channel amplitude if not QPSK
-        mmtmpD0 = _mm_madd_epi16(dl_ch1_128[0],dl_ch1_128[0]);
-        mmtmpD0 = _mm_srai_epi32(mmtmpD0,output_shift1);
-        mmtmpD1 = _mm_madd_epi16(dl_ch1_128[1],dl_ch1_128[1]);
-        mmtmpD1 = _mm_srai_epi32(mmtmpD1,output_shift1);
-        mmtmpD0 = _mm_packs_epi32(mmtmpD0,mmtmpD1);
-        dl_ch_mag1_128[0] = _mm_unpacklo_epi16(mmtmpD0,mmtmpD0);
+        mmtmpD0 = simde_mm_madd_epi16(dl_ch1_128[0],dl_ch1_128[0]);
+        mmtmpD0 = simde_mm_srai_epi32(mmtmpD0,output_shift1);
+        mmtmpD1 = simde_mm_madd_epi16(dl_ch1_128[1],dl_ch1_128[1]);
+        mmtmpD1 = simde_mm_srai_epi32(mmtmpD1,output_shift1);
+        mmtmpD0 = simde_mm_packs_epi32(mmtmpD0,mmtmpD1);
+        dl_ch_mag1_128[0] = simde_mm_unpacklo_epi16(mmtmpD0,mmtmpD0);
         dl_ch_mag1_128b[0] = dl_ch_mag1_128[0];
-        dl_ch_mag1_128[0] = _mm_mulhi_epi16(dl_ch_mag1_128[0],QAM_amp1_128);
-        dl_ch_mag1_128[0] = _mm_slli_epi16(dl_ch_mag1_128[0],1);
+        dl_ch_mag1_128[0] = simde_mm_mulhi_epi16(dl_ch_mag1_128[0],QAM_amp1_128);
+        dl_ch_mag1_128[0] = simde_mm_slli_epi16(dl_ch_mag1_128[0],1);
         // print_shorts("dl_ch_mag1_128[0]=",&dl_ch_mag1_128[0]);
-        dl_ch_mag1_128[1] = _mm_unpackhi_epi16(mmtmpD0,mmtmpD0);
+        dl_ch_mag1_128[1] = simde_mm_unpackhi_epi16(mmtmpD0,mmtmpD0);
         dl_ch_mag1_128b[1] = dl_ch_mag1_128[1];
-        dl_ch_mag1_128[1] = _mm_mulhi_epi16(dl_ch_mag1_128[1],QAM_amp1_128);
-        dl_ch_mag1_128[1] = _mm_slli_epi16(dl_ch_mag1_128[1],1);
+        dl_ch_mag1_128[1] = simde_mm_mulhi_epi16(dl_ch_mag1_128[1],QAM_amp1_128);
+        dl_ch_mag1_128[1] = simde_mm_slli_epi16(dl_ch_mag1_128[1],1);
 
         if (pilots==0) {
-          mmtmpD0 = _mm_madd_epi16(dl_ch1_128[2],dl_ch1_128[2]);
-          mmtmpD0 = _mm_srai_epi32(mmtmpD0,output_shift1);
-          mmtmpD1 = _mm_packs_epi32(mmtmpD0,mmtmpD0);
-          dl_ch_mag1_128[2] = _mm_unpacklo_epi16(mmtmpD1,mmtmpD1);
+          mmtmpD0 = simde_mm_madd_epi16(dl_ch1_128[2],dl_ch1_128[2]);
+          mmtmpD0 = simde_mm_srai_epi32(mmtmpD0,output_shift1);
+          mmtmpD1 = simde_mm_packs_epi32(mmtmpD0,mmtmpD0);
+          dl_ch_mag1_128[2] = simde_mm_unpacklo_epi16(mmtmpD1,mmtmpD1);
           dl_ch_mag1_128b[2] = dl_ch_mag1_128[2];
-          dl_ch_mag1_128[2] = _mm_mulhi_epi16(dl_ch_mag1_128[2],QAM_amp1_128);
-          dl_ch_mag1_128[2] = _mm_slli_epi16(dl_ch_mag1_128[2],1);
+          dl_ch_mag1_128[2] = simde_mm_mulhi_epi16(dl_ch_mag1_128[2],QAM_amp1_128);
+          dl_ch_mag1_128[2] = simde_mm_slli_epi16(dl_ch_mag1_128[2],1);
         }
 
-        dl_ch_mag1_128b[0] = _mm_mulhi_epi16(dl_ch_mag1_128b[0],QAM_amp1_128b);
-        dl_ch_mag1_128b[0] = _mm_slli_epi16(dl_ch_mag1_128b[0],1);
+        dl_ch_mag1_128b[0] = simde_mm_mulhi_epi16(dl_ch_mag1_128b[0],QAM_amp1_128b);
+        dl_ch_mag1_128b[0] = simde_mm_slli_epi16(dl_ch_mag1_128b[0],1);
         // print_shorts("dl_ch_mag1_128b[0]=",&dl_ch_mag1_128b[0]);
-        dl_ch_mag1_128b[1] = _mm_mulhi_epi16(dl_ch_mag1_128b[1],QAM_amp1_128b);
-        dl_ch_mag1_128b[1] = _mm_slli_epi16(dl_ch_mag1_128b[1],1);
+        dl_ch_mag1_128b[1] = simde_mm_mulhi_epi16(dl_ch_mag1_128b[1],QAM_amp1_128b);
+        dl_ch_mag1_128b[1] = simde_mm_slli_epi16(dl_ch_mag1_128b[1],1);
 
         if (pilots==0) {
-          dl_ch_mag1_128b[2] = _mm_mulhi_epi16(dl_ch_mag1_128b[2],QAM_amp1_128b);
-          dl_ch_mag1_128b[2] = _mm_slli_epi16(dl_ch_mag1_128b[2],1);
+          dl_ch_mag1_128b[2] = simde_mm_mulhi_epi16(dl_ch_mag1_128b[2],QAM_amp1_128b);
+          dl_ch_mag1_128b[2] = simde_mm_slli_epi16(dl_ch_mag1_128b[2],1);
         }
       }
 
       // layer 0
       // MF multiply by conjugated channel
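+      // Complex multiply y = conj(h)*x without a dedicated instruction: madd(h,x) gives
+      // the real part (ac+bd); shufflelo/shufflehi with SIMDE_MM_SHUFFLE(2,3,0,1) swaps
+      // re/im within each complex pair, simde_mm_sign_epi16 applies the signs from
+      // conjugate[], and the second madd then yields the imaginary part (ad-bc).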
-      mmtmpD0 = _mm_madd_epi16(dl_ch0_128[0],rxdataF128[0]);
+      mmtmpD0 = simde_mm_madd_epi16(dl_ch0_128[0],rxdataF128[0]);
       //  print_ints("re",&mmtmpD0);
       // mmtmpD0 contains real part of 4 consecutive outputs (32-bit)
-      mmtmpD1 = _mm_shufflelo_epi16(dl_ch0_128[0],_MM_SHUFFLE(2,3,0,1));
-      mmtmpD1 = _mm_shufflehi_epi16(mmtmpD1,_MM_SHUFFLE(2,3,0,1));
-      mmtmpD1 = _mm_sign_epi16(mmtmpD1,*(__m128i *)&conjugate[0]);
-      mmtmpD1 = _mm_madd_epi16(mmtmpD1,rxdataF128[0]);
+      mmtmpD1 = simde_mm_shufflelo_epi16(dl_ch0_128[0], SIMDE_MM_SHUFFLE(2,3,0,1));
+      mmtmpD1 = simde_mm_shufflehi_epi16(mmtmpD1, SIMDE_MM_SHUFFLE(2,3,0,1));
+      mmtmpD1 = simde_mm_sign_epi16(mmtmpD1,*(simde__m128i *)&conjugate[0]);
+      mmtmpD1 = simde_mm_madd_epi16(mmtmpD1,rxdataF128[0]);
       // print_ints("im",&mmtmpD1);
       // mmtmpD1 contains imag part of 4 consecutive outputs (32-bit)
-      mmtmpD0 = _mm_srai_epi32(mmtmpD0,output_shift0);
+      mmtmpD0 = simde_mm_srai_epi32(mmtmpD0,output_shift0);
       // printf("Shift: %d\n",output_shift);
       // print_ints("re(shift)",&mmtmpD0);
-      mmtmpD1 = _mm_srai_epi32(mmtmpD1,output_shift0);
+      mmtmpD1 = simde_mm_srai_epi32(mmtmpD1,output_shift0);
       // print_ints("im(shift)",&mmtmpD1);
-      mmtmpD2 = _mm_unpacklo_epi32(mmtmpD0,mmtmpD1);
-      mmtmpD3 = _mm_unpackhi_epi32(mmtmpD0,mmtmpD1);
+      mmtmpD2 = simde_mm_unpacklo_epi32(mmtmpD0,mmtmpD1);
+      mmtmpD3 = simde_mm_unpackhi_epi32(mmtmpD0,mmtmpD1);
       //  print_ints("c0",&mmtmpD2);
       // print_ints("c1",&mmtmpD3);
-      rxdataF_comp0_128[0] = _mm_packs_epi32(mmtmpD2,mmtmpD3);
+      rxdataF_comp0_128[0] = simde_mm_packs_epi32(mmtmpD2,mmtmpD3);
       // print_shorts("rx:",rxdataF128);
       // print_shorts("ch:",dl_ch0_128);
       //print_shorts("pack:",rxdataF_comp0_128);
       // multiply by conjugated channel
-      mmtmpD0 = _mm_madd_epi16(dl_ch0_128[1],rxdataF128[1]);
+      mmtmpD0 = simde_mm_madd_epi16(dl_ch0_128[1],rxdataF128[1]);
       // mmtmpD0 contains real part of 4 consecutive outputs (32-bit)
-      mmtmpD1 = _mm_shufflelo_epi16(dl_ch0_128[1],_MM_SHUFFLE(2,3,0,1));
-      mmtmpD1 = _mm_shufflehi_epi16(mmtmpD1,_MM_SHUFFLE(2,3,0,1));
-      mmtmpD1 = _mm_sign_epi16(mmtmpD1,*(__m128i *)conjugate);
-      mmtmpD1 = _mm_madd_epi16(mmtmpD1,rxdataF128[1]);
+      mmtmpD1 = simde_mm_shufflelo_epi16(dl_ch0_128[1], SIMDE_MM_SHUFFLE(2,3,0,1));
+      mmtmpD1 = simde_mm_shufflehi_epi16(mmtmpD1, SIMDE_MM_SHUFFLE(2,3,0,1));
+      mmtmpD1 = simde_mm_sign_epi16(mmtmpD1,*(simde__m128i *)conjugate);
+      mmtmpD1 = simde_mm_madd_epi16(mmtmpD1,rxdataF128[1]);
       // mmtmpD1 contains imag part of 4 consecutive outputs (32-bit)
-      mmtmpD0 = _mm_srai_epi32(mmtmpD0,output_shift0);
-      mmtmpD1 = _mm_srai_epi32(mmtmpD1,output_shift0);
-      mmtmpD2 = _mm_unpacklo_epi32(mmtmpD0,mmtmpD1);
-      mmtmpD3 = _mm_unpackhi_epi32(mmtmpD0,mmtmpD1);
-      rxdataF_comp0_128[1] = _mm_packs_epi32(mmtmpD2,mmtmpD3);
+      mmtmpD0 = simde_mm_srai_epi32(mmtmpD0,output_shift0);
+      mmtmpD1 = simde_mm_srai_epi32(mmtmpD1,output_shift0);
+      mmtmpD2 = simde_mm_unpacklo_epi32(mmtmpD0,mmtmpD1);
+      mmtmpD3 = simde_mm_unpackhi_epi32(mmtmpD0,mmtmpD1);
+      rxdataF_comp0_128[1] = simde_mm_packs_epi32(mmtmpD2,mmtmpD3);
       //  print_shorts("rx:",rxdataF128+1);
       //  print_shorts("ch:",dl_ch0_128+1);
       // print_shorts("pack:",rxdataF_comp0_128+1);
 
       if (pilots==0) {
         // multiply by conjugated channel
-        mmtmpD0 = _mm_madd_epi16(dl_ch0_128[2],rxdataF128[2]);
+        mmtmpD0 = simde_mm_madd_epi16(dl_ch0_128[2],rxdataF128[2]);
         // mmtmpD0 contains real part of 4 consecutive outputs (32-bit)
-        mmtmpD1 = _mm_shufflelo_epi16(dl_ch0_128[2],_MM_SHUFFLE(2,3,0,1));
-        mmtmpD1 = _mm_shufflehi_epi16(mmtmpD1,_MM_SHUFFLE(2,3,0,1));
-        mmtmpD1 = _mm_sign_epi16(mmtmpD1,*(__m128i *)conjugate);
-        mmtmpD1 = _mm_madd_epi16(mmtmpD1,rxdataF128[2]);
+        mmtmpD1 = simde_mm_shufflelo_epi16(dl_ch0_128[2], SIMDE_MM_SHUFFLE(2,3,0,1));
+        mmtmpD1 = simde_mm_shufflehi_epi16(mmtmpD1, SIMDE_MM_SHUFFLE(2,3,0,1));
+        mmtmpD1 = simde_mm_sign_epi16(mmtmpD1,*(simde__m128i *)conjugate);
+        mmtmpD1 = simde_mm_madd_epi16(mmtmpD1,rxdataF128[2]);
         // mmtmpD1 contains imag part of 4 consecutive outputs (32-bit)
-        mmtmpD0 = _mm_srai_epi32(mmtmpD0,output_shift0);
-        mmtmpD1 = _mm_srai_epi32(mmtmpD1,output_shift0);
-        mmtmpD2 = _mm_unpacklo_epi32(mmtmpD0,mmtmpD1);
-        mmtmpD3 = _mm_unpackhi_epi32(mmtmpD0,mmtmpD1);
-        rxdataF_comp0_128[2] = _mm_packs_epi32(mmtmpD2,mmtmpD3);
+        mmtmpD0 = simde_mm_srai_epi32(mmtmpD0,output_shift0);
+        mmtmpD1 = simde_mm_srai_epi32(mmtmpD1,output_shift0);
+        mmtmpD2 = simde_mm_unpacklo_epi32(mmtmpD0,mmtmpD1);
+        mmtmpD3 = simde_mm_unpackhi_epi32(mmtmpD0,mmtmpD1);
+        rxdataF_comp0_128[2] = simde_mm_packs_epi32(mmtmpD2,mmtmpD3);
         //   print_shorts("rx:",rxdataF128+2);
         //   print_shorts("ch:",dl_ch0_128+2);
         //  print_shorts("pack:",rxdataF_comp0_128+2);
@@ -2655,58 +2271,58 @@ void dlsch_channel_compensation_TM34(LTE_DL_FRAME_PARMS *frame_parms,
 
       // layer 1
       // MF multiply by conjugated channel
-      mmtmpD0 = _mm_madd_epi16(dl_ch1_128[0],rxdataF128[0]);
+      mmtmpD0 = simde_mm_madd_epi16(dl_ch1_128[0],rxdataF128[0]);
       //  print_ints("re",&mmtmpD0);
       // mmtmpD0 contains real part of 4 consecutive outputs (32-bit)
-      mmtmpD1 = _mm_shufflelo_epi16(dl_ch1_128[0],_MM_SHUFFLE(2,3,0,1));
-      mmtmpD1 = _mm_shufflehi_epi16(mmtmpD1,_MM_SHUFFLE(2,3,0,1));
-      mmtmpD1 = _mm_sign_epi16(mmtmpD1,*(__m128i *)&conjugate[0]);
+      mmtmpD1 = simde_mm_shufflelo_epi16(dl_ch1_128[0], SIMDE_MM_SHUFFLE(2,3,0,1));
+      mmtmpD1 = simde_mm_shufflehi_epi16(mmtmpD1, SIMDE_MM_SHUFFLE(2,3,0,1));
+      mmtmpD1 = simde_mm_sign_epi16(mmtmpD1,*(simde__m128i *)&conjugate[0]);
       //  print_ints("im",&mmtmpD1);
-      mmtmpD1 = _mm_madd_epi16(mmtmpD1,rxdataF128[0]);
+      mmtmpD1 = simde_mm_madd_epi16(mmtmpD1,rxdataF128[0]);
       // mmtmpD1 contains imag part of 4 consecutive outputs (32-bit)
-      mmtmpD0 = _mm_srai_epi32(mmtmpD0,output_shift1);
+      mmtmpD0 = simde_mm_srai_epi32(mmtmpD0,output_shift1);
       // print_ints("re(shift)",&mmtmpD0);
-      mmtmpD1 = _mm_srai_epi32(mmtmpD1,output_shift1);
+      mmtmpD1 = simde_mm_srai_epi32(mmtmpD1,output_shift1);
       // print_ints("im(shift)",&mmtmpD1);
-      mmtmpD2 = _mm_unpacklo_epi32(mmtmpD0,mmtmpD1);
-      mmtmpD3 = _mm_unpackhi_epi32(mmtmpD0,mmtmpD1);
+      mmtmpD2 = simde_mm_unpacklo_epi32(mmtmpD0,mmtmpD1);
+      mmtmpD3 = simde_mm_unpackhi_epi32(mmtmpD0,mmtmpD1);
       // print_ints("c0",&mmtmpD2);
       // print_ints("c1",&mmtmpD3);
-      rxdataF_comp1_128[0] = _mm_packs_epi32(mmtmpD2,mmtmpD3);
+      rxdataF_comp1_128[0] = simde_mm_packs_epi32(mmtmpD2,mmtmpD3);
       // print_shorts("rx:",rxdataF128);
       //  print_shorts("ch:",dl_ch1_128);
       // print_shorts("pack:",rxdataF_comp1_128);
       // multiply by conjugated channel
-      mmtmpD0 = _mm_madd_epi16(dl_ch1_128[1],rxdataF128[1]);
+      mmtmpD0 = simde_mm_madd_epi16(dl_ch1_128[1],rxdataF128[1]);
       // mmtmpD0 contains real part of 4 consecutive outputs (32-bit)
-      mmtmpD1 = _mm_shufflelo_epi16(dl_ch1_128[1],_MM_SHUFFLE(2,3,0,1));
-      mmtmpD1 = _mm_shufflehi_epi16(mmtmpD1,_MM_SHUFFLE(2,3,0,1));
-      mmtmpD1 = _mm_sign_epi16(mmtmpD1,*(__m128i *)conjugate);
-      mmtmpD1 = _mm_madd_epi16(mmtmpD1,rxdataF128[1]);
+      mmtmpD1 = simde_mm_shufflelo_epi16(dl_ch1_128[1], SIMDE_MM_SHUFFLE(2,3,0,1));
+      mmtmpD1 = simde_mm_shufflehi_epi16(mmtmpD1, SIMDE_MM_SHUFFLE(2,3,0,1));
+      mmtmpD1 = simde_mm_sign_epi16(mmtmpD1,*(simde__m128i *)conjugate);
+      mmtmpD1 = simde_mm_madd_epi16(mmtmpD1,rxdataF128[1]);
       // mmtmpD1 contains imag part of 4 consecutive outputs (32-bit)
-      mmtmpD0 = _mm_srai_epi32(mmtmpD0,output_shift1);
-      mmtmpD1 = _mm_srai_epi32(mmtmpD1,output_shift1);
-      mmtmpD2 = _mm_unpacklo_epi32(mmtmpD0,mmtmpD1);
-      mmtmpD3 = _mm_unpackhi_epi32(mmtmpD0,mmtmpD1);
-      rxdataF_comp1_128[1] = _mm_packs_epi32(mmtmpD2,mmtmpD3);
+      mmtmpD0 = simde_mm_srai_epi32(mmtmpD0,output_shift1);
+      mmtmpD1 = simde_mm_srai_epi32(mmtmpD1,output_shift1);
+      mmtmpD2 = simde_mm_unpacklo_epi32(mmtmpD0,mmtmpD1);
+      mmtmpD3 = simde_mm_unpackhi_epi32(mmtmpD0,mmtmpD1);
+      rxdataF_comp1_128[1] = simde_mm_packs_epi32(mmtmpD2,mmtmpD3);
       //  print_shorts("rx:",rxdataF128+1);
       // print_shorts("ch:",dl_ch1_128+1);
       // print_shorts("pack:",rxdataF_comp1_128+1);
 
       if (pilots==0) {
         // multiply by conjugated channel
-        mmtmpD0 = _mm_madd_epi16(dl_ch1_128[2],rxdataF128[2]);
+        mmtmpD0 = simde_mm_madd_epi16(dl_ch1_128[2],rxdataF128[2]);
         // mmtmpD0 contains real part of 4 consecutive outputs (32-bit)
-        mmtmpD1 = _mm_shufflelo_epi16(dl_ch1_128[2],_MM_SHUFFLE(2,3,0,1));
-        mmtmpD1 = _mm_shufflehi_epi16(mmtmpD1,_MM_SHUFFLE(2,3,0,1));
-        mmtmpD1 = _mm_sign_epi16(mmtmpD1,*(__m128i *)conjugate);
-        mmtmpD1 = _mm_madd_epi16(mmtmpD1,rxdataF128[2]);
+        mmtmpD1 = simde_mm_shufflelo_epi16(dl_ch1_128[2], SIMDE_MM_SHUFFLE(2,3,0,1));
+        mmtmpD1 = simde_mm_shufflehi_epi16(mmtmpD1, SIMDE_MM_SHUFFLE(2,3,0,1));
+        mmtmpD1 = simde_mm_sign_epi16(mmtmpD1,*(simde__m128i *)conjugate);
+        mmtmpD1 = simde_mm_madd_epi16(mmtmpD1,rxdataF128[2]);
         // mmtmpD1 contains imag part of 4 consecutive outputs (32-bit)
-        mmtmpD0 = _mm_srai_epi32(mmtmpD0,output_shift1);
-        mmtmpD1 = _mm_srai_epi32(mmtmpD1,output_shift1);
-        mmtmpD2 = _mm_unpacklo_epi32(mmtmpD0,mmtmpD1);
-        mmtmpD3 = _mm_unpackhi_epi32(mmtmpD0,mmtmpD1);
-        rxdataF_comp1_128[2] = _mm_packs_epi32(mmtmpD2,mmtmpD3);
+        mmtmpD0 = simde_mm_srai_epi32(mmtmpD0,output_shift1);
+        mmtmpD1 = simde_mm_srai_epi32(mmtmpD1,output_shift1);
+        mmtmpD2 = simde_mm_unpacklo_epi32(mmtmpD0,mmtmpD1);
+        mmtmpD3 = simde_mm_unpackhi_epi32(mmtmpD0,mmtmpD1);
+        rxdataF_comp1_128[2] = simde_mm_packs_epi32(mmtmpD2,mmtmpD3);
         //   print_shorts("rx:",rxdataF128+2);
         //  print_shorts("ch:",dl_ch1_128+2);
         //         print_shorts("pack:",rxdataF_comp1_128+2);
@@ -2743,263 +2359,8 @@ void dlsch_channel_compensation_TM34(LTE_DL_FRAME_PARMS *frame_parms,
   measurements->precoded_cqi_dB[eNB_id][1] = dB_fixed2(precoded_signal_strength1,measurements->n0_power_tot);
   // printf("eNB_id %d, symbol %d: precoded CQI %d dB\n",eNB_id,symbol,
   //  measurements->precoded_cqi_dB[eNB_id][0]);
-  _mm_empty();
-  _m_empty();
-#elif defined(__arm__) || defined(__aarch64__)
-  unsigned short rb,Nre;
-  unsigned char aarx,symbol_mod,pilots=0;
-  int precoded_signal_strength0=0,precoded_signal_strength1=0, rx_power_correction;
-  int16x4_t *dl_ch0_128,*rxdataF128;
-  int16x4_t *dl_ch1_128;
-  int16x8_t *dl_ch0_128b,*dl_ch1_128b;
-  int32x4_t mmtmpD0,mmtmpD1,mmtmpD0b,mmtmpD1b;
-  int16x8_t *dl_ch_mag0_128,*dl_ch_mag0_128b,*dl_ch_mag1_128,*dl_ch_mag1_128b,mmtmpD2,mmtmpD3,mmtmpD4,*rxdataF_comp0_128,*rxdataF_comp1_128;
-  int16x8_t QAM_amp0_128,QAM_amp0_128b,QAM_amp1_128,QAM_amp1_128b;
-  int32x4_t output_shift128 = vmovq_n_s32(-(int32_t)output_shift);
-  int **rxdataF_ext           = pdsch_vars->rxdataF_ext;
-  int **dl_ch_estimates_ext   = pdsch_vars->dl_ch_estimates_ext;
-  int **dl_ch_mag0            = pdsch_vars->dl_ch_mag0;
-  int **dl_ch_mag1            = pdsch_vars->dl_ch_mag1[harq_pid][round];
-  int **dl_ch_magb0           = pdsch_vars->dl_ch_magb0;
-  int **dl_ch_magb1           = pdsch_vars->dl_ch_magb1[harq_pid][round];
-  int **rxdataF_comp0         = pdsch_vars->rxdataF_comp0;
-  int **rxdataF_comp1         = pdsch_vars->rxdataF_comp1[harq_pid][round];
-  int16_t conj[4]__attribute__((aligned(16))) = {1,-1,1,-1};
-  symbol_mod = (symbol>=(7-frame_parms->Ncp)) ? symbol-(7-frame_parms->Ncp) : symbol;
-
-  if ((symbol_mod == 0) || (symbol_mod == (4-frame_parms->Ncp))) {
-    if (frame_parms->nb_antenna_ports_eNB==1) { // 10 out of 12 so don't reduce size
-      nb_rb=1+(5*nb_rb/6);
-    } else {
-      pilots=1;
-    }
-  }
-
-  rx_power_correction=1;
-
-  if (mod_order0 == 4) {
-    QAM_amp0_128  = vmovq_n_s16(QAM16_n1);  // 2/sqrt(10)
-    QAM_amp0_128b = vmovq_n_s16(0);
-  } else if (mod_order0 == 6) {
-    QAM_amp0_128  = vmovq_n_s16(QAM64_n1); //
-    QAM_amp0_128b = vmovq_n_s16(QAM64_n2);
-  }
-
-  if (mod_order1 == 4) {
-    QAM_amp1_128  = vmovq_n_s16(QAM16_n1);  // 2/sqrt(10)
-    QAM_amp1_128b = vmovq_n_s16(0);
-  } else if (mod_order1 == 6) {
-    QAM_amp1_128  = vmovq_n_s16(QAM64_n1); //
-    QAM_amp1_128b = vmovq_n_s16(QAM64_n2);
-  }
-
-  //    printf("comp: rxdataF_comp %p, symbol %d\n",rxdataF_comp[0],symbol);
-
-  for (aarx=0; aarx<frame_parms->nb_antennas_rx; aarx++) {
-    dl_ch0_128          = (int16x4_t *)&dl_ch_estimates_ext[aarx][symbol*frame_parms->N_RB_DL*12];
-    dl_ch1_128          = (int16x4_t *)&dl_ch_estimates_ext[2+aarx][symbol*frame_parms->N_RB_DL*12];
-    dl_ch0_128b          = (int16x8_t *)&dl_ch_estimates_ext[aarx][symbol*frame_parms->N_RB_DL*12];
-    dl_ch1_128b          = (int16x8_t *)&dl_ch_estimates_ext[2+aarx][symbol*frame_parms->N_RB_DL*12];
-    dl_ch_mag0_128      = (int16x8_t *)&dl_ch_mag0[aarx][symbol*frame_parms->N_RB_DL*12];
-    dl_ch_mag0_128b     = (int16x8_t *)&dl_ch_magb0[aarx][symbol*frame_parms->N_RB_DL*12];
-    dl_ch_mag1_128      = (int16x8_t *)&dl_ch_mag1[aarx][symbol*frame_parms->N_RB_DL*12];
-    dl_ch_mag1_128b     = (int16x8_t *)&dl_ch_magb1[aarx][symbol*frame_parms->N_RB_DL*12];
-    rxdataF128          = (int16x4_t *)&rxdataF_ext[aarx][symbol*frame_parms->N_RB_DL*12];
-    rxdataF_comp0_128   = (int16x8_t *)&rxdataF_comp0[aarx][symbol*frame_parms->N_RB_DL*12];
-    rxdataF_comp1_128   = (int16x8_t *)&rxdataF_comp1[aarx][symbol*frame_parms->N_RB_DL*12];
-
-    for (rb=0; rb<nb_rb; rb++) {
-      if (mmse_flag == 0) {
-        // combine TX channels using precoder from pmi
-        if (mimo_mode==LARGE_CDD) {
-          prec2A_TM3_128(&dl_ch0_128[0],&dl_ch1_128[0]);
-          prec2A_TM3_128(&dl_ch0_128[1],&dl_ch1_128[1]);
-
-          if (pilots==0) {
-            prec2A_TM3_128(&dl_ch0_128[2],&dl_ch1_128[2]);
-          }
-        } else if (mimo_mode==DUALSTREAM_UNIFORM_PRECODING1) {
-          prec2A_TM4_128(0,&dl_ch0_128[0],&dl_ch1_128[0]);
-          prec2A_TM4_128(0,&dl_ch0_128[1],&dl_ch1_128[1]);
-
-          if (pilots==0) {
-            prec2A_TM4_128(0,&dl_ch0_128[2],&dl_ch1_128[2]);
-          }
-        } else if (mimo_mode==DUALSTREAM_UNIFORM_PRECODINGj) {
-          prec2A_TM4_128(1,&dl_ch0_128[0],&dl_ch1_128[0]);
-          prec2A_TM4_128(1,&dl_ch0_128[1],&dl_ch1_128[1]);
-
-          if (pilots==0) {
-            prec2A_TM4_128(1,&dl_ch0_128[2],&dl_ch1_128[2]);
-          }
-        } else {
-          LOG_E(PHY,"Unknown MIMO mode\n");
-          return;
-        }
-      }
-
-      if (mod_order0>2) {
-        // get channel amplitude if not QPSK
-        mmtmpD0 = vmull_s16(dl_ch0_128[0], dl_ch0_128[0]);
-        // mmtmpD0 = [ch0*ch0,ch1*ch1,ch2*ch2,ch3*ch3];
-        mmtmpD0 = vqshlq_s32(vqaddq_s32(mmtmpD0,vrev64q_s32(mmtmpD0)),output_shift128);
-        // mmtmpD0 = [ch0*ch0 + ch1*ch1,ch0*ch0 + ch1*ch1,ch2*ch2 + ch3*ch3,ch2*ch2 + ch3*ch3]>>output_shift128 on 32-bits
-        mmtmpD1 = vmull_s16(dl_ch0_128[1], dl_ch0_128[1]);
-        mmtmpD1 = vqshlq_s32(vqaddq_s32(mmtmpD1,vrev64q_s32(mmtmpD1)),output_shift128);
-        mmtmpD2 = vcombine_s16(vmovn_s32(mmtmpD0),vmovn_s32(mmtmpD1));
-        // mmtmpD2 = [ch0*ch0 + ch1*ch1,ch0*ch0 + ch1*ch1,ch2*ch2 + ch3*ch3,ch2*ch2 + ch3*ch3,ch4*ch4 + ch5*ch5,ch4*ch4 + ch5*ch5,ch6*ch6 + ch7*ch7,ch6*ch6 + ch7*ch7]>>output_shift128 on 16-bits
-        mmtmpD0 = vmull_s16(dl_ch0_128[2], dl_ch0_128[2]);
-        mmtmpD0 = vqshlq_s32(vqaddq_s32(mmtmpD0,vrev64q_s32(mmtmpD0)),output_shift128);
-        mmtmpD1 = vmull_s16(dl_ch0_128[3], dl_ch0_128[3]);
-        mmtmpD1 = vqshlq_s32(vqaddq_s32(mmtmpD1,vrev64q_s32(mmtmpD1)),output_shift128);
-        mmtmpD3 = vcombine_s16(vmovn_s32(mmtmpD0),vmovn_s32(mmtmpD1));
-
-        if (pilots==0) {
-          mmtmpD0 = vmull_s16(dl_ch0_128[4], dl_ch0_128[4]);
-          mmtmpD0 = vqshlq_s32(vqaddq_s32(mmtmpD0,vrev64q_s32(mmtmpD0)),output_shift128);
-          mmtmpD1 = vmull_s16(dl_ch0_128[5], dl_ch0_128[5]);
-          mmtmpD1 = vqshlq_s32(vqaddq_s32(mmtmpD1,vrev64q_s32(mmtmpD1)),output_shift128);
-          mmtmpD4 = vcombine_s16(vmovn_s32(mmtmpD0),vmovn_s32(mmtmpD1));
-        }
-
-        dl_ch_mag0_128b[0] = vqdmulhq_s16(mmtmpD2,QAM_amp0_128b);
-        dl_ch_mag0_128b[1] = vqdmulhq_s16(mmtmpD3,QAM_amp0_128b);
-        dl_ch_mag0_128[0] = vqdmulhq_s16(mmtmpD2,QAM_amp0_128);
-        dl_ch_mag0_128[1] = vqdmulhq_s16(mmtmpD3,QAM_amp0_128);
-
-        if (pilots==0) {
-          dl_ch_mag0_128b[2] = vqdmulhq_s16(mmtmpD4,QAM_amp0_128b);
-          dl_ch_mag0_128[2]  = vqdmulhq_s16(mmtmpD4,QAM_amp0_128);
-        }
-      }
-
-      if (mod_order1>2) {
-        // get channel amplitude if not QPSK
-        mmtmpD0 = vmull_s16(dl_ch1_128[0], dl_ch1_128[0]);
-        // mmtmpD0 = [ch0*ch0,ch1*ch1,ch2*ch2,ch3*ch3];
-        mmtmpD0 = vqshlq_s32(vqaddq_s32(mmtmpD0,vrev64q_s32(mmtmpD0)),output_shift128);
-        // mmtmpD0 = [ch0*ch0 + ch1*ch1,ch0*ch0 + ch1*ch1,ch2*ch2 + ch3*ch3,ch2*ch2 + ch3*ch3]>>output_shift128 on 32-bits
-        mmtmpD1 = vmull_s16(dl_ch1_128[1], dl_ch1_128[1]);
-        mmtmpD1 = vqshlq_s32(vqaddq_s32(mmtmpD1,vrev64q_s32(mmtmpD1)),output_shift128);
-        mmtmpD2 = vcombine_s16(vmovn_s32(mmtmpD0),vmovn_s32(mmtmpD1));
-        // mmtmpD2 = [ch0*ch0 + ch1*ch1,ch0*ch0 + ch1*ch1,ch2*ch2 + ch3*ch3,ch2*ch2 + ch3*ch3,ch4*ch4 + ch5*ch5,ch4*ch4 + ch5*ch5,ch6*ch6 + ch7*ch7,ch6*ch6 + ch7*ch7]>>output_shift128 on 16-bits
-        mmtmpD0 = vmull_s16(dl_ch1_128[2], dl_ch1_128[2]);
-        mmtmpD0 = vqshlq_s32(vqaddq_s32(mmtmpD0,vrev64q_s32(mmtmpD0)),output_shift128);
-        mmtmpD1 = vmull_s16(dl_ch1_128[3], dl_ch1_128[3]);
-        mmtmpD1 = vqshlq_s32(vqaddq_s32(mmtmpD1,vrev64q_s32(mmtmpD1)),output_shift128);
-        mmtmpD3 = vcombine_s16(vmovn_s32(mmtmpD0),vmovn_s32(mmtmpD1));
-
-        if (pilots==0) {
-          mmtmpD0 = vmull_s16(dl_ch1_128[4], dl_ch1_128[4]);
-          mmtmpD0 = vqshlq_s32(vqaddq_s32(mmtmpD0,vrev64q_s32(mmtmpD0)),output_shift128);
-          mmtmpD1 = vmull_s16(dl_ch1_128[5], dl_ch1_128[5]);
-          mmtmpD1 = vqshlq_s32(vqaddq_s32(mmtmpD1,vrev64q_s32(mmtmpD1)),output_shift128);
-          mmtmpD4 = vcombine_s16(vmovn_s32(mmtmpD0),vmovn_s32(mmtmpD1));
-        }
-
-        dl_ch_mag1_128b[0] = vqdmulhq_s16(mmtmpD2,QAM_amp1_128b);
-        dl_ch_mag1_128b[1] = vqdmulhq_s16(mmtmpD3,QAM_amp1_128b);
-        dl_ch_mag1_128[0] = vqdmulhq_s16(mmtmpD2,QAM_amp1_128);
-        dl_ch_mag1_128[1] = vqdmulhq_s16(mmtmpD3,QAM_amp1_128);
-
-        if (pilots==0) {
-          dl_ch_mag1_128b[2] = vqdmulhq_s16(mmtmpD4,QAM_amp1_128b);
-          dl_ch_mag1_128[2]  = vqdmulhq_s16(mmtmpD4,QAM_amp1_128);
-        }
-      }
-
-      mmtmpD0 = vmull_s16(dl_ch0_128[0], rxdataF128[0]);
-      //mmtmpD0 = [Re(ch[0])Re(rx[0]) Im(ch[0])Im(ch[0]) Re(ch[1])Re(rx[1]) Im(ch[1])Im(ch[1])]
-      mmtmpD1 = vmull_s16(dl_ch0_128[1], rxdataF128[1]);
-      //mmtmpD1 = [Re(ch[2])Re(rx[2]) Im(ch[2])Im(ch[2]) Re(ch[3])Re(rx[3]) Im(ch[3])Im(ch[3])]
-      mmtmpD0 = vcombine_s32(vpadd_s32(vget_low_s32(mmtmpD0),vget_high_s32(mmtmpD0)),
-                             vpadd_s32(vget_low_s32(mmtmpD1),vget_high_s32(mmtmpD1)));
-      //mmtmpD0 = [Re(ch[0])Re(rx[0])+Im(ch[0])Im(ch[0]) Re(ch[1])Re(rx[1])+Im(ch[1])Im(ch[1]) Re(ch[2])Re(rx[2])+Im(ch[2])Im(ch[2]) Re(ch[3])Re(rx[3])+Im(ch[3])Im(ch[3])]
-      mmtmpD0b = vmull_s16(vrev32_s16(vmul_s16(dl_ch0_128[0],*(int16x4_t *)conj)), rxdataF128[0]);
-      //mmtmpD0 = [-Im(ch[0])Re(rx[0]) Re(ch[0])Im(rx[0]) -Im(ch[1])Re(rx[1]) Re(ch[1])Im(rx[1])]
-      mmtmpD1b = vmull_s16(vrev32_s16(vmul_s16(dl_ch0_128[1],*(int16x4_t *)conj)), rxdataF128[1]);
-      //mmtmpD0 = [-Im(ch[2])Re(rx[2]) Re(ch[2])Im(rx[2]) -Im(ch[3])Re(rx[3]) Re(ch[3])Im(rx[3])]
-      mmtmpD1 = vcombine_s32(vpadd_s32(vget_low_s32(mmtmpD0b),vget_high_s32(mmtmpD0b)),
-                             vpadd_s32(vget_low_s32(mmtmpD1b),vget_high_s32(mmtmpD1b)));
-      //mmtmpD1 = [-Im(ch[0])Re(rx[0])+Re(ch[0])Im(rx[0]) -Im(ch[1])Re(rx[1])+Re(ch[1])Im(rx[1]) -Im(ch[2])Re(rx[2])+Re(ch[2])Im(rx[2]) -Im(ch[3])Re(rx[3])+Re(ch[3])Im(rx[3])]
-      mmtmpD0 = vqshlq_s32(mmtmpD0,output_shift128);
-      mmtmpD1 = vqshlq_s32(mmtmpD1,output_shift128);
-      rxdataF_comp0_128[0] = vcombine_s16(vmovn_s32(mmtmpD0),vmovn_s32(mmtmpD1));
-      mmtmpD0 = vmull_s16(dl_ch0_128[2], rxdataF128[2]);
-      mmtmpD1 = vmull_s16(dl_ch0_128[3], rxdataF128[3]);
-      mmtmpD0 = vcombine_s32(vpadd_s32(vget_low_s32(mmtmpD0),vget_high_s32(mmtmpD0)),
-                             vpadd_s32(vget_low_s32(mmtmpD1),vget_high_s32(mmtmpD1)));
-      mmtmpD0b = vmull_s16(vrev32_s16(vmul_s16(dl_ch0_128[2],*(int16x4_t *)conj)), rxdataF128[2]);
-      mmtmpD1b = vmull_s16(vrev32_s16(vmul_s16(dl_ch0_128[3],*(int16x4_t *)conj)), rxdataF128[3]);
-      mmtmpD1 = vcombine_s32(vpadd_s32(vget_low_s32(mmtmpD0b),vget_high_s32(mmtmpD0b)),
-                             vpadd_s32(vget_low_s32(mmtmpD1b),vget_high_s32(mmtmpD1b)));
-      mmtmpD0 = vqshlq_s32(mmtmpD0,output_shift128);
-      mmtmpD1 = vqshlq_s32(mmtmpD1,output_shift128);
-      rxdataF_comp0_128[1] = vcombine_s16(vmovn_s32(mmtmpD0),vmovn_s32(mmtmpD1));
-      // second stream
-      mmtmpD0 = vmull_s16(dl_ch1_128[0], rxdataF128[0]);
-      mmtmpD1 = vmull_s16(dl_ch1_128[1], rxdataF128[1]);
-      mmtmpD0 = vcombine_s32(vpadd_s32(vget_low_s32(mmtmpD0),vget_high_s32(mmtmpD0)),
-                             vpadd_s32(vget_low_s32(mmtmpD1),vget_high_s32(mmtmpD1)));
-      mmtmpD0b = vmull_s16(vrev32_s16(vmul_s16(dl_ch0_128[0],*(int16x4_t *)conj)), rxdataF128[0]);
-      mmtmpD1b = vmull_s16(vrev32_s16(vmul_s16(dl_ch0_128[1],*(int16x4_t *)conj)), rxdataF128[1]);
-      //mmtmpD0 = [-Im(ch[2])Re(rx[2]) Re(ch[2])Im(rx[2]) -Im(ch[3])Re(rx[3]) Re(ch[3])Im(rx[3])]
-      mmtmpD1 = vcombine_s32(vpadd_s32(vget_low_s32(mmtmpD0b),vget_high_s32(mmtmpD0b)),
-                             vpadd_s32(vget_low_s32(mmtmpD1b),vget_high_s32(mmtmpD1b)));
-      //mmtmpD1 = [-Im(ch[0])Re(rx[0])+Re(ch[0])Im(rx[0]) -Im(ch[1])Re(rx[1])+Re(ch[1])Im(rx[1]) -Im(ch[2])Re(rx[2])+Re(ch[2])Im(rx[2]) -Im(ch[3])Re(rx[3])+Re(ch[3])Im(rx[3])]
-      mmtmpD0 = vqshlq_s32(mmtmpD0,output_shift128);
-      mmtmpD1 = vqshlq_s32(mmtmpD1,output_shift128);
-      rxdataF_comp1_128[0] = vcombine_s16(vmovn_s32(mmtmpD0),vmovn_s32(mmtmpD1));
-      mmtmpD0 = vmull_s16(dl_ch1_128[2], rxdataF128[2]);
-      mmtmpD1 = vmull_s16(dl_ch1_128[3], rxdataF128[3]);
-      mmtmpD0 = vcombine_s32(vpadd_s32(vget_low_s32(mmtmpD0),vget_high_s32(mmtmpD0)),
-                             vpadd_s32(vget_low_s32(mmtmpD1),vget_high_s32(mmtmpD1)));
-      mmtmpD0b = vmull_s16(vrev32_s16(vmul_s16(dl_ch0_128[2],*(int16x4_t *)conj)), rxdataF128[2]);
-      mmtmpD1b = vmull_s16(vrev32_s16(vmul_s16(dl_ch0_128[3],*(int16x4_t *)conj)), rxdataF128[3]);
-      mmtmpD1 = vcombine_s32(vpadd_s32(vget_low_s32(mmtmpD0b),vget_high_s32(mmtmpD0b)),
-                             vpadd_s32(vget_low_s32(mmtmpD1b),vget_high_s32(mmtmpD1b)));
-      mmtmpD0 = vqshlq_s32(mmtmpD0,output_shift128);
-      mmtmpD1 = vqshlq_s32(mmtmpD1,output_shift128);
-      rxdataF_comp1_128[1] = vcombine_s16(vmovn_s32(mmtmpD0),vmovn_s32(mmtmpD1));
-
-      if (pilots==0) {
-        mmtmpD0 = vmull_s16(dl_ch0_128[4], rxdataF128[4]);
-        mmtmpD1 = vmull_s16(dl_ch0_128[5], rxdataF128[5]);
-        mmtmpD0 = vcombine_s32(vpadd_s32(vget_low_s32(mmtmpD0),vget_high_s32(mmtmpD0)),
-                               vpadd_s32(vget_low_s32(mmtmpD1),vget_high_s32(mmtmpD1)));
-        mmtmpD0b = vmull_s16(vrev32_s16(vmul_s16(dl_ch0_128[4],*(int16x4_t *)conj)), rxdataF128[4]);
-        mmtmpD1b = vmull_s16(vrev32_s16(vmul_s16(dl_ch0_128[5],*(int16x4_t *)conj)), rxdataF128[5]);
-        mmtmpD1 = vcombine_s32(vpadd_s32(vget_low_s32(mmtmpD0b),vget_high_s32(mmtmpD0b)),
-                               vpadd_s32(vget_low_s32(mmtmpD1b),vget_high_s32(mmtmpD1b)));
-        mmtmpD0 = vqshlq_s32(mmtmpD0,output_shift128);
-        mmtmpD1 = vqshlq_s32(mmtmpD1,output_shift128);
-        rxdataF_comp0_128[2] = vcombine_s16(vmovn_s32(mmtmpD0),vmovn_s32(mmtmpD1));
-        mmtmpD0 = vmull_s16(dl_ch1_128[4], rxdataF128[4]);
-        mmtmpD1 = vmull_s16(dl_ch1_128[5], rxdataF128[5]);
-        mmtmpD0 = vcombine_s32(vpadd_s32(vget_low_s32(mmtmpD0),vget_high_s32(mmtmpD0)),
-                               vpadd_s32(vget_low_s32(mmtmpD1),vget_high_s32(mmtmpD1)));
-        mmtmpD0b = vmull_s16(vrev32_s16(vmul_s16(dl_ch1_128[4],*(int16x4_t *)conj)), rxdataF128[4]);
-        mmtmpD1b = vmull_s16(vrev32_s16(vmul_s16(dl_ch1_128[5],*(int16x4_t *)conj)), rxdataF128[5]);
-        mmtmpD1 = vcombine_s32(vpadd_s32(vget_low_s32(mmtmpD0b),vget_high_s32(mmtmpD0b)),
-                               vpadd_s32(vget_low_s32(mmtmpD1b),vget_high_s32(mmtmpD1b)));
-        mmtmpD0 = vqshlq_s32(mmtmpD0,output_shift128);
-        mmtmpD1 = vqshlq_s32(mmtmpD1,output_shift128);
-        rxdataF_comp1_128[2] = vcombine_s16(vmovn_s32(mmtmpD0),vmovn_s32(mmtmpD1));
-      }
-    }
-
-    Nre = (pilots==0) ? 12 : 8;
-    // rx_antennas
-  }
-
-  Nre = (pilots==0) ? 12 : 8;
-  precoded_signal_strength0 += ((signal_energy_nodc(&dl_ch_estimates_ext[aarx][symbol*frame_parms->N_RB_DL*Nre],
-                                 (nb_rb*Nre))*rx_power_correction) - (measurements->n0_power[aarx]));
-  precoded_signal_strength1 += ((signal_energy_nodc(&dl_ch_estimates_ext[aarx+2][symbol*frame_parms->N_RB_DL*Nre],
-                                 (nb_rb*Nre))*rx_power_correction) - (measurements->n0_power[aarx]));
-  measurements->precoded_cqi_dB[eNB_id][0] = dB_fixed2(precoded_signal_strength0,measurements->n0_power_tot);
-  measurements->precoded_cqi_dB[eNB_id][1] = dB_fixed2(precoded_signal_strength1,measurements->n0_power_tot);
-#endif
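+  // simde_mm_empty()/simde_m_empty() emit EMMS on x86 to clear the MMX/FPU state and
+  // reduce to no-ops on targets without MMX, so the calls are safe on all architectures.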
+  simde_mm_empty();
+  simde_m_empty();
 }
 
 
@@ -3010,9 +2371,8 @@ void dlsch_dual_stream_correlation(LTE_DL_FRAME_PARMS *frame_parms,
                                    int **dl_ch_estimates_ext_i,
                                    int **dl_ch_rho_ext,
                                    unsigned char output_shift) {
-#if defined(__x86_64__)||defined(__i386__)
   unsigned short rb;
-  __m128i *dl_ch128,*dl_ch128i,*dl_ch_rho128,mmtmpD0,mmtmpD1,mmtmpD2,mmtmpD3;
+  simde__m128i *dl_ch128,*dl_ch128i,*dl_ch_rho128,mmtmpD0,mmtmpD1,mmtmpD2,mmtmpD3;
   unsigned char aarx,symbol_mod,pilots=0;
   //    printf("dlsch_dual_stream_correlation: symbol %d\n",symbol);
   symbol_mod = (symbol>=(7-frame_parms->Ncp)) ? symbol-(7-frame_parms->Ncp) : symbol;
@@ -3024,64 +2384,64 @@ void dlsch_dual_stream_correlation(LTE_DL_FRAME_PARMS *frame_parms,
   //  printf("Dual stream correlation (%p)\n",dl_ch_estimates_ext_i);
 
   for (aarx=0; aarx<frame_parms->nb_antennas_rx; aarx++) {
-    dl_ch128          = (__m128i *)&dl_ch_estimates_ext[aarx][symbol*frame_parms->N_RB_DL*12];
+    dl_ch128          = (simde__m128i *)&dl_ch_estimates_ext[aarx][symbol*frame_parms->N_RB_DL*12];
 
     if (dl_ch_estimates_ext_i == NULL) // TM3/4
-      dl_ch128i         = (__m128i *)&dl_ch_estimates_ext[aarx + frame_parms->nb_antennas_rx][symbol*frame_parms->N_RB_DL*12];
+      dl_ch128i         = (simde__m128i *)&dl_ch_estimates_ext[aarx + frame_parms->nb_antennas_rx][symbol*frame_parms->N_RB_DL*12];
     else
-      dl_ch128i         = (__m128i *)&dl_ch_estimates_ext_i[aarx][symbol*frame_parms->N_RB_DL*12];
+      dl_ch128i         = (simde__m128i *)&dl_ch_estimates_ext_i[aarx][symbol*frame_parms->N_RB_DL*12];
 
-    dl_ch_rho128      = (__m128i *)&dl_ch_rho_ext[aarx][symbol*frame_parms->N_RB_DL*12];
+    dl_ch_rho128      = (simde__m128i *)&dl_ch_rho_ext[aarx][symbol*frame_parms->N_RB_DL*12];
 
     for (rb=0; rb<nb_rb; rb++) {
       // multiply by conjugated channel
-      mmtmpD0 = _mm_madd_epi16(dl_ch128[0],dl_ch128i[0]);
+      mmtmpD0 = simde_mm_madd_epi16(dl_ch128[0],dl_ch128i[0]);
       //      print_ints("re",&mmtmpD0);
       // mmtmpD0 contains real part of 4 consecutive outputs (32-bit)
-      mmtmpD1 = _mm_shufflelo_epi16(dl_ch128[0],_MM_SHUFFLE(2,3,0,1));
-      mmtmpD1 = _mm_shufflehi_epi16(mmtmpD1,_MM_SHUFFLE(2,3,0,1));
-      mmtmpD1 = _mm_sign_epi16(mmtmpD1,*(__m128i *)&conjugate[0]);
-      mmtmpD1 = _mm_madd_epi16(mmtmpD1,dl_ch128i[0]);
+      mmtmpD1 = simde_mm_shufflelo_epi16(dl_ch128[0], SIMDE_MM_SHUFFLE(2,3,0,1));
+      mmtmpD1 = simde_mm_shufflehi_epi16(mmtmpD1, SIMDE_MM_SHUFFLE(2,3,0,1));
+      mmtmpD1 = simde_mm_sign_epi16(mmtmpD1,*(simde__m128i *)&conjugate[0]);
+      mmtmpD1 = simde_mm_madd_epi16(mmtmpD1,dl_ch128i[0]);
       //      print_ints("im",&mmtmpD1);
       // mmtmpD1 contains imag part of 4 consecutive outputs (32-bit)
-      mmtmpD0 = _mm_srai_epi32(mmtmpD0,output_shift);
+      mmtmpD0 = simde_mm_srai_epi32(mmtmpD0,output_shift);
       //      print_ints("re(shift)",&mmtmpD0);
-      mmtmpD1 = _mm_srai_epi32(mmtmpD1,output_shift);
+      mmtmpD1 = simde_mm_srai_epi32(mmtmpD1,output_shift);
       //      print_ints("im(shift)",&mmtmpD1);
-      mmtmpD2 = _mm_unpacklo_epi32(mmtmpD0,mmtmpD1);
-      mmtmpD3 = _mm_unpackhi_epi32(mmtmpD0,mmtmpD1);
+      mmtmpD2 = simde_mm_unpacklo_epi32(mmtmpD0,mmtmpD1);
+      mmtmpD3 = simde_mm_unpackhi_epi32(mmtmpD0,mmtmpD1);
       //      print_ints("c0",&mmtmpD2);
       //      print_ints("c1",&mmtmpD3);
-      dl_ch_rho128[0] = _mm_packs_epi32(mmtmpD2,mmtmpD3);
+      dl_ch_rho128[0] = simde_mm_packs_epi32(mmtmpD2,mmtmpD3);
       // print_shorts("rho 0:",dl_ch_rho128);
       // multiply by conjugated channel
-      mmtmpD0 = _mm_madd_epi16(dl_ch128[1],dl_ch128i[1]);
+      mmtmpD0 = simde_mm_madd_epi16(dl_ch128[1],dl_ch128i[1]);
       // mmtmpD0 contains real part of 4 consecutive outputs (32-bit)
-      mmtmpD1 = _mm_shufflelo_epi16(dl_ch128[1],_MM_SHUFFLE(2,3,0,1));
-      mmtmpD1 = _mm_shufflehi_epi16(mmtmpD1,_MM_SHUFFLE(2,3,0,1));
-      mmtmpD1 = _mm_sign_epi16(mmtmpD1,*(__m128i *)conjugate);
-      mmtmpD1 = _mm_madd_epi16(mmtmpD1,dl_ch128i[1]);
+      mmtmpD1 = simde_mm_shufflelo_epi16(dl_ch128[1], SIMDE_MM_SHUFFLE(2,3,0,1));
+      mmtmpD1 = simde_mm_shufflehi_epi16(mmtmpD1, SIMDE_MM_SHUFFLE(2,3,0,1));
+      mmtmpD1 = simde_mm_sign_epi16(mmtmpD1,*(simde__m128i *)conjugate);
+      mmtmpD1 = simde_mm_madd_epi16(mmtmpD1,dl_ch128i[1]);
       // mmtmpD1 contains imag part of 4 consecutive outputs (32-bit)
-      mmtmpD0 = _mm_srai_epi32(mmtmpD0,output_shift);
-      mmtmpD1 = _mm_srai_epi32(mmtmpD1,output_shift);
-      mmtmpD2 = _mm_unpacklo_epi32(mmtmpD0,mmtmpD1);
-      mmtmpD3 = _mm_unpackhi_epi32(mmtmpD0,mmtmpD1);
-      dl_ch_rho128[1] =_mm_packs_epi32(mmtmpD2,mmtmpD3);
+      mmtmpD0 = simde_mm_srai_epi32(mmtmpD0,output_shift);
+      mmtmpD1 = simde_mm_srai_epi32(mmtmpD1,output_shift);
+      mmtmpD2 = simde_mm_unpacklo_epi32(mmtmpD0,mmtmpD1);
+      mmtmpD3 = simde_mm_unpackhi_epi32(mmtmpD0,mmtmpD1);
+      dl_ch_rho128[1] = simde_mm_packs_epi32(mmtmpD2,mmtmpD3);
 
       if (pilots==0) {
         // multiply by conjugated channel
-        mmtmpD0 = _mm_madd_epi16(dl_ch128[2],dl_ch128i[2]);
+        mmtmpD0 = simde_mm_madd_epi16(dl_ch128[2],dl_ch128i[2]);
         // mmtmpD0 contains real part of 4 consecutive outputs (32-bit)
-        mmtmpD1 = _mm_shufflelo_epi16(dl_ch128[2],_MM_SHUFFLE(2,3,0,1));
-        mmtmpD1 = _mm_shufflehi_epi16(mmtmpD1,_MM_SHUFFLE(2,3,0,1));
-        mmtmpD1 = _mm_sign_epi16(mmtmpD1,*(__m128i *)conjugate);
-        mmtmpD1 = _mm_madd_epi16(mmtmpD1,dl_ch128i[2]);
+        mmtmpD1 = simde_mm_shufflelo_epi16(dl_ch128[2], SIMDE_MM_SHUFFLE(2,3,0,1));
+        mmtmpD1 = simde_mm_shufflehi_epi16(mmtmpD1, SIMDE_MM_SHUFFLE(2,3,0,1));
+        mmtmpD1 = simde_mm_sign_epi16(mmtmpD1,*(simde__m128i *)conjugate);
+        mmtmpD1 = simde_mm_madd_epi16(mmtmpD1,dl_ch128i[2]);
         // mmtmpD1 contains imag part of 4 consecutive outputs (32-bit)
-        mmtmpD0 = _mm_srai_epi32(mmtmpD0,output_shift);
-        mmtmpD1 = _mm_srai_epi32(mmtmpD1,output_shift);
-        mmtmpD2 = _mm_unpacklo_epi32(mmtmpD0,mmtmpD1);
-        mmtmpD3 = _mm_unpackhi_epi32(mmtmpD0,mmtmpD1);
-        dl_ch_rho128[2] = _mm_packs_epi32(mmtmpD2,mmtmpD3);
+        mmtmpD0 = simde_mm_srai_epi32(mmtmpD0,output_shift);
+        mmtmpD1 = simde_mm_srai_epi32(mmtmpD1,output_shift);
+        mmtmpD2 = simde_mm_unpacklo_epi32(mmtmpD0,mmtmpD1);
+        mmtmpD3 = simde_mm_unpackhi_epi32(mmtmpD0,mmtmpD1);
+        dl_ch_rho128[2] = simde_mm_packs_epi32(mmtmpD2,mmtmpD3);
         dl_ch128+=3;
         dl_ch128i+=3;
         dl_ch_rho128+=3;
@@ -3093,10 +2453,8 @@ void dlsch_dual_stream_correlation(LTE_DL_FRAME_PARMS *frame_parms,
     }
   }
 
-  _mm_empty();
-  _m_empty();
-#elif defined(__arm__) || defined(__aarch64__)
-#endif
+  simde_mm_empty();
+  simde_m_empty();
 }
 
 
@@ -3112,26 +2470,25 @@ void dlsch_detection_mrc(LTE_DL_FRAME_PARMS *frame_parms,
                          unsigned char symbol,
                          unsigned short nb_rb,
                          unsigned char dual_stream_UE) {
-#if defined(__x86_64__)||defined(__i386__)
   unsigned char aatx;
   int i;
-  __m128i *rxdataF_comp128_0,*rxdataF_comp128_1,*rxdataF_comp128_i0,*rxdataF_comp128_i1,*dl_ch_mag128_0,*dl_ch_mag128_1,*dl_ch_mag128_0b,*dl_ch_mag128_1b,*rho128_0,*rho128_1,*rho128_i0,*rho128_i1,
+  simde__m128i *rxdataF_comp128_0,*rxdataF_comp128_1,*rxdataF_comp128_i0,*rxdataF_comp128_i1,*dl_ch_mag128_0,*dl_ch_mag128_1,*dl_ch_mag128_0b,*dl_ch_mag128_1b,*rho128_0,*rho128_1,*rho128_i0,*rho128_i1,
           *dl_ch_mag128_i0,*dl_ch_mag128_i1,*dl_ch_mag128_i0b,*dl_ch_mag128_i1b;
 
   if (frame_parms->nb_antennas_rx>1) {
     for (aatx=0; aatx<frame_parms->nb_antenna_ports_eNB; aatx++) {
-      rxdataF_comp128_0   = (__m128i *)&rxdataF_comp[(aatx<<1)][symbol*frame_parms->N_RB_DL*12];
-      rxdataF_comp128_1   = (__m128i *)&rxdataF_comp[(aatx<<1)+1][symbol*frame_parms->N_RB_DL*12];
-      dl_ch_mag128_0      = (__m128i *)&dl_ch_mag[(aatx<<1)][symbol*frame_parms->N_RB_DL*12];
-      dl_ch_mag128_1      = (__m128i *)&dl_ch_mag[(aatx<<1)+1][symbol*frame_parms->N_RB_DL*12];
-      dl_ch_mag128_0b     = (__m128i *)&dl_ch_magb[(aatx<<1)][symbol*frame_parms->N_RB_DL*12];
-      dl_ch_mag128_1b     = (__m128i *)&dl_ch_magb[(aatx<<1)+1][symbol*frame_parms->N_RB_DL*12];
+      rxdataF_comp128_0   = (simde__m128i *)&rxdataF_comp[(aatx<<1)][symbol*frame_parms->N_RB_DL*12];
+      rxdataF_comp128_1   = (simde__m128i *)&rxdataF_comp[(aatx<<1)+1][symbol*frame_parms->N_RB_DL*12];
+      dl_ch_mag128_0      = (simde__m128i *)&dl_ch_mag[(aatx<<1)][symbol*frame_parms->N_RB_DL*12];
+      dl_ch_mag128_1      = (simde__m128i *)&dl_ch_mag[(aatx<<1)+1][symbol*frame_parms->N_RB_DL*12];
+      dl_ch_mag128_0b     = (simde__m128i *)&dl_ch_magb[(aatx<<1)][symbol*frame_parms->N_RB_DL*12];
+      dl_ch_mag128_1b     = (simde__m128i *)&dl_ch_magb[(aatx<<1)+1][symbol*frame_parms->N_RB_DL*12];
 
       // MRC on each re of rb, both on MF output and magnitude (for 16QAM/64QAM llr computation)
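+      // The branches are averaged rather than summed: srai(.,1) halves each term and
+      // adds_epi16 saturates, avoiding 16-bit overflow; this mirrors the NEON halving
+      // add (vhaddq_s16) that the removed ARM-specific path used.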
       for (i=0; i<nb_rb*3; i++) {
-        rxdataF_comp128_0[i] = _mm_adds_epi16(_mm_srai_epi16(rxdataF_comp128_0[i],1),_mm_srai_epi16(rxdataF_comp128_1[i],1));
-        dl_ch_mag128_0[i]    = _mm_adds_epi16(_mm_srai_epi16(dl_ch_mag128_0[i],1),_mm_srai_epi16(dl_ch_mag128_1[i],1));
-        dl_ch_mag128_0b[i]   = _mm_adds_epi16(_mm_srai_epi16(dl_ch_mag128_0b[i],1),_mm_srai_epi16(dl_ch_mag128_1b[i],1));
+        rxdataF_comp128_0[i] = simde_mm_adds_epi16(simde_mm_srai_epi16(rxdataF_comp128_0[i],1),simde_mm_srai_epi16(rxdataF_comp128_1[i],1));
+        dl_ch_mag128_0[i]    = simde_mm_adds_epi16(simde_mm_srai_epi16(dl_ch_mag128_0[i],1),simde_mm_srai_epi16(dl_ch_mag128_1[i],1));
+        dl_ch_mag128_0b[i]   = simde_mm_adds_epi16(simde_mm_srai_epi16(dl_ch_mag128_0b[i],1),simde_mm_srai_epi16(dl_ch_mag128_1b[i],1));
         //       print_shorts("mrc comp0:",&rxdataF_comp128_0[i]);
         //       print_shorts("mrc mag0:",&dl_ch_mag128_0[i]);
         //       print_shorts("mrc mag0b:",&dl_ch_mag128_0b[i]);
@@ -3140,91 +2497,37 @@ void dlsch_detection_mrc(LTE_DL_FRAME_PARMS *frame_parms,
     }
 
     if (rho) {
-      rho128_0 = (__m128i *) &rho[0][symbol*frame_parms->N_RB_DL*12];
-      rho128_1 = (__m128i *) &rho[1][symbol*frame_parms->N_RB_DL*12];
+      rho128_0 = (simde__m128i *) &rho[0][symbol*frame_parms->N_RB_DL*12];
+      rho128_1 = (simde__m128i *) &rho[1][symbol*frame_parms->N_RB_DL*12];
 
       for (i=0; i<nb_rb*3; i++) {
         //      print_shorts("mrc rho0:",&rho128_0[i]);
         //      print_shorts("mrc rho1:",&rho128_1[i]);
-        rho128_0[i] = _mm_adds_epi16(_mm_srai_epi16(rho128_0[i],1),_mm_srai_epi16(rho128_1[i],1));
-      }
-    }
-
-    if (dual_stream_UE == 1) {
-      rho128_i0 = (__m128i *) &rho_i[0][symbol*frame_parms->N_RB_DL*12];
-      rho128_i1 = (__m128i *) &rho_i[1][symbol*frame_parms->N_RB_DL*12];
-      rxdataF_comp128_i0   = (__m128i *)&rxdataF_comp_i[0][symbol*frame_parms->N_RB_DL*12];
-      rxdataF_comp128_i1   = (__m128i *)&rxdataF_comp_i[1][symbol*frame_parms->N_RB_DL*12];
-      dl_ch_mag128_i0      = (__m128i *)&dl_ch_mag_i[0][symbol*frame_parms->N_RB_DL*12];
-      dl_ch_mag128_i1      = (__m128i *)&dl_ch_mag_i[1][symbol*frame_parms->N_RB_DL*12];
-      dl_ch_mag128_i0b     = (__m128i *)&dl_ch_magb_i[0][symbol*frame_parms->N_RB_DL*12];
-      dl_ch_mag128_i1b     = (__m128i *)&dl_ch_magb_i[1][symbol*frame_parms->N_RB_DL*12];
-
-      for (i=0; i<nb_rb*3; i++) {
-        rxdataF_comp128_i0[i] = _mm_adds_epi16(_mm_srai_epi16(rxdataF_comp128_i0[i],1),_mm_srai_epi16(rxdataF_comp128_i1[i],1));
-        rho128_i0[i]           = _mm_adds_epi16(_mm_srai_epi16(rho128_i0[i],1),_mm_srai_epi16(rho128_i1[i],1));
-        dl_ch_mag128_i0[i]    = _mm_adds_epi16(_mm_srai_epi16(dl_ch_mag128_i0[i],1),_mm_srai_epi16(dl_ch_mag128_i1[i],1));
-        dl_ch_mag128_i0b[i]    = _mm_adds_epi16(_mm_srai_epi16(dl_ch_mag128_i0b[i],1),_mm_srai_epi16(dl_ch_mag128_i1b[i],1));
-      }
-    }
-  }
-
-  _mm_empty();
-  _m_empty();
-#elif defined(__arm__) || defined(__aarch64__)
-  unsigned char aatx;
-  int i;
-  int16x8_t *rxdataF_comp128_0,*rxdataF_comp128_1,*rxdataF_comp128_i0,*rxdataF_comp128_i1,*dl_ch_mag128_0,*dl_ch_mag128_1,*dl_ch_mag128_0b,*dl_ch_mag128_1b,*rho128_0,*rho128_1,*rho128_i0,*rho128_i1,
-            *dl_ch_mag128_i0,*dl_ch_mag128_i1,*dl_ch_mag128_i0b,*dl_ch_mag128_i1b;
-
-  if (frame_parms->nb_antennas_rx>1) {
-    for (aatx=0; aatx<frame_parms->nb_antenna_ports_eNB; aatx++) {
-      rxdataF_comp128_0   = (int16x8_t *)&rxdataF_comp[(aatx<<1)][symbol*frame_parms->N_RB_DL*12];
-      rxdataF_comp128_1   = (int16x8_t *)&rxdataF_comp[(aatx<<1)+1][symbol*frame_parms->N_RB_DL*12];
-      dl_ch_mag128_0      = (int16x8_t *)&dl_ch_mag[(aatx<<1)][symbol*frame_parms->N_RB_DL*12];
-      dl_ch_mag128_1      = (int16x8_t *)&dl_ch_mag[(aatx<<1)+1][symbol*frame_parms->N_RB_DL*12];
-      dl_ch_mag128_0b     = (int16x8_t *)&dl_ch_magb[(aatx<<1)][symbol*frame_parms->N_RB_DL*12];
-      dl_ch_mag128_1b     = (int16x8_t *)&dl_ch_magb[(aatx<<1)+1][symbol*frame_parms->N_RB_DL*12];
-
-      // MRC on each re of rb, both on MF output and magnitude (for 16QAM/64QAM llr computation)
-      for (i=0; i<nb_rb*3; i++) {
-        rxdataF_comp128_0[i] = vhaddq_s16(rxdataF_comp128_0[i],rxdataF_comp128_1[i]);
-        dl_ch_mag128_0[i]    = vhaddq_s16(dl_ch_mag128_0[i],dl_ch_mag128_1[i]);
-        dl_ch_mag128_0b[i]   = vhaddq_s16(dl_ch_mag128_0b[i],dl_ch_mag128_1b[i]);
-      }
-    }
-
-    if (rho) {
-      rho128_0 = (int16x8_t *) &rho[0][symbol*frame_parms->N_RB_DL*12];
-      rho128_1 = (int16x8_t *) &rho[1][symbol*frame_parms->N_RB_DL*12];
-
-      for (i=0; i<nb_rb*3; i++) {
-        //  print_shorts("mrc rho0:",&rho128_0[i]);
-        //  print_shorts("mrc rho1:",&rho128_1[i]);
-        rho128_0[i] = vhaddq_s16(rho128_0[i],rho128_1[i]);
+        rho128_0[i] = simde_mm_adds_epi16(simde_mm_srai_epi16(rho128_0[i],1),simde_mm_srai_epi16(rho128_1[i],1));
       }
     }
 
     if (dual_stream_UE == 1) {
-      rho128_i0 = (int16x8_t *) &rho_i[0][symbol*frame_parms->N_RB_DL*12];
-      rho128_i1 = (int16x8_t *) &rho_i[1][symbol*frame_parms->N_RB_DL*12];
-      rxdataF_comp128_i0   = (int16x8_t *)&rxdataF_comp_i[0][symbol*frame_parms->N_RB_DL*12];
-      rxdataF_comp128_i1   = (int16x8_t *)&rxdataF_comp_i[1][symbol*frame_parms->N_RB_DL*12];
-      dl_ch_mag128_i0      = (int16x8_t *)&dl_ch_mag_i[0][symbol*frame_parms->N_RB_DL*12];
-      dl_ch_mag128_i1      = (int16x8_t *)&dl_ch_mag_i[1][symbol*frame_parms->N_RB_DL*12];
-      dl_ch_mag128_i0b     = (int16x8_t *)&dl_ch_magb_i[0][symbol*frame_parms->N_RB_DL*12];
-      dl_ch_mag128_i1b     = (int16x8_t *)&dl_ch_magb_i[1][symbol*frame_parms->N_RB_DL*12];
+      rho128_i0 = (simde__m128i *) &rho_i[0][symbol*frame_parms->N_RB_DL*12];
+      rho128_i1 = (simde__m128i *) &rho_i[1][symbol*frame_parms->N_RB_DL*12];
+      rxdataF_comp128_i0   = (simde__m128i *)&rxdataF_comp_i[0][symbol*frame_parms->N_RB_DL*12];
+      rxdataF_comp128_i1   = (simde__m128i *)&rxdataF_comp_i[1][symbol*frame_parms->N_RB_DL*12];
+      dl_ch_mag128_i0      = (simde__m128i *)&dl_ch_mag_i[0][symbol*frame_parms->N_RB_DL*12];
+      dl_ch_mag128_i1      = (simde__m128i *)&dl_ch_mag_i[1][symbol*frame_parms->N_RB_DL*12];
+      dl_ch_mag128_i0b     = (simde__m128i *)&dl_ch_magb_i[0][symbol*frame_parms->N_RB_DL*12];
+      dl_ch_mag128_i1b     = (simde__m128i *)&dl_ch_magb_i[1][symbol*frame_parms->N_RB_DL*12];
 
       for (i=0; i<nb_rb*3; i++) {
-        rxdataF_comp128_i0[i] = vhaddq_s16(rxdataF_comp128_i0[i],rxdataF_comp128_i1[i]);
-        rho128_i0[i]          = vhaddq_s16(rho128_i0[i],rho128_i1[i]);
-        dl_ch_mag128_i0[i]    = vhaddq_s16(dl_ch_mag128_i0[i],dl_ch_mag128_i1[i]);
-        dl_ch_mag128_i0b[i]   = vhaddq_s16(dl_ch_mag128_i0b[i],dl_ch_mag128_i1b[i]);
+        rxdataF_comp128_i0[i] = simde_mm_adds_epi16(simde_mm_srai_epi16(rxdataF_comp128_i0[i],1),simde_mm_srai_epi16(rxdataF_comp128_i1[i],1));
+        rho128_i0[i]          = simde_mm_adds_epi16(simde_mm_srai_epi16(rho128_i0[i],1),simde_mm_srai_epi16(rho128_i1[i],1));
+        dl_ch_mag128_i0[i]    = simde_mm_adds_epi16(simde_mm_srai_epi16(dl_ch_mag128_i0[i],1),simde_mm_srai_epi16(dl_ch_mag128_i1[i],1));
+        dl_ch_mag128_i0b[i]   = simde_mm_adds_epi16(simde_mm_srai_epi16(dl_ch_mag128_i0b[i],1),simde_mm_srai_epi16(dl_ch_mag128_i1b[i],1));
       }
     }
   }
 
-#endif
+  simde_mm_empty();
+  simde_m_empty();
 }
 
 void dlsch_detection_mrc_TM34(LTE_DL_FRAME_PARMS *frame_parms,
@@ -3235,10 +2538,10 @@ void dlsch_detection_mrc_TM34(LTE_DL_FRAME_PARMS *frame_parms,
                               unsigned short nb_rb,
                               unsigned char dual_stream_UE) {
   int i;
-  __m128i *rxdataF_comp128_0,*rxdataF_comp128_1;
-  __m128i *dl_ch_mag128_0,*dl_ch_mag128_1;
-  __m128i *dl_ch_mag128_0b,*dl_ch_mag128_1b;
-  __m128i *rho128_0, *rho128_1;
+  simde__m128i *rxdataF_comp128_0,*rxdataF_comp128_1;
+  simde__m128i *dl_ch_mag128_0,*dl_ch_mag128_1;
+  simde__m128i *dl_ch_mag128_0b,*dl_ch_mag128_1b;
+  simde__m128i *rho128_0, *rho128_1;
   int **rxdataF_comp0           = pdsch_vars->rxdataF_comp0;
   int **rxdataF_comp1           = pdsch_vars->rxdataF_comp1[harq_pid][round];
   int **dl_ch_rho_ext           = pdsch_vars->dl_ch_rho_ext[harq_pid][round]; //for second stream
@@ -3247,109 +2550,109 @@ void dlsch_detection_mrc_TM34(LTE_DL_FRAME_PARMS *frame_parms,
   int **dl_ch_mag1              = pdsch_vars->dl_ch_mag1[harq_pid][round];
   int **dl_ch_magb0             = pdsch_vars->dl_ch_magb0;
   int **dl_ch_magb1             = pdsch_vars->dl_ch_magb1[harq_pid][round];
-  rxdataF_comp128_0   = (__m128i *)&rxdataF_comp0[0][symbol*frame_parms->N_RB_DL*12];
-  rxdataF_comp128_1   = (__m128i *)&rxdataF_comp0[1][symbol*frame_parms->N_RB_DL*12];
-  dl_ch_mag128_0      = (__m128i *)&dl_ch_mag0[0][symbol*frame_parms->N_RB_DL*12];
-  dl_ch_mag128_1      = (__m128i *)&dl_ch_mag0[1][symbol*frame_parms->N_RB_DL*12];
-  dl_ch_mag128_0b     = (__m128i *)&dl_ch_magb0[0][symbol*frame_parms->N_RB_DL*12];
-  dl_ch_mag128_1b     = (__m128i *)&dl_ch_magb0[1][symbol*frame_parms->N_RB_DL*12];
-  rho128_0 = (__m128i *) &dl_ch_rho2_ext[0][symbol*frame_parms->N_RB_DL*12];
-  rho128_1 = (__m128i *) &dl_ch_rho2_ext[1][symbol*frame_parms->N_RB_DL*12];
+  rxdataF_comp128_0   = (simde__m128i *)&rxdataF_comp0[0][symbol*frame_parms->N_RB_DL*12];
+  rxdataF_comp128_1   = (simde__m128i *)&rxdataF_comp0[1][symbol*frame_parms->N_RB_DL*12];
+  dl_ch_mag128_0      = (simde__m128i *)&dl_ch_mag0[0][symbol*frame_parms->N_RB_DL*12];
+  dl_ch_mag128_1      = (simde__m128i *)&dl_ch_mag0[1][symbol*frame_parms->N_RB_DL*12];
+  dl_ch_mag128_0b     = (simde__m128i *)&dl_ch_magb0[0][symbol*frame_parms->N_RB_DL*12];
+  dl_ch_mag128_1b     = (simde__m128i *)&dl_ch_magb0[1][symbol*frame_parms->N_RB_DL*12];
+  rho128_0 = (simde__m128i *) &dl_ch_rho2_ext[0][symbol*frame_parms->N_RB_DL*12];
+  rho128_1 = (simde__m128i *) &dl_ch_rho2_ext[1][symbol*frame_parms->N_RB_DL*12];
 
   // MRC on each re of rb, both on MF output and magnitude (for 16QAM/64QAM llr computation)
   for (i=0; i<nb_rb*3; i++) {
-    rxdataF_comp128_0[i] = _mm_adds_epi16(_mm_srai_epi16(rxdataF_comp128_0[i],1),_mm_srai_epi16(rxdataF_comp128_1[i],1));
-    dl_ch_mag128_0[i]    = _mm_adds_epi16(_mm_srai_epi16(dl_ch_mag128_0[i],1),_mm_srai_epi16(dl_ch_mag128_1[i],1));
-    dl_ch_mag128_0b[i]   = _mm_adds_epi16(_mm_srai_epi16(dl_ch_mag128_0b[i],1),_mm_srai_epi16(dl_ch_mag128_1b[i],1));
-    rho128_0[i] = _mm_adds_epi16(_mm_srai_epi16(rho128_0[i],1),_mm_srai_epi16(rho128_1[i],1));
+    rxdataF_comp128_0[i] = simde_mm_adds_epi16(simde_mm_srai_epi16(rxdataF_comp128_0[i],1),simde_mm_srai_epi16(rxdataF_comp128_1[i],1));
+    dl_ch_mag128_0[i]    = simde_mm_adds_epi16(simde_mm_srai_epi16(dl_ch_mag128_0[i],1),simde_mm_srai_epi16(dl_ch_mag128_1[i],1));
+    dl_ch_mag128_0b[i]   = simde_mm_adds_epi16(simde_mm_srai_epi16(dl_ch_mag128_0b[i],1),simde_mm_srai_epi16(dl_ch_mag128_1b[i],1));
+    rho128_0[i] = simde_mm_adds_epi16(simde_mm_srai_epi16(rho128_0[i],1),simde_mm_srai_epi16(rho128_1[i],1));
 
     if (frame_parms->nb_antennas_rx>2) {
-      __m128i *rxdataF_comp128_2 = NULL;
-      __m128i *rxdataF_comp128_3 = NULL;
-      __m128i *dl_ch_mag128_2 = NULL;
-      __m128i *dl_ch_mag128_3 = NULL;
-      __m128i *dl_ch_mag128_2b = NULL;
-      __m128i *dl_ch_mag128_3b = NULL;
-      __m128i *rho128_2 = NULL;
-      __m128i *rho128_3 = NULL;
-      rxdataF_comp128_2   = (__m128i *)&rxdataF_comp0[2][symbol*frame_parms->N_RB_DL*12];
-      rxdataF_comp128_3   = (__m128i *)&rxdataF_comp0[3][symbol*frame_parms->N_RB_DL*12];
-      dl_ch_mag128_2      = (__m128i *)&dl_ch_mag0[2][symbol*frame_parms->N_RB_DL*12];
-      dl_ch_mag128_3      = (__m128i *)&dl_ch_mag0[3][symbol*frame_parms->N_RB_DL*12];
-      dl_ch_mag128_2b     = (__m128i *)&dl_ch_magb0[2][symbol*frame_parms->N_RB_DL*12];
-      dl_ch_mag128_3b     = (__m128i *)&dl_ch_magb0[3][symbol*frame_parms->N_RB_DL*12];
-      rho128_2 = (__m128i *) &dl_ch_rho2_ext[2][symbol*frame_parms->N_RB_DL*12];
-      rho128_3 = (__m128i *) &dl_ch_rho2_ext[3][symbol*frame_parms->N_RB_DL*12];
+      simde__m128i *rxdataF_comp128_2 = NULL;
+      simde__m128i *rxdataF_comp128_3 = NULL;
+      simde__m128i *dl_ch_mag128_2 = NULL;
+      simde__m128i *dl_ch_mag128_3 = NULL;
+      simde__m128i *dl_ch_mag128_2b = NULL;
+      simde__m128i *dl_ch_mag128_3b = NULL;
+      simde__m128i *rho128_2 = NULL;
+      simde__m128i *rho128_3 = NULL;
+      rxdataF_comp128_2   = (simde__m128i *)&rxdataF_comp0[2][symbol*frame_parms->N_RB_DL*12];
+      rxdataF_comp128_3   = (simde__m128i *)&rxdataF_comp0[3][symbol*frame_parms->N_RB_DL*12];
+      dl_ch_mag128_2      = (simde__m128i *)&dl_ch_mag0[2][symbol*frame_parms->N_RB_DL*12];
+      dl_ch_mag128_3      = (simde__m128i *)&dl_ch_mag0[3][symbol*frame_parms->N_RB_DL*12];
+      dl_ch_mag128_2b     = (simde__m128i *)&dl_ch_magb0[2][symbol*frame_parms->N_RB_DL*12];
+      dl_ch_mag128_3b     = (simde__m128i *)&dl_ch_magb0[3][symbol*frame_parms->N_RB_DL*12];
+      rho128_2 = (simde__m128i *) &dl_ch_rho2_ext[2][symbol*frame_parms->N_RB_DL*12];
+      rho128_3 = (simde__m128i *) &dl_ch_rho2_ext[3][symbol*frame_parms->N_RB_DL*12];
       /*rxdataF_comp*/
-      rxdataF_comp128_2[i] = _mm_adds_epi16(_mm_srai_epi16(rxdataF_comp128_2[i],1),_mm_srai_epi16(rxdataF_comp128_3[i],1));
-      rxdataF_comp128_0[i] = _mm_adds_epi16(_mm_srai_epi16(rxdataF_comp128_0[i],1),_mm_srai_epi16(rxdataF_comp128_2[i],1));
+      rxdataF_comp128_2[i] = simde_mm_adds_epi16(simde_mm_srai_epi16(rxdataF_comp128_2[i],1),simde_mm_srai_epi16(rxdataF_comp128_3[i],1));
+      rxdataF_comp128_0[i] = simde_mm_adds_epi16(simde_mm_srai_epi16(rxdataF_comp128_0[i],1),simde_mm_srai_epi16(rxdataF_comp128_2[i],1));
       /*dl_ch_mag*/
-      dl_ch_mag128_2[i] = _mm_adds_epi16(_mm_srai_epi16(dl_ch_mag128_2[i],1),_mm_srai_epi16(dl_ch_mag128_3[i],1));
-      dl_ch_mag128_0[i] = _mm_adds_epi16(_mm_srai_epi16(dl_ch_mag128_0[i],1),_mm_srai_epi16(dl_ch_mag128_2[i],1));
+      dl_ch_mag128_2[i] = simde_mm_adds_epi16(simde_mm_srai_epi16(dl_ch_mag128_2[i],1),simde_mm_srai_epi16(dl_ch_mag128_3[i],1));
+      dl_ch_mag128_0[i] = simde_mm_adds_epi16(simde_mm_srai_epi16(dl_ch_mag128_0[i],1),simde_mm_srai_epi16(dl_ch_mag128_2[i],1));
-      /*dl_ch_mag*/
+      /*dl_ch_magb*/
-      dl_ch_mag128_2b[i] = _mm_adds_epi16(_mm_srai_epi16(dl_ch_mag128_2b[i],1),_mm_srai_epi16(dl_ch_mag128_3b[i],1));
-      dl_ch_mag128_0b[i] = _mm_adds_epi16(_mm_srai_epi16(dl_ch_mag128_0b[i],1),_mm_srai_epi16(dl_ch_mag128_2b[i],1));
+      dl_ch_mag128_2b[i] = simde_mm_adds_epi16(simde_mm_srai_epi16(dl_ch_mag128_2b[i],1),simde_mm_srai_epi16(dl_ch_mag128_3b[i],1));
+      dl_ch_mag128_0b[i] = simde_mm_adds_epi16(simde_mm_srai_epi16(dl_ch_mag128_0b[i],1),simde_mm_srai_epi16(dl_ch_mag128_2b[i],1));
       /*rho*/
-      rho128_2[i] = _mm_adds_epi16(_mm_srai_epi16(rho128_2[i],1),_mm_srai_epi16(rho128_3[i],1));
-      rho128_0[i] = _mm_adds_epi16(_mm_srai_epi16(rho128_0[i],1),_mm_srai_epi16(rho128_2[i],1));
+      rho128_2[i] = simde_mm_adds_epi16(simde_mm_srai_epi16(rho128_2[i],1),simde_mm_srai_epi16(rho128_3[i],1));
+      rho128_0[i] = simde_mm_adds_epi16(simde_mm_srai_epi16(rho128_0[i],1),simde_mm_srai_epi16(rho128_2[i],1));
     }
   }
 
   if (dual_stream_UE == 1) {
-    __m128i *dl_ch_mag128_i0, *dl_ch_mag128_i1;
-    __m128i *dl_ch_mag128_i0b, *dl_ch_mag128_i1b;
-    __m128i *rho128_i0, *rho128_i1;
-    __m128i *rxdataF_comp128_i0, *rxdataF_comp128_i1;
-    rxdataF_comp128_i0   = (__m128i *)&rxdataF_comp1[0][symbol*frame_parms->N_RB_DL*12];
-    rxdataF_comp128_i1   = (__m128i *)&rxdataF_comp1[1][symbol*frame_parms->N_RB_DL*12];
-    dl_ch_mag128_i0      = (__m128i *)&dl_ch_mag1[0][symbol*frame_parms->N_RB_DL*12];
-    dl_ch_mag128_i1      = (__m128i *)&dl_ch_mag1[1][symbol*frame_parms->N_RB_DL*12];
-    dl_ch_mag128_i0b     = (__m128i *)&dl_ch_magb1[0][symbol*frame_parms->N_RB_DL*12];
-    dl_ch_mag128_i1b     = (__m128i *)&dl_ch_magb1[1][symbol*frame_parms->N_RB_DL*12];
-    rho128_i0 = (__m128i *) &dl_ch_rho_ext[0][symbol*frame_parms->N_RB_DL*12];
-    rho128_i1 = (__m128i *) &dl_ch_rho_ext[1][symbol*frame_parms->N_RB_DL*12];
+    simde__m128i *dl_ch_mag128_i0, *dl_ch_mag128_i1;
+    simde__m128i *dl_ch_mag128_i0b, *dl_ch_mag128_i1b;
+    simde__m128i *rho128_i0, *rho128_i1;
+    simde__m128i *rxdataF_comp128_i0, *rxdataF_comp128_i1;
+    rxdataF_comp128_i0   = (simde__m128i *)&rxdataF_comp1[0][symbol*frame_parms->N_RB_DL*12];
+    rxdataF_comp128_i1   = (simde__m128i *)&rxdataF_comp1[1][symbol*frame_parms->N_RB_DL*12];
+    dl_ch_mag128_i0      = (simde__m128i *)&dl_ch_mag1[0][symbol*frame_parms->N_RB_DL*12];
+    dl_ch_mag128_i1      = (simde__m128i *)&dl_ch_mag1[1][symbol*frame_parms->N_RB_DL*12];
+    dl_ch_mag128_i0b     = (simde__m128i *)&dl_ch_magb1[0][symbol*frame_parms->N_RB_DL*12];
+    dl_ch_mag128_i1b     = (simde__m128i *)&dl_ch_magb1[1][symbol*frame_parms->N_RB_DL*12];
+    rho128_i0 = (simde__m128i *) &dl_ch_rho_ext[0][symbol*frame_parms->N_RB_DL*12];
+    rho128_i1 = (simde__m128i *) &dl_ch_rho_ext[1][symbol*frame_parms->N_RB_DL*12];
 
     for (i=0; i<nb_rb*3; i++) {
-      rxdataF_comp128_i0[i] = _mm_adds_epi16(_mm_srai_epi16(rxdataF_comp128_i0[i],1),_mm_srai_epi16(rxdataF_comp128_i1[i],1));
-      dl_ch_mag128_i0[i]    = _mm_adds_epi16(_mm_srai_epi16(dl_ch_mag128_i0[i],1),_mm_srai_epi16(dl_ch_mag128_i1[i],1));
-      dl_ch_mag128_i0b[i]    = _mm_adds_epi16(_mm_srai_epi16(dl_ch_mag128_i0b[i],1),_mm_srai_epi16(dl_ch_mag128_i1b[i],1));
-      rho128_i0[i]           = _mm_adds_epi16(_mm_srai_epi16(rho128_i0[i],1),_mm_srai_epi16(rho128_i1[i],1));
+      rxdataF_comp128_i0[i] = simde_mm_adds_epi16(simde_mm_srai_epi16(rxdataF_comp128_i0[i],1),simde_mm_srai_epi16(rxdataF_comp128_i1[i],1));
+      dl_ch_mag128_i0[i]    = simde_mm_adds_epi16(simde_mm_srai_epi16(dl_ch_mag128_i0[i],1),simde_mm_srai_epi16(dl_ch_mag128_i1[i],1));
+      dl_ch_mag128_i0b[i]    = simde_mm_adds_epi16(simde_mm_srai_epi16(dl_ch_mag128_i0b[i],1),simde_mm_srai_epi16(dl_ch_mag128_i1b[i],1));
+      rho128_i0[i]           = simde_mm_adds_epi16(simde_mm_srai_epi16(rho128_i0[i],1),simde_mm_srai_epi16(rho128_i1[i],1));
 
       if (frame_parms->nb_antennas_rx>2) {
-        __m128i *rxdataF_comp128_i2 = NULL;
-        __m128i *rxdataF_comp128_i3 = NULL;
-        __m128i *dl_ch_mag128_i2 = NULL;
-        __m128i *dl_ch_mag128_i3 = NULL;
-        __m128i *dl_ch_mag128_i2b = NULL;
-        __m128i *dl_ch_mag128_i3b = NULL;
-        __m128i *rho128_i2 = NULL;
-        __m128i *rho128_i3 = NULL;
-        rxdataF_comp128_i2   = (__m128i *)&rxdataF_comp1[2][symbol*frame_parms->N_RB_DL*12];
-        rxdataF_comp128_i3   = (__m128i *)&rxdataF_comp1[3][symbol*frame_parms->N_RB_DL*12];
-        dl_ch_mag128_i2      = (__m128i *)&dl_ch_mag1[2][symbol*frame_parms->N_RB_DL*12];
-        dl_ch_mag128_i3      = (__m128i *)&dl_ch_mag1[3][symbol*frame_parms->N_RB_DL*12];
-        dl_ch_mag128_i2b     = (__m128i *)&dl_ch_magb1[2][symbol*frame_parms->N_RB_DL*12];
-        dl_ch_mag128_i3b     = (__m128i *)&dl_ch_magb1[3][symbol*frame_parms->N_RB_DL*12];
-        rho128_i2 = (__m128i *) &dl_ch_rho_ext[2][symbol*frame_parms->N_RB_DL*12];
-        rho128_i3 = (__m128i *) &dl_ch_rho_ext[3][symbol*frame_parms->N_RB_DL*12];
+        simde__m128i *rxdataF_comp128_i2 = NULL;
+        simde__m128i *rxdataF_comp128_i3 = NULL;
+        simde__m128i *dl_ch_mag128_i2 = NULL;
+        simde__m128i *dl_ch_mag128_i3 = NULL;
+        simde__m128i *dl_ch_mag128_i2b = NULL;
+        simde__m128i *dl_ch_mag128_i3b = NULL;
+        simde__m128i *rho128_i2 = NULL;
+        simde__m128i *rho128_i3 = NULL;
+        rxdataF_comp128_i2   = (simde__m128i *)&rxdataF_comp1[2][symbol*frame_parms->N_RB_DL*12];
+        rxdataF_comp128_i3   = (simde__m128i *)&rxdataF_comp1[3][symbol*frame_parms->N_RB_DL*12];
+        dl_ch_mag128_i2      = (simde__m128i *)&dl_ch_mag1[2][symbol*frame_parms->N_RB_DL*12];
+        dl_ch_mag128_i3      = (simde__m128i *)&dl_ch_mag1[3][symbol*frame_parms->N_RB_DL*12];
+        dl_ch_mag128_i2b     = (simde__m128i *)&dl_ch_magb1[2][symbol*frame_parms->N_RB_DL*12];
+        dl_ch_mag128_i3b     = (simde__m128i *)&dl_ch_magb1[3][symbol*frame_parms->N_RB_DL*12];
+        rho128_i2 = (simde__m128i *) &dl_ch_rho_ext[2][symbol*frame_parms->N_RB_DL*12];
+        rho128_i3 = (simde__m128i *) &dl_ch_rho_ext[3][symbol*frame_parms->N_RB_DL*12];
         /*rxdataF_comp*/
-        rxdataF_comp128_i2[i] = _mm_adds_epi16(_mm_srai_epi16(rxdataF_comp128_i2[i],1),_mm_srai_epi16(rxdataF_comp128_i3[i],1));
-        rxdataF_comp128_i0[i] = _mm_adds_epi16(_mm_srai_epi16(rxdataF_comp128_i0[i],1),_mm_srai_epi16(rxdataF_comp128_i2[i],1));
+        rxdataF_comp128_i2[i] = simde_mm_adds_epi16(simde_mm_srai_epi16(rxdataF_comp128_i2[i],1),simde_mm_srai_epi16(rxdataF_comp128_i3[i],1));
+        rxdataF_comp128_i0[i] = simde_mm_adds_epi16(simde_mm_srai_epi16(rxdataF_comp128_i0[i],1),simde_mm_srai_epi16(rxdataF_comp128_i2[i],1));
         /*dl_ch_mag*/
-        dl_ch_mag128_i2[i] = _mm_adds_epi16(_mm_srai_epi16(dl_ch_mag128_i2[i],1),_mm_srai_epi16(dl_ch_mag128_i3[i],1));
-        dl_ch_mag128_i0[i] = _mm_adds_epi16(_mm_srai_epi16(dl_ch_mag128_i0[i],1),_mm_srai_epi16(dl_ch_mag128_i2[i],1));
+        dl_ch_mag128_i2[i] = simde_mm_adds_epi16(simde_mm_srai_epi16(dl_ch_mag128_i2[i],1),simde_mm_srai_epi16(dl_ch_mag128_i3[i],1));
+        dl_ch_mag128_i0[i] = simde_mm_adds_epi16(simde_mm_srai_epi16(dl_ch_mag128_i0[i],1),simde_mm_srai_epi16(dl_ch_mag128_i2[i],1));
-        /*dl_ch_mag*/
+        /*dl_ch_magb*/
-        dl_ch_mag128_i2b[i] = _mm_adds_epi16(_mm_srai_epi16(dl_ch_mag128_i2b[i],1),_mm_srai_epi16(dl_ch_mag128_i3b[i],1));
-        dl_ch_mag128_i0b[i] = _mm_adds_epi16(_mm_srai_epi16(dl_ch_mag128_i0b[i],1),_mm_srai_epi16(dl_ch_mag128_i2b[i],1));
+        dl_ch_mag128_i2b[i] = simde_mm_adds_epi16(simde_mm_srai_epi16(dl_ch_mag128_i2b[i],1),simde_mm_srai_epi16(dl_ch_mag128_i3b[i],1));
+        dl_ch_mag128_i0b[i] = simde_mm_adds_epi16(simde_mm_srai_epi16(dl_ch_mag128_i0b[i],1),simde_mm_srai_epi16(dl_ch_mag128_i2b[i],1));
         /*rho*/
-        rho128_i2[i] = _mm_adds_epi16(_mm_srai_epi16(rho128_i2[i],1),_mm_srai_epi16(rho128_i3[i],1));
-        rho128_i0[i] = _mm_adds_epi16(_mm_srai_epi16(rho128_i0[i],1),_mm_srai_epi16(rho128_i2[i],1));
+        rho128_i2[i] = simde_mm_adds_epi16(simde_mm_srai_epi16(rho128_i2[i],1),simde_mm_srai_epi16(rho128_i3[i],1));
+        rho128_i0[i] = simde_mm_adds_epi16(simde_mm_srai_epi16(rho128_i0[i],1),simde_mm_srai_epi16(rho128_i2[i],1));
       }
     }
   }
 
-  _mm_empty();
-  _m_empty();
+  simde_mm_empty();
+  simde_m_empty();
 }
 
 void dlsch_scale_channel(int **dl_ch_estimates_ext,
@@ -3357,10 +2660,9 @@ void dlsch_scale_channel(int **dl_ch_estimates_ext,
                          LTE_UE_DLSCH_t **dlsch_ue,
                          uint8_t symbol,
                          unsigned short nb_rb) {
-#if defined(__x86_64__)||defined(__i386__)
   short rb, ch_amp;
   unsigned char aatx,aarx,pilots=0,symbol_mod;
-  __m128i *dl_ch128, ch_amp128;
+  simde__m128i *dl_ch128, ch_amp128;
   symbol_mod = (symbol>=(7-frame_parms->Ncp)) ? symbol-(7-frame_parms->Ncp) : symbol;
 
   if ((symbol_mod == 0) || (symbol_mod == (4-frame_parms->Ncp))) {
@@ -3374,31 +2676,28 @@ void dlsch_scale_channel(int **dl_ch_estimates_ext,
   ch_amp = ((pilots) ? (dlsch_ue[0]->sqrt_rho_b) : (dlsch_ue[0]->sqrt_rho_a));
   LOG_D(PHY,"Scaling PDSCH Chest in OFDM symbol %d by %d, pilots %d nb_rb %d NCP %d symbol %d\n",symbol_mod,ch_amp,pilots,nb_rb,frame_parms->Ncp,symbol);
   // printf("Scaling PDSCH Chest in OFDM symbol %d by %d\n",symbol_mod,ch_amp);
-  ch_amp128 = _mm_set1_epi16(ch_amp); // Q3.13
+  ch_amp128 = simde_mm_set1_epi16(ch_amp); // Q3.13
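+  // simde_mm_mulhi_epi16 keeps the high 16 bits of the product, i.e. (x*a)>>16;
+  // together with the <<3 below this is (x*a)>>13, a unity-gain multiply for a
+  // Q3.13 amplitude (e.g. ch_amp = 1<<13 reproduces x up to its 3 LSBs).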
 
   for (aatx=0; aatx<frame_parms->nb_antenna_ports_eNB; aatx++) {
     for (aarx=0; aarx<frame_parms->nb_antennas_rx; aarx++) {
-      dl_ch128=(__m128i *)&dl_ch_estimates_ext[(aatx<<1)+aarx][symbol*frame_parms->N_RB_DL*12];
+      dl_ch128=(simde__m128i *)&dl_ch_estimates_ext[(aatx<<1)+aarx][symbol*frame_parms->N_RB_DL*12];
 
       for (rb=0; rb<nb_rb; rb++) {
-        dl_ch128[0] = _mm_mulhi_epi16(dl_ch128[0],ch_amp128);
-        dl_ch128[0] = _mm_slli_epi16(dl_ch128[0],3);
-        dl_ch128[1] = _mm_mulhi_epi16(dl_ch128[1],ch_amp128);
-        dl_ch128[1] = _mm_slli_epi16(dl_ch128[1],3);
+        dl_ch128[0] = simde_mm_mulhi_epi16(dl_ch128[0],ch_amp128);
+        dl_ch128[0] = simde_mm_slli_epi16(dl_ch128[0],3);
+        dl_ch128[1] = simde_mm_mulhi_epi16(dl_ch128[1],ch_amp128);
+        dl_ch128[1] = simde_mm_slli_epi16(dl_ch128[1],3);
 
         if (pilots) {
           dl_ch128+=2;
         } else {
-          dl_ch128[2] = _mm_mulhi_epi16(dl_ch128[2],ch_amp128);
-          dl_ch128[2] = _mm_slli_epi16(dl_ch128[2],3);
+          dl_ch128[2] = simde_mm_mulhi_epi16(dl_ch128[2],ch_amp128);
+          dl_ch128[2] = simde_mm_slli_epi16(dl_ch128[2],3);
           dl_ch128+=3;
         }
       }
     }
   }
-
-#elif defined(__arm__) || defined(__aarch64__)
-#endif
 }
 
 //compute average channel_level on each (TX,RX) antenna pair
@@ -3407,11 +2706,10 @@ void dlsch_channel_level(int **dl_ch_estimates_ext,
                          int32_t *avg,
                          uint8_t symbol,
                          unsigned short nb_rb) {
-#if defined(__x86_64__)||defined(__i386__)
   //printf("symbol = %d\n", symbol);
   short rb;
   unsigned char aatx,aarx,nre=12,symbol_mod;
-  __m128i *dl_ch128, avg128D;
+  simde__m128i *dl_ch128, avg128D;
   symbol_mod = (symbol>=(7-frame_parms->Ncp)) ? symbol-(7-frame_parms->Ncp) : symbol;
 
   if (((symbol_mod == 0) || (symbol_mod == (frame_parms->Ncp-1)))&&(frame_parms->nb_antenna_ports_eNB!=1))
@@ -3429,23 +2727,28 @@ void dlsch_channel_level(int **dl_ch_estimates_ext,
     for (aarx=0; aarx<frame_parms->nb_antennas_rx; aarx++) {
       //clear average level
       //printf("aatx = %d, aarx = %d, aatx*frame_parms->nb_antennas_rx + aarx] = %d \n", aatx, aarx, aatx*frame_parms->nb_antennas_rx + aarx);
-      avg128D = _mm_setzero_si128();
+      avg128D = simde_mm_setzero_si128();
       // 5 is always a symbol with no pilots for both normal and extended prefix
-      dl_ch128=(__m128i *)&dl_ch_estimates_ext[aatx*2 + aarx][symbol*frame_parms->N_RB_DL*12];
+      dl_ch128=(simde__m128i *)&dl_ch_estimates_ext[aatx*2 + aarx][symbol*frame_parms->N_RB_DL*12];
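+      // simde_mm_madd_epi16(h,h) squares all eight int16 lanes and adds each
+      // re/im pair, i.e. |h|^2 per complex sample as int32; the >>x pre-shift
+      // keeps the running sum inside int32 across the whole band.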
 
       for (rb=0; rb<nb_rb; rb++) {
         //printf("rb %d : ",rb);
-        avg128D = _mm_add_epi32(avg128D,_mm_srai_epi32(_mm_madd_epi16(dl_ch128[0],dl_ch128[0]),x));
-        avg128D = _mm_add_epi32(avg128D,_mm_srai_epi32(_mm_madd_epi16(dl_ch128[1],dl_ch128[1]),x));
+        avg128D = simde_mm_add_epi32(avg128D,simde_mm_srai_epi32(simde_mm_madd_epi16(dl_ch128[0],dl_ch128[0]),x));
+        avg128D = simde_mm_add_epi32(avg128D,simde_mm_srai_epi32(simde_mm_madd_epi16(dl_ch128[1],dl_ch128[1]),x));
 
-        //avg128D = _mm_add_epi32(avg128D,_mm_madd_epi16(dl_ch128[0],_mm_srai_epi16(_mm_mulhi_epi16(dl_ch128[0], coeff128),15)));
-        //avg128D = _mm_add_epi32(avg128D,_mm_madd_epi16(dl_ch128[1],_mm_srai_epi16(_mm_mulhi_epi16(dl_ch128[1], coeff128),15)));
+        // avg128D = simde_mm_add_epi32(avg128D,simde_mm_madd_epi16(dl_ch128[0],simde_mm_srai_epi16(simde_mm_mulhi_epi16(dl_ch128[0], coeff128),15)));
+        // avg128D = simde_mm_add_epi32(avg128D,simde_mm_madd_epi16(dl_ch128[1],simde_mm_srai_epi16(simde_mm_mulhi_epi16(dl_ch128[1], coeff128),15)));
 
         if (((symbol_mod == 0) || (symbol_mod == (frame_parms->Ncp-1)))&&(frame_parms->nb_antenna_ports_eNB!=1)) {
           dl_ch128+=2;
         } else {
-          avg128D = _mm_add_epi32(avg128D,_mm_srai_epi32(_mm_madd_epi16(dl_ch128[2],dl_ch128[2]),x));
-          //avg128D = _mm_add_epi32(avg128D,_mm_madd_epi16(dl_ch128[2],_mm_srai_epi16(_mm_mulhi_epi16(dl_ch128[2], coeff128),15)));
+          avg128D = simde_mm_add_epi32(avg128D,simde_mm_srai_epi32(simde_mm_madd_epi16(dl_ch128[2],dl_ch128[2]),x));
+          // avg128D = simde_mm_add_epi32(avg128D,simde_mm_madd_epi16(dl_ch128[2],simde_mm_srai_epi16(simde_mm_mulhi_epi16(dl_ch128[2], coeff128),15)));
           dl_ch128+=3;
         }
 
@@ -3462,62 +2765,8 @@ void dlsch_channel_level(int **dl_ch_estimates_ext,
           ((int32_t *)&avg128D)[3])/y;
     }
 
-  _mm_empty();
-  _m_empty();
-#elif defined(__arm__) || defined(__aarch64__)
-  short rb;
-  unsigned char aatx,aarx,nre=12,symbol_mod;
-  int32x4_t avg128D;
-  int16x4_t *dl_ch128;
-  symbol_mod = (symbol>=(7-frame_parms->Ncp)) ? symbol-(7-frame_parms->Ncp) : symbol;
-
-  for (aatx=0; aatx<frame_parms->nb_antenna_ports_eNB; aatx++)
-    for (aarx=0; aarx<frame_parms->nb_antennas_rx; aarx++) {
-      //clear average level
-      avg128D = vdupq_n_s32(0);
-      // 5 is always a symbol with no pilots for both normal and extended prefix
-      dl_ch128=(int16x4_t *)&dl_ch_estimates_ext[aatx*frame_parms->nb_antennas_rx + aarx][symbol*frame_parms->N_RB_DL*12];
-
-      for (rb=0; rb<nb_rb; rb++) {
-        //  printf("rb %d : ",rb);
-        //  print_shorts("ch",&dl_ch128[0]);
-        avg128D = vqaddq_s32(avg128D,vmull_s16(dl_ch128[0],dl_ch128[0]));
-        avg128D = vqaddq_s32(avg128D,vmull_s16(dl_ch128[1],dl_ch128[1]));
-        avg128D = vqaddq_s32(avg128D,vmull_s16(dl_ch128[2],dl_ch128[2]));
-        avg128D = vqaddq_s32(avg128D,vmull_s16(dl_ch128[3],dl_ch128[3]));
-
-        if (((symbol_mod == 0) || (symbol_mod == (frame_parms->Ncp-1)))&&(frame_parms->mode1_flag==0)) {
-          dl_ch128+=4;
-        } else {
-          avg128D = vqaddq_s32(avg128D,vmull_s16(dl_ch128[4],dl_ch128[4]));
-          avg128D = vqaddq_s32(avg128D,vmull_s16(dl_ch128[5],dl_ch128[5]));
-          dl_ch128+=6;
-        }
-
-        /*
-          if (rb==0) {
-          print_shorts("dl_ch128",&dl_ch128[0]);
-          print_shorts("dl_ch128",&dl_ch128[1]);
-          print_shorts("dl_ch128",&dl_ch128[2]);
-          }
-        */
-      }
-
-      if (((symbol_mod == 0) || (symbol_mod == (frame_parms->Ncp-1)))&&(frame_parms->mode1_flag==0))
-        nre=8;
-      else if (((symbol_mod == 0) || (symbol_mod == (frame_parms->Ncp-1)))&&(frame_parms->mode1_flag==1))
-        nre=10;
-      else
-        nre=12;
-
-      avg[aatx*frame_parms->nb_antennas_rx + aarx] = (((int32_t *)&avg128D)[0] +
-          ((int32_t *)&avg128D)[1] +
-          ((int32_t *)&avg128D)[2] +
-          ((int32_t *)&avg128D)[3])/(nb_rb*nre);
-      //printf("Channel level : %d\n",avg[aatx*(frame_parms->nb_antennas_rx-1) + aarx]);
-    }
-
-#endif
+  simde_mm_empty();
+  simde_m_empty();
 }
 
 void dlsch_channel_level_core(int **dl_ch_estimates_ext,
@@ -3526,27 +2775,26 @@ void dlsch_channel_level_core(int **dl_ch_estimates_ext,
                               int n_rx,
                               int length,
                               int start_point) {
-#if defined(__x86_64__)||defined(__i386__)
   short ii;
   int aatx,aarx;
   int length_mod8;
   int length2;
-  __m128i *dl_ch128, avg128D;
+  simde__m128i *dl_ch128, avg128D;
   int16_t x = factor2(length);
   int16_t y = (length)>>x;
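+  // With x = factor2(length) and y = length>>x (so that length == y<<x), the
+  // /length average is taken as a >>x pre-shift of every squared term plus a
+  // final /y, which bounds the int32 accumulator.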
 
   for (aatx=0; aatx<n_tx; aatx++)
     for (aarx=0; aarx<n_rx; aarx++) {
-      avg128D = _mm_setzero_si128();
-      dl_ch128=(__m128i *)&dl_ch_estimates_ext[aatx*2 + aarx][start_point];
+      avg128D = simde_mm_setzero_si128();
+      dl_ch128=(simde__m128i *)&dl_ch_estimates_ext[aatx*2 + aarx][start_point];
       length_mod8=length&7;
 
       if (length_mod8 == 0) {
         length2 = length>>3;
 
         for (ii=0; ii<length2; ii++) {
-          avg128D = _mm_add_epi32(avg128D,_mm_srai_epi32(_mm_madd_epi16(dl_ch128[0],dl_ch128[0]),x));
-          avg128D = _mm_add_epi32(avg128D,_mm_srai_epi32(_mm_madd_epi16(dl_ch128[1],dl_ch128[1]),x));
+          avg128D = simde_mm_add_epi32(avg128D,simde_mm_srai_epi32(simde_mm_madd_epi16(dl_ch128[0],dl_ch128[0]),x));
+          avg128D = simde_mm_add_epi32(avg128D,simde_mm_srai_epi32(simde_mm_madd_epi16(dl_ch128[1],dl_ch128[1]),x));
           dl_ch128+=2;
         }
       } else {
@@ -3561,55 +2809,9 @@ void dlsch_channel_level_core(int **dl_ch_estimates_ext,
       //printf("Channel level [%d]: %d\n",aatx*n_rx + aarx, avg[aatx*n_rx + aarx]);
     }
 
-  _mm_empty();
-  _m_empty();
+  simde_mm_empty();
+  simde_m_empty();
-  /* FIXME This part needs to be adapted like the one above */
-#elif defined(__arm__) || defined(__aarch64__)
-  short rb;
-  unsigned char aatx,aarx,nre=12,symbol_mod;
-  int32x4_t avg128D;
-  int16x4_t *dl_ch128;
-  symbol_mod = (symbol>=(7-frame_parms->Ncp)) ? symbol-(7-frame_parms->Ncp) : symbol;
-
-  for (aatx=0; aatx<frame_parms->nb_antenna_ports_eNB; aatx++)
-    for (aarx=0; aarx<frame_parms->nb_antennas_rx; aarx++) {
-      //clear average level
-      avg128D = vdupq_n_s32(0);
-      // 5 is always a symbol with no pilots for both normal and extended prefix
-      dl_ch128=(int16x4_t *)&dl_ch_estimates_ext[aatx*frame_parms->nb_antennas_rx + aarx][symbol*frame_parms->N_RB_DL*12];
-
-      for (rb=0; rb<nb_rb; rb++) {
-        //  printf("rb %d : ",rb);
-        //  print_shorts("ch",&dl_ch128[0]);
-        avg128D = vqaddq_s32(avg128D,vmull_s16(dl_ch128[0],dl_ch128[0]));
-        avg128D = vqaddq_s32(avg128D,vmull_s16(dl_ch128[1],dl_ch128[1]));
-        avg128D = vqaddq_s32(avg128D,vmull_s16(dl_ch128[2],dl_ch128[2]));
-        avg128D = vqaddq_s32(avg128D,vmull_s16(dl_ch128[3],dl_ch128[3]));
-
-        if (((symbol_mod == 0) || (symbol_mod == (frame_parms->Ncp-1)))&&(frame_parms->nb_antenna_ports_eNB!=1)) {
-          dl_ch128+=4;
-        } else {
-          avg128D = vqaddq_s32(avg128D,vmull_s16(dl_ch128[4],dl_ch128[4]));
-          avg128D = vqaddq_s32(avg128D,vmull_s16(dl_ch128[5],dl_ch128[5]));
-          dl_ch128+=6;
-        }
-      }
-
-      if (((symbol_mod == 0) || (symbol_mod == (frame_parms->Ncp-1)))&&(frame_parms->nb_antenna_ports_eNB!=1))
-        nre=8;
-      else if (((symbol_mod == 0) || (symbol_mod == (frame_parms->Ncp-1)))&&(frame_parms->nb_antenna_ports_eNB==1))
-        nre=10;
-      else
-        nre=12;
-
-      avg[aatx*frame_parms->nb_antennas_rx + aarx] = (((int32_t *)&avg128D)[0] +
-          ((int32_t *)&avg128D)[1] +
-          ((int32_t *)&avg128D)[2] +
-          ((int32_t *)&avg128D)[3])/(nb_rb*nre);
-      //printf("Channel level : %d\n",avg[aatx*(frame_parms->nb_antennas_rx-1) + aarx]);
-    }
-
-#endif
 }
 
 void dlsch_channel_level_median(int **dl_ch_estimates_ext,
@@ -3618,24 +2820,23 @@ void dlsch_channel_level_median(int **dl_ch_estimates_ext,
                                 int n_rx,
                                 int length,
                                 int start_point) {
-#if defined(__x86_64__)||defined(__i386__)
   short ii;
   int aatx,aarx;
   int length2;
   int max = 0, min=0;
   int norm_pack;
-  __m128i *dl_ch128, norm128D;
+  simde__m128i *dl_ch128, norm128D;
 
   for (aatx=0; aatx<n_tx; aatx++) {
     for (aarx=0; aarx<n_rx; aarx++) {
       max = 0;
       min = 0;
-      norm128D = _mm_setzero_si128();
-      dl_ch128=(__m128i *)&dl_ch_estimates_ext[aatx*2 + aarx][start_point];
+      norm128D = simde_mm_setzero_si128();
+      dl_ch128=(simde__m128i *)&dl_ch_estimates_ext[aatx*2 + aarx][start_point];
       length2 = length>>2;
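+      // Per vector: madd gives re^2+im^2 for four complex samples, halved
+      // (>>1) against int32 saturation; max/min of these energies are tracked
+      // and the reported "median" is their midrange (max+min)/2.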
 
       for (ii=0; ii<length2; ii++) {
-        norm128D = _mm_srai_epi32( _mm_madd_epi16(dl_ch128[0],dl_ch128[0]), 1);
+        norm128D = simde_mm_srai_epi32(simde_mm_madd_epi16(dl_ch128[0],dl_ch128[0]), 1);
         //print_ints("norm128D",&norm128D[0]);
         norm_pack = ((int32_t *)&norm128D)[0] +
                     ((int32_t *)&norm128D)[1] +
@@ -3656,45 +2857,8 @@ void dlsch_channel_level_median(int **dl_ch_estimates_ext,
     }
   }
 
-  _mm_empty();
-  _m_empty();
-#elif defined(__arm__) || defined(__aarch64__)
-  short rb;
-  unsigned char aatx,aarx,nre=12,symbol_mod;
-  int32x4_t norm128D;
-  int16x4_t *dl_ch128;
-
-  for (aatx=0; aatx<frame_parms->nb_antenna_ports_eNB; aatx++) {
-    for (aarx=0; aarx<frame_parms->nb_antennas_rx; aarx++) {
-      max = 0;
-      min = 0;
-      norm128D = vdupq_n_s32(0);
-      dl_ch128=(int16x4_t *)&dl_ch_estimates_ext[aatx*n_rx + aarx][start_point];
-      length_mod8=length&3;
-      length2 = length>>2;
-
-      for (ii=0; ii<length2; ii++) {
-        norm128D = vshrq_n_u32(vmull_s16(dl_ch128[0],dl_ch128[0]), 1);
-        norm_pack = ((int32_t *)&norm128D)[0] +
-                    ((int32_t *)&norm128D)[1] +
-                    ((int32_t *)&norm128D)[2] +
-                    ((int32_t *)&norm128D)[3];
-
-        if (norm_pack > max)
-          max = norm_pack;
-
-        if (norm_pack < min)
-          min = norm_pack;
-
-        dl_ch128+=1;
-      }
-
-      median[aatx*n_rx + aarx]  = (max+min)>>1;
-      //printf("Channel level  median [%d]: %d\n",aatx*n_rx + aarx, median[aatx*n_rx + aarx]);
-    }
-  }
-
-#endif
+  simde_mm_empty();
+  simde_m_empty();
 }
 
 void mmse_processing_oai(LTE_UE_PDSCH *pdsch_vars,
@@ -3958,10 +3122,9 @@ void dlsch_channel_aver_band(int **dl_ch_estimates_ext,
                              struct complex32 *chan_avg,
                              unsigned char symbol,
                              unsigned short nb_rb) {
-#if defined(__x86_64__)||defined(__i386__)
   short rb;
   unsigned char aatx,aarx,nre=12,symbol_mod;
-  __m128i *dl_ch128, avg128D;
+  simde__m128i *dl_ch128, avg128D;
   int32_t chan_est_avg[4];
   symbol_mod = (symbol>=(7-frame_parms->Ncp)) ? symbol-(7-frame_parms->Ncp) : symbol;
 
@@ -3974,8 +3137,8 @@ void dlsch_channel_aver_band(int **dl_ch_estimates_ext,
 
   for (aatx=0; aatx<frame_parms->nb_antenna_ports_eNB; aatx++) {
     for (aarx=0; aarx<frame_parms->nb_antennas_rx; aarx++) {
-      dl_ch128=(__m128i *)&dl_ch_estimates_ext[aatx*frame_parms->nb_antennas_rx + aarx][symbol*frame_parms->N_RB_DL*12];
-      avg128D = _mm_setzero_si128();
+      dl_ch128=(simde__m128i *)&dl_ch_estimates_ext[aatx*frame_parms->nb_antennas_rx + aarx][symbol*frame_parms->N_RB_DL*12];
+      avg128D = simde_mm_setzero_si128();
       //  print_shorts("avg128D 1",&avg128D);
 
       for (rb=0; rb<nb_rb; rb++) {
@@ -3983,15 +3146,15 @@ void dlsch_channel_aver_band(int **dl_ch_estimates_ext,
           print_shorts("aver dl_ch128",&dl_ch128[0]);
           print_shorts("aver dl_ch128",&dl_ch128[1]);
           print_shorts("aver dl_ch128",&dl_ch128[2]);
-        avg128D = _mm_add_epi16(avg128D, dl_ch128[0]);*/
+        avg128D = simde_mm_add_epi16(avg128D, dl_ch128[0]);*/
         //print_shorts("avg128D 2",&avg128D);
-        avg128D = _mm_add_epi16(avg128D, dl_ch128[1]);
+        avg128D = simde_mm_add_epi16(avg128D, dl_ch128[1]);
         //  print_shorts("avg128D 3",&avg128D);
 
         if (((symbol_mod == 0) || (symbol_mod == (frame_parms->Ncp-1)))&&(frame_parms->nb_antenna_ports_eNB!=1)) {
           dl_ch128+=2;
         } else {
-          avg128D = _mm_add_epi16(avg128D,dl_ch128[2]);
+          avg128D = simde_mm_add_epi16(avg128D,dl_ch128[2]);
           //  print_shorts("avg128D 4",&avg128D);
           dl_ch128+=3;
         }
@@ -4010,26 +3173,24 @@ void dlsch_channel_aver_band(int **dl_ch_estimates_ext,
       //printf("symb %d chan_avg im [%d] = %d\n", symbol, aatx*frame_parms->nb_antennas_rx + aarx, chan_avg[aatx*frame_parms->nb_antennas_rx + aarx].i);
       chan_est_avg[aatx*frame_parms->nb_antennas_rx + aarx] = (((int32_t)chan_avg[aatx*frame_parms->nb_antennas_rx + aarx].i)<<16)|(((int32_t)chan_avg[aatx*frame_parms->nb_antennas_rx + aarx].r) & 0xffff);
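+      // Packing the averaged gain as (im<<16)|re lets a single
+      // simde_mm_set1_epi32 below stamp the same int16 re/im pair on every RE
+      // of the band.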
       //printf("symb %d chan_est_avg [%d] = %d\n", symbol, aatx*frame_parms->nb_antennas_rx + aarx, chan_est_avg[aatx*frame_parms->nb_antennas_rx + aarx]);
-      dl_ch128=(__m128i *)&dl_ch_estimates_ext[aatx*frame_parms->nb_antennas_rx + aarx][symbol*frame_parms->N_RB_DL*12];
+      dl_ch128=(simde__m128i *)&dl_ch_estimates_ext[aatx*frame_parms->nb_antennas_rx + aarx][symbol*frame_parms->N_RB_DL*12];
 
       for (rb=0; rb<nb_rb; rb++) {
-        dl_ch128[0] = _mm_set1_epi32(chan_est_avg[aatx*frame_parms->nb_antennas_rx + aarx]);
-        dl_ch128[1] = _mm_set1_epi32(chan_est_avg[aatx*frame_parms->nb_antennas_rx + aarx]);
+        dl_ch128[0] = simde_mm_set1_epi32(chan_est_avg[aatx*frame_parms->nb_antennas_rx + aarx]);
+        dl_ch128[1] = simde_mm_set1_epi32(chan_est_avg[aatx*frame_parms->nb_antennas_rx + aarx]);
 
         if (((symbol_mod == 0) || (symbol_mod == (frame_parms->Ncp-1)))&&(frame_parms->nb_antenna_ports_eNB!=1)) {
           dl_ch128+=2;
         } else {
-          dl_ch128[2] = _mm_set1_epi32(chan_est_avg[aatx*frame_parms->nb_antennas_rx + aarx]);
+          dl_ch128[2] = simde_mm_set1_epi32(chan_est_avg[aatx*frame_parms->nb_antennas_rx + aarx]);
           dl_ch128+=3;
         }
       }
     }
   }
 
-  _mm_empty();
-  _m_empty();
-#elif defined(__arm__) || defined(__aarch64__)
-#endif
+  simde_mm_empty();
+  simde_m_empty();
 }
 
 void rxdataF_to_float(int32_t **rxdataF_ext,
@@ -4285,14 +3446,13 @@ void dlsch_channel_level_TM34(int **dl_ch_estimates_ext,
                               unsigned short nb_rb,
                               unsigned int mmse_flag,
                               MIMO_mode_t mimo_mode) {
-#if defined(__x86_64__)||defined(__i386__)
   short rb;
   unsigned char aarx,nre=12,symbol_mod;
-  __m128i *dl_ch0_128,*dl_ch1_128, dl_ch0_128_tmp, dl_ch1_128_tmp, avg_0_128D, avg_1_128D;
+  simde__m128i *dl_ch0_128,*dl_ch1_128, dl_ch0_128_tmp, dl_ch1_128_tmp, avg_0_128D, avg_1_128D;
   symbol_mod = (symbol>=(7-frame_parms->Ncp)) ? symbol-(7-frame_parms->Ncp) : symbol;
   //clear average level
-  // avg_0_128D = _mm_setzero_si128();
-  // avg_1_128D = _mm_setzero_si128();
+  // avg_0_128D = simde_mm_setzero_si128();
+  // avg_1_128D = simde_mm_setzero_si128();
   avg_0[0] = 0;
   avg_0[1] = 0;
   avg_1[0] = 0;
@@ -4307,17 +3467,17 @@ void dlsch_channel_level_TM34(int **dl_ch_estimates_ext,
     nre=12;
 
   for (aarx=0; aarx<frame_parms->nb_antennas_rx; aarx++) {
-    dl_ch0_128 = (__m128i *)&dl_ch_estimates_ext[aarx][symbol*frame_parms->N_RB_DL*12];
-    dl_ch1_128 = (__m128i *)&dl_ch_estimates_ext[2+aarx][symbol*frame_parms->N_RB_DL*12];
-    avg_0_128D = _mm_setzero_si128();
-    avg_1_128D = _mm_setzero_si128();
+    dl_ch0_128 = (simde__m128i *)&dl_ch_estimates_ext[aarx][symbol*frame_parms->N_RB_DL*12];
+    dl_ch1_128 = (simde__m128i *)&dl_ch_estimates_ext[2+aarx][symbol*frame_parms->N_RB_DL*12];
+    avg_0_128D = simde_mm_setzero_si128();
+    avg_1_128D = simde_mm_setzero_si128();
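+    // Levels are measured on the *effective* channel: each estimate pair is
+    // copied into tmp registers, run through the TM3/TM4 precoder
+    // (prec2A_*_128) and only then squared and accumulated.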
 
     for (rb=0; rb<nb_rb; rb++) {
       // printf("rb %d : \n",rb);
       //print_shorts("ch0\n",&dl_ch0_128[0]);
       //print_shorts("ch1\n",&dl_ch1_128[0]);
-      dl_ch0_128_tmp = _mm_load_si128(&dl_ch0_128[0]);
-      dl_ch1_128_tmp = _mm_load_si128(&dl_ch1_128[0]);
+      dl_ch0_128_tmp = simde_mm_load_si128(&dl_ch0_128[0]);
+      dl_ch1_128_tmp = simde_mm_load_si128(&dl_ch1_128[0]);
 
       if (mmse_flag == 0) {
         if (mimo_mode==LARGE_CDD)
@@ -4330,11 +3490,11 @@ void dlsch_channel_level_TM34(int **dl_ch_estimates_ext,
           prec2A_TM4_128(pmi_ext[rb],&dl_ch0_128_tmp,&dl_ch1_128_tmp);
       }
 
-      //      mmtmpD0 = _mm_madd_epi16(dl_ch0_128_tmp,dl_ch0_128_tmp);
-      avg_0_128D = _mm_add_epi32(avg_0_128D,_mm_madd_epi16(dl_ch0_128_tmp,dl_ch0_128_tmp));
-      avg_1_128D = _mm_add_epi32(avg_1_128D,_mm_madd_epi16(dl_ch1_128_tmp,dl_ch1_128_tmp));
-      dl_ch0_128_tmp = _mm_load_si128(&dl_ch0_128[1]);
-      dl_ch1_128_tmp = _mm_load_si128(&dl_ch1_128[1]);
+      //      mmtmpD0 = simde_mm_madd_epi16(dl_ch0_128_tmp,dl_ch0_128_tmp);
+      avg_0_128D = simde_mm_add_epi32(avg_0_128D,simde_mm_madd_epi16(dl_ch0_128_tmp,dl_ch0_128_tmp));
+      avg_1_128D = simde_mm_add_epi32(avg_1_128D,simde_mm_madd_epi16(dl_ch1_128_tmp,dl_ch1_128_tmp));
+      dl_ch0_128_tmp = simde_mm_load_si128(&dl_ch0_128[1]);
+      dl_ch1_128_tmp = simde_mm_load_si128(&dl_ch1_128[1]);
 
       if (mmse_flag == 0) {
         if (mimo_mode==LARGE_CDD)
@@ -4347,16 +3507,16 @@ void dlsch_channel_level_TM34(int **dl_ch_estimates_ext,
           prec2A_TM4_128(pmi_ext[rb],&dl_ch0_128_tmp,&dl_ch1_128_tmp);
       }
 
-      //      mmtmpD1 = _mm_madd_epi16(dl_ch0_128_tmp,dl_ch0_128_tmp);
-      avg_0_128D = _mm_add_epi32(avg_0_128D,_mm_madd_epi16(dl_ch0_128_tmp,dl_ch0_128_tmp));
-      avg_1_128D = _mm_add_epi32(avg_1_128D,_mm_madd_epi16(dl_ch1_128_tmp,dl_ch1_128_tmp));
+      //      mmtmpD1 = simde_mm_madd_epi16(dl_ch0_128_tmp,dl_ch0_128_tmp);
+      avg_0_128D = simde_mm_add_epi32(avg_0_128D,simde_mm_madd_epi16(dl_ch0_128_tmp,dl_ch0_128_tmp));
+      avg_1_128D = simde_mm_add_epi32(avg_1_128D,simde_mm_madd_epi16(dl_ch1_128_tmp,dl_ch1_128_tmp));
 
       if (((symbol_mod == 0) || (symbol_mod == (frame_parms->Ncp-1)))&&(frame_parms->nb_antenna_ports_eNB!=1)) {
         dl_ch0_128+=2;
         dl_ch1_128+=2;
       } else {
-        dl_ch0_128_tmp = _mm_load_si128(&dl_ch0_128[2]);
-        dl_ch1_128_tmp = _mm_load_si128(&dl_ch1_128[2]);
+        dl_ch0_128_tmp = simde_mm_load_si128(&dl_ch0_128[2]);
+        dl_ch1_128_tmp = simde_mm_load_si128(&dl_ch1_128[2]);
 
         if (mmse_flag == 0) {
           if (mimo_mode==LARGE_CDD)
@@ -4369,9 +3529,9 @@ void dlsch_channel_level_TM34(int **dl_ch_estimates_ext,
             prec2A_TM4_128(pmi_ext[rb],&dl_ch0_128_tmp,&dl_ch1_128_tmp);
         }
 
-        //      mmtmpD2 = _mm_madd_epi16(dl_ch0_128_tmp,dl_ch0_128_tmp);
-        avg_1_128D = _mm_add_epi32(avg_1_128D,_mm_madd_epi16(dl_ch1_128_tmp,dl_ch1_128_tmp));
-        avg_0_128D = _mm_add_epi32(avg_0_128D,_mm_madd_epi16(dl_ch0_128_tmp,dl_ch0_128_tmp));
+        //      mmtmpD2 = simde_mm_madd_epi16(dl_ch0_128_tmp,dl_ch0_128_tmp);
+        avg_1_128D = simde_mm_add_epi32(avg_1_128D,simde_mm_madd_epi16(dl_ch1_128_tmp,dl_ch1_128_tmp));
+        avg_0_128D = simde_mm_add_epi32(avg_0_128D,simde_mm_madd_epi16(dl_ch0_128_tmp,dl_ch0_128_tmp));
         dl_ch0_128+=3;
         dl_ch1_128+=3;
       }
@@ -4398,10 +3558,8 @@ void dlsch_channel_level_TM34(int **dl_ch_estimates_ext,
   // printf("From Chan_level aver stream 1 final =%d\n", avg_1[0]);
   avg_0[0] = min (avg_0[0], avg_1[0]);
   avg_1[0] = avg_0[0];
-  _mm_empty();
-  _m_empty();
-#elif defined(__arm__) || defined(__aarch64__)
-#endif
+  simde_mm_empty();
+  simde_m_empty();
 }
 
 //compute average channel_level of effective (precoded) channel
@@ -4411,13 +3569,12 @@ void dlsch_channel_level_TM56(int **dl_ch_estimates_ext,
                               int *avg,
                               uint8_t symbol,
                               unsigned short nb_rb) {
-#if defined(__x86_64__)||defined(__i386__)
   short rb;
   unsigned char aarx,nre=12,symbol_mod;
-  __m128i *dl_ch0_128,*dl_ch1_128, dl_ch0_128_tmp, dl_ch1_128_tmp,avg128D;
+  simde__m128i *dl_ch0_128,*dl_ch1_128, dl_ch0_128_tmp, dl_ch1_128_tmp,avg128D;
   symbol_mod = (symbol>=(7-frame_parms->Ncp)) ? symbol-(7-frame_parms->Ncp) : symbol;
   //clear average level
-  avg128D = _mm_setzero_si128();
+  avg128D = simde_mm_setzero_si128();
   avg[0] = 0;
   avg[1] = 0;
   // 5 is always a symbol with no pilots for both normal and extended prefix
@@ -4430,30 +3587,30 @@ void dlsch_channel_level_TM56(int **dl_ch_estimates_ext,
     nre=12;
 
   for (aarx=0; aarx<frame_parms->nb_antennas_rx; aarx++) {
-    dl_ch0_128 = (__m128i *)&dl_ch_estimates_ext[aarx][symbol*frame_parms->N_RB_DL*12];
-    dl_ch1_128 = (__m128i *)&dl_ch_estimates_ext[2+aarx][symbol*frame_parms->N_RB_DL*12];
+    dl_ch0_128 = (simde__m128i *)&dl_ch_estimates_ext[aarx][symbol*frame_parms->N_RB_DL*12];
+    dl_ch1_128 = (simde__m128i *)&dl_ch_estimates_ext[2+aarx][symbol*frame_parms->N_RB_DL*12];
 
     for (rb=0; rb<nb_rb; rb++) {
-      dl_ch0_128_tmp = _mm_load_si128(&dl_ch0_128[0]);
-      dl_ch1_128_tmp = _mm_load_si128(&dl_ch1_128[0]);
+      dl_ch0_128_tmp = simde_mm_load_si128(&dl_ch0_128[0]);
+      dl_ch1_128_tmp = simde_mm_load_si128(&dl_ch1_128[0]);
       prec2A_TM56_128(pmi_ext[rb],&dl_ch0_128_tmp,&dl_ch1_128_tmp);
-      //      mmtmpD0 = _mm_madd_epi16(dl_ch0_128_tmp,dl_ch0_128_tmp);
-      avg128D = _mm_add_epi32(avg128D,_mm_madd_epi16(dl_ch0_128_tmp,dl_ch0_128_tmp));
-      dl_ch0_128_tmp = _mm_load_si128(&dl_ch0_128[1]);
-      dl_ch1_128_tmp = _mm_load_si128(&dl_ch1_128[1]);
+      //      mmtmpD0 = simde_mm_madd_epi16(dl_ch0_128_tmp,dl_ch0_128_tmp);
+      avg128D = simde_mm_add_epi32(avg128D,simde_mm_madd_epi16(dl_ch0_128_tmp,dl_ch0_128_tmp));
+      dl_ch0_128_tmp = simde_mm_load_si128(&dl_ch0_128[1]);
+      dl_ch1_128_tmp = simde_mm_load_si128(&dl_ch1_128[1]);
       prec2A_TM56_128(pmi_ext[rb],&dl_ch0_128_tmp,&dl_ch1_128_tmp);
-      //      mmtmpD1 = _mm_madd_epi16(dl_ch0_128_tmp,dl_ch0_128_tmp);
-      avg128D = _mm_add_epi32(avg128D,_mm_madd_epi16(dl_ch0_128_tmp,dl_ch0_128_tmp));
+      //      mmtmpD1 = simde_mm_madd_epi16(dl_ch0_128_tmp,dl_ch0_128_tmp);
+      avg128D = simde_mm_add_epi32(avg128D,simde_mm_madd_epi16(dl_ch0_128_tmp,dl_ch0_128_tmp));
 
       if (((symbol_mod == 0) || (symbol_mod == (frame_parms->Ncp-1)))&&(frame_parms->nb_antenna_ports_eNB!=1)) {
         dl_ch0_128+=2;
         dl_ch1_128+=2;
       } else {
-        dl_ch0_128_tmp = _mm_load_si128(&dl_ch0_128[2]);
-        dl_ch1_128_tmp = _mm_load_si128(&dl_ch1_128[2]);
+        dl_ch0_128_tmp = simde_mm_load_si128(&dl_ch0_128[2]);
+        dl_ch1_128_tmp = simde_mm_load_si128(&dl_ch1_128[2]);
         prec2A_TM56_128(pmi_ext[rb],&dl_ch0_128_tmp,&dl_ch1_128_tmp);
-        //      mmtmpD2 = _mm_madd_epi16(dl_ch0_128_tmp,dl_ch0_128_tmp);
-        avg128D = _mm_add_epi32(avg128D,_mm_madd_epi16(dl_ch0_128_tmp,dl_ch0_128_tmp));
+        //      mmtmpD2 = simde_mm_madd_epi16(dl_ch0_128_tmp,dl_ch0_128_tmp);
+        avg128D = simde_mm_add_epi32(avg128D,simde_mm_madd_epi16(dl_ch0_128_tmp,dl_ch0_128_tmp));
         dl_ch0_128+=3;
         dl_ch1_128+=3;
       }
@@ -4467,10 +3624,8 @@ void dlsch_channel_level_TM56(int **dl_ch_estimates_ext,
 
   // choose maximum of the 2 effective channels
   avg[0] = cmax(avg[0],avg[1]);
-  _mm_empty();
-  _m_empty();
-#elif defined(__arm__) || defined(__aarch64__)
-#endif
+  simde_mm_empty();
+  simde_m_empty();
 }
 
 //compute average channel_level for TM7
@@ -4479,29 +3634,28 @@ void dlsch_channel_level_TM7(int **dl_bf_ch_estimates_ext,
                              int *avg,
                              uint8_t symbol,
                              unsigned short nb_rb) {
-#if defined(__x86_64__)||defined(__i386__)
   short rb;
   unsigned char aatx,aarx,nre=12,symbol_mod;
-  __m128i *dl_ch128,avg128D;
+  simde__m128i *dl_ch128,avg128D;
   symbol_mod = (symbol>=(7-frame_parms->Ncp)) ? symbol-(7-frame_parms->Ncp) : symbol;
 
   for (aatx=0; aatx<frame_parms->nb_antenna_ports_eNB; aatx++)
     for (aarx=0; aarx<frame_parms->nb_antennas_rx; aarx++) {
       //clear average level
-      avg128D = _mm_setzero_si128();
+      avg128D = simde_mm_setzero_si128();
       // 5 is always a symbol with no pilots for both normal and extended prefix
-      dl_ch128=(__m128i *)&dl_bf_ch_estimates_ext[(aatx<<1)+aarx][symbol*frame_parms->N_RB_DL*12];
+      dl_ch128=(simde__m128i *)&dl_bf_ch_estimates_ext[(aatx<<1)+aarx][symbol*frame_parms->N_RB_DL*12];
 
       for (rb=0; rb<nb_rb; rb++) {
         //  printf("rb %d : ",rb);
         //  print_shorts("ch",&dl_ch128[0]);
-        avg128D = _mm_add_epi32(avg128D,_mm_madd_epi16(dl_ch128[0],dl_ch128[0]));
-        avg128D = _mm_add_epi32(avg128D,_mm_madd_epi16(dl_ch128[1],dl_ch128[1]));
+        avg128D = simde_mm_add_epi32(avg128D,simde_mm_madd_epi16(dl_ch128[0],dl_ch128[0]));
+        avg128D = simde_mm_add_epi32(avg128D,simde_mm_madd_epi16(dl_ch128[1],dl_ch128[1]));
 
         if (((symbol_mod == 0) || (symbol_mod == (frame_parms->Ncp-1)))&&(frame_parms->nb_antenna_ports_eNB!=1)) {
           dl_ch128+=2;
         } else {
-          avg128D = _mm_add_epi32(avg128D,_mm_madd_epi16(dl_ch128[2],dl_ch128[2]));
+          avg128D = simde_mm_add_epi32(avg128D,simde_mm_madd_epi16(dl_ch128[2],dl_ch128[2]));
           dl_ch128+=3;
         }
 
@@ -4530,10 +3684,8 @@ void dlsch_channel_level_TM7(int **dl_bf_ch_estimates_ext,
       //            printf("Channel level : %d\n",avg[(aatx<<1)+aarx]);
     }
 
-  _mm_empty();
-  _m_empty();
-#elif defined(__arm__) || defined(__aarch64__)
-#endif
+  simde_mm_empty();
+  simde_m_empty();
 }
 //#define ONE_OVER_2_Q15 16384
 void dlsch_alamouti(LTE_DL_FRAME_PARMS *frame_parms,
@@ -4542,21 +3694,20 @@ void dlsch_alamouti(LTE_DL_FRAME_PARMS *frame_parms,
                     int **dl_ch_magb,
                     unsigned char symbol,
                     unsigned short nb_rb) {
-#if defined(__x86_64__)||defined(__i386__)
   short *rxF0,*rxF1;
-  __m128i *ch_mag0,*ch_mag1,*ch_mag0b,*ch_mag1b;
+  simde__m128i *ch_mag0,*ch_mag1,*ch_mag0b,*ch_mag1b;
   unsigned char rb,re;
   int jj = (symbol*frame_parms->N_RB_DL*12);
   uint8_t symbol_mod = (symbol>=(7-frame_parms->Ncp)) ? symbol-(7-frame_parms->Ncp) : symbol;
   uint8_t pilots = ((symbol_mod==0)||(symbol_mod==(4-frame_parms->Ncp))) ? 1 : 0;
-  //amp = _mm_set1_epi16(ONE_OVER_2_Q15);
-  //    printf("Doing alamouti!\n");
+  // amp = simde_mm_set1_epi16(ONE_OVER_2_Q15);
+  //     printf("Doing alamouti!\n");
   rxF0     = (short *)&rxdataF_comp[0][jj]; //tx antenna 0  h0*y
   rxF1     = (short *)&rxdataF_comp[2][jj]; //tx antenna 1  h1*y
-  ch_mag0 = (__m128i *)&dl_ch_mag[0][jj];
-  ch_mag1 = (__m128i *)&dl_ch_mag[2][jj];
-  ch_mag0b = (__m128i *)&dl_ch_magb[0][jj];
-  ch_mag1b = (__m128i *)&dl_ch_magb[2][jj];
+  ch_mag0 = (simde__m128i *)&dl_ch_mag[0][jj];
+  ch_mag1 = (simde__m128i *)&dl_ch_mag[2][jj];
+  ch_mag0b = (simde__m128i *)&dl_ch_magb[0][jj];
+  ch_mag1b = (simde__m128i *)&dl_ch_magb[2][jj];
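+  // Alamouti combining leaves both streams with the same effective gain
+  // |h0|^2+|h1|^2, so the 16QAM/64QAM LLR scales below are simply the
+  // saturating sums ch_mag0+ch_mag1 and ch_mag0b+ch_mag1b.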
 
   for (rb=0; rb<nb_rb; rb++) {
     for (re=0; re<((pilots==0)?12:8); re+=2) {
@@ -4572,22 +3723,24 @@ void dlsch_alamouti(LTE_DL_FRAME_PARMS *frame_parms,
     }
 
     // compute levels for 16QAM or 64 QAM llr unit
-    ch_mag0[0] = _mm_adds_epi16(ch_mag0[0],ch_mag1[0]);
-    ch_mag0[1] = _mm_adds_epi16(ch_mag0[1],ch_mag1[1]);
-    ch_mag0b[0] = _mm_adds_epi16(ch_mag0b[0],ch_mag1b[0]);
-    ch_mag0b[1] = _mm_adds_epi16(ch_mag0b[1],ch_mag1b[1]);
+    ch_mag0[0] = simde_mm_adds_epi16(ch_mag0[0],ch_mag1[0]);
+    ch_mag0[1] = simde_mm_adds_epi16(ch_mag0[1],ch_mag1[1]);
+    ch_mag0b[0] = simde_mm_adds_epi16(ch_mag0b[0],ch_mag1b[0]);
+    ch_mag0b[1] = simde_mm_adds_epi16(ch_mag0b[1],ch_mag1b[1]);
 
     // account for 1/sqrt(2) scaling at transmission
-    //ch_mag0[0] = _mm_srai_epi16(ch_mag0[0],1);
-    //ch_mag0[1] = _mm_srai_epi16(ch_mag0[1],1);
-    //ch_mag0b[0] = _mm_srai_epi16(ch_mag0b[0],1);
-    //ch_mag0b[1] = _mm_srai_epi16(ch_mag0b[1],1);
+    //ch_mag0[0] = simde_mm_srai_epi16(ch_mag0[0],1);
+    //ch_mag0[1] = simde_mm_srai_epi16(ch_mag0[1],1);
+    //ch_mag0b[0] = simde_mm_srai_epi16(ch_mag0b[0],1);
+    //ch_mag0b[1] = simde_mm_srai_epi16(ch_mag0b[1],1);
 
     if (pilots==0) {
-      ch_mag0[2] = _mm_adds_epi16(ch_mag0[2],ch_mag1[2]);
-      ch_mag0b[2] = _mm_adds_epi16(ch_mag0b[2],ch_mag1b[2]);
-      //ch_mag0[2] = _mm_srai_epi16(ch_mag0[2],1);
-      //ch_mag0b[2] = _mm_srai_epi16(ch_mag0b[2],1);
+      ch_mag0[2] = simde_mm_adds_epi16(ch_mag0[2],ch_mag1[2]);
+      ch_mag0b[2] = simde_mm_adds_epi16(ch_mag0b[2],ch_mag1b[2]);
+      //ch_mag0[2] = simde_mm_srai_epi16(ch_mag0[2],1);
+      //ch_mag0b[2] = simde_mm_srai_epi16(ch_mag0b[2],1);
       ch_mag0+=3;
       ch_mag1+=3;
       ch_mag0b+=3;
@@ -4600,13 +3753,10 @@ void dlsch_alamouti(LTE_DL_FRAME_PARMS *frame_parms,
     }
   }
 
-  _mm_empty();
-  _m_empty();
-#elif defined(__arm__) || defined(__aarch64__)
-#endif
+  simde_mm_empty();
+  simde_m_empty();
 }
 
-
 //==============================================================================================
 // Extraction functions
 //==============================================================================================
@@ -6436,9 +5586,9 @@ unsigned short dlsch_extract_rbs_TM7(int **rxdataF,
       }
     }
   }
-
-  _mm_empty();
-  _m_empty();
+
+  simde_mm_empty();
+  simde_m_empty();
   return(nb_rb/frame_parms->nb_antennas_rx);
 }
 
diff --git a/openair1/PHY/LTE_UE_TRANSPORT/dlsch_llr_computation.c b/openair1/PHY/LTE_UE_TRANSPORT/dlsch_llr_computation.c
index c5478a634095cbb0b011656f883d6838cfcd10a5..383218def9ac3715fffb173ae2f83c2d2b1c2d2b 100644
--- a/openair1/PHY/LTE_UE_TRANSPORT/dlsch_llr_computation.c
+++ b/openair1/PHY/LTE_UE_TRANSPORT/dlsch_llr_computation.c
@@ -42,78 +42,73 @@
 
 const int16_t zeros[8] __attribute__((aligned(16))) = {0, 0, 0, 0, 0, 0, 0, 0};
 const int16_t ones[8] __attribute__((aligned(16))) = {0xffff, 0xffff, 0xffff, 0xffff, 0xffff, 0xffff, 0xffff, 0xffff};
-#if defined(__x86_64__) || defined(__i386__)
 //==============================================================================================
-// Auxiliary Makros
+// Auxiliary Macros
 
 // calculates psi_a = psi_r*a_r + psi_i*a_i
 #define prodsum_psi_a_epi16(psi_r, a_r, psi_i, a_i, psi_a) \
-  tmp_result = _mm_mulhi_epi16(psi_r, a_r);                \
-  tmp_result = _mm_slli_epi16(tmp_result, 1);              \
-  tmp_result2 = _mm_mulhi_epi16(psi_i, a_i);               \
-  tmp_result2 = _mm_slli_epi16(tmp_result2, 1);            \
-  simde__m128i psi_a = _mm_adds_epi16(tmp_result, tmp_result2);
+  tmp_result = simde_mm_mulhi_epi16(psi_r, a_r);           \
+  tmp_result = simde_mm_slli_epi16(tmp_result, 1);         \
+  tmp_result2 = simde_mm_mulhi_epi16(psi_i, a_i);          \
+  tmp_result2 = simde_mm_slli_epi16(tmp_result2, 1);       \
+  simde__m128i psi_a = simde_mm_adds_epi16(tmp_result, tmp_result2);
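+// (mulhi keeps the top 16 bits of the 32-bit product, i.e. (a*b)>>16; the
+// following <<1 turns it into a Q15 multiply, (a*b)>>15, up to the dropped LSB.)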
 
 // calculate interference magnitude
-#define interference_abs_epi16(psi, int_ch_mag, int_mag, c1, c2)   \
-  tmp_result = _mm_cmplt_epi16(psi, int_ch_mag);                   \
-  tmp_result2 = _mm_xor_si128(tmp_result, (*(__m128i *)&ones[0])); \
-  tmp_result = _mm_and_si128(tmp_result, c1);                      \
-  tmp_result2 = _mm_and_si128(tmp_result2, c2);                    \
-  simde__m128i int_mag = _mm_or_si128(tmp_result, tmp_result2);
+#define interference_abs_epi16(psi, int_ch_mag, int_mag, c1, c2)             \
+  tmp_result = simde_mm_cmplt_epi16(psi, int_ch_mag);                        \
+  tmp_result2 = simde_mm_xor_si128(tmp_result, (*(simde__m128i *)&ones[0])); \
+  tmp_result = simde_mm_and_si128(tmp_result, c1);                           \
+  tmp_result2 = simde_mm_and_si128(tmp_result2, c2);                         \
+  simde__m128i int_mag = simde_mm_or_si128(tmp_result, tmp_result2);
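+// (Branch-free select: the cmplt mask routes c1 where psi < int_ch_mag and c2
+// elsewhere, by ANDing each constant with the mask or its complement and ORing.)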
 
 // calculate interference magnitude
-// tmp_result = ones in shorts corr. to interval 2<=x<=4, tmp_result2 interval < 2, tmp_result3 interval 4<x<6 and tmp_result4 interval x>6
+// tmp_result = all-ones shorts on the interval 2<=x<=4, tmp_result2 on x<2, tmp_result3 on 4<x<6, tmp_result4 on x>6
 #define interference_abs_64qam_epi16(psi, int_ch_mag, int_two_ch_mag, int_three_ch_mag, a, c1, c3, c5, c7) \
-  tmp_result = _mm_cmplt_epi16(psi, int_two_ch_mag);                                                       \
-  tmp_result3 = _mm_xor_si128(tmp_result, (*(__m128i *)&ones[0]));                                         \
-  tmp_result2 = _mm_cmplt_epi16(psi, int_ch_mag);                                                          \
-  tmp_result = _mm_xor_si128(tmp_result, tmp_result2);                                                     \
-  tmp_result4 = _mm_cmpgt_epi16(psi, int_three_ch_mag);                                                    \
-  tmp_result3 = _mm_xor_si128(tmp_result3, tmp_result4);                                                   \
-  tmp_result = _mm_and_si128(tmp_result, c3);                                                              \
-  tmp_result2 = _mm_and_si128(tmp_result2, c1);                                                            \
-  tmp_result3 = _mm_and_si128(tmp_result3, c5);                                                            \
-  tmp_result4 = _mm_and_si128(tmp_result4, c7);                                                            \
-  tmp_result = _mm_or_si128(tmp_result, tmp_result2);                                                      \
-  tmp_result3 = _mm_or_si128(tmp_result3, tmp_result4);                                                    \
-  simde__m128i a = _mm_or_si128(tmp_result, tmp_result3);
+  tmp_result = simde_mm_cmplt_epi16(psi, int_two_ch_mag);                                                  \
+  tmp_result3 = simde_mm_xor_si128(tmp_result, (*(simde__m128i *)&ones[0]));                               \
+  tmp_result2 = simde_mm_cmplt_epi16(psi, int_ch_mag);                                                     \
+  tmp_result = simde_mm_xor_si128(tmp_result, tmp_result2);                                                \
+  tmp_result4 = simde_mm_cmpgt_epi16(psi, int_three_ch_mag);                                               \
+  tmp_result3 = simde_mm_xor_si128(tmp_result3, tmp_result4);                                              \
+  tmp_result = simde_mm_and_si128(tmp_result, c3);                                                         \
+  tmp_result2 = simde_mm_and_si128(tmp_result2, c1);                                                       \
+  tmp_result3 = simde_mm_and_si128(tmp_result3, c5);                                                       \
+  tmp_result4 = simde_mm_and_si128(tmp_result4, c7);                                                       \
+  tmp_result = simde_mm_or_si128(tmp_result, tmp_result2);                                                 \
+  tmp_result3 = simde_mm_or_si128(tmp_result3, tmp_result4);                                               \
+  simde__m128i a = simde_mm_or_si128(tmp_result, tmp_result3);
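+// (Each cmplt/cmpgt yields an all-ones/all-zero int16 mask; XORing adjacent
+// interval masks isolates each band, and the AND/OR ladder then selects the
+// matching constant c1/c3/c5/c7 without branches.)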
 
 // calculates a_sq = int_ch_mag*(a_r^2 + a_i^2)*scale_factor
 #define square_a_epi16(a_r, a_i, int_ch_mag, scale_factor, a_sq) \
-  tmp_result = _mm_mulhi_epi16(a_r, a_r);                        \
-  tmp_result = _mm_slli_epi16(tmp_result, 1);                    \
-  tmp_result = _mm_mulhi_epi16(tmp_result, scale_factor);        \
-  tmp_result = _mm_slli_epi16(tmp_result, 1);                    \
-  tmp_result = _mm_mulhi_epi16(tmp_result, int_ch_mag);          \
-  tmp_result = _mm_slli_epi16(tmp_result, 1);                    \
-  tmp_result2 = _mm_mulhi_epi16(a_i, a_i);                       \
-  tmp_result2 = _mm_slli_epi16(tmp_result2, 1);                  \
-  tmp_result2 = _mm_mulhi_epi16(tmp_result2, scale_factor);      \
-  tmp_result2 = _mm_slli_epi16(tmp_result2, 1);                  \
-  tmp_result2 = _mm_mulhi_epi16(tmp_result2, int_ch_mag);        \
-  tmp_result2 = _mm_slli_epi16(tmp_result2, 1);                  \
-  simde__m128i a_sq = _mm_adds_epi16(tmp_result, tmp_result2);
+  tmp_result = simde_mm_mulhi_epi16(a_r, a_r);                   \
+  tmp_result = simde_mm_slli_epi16(tmp_result, 1);               \
+  tmp_result = simde_mm_mulhi_epi16(tmp_result, scale_factor);   \
+  tmp_result = simde_mm_slli_epi16(tmp_result, 1);               \
+  tmp_result = simde_mm_mulhi_epi16(tmp_result, int_ch_mag);     \
+  tmp_result = simde_mm_slli_epi16(tmp_result, 1);               \
+  tmp_result2 = simde_mm_mulhi_epi16(a_i, a_i);                  \
+  tmp_result2 = simde_mm_slli_epi16(tmp_result2, 1);             \
+  tmp_result2 = simde_mm_mulhi_epi16(tmp_result2, scale_factor); \
+  tmp_result2 = simde_mm_slli_epi16(tmp_result2, 1);             \
+  tmp_result2 = simde_mm_mulhi_epi16(tmp_result2, int_ch_mag);   \
+  tmp_result2 = simde_mm_slli_epi16(tmp_result2, 1);             \
+  simde__m128i a_sq = simde_mm_adds_epi16(tmp_result, tmp_result2);
 
 // calculates a_sq = int_ch_mag*(a_r^2 + a_i^2)*scale_factor for 64-QAM
 #define square_a_64qam_epi16(a_r, a_i, int_ch_mag, scale_factor, a_sq) \
-  tmp_result = _mm_mulhi_epi16(a_r, a_r);                              \
-  tmp_result = _mm_slli_epi16(tmp_result, 1);                          \
-  tmp_result = _mm_mulhi_epi16(tmp_result, scale_factor);              \
-  tmp_result = _mm_slli_epi16(tmp_result, 3);                          \
-  tmp_result = _mm_mulhi_epi16(tmp_result, int_ch_mag);                \
-  tmp_result = _mm_slli_epi16(tmp_result, 1);                          \
-  tmp_result2 = _mm_mulhi_epi16(a_i, a_i);                             \
-  tmp_result2 = _mm_slli_epi16(tmp_result2, 1);                        \
-  tmp_result2 = _mm_mulhi_epi16(tmp_result2, scale_factor);            \
-  tmp_result2 = _mm_slli_epi16(tmp_result2, 3);                        \
-  tmp_result2 = _mm_mulhi_epi16(tmp_result2, int_ch_mag);              \
-  tmp_result2 = _mm_slli_epi16(tmp_result2, 1);                        \
-  simde__m128i a_sq = _mm_adds_epi16(tmp_result, tmp_result2);
-
-#elif defined(__arm__) || defined(__aarch64__)
-
-#endif
+  tmp_result = simde_mm_mulhi_epi16(a_r, a_r);                         \
+  tmp_result = simde_mm_slli_epi16(tmp_result, 1);                     \
+  tmp_result = simde_mm_mulhi_epi16(tmp_result, scale_factor);         \
+  tmp_result = simde_mm_slli_epi16(tmp_result, 3);                     \
+  tmp_result = simde_mm_mulhi_epi16(tmp_result, int_ch_mag);           \
+  tmp_result = simde_mm_slli_epi16(tmp_result, 1);                     \
+  tmp_result2 = simde_mm_mulhi_epi16(a_i, a_i);                        \
+  tmp_result2 = simde_mm_slli_epi16(tmp_result2, 1);                   \
+  tmp_result2 = simde_mm_mulhi_epi16(tmp_result2, scale_factor);       \
+  tmp_result2 = simde_mm_slli_epi16(tmp_result2, 3);                   \
+  tmp_result2 = simde_mm_mulhi_epi16(tmp_result2, int_ch_mag);         \
+  tmp_result2 = simde_mm_slli_epi16(tmp_result2, 1);                   \
+  simde__m128i a_sq = simde_mm_adds_epi16(tmp_result, tmp_result2);
 
 //==============================================================================================
 // SINGLE-STREAM
@@ -374,70 +369,30 @@ void qam16_llr(int16_t *stream0_in,
                int length)
 {
   int i;
-  #if defined(__x86_64__) || defined(__i386__)
-  __m128i *rxF_128 = (__m128i*)stream0_in;
-  __m128i *ch_mag_128 = (__m128i*)chan_magn;
-  __m128i llr128[2];
+  simde__m128i *rxF_128 = (simde__m128i*)stream0_in;
+  simde__m128i *ch_mag_128 = (simde__m128i*)chan_magn;
+  simde__m128i llr128[2];
   int32_t *llr32 = (int32_t*) llr;
-#elif defined(__arm__) || defined(__aarch64__)
-  int16x8_t *rxF_128 = (int16x8_t*)stream0_in;
-  int16x8_t *ch_mag_128 = (int16x8_t*)chan_magn;
-  int16x8_t xmm0;
-  int16_t *llr16 = (int16_t*)llr;
-#endif
 
  // printf ("This line in qam16_llr is %d.\n", __LINE__);
 
   for (i=0; i<length; i++) {
-#if defined(__x86_64__) || defined(__i386)
-    simde__m128i xmm0 = _mm_abs_epi16(rxF_128[i]);
-    xmm0 = _mm_subs_epi16(ch_mag_128[i], xmm0);
+    simde__m128i xmm0 = simde_mm_abs_epi16(rxF_128[i]);
+    xmm0 = simde_mm_subs_epi16(ch_mag_128[i], xmm0);
 
-    // lambda_1=y_R, lambda_2=|y_R|-|h|^2, lamda_3=y_I, lambda_4=|y_I|-|h|^2
+    // lambda_1=y_R, lambda_2=|y_R|-|h|^2, lambda_3=y_I, lambda_4=|y_I|-|h|^2
-    llr128[0] = _mm_unpacklo_epi32(rxF_128[i],xmm0);
-    llr128[1] = _mm_unpackhi_epi32(rxF_128[i],xmm0);
-    llr32[0] = _mm_extract_epi32(llr128[0],0); //((uint32_t *)&llr128[0])[0];
-    llr32[1] = _mm_extract_epi32(llr128[0],1); //((uint32_t *)&llr128[0])[0];
-    llr32[2] = _mm_extract_epi32(llr128[0],2); //((uint32_t *)&llr128[0])[2];
-    llr32[3] = _mm_extract_epi32(llr128[0],3); //((uint32_t *)&llr128[0])[3];
-    llr32[4] = _mm_extract_epi32(llr128[1],0); //((uint32_t *)&llr128[1])[0];
-    llr32[5] = _mm_extract_epi32(llr128[1],1); //((uint32_t *)&llr128[1])[1];
-    llr32[6] = _mm_extract_epi32(llr128[1],2); //((uint32_t *)&llr128[1])[2];
-    llr32[7] = _mm_extract_epi32(llr128[1],3); //((uint32_t *)&llr128[1])[3];
+    llr128[0] = simde_mm_unpacklo_epi32(rxF_128[i],xmm0);
+    llr128[1] = simde_mm_unpackhi_epi32(rxF_128[i],xmm0);
+    llr32[0] = simde_mm_extract_epi32(llr128[0],0); //((uint32_t *)&llr128[0])[0];
+    llr32[1] = simde_mm_extract_epi32(llr128[0],1); //((uint32_t *)&llr128[0])[0];
+    llr32[2] = simde_mm_extract_epi32(llr128[0],2); //((uint32_t *)&llr128[0])[2];
+    llr32[3] = simde_mm_extract_epi32(llr128[0],3); //((uint32_t *)&llr128[0])[3];
+    llr32[4] = simde_mm_extract_epi32(llr128[1],0); //((uint32_t *)&llr128[1])[0];
+    llr32[5] = simde_mm_extract_epi32(llr128[1],1); //((uint32_t *)&llr128[1])[1];
+    llr32[6] = simde_mm_extract_epi32(llr128[1],2); //((uint32_t *)&llr128[1])[2];
+    llr32[7] = simde_mm_extract_epi32(llr128[1],3); //((uint32_t *)&llr128[1])[3];
     llr32+=8;
-#elif defined(__arm__) || defined(__aarch64__)
-    xmm0 = vabsq_s16(rxF[i]);
-    xmm0 = vqsubq_s16(ch_mag[i],xmm0);
-    // lambda_1=y_R, lambda_2=|y_R|-|h|^2, lamda_3=y_I, lambda_4=|y_I|-|h|^2
-
-    llr16[0] = vgetq_lane_s16(rxF[i],0);
-    llr16[1] = vgetq_lane_s16(rxF[i],1);
-    llr16[2] = vgetq_lane_s16(xmm0,0);
-    llr16[3] = vgetq_lane_s16(xmm0,1);
-    llr16[4] = vgetq_lane_s16(rxF[i],2);
-    llr16[5] = vgetq_lane_s16(rxF[i],3);
-    llr16[6] = vgetq_lane_s16(xmm0,2);
-    llr16[7] = vgetq_lane_s16(xmm0,3);
-    llr16[8] = vgetq_lane_s16(rxF[i],4);
-    llr16[9] = vgetq_lane_s16(rxF[i],5);
-    llr16[10] = vgetq_lane_s16(xmm0,4);
-    llr16[11] = vgetq_lane_s16(xmm0,5);
-    llr16[12] = vgetq_lane_s16(rxF[i],6);
-    llr16[13] = vgetq_lane_s16(rxF[i],6);
-    llr16[14] = vgetq_lane_s16(xmm0,7);
-    llr16[15] = vgetq_lane_s16(xmm0,7);
-    llr16+=16;
-
-#endif
-
   }
-
-#if defined(__x86_64__) || defined(__i386)
-  _mm_empty();
-  _m_empty();
-#endif
-
-
 }
 
 void dlsch_16qam_llr_SIC (LTE_DL_FRAME_PARMS *frame_parms,
@@ -462,14 +417,14 @@ void dlsch_16qam_llr_SIC (LTE_DL_FRAME_PARMS *frame_parms,
   uint16_t *sic_data;
   uint16_t pbch_pss_sss_adjust;
   unsigned char len_mod4=0;
-  __m128i llr128[2];
-  __m128i *ch_mag;
+  simde__m128i llr128[2];
+  simde__m128i *ch_mag;
   nsymb = (frame_parms->Ncp==0) ? 14:12;
 
-    for (symbol=num_pdcch_symbols; symbol<nsymb; symbol++) {
+  for (symbol=num_pdcch_symbols; symbol<nsymb; symbol++) {
     uint16_t *rxF = (uint16_t*)(&rxdataF_comp[0][((int16_t)symbol*frame_parms->N_RB_DL*12)]);
     int16_t *rho_1=(int16_t*)(&rho_i[0][((int16_t)symbol*frame_parms->N_RB_DL*12)]);
-    ch_mag = (__m128i*)(&dl_ch_mag[0][((int16_t)symbol*frame_parms->N_RB_DL*12)]);
+    ch_mag = (simde__m128i*)(&dl_ch_mag[0][((int16_t)symbol*frame_parms->N_RB_DL*12)]);
     sic_data = (uint16_t*)(&sic_buffer[0][((int16_t)len_acc)]);
 
     symbol_mod = (symbol>=(7-frame_parms->Ncp)) ? symbol-(7-frame_parms->Ncp) : symbol;
@@ -517,31 +472,29 @@ void dlsch_16qam_llr_SIC (LTE_DL_FRAME_PARMS *frame_parms,
     for (i=0; i<len; i++) {
 
 
-    __m128i *x1 = (__m128i*)rxF;//clean_x1;
+      simde__m128i *x1 = (simde__m128i*)rxF;//clean_x1;
 //printf("%p %p %p\n", clean_x1, &clean_x1, &clean_x1[0]);
 //int *a = malloc(10*sizeof(int));
 //printf("%p %p\n", a, &a);
 //exit(0);
-    simde__m128i xmm0 = _mm_abs_epi16(x1[i]);
-    xmm0 = _mm_subs_epi16(ch_mag[i],xmm0);
+      simde__m128i xmm0 = simde_mm_abs_epi16(x1[i]);
+      xmm0 = simde_mm_subs_epi16(ch_mag[i],xmm0);
 
    // lambda_1=y_R, lambda_2=|y_R|-|h|^2, lambda_3=y_I, lambda_4=|y_I|-|h|^2
-    llr128[0] = _mm_unpacklo_epi32(x1[i],xmm0);
-    llr128[1] = _mm_unpackhi_epi32(x1[i],xmm0);
-    llr32[0] = _mm_extract_epi32(llr128[0],0); //((uint32_t *)&llr128[0])[0];
-    llr32[1] = _mm_extract_epi32(llr128[0],1); //((uint32_t *)&llr128[0])[1];
-    llr32[2] = _mm_extract_epi32(llr128[0],2); //((uint32_t *)&llr128[0])[2];
-    llr32[3] = _mm_extract_epi32(llr128[0],3); //((uint32_t *)&llr128[0])[3];
-    llr32[4] = _mm_extract_epi32(llr128[1],0); //((uint32_t *)&llr128[1])[0];
-    llr32[5] = _mm_extract_epi32(llr128[1],1); //((uint32_t *)&llr128[1])[1];
-    llr32[6] = _mm_extract_epi32(llr128[1],2); //((uint32_t *)&llr128[1])[2];
-    llr32[7] = _mm_extract_epi32(llr128[1],3); //((uint32_t *)&llr128[1])[3];
-    llr32+=8;
+      llr128[0] = simde_mm_unpacklo_epi32(x1[i],xmm0);
+      llr128[1] = simde_mm_unpackhi_epi32(x1[i],xmm0);
+      llr32[0] = simde_mm_extract_epi32(llr128[0],0); //((uint32_t *)&llr128[0])[0];
+      llr32[1] = simde_mm_extract_epi32(llr128[0],1); //((uint32_t *)&llr128[0])[1];
+      llr32[2] = simde_mm_extract_epi32(llr128[0],2); //((uint32_t *)&llr128[0])[2];
+      llr32[3] = simde_mm_extract_epi32(llr128[0],3); //((uint32_t *)&llr128[0])[3];
+      llr32[4] = simde_mm_extract_epi32(llr128[1],0); //((uint32_t *)&llr128[1])[0];
+      llr32[5] = simde_mm_extract_epi32(llr128[1],1); //((uint32_t *)&llr128[1])[1];
+      llr32[6] = simde_mm_extract_epi32(llr128[1],2); //((uint32_t *)&llr128[1])[2];
+      llr32[7] = simde_mm_extract_epi32(llr128[1],3); //((uint32_t *)&llr128[1])[3];
+      llr32+=8;
 
+    }
   }
-  _mm_empty();
-  _m_empty();
-}
 }
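Since llr32 advances contiguously by eight words per vector, the eight simde_mm_extract_epi32 calls in the two loops above could equally be written as two unaligned stores; a sketch of that alternative (not what this patch does):

    simde_mm_storeu_si128((simde__m128i *)&llr32[0], llr128[0]);
    simde_mm_storeu_si128((simde__m128i *)&llr32[4], llr128[1]);
    llr32 += 8;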
 
 
@@ -615,35 +568,18 @@ void qam64_llr(int16_t *stream0_in,
                int length)
 {
 
-#if defined(__x86_64__) || defined(__i386__)
-  __m128i *rxF_128 = (__m128i*)stream0_in;
-  __m128i *ch_mag_128 = (__m128i*)chan_magn;
-  __m128i *ch_magb_128 = (__m128i*)chan_magn_b;
-#elif defined(__arm__) || defined(__aarch64__)
-  int16x8_t *rxF_128 = (int16x8_t*)stream0_in;
-  int16x8_t *ch_mag_128 = (int16x8_t*)chan_magn;
-  int16x8_t *ch_magb_128 = (int16x8_t*)chan_magn_b;
-  int16x8_t xmm1,xmm2;
-#endif
-
-
+  simde__m128i *rxF_128 = (simde__m128i*)stream0_in;
+  simde__m128i *ch_mag_128 = (simde__m128i*)chan_magn;
+  simde__m128i *ch_magb_128 = (simde__m128i*)chan_magn_b;
   int i;
   //int16_t *llr2;
   //llr2 = llr;
 
   for (i=0; i<length; i++) {
-
-#if defined(__x86_64__) || defined(__i386__)
-    simde__m128i xmm1 = _mm_abs_epi16(rxF_128[i]);
-    xmm1 = _mm_subs_epi16(ch_mag_128[i],xmm1);
-    simde__m128i xmm2 = _mm_abs_epi16(xmm1);
-    xmm2 = _mm_subs_epi16(ch_magb_128[i],xmm2);
-#elif defined(__arm__) || defined(__aarch64__)
-    xmm1 = vabsq_s16(rxF_128[i]);
-    xmm1 = vsubq_s16(ch_mag_128[i],xmm1);
-    xmm2 = vabsq_s16(xmm1);
-    xmm2 = vsubq_s16(ch_magb_128[i],xmm2);
-#endif
+    simde__m128i xmm1 = simde_mm_abs_epi16(rxF_128[i]);
+    xmm1 = simde_mm_subs_epi16(ch_mag_128[i],xmm1);
+    simde__m128i xmm2 = simde_mm_abs_epi16(xmm1);
+    xmm2 = simde_mm_subs_epi16(ch_magb_128[i],xmm2);
     // loop over all LLRs in quad word (24 coded bits)
     /*
       for (j=0;j<8;j+=2) {
@@ -659,69 +595,33 @@ void qam64_llr(int16_t *stream0_in,
     */
     llr[0] = ((short *)&rxF_128[i])[0];
     llr[1] = ((short *)&rxF_128[i])[1];
-#if defined(__x86_64__) || defined(__i386__)
-    llr[2] = _mm_extract_epi16(xmm1,0);
-    llr[3] = _mm_extract_epi16(xmm1,1);//((short *)&xmm1)[j+1];
-    llr[4] = _mm_extract_epi16(xmm2,0);//((short *)&xmm2)[j];
-    llr[5] = _mm_extract_epi16(xmm2,1);//((short *)&xmm2)[j+1];
-#elif defined(__arm__) || defined(__aarch64__)
-    llr[2] = vgetq_lane_s16(xmm1,0);
-    llr[3] = vgetq_lane_s16(xmm1,1);//((short *)&xmm1)[j+1];
-    llr[4] = vgetq_lane_s16(xmm2,0);//((short *)&xmm2)[j];
-    llr[5] = vgetq_lane_s16(xmm2,1);//((short *)&xmm2)[j+1];
-#endif
-
+    llr[2] = simde_mm_extract_epi16(xmm1,0);
+    llr[3] = simde_mm_extract_epi16(xmm1,1);//((short *)&xmm1)[j+1];
+    llr[4] = simde_mm_extract_epi16(xmm2,0);//((short *)&xmm2)[j];
+    llr[5] = simde_mm_extract_epi16(xmm2,1);//((short *)&xmm2)[j+1];
     llr+=6;
     llr[0] = ((short *)&rxF_128[i])[2];
     llr[1] = ((short *)&rxF_128[i])[3];
-#if defined(__x86_64__) || defined(__i386__)
-    llr[2] = _mm_extract_epi16(xmm1,2);
-    llr[3] = _mm_extract_epi16(xmm1,3);//((short *)&xmm1)[j+1];
-    llr[4] = _mm_extract_epi16(xmm2,2);//((short *)&xmm2)[j];
-    llr[5] = _mm_extract_epi16(xmm2,3);//((short *)&xmm2)[j+1];
-#elif defined(__arm__) || defined(__aarch64__)
-    llr[2] = vgetq_lane_s16(xmm1,2);
-    llr[3] = vgetq_lane_s16(xmm1,3);//((short *)&xmm1)[j+1];
-    llr[4] = vgetq_lane_s16(xmm2,2);//((short *)&xmm2)[j];
-    llr[5] = vgetq_lane_s16(xmm2,3);//((short *)&xmm2)[j+1];
-#endif
-
+    llr[2] = simde_mm_extract_epi16(xmm1,2);
+    llr[3] = simde_mm_extract_epi16(xmm1,3);//((short *)&xmm1)[j+1];
+    llr[4] = simde_mm_extract_epi16(xmm2,2);//((short *)&xmm2)[j];
+    llr[5] = simde_mm_extract_epi16(xmm2,3);//((short *)&xmm2)[j+1];
     llr+=6;
     llr[0] = ((short *)&rxF_128[i])[4];
     llr[1] = ((short *)&rxF_128[i])[5];
-#if defined(__x86_64__) || defined(__i386__)
-    llr[2] = _mm_extract_epi16(xmm1,4);
-    llr[3] = _mm_extract_epi16(xmm1,5);//((short *)&xmm1)[j+1];
-    llr[4] = _mm_extract_epi16(xmm2,4);//((short *)&xmm2)[j];
-    llr[5] = _mm_extract_epi16(xmm2,5);//((short *)&xmm2)[j+1];
-#elif defined(__arm__) || defined(__aarch64__)
-    llr[2] = vgetq_lane_s16(xmm1,4);
-    llr[3] = vgetq_lane_s16(xmm1,5);//((short *)&xmm1)[j+1];
-    llr[4] = vgetq_lane_s16(xmm2,4);//((short *)&xmm2)[j];
-    llr[5] = vgetq_lane_s16(xmm2,5);//((short *)&xmm2)[j+1];
-#endif
+    llr[2] = simde_mm_extract_epi16(xmm1,4);
+    llr[3] = simde_mm_extract_epi16(xmm1,5);//((short *)&xmm1)[j+1];
+    llr[4] = simde_mm_extract_epi16(xmm2,4);//((short *)&xmm2)[j];
+    llr[5] = simde_mm_extract_epi16(xmm2,5);//((short *)&xmm2)[j+1];
     llr+=6;
     llr[0] = ((short *)&rxF_128[i])[6];
     llr[1] = ((short *)&rxF_128[i])[7];
-#if defined(__x86_64__) || defined(__i386__)
-    llr[2] = _mm_extract_epi16(xmm1,6);
-    llr[3] = _mm_extract_epi16(xmm1,7);//((short *)&xmm1)[j+1];
-    llr[4] = _mm_extract_epi16(xmm2,6);//((short *)&xmm2)[j];
-    llr[5] = _mm_extract_epi16(xmm2,7);//((short *)&xmm2)[j+1];
-#elif defined(__arm__) || defined(__aarch64__)
-    llr[2] = vgetq_lane_s16(xmm1,6);
-    llr[3] = vgetq_lane_s16(xmm1,7);//((short *)&xmm1)[j+1];
-    llr[4] = vgetq_lane_s16(xmm2,6);//((short *)&xmm2)[j];
-    llr[5] = vgetq_lane_s16(xmm2,7);//((short *)&xmm2)[j+1];
-#endif
-    llr+=6;
-
+    llr[2] = simde_mm_extract_epi16(xmm1,6);
+    llr[3] = simde_mm_extract_epi16(xmm1,7);//((short *)&xmm1)[j+1];
+    llr[4] = simde_mm_extract_epi16(xmm2,6);//((short *)&xmm2)[j];
+    llr[5] = simde_mm_extract_epi16(xmm2,7);//((short *)&xmm2)[j+1];
+    llr += 6;
   }
-
-#if defined(__x86_64__) || defined(__i386__)
-  _mm_empty();
-  _m_empty();
-#endif
 }
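The 64-QAM loop above is the same folding trick one level deeper: each stage takes an absolute value and subtracts the next amplitude threshold. A scalar sketch (hypothetical helper, saturation omitted; chan_magn/chan_magn_b are assumed to hold the two per-RE thresholds):

#include <stdint.h>
#include <stdlib.h>

static void qam64_llr_ref(const int16_t *y, const int16_t *ma,
                          const int16_t *mb, int16_t *llr, int n_re)
{
  for (int i = 0; i < n_re; i++) {
    for (int c = 0; c < 2; c++) {                   // c = 0: real, c = 1: imag
      int16_t t1 = ma[2 * i + c] - abs(y[2 * i + c]);
      llr[6 * i + c]     = y[2 * i + c];            // bits 0/1
      llr[6 * i + 2 + c] = t1;                      // bits 2/3
      llr[6 * i + 4 + c] = mb[2 * i + c] - abs(t1); // bits 4/5
    }
  }
}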
 
 void dlsch_64qam_llr_SIC(LTE_DL_FRAME_PARMS *frame_parms,
@@ -748,15 +648,15 @@ void dlsch_64qam_llr_SIC(LTE_DL_FRAME_PARMS *frame_parms,
   uint16_t pbch_pss_sss_adjust;
   unsigned char len_mod4=0;
   uint16_t *llr2;
-  __m128i *ch_mag,*ch_magb;
+  simde__m128i *ch_mag,*ch_magb;
 
   nsymb = (frame_parms->Ncp==0) ? 14:12;
 
   for (symbol=num_pdcch_symbols; symbol<nsymb; symbol++) {
     uint16_t *rxF = (uint16_t*)(&rxdataF_comp[0][((int16_t)symbol*frame_parms->N_RB_DL*12)]);
     int16_t *rho_1=(int16_t*)(&rho_i[0][((int16_t)symbol*frame_parms->N_RB_DL*12)]);
-    ch_mag = (__m128i*)(&dl_ch_mag[0][((int16_t)symbol*frame_parms->N_RB_DL*12)]);
-    ch_magb = (__m128i*)(&dl_ch_magb[0][((int16_t)symbol*frame_parms->N_RB_DL*12)]);
+    ch_mag = (simde__m128i*)(&dl_ch_mag[0][((int16_t)symbol*frame_parms->N_RB_DL*12)]);
+    ch_magb = (simde__m128i*)(&dl_ch_magb[0][((int16_t)symbol*frame_parms->N_RB_DL*12)]);
     sic_data = (uint16_t*)(&sic_buffer[0][((int16_t)len_acc)]);
 
     symbol_mod = (symbol>=(7-frame_parms->Ncp)) ? symbol-(7-frame_parms->Ncp) : symbol;
@@ -808,11 +708,11 @@ void dlsch_64qam_llr_SIC(LTE_DL_FRAME_PARMS *frame_parms,
 
     for (i=0; i<len2; i++) {
 
-      __m128i *x1 = (__m128i*)rxF;
-      simde__m128i xmm1 = _mm_abs_epi16(x1[i]);
-      xmm1 = _mm_subs_epi16(ch_mag[i],xmm1);
-      simde__m128i xmm2 = _mm_abs_epi16(xmm1);
-      xmm2 = _mm_subs_epi16(ch_magb[i],xmm2);
+      simde__m128i *x1 = (simde__m128i*)rxF;
+      simde__m128i xmm1 = simde_mm_abs_epi16(x1[i]);
+      xmm1 = simde_mm_subs_epi16(ch_mag[i],xmm1);
+      simde__m128i xmm2 = simde_mm_abs_epi16(xmm1);
+      xmm2 = simde_mm_subs_epi16(ch_magb[i],xmm2);
 
       // loop over all LLRs in quad word (24 coded bits)
       /*
@@ -829,48 +729,41 @@ void dlsch_64qam_llr_SIC(LTE_DL_FRAME_PARMS *frame_parms,
       */
       llr2[0] = ((short *)&x1[i])[0];
       llr2[1] = ((short *)&x1[i])[1];
-      llr2[2] = _mm_extract_epi16(xmm1,0);
-      llr2[3] = _mm_extract_epi16(xmm1,1);//((short *)&xmm1)[j+1];
-      llr2[4] = _mm_extract_epi16(xmm2,0);//((short *)&xmm2)[j];
-      llr2[5] = _mm_extract_epi16(xmm2,1);//((short *)&xmm2)[j+1];
+      llr2[2] = simde_mm_extract_epi16(xmm1,0);
+      llr2[3] = simde_mm_extract_epi16(xmm1,1);//((short *)&xmm1)[j+1];
+      llr2[4] = simde_mm_extract_epi16(xmm2,0);//((short *)&xmm2)[j];
+      llr2[5] = simde_mm_extract_epi16(xmm2,1);//((short *)&xmm2)[j+1];
 
 
       llr2+=6;
       llr2[0] = ((short *)&x1[i])[2];
       llr2[1] = ((short *)&x1[i])[3];
 
-      llr2[2] = _mm_extract_epi16(xmm1,2);
-      llr2[3] = _mm_extract_epi16(xmm1,3);//((short *)&xmm1)[j+1];
-      llr2[4] = _mm_extract_epi16(xmm2,2);//((short *)&xmm2)[j];
-      llr2[5] = _mm_extract_epi16(xmm2,3);//((short *)&xmm2)[j+1];
+      llr2[2] = simde_mm_extract_epi16(xmm1,2);
+      llr2[3] = simde_mm_extract_epi16(xmm1,3);//((short *)&xmm1)[j+1];
+      llr2[4] = simde_mm_extract_epi16(xmm2,2);//((short *)&xmm2)[j];
+      llr2[5] = simde_mm_extract_epi16(xmm2,3);//((short *)&xmm2)[j+1];
 
       llr2+=6;
       llr2[0] = ((short *)&x1[i])[4];
       llr2[1] = ((short *)&x1[i])[5];
 
-      llr2[2] = _mm_extract_epi16(xmm1,4);
-      llr2[3] = _mm_extract_epi16(xmm1,5);//((short *)&xmm1)[j+1];
-      llr2[4] = _mm_extract_epi16(xmm2,4);//((short *)&xmm2)[j];
-      llr2[5] = _mm_extract_epi16(xmm2,5);//((short *)&xmm2)[j+1];
+      llr2[2] = simde_mm_extract_epi16(xmm1,4);
+      llr2[3] = simde_mm_extract_epi16(xmm1,5);//((short *)&xmm1)[j+1];
+      llr2[4] = simde_mm_extract_epi16(xmm2,4);//((short *)&xmm2)[j];
+      llr2[5] = simde_mm_extract_epi16(xmm2,5);//((short *)&xmm2)[j+1];
 
       llr2+=6;
       llr2[0] = ((short *)&x1[i])[6];
       llr2[1] = ((short *)&x1[i])[7];
 
-      llr2[2] = _mm_extract_epi16(xmm1,6);
-      llr2[3] = _mm_extract_epi16(xmm1,7);//((short *)&xmm1)[j+1];
-      llr2[4] = _mm_extract_epi16(xmm2,6);//((short *)&xmm2)[j];
-      llr2[5] = _mm_extract_epi16(xmm2,7);//((short *)&xmm2)[j+1];
-
-      llr2+=6;
+      llr2[2] = simde_mm_extract_epi16(xmm1,6);
+      llr2[3] = simde_mm_extract_epi16(xmm1,7);//((short *)&xmm1)[j+1];
+      llr2[4] = simde_mm_extract_epi16(xmm2,6);//((short *)&xmm2)[j];
+      llr2[5] = simde_mm_extract_epi16(xmm2,7);//((short *)&xmm2)[j+1];
 
+      llr2 += 6;
     }
-
- // *llr_save = llr;
-
-  _mm_empty();
-  _m_empty();
-
   }
 }
 //#endif
@@ -882,23 +775,6 @@ void dlsch_64qam_llr_SIC(LTE_DL_FRAME_PARMS *frame_parms,
 // QPSK
 //----------------------------------------------------------------------------------------------
 
-#if defined(__x86_64__) || defined(__i386)
-__m128i  y0r_over2 __attribute__ ((aligned(16)));
-__m128i  y0i_over2 __attribute__ ((aligned(16)));
-__m128i  y1r_over2 __attribute__ ((aligned(16)));
-__m128i  y1i_over2 __attribute__ ((aligned(16)));
-
-__m128i  A __attribute__ ((aligned(16)));
-__m128i  B __attribute__ ((aligned(16)));
-__m128i  C __attribute__ ((aligned(16)));
-__m128i  D __attribute__ ((aligned(16)));
-__m128i  E __attribute__ ((aligned(16)));
-__m128i  F __attribute__ ((aligned(16)));
-__m128i  G __attribute__ ((aligned(16)));
-__m128i  H __attribute__ ((aligned(16)));
-
-#endif
-
 int dlsch_qpsk_qpsk_llr(LTE_DL_FRAME_PARMS *frame_parms,
                         int **rxdataF_comp,
                         int **rxdataF_comp_i,
@@ -953,8 +829,6 @@ int dlsch_qpsk_qpsk_llr(LTE_DL_FRAME_PARMS *frame_parms,
   return(0);
 }
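In max-log form, the LLR of a stream-0 bit b in the presence of one QPSK interferer x1 is roughly

    LLR(b) ~ max_{x1, b=0} M(y0, y1, rho, x1) - max_{x1, b=1} M(y0, y1, rho, x1),

and qpsk_qpsk() below evaluates each of the two maxima as a max over two candidate terms; the A..H quantities are the |y1/2 +- rho/sqrt(8)| building blocks of those metrics.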
 
-//__m128i ONE_OVER_SQRT_8 __attribute__((aligned(16)));
-
 void qpsk_qpsk(short *stream0_in,
                short *stream1_in,
                short *stream0_out,
@@ -974,176 +848,148 @@ void qpsk_qpsk(short *stream0_in,
     length = number of resource elements
   */
 
-#if defined(__x86_64__) || defined(__i386__)
-  __m128i *rho01_128i = (__m128i *)rho01;
-  __m128i *stream0_128i_in = (__m128i *)stream0_in;
-  __m128i *stream1_128i_in = (__m128i *)stream1_in;
-  __m128i *stream0_128i_out = (__m128i *)stream0_out;
-  __m128i ONE_OVER_SQRT_8 = _mm_set1_epi16(23170); //round(2^16/sqrt(8))
-#elif defined(__arm__) || defined(__aarch64__)
-  int16x8_t *rho01_128i = (int16x8_t *)rho01;
-  int16x8_t *stream0_128i_in = (int16x8_t *)stream0_in;
-  int16x8_t *stream1_128i_in = (int16x8_t *)stream1_in;
-  int16x8_t *stream0_128i_out = (int16x8_t *)stream0_out;
-  int16x8_t ONE_OVER_SQRT_8 = vdupq_n_s16(23170); //round(2^16/sqrt(8))
-#endif
+  simde__m128i *rho01_128i = (simde__m128i *)rho01;
+  simde__m128i *stream0_128i_in = (simde__m128i *)stream0_in;
+  simde__m128i *stream1_128i_in = (simde__m128i *)stream1_in;
+  simde__m128i *stream0_128i_out = (simde__m128i *)stream0_out;
+  simde__m128i ONE_OVER_SQRT_8 = simde_mm_set1_epi16(23170); //round(2^16/sqrt(8))
 
   int i;
 
 
   for (i=0; i<length>>2; i+=2) {
     // in each iteration, we take 8 complex samples
-#if defined(__x86_64__) || defined(__i386__)
     simde__m128i xmm0 = rho01_128i[i]; // 4 symbols
     simde__m128i xmm1 = rho01_128i[i + 1];
 
     // put (rho_r + rho_i)/2sqrt2 in rho_rpi
     // put (rho_r - rho_i)/2sqrt2 in rho_rmi
 
-    xmm0 = _mm_shufflelo_epi16(xmm0,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm0 = _mm_shufflehi_epi16(xmm0,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm0 = _mm_shuffle_epi32(xmm0,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm1 = _mm_shufflelo_epi16(xmm1,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm1 = _mm_shufflehi_epi16(xmm1,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm1 = _mm_shuffle_epi32(xmm1,0xd8); //_MM_SHUFFLE(0,2,1,3));
+    xmm0 = simde_mm_shufflelo_epi16(xmm0,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
+    xmm0 = simde_mm_shufflehi_epi16(xmm0,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
+    xmm0 = simde_mm_shuffle_epi32(xmm0,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
+    xmm1 = simde_mm_shufflelo_epi16(xmm1,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
+    xmm1 = simde_mm_shufflehi_epi16(xmm1,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
+    xmm1 = simde_mm_shuffle_epi32(xmm1,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
     //xmm0 = [Re(0,1) Re(2,3) Im(0,1) Im(2,3)]
     //xmm1 = [Re(4,5) Re(6,7) Im(4,5) Im(6,7)]
-    simde__m128i xmm2 = _mm_unpacklo_epi64(xmm0, xmm1); // Re(rho)
-    simde__m128i xmm3 = _mm_unpackhi_epi64(xmm0, xmm1); // Im(rho)
-    simde__m128i rho_rpi = _mm_adds_epi16(xmm2, xmm3); // rho = Re(rho) + Im(rho)
-    simde__m128i rho_rmi = _mm_subs_epi16(xmm2, xmm3); // rho* = Re(rho) - Im(rho)
+    simde__m128i xmm2 = simde_mm_unpacklo_epi64(xmm0, xmm1); // Re(rho)
+    simde__m128i xmm3 = simde_mm_unpackhi_epi64(xmm0, xmm1); // Im(rho)
+    simde__m128i rho_rpi = simde_mm_adds_epi16(xmm2, xmm3); // rho = Re(rho) + Im(rho)
+    simde__m128i rho_rmi = simde_mm_subs_epi16(xmm2, xmm3); // rho* = Re(rho) - Im(rho)
 
    // divide by sqrt(8); no post-shift needed since ONE_OVER_SQRT_8 is Q1.16
-    rho_rpi = _mm_mulhi_epi16(rho_rpi,ONE_OVER_SQRT_8);
-    rho_rmi = _mm_mulhi_epi16(rho_rmi,ONE_OVER_SQRT_8);
-#elif defined(__arm__) || defined(__aarch64__)
-
-
-#endif
+    rho_rpi = simde_mm_mulhi_epi16(rho_rpi,ONE_OVER_SQRT_8);
+    rho_rmi = simde_mm_mulhi_epi16(rho_rmi,ONE_OVER_SQRT_8);
     // Compute LLR for first bit of stream 0
 
     // Compute real and imaginary parts of MF output for stream 0
-#if defined(__x86_64__) || defined(__i386__)
     xmm0 = stream0_128i_in[i];
     xmm1 = stream0_128i_in[i+1];
 
-    xmm0 = _mm_shufflelo_epi16(xmm0,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm0 = _mm_shufflehi_epi16(xmm0,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm0 = _mm_shuffle_epi32(xmm0,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm1 = _mm_shufflelo_epi16(xmm1,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm1 = _mm_shufflehi_epi16(xmm1,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm1 = _mm_shuffle_epi32(xmm1,0xd8); //_MM_SHUFFLE(0,2,1,3));
+    xmm0 = simde_mm_shufflelo_epi16(xmm0,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
+    xmm0 = simde_mm_shufflehi_epi16(xmm0,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
+    xmm0 = simde_mm_shuffle_epi32(xmm0,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
+    xmm1 = simde_mm_shufflelo_epi16(xmm1,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
+    xmm1 = simde_mm_shufflehi_epi16(xmm1,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
+    xmm1 = simde_mm_shuffle_epi32(xmm1,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
     //xmm0 = [Re(0,1) Re(2,3) Im(0,1) Im(2,3)]
     //xmm1 = [Re(4,5) Re(6,7) Im(4,5) Im(6,7)]
-    simde__m128i y0r = _mm_unpacklo_epi64(xmm0, xmm1); // = [y0r(1),y0r(2),y0r(3),y0r(4)]
-    simde__m128i y0i = _mm_unpackhi_epi64(xmm0, xmm1);
-
-    y0r_over2  = _mm_srai_epi16(y0r,1);   // divide by 2
-    y0i_over2  = _mm_srai_epi16(y0i,1);   // divide by 2
-#elif defined(__arm__) || defined(__aarch64__)
+    simde__m128i y0r = simde_mm_unpacklo_epi64(xmm0, xmm1); // = [y0r(1),y0r(2),y0r(3),y0r(4)]
+    simde__m128i y0i = simde_mm_unpackhi_epi64(xmm0, xmm1);
 
-
-#endif
+    simde__m128i y0r_over2 = simde_mm_srai_epi16(y0r, 1); // divide by 2
+    simde__m128i y0i_over2 = simde_mm_srai_epi16(y0i, 1); // divide by 2
     // Compute real and imaginary parts of MF output for stream 1
-#if defined(__x86_64__) || defined(__i386__)
     xmm0 = stream1_128i_in[i];
     xmm1 = stream1_128i_in[i+1];
 
-    xmm0 = _mm_shufflelo_epi16(xmm0,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm0 = _mm_shufflehi_epi16(xmm0,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm0 = _mm_shuffle_epi32(xmm0,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm1 = _mm_shufflelo_epi16(xmm1,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm1 = _mm_shufflehi_epi16(xmm1,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm1 = _mm_shuffle_epi32(xmm1,0xd8); //_MM_SHUFFLE(0,2,1,3));
+    xmm0 = simde_mm_shufflelo_epi16(xmm0,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
+    xmm0 = simde_mm_shufflehi_epi16(xmm0,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
+    xmm0 = simde_mm_shuffle_epi32(xmm0,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
+    xmm1 = simde_mm_shufflelo_epi16(xmm1,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
+    xmm1 = simde_mm_shufflehi_epi16(xmm1,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
+    xmm1 = simde_mm_shuffle_epi32(xmm1,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
     //xmm0 = [Re(0,1) Re(2,3) Im(0,1) Im(2,3)]
     //xmm1 = [Re(4,5) Re(6,7) Im(4,5) Im(6,7)]
-    simde__m128i y1r = _mm_unpacklo_epi64(xmm0, xmm1); //[y1r(1),y1r(2),y1r(3),y1r(4)]
-    simde__m128i y1i = _mm_unpackhi_epi64(xmm0, xmm1); //[y1i(1),y1i(2),y1i(3),y1i(4)]
+    simde__m128i y1r = simde_mm_unpacklo_epi64(xmm0, xmm1); //[y1r(1),y1r(2),y1r(3),y1r(4)]
+    simde__m128i y1i = simde_mm_unpackhi_epi64(xmm0, xmm1); //[y1i(1),y1i(2),y1i(3),y1i(4)]
 
-    y1r_over2  = _mm_srai_epi16(y1r,1);   // divide by 2
-    y1i_over2  = _mm_srai_epi16(y1i,1);   // divide by 2
+    simde__m128i y1r_over2 = simde_mm_srai_epi16(y1r, 1); // divide by 2
+    simde__m128i y1i_over2 = simde_mm_srai_epi16(y1i, 1); // divide by 2
 
     // Compute the terms for the LLR of first bit
 
-    xmm0 = _mm_setzero_si128(); // ZERO
+    xmm0 = simde_mm_setzero_si128(); // ZERO
 
     // 1 term for numerator of LLR
-    xmm3 = _mm_subs_epi16(y1r_over2,rho_rpi);
-    A = _mm_abs_epi16(xmm3); // A = |y1r/2 - rho/sqrt(8)|
-    xmm2 = _mm_adds_epi16(A,y0i_over2); // = |y1r/2 - rho/sqrt(8)| + y0i/2
-    xmm3 = _mm_subs_epi16(y1i_over2,rho_rmi);
-    B = _mm_abs_epi16(xmm3); // B = |y1i/2 - rho*/sqrt(8)|
-    simde__m128i logmax_num_re0 = _mm_adds_epi16(B, xmm2); // = |y1r/2 - rho/sqrt(8)|+|y1i/2 - rho*/sqrt(8)| + y0i/2
+    xmm3 = simde_mm_subs_epi16(y1r_over2,rho_rpi);
+    simde__m128i A = simde_mm_abs_epi16(xmm3); // A = |y1r/2 - rho/sqrt(8)|
+    xmm2 = simde_mm_adds_epi16(A,y0i_over2); // = |y1r/2 - rho/sqrt(8)| + y0i/2
+    xmm3 = simde_mm_subs_epi16(y1i_over2,rho_rmi);
+    simde__m128i B = simde_mm_abs_epi16(xmm3); // B = |y1i/2 - rho*/sqrt(8)|
+    simde__m128i logmax_num_re0 = simde_mm_adds_epi16(B, xmm2); // = |y1r/2 - rho/sqrt(8)|+|y1i/2 - rho*/sqrt(8)| + y0i/2
 
     // 2 term for numerator of LLR
-    xmm3 = _mm_subs_epi16(y1r_over2,rho_rmi);
-    C = _mm_abs_epi16(xmm3); // C = |y1r/2 - rho*/4|
-    xmm2 = _mm_subs_epi16(C,y0i_over2); // = |y1r/2 - rho*/4| - y0i/2
-    xmm3 = _mm_adds_epi16(y1i_over2,rho_rpi);
-    D = _mm_abs_epi16(xmm3); // D = |y1i/2 + rho/4|
-    xmm2 = _mm_adds_epi16(xmm2,D); // |y1r/2 - rho*/4| + |y1i/2 + rho/4| - y0i/2
-    logmax_num_re0 = _mm_max_epi16(logmax_num_re0,xmm2); // max, numerator done
+    xmm3 = simde_mm_subs_epi16(y1r_over2,rho_rmi);
+    simde__m128i C = simde_mm_abs_epi16(xmm3); // C = |y1r/2 - rho*/4|
+    xmm2 = simde_mm_subs_epi16(C,y0i_over2); // = |y1r/2 - rho*/4| - y0i/2
+    xmm3 = simde_mm_adds_epi16(y1i_over2,rho_rpi);
+    simde__m128i D = simde_mm_abs_epi16(xmm3); // D = |y1i/2 + rho/4|
+    xmm2 = simde_mm_adds_epi16(xmm2,D); // |y1r/2 - rho*/4| + |y1i/2 + rho/4| - y0i/2
+    logmax_num_re0 = simde_mm_max_epi16(logmax_num_re0,xmm2); // max, numerator done
 
     // 1 term for denominator of LLR
-    xmm3 = _mm_adds_epi16(y1r_over2,rho_rmi);
-    E = _mm_abs_epi16(xmm3); // E = |y1r/2 + rho*/4|
-    xmm2 = _mm_adds_epi16(E,y0i_over2); // = |y1r/2 + rho*/4| + y0i/2
-    xmm3 = _mm_subs_epi16(y1i_over2,rho_rpi);
-    F = _mm_abs_epi16(xmm3); // F = |y1i/2 - rho/4|
-    simde__m128i logmax_den_re0 = _mm_adds_epi16(F, xmm2); // = |y1r/2 + rho*/4| + |y1i/2 - rho/4| + y0i/2
+    xmm3 = simde_mm_adds_epi16(y1r_over2,rho_rmi);
+    simde__m128i E = simde_mm_abs_epi16(xmm3); // E = |y1r/2 + rho*/4|
+    xmm2 = simde_mm_adds_epi16(E,y0i_over2); // = |y1r/2 + rho*/4| + y0i/2
+    xmm3 = simde_mm_subs_epi16(y1i_over2,rho_rpi);
+    simde__m128i F = simde_mm_abs_epi16(xmm3); // F = |y1i/2 - rho/4|
+    simde__m128i logmax_den_re0 = simde_mm_adds_epi16(F, xmm2); // = |y1r/2 + rho*/4| + |y1i/2 - rho/4| + y0i/2
 
     // 2 term for denominator of LLR
-    xmm3 = _mm_adds_epi16(y1r_over2,rho_rpi);
-    G = _mm_abs_epi16(xmm3); // G = |y1r/2 + rho/4|
-    xmm2 = _mm_subs_epi16(G,y0i_over2); // = |y1r/2 + rho/4| - y0i/2
-    xmm3 = _mm_adds_epi16(y1i_over2,rho_rmi);
-    H = _mm_abs_epi16(xmm3); // H = |y1i/2 + rho*/4|
-    xmm2 = _mm_adds_epi16(xmm2,H); // = |y1r/2 + rho/4| + |y1i/2 + rho*/4| - y0i/2
-    logmax_den_re0 = _mm_max_epi16(logmax_den_re0,xmm2); // max, denominator done
+    xmm3 = simde_mm_adds_epi16(y1r_over2,rho_rpi);
+    simde__m128i G = simde_mm_abs_epi16(xmm3); // G = |y1r/2 + rho/4|
+    xmm2 = simde_mm_subs_epi16(G,y0i_over2); // = |y1r/2 + rho/4| - y0i/2
+    xmm3 = simde_mm_adds_epi16(y1i_over2,rho_rmi);
+    simde__m128i H = simde_mm_abs_epi16(xmm3); // H = |y1i/2 + rho*/4|
+    xmm2 = simde_mm_adds_epi16(xmm2,H); // = |y1r/2 + rho/4| + |y1i/2 + rho*/4| - y0i/2
+    logmax_den_re0 = simde_mm_max_epi16(logmax_den_re0,xmm2); // max, denominator done
 
     // Compute the terms for the LLR of first bit
 
    // 1 term for numerator of LLR
-    xmm2 = _mm_adds_epi16(A,y0r_over2);
-    simde__m128i logmax_num_im0 = _mm_adds_epi16(B, xmm2); // = |y1r/2 - rho/4| + |y1i/2 - rho*/4| + y0r/2
+    xmm2 = simde_mm_adds_epi16(A,y0r_over2);
+    simde__m128i logmax_num_im0 = simde_mm_adds_epi16(B, xmm2); // = |y1r/2 - rho/4| + |y1i/2 - rho*/4| + y0r/2
 
    // 2 term for numerator of LLR
-    xmm2 = _mm_subs_epi16(E,y0r_over2);
-    xmm2 = _mm_adds_epi16(xmm2,F); // = |y1r/2 + rho*/4| + |y1i/2 - rho/4| - y0r/2
+    xmm2 = simde_mm_subs_epi16(E,y0r_over2);
+    xmm2 = simde_mm_adds_epi16(xmm2,F); // = |y1r/2 + rho*/4| + |y1i/2 - rho/4| - y0r/2
 
-    logmax_num_im0 = _mm_max_epi16(logmax_num_im0,xmm2); // max, nominator done
+    logmax_num_im0 = simde_mm_max_epi16(logmax_num_im0,xmm2); // max, numerator done
 
     // 1 term for denominator of LLR
-    xmm2 = _mm_adds_epi16(C,y0r_over2);
-    simde__m128i logmax_den_im0 = _mm_adds_epi16(D, xmm2); // = |y1r/2 - rho*/4| + |y1i/2 + rho/4| - y0r/2
+    xmm2 = simde_mm_adds_epi16(C,y0r_over2);
+    simde__m128i logmax_den_im0 = simde_mm_adds_epi16(D, xmm2); // = |y1r/2 - rho*/4| + |y1i/2 + rho/4| - y0r/2
 
-    xmm2 = _mm_subs_epi16(G,y0r_over2);
-    xmm2 = _mm_adds_epi16(xmm2,H); // = |y1r/2 + rho/4| + |y1i/2 + rho*/4| - y0r/2
+    xmm2 = simde_mm_subs_epi16(G,y0r_over2);
+    xmm2 = simde_mm_adds_epi16(xmm2,H); // = |y1r/2 + rho/4| + |y1i/2 + rho*/4| - y0r/2
 
-    logmax_den_im0 = _mm_max_epi16(logmax_den_im0,xmm2); // max, denominator done
+    logmax_den_im0 = simde_mm_max_epi16(logmax_den_im0,xmm2); // max, denominator done
 
     // LLR of first bit [L1(1), L1(2), L1(3), L1(4)]
-    y0r = _mm_adds_epi16(y0r,logmax_num_re0);
-    y0r = _mm_subs_epi16(y0r,logmax_den_re0);
+    y0r = simde_mm_adds_epi16(y0r,logmax_num_re0);
+    y0r = simde_mm_subs_epi16(y0r,logmax_den_re0);
 
     // LLR of second bit [L2(1), L2(2), L2(3), L2(4)]
-    y0i = _mm_adds_epi16(y0i,logmax_num_im0);
-    y0i = _mm_subs_epi16(y0i,logmax_den_im0);
+    y0i = simde_mm_adds_epi16(y0i,logmax_num_im0);
+    y0i = simde_mm_subs_epi16(y0i,logmax_den_im0);
 
-    _mm_storeu_si128(&stream0_128i_out[i],_mm_unpacklo_epi16(y0r,y0i)); // = [L1(1), L2(1), L1(2), L2(2)]
+    simde_mm_storeu_si128(&stream0_128i_out[i],simde_mm_unpacklo_epi16(y0r,y0i)); // = [L1(1), L2(1), L1(2), L2(2)]
 
     if (i<((length>>1) - 1)) // false if only 2 REs remain
-      _mm_storeu_si128(&stream0_128i_out[i+1],_mm_unpackhi_epi16(y0r,y0i));
-
-#elif defined(__x86_64__)
-
-#endif
+      simde_mm_storeu_si128(&stream0_128i_out[i + 1], simde_mm_unpackhi_epi16(y0r, y0i));
   }
-
-#if defined(__x86_64__) || defined(__i386__)
-  _mm_empty();
-  _m_empty();
-#endif
 }
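A number check on the Q1.16 trick above: ONE_OVER_SQRT_8 = 23170 = round(2^16/sqrt(8)), and simde_mm_mulhi_epi16 keeps the high 16 bits of the 32-bit product, so the multiply alone performs the division by sqrt(8). A standalone self-check (plain C, link with -lm):

#include <math.h>
#include <stdint.h>
#include <stdio.h>

int main(void)
{
  printf("%ld\n", lround(65536.0 / sqrt(8.0)));      /* 23170 */
  int16_t x = 12000;
  int16_t q = (int16_t)(((int32_t)x * 23170) >> 16); /* what mulhi computes */
  printf("%d vs %.1f\n", q, x / sqrt(8.0));          /* 4242 vs 4242.6 */
  return 0;
}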
 
 int dlsch_qpsk_16qam_llr(LTE_DL_FRAME_PARMS *frame_parms,
@@ -1202,16 +1048,6 @@ int dlsch_qpsk_16qam_llr(LTE_DL_FRAME_PARMS *frame_parms,
   return(0);
 }
 
-/*
-#if defined(__x86_64__) || defined(__i386__)
-__m128i ONE_OVER_SQRT_2 __attribute__((aligned(16)));
-__m128i ONE_OVER_SQRT_10 __attribute__((aligned(16)));
-__m128i THREE_OVER_SQRT_10 __attribute__((aligned(16)));
-__m128i ONE_OVER_SQRT_10_Q15 __attribute__((aligned(16)));
-__m128i SQRT_10_OVER_FOUR __attribute__((aligned(16)));
-__m128i ch_mag_int;
-#endif
-*/
 void qpsk_qam16(int16_t *stream0_in,
                 int16_t *stream1_in,
                 int16_t *ch_mag_i,
@@ -1231,29 +1067,16 @@ void qpsk_qam16(int16_t *stream0_in,
     length = number of resource elements
   */
 
-#if defined(__x86_64__) || defined(__i386__)
-  __m128i *rho01_128i = (__m128i *)rho01;
-  __m128i *stream0_128i_in = (__m128i *)stream0_in;
-  __m128i *stream1_128i_in = (__m128i *)stream1_in;
-  __m128i *stream0_128i_out = (__m128i *)stream0_out;
-  __m128i *ch_mag_128i_i    = (__m128i *)ch_mag_i;
-  __m128i ONE_OVER_SQRT_2 = _mm_set1_epi16(23170); // round(1/sqrt(2)*2^15)
-  __m128i ONE_OVER_SQRT_10_Q15 = _mm_set1_epi16(10362); // round(1/sqrt(10)*2^15)
-  __m128i THREE_OVER_SQRT_10 = _mm_set1_epi16(31086); // round(3/sqrt(10)*2^15)
-  __m128i SQRT_10_OVER_FOUR = _mm_set1_epi16(25905); // round(sqrt(10)/4*2^15)
-  __m128i ch_mag_int __attribute__((aligned(16)));
-#elif defined(__arm__) || defined(__aarch64__)
-  int16x8_t *rho01_128i = (int16x8_t *)rho01;
-  int16x8_t *stream0_128i_in = (int16x8_t *)stream0_in;
-  int16x8_t *stream1_128i_in = (int16x8_t *)stream1_in;
-  int16x8_t *stream0_128i_out = (int16x8_t *)stream0_out;
-  int16x8_t *ch_mag_128i_i    = (int16x8_t *)ch_mag_i;
-  int16x8_t ONE_OVER_SQRT_2 = vdupq_n_s16(23170); // round(1/sqrt(2)*2^15)
-  int16x8_t ONE_OVER_SQRT_10_Q15 = vdupq_n_s16(10362); // round(1/sqrt(10)*2^15)
-  int16x8_t THREE_OVER_SQRT_10 = vdupq_n_s16(31086); // round(3/sqrt(10)*2^15)
-  int16x8_t SQRT_10_OVER_FOUR = vdupq_n_s16(25905); // round(sqrt(10)/4*2^15)
-  int16x8_t ch_mag_int __attribute__((aligned(16)));
-#endif
+  simde__m128i *rho01_128i = (simde__m128i *)rho01;
+  simde__m128i *stream0_128i_in = (simde__m128i *)stream0_in;
+  simde__m128i *stream1_128i_in = (simde__m128i *)stream1_in;
+  simde__m128i *stream0_128i_out = (simde__m128i *)stream0_out;
+  simde__m128i *ch_mag_128i_i    = (simde__m128i *)ch_mag_i;
+  simde__m128i ONE_OVER_SQRT_2 = simde_mm_set1_epi16(23170); // round(1/sqrt(2)*2^15)
+  simde__m128i ONE_OVER_SQRT_10_Q15 = simde_mm_set1_epi16(10362); // round(1/sqrt(10)*2^15)
+  simde__m128i THREE_OVER_SQRT_10 = simde_mm_set1_epi16(31086); // round(3/sqrt(10)*2^15)
+  simde__m128i SQRT_10_OVER_FOUR = simde_mm_set1_epi16(25905); // round(sqrt(10)/4*2^15)
+  simde__m128i ch_mag_int;
 
 #ifdef DEBUG_LLR
   print_shorts2("rho01_128i:\n",rho01_128i);
@@ -1264,32 +1087,29 @@ void qpsk_qam16(int16_t *stream0_in,
 
   for (i=0; i<length>>2; i+=2) {
     // in each iteration, we take 8 complex samples
-
-#if defined(__x86_64__) || defined(__i386__)
-
     simde__m128i xmm0 = rho01_128i[i]; // 4 symbols
     simde__m128i xmm1 = rho01_128i[i + 1];
 
     // put (rho_r + rho_i)/2sqrt2 in rho_rpi
     // put (rho_r - rho_i)/2sqrt2 in rho_rmi
-    xmm0 = _mm_shufflelo_epi16(xmm0,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm0 = _mm_shufflehi_epi16(xmm0,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm0 = _mm_shuffle_epi32(xmm0,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm1 = _mm_shufflelo_epi16(xmm1,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm1 = _mm_shufflehi_epi16(xmm1,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm1 = _mm_shuffle_epi32(xmm1,0xd8); //_MM_SHUFFLE(0,2,1,3));
+    xmm0 = simde_mm_shufflelo_epi16(xmm0,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
+    xmm0 = simde_mm_shufflehi_epi16(xmm0,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
+    xmm0 = simde_mm_shuffle_epi32(xmm0,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
+    xmm1 = simde_mm_shufflelo_epi16(xmm1,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
+    xmm1 = simde_mm_shufflehi_epi16(xmm1,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
+    xmm1 = simde_mm_shuffle_epi32(xmm1,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
     //xmm0 = [Re(0,1) Re(2,3) Im(0,1) Im(2,3)]
     //xmm1 = [Re(4,5) Re(6,7) Im(4,5) Im(6,7)]
-    simde__m128i xmm2 = _mm_unpacklo_epi64(xmm0, xmm1); // Re(rho)
-    simde__m128i xmm3 = _mm_unpackhi_epi64(xmm0, xmm1); // Im(rho)
-    simde__m128i rho_rpi = _mm_adds_epi16(xmm2, xmm3); // rho = Re(rho) + Im(rho)
-    simde__m128i rho_rmi = _mm_subs_epi16(xmm2, xmm3); // rho* = Re(rho) - Im(rho)
+    simde__m128i xmm2 = simde_mm_unpacklo_epi64(xmm0, xmm1); // Re(rho)
+    simde__m128i xmm3 = simde_mm_unpackhi_epi64(xmm0, xmm1); // Im(rho)
+    simde__m128i rho_rpi = simde_mm_adds_epi16(xmm2, xmm3); // rho = Re(rho) + Im(rho)
+    simde__m128i rho_rmi = simde_mm_subs_epi16(xmm2, xmm3); // rho* = Re(rho) - Im(rho)
 
     // divide by sqrt(2)
-    rho_rpi = _mm_mulhi_epi16(rho_rpi, ONE_OVER_SQRT_2);
-    rho_rmi = _mm_mulhi_epi16(rho_rmi, ONE_OVER_SQRT_2);
-    rho_rpi = _mm_slli_epi16(rho_rpi,1);
-    rho_rmi = _mm_slli_epi16(rho_rmi,1);
+    rho_rpi = simde_mm_mulhi_epi16(rho_rpi, ONE_OVER_SQRT_2);
+    rho_rmi = simde_mm_mulhi_epi16(rho_rmi, ONE_OVER_SQRT_2);
+    rho_rpi = simde_mm_slli_epi16(rho_rpi,1);
+    rho_rmi = simde_mm_slli_epi16(rho_rmi,1);
 
     // Compute LLR for first bit of stream 0
 
@@ -1297,73 +1117,73 @@ void qpsk_qam16(int16_t *stream0_in,
     xmm0 = stream0_128i_in[i];
     xmm1 = stream0_128i_in[i+1];
 
-    xmm0 = _mm_shufflelo_epi16(xmm0,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm0 = _mm_shufflehi_epi16(xmm0,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm0 = _mm_shuffle_epi32(xmm0,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm1 = _mm_shufflelo_epi16(xmm1,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm1 = _mm_shufflehi_epi16(xmm1,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm1 = _mm_shuffle_epi32(xmm1,0xd8); //_MM_SHUFFLE(0,2,1,3));
+    xmm0 = simde_mm_shufflelo_epi16(xmm0,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
+    xmm0 = simde_mm_shufflehi_epi16(xmm0,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
+    xmm0 = simde_mm_shuffle_epi32(xmm0,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
+    xmm1 = simde_mm_shufflelo_epi16(xmm1,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
+    xmm1 = simde_mm_shufflehi_epi16(xmm1,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
+    xmm1 = simde_mm_shuffle_epi32(xmm1,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
     //xmm0 = [Re(0,1) Re(2,3) Im(0,1) Im(2,3)]
     //xmm1 = [Re(4,5) Re(6,7) Im(4,5) Im(6,7)]
-    simde__m128i y0r = _mm_unpacklo_epi64(xmm0, xmm1); // = [y0r(1),y0r(2),y0r(3),y0r(4)]
-    simde__m128i y0i = _mm_unpackhi_epi64(xmm0, xmm1);
+    simde__m128i y0r = simde_mm_unpacklo_epi64(xmm0, xmm1); // = [y0r(1),y0r(2),y0r(3),y0r(4)]
+    simde__m128i y0i = simde_mm_unpackhi_epi64(xmm0, xmm1);
 
     // divide by sqrt(2)
-    y0r_over2 = _mm_mulhi_epi16(y0r, ONE_OVER_SQRT_2);
-    y0i_over2 = _mm_mulhi_epi16(y0i, ONE_OVER_SQRT_2);
-    y0r_over2  = _mm_slli_epi16(y0r,1);
-    y0i_over2  = _mm_slli_epi16(y0i,1);
+    simde__m128i y0r_over2 = simde_mm_mulhi_epi16(y0r, ONE_OVER_SQRT_2);
+    simde__m128i y0i_over2 = simde_mm_mulhi_epi16(y0i, ONE_OVER_SQRT_2);
+    y0r_over2 = simde_mm_slli_epi16(y0r_over2, 1);
+    y0i_over2 = simde_mm_slli_epi16(y0i_over2, 1);
 
-    simde__m128i y0_p_1_1 = _mm_adds_epi16(y0r_over2, y0i_over2);
-    simde__m128i y0_m_1_1 = _mm_subs_epi16(y0r_over2, y0i_over2);
+    simde__m128i y0_p_1_1 = simde_mm_adds_epi16(y0r_over2, y0i_over2);
+    simde__m128i y0_m_1_1 = simde_mm_subs_epi16(y0r_over2, y0i_over2);
 
     // Compute real and imaginary parts of MF output for stream 1
     xmm0 = stream1_128i_in[i];
     xmm1 = stream1_128i_in[i+1];
 
-    xmm0 = _mm_shufflelo_epi16(xmm0,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm0 = _mm_shufflehi_epi16(xmm0,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm0 = _mm_shuffle_epi32(xmm0,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm1 = _mm_shufflelo_epi16(xmm1,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm1 = _mm_shufflehi_epi16(xmm1,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm1 = _mm_shuffle_epi32(xmm1,0xd8); //_MM_SHUFFLE(0,2,1,3));
+    xmm0 = simde_mm_shufflelo_epi16(xmm0,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
+    xmm0 = simde_mm_shufflehi_epi16(xmm0,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
+    xmm0 = simde_mm_shuffle_epi32(xmm0,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
+    xmm1 = simde_mm_shufflelo_epi16(xmm1,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
+    xmm1 = simde_mm_shufflehi_epi16(xmm1,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
+    xmm1 = simde_mm_shuffle_epi32(xmm1,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
     //xmm0 = [Re(0,1) Re(2,3) Im(0,1) Im(2,3)]
     //xmm1 = [Re(4,5) Re(6,7) Im(4,5) Im(6,7)]
-    simde__m128i y1r = _mm_unpacklo_epi64(xmm0, xmm1); //[y1r(1),y1r(2),y1r(3),y1r(4)]
-    simde__m128i y1i = _mm_unpackhi_epi64(xmm0, xmm1); //[y1i(1),y1i(2),y1i(3),y1i(4)]
+    simde__m128i y1r = simde_mm_unpacklo_epi64(xmm0, xmm1); //[y1r(1),y1r(2),y1r(3),y1r(4)]
+    simde__m128i y1i = simde_mm_unpackhi_epi64(xmm0, xmm1); //[y1i(1),y1i(2),y1i(3),y1i(4)]
 
-    xmm0 = _mm_setzero_si128(); // ZERO
+    xmm0 = simde_mm_setzero_si128(); // ZERO
 
     // compute psi
-    xmm3 = _mm_subs_epi16(y1r,rho_rpi);
-    simde__m128i psi_r_p1_p1 = _mm_abs_epi16(xmm3);
-    xmm3 = _mm_subs_epi16(y1i,rho_rmi);
-    simde__m128i psi_i_p1_p1 = _mm_abs_epi16(xmm3);
-    xmm3 = _mm_subs_epi16(y1r,rho_rmi);
-    simde__m128i psi_r_p1_m1 = _mm_abs_epi16(xmm3);
-    xmm3 = _mm_adds_epi16(y1i,rho_rpi);
-    simde__m128i psi_i_p1_m1 = _mm_abs_epi16(xmm3);
-    xmm3 = _mm_adds_epi16(y1r,rho_rmi);
-    simde__m128i psi_r_m1_p1 = _mm_abs_epi16(xmm3);
-    xmm3 = _mm_subs_epi16(y1i,rho_rpi);
-    simde__m128i psi_i_m1_p1 = _mm_abs_epi16(xmm3);
-    xmm3 = _mm_adds_epi16(y1r,rho_rpi);
-    simde__m128i psi_r_m1_m1 = _mm_abs_epi16(xmm3);
-    xmm3 = _mm_adds_epi16(y1i,rho_rmi);
-    simde__m128i psi_i_m1_m1 = _mm_abs_epi16(xmm3);
+    xmm3 = simde_mm_subs_epi16(y1r, rho_rpi);
+    simde__m128i psi_r_p1_p1 = simde_mm_abs_epi16(xmm3);
+    xmm3 = simde_mm_subs_epi16(y1i, rho_rmi);
+    simde__m128i psi_i_p1_p1 = simde_mm_abs_epi16(xmm3);
+    xmm3 = simde_mm_subs_epi16(y1r, rho_rmi);
+    simde__m128i psi_r_p1_m1 = simde_mm_abs_epi16(xmm3);
+    xmm3 = simde_mm_adds_epi16(y1i, rho_rpi);
+    simde__m128i psi_i_p1_m1 = simde_mm_abs_epi16(xmm3);
+    xmm3 = simde_mm_adds_epi16(y1r, rho_rmi);
+    simde__m128i psi_r_m1_p1 = simde_mm_abs_epi16(xmm3);
+    xmm3 = simde_mm_subs_epi16(y1i, rho_rpi);
+    simde__m128i psi_i_m1_p1 = simde_mm_abs_epi16(xmm3);
+    xmm3 = simde_mm_adds_epi16(y1r, rho_rpi);
+    simde__m128i psi_r_m1_m1 = simde_mm_abs_epi16(xmm3);
+    xmm3 = simde_mm_adds_epi16(y1i, rho_rmi);
+    simde__m128i psi_i_m1_m1 = simde_mm_abs_epi16(xmm3);
 
     // Rearrange interfering channel magnitudes
     xmm2 = ch_mag_128i_i[i];
     xmm3 = ch_mag_128i_i[i+1];
 
-    xmm2 = _mm_shufflelo_epi16(xmm2,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm2 = _mm_shufflehi_epi16(xmm2,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm2 = _mm_shuffle_epi32(xmm2,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm3 = _mm_shufflelo_epi16(xmm3,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm3 = _mm_shufflehi_epi16(xmm3,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm3 = _mm_shuffle_epi32(xmm3,0xd8); //_MM_SHUFFLE(0,2,1,3));
+    xmm2 = simde_mm_shufflelo_epi16(xmm2,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
+    xmm2 = simde_mm_shufflehi_epi16(xmm2,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
+    xmm2 = simde_mm_shuffle_epi32(xmm2,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
+    xmm3 = simde_mm_shufflelo_epi16(xmm3,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
+    xmm3 = simde_mm_shufflehi_epi16(xmm3,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
+    xmm3 = simde_mm_shuffle_epi32(xmm3,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
 
-    ch_mag_int = _mm_unpacklo_epi64(xmm2,xmm3);
+    ch_mag_int = simde_mm_unpacklo_epi64(xmm2, xmm3);
     simde__m128i tmp_result, tmp_result2;
     // calculate optimal interference amplitudes
     interference_abs_epi16(psi_r_p1_p1 , ch_mag_int, a_r_p1_p1 , ONE_OVER_SQRT_10_Q15, THREE_OVER_SQRT_10);
@@ -1388,44 +1208,35 @@ void qpsk_qam16(int16_t *stream0_in,
     square_a_epi16(a_r_m1_m1, a_i_m1_m1, ch_mag_int, SQRT_10_OVER_FOUR, a_sq_m1_m1);
 
     // Computing Metrics
-    xmm0 = _mm_subs_epi16(psi_a_p1_p1, a_sq_p1_p1);
-    simde__m128i bit_met_p1_p1 = _mm_adds_epi16(xmm0, y0_p_1_1);
+    xmm0 = simde_mm_subs_epi16(psi_a_p1_p1, a_sq_p1_p1);
+    simde__m128i bit_met_p1_p1 = simde_mm_adds_epi16(xmm0, y0_p_1_1);
 
-    xmm0 = _mm_subs_epi16(psi_a_p1_m1, a_sq_p1_m1);
-    simde__m128i bit_met_p1_m1 = _mm_adds_epi16(xmm0, y0_m_1_1);
+    xmm0 = simde_mm_subs_epi16(psi_a_p1_m1, a_sq_p1_m1);
+    simde__m128i bit_met_p1_m1 = simde_mm_adds_epi16(xmm0, y0_m_1_1);
 
-    xmm0 = _mm_subs_epi16(psi_a_m1_p1, a_sq_m1_p1);
-    simde__m128i bit_met_m1_p1 = _mm_subs_epi16(xmm0, y0_m_1_1);
+    xmm0 = simde_mm_subs_epi16(psi_a_m1_p1, a_sq_m1_p1);
+    simde__m128i bit_met_m1_p1 = simde_mm_subs_epi16(xmm0, y0_m_1_1);
 
-    xmm0 = _mm_subs_epi16(psi_a_m1_m1, a_sq_m1_m1);
-    simde__m128i bit_met_m1_m1 = _mm_subs_epi16(xmm0, y0_p_1_1);
+    xmm0 = simde_mm_subs_epi16(psi_a_m1_m1, a_sq_m1_m1);
+    simde__m128i bit_met_m1_m1 = simde_mm_subs_epi16(xmm0, y0_p_1_1);
 
     // MSB
-    simde__m128i logmax_num_re0 = _mm_max_epi16(bit_met_p1_p1, bit_met_p1_m1); // bit=0
-    simde__m128i logmax_den_re0 = _mm_max_epi16(bit_met_m1_p1, bit_met_m1_m1); // bit=1
+    simde__m128i logmax_num_re0 = simde_mm_max_epi16(bit_met_p1_p1, bit_met_p1_m1); // bit=0
+    simde__m128i logmax_den_re0 = simde_mm_max_epi16(bit_met_m1_p1, bit_met_m1_m1); // bit=1
 
-    y0r = _mm_subs_epi16(logmax_num_re0,logmax_den_re0);
+    y0r = simde_mm_subs_epi16(logmax_num_re0,logmax_den_re0);
 
     // LSB
-    simde__m128i logmax_num_im0 = _mm_max_epi16(bit_met_p1_p1, bit_met_m1_p1); // bit=0
-    simde__m128i logmax_den_im0 = _mm_max_epi16(bit_met_p1_m1, bit_met_m1_m1); // bit=1
+    simde__m128i logmax_num_im0 = simde_mm_max_epi16(bit_met_p1_p1, bit_met_m1_p1); // bit=0
+    simde__m128i logmax_den_im0 = simde_mm_max_epi16(bit_met_p1_m1, bit_met_m1_m1); // bit=1
 
-    y0i = _mm_subs_epi16(logmax_num_im0,logmax_den_im0);
+    y0i = simde_mm_subs_epi16(logmax_num_im0,logmax_den_im0);
 
-    stream0_128i_out[i] = _mm_unpacklo_epi16(y0r,y0i); // = [L1(1), L2(1), L1(2), L2(2)]
+    stream0_128i_out[i] = simde_mm_unpacklo_epi16(y0r,y0i); // = [L1(1), L2(1), L1(2), L2(2)]
 
     if (i<((length>>1) - 1)) // false if only 2 REs remain
-      stream0_128i_out[i+1] = _mm_unpackhi_epi16(y0r,y0i);
-
-#elif defined(__arm__) || defined(__aarch64__)
-
-#endif
+      stream0_128i_out[i+1] = simde_mm_unpackhi_epi16(y0r,y0i);
   }
-
-#if defined(__x86_64__) || defined(__i386__)
-  _mm_empty();
-  _m_empty();
-#endif
 }
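The Q15 constant in this function is the same 23170, but read as round(2^15/sqrt(2)): the high half of the product is then x/(2*sqrt(2)), and the following slli by 1 restores x/sqrt(2). That is why the shift must be applied to the mulhi result, as in the rho_rpi/rho_rmi lines above. The scalar equivalent of the pair (a sketch):

/* x / sqrt(2) in Q15: mulhi by 23170, then shift the result left by 1 */
static inline int16_t div_sqrt2_q15(int16_t x)
{
  return (int16_t)((((int32_t)x * 23170) >> 16) << 1);
}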
 
 int dlsch_qpsk_64qam_llr(LTE_DL_FRAME_PARMS *frame_parms,
@@ -1483,17 +1294,6 @@ int dlsch_qpsk_64qam_llr(LTE_DL_FRAME_PARMS *frame_parms,
 
   return(0);
 }
-/*
-__m128i ONE_OVER_SQRT_2_42 __attribute__((aligned(16)));
-__m128i THREE_OVER_SQRT_2_42 __attribute__((aligned(16)));
-__m128i FIVE_OVER_SQRT_2_42 __attribute__((aligned(16)));
-__m128i SEVEN_OVER_SQRT_2_42 __attribute__((aligned(16)));
-
-__m128i ch_mag_int_with_sigma2 __attribute__((aligned(16)));
-__m128i two_ch_mag_int_with_sigma2 __attribute__((aligned(16)));
-__m128i three_ch_mag_int_with_sigma2 __attribute__((aligned(16)));
-__m128i SQRT_42_OVER_FOUR __attribute__((aligned(16)));
-*/
 void qpsk_qam64(short *stream0_in,
                 short *stream1_in,
                 short *ch_mag_i,
@@ -1514,25 +1314,22 @@ void qpsk_qam64(short *stream0_in,
     length = number of resource elements
   */
 
-#if defined(__x86_64__) || defined(__i386__)
-  __m128i *rho01_128i = (__m128i *)rho01;
-  __m128i *stream0_128i_in = (__m128i *)stream0_in;
-  __m128i *stream1_128i_in = (__m128i *)stream1_in;
-  __m128i *stream0_128i_out = (__m128i *)stream0_out;
-  __m128i *ch_mag_128i_i    = (__m128i *)ch_mag_i;
-  __m128i ONE_OVER_SQRT_2 = _mm_set1_epi16(23170); // round(1/sqrt(2)*2^15)
-  __m128i ONE_OVER_SQRT_2_42 = _mm_set1_epi16(3575); // round(1/sqrt(2*42)*2^15)
-  __m128i THREE_OVER_SQRT_2_42 = _mm_set1_epi16(10726); // round(3/sqrt(2*42)*2^15)
-  __m128i FIVE_OVER_SQRT_2_42 = _mm_set1_epi16(17876); // round(5/sqrt(2*42)*2^15)
-  __m128i SEVEN_OVER_SQRT_2_42 = _mm_set1_epi16(25027); // round(7/sqrt(2*42)*2^15)
-  __m128i SQRT_42_OVER_FOUR = _mm_set1_epi16(13272); // round(sqrt(42)/4*2^13), Q3.1
-  __m128i ch_mag_int;
-  __m128i ch_mag_int_with_sigma2;
-  __m128i two_ch_mag_int_with_sigma2;
-  __m128i three_ch_mag_int_with_sigma2;
-#elif defined(__arm__) || defined(__aarch64__)
+  simde__m128i *rho01_128i = (simde__m128i *)rho01;
+  simde__m128i *stream0_128i_in = (simde__m128i *)stream0_in;
+  simde__m128i *stream1_128i_in = (simde__m128i *)stream1_in;
+  simde__m128i *stream0_128i_out = (simde__m128i *)stream0_out;
+  simde__m128i *ch_mag_128i_i    = (simde__m128i *)ch_mag_i;
+  simde__m128i ONE_OVER_SQRT_2 = simde_mm_set1_epi16(23170); // round(1/sqrt(2)*2^15)
+  simde__m128i ONE_OVER_SQRT_2_42 = simde_mm_set1_epi16(3575); // round(1/sqrt(2*42)*2^15)
+  simde__m128i THREE_OVER_SQRT_2_42 = simde_mm_set1_epi16(10726); // round(3/sqrt(2*42)*2^15)
+  simde__m128i FIVE_OVER_SQRT_2_42 = simde_mm_set1_epi16(17876); // round(5/sqrt(2*42)*2^15)
+  simde__m128i SEVEN_OVER_SQRT_2_42 = simde_mm_set1_epi16(25027); // round(7/sqrt(2*42)*2^15)
+  simde__m128i SQRT_42_OVER_FOUR = simde_mm_set1_epi16(13272); // round(sqrt(42)/4*2^13), Q3.1
+  simde__m128i ch_mag_int;
+  simde__m128i ch_mag_int_with_sigma2;
+  simde__m128i two_ch_mag_int_with_sigma2;
+  simde__m128i three_ch_mag_int_with_sigma2;
 
-#endif
 
 #ifdef DEBUG_LLR
   print_shorts2("rho01_128i:\n",rho01_128i);
@@ -1544,31 +1341,30 @@ void qpsk_qam64(short *stream0_in,
   for (i=0; i<length>>2; i+=2) {
     // in each iteration, we take 8 complex samples
 
-#if defined(__x86_64__) || defined(__i386__)
 
     simde__m128i xmm0 = rho01_128i[i]; // 4 symbols
     simde__m128i xmm1 = rho01_128i[i + 1];
 
     // put (rho_r + rho_i)/sqrt2 in rho_rpi
     // put (rho_r - rho_i)/sqrt2 in rho_rmi
-    xmm0 = _mm_shufflelo_epi16(xmm0,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm0 = _mm_shufflehi_epi16(xmm0,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm0 = _mm_shuffle_epi32(xmm0,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm1 = _mm_shufflelo_epi16(xmm1,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm1 = _mm_shufflehi_epi16(xmm1,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm1 = _mm_shuffle_epi32(xmm1,0xd8); //_MM_SHUFFLE(0,2,1,3));
+    xmm0 = simde_mm_shufflelo_epi16(xmm0,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
+    xmm0 = simde_mm_shufflehi_epi16(xmm0,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
+    xmm0 = simde_mm_shuffle_epi32(xmm0,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
+    xmm1 = simde_mm_shufflelo_epi16(xmm1,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
+    xmm1 = simde_mm_shufflehi_epi16(xmm1,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
+    xmm1 = simde_mm_shuffle_epi32(xmm1,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
     //xmm0 = [Re(0,1) Re(2,3) Im(0,1) Im(2,3)]
     //xmm1 = [Re(4,5) Re(6,7) Im(4,5) Im(6,7)]
-    simde__m128i xmm2 = _mm_unpacklo_epi64(xmm0, xmm1); // Re(rho)
-    simde__m128i xmm3 = _mm_unpackhi_epi64(xmm0, xmm1); // Im(rho)
-    simde__m128i rho_rpi = _mm_adds_epi16(xmm2, xmm3); // rho = Re(rho) + Im(rho)
-    simde__m128i rho_rmi = _mm_subs_epi16(xmm2, xmm3); // rho* = Re(rho) - Im(rho)
+    simde__m128i xmm2 = simde_mm_unpacklo_epi64(xmm0, xmm1); // Re(rho)
+    simde__m128i xmm3 = simde_mm_unpackhi_epi64(xmm0, xmm1); // Im(rho)
+    simde__m128i rho_rpi = simde_mm_adds_epi16(xmm2, xmm3); // rho = Re(rho) + Im(rho)
+    simde__m128i rho_rmi = simde_mm_subs_epi16(xmm2, xmm3); // rho* = Re(rho) - Im(rho)
 
     // divide by sqrt(2)
-    rho_rpi = _mm_mulhi_epi16(rho_rpi, ONE_OVER_SQRT_2);
-    rho_rmi = _mm_mulhi_epi16(rho_rmi, ONE_OVER_SQRT_2);
-    rho_rpi = _mm_slli_epi16(rho_rpi,1);
-    rho_rmi = _mm_slli_epi16(rho_rmi,1);
+    rho_rpi = simde_mm_mulhi_epi16(rho_rpi, ONE_OVER_SQRT_2);
+    rho_rmi = simde_mm_mulhi_epi16(rho_rmi, ONE_OVER_SQRT_2);
+    rho_rpi = simde_mm_slli_epi16(rho_rpi,1);
+    rho_rmi = simde_mm_slli_epi16(rho_rmi,1);
 
     // Compute LLR for first bit of stream 0
 
@@ -1576,76 +1372,76 @@ void qpsk_qam64(short *stream0_in,
     xmm0 = stream0_128i_in[i];
     xmm1 = stream0_128i_in[i+1];
 
-    xmm0 = _mm_shufflelo_epi16(xmm0,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm0 = _mm_shufflehi_epi16(xmm0,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm0 = _mm_shuffle_epi32(xmm0,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm1 = _mm_shufflelo_epi16(xmm1,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm1 = _mm_shufflehi_epi16(xmm1,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm1 = _mm_shuffle_epi32(xmm1,0xd8); //_MM_SHUFFLE(0,2,1,3));
+    xmm0 = simde_mm_shufflelo_epi16(xmm0,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
+    xmm0 = simde_mm_shufflehi_epi16(xmm0,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
+    xmm0 = simde_mm_shuffle_epi32(xmm0,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
+    xmm1 = simde_mm_shufflelo_epi16(xmm1,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
+    xmm1 = simde_mm_shufflehi_epi16(xmm1,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
+    xmm1 = simde_mm_shuffle_epi32(xmm1,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
     //xmm0 = [Re(0,1) Re(2,3) Im(0,1) Im(2,3)]
     //xmm1 = [Re(4,5) Re(6,7) Im(4,5) Im(6,7)]
-    simde__m128i y0r = _mm_unpacklo_epi64(xmm0, xmm1); // = [y0r(1),y0r(2),y0r(3),y0r(4)]
-    simde__m128i y0i = _mm_unpackhi_epi64(xmm0, xmm1);
+    simde__m128i y0r = simde_mm_unpacklo_epi64(xmm0, xmm1); // = [y0r(1),y0r(2),y0r(3),y0r(4)]
+    simde__m128i y0i = simde_mm_unpackhi_epi64(xmm0, xmm1);
 
     // divide by sqrt(2)
-    y0r_over2 = _mm_mulhi_epi16(y0r, ONE_OVER_SQRT_2);
-    y0i_over2 = _mm_mulhi_epi16(y0i, ONE_OVER_SQRT_2);
-    y0r_over2  = _mm_slli_epi16(y0r,1);
-    y0i_over2  = _mm_slli_epi16(y0i,1);
+    simde__m128i y0r_over2 = simde_mm_mulhi_epi16(y0r, ONE_OVER_SQRT_2);
+    simde__m128i y0i_over2 = simde_mm_mulhi_epi16(y0i, ONE_OVER_SQRT_2);
+    y0r_over2 = simde_mm_slli_epi16(y0r_over2, 1);
+    y0i_over2 = simde_mm_slli_epi16(y0i_over2, 1);
 
-    simde__m128i y0_p_1_1 = _mm_adds_epi16(y0r_over2, y0i_over2);
-    simde__m128i y0_m_1_1 = _mm_subs_epi16(y0r_over2, y0i_over2);
+    simde__m128i y0_p_1_1 = simde_mm_adds_epi16(y0r_over2, y0i_over2);
+    simde__m128i y0_m_1_1 = simde_mm_subs_epi16(y0r_over2, y0i_over2);
 
     // Compute real and imaginary parts of MF output for stream 1
     xmm0 = stream1_128i_in[i];
     xmm1 = stream1_128i_in[i+1];
 
-    xmm0 = _mm_shufflelo_epi16(xmm0,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm0 = _mm_shufflehi_epi16(xmm0,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm0 = _mm_shuffle_epi32(xmm0,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm1 = _mm_shufflelo_epi16(xmm1,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm1 = _mm_shufflehi_epi16(xmm1,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm1 = _mm_shuffle_epi32(xmm1,0xd8); //_MM_SHUFFLE(0,2,1,3));
+    xmm0 = simde_mm_shufflelo_epi16(xmm0,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
+    xmm0 = simde_mm_shufflehi_epi16(xmm0,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
+    xmm0 = simde_mm_shuffle_epi32(xmm0,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
+    xmm1 = simde_mm_shufflelo_epi16(xmm1,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
+    xmm1 = simde_mm_shufflehi_epi16(xmm1,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
+    xmm1 = simde_mm_shuffle_epi32(xmm1,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
     //xmm0 = [Re(0,1) Re(2,3) Im(0,1) Im(2,3)]
     //xmm1 = [Re(4,5) Re(6,7) Im(4,5) Im(6,7)]
-    simde__m128i y1r = _mm_unpacklo_epi64(xmm0, xmm1); //[y1r(1),y1r(2),y1r(3),y1r(4)]
-    simde__m128i y1i = _mm_unpackhi_epi64(xmm0, xmm1); //[y1i(1),y1i(2),y1i(3),y1i(4)]
+    simde__m128i y1r = simde_mm_unpacklo_epi64(xmm0, xmm1); //[y1r(1),y1r(2),y1r(3),y1r(4)]
+    simde__m128i y1i = simde_mm_unpackhi_epi64(xmm0, xmm1); //[y1i(1),y1i(2),y1i(3),y1i(4)]
 
-    xmm0 = _mm_setzero_si128(); // ZERO
+    xmm0 = simde_mm_setzero_si128(); // ZERO
 
     // compute psi
-    xmm3 = _mm_subs_epi16(y1r,rho_rpi);
-    simde__m128i psi_r_p1_p1 = _mm_abs_epi16(xmm3);
-    xmm3 = _mm_subs_epi16(y1i,rho_rmi);
-    simde__m128i psi_i_p1_p1 = _mm_abs_epi16(xmm3);
-    xmm3 = _mm_subs_epi16(y1r,rho_rmi);
-    simde__m128i psi_r_p1_m1 = _mm_abs_epi16(xmm3);
-    xmm3 = _mm_adds_epi16(y1i,rho_rpi);
-    simde__m128i psi_i_p1_m1 = _mm_abs_epi16(xmm3);
-    xmm3 = _mm_adds_epi16(y1r,rho_rmi);
-    simde__m128i psi_r_m1_p1 = _mm_abs_epi16(xmm3);
-    xmm3 = _mm_subs_epi16(y1i,rho_rpi);
-    simde__m128i psi_i_m1_p1 = _mm_abs_epi16(xmm3);
-    xmm3 = _mm_adds_epi16(y1r,rho_rpi);
-    simde__m128i psi_r_m1_m1 = _mm_abs_epi16(xmm3);
-    xmm3 = _mm_adds_epi16(y1i,rho_rmi);
-    simde__m128i psi_i_m1_m1 = _mm_abs_epi16(xmm3);
+    xmm3 = simde_mm_subs_epi16(y1r, rho_rpi);
+    simde__m128i psi_r_p1_p1 = simde_mm_abs_epi16(xmm3);
+    xmm3 = simde_mm_subs_epi16(y1i, rho_rmi);
+    simde__m128i psi_i_p1_p1 = simde_mm_abs_epi16(xmm3);
+    xmm3 = simde_mm_subs_epi16(y1r, rho_rmi);
+    simde__m128i psi_r_p1_m1 = simde_mm_abs_epi16(xmm3);
+    xmm3 = simde_mm_adds_epi16(y1i, rho_rpi);
+    simde__m128i psi_i_p1_m1 = simde_mm_abs_epi16(xmm3);
+    xmm3 = simde_mm_adds_epi16(y1r, rho_rmi);
+    simde__m128i psi_r_m1_p1 = simde_mm_abs_epi16(xmm3);
+    xmm3 = simde_mm_subs_epi16(y1i, rho_rpi);
+    simde__m128i psi_i_m1_p1 = simde_mm_abs_epi16(xmm3);
+    xmm3 = simde_mm_adds_epi16(y1r, rho_rpi);
+    simde__m128i psi_r_m1_m1 = simde_mm_abs_epi16(xmm3);
+    xmm3 = simde_mm_adds_epi16(y1i, rho_rmi);
+    simde__m128i psi_i_m1_m1 = simde_mm_abs_epi16(xmm3);
 
     // Rearrange interfering channel magnitudes
     xmm2 = ch_mag_128i_i[i];
     xmm3 = ch_mag_128i_i[i+1];
 
-    xmm2 = _mm_shufflelo_epi16(xmm2,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm2 = _mm_shufflehi_epi16(xmm2,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm2 = _mm_shuffle_epi32(xmm2,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm3 = _mm_shufflelo_epi16(xmm3,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm3 = _mm_shufflehi_epi16(xmm3,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm3 = _mm_shuffle_epi32(xmm3,0xd8); //_MM_SHUFFLE(0,2,1,3));
+    xmm2 = simde_mm_shufflelo_epi16(xmm2,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
+    xmm2 = simde_mm_shufflehi_epi16(xmm2,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
+    xmm2 = simde_mm_shuffle_epi32(xmm2,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
+    xmm3 = simde_mm_shufflelo_epi16(xmm3,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
+    xmm3 = simde_mm_shufflehi_epi16(xmm3,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
+    xmm3 = simde_mm_shuffle_epi32(xmm3,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
 
-    ch_mag_int = _mm_unpacklo_epi64(xmm2,xmm3);
-    ch_mag_int_with_sigma2       = _mm_srai_epi16(ch_mag_int, 1); // *2
+    ch_mag_int = simde_mm_unpacklo_epi64(xmm2,xmm3);
+    ch_mag_int_with_sigma2       = simde_mm_srai_epi16(ch_mag_int, 1); // *2
     two_ch_mag_int_with_sigma2   = ch_mag_int; // *4
-    three_ch_mag_int_with_sigma2 = _mm_adds_epi16(ch_mag_int_with_sigma2, two_ch_mag_int_with_sigma2); // *6
+    three_ch_mag_int_with_sigma2 = simde_mm_adds_epi16(ch_mag_int_with_sigma2, two_ch_mag_int_with_sigma2); // *6
     simde__m128i tmp_result, tmp_result2, tmp_result3, tmp_result4;
     interference_abs_64qam_epi16(psi_r_p1_p1, ch_mag_int_with_sigma2, two_ch_mag_int_with_sigma2, three_ch_mag_int_with_sigma2, a_r_p1_p1, ONE_OVER_SQRT_2_42, THREE_OVER_SQRT_2_42, FIVE_OVER_SQRT_2_42,
                                  SEVEN_OVER_SQRT_2_42);
@@ -1671,14 +1467,14 @@ void qpsk_qam64(short *stream0_in,
     prodsum_psi_a_epi16(psi_r_m1_m1, a_r_m1_m1, psi_i_m1_m1, a_i_m1_m1, psi_a_m1_m1);
 
     // Multiply by sqrt(2)
-    psi_a_p1_p1 = _mm_mulhi_epi16(psi_a_p1_p1, ONE_OVER_SQRT_2);
-    psi_a_p1_p1 = _mm_slli_epi16(psi_a_p1_p1, 2);
-    psi_a_p1_m1 = _mm_mulhi_epi16(psi_a_p1_m1, ONE_OVER_SQRT_2);
-    psi_a_p1_m1 = _mm_slli_epi16(psi_a_p1_m1, 2);
-    psi_a_m1_p1 = _mm_mulhi_epi16(psi_a_m1_p1, ONE_OVER_SQRT_2);
-    psi_a_m1_p1 = _mm_slli_epi16(psi_a_m1_p1, 2);
-    psi_a_m1_m1 = _mm_mulhi_epi16(psi_a_m1_m1, ONE_OVER_SQRT_2);
-    psi_a_m1_m1 = _mm_slli_epi16(psi_a_m1_m1, 2);
+    psi_a_p1_p1 = simde_mm_mulhi_epi16(psi_a_p1_p1, ONE_OVER_SQRT_2);
+    psi_a_p1_p1 = simde_mm_slli_epi16(psi_a_p1_p1, 2);
+    psi_a_p1_m1 = simde_mm_mulhi_epi16(psi_a_p1_m1, ONE_OVER_SQRT_2);
+    psi_a_p1_m1 = simde_mm_slli_epi16(psi_a_p1_m1, 2);
+    psi_a_m1_p1 = simde_mm_mulhi_epi16(psi_a_m1_p1, ONE_OVER_SQRT_2);
+    psi_a_m1_p1 = simde_mm_slli_epi16(psi_a_m1_p1, 2);
+    psi_a_m1_m1 = simde_mm_mulhi_epi16(psi_a_m1_m1, ONE_OVER_SQRT_2);
+    psi_a_m1_m1 = simde_mm_slli_epi16(psi_a_m1_m1, 2);
 
     square_a_64qam_epi16(a_r_p1_p1, a_i_p1_p1, ch_mag_int, SQRT_42_OVER_FOUR, a_sq_p1_p1);
     square_a_64qam_epi16(a_r_p1_m1, a_i_p1_m1, ch_mag_int, SQRT_42_OVER_FOUR, a_sq_p1_m1);
@@ -1686,44 +1482,35 @@ void qpsk_qam64(short *stream0_in,
     square_a_64qam_epi16(a_r_m1_m1, a_i_m1_m1, ch_mag_int, SQRT_42_OVER_FOUR, a_sq_m1_m1);
 
     // Computing Metrics
-    xmm0 = _mm_subs_epi16(psi_a_p1_p1, a_sq_p1_p1);
-    simde__m128i bit_met_p1_p1 = _mm_adds_epi16(xmm0, y0_p_1_1);
+    xmm0 = simde_mm_subs_epi16(psi_a_p1_p1, a_sq_p1_p1);
+    simde__m128i bit_met_p1_p1 = simde_mm_adds_epi16(xmm0, y0_p_1_1);
 
-    xmm0 = _mm_subs_epi16(psi_a_p1_m1, a_sq_p1_m1);
-    simde__m128i bit_met_p1_m1 = _mm_adds_epi16(xmm0, y0_m_1_1);
+    xmm0 = simde_mm_subs_epi16(psi_a_p1_m1, a_sq_p1_m1);
+    simde__m128i bit_met_p1_m1 = simde_mm_adds_epi16(xmm0, y0_m_1_1);
 
-    xmm0 = _mm_subs_epi16(psi_a_m1_p1, a_sq_m1_p1);
-    simde__m128i bit_met_m1_p1 = _mm_subs_epi16(xmm0, y0_m_1_1);
+    xmm0 = simde_mm_subs_epi16(psi_a_m1_p1, a_sq_m1_p1);
+    simde__m128i bit_met_m1_p1 = simde_mm_subs_epi16(xmm0, y0_m_1_1);
 
-    xmm0 = _mm_subs_epi16(psi_a_m1_m1, a_sq_m1_m1);
-    simde__m128i bit_met_m1_m1 = _mm_subs_epi16(xmm0, y0_p_1_1);
+    xmm0 = simde_mm_subs_epi16(psi_a_m1_m1, a_sq_m1_m1);
+    simde__m128i bit_met_m1_m1 = simde_mm_subs_epi16(xmm0, y0_p_1_1);
 
     // MSB
-    simde__m128i logmax_num_re0 = _mm_max_epi16(bit_met_p1_p1, bit_met_p1_m1); // bit=0
-    simde__m128i logmax_den_re0 = _mm_max_epi16(bit_met_m1_p1, bit_met_m1_m1); // bit=1
+    simde__m128i logmax_num_re0 = simde_mm_max_epi16(bit_met_p1_p1, bit_met_p1_m1); // bit=0
+    simde__m128i logmax_den_re0 = simde_mm_max_epi16(bit_met_m1_p1, bit_met_m1_m1); // bit=1
 
-    y0r = _mm_subs_epi16(logmax_num_re0,logmax_den_re0);
+    y0r = simde_mm_subs_epi16(logmax_num_re0,logmax_den_re0);
 
     // LSB
-    simde__m128i logmax_num_im0 = _mm_max_epi16(bit_met_p1_p1, bit_met_m1_p1); // bit=0
-    simde__m128i logmax_den_im0 = _mm_max_epi16(bit_met_p1_m1, bit_met_m1_m1); // bit=1
+    simde__m128i logmax_num_im0 = simde_mm_max_epi16(bit_met_p1_p1, bit_met_m1_p1); // bit=0
+    simde__m128i logmax_den_im0 = simde_mm_max_epi16(bit_met_p1_m1, bit_met_m1_m1); // bit=1
 
-    y0i = _mm_subs_epi16(logmax_num_im0,logmax_den_im0);
+    y0i = simde_mm_subs_epi16(logmax_num_im0,logmax_den_im0);
 
-    stream0_128i_out[i] = _mm_unpacklo_epi16(y0r,y0i); // = [L1(1), L2(1), L1(2), L2(2)]
+    stream0_128i_out[i] = simde_mm_unpacklo_epi16(y0r,y0i); // = [L1(1), L2(1), L1(2), L2(2)]
 
     if (i<((length>>1) - 1)) // false if only 2 REs remain
-      stream0_128i_out[i+1] = _mm_unpackhi_epi16(y0r,y0i);
-
-#elif defined(__arm__) || defined(__aarch64__)
-
-#endif
+      stream0_128i_out[i+1] = simde_mm_unpackhi_epi16(y0r,y0i);
   }
-
-#if defined(__x86_64__) || defined(__i386__)
-  _mm_empty();
-  _m_empty();
-#endif
 }
 
 
@@ -1731,21 +1518,6 @@ void qpsk_qam64(short *stream0_in,
 // 16-QAM
 //----------------------------------------------------------------------------------------------
 
-/*
-__m128i ONE_OVER_TWO_SQRT_10 __attribute__((aligned(16)));
-__m128i NINE_OVER_TWO_SQRT_10 __attribute__((aligned(16)));
-
-__m128i  y0r_over_sqrt10 __attribute__ ((aligned(16)));
-__m128i  y0i_over_sqrt10 __attribute__ ((aligned(16)));
-__m128i  y0r_three_over_sqrt10 __attribute__ ((aligned(16)));
-__m128i  y0i_three_over_sqrt10 __attribute__ ((aligned(16)));
-
-__m128i ch_mag_des __attribute__((aligned(16)));
-__m128i ch_mag_over_10 __attribute__ ((aligned(16)));
-__m128i ch_mag_over_2 __attribute__ ((aligned(16)));
-__m128i ch_mag_9_over_10 __attribute__ ((aligned(16)));
-*/
-
 void qam16_qpsk(short *stream0_in,
                 short *stream1_in,
                 short *ch_mag,
@@ -1770,30 +1542,26 @@ void qam16_qpsk(short *stream0_in,
     stream0_out: output LLRs for 1st stream
   */
 
-#if defined(__x86_64__) || defined(__i386__)
-  __m128i *rho01_128i       = (__m128i *)rho01;
-  __m128i *stream0_128i_in  = (__m128i *)stream0_in;
-  __m128i *stream1_128i_in  = (__m128i *)stream1_in;
-  __m128i *stream0_128i_out = (__m128i *)stream0_out;
-  __m128i *ch_mag_128i      = (__m128i *)ch_mag;
-  __m128i ONE_OVER_SQRT_2 = _mm_set1_epi16(23170); // round(1/sqrt(2)*2^15)
-  __m128i ONE_OVER_SQRT_10 = _mm_set1_epi16(20724); // round(1/sqrt(10)*2^16)
-  __m128i THREE_OVER_SQRT_10 = _mm_set1_epi16(31086); // round(3/sqrt(10)*2^15)
-  __m128i SQRT_10_OVER_FOUR = _mm_set1_epi16(25905); // round(sqrt(10)/4*2^15)
-  __m128i ONE_OVER_TWO_SQRT_10 = _mm_set1_epi16(10362); // round(1/2/sqrt(10)*2^16)
-  __m128i NINE_OVER_TWO_SQRT_10 = _mm_set1_epi16(23315); // round(9/2/sqrt(10)*2^14)
-  __m128i  y0r_over_sqrt10;
-  __m128i  y0i_over_sqrt10;
-  __m128i  y0r_three_over_sqrt10;
-  __m128i  y0i_three_over_sqrt10;
-
-  __m128i ch_mag_des;
-  __m128i ch_mag_over_10;
-  __m128i ch_mag_over_2;
-  __m128i ch_mag_9_over_10;
-#elif defined(__arm__) || defined(__aarch64__)
-
-#endif
+  simde__m128i *rho01_128i       = (simde__m128i *)rho01;
+  simde__m128i *stream0_128i_in  = (simde__m128i *)stream0_in;
+  simde__m128i *stream1_128i_in  = (simde__m128i *)stream1_in;
+  simde__m128i *stream0_128i_out = (simde__m128i *)stream0_out;
+  simde__m128i *ch_mag_128i      = (simde__m128i *)ch_mag;
+  simde__m128i ONE_OVER_SQRT_2 = simde_mm_set1_epi16(23170); // round(1/sqrt(2)*2^15)
+  simde__m128i ONE_OVER_SQRT_10 = simde_mm_set1_epi16(20724); // round(1/sqrt(10)*2^16)
+  simde__m128i THREE_OVER_SQRT_10 = simde_mm_set1_epi16(31086); // round(3/sqrt(10)*2^15)
+  simde__m128i SQRT_10_OVER_FOUR = simde_mm_set1_epi16(25905); // round(sqrt(10)/4*2^15)
+  simde__m128i ONE_OVER_TWO_SQRT_10 = simde_mm_set1_epi16(10362); // round(1/2/sqrt(10)*2^16)
+  simde__m128i NINE_OVER_TWO_SQRT_10 = simde_mm_set1_epi16(23315); // round(9/2/sqrt(10)*2^14)
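+  // Fixed-point convention: simde_mm_mulhi_epi16 returns (a*b)>>16, so a 2^16-scaled
+  // constant multiplies directly, a 2^15-scaled constant needs one extra left shift
+  // afterwards, and a 2^14-scaled constant needs two (see the uses below).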
+  simde__m128i  y0r_over_sqrt10;
+  simde__m128i  y0i_over_sqrt10;
+  simde__m128i  y0r_three_over_sqrt10;
+  simde__m128i  y0i_three_over_sqrt10;
+
+  simde__m128i ch_mag_des;
+  simde__m128i ch_mag_over_10;
+  simde__m128i ch_mag_over_2;
+  simde__m128i ch_mag_9_over_10;
 
   int i;
 
@@ -1801,397 +1569,386 @@ void qam16_qpsk(short *stream0_in,
   for (i=0; i<length>>2; i+=2) {
     // In one iteration, we deal with 8 REs
 
-#if defined(__x86_64__) || defined(__i386__)
     // Get rho
     simde__m128i xmm0 = rho01_128i[i];
     simde__m128i xmm1 = rho01_128i[i + 1];
-    xmm0 = _mm_shufflelo_epi16(xmm0,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm0 = _mm_shufflehi_epi16(xmm0,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm0 = _mm_shuffle_epi32(xmm0,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm1 = _mm_shufflelo_epi16(xmm1,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm1 = _mm_shufflehi_epi16(xmm1,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm1 = _mm_shuffle_epi32(xmm1,0xd8); //_MM_SHUFFLE(0,2,1,3));
+    xmm0 = simde_mm_shufflelo_epi16(xmm0, 0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
+    xmm0 = simde_mm_shufflehi_epi16(xmm0, 0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
+    xmm0 = simde_mm_shuffle_epi32(xmm0, 0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
+    xmm1 = simde_mm_shufflelo_epi16(xmm1, 0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
+    xmm1 = simde_mm_shufflehi_epi16(xmm1, 0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
+    xmm1 = simde_mm_shuffle_epi32(xmm1, 0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
     //xmm0 = [Re(0,1) Re(2,3) Im(0,1) Im(2,3)]
     //xmm1 = [Re(4,5) Re(6,7) Im(4,5) Im(6,7)]
-    simde__m128i xmm2 = _mm_unpacklo_epi64(xmm0, xmm1); // Re(rho)
-    simde__m128i xmm3 = _mm_unpackhi_epi64(xmm0, xmm1); // Im(rho)
-    simde__m128i rho_rpi = _mm_adds_epi16(xmm2, xmm3); // rho = Re(rho) + Im(rho)
-    simde__m128i rho_rmi = _mm_subs_epi16(xmm2, xmm3); // rho* = Re(rho) - Im(rho)
+    simde__m128i xmm2 = simde_mm_unpacklo_epi64(xmm0, xmm1); // Re(rho)
+    simde__m128i xmm3 = simde_mm_unpackhi_epi64(xmm0, xmm1); // Im(rho)
+    simde__m128i rho_rpi = simde_mm_adds_epi16(xmm2, xmm3); // rho = Re(rho) + Im(rho)
+    simde__m128i rho_rmi = simde_mm_subs_epi16(xmm2, xmm3); // rho* = Re(rho) - Im(rho)
 
     // Compute the different rhos
-    simde__m128i rho_rpi_1_1 = _mm_mulhi_epi16(rho_rpi, ONE_OVER_SQRT_10);
-    simde__m128i rho_rmi_1_1 = _mm_mulhi_epi16(rho_rmi, ONE_OVER_SQRT_10);
-    simde__m128i rho_rpi_3_3 = _mm_mulhi_epi16(rho_rpi, THREE_OVER_SQRT_10);
-    simde__m128i rho_rmi_3_3 = _mm_mulhi_epi16(rho_rmi, THREE_OVER_SQRT_10);
-    rho_rpi_3_3 = _mm_slli_epi16(rho_rpi_3_3,1);
-    rho_rmi_3_3 = _mm_slli_epi16(rho_rmi_3_3,1);
+    simde__m128i rho_rpi_1_1 = simde_mm_mulhi_epi16(rho_rpi, ONE_OVER_SQRT_10);
+    simde__m128i rho_rmi_1_1 = simde_mm_mulhi_epi16(rho_rmi, ONE_OVER_SQRT_10);
+    simde__m128i rho_rpi_3_3 = simde_mm_mulhi_epi16(rho_rpi, THREE_OVER_SQRT_10);
+    simde__m128i rho_rmi_3_3 = simde_mm_mulhi_epi16(rho_rmi, THREE_OVER_SQRT_10);
+    rho_rpi_3_3 = simde_mm_slli_epi16(rho_rpi_3_3, 1);
+    rho_rmi_3_3 = simde_mm_slli_epi16(rho_rmi_3_3, 1);
 
-    simde__m128i xmm4 = _mm_mulhi_epi16(xmm2, ONE_OVER_SQRT_10); // Re(rho)
-    simde__m128i xmm5 = _mm_mulhi_epi16(xmm3, THREE_OVER_SQRT_10); // Im(rho)
-    xmm5 = _mm_slli_epi16(xmm5, 1);
+    simde__m128i xmm4 = simde_mm_mulhi_epi16(xmm2, ONE_OVER_SQRT_10); // Re(rho)
+    simde__m128i xmm5 = simde_mm_mulhi_epi16(xmm3, THREE_OVER_SQRT_10); // Im(rho)
+    xmm5 = simde_mm_slli_epi16(xmm5, 1);
 
-    simde__m128i rho_rpi_1_3 = _mm_adds_epi16(xmm4, xmm5);
-    simde__m128i rho_rmi_1_3 = _mm_subs_epi16(xmm4, xmm5);
+    simde__m128i rho_rpi_1_3 = simde_mm_adds_epi16(xmm4, xmm5);
+    simde__m128i rho_rmi_1_3 = simde_mm_subs_epi16(xmm4, xmm5);
 
-    simde__m128i xmm6 = _mm_mulhi_epi16(xmm2, THREE_OVER_SQRT_10); // Re(rho)
-    simde__m128i xmm7 = _mm_mulhi_epi16(xmm3, ONE_OVER_SQRT_10); // Im(rho)
-    xmm6 = _mm_slli_epi16(xmm6,1);
+    simde__m128i xmm6 = simde_mm_mulhi_epi16(xmm2, THREE_OVER_SQRT_10); // Re(rho)
+    simde__m128i xmm7 = simde_mm_mulhi_epi16(xmm3, ONE_OVER_SQRT_10); // Im(rho)
+    xmm6 = simde_mm_slli_epi16(xmm6, 1);
 
-    simde__m128i rho_rpi_3_1 = _mm_adds_epi16(xmm6, xmm7);
-    simde__m128i rho_rmi_3_1 = _mm_subs_epi16(xmm6, xmm7);
+    simde__m128i rho_rpi_3_1 = simde_mm_adds_epi16(xmm6, xmm7);
+    simde__m128i rho_rmi_3_1 = simde_mm_subs_epi16(xmm6, xmm7);
 
     // Rearrange interfering MF output
     xmm0 = stream1_128i_in[i];
     xmm1 = stream1_128i_in[i+1];
-    xmm0 = _mm_shufflelo_epi16(xmm0,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm0 = _mm_shufflehi_epi16(xmm0,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm0 = _mm_shuffle_epi32(xmm0,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm1 = _mm_shufflelo_epi16(xmm1,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm1 = _mm_shufflehi_epi16(xmm1,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm1 = _mm_shuffle_epi32(xmm1,0xd8); //_MM_SHUFFLE(0,2,1,3));
+    xmm0 = simde_mm_shufflelo_epi16(xmm0,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
+    xmm0 = simde_mm_shufflehi_epi16(xmm0,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
+    xmm0 = simde_mm_shuffle_epi32(xmm0,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
+    xmm1 = simde_mm_shufflelo_epi16(xmm1,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
+    xmm1 = simde_mm_shufflehi_epi16(xmm1,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
+    xmm1 = simde_mm_shuffle_epi32(xmm1,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
     //xmm0 = [Re(0,1) Re(2,3) Im(0,1) Im(2,3)]
     //xmm1 = [Re(4,5) Re(6,7) Im(4,5) Im(6,7)]
-    simde__m128i y1r = _mm_unpacklo_epi64(xmm0, xmm1); //[y1r(1),y1r(2),y1r(3),y1r(4)]
-    simde__m128i y1i = _mm_unpackhi_epi64(xmm0, xmm1); //[y1i(1),y1i(2),y1i(3),y1i(4)]
-
-    xmm0 = _mm_setzero_si128(); // ZERO
-    xmm2 = _mm_subs_epi16(rho_rpi_1_1,y1r); // = [Re(rho)+ Im(rho)]/sqrt(10) - y1r
-    simde__m128i psi_r_p1_p1 = _mm_abs_epi16(xmm2); // = |[Re(rho)+ Im(rho)]/sqrt(10) - y1r|
-
-    xmm2= _mm_subs_epi16(rho_rmi_1_1,y1r);
-    simde__m128i psi_r_p1_m1 = _mm_abs_epi16(xmm2);
-    xmm2= _mm_subs_epi16(rho_rmi_1_1,y1i);
-    simde__m128i psi_i_p1_p1 = _mm_abs_epi16(xmm2);
-    xmm2= _mm_subs_epi16(rho_rpi_1_3,y1r);
-    simde__m128i psi_r_p1_p3 = _mm_abs_epi16(xmm2);
-    xmm2= _mm_subs_epi16(rho_rmi_1_3,y1r);
-    simde__m128i psi_r_p1_m3 = _mm_abs_epi16(xmm2);
-    xmm2= _mm_subs_epi16(rho_rmi_3_1,y1i);
-    simde__m128i psi_i_p1_p3 = _mm_abs_epi16(xmm2);
-    xmm2= _mm_subs_epi16(rho_rpi_3_1,y1r);
-    simde__m128i psi_r_p3_p1 = _mm_abs_epi16(xmm2);
-    xmm2= _mm_subs_epi16(rho_rmi_3_1,y1r);
-    simde__m128i psi_r_p3_m1 = _mm_abs_epi16(xmm2);
-    xmm2= _mm_subs_epi16(rho_rmi_1_3,y1i);
-    simde__m128i psi_i_p3_p1 = _mm_abs_epi16(xmm2);
-    xmm2= _mm_subs_epi16(rho_rpi_3_3,y1r);
-    simde__m128i psi_r_p3_p3 = _mm_abs_epi16(xmm2);
-    xmm2= _mm_subs_epi16(rho_rmi_3_3,y1r);
-    simde__m128i psi_r_p3_m3 = _mm_abs_epi16(xmm2);
-    xmm2= _mm_subs_epi16(rho_rmi_3_3,y1i);
-    simde__m128i psi_i_p3_p3 = _mm_abs_epi16(xmm2);
-    xmm2= _mm_subs_epi16(rho_rpi_1_1,y1i);
-    simde__m128i psi_i_m1_p1 = _mm_abs_epi16(xmm2);
-    xmm2= _mm_subs_epi16(rho_rpi_3_1,y1i);
-    simde__m128i psi_i_m1_p3 = _mm_abs_epi16(xmm2);
-    xmm2= _mm_subs_epi16(rho_rpi_1_3,y1i);
-    simde__m128i psi_i_m3_p1 = _mm_abs_epi16(xmm2);
-    xmm2= _mm_subs_epi16(rho_rpi_3_3,y1i);
-    simde__m128i psi_i_m3_p3 = _mm_abs_epi16(xmm2);
-    xmm2= _mm_adds_epi16(rho_rpi_1_1,y1i);
-    simde__m128i psi_i_p1_m1 = _mm_abs_epi16(xmm2);
-    xmm2= _mm_adds_epi16(rho_rpi_3_1,y1i);
-    simde__m128i psi_i_p1_m3 = _mm_abs_epi16(xmm2);
-    xmm2= _mm_adds_epi16(rho_rpi_1_3,y1i);
-    simde__m128i psi_i_p3_m1 = _mm_abs_epi16(xmm2);
-    xmm2= _mm_adds_epi16(rho_rpi_3_3,y1i);
-    simde__m128i psi_i_p3_m3 = _mm_abs_epi16(xmm2);
-    xmm2= _mm_adds_epi16(rho_rpi_1_1,y1r);
-    simde__m128i psi_r_m1_m1 = _mm_abs_epi16(xmm2);
-    xmm2= _mm_adds_epi16(rho_rpi_1_3,y1r);
-    simde__m128i psi_r_m1_m3 = _mm_abs_epi16(xmm2);
-    xmm2= _mm_adds_epi16(rho_rpi_3_1,y1r);
-    simde__m128i psi_r_m3_m1 = _mm_abs_epi16(xmm2);
-    xmm2= _mm_adds_epi16(rho_rpi_3_3,y1r);
-    simde__m128i psi_r_m3_m3 = _mm_abs_epi16(xmm2);
-    xmm2= _mm_adds_epi16(y1r,rho_rmi_1_1);
-    simde__m128i psi_r_m1_p1 = _mm_abs_epi16(xmm2);
-    xmm2= _mm_adds_epi16(y1r,rho_rmi_1_3);
-    simde__m128i psi_r_m1_p3 = _mm_abs_epi16(xmm2);
-    xmm2= _mm_adds_epi16(y1i,rho_rmi_1_1);
-    simde__m128i psi_i_m1_m1 = _mm_abs_epi16(xmm2);
-    xmm2= _mm_adds_epi16(y1i,rho_rmi_3_1);
-    simde__m128i psi_i_m1_m3 = _mm_abs_epi16(xmm2);
-    xmm2= _mm_adds_epi16(y1r,rho_rmi_3_1);
-    simde__m128i psi_r_m3_p1 = _mm_abs_epi16(xmm2);
-    xmm2= _mm_adds_epi16(y1r,rho_rmi_3_3);
-    simde__m128i psi_r_m3_p3 = _mm_abs_epi16(xmm2);
-    xmm2= _mm_adds_epi16(y1i,rho_rmi_1_3);
-    simde__m128i psi_i_m3_m1 = _mm_abs_epi16(xmm2);
-    xmm2= _mm_adds_epi16(y1i,rho_rmi_3_3);
-    simde__m128i psi_i_m3_m3 = _mm_abs_epi16(xmm2);
+    simde__m128i y1r = simde_mm_unpacklo_epi64(xmm0, xmm1); //[y1r(1),y1r(2),y1r(3),y1r(4)]
+    simde__m128i y1i = simde_mm_unpackhi_epi64(xmm0, xmm1); //[y1i(1),y1i(2),y1i(3),y1i(4)]
+
+    xmm0 = simde_mm_setzero_si128(); // ZERO
+    xmm2 = simde_mm_subs_epi16(rho_rpi_1_1, y1r); // = [Re(rho) + Im(rho)]/sqrt(10) - y1r
+    simde__m128i psi_r_p1_p1 = simde_mm_abs_epi16(xmm2); // = |[Re(rho) + Im(rho)]/sqrt(10) - y1r|
+
+    xmm2 = simde_mm_subs_epi16(rho_rmi_1_1, y1r);
+    simde__m128i psi_r_p1_m1 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rmi_1_1, y1i);
+    simde__m128i psi_i_p1_p1 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rpi_1_3, y1r);
+    simde__m128i psi_r_p1_p3 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rmi_1_3, y1r);
+    simde__m128i psi_r_p1_m3 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rmi_3_1, y1i);
+    simde__m128i psi_i_p1_p3 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rpi_3_1, y1r);
+    simde__m128i psi_r_p3_p1 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rmi_3_1, y1r);
+    simde__m128i psi_r_p3_m1 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rmi_1_3, y1i);
+    simde__m128i psi_i_p3_p1 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rpi_3_3, y1r);
+    simde__m128i psi_r_p3_p3 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rmi_3_3, y1r);
+    simde__m128i psi_r_p3_m3 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rmi_3_3, y1i);
+    simde__m128i psi_i_p3_p3 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rpi_1_1, y1i);
+    simde__m128i psi_i_m1_p1 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rpi_3_1, y1i);
+    simde__m128i psi_i_m1_p3 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rpi_1_3, y1i);
+    simde__m128i psi_i_m3_p1 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rpi_3_3, y1i);
+    simde__m128i psi_i_m3_p3 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(rho_rpi_1_1, y1i);
+    simde__m128i psi_i_p1_m1 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(rho_rpi_3_1, y1i);
+    simde__m128i psi_i_p1_m3 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(rho_rpi_1_3, y1i);
+    simde__m128i psi_i_p3_m1 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(rho_rpi_3_3, y1i);
+    simde__m128i psi_i_p3_m3 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(rho_rpi_1_1, y1r);
+    simde__m128i psi_r_m1_m1 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(rho_rpi_1_3, y1r);
+    simde__m128i psi_r_m1_m3 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(rho_rpi_3_1, y1r);
+    simde__m128i psi_r_m3_m1 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(rho_rpi_3_3, y1r);
+    simde__m128i psi_r_m3_m3 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(y1r, rho_rmi_1_1);
+    simde__m128i psi_r_m1_p1 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(y1r, rho_rmi_1_3);
+    simde__m128i psi_r_m1_p3 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(y1i, rho_rmi_1_1);
+    simde__m128i psi_i_m1_m1 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(y1i, rho_rmi_3_1);
+    simde__m128i psi_i_m1_m3 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(y1r, rho_rmi_3_1);
+    simde__m128i psi_r_m3_p1 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(y1r, rho_rmi_3_3);
+    simde__m128i psi_r_m3_p3 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(y1i, rho_rmi_1_3);
+    simde__m128i psi_i_m3_m1 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(y1i, rho_rmi_3_3);
+    simde__m128i psi_i_m3_m3 = simde_mm_abs_epi16(xmm2);
 
     // Rearrange desired MF output
     xmm0 = stream0_128i_in[i];
     xmm1 = stream0_128i_in[i+1];
-    xmm0 = _mm_shufflelo_epi16(xmm0,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm0 = _mm_shufflehi_epi16(xmm0,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm0 = _mm_shuffle_epi32(xmm0,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm1 = _mm_shufflelo_epi16(xmm1,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm1 = _mm_shufflehi_epi16(xmm1,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm1 = _mm_shuffle_epi32(xmm1,0xd8); //_MM_SHUFFLE(0,2,1,3));
+    xmm0 = simde_mm_shufflelo_epi16(xmm0,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
+    xmm0 = simde_mm_shufflehi_epi16(xmm0,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
+    xmm0 = simde_mm_shuffle_epi32(xmm0,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
+    xmm1 = simde_mm_shufflelo_epi16(xmm1,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
+    xmm1 = simde_mm_shufflehi_epi16(xmm1,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
+    xmm1 = simde_mm_shuffle_epi32(xmm1,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
     //xmm0 = [Re(0,1) Re(2,3) Im(0,1) Im(2,3)]
     //xmm1 = [Re(4,5) Re(6,7) Im(4,5) Im(6,7)]
-    simde__m128i y0r = _mm_unpacklo_epi64(xmm0, xmm1); // = [y0r(1),y0r(2),y0r(3),y0r(4)]
-    simde__m128i y0i = _mm_unpackhi_epi64(xmm0, xmm1);
+    simde__m128i y0r = simde_mm_unpacklo_epi64(xmm0, xmm1); // = [y0r(1),y0r(2),y0r(3),y0r(4)]
+    simde__m128i y0i = simde_mm_unpackhi_epi64(xmm0, xmm1);
 
     // Rearrange desired channel magnitudes
     xmm2 = ch_mag_128i[i]; // = [|h|^2(1),|h|^2(1),|h|^2(2),|h|^2(2)]*(2/sqrt(10))
     xmm3 = ch_mag_128i[i+1]; // = [|h|^2(3),|h|^2(3),|h|^2(4),|h|^2(4)]*(2/sqrt(10))
-    xmm2 = _mm_shufflelo_epi16(xmm2,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm2 = _mm_shufflehi_epi16(xmm2,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm2 = _mm_shuffle_epi32(xmm2,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm3 = _mm_shufflelo_epi16(xmm3,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm3 = _mm_shufflehi_epi16(xmm3,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm3 = _mm_shuffle_epi32(xmm3,0xd8); //_MM_SHUFFLE(0,2,1,3));
+    xmm2 = simde_mm_shufflelo_epi16(xmm2,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
+    xmm2 = simde_mm_shufflehi_epi16(xmm2,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
+    xmm2 = simde_mm_shuffle_epi32(xmm2,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
+    xmm3 = simde_mm_shufflelo_epi16(xmm3,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
+    xmm3 = simde_mm_shufflehi_epi16(xmm3,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
+    xmm3 = simde_mm_shuffle_epi32(xmm3,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
 
-    ch_mag_des = _mm_unpacklo_epi64(xmm2,xmm3); // = [|h|^2(1),|h|^2(2),|h|^2(3),|h|^2(4)]*(2/sqrt(10))
+    ch_mag_des = simde_mm_unpacklo_epi64(xmm2,xmm3); // = [|h|^2(1),|h|^2(2),|h|^2(3),|h|^2(4)]*(2/sqrt(10))
 
     // Scale MF output of desired signal
-    y0r_over_sqrt10 = _mm_mulhi_epi16(y0r,ONE_OVER_SQRT_10);
-    y0i_over_sqrt10 = _mm_mulhi_epi16(y0i,ONE_OVER_SQRT_10);
-    y0r_three_over_sqrt10 = _mm_mulhi_epi16(y0r,THREE_OVER_SQRT_10);
-    y0i_three_over_sqrt10 = _mm_mulhi_epi16(y0i,THREE_OVER_SQRT_10);
-    y0r_three_over_sqrt10 = _mm_slli_epi16(y0r_three_over_sqrt10,1);
-    y0i_three_over_sqrt10 = _mm_slli_epi16(y0i_three_over_sqrt10,1);
+    y0r_over_sqrt10 = simde_mm_mulhi_epi16(y0r,ONE_OVER_SQRT_10);
+    y0i_over_sqrt10 = simde_mm_mulhi_epi16(y0i,ONE_OVER_SQRT_10);
+    y0r_three_over_sqrt10 = simde_mm_mulhi_epi16(y0r,THREE_OVER_SQRT_10);
+    y0i_three_over_sqrt10 = simde_mm_mulhi_epi16(y0i,THREE_OVER_SQRT_10);
+    y0r_three_over_sqrt10 = simde_mm_slli_epi16(y0r_three_over_sqrt10,1);
+    y0i_three_over_sqrt10 = simde_mm_slli_epi16(y0i_three_over_sqrt10,1);
 
     // Compute necessary combination of required terms
-    simde__m128i y0_p_1_1 = _mm_adds_epi16(y0r_over_sqrt10, y0i_over_sqrt10);
-    simde__m128i y0_m_1_1 = _mm_subs_epi16(y0r_over_sqrt10, y0i_over_sqrt10);
+    simde__m128i y0_p_1_1 = simde_mm_adds_epi16(y0r_over_sqrt10, y0i_over_sqrt10);
+    simde__m128i y0_m_1_1 = simde_mm_subs_epi16(y0r_over_sqrt10, y0i_over_sqrt10);
 
-    simde__m128i y0_p_1_3 = _mm_adds_epi16(y0r_over_sqrt10, y0i_three_over_sqrt10);
-    simde__m128i y0_m_1_3 = _mm_subs_epi16(y0r_over_sqrt10, y0i_three_over_sqrt10);
+    simde__m128i y0_p_1_3 = simde_mm_adds_epi16(y0r_over_sqrt10, y0i_three_over_sqrt10);
+    simde__m128i y0_m_1_3 = simde_mm_subs_epi16(y0r_over_sqrt10, y0i_three_over_sqrt10);
 
-    simde__m128i y0_p_3_1 = _mm_adds_epi16(y0r_three_over_sqrt10, y0i_over_sqrt10);
-    simde__m128i y0_m_3_1 = _mm_subs_epi16(y0r_three_over_sqrt10, y0i_over_sqrt10);
+    simde__m128i y0_p_3_1 = simde_mm_adds_epi16(y0r_three_over_sqrt10, y0i_over_sqrt10);
+    simde__m128i y0_m_3_1 = simde_mm_subs_epi16(y0r_three_over_sqrt10, y0i_over_sqrt10);
 
-    simde__m128i y0_p_3_3 = _mm_adds_epi16(y0r_three_over_sqrt10, y0i_three_over_sqrt10);
-    simde__m128i y0_m_3_3 = _mm_subs_epi16(y0r_three_over_sqrt10, y0i_three_over_sqrt10);
+    simde__m128i y0_p_3_3 = simde_mm_adds_epi16(y0r_three_over_sqrt10, y0i_three_over_sqrt10);
+    simde__m128i y0_m_3_3 = simde_mm_subs_epi16(y0r_three_over_sqrt10, y0i_three_over_sqrt10);
 
     // Add psi
-    simde__m128i psi_a_p1_p1 = _mm_adds_epi16(psi_r_p1_p1, psi_i_p1_p1);
-    simde__m128i psi_a_p1_p3 = _mm_adds_epi16(psi_r_p1_p3, psi_i_p1_p3);
-    simde__m128i psi_a_p3_p1 = _mm_adds_epi16(psi_r_p3_p1, psi_i_p3_p1);
-    simde__m128i psi_a_p3_p3 = _mm_adds_epi16(psi_r_p3_p3, psi_i_p3_p3);
-    simde__m128i psi_a_p1_m1 = _mm_adds_epi16(psi_r_p1_m1, psi_i_p1_m1);
-    simde__m128i psi_a_p1_m3 = _mm_adds_epi16(psi_r_p1_m3, psi_i_p1_m3);
-    simde__m128i psi_a_p3_m1 = _mm_adds_epi16(psi_r_p3_m1, psi_i_p3_m1);
-    simde__m128i psi_a_p3_m3 = _mm_adds_epi16(psi_r_p3_m3, psi_i_p3_m3);
-    simde__m128i psi_a_m1_p1 = _mm_adds_epi16(psi_r_m1_p1, psi_i_m1_p1);
-    simde__m128i psi_a_m1_p3 = _mm_adds_epi16(psi_r_m1_p3, psi_i_m1_p3);
-    simde__m128i psi_a_m3_p1 = _mm_adds_epi16(psi_r_m3_p1, psi_i_m3_p1);
-    simde__m128i psi_a_m3_p3 = _mm_adds_epi16(psi_r_m3_p3, psi_i_m3_p3);
-    simde__m128i psi_a_m1_m1 = _mm_adds_epi16(psi_r_m1_m1, psi_i_m1_m1);
-    simde__m128i psi_a_m1_m3 = _mm_adds_epi16(psi_r_m1_m3, psi_i_m1_m3);
-    simde__m128i psi_a_m3_m1 = _mm_adds_epi16(psi_r_m3_m1, psi_i_m3_m1);
-    simde__m128i psi_a_m3_m3 = _mm_adds_epi16(psi_r_m3_m3, psi_i_m3_m3);
+    simde__m128i psi_a_p1_p1 = simde_mm_adds_epi16(psi_r_p1_p1, psi_i_p1_p1);
+    simde__m128i psi_a_p1_p3 = simde_mm_adds_epi16(psi_r_p1_p3, psi_i_p1_p3);
+    simde__m128i psi_a_p3_p1 = simde_mm_adds_epi16(psi_r_p3_p1, psi_i_p3_p1);
+    simde__m128i psi_a_p3_p3 = simde_mm_adds_epi16(psi_r_p3_p3, psi_i_p3_p3);
+    simde__m128i psi_a_p1_m1 = simde_mm_adds_epi16(psi_r_p1_m1, psi_i_p1_m1);
+    simde__m128i psi_a_p1_m3 = simde_mm_adds_epi16(psi_r_p1_m3, psi_i_p1_m3);
+    simde__m128i psi_a_p3_m1 = simde_mm_adds_epi16(psi_r_p3_m1, psi_i_p3_m1);
+    simde__m128i psi_a_p3_m3 = simde_mm_adds_epi16(psi_r_p3_m3, psi_i_p3_m3);
+    simde__m128i psi_a_m1_p1 = simde_mm_adds_epi16(psi_r_m1_p1, psi_i_m1_p1);
+    simde__m128i psi_a_m1_p3 = simde_mm_adds_epi16(psi_r_m1_p3, psi_i_m1_p3);
+    simde__m128i psi_a_m3_p1 = simde_mm_adds_epi16(psi_r_m3_p1, psi_i_m3_p1);
+    simde__m128i psi_a_m3_p3 = simde_mm_adds_epi16(psi_r_m3_p3, psi_i_m3_p3);
+    simde__m128i psi_a_m1_m1 = simde_mm_adds_epi16(psi_r_m1_m1, psi_i_m1_m1);
+    simde__m128i psi_a_m1_m3 = simde_mm_adds_epi16(psi_r_m1_m3, psi_i_m1_m3);
+    simde__m128i psi_a_m3_m1 = simde_mm_adds_epi16(psi_r_m3_m1, psi_i_m3_m1);
+    simde__m128i psi_a_m3_m3 = simde_mm_adds_epi16(psi_r_m3_m3, psi_i_m3_m3);
 
     // scale by sqrt(2)
-    psi_a_p1_p1 = _mm_mulhi_epi16(psi_a_p1_p1,ONE_OVER_SQRT_2);
-    psi_a_p1_p1 = _mm_slli_epi16(psi_a_p1_p1,1);
-    psi_a_p1_p3 = _mm_mulhi_epi16(psi_a_p1_p3,ONE_OVER_SQRT_2);
-    psi_a_p1_p3 = _mm_slli_epi16(psi_a_p1_p3,1);
-    psi_a_p3_p1 = _mm_mulhi_epi16(psi_a_p3_p1,ONE_OVER_SQRT_2);
-    psi_a_p3_p1 = _mm_slli_epi16(psi_a_p3_p1,1);
-    psi_a_p3_p3 = _mm_mulhi_epi16(psi_a_p3_p3,ONE_OVER_SQRT_2);
-    psi_a_p3_p3 = _mm_slli_epi16(psi_a_p3_p3,1);
-
-    psi_a_p1_m1 = _mm_mulhi_epi16(psi_a_p1_m1,ONE_OVER_SQRT_2);
-    psi_a_p1_m1 = _mm_slli_epi16(psi_a_p1_m1,1);
-    psi_a_p1_m3 = _mm_mulhi_epi16(psi_a_p1_m3,ONE_OVER_SQRT_2);
-    psi_a_p1_m3 = _mm_slli_epi16(psi_a_p1_m3,1);
-    psi_a_p3_m1 = _mm_mulhi_epi16(psi_a_p3_m1,ONE_OVER_SQRT_2);
-    psi_a_p3_m1 = _mm_slli_epi16(psi_a_p3_m1,1);
-    psi_a_p3_m3 = _mm_mulhi_epi16(psi_a_p3_m3,ONE_OVER_SQRT_2);
-    psi_a_p3_m3 = _mm_slli_epi16(psi_a_p3_m3,1);
-
-    psi_a_m1_p1 = _mm_mulhi_epi16(psi_a_m1_p1,ONE_OVER_SQRT_2);
-    psi_a_m1_p1 = _mm_slli_epi16(psi_a_m1_p1,1);
-    psi_a_m1_p3 = _mm_mulhi_epi16(psi_a_m1_p3,ONE_OVER_SQRT_2);
-    psi_a_m1_p3 = _mm_slli_epi16(psi_a_m1_p3,1);
-    psi_a_m3_p1 = _mm_mulhi_epi16(psi_a_m3_p1,ONE_OVER_SQRT_2);
-    psi_a_m3_p1 = _mm_slli_epi16(psi_a_m3_p1,1);
-    psi_a_m3_p3 = _mm_mulhi_epi16(psi_a_m3_p3,ONE_OVER_SQRT_2);
-    psi_a_m3_p3 = _mm_slli_epi16(psi_a_m3_p3,1);
-
-    psi_a_m1_m1 = _mm_mulhi_epi16(psi_a_m1_m1,ONE_OVER_SQRT_2);
-    psi_a_m1_m1 = _mm_slli_epi16(psi_a_m1_m1,1);
-    psi_a_m1_m3 = _mm_mulhi_epi16(psi_a_m1_m3,ONE_OVER_SQRT_2);
-    psi_a_m1_m3 = _mm_slli_epi16(psi_a_m1_m3,1);
-    psi_a_m3_m1 = _mm_mulhi_epi16(psi_a_m3_m1,ONE_OVER_SQRT_2);
-    psi_a_m3_m1 = _mm_slli_epi16(psi_a_m3_m1,1);
-    psi_a_m3_m3 = _mm_mulhi_epi16(psi_a_m3_m3,ONE_OVER_SQRT_2);
-    psi_a_m3_m3 = _mm_slli_epi16(psi_a_m3_m3,1);
+    psi_a_p1_p1 = simde_mm_mulhi_epi16(psi_a_p1_p1,ONE_OVER_SQRT_2);
+    psi_a_p1_p1 = simde_mm_slli_epi16(psi_a_p1_p1,1);
+    psi_a_p1_p3 = simde_mm_mulhi_epi16(psi_a_p1_p3,ONE_OVER_SQRT_2);
+    psi_a_p1_p3 = simde_mm_slli_epi16(psi_a_p1_p3,1);
+    psi_a_p3_p1 = simde_mm_mulhi_epi16(psi_a_p3_p1,ONE_OVER_SQRT_2);
+    psi_a_p3_p1 = simde_mm_slli_epi16(psi_a_p3_p1,1);
+    psi_a_p3_p3 = simde_mm_mulhi_epi16(psi_a_p3_p3,ONE_OVER_SQRT_2);
+    psi_a_p3_p3 = simde_mm_slli_epi16(psi_a_p3_p3,1);
+
+    psi_a_p1_m1 = simde_mm_mulhi_epi16(psi_a_p1_m1,ONE_OVER_SQRT_2);
+    psi_a_p1_m1 = simde_mm_slli_epi16(psi_a_p1_m1,1);
+    psi_a_p1_m3 = simde_mm_mulhi_epi16(psi_a_p1_m3,ONE_OVER_SQRT_2);
+    psi_a_p1_m3 = simde_mm_slli_epi16(psi_a_p1_m3,1);
+    psi_a_p3_m1 = simde_mm_mulhi_epi16(psi_a_p3_m1,ONE_OVER_SQRT_2);
+    psi_a_p3_m1 = simde_mm_slli_epi16(psi_a_p3_m1,1);
+    psi_a_p3_m3 = simde_mm_mulhi_epi16(psi_a_p3_m3,ONE_OVER_SQRT_2);
+    psi_a_p3_m3 = simde_mm_slli_epi16(psi_a_p3_m3,1);
+
+    psi_a_m1_p1 = simde_mm_mulhi_epi16(psi_a_m1_p1,ONE_OVER_SQRT_2);
+    psi_a_m1_p1 = simde_mm_slli_epi16(psi_a_m1_p1,1);
+    psi_a_m1_p3 = simde_mm_mulhi_epi16(psi_a_m1_p3,ONE_OVER_SQRT_2);
+    psi_a_m1_p3 = simde_mm_slli_epi16(psi_a_m1_p3,1);
+    psi_a_m3_p1 = simde_mm_mulhi_epi16(psi_a_m3_p1,ONE_OVER_SQRT_2);
+    psi_a_m3_p1 = simde_mm_slli_epi16(psi_a_m3_p1,1);
+    psi_a_m3_p3 = simde_mm_mulhi_epi16(psi_a_m3_p3,ONE_OVER_SQRT_2);
+    psi_a_m3_p3 = simde_mm_slli_epi16(psi_a_m3_p3,1);
+
+    psi_a_m1_m1 = simde_mm_mulhi_epi16(psi_a_m1_m1,ONE_OVER_SQRT_2);
+    psi_a_m1_m1 = simde_mm_slli_epi16(psi_a_m1_m1,1);
+    psi_a_m1_m3 = simde_mm_mulhi_epi16(psi_a_m1_m3,ONE_OVER_SQRT_2);
+    psi_a_m1_m3 = simde_mm_slli_epi16(psi_a_m1_m3,1);
+    psi_a_m3_m1 = simde_mm_mulhi_epi16(psi_a_m3_m1,ONE_OVER_SQRT_2);
+    psi_a_m3_m1 = simde_mm_slli_epi16(psi_a_m3_m1,1);
+    psi_a_m3_m3 = simde_mm_mulhi_epi16(psi_a_m3_m3,ONE_OVER_SQRT_2);
+    psi_a_m3_m3 = simde_mm_slli_epi16(psi_a_m3_m3,1);
 
     // Computing different multiples of channel norms
-    ch_mag_over_10=_mm_mulhi_epi16(ch_mag_des, ONE_OVER_TWO_SQRT_10);
-    ch_mag_over_2=_mm_mulhi_epi16(ch_mag_des, SQRT_10_OVER_FOUR);
-    ch_mag_over_2=_mm_slli_epi16(ch_mag_over_2, 1);
-    ch_mag_9_over_10=_mm_mulhi_epi16(ch_mag_des, NINE_OVER_TWO_SQRT_10);
-    ch_mag_9_over_10=_mm_slli_epi16(ch_mag_9_over_10, 2);
+    ch_mag_over_10   = simde_mm_mulhi_epi16(ch_mag_des, ONE_OVER_TWO_SQRT_10);
+    ch_mag_over_2    = simde_mm_mulhi_epi16(ch_mag_des, SQRT_10_OVER_FOUR);
+    ch_mag_over_2    = simde_mm_slli_epi16(ch_mag_over_2, 1);
+    ch_mag_9_over_10 = simde_mm_mulhi_epi16(ch_mag_des, NINE_OVER_TWO_SQRT_10);
+    ch_mag_9_over_10 = simde_mm_slli_epi16(ch_mag_9_over_10, 2);
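+    // With ch_mag_des = |h|^2 * 2/sqrt(10):
+    //   mulhi by 1/(2*sqrt(10))*2^16               -> |h|^2/10
+    //   mulhi by sqrt(10)/4*2^15,    then slli(1)  -> |h|^2/2
+    //   mulhi by 9/(2*sqrt(10))*2^14, then slli(2) -> 9*|h|^2/10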
 
     // Computing Metrics
-    xmm1 = _mm_adds_epi16(psi_a_p1_p1, y0_p_1_1);
-    simde__m128i bit_met_p1_p1 = _mm_subs_epi16(xmm1, ch_mag_over_10);
+    xmm1 = simde_mm_adds_epi16(psi_a_p1_p1, y0_p_1_1);
+    simde__m128i bit_met_p1_p1 = simde_mm_subs_epi16(xmm1, ch_mag_over_10);
 
-    xmm1 = _mm_adds_epi16(psi_a_p1_p3, y0_p_1_3);
-    simde__m128i bit_met_p1_p3 = _mm_subs_epi16(xmm1, ch_mag_over_2);
+    xmm1 = simde_mm_adds_epi16(psi_a_p1_p3, y0_p_1_3);
+    simde__m128i bit_met_p1_p3 = simde_mm_subs_epi16(xmm1, ch_mag_over_2);
 
-    xmm1 = _mm_adds_epi16(psi_a_p1_m1, y0_m_1_1);
-    simde__m128i bit_met_p1_m1 = _mm_subs_epi16(xmm1, ch_mag_over_10);
+    xmm1 = simde_mm_adds_epi16(psi_a_p1_m1, y0_m_1_1);
+    simde__m128i bit_met_p1_m1 = simde_mm_subs_epi16(xmm1, ch_mag_over_10);
 
-    xmm1 = _mm_adds_epi16(psi_a_p1_m3, y0_m_1_3);
-    simde__m128i bit_met_p1_m3 = _mm_subs_epi16(xmm1, ch_mag_over_2);
+    xmm1 = simde_mm_adds_epi16(psi_a_p1_m3, y0_m_1_3);
+    simde__m128i bit_met_p1_m3 = simde_mm_subs_epi16(xmm1, ch_mag_over_2);
 
-    xmm1 = _mm_adds_epi16(psi_a_p3_p1, y0_p_3_1);
-    simde__m128i bit_met_p3_p1 = _mm_subs_epi16(xmm1, ch_mag_over_2);
+    xmm1 = simde_mm_adds_epi16(psi_a_p3_p1, y0_p_3_1);
+    simde__m128i bit_met_p3_p1 = simde_mm_subs_epi16(xmm1, ch_mag_over_2);
 
-    xmm1 = _mm_adds_epi16(psi_a_p3_p3, y0_p_3_3);
-    simde__m128i bit_met_p3_p3 = _mm_subs_epi16(xmm1, ch_mag_9_over_10);
+    xmm1 = simde_mm_adds_epi16(psi_a_p3_p3, y0_p_3_3);
+    simde__m128i bit_met_p3_p3 = simde_mm_subs_epi16(xmm1, ch_mag_9_over_10);
 
-    xmm1 = _mm_adds_epi16(psi_a_p3_m1, y0_m_3_1);
-    simde__m128i bit_met_p3_m1 = _mm_subs_epi16(xmm1, ch_mag_over_2);
+    xmm1 = simde_mm_adds_epi16(psi_a_p3_m1, y0_m_3_1);
+    simde__m128i bit_met_p3_m1 = simde_mm_subs_epi16(xmm1, ch_mag_over_2);
 
-    xmm1 = _mm_adds_epi16(psi_a_p3_m3, y0_m_3_3);
-    simde__m128i bit_met_p3_m3 = _mm_subs_epi16(xmm1, ch_mag_9_over_10);
+    xmm1 = simde_mm_adds_epi16(psi_a_p3_m3, y0_m_3_3);
+    simde__m128i bit_met_p3_m3 = simde_mm_subs_epi16(xmm1, ch_mag_9_over_10);
 
-    xmm1 = _mm_subs_epi16(psi_a_m1_p1, y0_m_1_1);
-    simde__m128i bit_met_m1_p1 = _mm_subs_epi16(xmm1, ch_mag_over_10);
+    xmm1 = simde_mm_subs_epi16(psi_a_m1_p1, y0_m_1_1);
+    simde__m128i bit_met_m1_p1 = simde_mm_subs_epi16(xmm1, ch_mag_over_10);
 
-    xmm1 = _mm_subs_epi16(psi_a_m1_p3, y0_m_1_3);
-    simde__m128i bit_met_m1_p3 = _mm_subs_epi16(xmm1, ch_mag_over_2);
+    xmm1 = simde_mm_subs_epi16(psi_a_m1_p3, y0_m_1_3);
+    simde__m128i bit_met_m1_p3 = simde_mm_subs_epi16(xmm1, ch_mag_over_2);
 
-    xmm1 = _mm_subs_epi16(psi_a_m1_m1, y0_p_1_1);
-    simde__m128i bit_met_m1_m1 = _mm_subs_epi16(xmm1, ch_mag_over_10);
+    xmm1 = simde_mm_subs_epi16(psi_a_m1_m1, y0_p_1_1);
+    simde__m128i bit_met_m1_m1 = simde_mm_subs_epi16(xmm1, ch_mag_over_10);
 
-    xmm1 = _mm_subs_epi16(psi_a_m1_m3, y0_p_1_3);
-    simde__m128i bit_met_m1_m3 = _mm_subs_epi16(xmm1, ch_mag_over_2);
+    xmm1 = simde_mm_subs_epi16(psi_a_m1_m3, y0_p_1_3);
+    simde__m128i bit_met_m1_m3 = simde_mm_subs_epi16(xmm1, ch_mag_over_2);
 
-    xmm1 = _mm_subs_epi16(psi_a_m3_p1, y0_m_3_1);
-    simde__m128i bit_met_m3_p1 = _mm_subs_epi16(xmm1, ch_mag_over_2);
+    xmm1 = simde_mm_subs_epi16(psi_a_m3_p1, y0_m_3_1);
+    simde__m128i bit_met_m3_p1 = simde_mm_subs_epi16(xmm1, ch_mag_over_2);
 
-    xmm1 = _mm_subs_epi16(psi_a_m3_p3, y0_m_3_3);
-    simde__m128i bit_met_m3_p3 = _mm_subs_epi16(xmm1, ch_mag_9_over_10);
+    xmm1 = simde_mm_subs_epi16(psi_a_m3_p3, y0_m_3_3);
+    simde__m128i bit_met_m3_p3 = simde_mm_subs_epi16(xmm1, ch_mag_9_over_10);
 
-    xmm1 = _mm_subs_epi16(psi_a_m3_m1, y0_p_3_1);
-    simde__m128i bit_met_m3_m1 = _mm_subs_epi16(xmm1, ch_mag_over_2);
+    xmm1 = simde_mm_subs_epi16(psi_a_m3_m1, y0_p_3_1);
+    simde__m128i bit_met_m3_m1 = simde_mm_subs_epi16(xmm1, ch_mag_over_2);
 
-    xmm1 = _mm_subs_epi16(psi_a_m3_m3, y0_p_3_3);
-    simde__m128i bit_met_m3_m3 = _mm_subs_epi16(xmm1, ch_mag_9_over_10);
+    xmm1 = simde_mm_subs_epi16(psi_a_m3_m3, y0_p_3_3);
+    simde__m128i bit_met_m3_m3 = simde_mm_subs_epi16(xmm1, ch_mag_9_over_10);
 
     // LLR of the first bit
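+    // Max-log LLR: take the largest metric over all hypotheses with the bit at 0
+    // and subtract the largest metric over all hypotheses with the bit at 1.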
     // Bit = 1
-    xmm0 = _mm_max_epi16(bit_met_m1_p1,bit_met_m1_p3);
-    xmm1 = _mm_max_epi16(bit_met_m1_m1,bit_met_m1_m3);
-    xmm2 = _mm_max_epi16(bit_met_m3_p1,bit_met_m3_p3);
-    xmm3 = _mm_max_epi16(bit_met_m3_m1,bit_met_m3_m3);
-    xmm4 = _mm_max_epi16(xmm0,xmm1);
-    xmm5 = _mm_max_epi16(xmm2,xmm3);
-    simde__m128i logmax_num_re0 = _mm_max_epi16(xmm4, xmm5);
+    xmm0 = simde_mm_max_epi16(bit_met_m1_p1, bit_met_m1_p3);
+    xmm1 = simde_mm_max_epi16(bit_met_m1_m1, bit_met_m1_m3);
+    xmm2 = simde_mm_max_epi16(bit_met_m3_p1, bit_met_m3_p3);
+    xmm3 = simde_mm_max_epi16(bit_met_m3_m1, bit_met_m3_m3);
+    xmm4 = simde_mm_max_epi16(xmm0, xmm1);
+    xmm5 = simde_mm_max_epi16(xmm2, xmm3);
+    simde__m128i logmax_num_re0 = simde_mm_max_epi16(xmm4, xmm5);
 
     // Bit = 0
-    xmm0 = _mm_max_epi16(bit_met_p1_p1,bit_met_p1_p3);
-    xmm1 = _mm_max_epi16(bit_met_p1_m1,bit_met_p1_m3);
-    xmm2 = _mm_max_epi16(bit_met_p3_p1,bit_met_p3_p3);
-    xmm3 = _mm_max_epi16(bit_met_p3_m1,bit_met_p3_m3);
-    xmm4 = _mm_max_epi16(xmm0,xmm1);
-    xmm5 = _mm_max_epi16(xmm2,xmm3);
-    simde__m128i logmax_den_re0 = _mm_max_epi16(xmm4, xmm5);
+    xmm0 = simde_mm_max_epi16(bit_met_p1_p1, bit_met_p1_p3);
+    xmm1 = simde_mm_max_epi16(bit_met_p1_m1, bit_met_p1_m3);
+    xmm2 = simde_mm_max_epi16(bit_met_p3_p1, bit_met_p3_p3);
+    xmm3 = simde_mm_max_epi16(bit_met_p3_m1, bit_met_p3_m3);
+    xmm4 = simde_mm_max_epi16(xmm0, xmm1);
+    xmm5 = simde_mm_max_epi16(xmm2, xmm3);
+    simde__m128i logmax_den_re0 = simde_mm_max_epi16(xmm4, xmm5);
 
     // LLR of first bit [L1(1), L1(2), L1(3), L1(4), L1(5), L1(6), L1(7), L1(8)]
-    y0r = _mm_subs_epi16(logmax_den_re0,logmax_num_re0);
+    y0r = simde_mm_subs_epi16(logmax_den_re0,logmax_num_re0);
 
     // LLR of the second bit
     // Bit = 1
-    xmm0 = _mm_max_epi16(bit_met_p1_m1,bit_met_p3_m1);
-    xmm1 = _mm_max_epi16(bit_met_m1_m1,bit_met_m3_m1);
-    xmm2 = _mm_max_epi16(bit_met_p1_m3,bit_met_p3_m3);
-    xmm3 = _mm_max_epi16(bit_met_m1_m3,bit_met_m3_m3);
-    xmm4 = _mm_max_epi16(xmm0,xmm1);
-    xmm5 = _mm_max_epi16(xmm2,xmm3);
-    simde__m128i logmax_num_re1 = _mm_max_epi16(xmm4, xmm5);
+    xmm0 = simde_mm_max_epi16(bit_met_p1_m1, bit_met_p3_m1);
+    xmm1 = simde_mm_max_epi16(bit_met_m1_m1, bit_met_m3_m1);
+    xmm2 = simde_mm_max_epi16(bit_met_p1_m3, bit_met_p3_m3);
+    xmm3 = simde_mm_max_epi16(bit_met_m1_m3, bit_met_m3_m3);
+    xmm4 = simde_mm_max_epi16(xmm0, xmm1);
+    xmm5 = simde_mm_max_epi16(xmm2, xmm3);
+    simde__m128i logmax_num_re1 = simde_mm_max_epi16(xmm4, xmm5);
 
     // Bit = 0
-    xmm0 = _mm_max_epi16(bit_met_p1_p1,bit_met_p3_p1);
-    xmm1 = _mm_max_epi16(bit_met_m1_p1,bit_met_m3_p1);
-    xmm2 = _mm_max_epi16(bit_met_p1_p3,bit_met_p3_p3);
-    xmm3 = _mm_max_epi16(bit_met_m1_p3,bit_met_m3_p3);
-    xmm4 = _mm_max_epi16(xmm0,xmm1);
-    xmm5 = _mm_max_epi16(xmm2,xmm3);
-    simde__m128i logmax_den_re1 = _mm_max_epi16(xmm4, xmm5);
+    xmm0 = simde_mm_max_epi16(bit_met_p1_p1, bit_met_p3_p1);
+    xmm1 = simde_mm_max_epi16(bit_met_m1_p1, bit_met_m3_p1);
+    xmm2 = simde_mm_max_epi16(bit_met_p1_p3, bit_met_p3_p3);
+    xmm3 = simde_mm_max_epi16(bit_met_m1_p3, bit_met_m3_p3);
+    xmm4 = simde_mm_max_epi16(xmm0, xmm1);
+    xmm5 = simde_mm_max_epi16(xmm2, xmm3);
+    simde__m128i logmax_den_re1 = simde_mm_max_epi16(xmm4, xmm5);
 
     // LLR of second bit [L2(1), L2(2), L2(3), L2(4)]
-    y1r = _mm_subs_epi16(logmax_den_re1,logmax_num_re1);
+    y1r = simde_mm_subs_epi16(logmax_den_re1,logmax_num_re1);
 
     // LLR of the third bit
     // Bit = 1
-    xmm0 = _mm_max_epi16(bit_met_m3_p1,bit_met_m3_p3);
-    xmm1 = _mm_max_epi16(bit_met_m3_m1,bit_met_m3_m3);
-    xmm2 = _mm_max_epi16(bit_met_p3_p1,bit_met_p3_p3);
-    xmm3 = _mm_max_epi16(bit_met_p3_m1,bit_met_p3_m3);
-    xmm4 = _mm_max_epi16(xmm0,xmm1);
-    xmm5 = _mm_max_epi16(xmm2,xmm3);
-    simde__m128i logmax_num_im0 = _mm_max_epi16(xmm4, xmm5);
+    xmm0 = simde_mm_max_epi16(bit_met_m3_p1, bit_met_m3_p3);
+    xmm1 = simde_mm_max_epi16(bit_met_m3_m1, bit_met_m3_m3);
+    xmm2 = simde_mm_max_epi16(bit_met_p3_p1, bit_met_p3_p3);
+    xmm3 = simde_mm_max_epi16(bit_met_p3_m1, bit_met_p3_m3);
+    xmm4 = simde_mm_max_epi16(xmm0, xmm1);
+    xmm5 = simde_mm_max_epi16(xmm2, xmm3);
+    simde__m128i logmax_num_im0 = simde_mm_max_epi16(xmm4, xmm5);
 
     // Bit = 0
-    xmm0 = _mm_max_epi16(bit_met_m1_p1,bit_met_m1_p3);
-    xmm1 = _mm_max_epi16(bit_met_m1_m1,bit_met_m1_m3);
-    xmm2 = _mm_max_epi16(bit_met_p1_p1,bit_met_p1_p3);
-    xmm3 = _mm_max_epi16(bit_met_p1_m1,bit_met_p1_m3);
-    xmm4 = _mm_max_epi16(xmm0,xmm1);
-    xmm5 = _mm_max_epi16(xmm2,xmm3);
-    simde__m128i logmax_den_im0 = _mm_max_epi16(xmm4, xmm5);
+    xmm0 = simde_mm_max_epi16(bit_met_m1_p1, bit_met_m1_p3);
+    xmm1 = simde_mm_max_epi16(bit_met_m1_m1, bit_met_m1_m3);
+    xmm2 = simde_mm_max_epi16(bit_met_p1_p1, bit_met_p1_p3);
+    xmm3 = simde_mm_max_epi16(bit_met_p1_m1, bit_met_p1_m3);
+    xmm4 = simde_mm_max_epi16(xmm0, xmm1);
+    xmm5 = simde_mm_max_epi16(xmm2, xmm3);
+    simde__m128i logmax_den_im0 = simde_mm_max_epi16(xmm4, xmm5);
 
     // LLR of third bit [L3(1), L3(2), L3(3), L3(4)]
-    y0i = _mm_subs_epi16(logmax_den_im0,logmax_num_im0);
+    y0i = simde_mm_subs_epi16(logmax_den_im0,logmax_num_im0);
 
     // LLR of the fourth bit
     // Bit = 1
-    xmm0 = _mm_max_epi16(bit_met_p1_m3,bit_met_p3_m3);
-    xmm1 = _mm_max_epi16(bit_met_m1_m3,bit_met_m3_m3);
-    xmm2 = _mm_max_epi16(bit_met_p1_p3,bit_met_p3_p3);
-    xmm3 = _mm_max_epi16(bit_met_m1_p3,bit_met_m3_p3);
-    xmm4 = _mm_max_epi16(xmm0,xmm1);
-    xmm5 = _mm_max_epi16(xmm2,xmm3);
-    simde__m128i logmax_num_im1 = _mm_max_epi16(xmm4, xmm5);
+    xmm0 = simde_mm_max_epi16(bit_met_p1_m3, bit_met_p3_m3);
+    xmm1 = simde_mm_max_epi16(bit_met_m1_m3, bit_met_m3_m3);
+    xmm2 = simde_mm_max_epi16(bit_met_p1_p3, bit_met_p3_p3);
+    xmm3 = simde_mm_max_epi16(bit_met_m1_p3, bit_met_m3_p3);
+    xmm4 = simde_mm_max_epi16(xmm0, xmm1);
+    xmm5 = simde_mm_max_epi16(xmm2, xmm3);
+    simde__m128i logmax_num_im1 = simde_mm_max_epi16(xmm4, xmm5);
 
     // Bit = 0
-    xmm0 = _mm_max_epi16(bit_met_p1_m1,bit_met_p3_m1);
-    xmm1 = _mm_max_epi16(bit_met_m1_m1,bit_met_m3_m1);
-    xmm2 = _mm_max_epi16(bit_met_p1_p1,bit_met_p3_p1);
-    xmm3 = _mm_max_epi16(bit_met_m1_p1,bit_met_m3_p1);
-    xmm4 = _mm_max_epi16(xmm0,xmm1);
-    xmm5 = _mm_max_epi16(xmm2,xmm3);
-    simde__m128i logmax_den_im1 = _mm_max_epi16(xmm4, xmm5);
+    xmm0 = simde_mm_max_epi16(bit_met_p1_m1, bit_met_p3_m1);
+    xmm1 = simde_mm_max_epi16(bit_met_m1_m1, bit_met_m3_m1);
+    xmm2 = simde_mm_max_epi16(bit_met_p1_p1, bit_met_p3_p1);
+    xmm3 = simde_mm_max_epi16(bit_met_m1_p1, bit_met_m3_p1);
+    xmm4 = simde_mm_max_epi16(xmm0, xmm1);
+    xmm5 = simde_mm_max_epi16(xmm2, xmm3);
+    simde__m128i logmax_den_im1 = simde_mm_max_epi16(xmm4, xmm5);
 
     // LLR of fourth bit [L4(1), L4(2), L4(3), L4(4)]
-    y1i = _mm_subs_epi16(logmax_den_im1,logmax_num_im1);
+    y1i = simde_mm_subs_epi16(logmax_den_im1,logmax_num_im1);
 
     // Pack LLRs in output
     // [L1(1), L2(1), L1(2), L2(2), L1(3), L2(3), L1(4), L2(4)]
-    xmm0 = _mm_unpacklo_epi16(y0r,y1r);
+    xmm0 = simde_mm_unpacklo_epi16(y0r,y1r);
     // [L1(5), L2(5), L1(6), L2(6), L1(7), L2(7), L1(8), L2(8)]
-    xmm1 = _mm_unpackhi_epi16(y0r,y1r);
+    xmm1 = simde_mm_unpackhi_epi16(y0r,y1r);
     // [L3(1), L4(1), L3(2), L4(2), L3(3), L4(3), L3(4), L4(4)]
-    xmm2 = _mm_unpacklo_epi16(y0i,y1i);
+    xmm2 = simde_mm_unpacklo_epi16(y0i,y1i);
     // [L3(5), L4(5), L3(6), L4(6), L3(7), L4(7), L3(8), L4(8)]
-    xmm3 = _mm_unpackhi_epi16(y0i,y1i);
+    xmm3 = simde_mm_unpackhi_epi16(y0i,y1i);
 
-    stream0_128i_out[2*i+0] = _mm_unpacklo_epi32(xmm0,xmm2); // 8LLRs, 2REs
-    stream0_128i_out[2*i+1] = _mm_unpackhi_epi32(xmm0,xmm2);
-    stream0_128i_out[2*i+2] = _mm_unpacklo_epi32(xmm1,xmm3);
-    stream0_128i_out[2*i+3] = _mm_unpackhi_epi32(xmm1,xmm3);
-
-#elif defined(__arm__) || defined(__aarch64__)
-
-#endif
+    stream0_128i_out[2*i+0] = simde_mm_unpacklo_epi32(xmm0,xmm2); // 8LLRs, 2REs
+    stream0_128i_out[2*i+1] = simde_mm_unpackhi_epi32(xmm0,xmm2);
+    stream0_128i_out[2*i+2] = simde_mm_unpacklo_epi32(xmm1,xmm3);
+    stream0_128i_out[2*i+3] = simde_mm_unpackhi_epi32(xmm1,xmm3);
   }
-
-#if defined(__x86_64__) || defined(__i386__)
-  _mm_empty();
-  _m_empty();
-#endif
-
 }
 
 int dlsch_16qam_qpsk_llr(LTE_DL_FRAME_PARMS *frame_parms,
@@ -2276,218 +2033,213 @@ void qam16_qam16(short *stream0_in,
     Output:
     stream0_out: output LLRs for 1st stream
   */
-#if defined(__x86_64__) || defined(__i386__)
-  __m128i *rho01_128i       = (__m128i *)rho01;
-  __m128i *stream0_128i_in  = (__m128i *)stream0_in;
-  __m128i *stream1_128i_in  = (__m128i *)stream1_in;
-  __m128i *stream0_128i_out = (__m128i *)stream0_out;
-  __m128i *ch_mag_128i      = (__m128i *)ch_mag;
-  __m128i *ch_mag_128i_i    = (__m128i *)ch_mag_i;
-
-
-
-  __m128i ONE_OVER_SQRT_10 = _mm_set1_epi16(20724); // round(1/sqrt(10)*2^16)
-  __m128i ONE_OVER_SQRT_10_Q15 = _mm_set1_epi16(10362); // round(1/sqrt(10)*2^15)
-  __m128i THREE_OVER_SQRT_10 = _mm_set1_epi16(31086); // round(3/sqrt(10)*2^15)
-  __m128i SQRT_10_OVER_FOUR = _mm_set1_epi16(25905); // round(sqrt(10)/4*2^15)
-  __m128i ONE_OVER_TWO_SQRT_10 = _mm_set1_epi16(10362); // round(1/2/sqrt(10)*2^16)
-  __m128i NINE_OVER_TWO_SQRT_10 = _mm_set1_epi16(23315); // round(9/2/sqrt(10)*2^14)
-  __m128i ch_mag_des,ch_mag_int;
-  __m128i  y0r_over_sqrt10;
-  __m128i  y0i_over_sqrt10;
-  __m128i  y0r_three_over_sqrt10;
-  __m128i  y0i_three_over_sqrt10;
-  __m128i ch_mag_over_10;
-  __m128i ch_mag_over_2;
-  __m128i ch_mag_9_over_10;
-#elif defined(__arm__) || defined(__aarch64__)
-
-#endif
+  simde__m128i *rho01_128i       = (simde__m128i *)rho01;
+  simde__m128i *stream0_128i_in  = (simde__m128i *)stream0_in;
+  simde__m128i *stream1_128i_in  = (simde__m128i *)stream1_in;
+  simde__m128i *stream0_128i_out = (simde__m128i *)stream0_out;
+  simde__m128i *ch_mag_128i      = (simde__m128i *)ch_mag;
+  simde__m128i *ch_mag_128i_i    = (simde__m128i *)ch_mag_i;
+
+  simde__m128i ONE_OVER_SQRT_10 = simde_mm_set1_epi16(20724); // round(1/sqrt(10)*2^16)
+  simde__m128i ONE_OVER_SQRT_10_Q15 = simde_mm_set1_epi16(10362); // round(1/sqrt(10)*2^15)
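+  // (1/sqrt(10))*2^15 == (1/(2*sqrt(10)))*2^16, hence the same value 10362 as ONE_OVER_TWO_SQRT_10 below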
+  simde__m128i THREE_OVER_SQRT_10 = simde_mm_set1_epi16(31086); // round(3/sqrt(10)*2^15)
+  simde__m128i SQRT_10_OVER_FOUR = simde_mm_set1_epi16(25905); // round(sqrt(10)/4*2^15)
+  simde__m128i ONE_OVER_TWO_SQRT_10 = simde_mm_set1_epi16(10362); // round(1/2/sqrt(10)*2^16)
+  simde__m128i NINE_OVER_TWO_SQRT_10 = simde_mm_set1_epi16(23315); // round(9/2/sqrt(10)*2^14)
+  simde__m128i ch_mag_des, ch_mag_int;
+  simde__m128i  y0r_over_sqrt10;
+  simde__m128i  y0i_over_sqrt10;
+  simde__m128i  y0r_three_over_sqrt10;
+  simde__m128i  y0i_three_over_sqrt10;
+  simde__m128i ch_mag_over_10;
+  simde__m128i ch_mag_over_2;
+  simde__m128i ch_mag_9_over_10;
 
   int i;
 
   for (i=0; i<length>>2; i+=2) {
     // In one iteration, we deal with 8 REs
 
-#if defined(__x86_64__) || defined(__i386__)
     // Get rho
     simde__m128i xmm0 = rho01_128i[i];
     simde__m128i xmm1 = rho01_128i[i + 1];
-    xmm0 = _mm_shufflelo_epi16(xmm0,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm0 = _mm_shufflehi_epi16(xmm0,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm0 = _mm_shuffle_epi32(xmm0,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm1 = _mm_shufflelo_epi16(xmm1,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm1 = _mm_shufflehi_epi16(xmm1,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm1 = _mm_shuffle_epi32(xmm1,0xd8); //_MM_SHUFFLE(0,2,1,3));
+    xmm0 = simde_mm_shufflelo_epi16(xmm0, 0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
+    xmm0 = simde_mm_shufflehi_epi16(xmm0, 0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
+    xmm0 = simde_mm_shuffle_epi32(xmm0, 0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
+    xmm1 = simde_mm_shufflelo_epi16(xmm1, 0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
+    xmm1 = simde_mm_shufflehi_epi16(xmm1, 0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
+    xmm1 = simde_mm_shuffle_epi32(xmm1, 0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
     //xmm0 = [Re(0,1) Re(2,3) Im(0,1) Im(2,3)]
     //xmm1 = [Re(4,5) Re(6,7) Im(4,5) Im(6,7)]
-    simde__m128i xmm2 = _mm_unpacklo_epi64(xmm0, xmm1); // Re(rho)
-    simde__m128i xmm3 = _mm_unpackhi_epi64(xmm0, xmm1); // Im(rho)
-    simde__m128i rho_rpi = _mm_adds_epi16(xmm2, xmm3); // rho = Re(rho) + Im(rho)
-    simde__m128i rho_rmi = _mm_subs_epi16(xmm2, xmm3); // rho* = Re(rho) - Im(rho)
+    simde__m128i xmm2 = simde_mm_unpacklo_epi64(xmm0, xmm1); // Re(rho)
+    simde__m128i xmm3 = simde_mm_unpackhi_epi64(xmm0, xmm1); // Im(rho)
+    simde__m128i rho_rpi = simde_mm_adds_epi16(xmm2, xmm3); // rho = Re(rho) + Im(rho)
+    simde__m128i rho_rmi = simde_mm_subs_epi16(xmm2, xmm3); // rho* = Re(rho) - Im(rho)
 
     // Compute the different rhos
-    simde__m128i rho_rpi_1_1 = _mm_mulhi_epi16(rho_rpi, ONE_OVER_SQRT_10);
-    simde__m128i rho_rmi_1_1 = _mm_mulhi_epi16(rho_rmi, ONE_OVER_SQRT_10);
-    simde__m128i rho_rpi_3_3 = _mm_mulhi_epi16(rho_rpi, THREE_OVER_SQRT_10);
-    simde__m128i rho_rmi_3_3 = _mm_mulhi_epi16(rho_rmi, THREE_OVER_SQRT_10);
-    rho_rpi_3_3 = _mm_slli_epi16(rho_rpi_3_3, 1);
-    rho_rmi_3_3 = _mm_slli_epi16(rho_rmi_3_3, 1);
+    simde__m128i rho_rpi_1_1 = simde_mm_mulhi_epi16(rho_rpi, ONE_OVER_SQRT_10);
+    simde__m128i rho_rmi_1_1 = simde_mm_mulhi_epi16(rho_rmi, ONE_OVER_SQRT_10);
+    simde__m128i rho_rpi_3_3 = simde_mm_mulhi_epi16(rho_rpi, THREE_OVER_SQRT_10);
+    simde__m128i rho_rmi_3_3 = simde_mm_mulhi_epi16(rho_rmi, THREE_OVER_SQRT_10);
+    rho_rpi_3_3 = simde_mm_slli_epi16(rho_rpi_3_3, 1);
+    rho_rmi_3_3 = simde_mm_slli_epi16(rho_rmi_3_3, 1);
 
-    simde__m128i xmm4 = _mm_mulhi_epi16(xmm2, ONE_OVER_SQRT_10); // Re(rho)
-    simde__m128i xmm5 = _mm_mulhi_epi16(xmm3, THREE_OVER_SQRT_10); // Im(rho)
-    xmm5 = _mm_slli_epi16(xmm5,1);
+    simde__m128i xmm4 = simde_mm_mulhi_epi16(xmm2, ONE_OVER_SQRT_10); // Re(rho)
+    simde__m128i xmm5 = simde_mm_mulhi_epi16(xmm3, THREE_OVER_SQRT_10); // Im(rho)
+    xmm5 = simde_mm_slli_epi16(xmm5, 1);
 
-    simde__m128i rho_rpi_1_3 = _mm_adds_epi16(xmm4, xmm5);
-    simde__m128i rho_rmi_1_3 = _mm_subs_epi16(xmm4, xmm5);
+    simde__m128i rho_rpi_1_3 = simde_mm_adds_epi16(xmm4, xmm5);
+    simde__m128i rho_rmi_1_3 = simde_mm_subs_epi16(xmm4, xmm5);
 
-    simde__m128i xmm6 = _mm_mulhi_epi16(xmm2, THREE_OVER_SQRT_10); // Re(rho)
-    simde__m128i xmm7 = _mm_mulhi_epi16(xmm3, ONE_OVER_SQRT_10); // Im(rho)
-    xmm6 = _mm_slli_epi16(xmm6,1);
+    simde__m128i xmm6 = simde_mm_mulhi_epi16(xmm2, THREE_OVER_SQRT_10); // Re(rho)
+    simde__m128i xmm7 = simde_mm_mulhi_epi16(xmm3, ONE_OVER_SQRT_10); // Im(rho)
+    xmm6 = simde_mm_slli_epi16(xmm6, 1);
 
-    simde__m128i rho_rpi_3_1 = _mm_adds_epi16(xmm6, xmm7);
-    simde__m128i rho_rmi_3_1 = _mm_subs_epi16(xmm6, xmm7);
+    simde__m128i rho_rpi_3_1 = simde_mm_adds_epi16(xmm6, xmm7);
+    simde__m128i rho_rmi_3_1 = simde_mm_subs_epi16(xmm6, xmm7);
 
     // Rearrange interfering MF output
     xmm0 = stream1_128i_in[i];
     xmm1 = stream1_128i_in[i+1];
-    xmm0 = _mm_shufflelo_epi16(xmm0,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm0 = _mm_shufflehi_epi16(xmm0,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm0 = _mm_shuffle_epi32(xmm0,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm1 = _mm_shufflelo_epi16(xmm1,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm1 = _mm_shufflehi_epi16(xmm1,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm1 = _mm_shuffle_epi32(xmm1,0xd8); //_MM_SHUFFLE(0,2,1,3));
+    xmm0 = simde_mm_shufflelo_epi16(xmm0,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
+    xmm0 = simde_mm_shufflehi_epi16(xmm0,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
+    xmm0 = simde_mm_shuffle_epi32(xmm0,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
+    xmm1 = simde_mm_shufflelo_epi16(xmm1,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
+    xmm1 = simde_mm_shufflehi_epi16(xmm1,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
+    xmm1 = simde_mm_shuffle_epi32(xmm1,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
     //xmm0 = [Re(0,1) Re(2,3) Im(0,1) Im(2,3)]
     //xmm1 = [Re(4,5) Re(6,7) Im(4,5) Im(6,7)]
-    simde__m128i y1r = _mm_unpacklo_epi64(xmm0, xmm1); //[y1r(1),y1r(2),y1r(3),y1r(4)]
-    simde__m128i y1i = _mm_unpackhi_epi64(xmm0, xmm1); //[y1i(1),y1i(2),y1i(3),y1i(4)]
-
-    xmm0 = _mm_setzero_si128(); // ZERO
-    xmm2 = _mm_subs_epi16(rho_rpi_1_1,y1r); // = [Re(rho)+ Im(rho)]/sqrt(10) - y1r
-    simde__m128i psi_r_p1_p1 = _mm_abs_epi16(xmm2); // = |[Re(rho)+ Im(rho)]/sqrt(10) - y1r|
-
-    xmm2= _mm_subs_epi16(rho_rmi_1_1,y1r);
-    simde__m128i psi_r_p1_m1 = _mm_abs_epi16(xmm2);
-    xmm2= _mm_subs_epi16(rho_rmi_1_1,y1i);
-    simde__m128i psi_i_p1_p1 = _mm_abs_epi16(xmm2);
-    xmm2= _mm_subs_epi16(rho_rpi_1_3,y1r);
-    simde__m128i psi_r_p1_p3 = _mm_abs_epi16(xmm2);
-    xmm2= _mm_subs_epi16(rho_rmi_1_3,y1r);
-    simde__m128i psi_r_p1_m3 = _mm_abs_epi16(xmm2);
-    xmm2= _mm_subs_epi16(rho_rmi_3_1,y1i);
-    simde__m128i psi_i_p1_p3 = _mm_abs_epi16(xmm2);
-    xmm2= _mm_subs_epi16(rho_rpi_3_1,y1r);
-    simde__m128i psi_r_p3_p1 = _mm_abs_epi16(xmm2);
-    xmm2= _mm_subs_epi16(rho_rmi_3_1,y1r);
-    simde__m128i psi_r_p3_m1 = _mm_abs_epi16(xmm2);
-    xmm2= _mm_subs_epi16(rho_rmi_1_3,y1i);
-    simde__m128i psi_i_p3_p1 = _mm_abs_epi16(xmm2);
-    xmm2= _mm_subs_epi16(rho_rpi_3_3,y1r);
-    simde__m128i psi_r_p3_p3 = _mm_abs_epi16(xmm2);
-    xmm2= _mm_subs_epi16(rho_rmi_3_3,y1r);
-    simde__m128i psi_r_p3_m3 = _mm_abs_epi16(xmm2);
-    xmm2= _mm_subs_epi16(rho_rmi_3_3,y1i);
-    simde__m128i psi_i_p3_p3 = _mm_abs_epi16(xmm2);
-    xmm2= _mm_subs_epi16(rho_rpi_1_1,y1i);
-    simde__m128i psi_i_m1_p1 = _mm_abs_epi16(xmm2);
-    xmm2= _mm_subs_epi16(rho_rpi_3_1,y1i);
-    simde__m128i psi_i_m1_p3 = _mm_abs_epi16(xmm2);
-    xmm2= _mm_subs_epi16(rho_rpi_1_3,y1i);
-    simde__m128i psi_i_m3_p1 = _mm_abs_epi16(xmm2);
-    xmm2= _mm_subs_epi16(rho_rpi_3_3,y1i);
-    simde__m128i psi_i_m3_p3 = _mm_abs_epi16(xmm2);
-    xmm2= _mm_adds_epi16(rho_rpi_1_1,y1i);
-    simde__m128i psi_i_p1_m1 = _mm_abs_epi16(xmm2);
-    xmm2= _mm_adds_epi16(rho_rpi_3_1,y1i);
-    simde__m128i psi_i_p1_m3 = _mm_abs_epi16(xmm2);
-    xmm2= _mm_adds_epi16(rho_rpi_1_3,y1i);
-    simde__m128i psi_i_p3_m1 = _mm_abs_epi16(xmm2);
-    xmm2= _mm_adds_epi16(rho_rpi_3_3,y1i);
-    simde__m128i psi_i_p3_m3 = _mm_abs_epi16(xmm2);
-    xmm2= _mm_adds_epi16(rho_rpi_1_1,y1r);
-    simde__m128i psi_r_m1_m1 = _mm_abs_epi16(xmm2);
-    xmm2= _mm_adds_epi16(rho_rpi_1_3,y1r);
-    simde__m128i psi_r_m1_m3 = _mm_abs_epi16(xmm2);
-    xmm2= _mm_adds_epi16(rho_rpi_3_1,y1r);
-    simde__m128i psi_r_m3_m1 = _mm_abs_epi16(xmm2);
-    xmm2= _mm_adds_epi16(rho_rpi_3_3,y1r);
-    simde__m128i psi_r_m3_m3 = _mm_abs_epi16(xmm2);
-    xmm2= _mm_adds_epi16(y1r,rho_rmi_1_1);
-    simde__m128i psi_r_m1_p1 = _mm_abs_epi16(xmm2);
-    xmm2= _mm_adds_epi16(y1r,rho_rmi_1_3);
-    simde__m128i psi_r_m1_p3 = _mm_abs_epi16(xmm2);
-    xmm2= _mm_adds_epi16(y1i,rho_rmi_1_1);
-    simde__m128i psi_i_m1_m1 = _mm_abs_epi16(xmm2);
-    xmm2= _mm_adds_epi16(y1i,rho_rmi_3_1);
-    simde__m128i psi_i_m1_m3 = _mm_abs_epi16(xmm2);
-    xmm2= _mm_adds_epi16(y1r,rho_rmi_3_1);
-    simde__m128i psi_r_m3_p1 = _mm_abs_epi16(xmm2);
-    xmm2= _mm_adds_epi16(y1r,rho_rmi_3_3);
-    simde__m128i psi_r_m3_p3 = _mm_abs_epi16(xmm2);
-    xmm2= _mm_adds_epi16(y1i,rho_rmi_1_3);
-    simde__m128i psi_i_m3_m1 = _mm_abs_epi16(xmm2);
-    xmm2= _mm_adds_epi16(y1i,rho_rmi_3_3);
-    simde__m128i psi_i_m3_m3 = _mm_abs_epi16(xmm2);
+    simde__m128i y1r = simde_mm_unpacklo_epi64(xmm0, xmm1); //[y1r(1),y1r(2),y1r(3),y1r(4)]
+    simde__m128i y1i = simde_mm_unpackhi_epi64(xmm0, xmm1); //[y1i(1),y1i(2),y1i(3),y1i(4)]
+
+    xmm0 = simde_mm_setzero_si128(); // ZERO
+    xmm2 = simde_mm_subs_epi16(rho_rpi_1_1, y1r); // = [Re(rho) + Im(rho)]/sqrt(10) - y1r
+    simde__m128i psi_r_p1_p1 = simde_mm_abs_epi16(xmm2); // = |[Re(rho) + Im(rho)]/sqrt(10) - y1r|
+
+    xmm2 = simde_mm_subs_epi16(rho_rmi_1_1, y1r);
+    simde__m128i psi_r_p1_m1 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rmi_1_1, y1i);
+    simde__m128i psi_i_p1_p1 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rpi_1_3, y1r);
+    simde__m128i psi_r_p1_p3 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rmi_1_3, y1r);
+    simde__m128i psi_r_p1_m3 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rmi_3_1, y1i);
+    simde__m128i psi_i_p1_p3 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rpi_3_1, y1r);
+    simde__m128i psi_r_p3_p1 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rmi_3_1, y1r);
+    simde__m128i psi_r_p3_m1 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rmi_1_3, y1i);
+    simde__m128i psi_i_p3_p1 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rpi_3_3, y1r);
+    simde__m128i psi_r_p3_p3 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rmi_3_3, y1r);
+    simde__m128i psi_r_p3_m3 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rmi_3_3, y1i);
+    simde__m128i psi_i_p3_p3 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rpi_1_1, y1i);
+    simde__m128i psi_i_m1_p1 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rpi_3_1, y1i);
+    simde__m128i psi_i_m1_p3 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rpi_1_3, y1i);
+    simde__m128i psi_i_m3_p1 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rpi_3_3, y1i);
+    simde__m128i psi_i_m3_p3 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(rho_rpi_1_1, y1i);
+    simde__m128i psi_i_p1_m1 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(rho_rpi_3_1, y1i);
+    simde__m128i psi_i_p1_m3 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(rho_rpi_1_3, y1i);
+    simde__m128i psi_i_p3_m1 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(rho_rpi_3_3, y1i);
+    simde__m128i psi_i_p3_m3 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(rho_rpi_1_1, y1r);
+    simde__m128i psi_r_m1_m1 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(rho_rpi_1_3, y1r);
+    simde__m128i psi_r_m1_m3 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(rho_rpi_3_1, y1r);
+    simde__m128i psi_r_m3_m1 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(rho_rpi_3_3, y1r);
+    simde__m128i psi_r_m3_m3 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(y1r, rho_rmi_1_1);
+    simde__m128i psi_r_m1_p1 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(y1r, rho_rmi_1_3);
+    simde__m128i psi_r_m1_p3 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(y1i, rho_rmi_1_1);
+    simde__m128i psi_i_m1_m1 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(y1i, rho_rmi_3_1);
+    simde__m128i psi_i_m1_m3 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(y1r, rho_rmi_3_1);
+    simde__m128i psi_r_m3_p1 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(y1r, rho_rmi_3_3);
+    simde__m128i psi_r_m3_p3 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(y1i, rho_rmi_1_3);
+    simde__m128i psi_i_m3_m1 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(y1i, rho_rmi_3_3);
+    simde__m128i psi_i_m3_m3 = simde_mm_abs_epi16(xmm2);
 
     // Rearrange desired MF output
     xmm0 = stream0_128i_in[i];
     xmm1 = stream0_128i_in[i+1];
-    xmm0 = _mm_shufflelo_epi16(xmm0,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm0 = _mm_shufflehi_epi16(xmm0,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm0 = _mm_shuffle_epi32(xmm0,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm1 = _mm_shufflelo_epi16(xmm1,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm1 = _mm_shufflehi_epi16(xmm1,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm1 = _mm_shuffle_epi32(xmm1,0xd8); //_MM_SHUFFLE(0,2,1,3));
+    xmm0 = simde_mm_shufflelo_epi16(xmm0,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
+    xmm0 = simde_mm_shufflehi_epi16(xmm0,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
+    xmm0 = simde_mm_shuffle_epi32(xmm0,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
+    xmm1 = simde_mm_shufflelo_epi16(xmm1,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
+    xmm1 = simde_mm_shufflehi_epi16(xmm1,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
+    xmm1 = simde_mm_shuffle_epi32(xmm1,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
     //xmm0 = [Re(0,1) Re(2,3) Im(0,1) Im(2,3)]
     //xmm1 = [Re(4,5) Re(6,7) Im(4,5) Im(6,7)]
-    simde__m128i y0r = _mm_unpacklo_epi64(xmm0, xmm1); // = [y0r(1),y0r(2),y0r(3),y0r(4)]
-    simde__m128i y0i = _mm_unpackhi_epi64(xmm0, xmm1);
+    simde__m128i y0r = simde_mm_unpacklo_epi64(xmm0, xmm1); // = [y0r(1),y0r(2),y0r(3),y0r(4)]
+    simde__m128i y0i = simde_mm_unpackhi_epi64(xmm0, xmm1);
 
     // Rearrange desired channel magnitudes
     xmm2 = ch_mag_128i[i]; // = [|h|^2(1),|h|^2(1),|h|^2(2),|h|^2(2)]*(2/sqrt(10))
     xmm3 = ch_mag_128i[i+1]; // = [|h|^2(3),|h|^2(3),|h|^2(4),|h|^2(4)]*(2/sqrt(10))
-    xmm2 = _mm_shufflelo_epi16(xmm2,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm2 = _mm_shufflehi_epi16(xmm2,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm2 = _mm_shuffle_epi32(xmm2,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm3 = _mm_shufflelo_epi16(xmm3,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm3 = _mm_shufflehi_epi16(xmm3,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm3 = _mm_shuffle_epi32(xmm3,0xd8); //_MM_SHUFFLE(0,2,1,3));
+    xmm2 = simde_mm_shufflelo_epi16(xmm2,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
+    xmm2 = simde_mm_shufflehi_epi16(xmm2,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
+    xmm2 = simde_mm_shuffle_epi32(xmm2,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
+    xmm3 = simde_mm_shufflelo_epi16(xmm3,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
+    xmm3 = simde_mm_shufflehi_epi16(xmm3,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
+    xmm3 = simde_mm_shuffle_epi32(xmm3,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
 
-    ch_mag_des = _mm_unpacklo_epi64(xmm2,xmm3); // = [|h|^2(1),|h|^2(2),|h|^2(3),|h|^2(4)]*(2/sqrt(10))
+    ch_mag_des = simde_mm_unpacklo_epi64(xmm2,xmm3); // = [|h|^2(1),|h|^2(2),|h|^2(3),|h|^2(4)]*(2/sqrt(10))
 
     // Rearrange interfering channel magnitudes
     xmm2 = ch_mag_128i_i[i];
     xmm3 = ch_mag_128i_i[i+1];
 
-    xmm2 = _mm_shufflelo_epi16(xmm2,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm2 = _mm_shufflehi_epi16(xmm2,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm2 = _mm_shuffle_epi32(xmm2,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm3 = _mm_shufflelo_epi16(xmm3,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm3 = _mm_shufflehi_epi16(xmm3,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm3 = _mm_shuffle_epi32(xmm3,0xd8); //_MM_SHUFFLE(0,2,1,3));
+    xmm2 = simde_mm_shufflelo_epi16(xmm2,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
+    xmm2 = simde_mm_shufflehi_epi16(xmm2,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
+    xmm2 = simde_mm_shuffle_epi32(xmm2,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
+    xmm3 = simde_mm_shufflelo_epi16(xmm3,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
+    xmm3 = simde_mm_shufflehi_epi16(xmm3,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
+    xmm3 = simde_mm_shuffle_epi32(xmm3,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
 
-    ch_mag_int  = _mm_unpacklo_epi64(xmm2,xmm3);
+    ch_mag_int  = simde_mm_unpacklo_epi64(xmm2,xmm3);
 
     // Scale MF output of desired signal
-    y0r_over_sqrt10 = _mm_mulhi_epi16(y0r,ONE_OVER_SQRT_10);
-    y0i_over_sqrt10 = _mm_mulhi_epi16(y0i,ONE_OVER_SQRT_10);
-    y0r_three_over_sqrt10 = _mm_mulhi_epi16(y0r,THREE_OVER_SQRT_10);
-    y0i_three_over_sqrt10 = _mm_mulhi_epi16(y0i,THREE_OVER_SQRT_10);
-    y0r_three_over_sqrt10 = _mm_slli_epi16(y0r_three_over_sqrt10,1);
-    y0i_three_over_sqrt10 = _mm_slli_epi16(y0i_three_over_sqrt10,1);
+    y0r_over_sqrt10 = simde_mm_mulhi_epi16(y0r,ONE_OVER_SQRT_10);
+    y0i_over_sqrt10 = simde_mm_mulhi_epi16(y0i,ONE_OVER_SQRT_10);
+    y0r_three_over_sqrt10 = simde_mm_mulhi_epi16(y0r,THREE_OVER_SQRT_10);
+    y0i_three_over_sqrt10 = simde_mm_mulhi_epi16(y0i,THREE_OVER_SQRT_10);
+    y0r_three_over_sqrt10 = simde_mm_slli_epi16(y0r_three_over_sqrt10,1);
+    y0i_three_over_sqrt10 = simde_mm_slli_epi16(y0i_three_over_sqrt10,1);
 
     // Compute necessary combination of required terms
-    simde__m128i y0_p_1_1 = _mm_adds_epi16(y0r_over_sqrt10, y0i_over_sqrt10);
-    simde__m128i y0_m_1_1 = _mm_subs_epi16(y0r_over_sqrt10, y0i_over_sqrt10);
+    simde__m128i y0_p_1_1 = simde_mm_adds_epi16(y0r_over_sqrt10, y0i_over_sqrt10);
+    simde__m128i y0_m_1_1 = simde_mm_subs_epi16(y0r_over_sqrt10, y0i_over_sqrt10);
 
-    simde__m128i y0_p_1_3 = _mm_adds_epi16(y0r_over_sqrt10, y0i_three_over_sqrt10);
-    simde__m128i y0_m_1_3 = _mm_subs_epi16(y0r_over_sqrt10, y0i_three_over_sqrt10);
+    simde__m128i y0_p_1_3 = simde_mm_adds_epi16(y0r_over_sqrt10, y0i_three_over_sqrt10);
+    simde__m128i y0_m_1_3 = simde_mm_subs_epi16(y0r_over_sqrt10, y0i_three_over_sqrt10);
 
-    simde__m128i y0_p_3_1 = _mm_adds_epi16(y0r_three_over_sqrt10, y0i_over_sqrt10);
-    simde__m128i y0_m_3_1 = _mm_subs_epi16(y0r_three_over_sqrt10, y0i_over_sqrt10);
+    simde__m128i y0_p_3_1 = simde_mm_adds_epi16(y0r_three_over_sqrt10, y0i_over_sqrt10);
+    simde__m128i y0_m_3_1 = simde_mm_subs_epi16(y0r_three_over_sqrt10, y0i_over_sqrt10);
 
-    simde__m128i y0_p_3_3 = _mm_adds_epi16(y0r_three_over_sqrt10, y0i_three_over_sqrt10);
-    simde__m128i y0_m_3_3 = _mm_subs_epi16(y0r_three_over_sqrt10, y0i_three_over_sqrt10);
+    simde__m128i y0_p_3_3 = simde_mm_adds_epi16(y0r_three_over_sqrt10, y0i_three_over_sqrt10);
+    simde__m128i y0_m_3_3 = simde_mm_subs_epi16(y0r_three_over_sqrt10, y0i_three_over_sqrt10);
     simde__m128i tmp_result, tmp_result2;
     // Compute optimal interfering symbol magnitude
     interference_abs_epi16(psi_r_p1_p1 ,ch_mag_int,a_r_p1_p1,ONE_OVER_SQRT_10_Q15, THREE_OVER_SQRT_10);
@@ -2561,189 +2313,180 @@ void qam16_qam16(short *stream0_in,
     square_a_epi16(a_r_m3_m3,a_i_m3_m3,ch_mag_int,SQRT_10_OVER_FOUR,a_sq_m3_m3);
 
     // Computing different multiples of channel norms
-    ch_mag_over_10=_mm_mulhi_epi16(ch_mag_des, ONE_OVER_TWO_SQRT_10);
-    ch_mag_over_2=_mm_mulhi_epi16(ch_mag_des, SQRT_10_OVER_FOUR);
-    ch_mag_over_2=_mm_slli_epi16(ch_mag_over_2, 1);
-    ch_mag_9_over_10=_mm_mulhi_epi16(ch_mag_des, NINE_OVER_TWO_SQRT_10);
-    ch_mag_9_over_10=_mm_slli_epi16(ch_mag_9_over_10, 2);
+    ch_mag_over_10   = simde_mm_mulhi_epi16(ch_mag_des, ONE_OVER_TWO_SQRT_10);
+    ch_mag_over_2    = simde_mm_mulhi_epi16(ch_mag_des, SQRT_10_OVER_FOUR);
+    ch_mag_over_2    = simde_mm_slli_epi16(ch_mag_over_2, 1);
+    ch_mag_9_over_10 = simde_mm_mulhi_epi16(ch_mag_des, NINE_OVER_TWO_SQRT_10);
+    ch_mag_9_over_10 = simde_mm_slli_epi16(ch_mag_9_over_10, 2);
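+    // Given ch_mag_des = (2/sqrt(10))*|h0|^2 (see above), these evaluate to
+    // |h0|^2/10, |h0|^2/2 and 9*|h0|^2/10, i.e. |x0|^2*|h0|^2/2 for the three
+    // 16QAM symbol energy levels |x0|^2 in {2,10,18}/10.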
 
     // Computing Metrics
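+    // Each metric below is (psi_a - a_sq) + <y0,x0> - |x0|^2*|h0|^2/2: the max-log
+    // bit metric for the desired candidate x0 = (a+jb)/sqrt(10), with the optimal
+    // interfering symbol already folded in through the psi_a/a_sq terms.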
-    xmm0 = _mm_subs_epi16(psi_a_p1_p1,a_sq_p1_p1);
-    xmm1 = _mm_adds_epi16(xmm0,y0_p_1_1);
-    simde__m128i bit_met_p1_p1 = _mm_subs_epi16(xmm1, ch_mag_over_10);
+    xmm0 = simde_mm_subs_epi16(psi_a_p1_p1, a_sq_p1_p1);
+    xmm1 = simde_mm_adds_epi16(xmm0, y0_p_1_1);
+    simde__m128i bit_met_p1_p1 = simde_mm_subs_epi16(xmm1, ch_mag_over_10);
 
-    xmm0 = _mm_subs_epi16(psi_a_p1_p3,a_sq_p1_p3);
-    xmm1 = _mm_adds_epi16(xmm0,y0_p_1_3);
-    simde__m128i bit_met_p1_p3 = _mm_subs_epi16(xmm1, ch_mag_over_2);
+    xmm0 = simde_mm_subs_epi16(psi_a_p1_p3, a_sq_p1_p3);
+    xmm1 = simde_mm_adds_epi16(xmm0, y0_p_1_3);
+    simde__m128i bit_met_p1_p3 = simde_mm_subs_epi16(xmm1, ch_mag_over_2);
 
-    xmm0 = _mm_subs_epi16(psi_a_p1_m1,a_sq_p1_m1);
-    xmm1 = _mm_adds_epi16(xmm0,y0_m_1_1);
-    simde__m128i bit_met_p1_m1 = _mm_subs_epi16(xmm1, ch_mag_over_10);
+    xmm0 = simde_mm_subs_epi16(psi_a_p1_m1, a_sq_p1_m1);
+    xmm1 = simde_mm_adds_epi16(xmm0, y0_m_1_1);
+    simde__m128i bit_met_p1_m1 = simde_mm_subs_epi16(xmm1, ch_mag_over_10);
 
-    xmm0 = _mm_subs_epi16(psi_a_p1_m3,a_sq_p1_m3);
-    xmm1 = _mm_adds_epi16(xmm0,y0_m_1_3);
-    simde__m128i bit_met_p1_m3 = _mm_subs_epi16(xmm1, ch_mag_over_2);
+    xmm0 = simde_mm_subs_epi16(psi_a_p1_m3, a_sq_p1_m3);
+    xmm1 = simde_mm_adds_epi16(xmm0, y0_m_1_3);
+    simde__m128i bit_met_p1_m3 = simde_mm_subs_epi16(xmm1, ch_mag_over_2);
 
-    xmm0 = _mm_subs_epi16(psi_a_p3_p1,a_sq_p3_p1);
-    xmm1 = _mm_adds_epi16(xmm0,y0_p_3_1);
-    simde__m128i bit_met_p3_p1 = _mm_subs_epi16(xmm1, ch_mag_over_2);
+    xmm0 = simde_mm_subs_epi16(psi_a_p3_p1, a_sq_p3_p1);
+    xmm1 = simde_mm_adds_epi16(xmm0, y0_p_3_1);
+    simde__m128i bit_met_p3_p1 = simde_mm_subs_epi16(xmm1, ch_mag_over_2);
 
-    xmm0 = _mm_subs_epi16(psi_a_p3_p3,a_sq_p3_p3);
-    xmm1 = _mm_adds_epi16(xmm0,y0_p_3_3);
-    simde__m128i bit_met_p3_p3 = _mm_subs_epi16(xmm1, ch_mag_9_over_10);
+    xmm0 = simde_mm_subs_epi16(psi_a_p3_p3, a_sq_p3_p3);
+    xmm1 = simde_mm_adds_epi16(xmm0, y0_p_3_3);
+    simde__m128i bit_met_p3_p3 = simde_mm_subs_epi16(xmm1, ch_mag_9_over_10);
 
-    xmm0 = _mm_subs_epi16(psi_a_p3_m1,a_sq_p3_m1);
-    xmm1 = _mm_adds_epi16(xmm0,y0_m_3_1);
-    simde__m128i bit_met_p3_m1 = _mm_subs_epi16(xmm1, ch_mag_over_2);
+    xmm0 = simde_mm_subs_epi16(psi_a_p3_m1, a_sq_p3_m1);
+    xmm1 = simde_mm_adds_epi16(xmm0, y0_m_3_1);
+    simde__m128i bit_met_p3_m1 = simde_mm_subs_epi16(xmm1, ch_mag_over_2);
 
-    xmm0 = _mm_subs_epi16(psi_a_p3_m3,a_sq_p3_m3);
-    xmm1 = _mm_adds_epi16(xmm0,y0_m_3_3);
-    simde__m128i bit_met_p3_m3 = _mm_subs_epi16(xmm1, ch_mag_9_over_10);
+    xmm0 = simde_mm_subs_epi16(psi_a_p3_m3, a_sq_p3_m3);
+    xmm1 = simde_mm_adds_epi16(xmm0, y0_m_3_3);
+    simde__m128i bit_met_p3_m3 = simde_mm_subs_epi16(xmm1, ch_mag_9_over_10);
 
-    xmm0 = _mm_subs_epi16(psi_a_m1_p1,a_sq_m1_p1);
-    xmm1 = _mm_subs_epi16(xmm0,y0_m_1_1);
-    simde__m128i bit_met_m1_p1 = _mm_subs_epi16(xmm1, ch_mag_over_10);
+    xmm0 = simde_mm_subs_epi16(psi_a_m1_p1, a_sq_m1_p1);
+    xmm1 = simde_mm_subs_epi16(xmm0, y0_m_1_1);
+    simde__m128i bit_met_m1_p1 = simde_mm_subs_epi16(xmm1, ch_mag_over_10);
 
-    xmm0 = _mm_subs_epi16(psi_a_m1_p3,a_sq_m1_p3);
-    xmm1 = _mm_subs_epi16(xmm0,y0_m_1_3);
-    simde__m128i bit_met_m1_p3 = _mm_subs_epi16(xmm1, ch_mag_over_2);
+    xmm0 = simde_mm_subs_epi16(psi_a_m1_p3, a_sq_m1_p3);
+    xmm1 = simde_mm_subs_epi16(xmm0, y0_m_1_3);
+    simde__m128i bit_met_m1_p3 = simde_mm_subs_epi16(xmm1, ch_mag_over_2);
 
-    xmm0 = _mm_subs_epi16(psi_a_m1_m1,a_sq_m1_m1);
-    xmm1 = _mm_subs_epi16(xmm0,y0_p_1_1);
-    simde__m128i bit_met_m1_m1 = _mm_subs_epi16(xmm1, ch_mag_over_10);
+    xmm0 = simde_mm_subs_epi16(psi_a_m1_m1, a_sq_m1_m1);
+    xmm1 = simde_mm_subs_epi16(xmm0, y0_p_1_1);
+    simde__m128i bit_met_m1_m1 = simde_mm_subs_epi16(xmm1, ch_mag_over_10);
 
-    xmm0 = _mm_subs_epi16(psi_a_m1_m3,a_sq_m1_m3);
-    xmm1 = _mm_subs_epi16(xmm0,y0_p_1_3);
-    simde__m128i bit_met_m1_m3 = _mm_subs_epi16(xmm1, ch_mag_over_2);
+    xmm0 = simde_mm_subs_epi16(psi_a_m1_m3, a_sq_m1_m3);
+    xmm1 = simde_mm_subs_epi16(xmm0, y0_p_1_3);
+    simde__m128i bit_met_m1_m3 = simde_mm_subs_epi16(xmm1, ch_mag_over_2);
 
-    xmm0 = _mm_subs_epi16(psi_a_m3_p1,a_sq_m3_p1);
-    xmm1 = _mm_subs_epi16(xmm0,y0_m_3_1);
-    simde__m128i bit_met_m3_p1 = _mm_subs_epi16(xmm1, ch_mag_over_2);
+    xmm0 = simde_mm_subs_epi16(psi_a_m3_p1, a_sq_m3_p1);
+    xmm1 = simde_mm_subs_epi16(xmm0, y0_m_3_1);
+    simde__m128i bit_met_m3_p1 = simde_mm_subs_epi16(xmm1, ch_mag_over_2);
 
-    xmm0 = _mm_subs_epi16(psi_a_m3_p3,a_sq_m3_p3);
-    xmm1 = _mm_subs_epi16(xmm0,y0_m_3_3);
-    simde__m128i bit_met_m3_p3 = _mm_subs_epi16(xmm1, ch_mag_9_over_10);
+    xmm0 = simde_mm_subs_epi16(psi_a_m3_p3, a_sq_m3_p3);
+    xmm1 = simde_mm_subs_epi16(xmm0, y0_m_3_3);
+    simde__m128i bit_met_m3_p3 = simde_mm_subs_epi16(xmm1, ch_mag_9_over_10);
 
-    xmm0 = _mm_subs_epi16(psi_a_m3_m1,a_sq_m3_m1);
-    xmm1 = _mm_subs_epi16(xmm0,y0_p_3_1);
-    simde__m128i bit_met_m3_m1 = _mm_subs_epi16(xmm1, ch_mag_over_2);
+    xmm0 = simde_mm_subs_epi16(psi_a_m3_m1, a_sq_m3_m1);
+    xmm1 = simde_mm_subs_epi16(xmm0, y0_p_3_1);
+    simde__m128i bit_met_m3_m1 = simde_mm_subs_epi16(xmm1, ch_mag_over_2);
 
-    xmm0 = _mm_subs_epi16(psi_a_m3_m3,a_sq_m3_m3);
-    xmm1 = _mm_subs_epi16(xmm0,y0_p_3_3);
-    simde__m128i bit_met_m3_m3 = _mm_subs_epi16(xmm1, ch_mag_9_over_10);
+    xmm0 = simde_mm_subs_epi16(psi_a_m3_m3, a_sq_m3_m3);
+    xmm1 = simde_mm_subs_epi16(xmm0, y0_p_3_3);
+    simde__m128i bit_met_m3_m3 = simde_mm_subs_epi16(xmm1, ch_mag_9_over_10);
 
     // LLR of the first bit
     // Bit = 1
-    xmm0 = _mm_max_epi16(bit_met_m1_p1,bit_met_m1_p3);
-    xmm1 = _mm_max_epi16(bit_met_m1_m1,bit_met_m1_m3);
-    xmm2 = _mm_max_epi16(bit_met_m3_p1,bit_met_m3_p3);
-    xmm3 = _mm_max_epi16(bit_met_m3_m1,bit_met_m3_m3);
-    xmm4 = _mm_max_epi16(xmm0,xmm1);
-    xmm5 = _mm_max_epi16(xmm2,xmm3);
-    simde__m128i logmax_num_re0 = _mm_max_epi16(xmm4, xmm5);
+    xmm0 = simde_mm_max_epi16(bit_met_m1_p1, bit_met_m1_p3);
+    xmm1 = simde_mm_max_epi16(bit_met_m1_m1, bit_met_m1_m3);
+    xmm2 = simde_mm_max_epi16(bit_met_m3_p1, bit_met_m3_p3);
+    xmm3 = simde_mm_max_epi16(bit_met_m3_m1, bit_met_m3_m3);
+    xmm4 = simde_mm_max_epi16(xmm0, xmm1);
+    xmm5 = simde_mm_max_epi16(xmm2, xmm3);
+    simde__m128i logmax_num_re0 = simde_mm_max_epi16(xmm4, xmm5);
 
     // Bit = 0
-    xmm0 = _mm_max_epi16(bit_met_p1_p1,bit_met_p1_p3);
-    xmm1 = _mm_max_epi16(bit_met_p1_m1,bit_met_p1_m3);
-    xmm2 = _mm_max_epi16(bit_met_p3_p1,bit_met_p3_p3);
-    xmm3 = _mm_max_epi16(bit_met_p3_m1,bit_met_p3_m3);
-    xmm4 = _mm_max_epi16(xmm0,xmm1);
-    xmm5 = _mm_max_epi16(xmm2,xmm3);
-    simde__m128i logmax_den_re0 = _mm_max_epi16(xmm4, xmm5);
+    xmm0 = simde_mm_max_epi16(bit_met_p1_p1, bit_met_p1_p3);
+    xmm1 = simde_mm_max_epi16(bit_met_p1_m1, bit_met_p1_m3);
+    xmm2 = simde_mm_max_epi16(bit_met_p3_p1, bit_met_p3_p3);
+    xmm3 = simde_mm_max_epi16(bit_met_p3_m1, bit_met_p3_m3);
+    xmm4 = simde_mm_max_epi16(xmm0, xmm1);
+    xmm5 = simde_mm_max_epi16(xmm2, xmm3);
+    simde__m128i logmax_den_re0 = simde_mm_max_epi16(xmm4, xmm5);
 
     // LLR of first bit [L1(1), L1(2), L1(3), L1(4), L1(5), L1(6), L1(7), L1(8)]
-    y0r = _mm_subs_epi16(logmax_den_re0,logmax_num_re0);
+    y0r = simde_mm_subs_epi16(logmax_den_re0,logmax_num_re0);
 
     // LLR of the second bit
     // Bit = 1
-    xmm0 = _mm_max_epi16(bit_met_p1_m1,bit_met_p3_m1);
-    xmm1 = _mm_max_epi16(bit_met_m1_m1,bit_met_m3_m1);
-    xmm2 = _mm_max_epi16(bit_met_p1_m3,bit_met_p3_m3);
-    xmm3 = _mm_max_epi16(bit_met_m1_m3,bit_met_m3_m3);
-    xmm4 = _mm_max_epi16(xmm0,xmm1);
-    xmm5 = _mm_max_epi16(xmm2,xmm3);
-    simde__m128i logmax_num_re1 = _mm_max_epi16(xmm4, xmm5);
+    xmm0 = simde_mm_max_epi16(bit_met_p1_m1, bit_met_p3_m1);
+    xmm1 = simde_mm_max_epi16(bit_met_m1_m1, bit_met_m3_m1);
+    xmm2 = simde_mm_max_epi16(bit_met_p1_m3, bit_met_p3_m3);
+    xmm3 = simde_mm_max_epi16(bit_met_m1_m3, bit_met_m3_m3);
+    xmm4 = simde_mm_max_epi16(xmm0, xmm1);
+    xmm5 = simde_mm_max_epi16(xmm2, xmm3);
+    simde__m128i logmax_num_re1 = simde_mm_max_epi16(xmm4, xmm5);
 
     // Bit = 0
-    xmm0 = _mm_max_epi16(bit_met_p1_p1,bit_met_p3_p1);
-    xmm1 = _mm_max_epi16(bit_met_m1_p1,bit_met_m3_p1);
-    xmm2 = _mm_max_epi16(bit_met_p1_p3,bit_met_p3_p3);
-    xmm3 = _mm_max_epi16(bit_met_m1_p3,bit_met_m3_p3);
-    xmm4 = _mm_max_epi16(xmm0,xmm1);
-    xmm5 = _mm_max_epi16(xmm2,xmm3);
-    simde__m128i logmax_den_re1 = _mm_max_epi16(xmm4, xmm5);
+    xmm0 = simde_mm_max_epi16(bit_met_p1_p1, bit_met_p3_p1);
+    xmm1 = simde_mm_max_epi16(bit_met_m1_p1, bit_met_m3_p1);
+    xmm2 = simde_mm_max_epi16(bit_met_p1_p3, bit_met_p3_p3);
+    xmm3 = simde_mm_max_epi16(bit_met_m1_p3, bit_met_m3_p3);
+    xmm4 = simde_mm_max_epi16(xmm0, xmm1);
+    xmm5 = simde_mm_max_epi16(xmm2, xmm3);
+    simde__m128i logmax_den_re1 = simde_mm_max_epi16(xmm4, xmm5);
 
     // LLR of second bit [L2(1), L2(2), L2(3), L2(4)]
-    y1r = _mm_subs_epi16(logmax_den_re1,logmax_num_re1);
+    y1r = simde_mm_subs_epi16(logmax_den_re1,logmax_num_re1);
 
     // LLR of the third bit
     // Bit = 1
-    xmm0 = _mm_max_epi16(bit_met_m3_p1,bit_met_m3_p3);
-    xmm1 = _mm_max_epi16(bit_met_m3_m1,bit_met_m3_m3);
-    xmm2 = _mm_max_epi16(bit_met_p3_p1,bit_met_p3_p3);
-    xmm3 = _mm_max_epi16(bit_met_p3_m1,bit_met_p3_m3);
-    xmm4 = _mm_max_epi16(xmm0,xmm1);
-    xmm5 = _mm_max_epi16(xmm2,xmm3);
-    simde__m128i logmax_num_im0 = _mm_max_epi16(xmm4, xmm5);
+    xmm0 = simde_mm_max_epi16(bit_met_m3_p1, bit_met_m3_p3);
+    xmm1 = simde_mm_max_epi16(bit_met_m3_m1, bit_met_m3_m3);
+    xmm2 = simde_mm_max_epi16(bit_met_p3_p1, bit_met_p3_p3);
+    xmm3 = simde_mm_max_epi16(bit_met_p3_m1, bit_met_p3_m3);
+    xmm4 = simde_mm_max_epi16(xmm0, xmm1);
+    xmm5 = simde_mm_max_epi16(xmm2, xmm3);
+    simde__m128i logmax_num_im0 = simde_mm_max_epi16(xmm4, xmm5);
 
     // Bit = 0
-    xmm0 = _mm_max_epi16(bit_met_m1_p1,bit_met_m1_p3);
-    xmm1 = _mm_max_epi16(bit_met_m1_m1,bit_met_m1_m3);
-    xmm2 = _mm_max_epi16(bit_met_p1_p1,bit_met_p1_p3);
-    xmm3 = _mm_max_epi16(bit_met_p1_m1,bit_met_p1_m3);
-    xmm4 = _mm_max_epi16(xmm0,xmm1);
-    xmm5 = _mm_max_epi16(xmm2,xmm3);
-    simde__m128i logmax_den_im0 = _mm_max_epi16(xmm4, xmm5);
+    xmm0 = simde_mm_max_epi16(bit_met_m1_p1, bit_met_m1_p3);
+    xmm1 = simde_mm_max_epi16(bit_met_m1_m1, bit_met_m1_m3);
+    xmm2 = simde_mm_max_epi16(bit_met_p1_p1, bit_met_p1_p3);
+    xmm3 = simde_mm_max_epi16(bit_met_p1_m1, bit_met_p1_m3);
+    xmm4 = simde_mm_max_epi16(xmm0, xmm1);
+    xmm5 = simde_mm_max_epi16(xmm2, xmm3);
+    simde__m128i logmax_den_im0 = simde_mm_max_epi16(xmm4, xmm5);
 
     // LLR of third bit [L3(1), L3(2), L3(3), L3(4)]
-    y0i = _mm_subs_epi16(logmax_den_im0,logmax_num_im0);
+    y0i = simde_mm_subs_epi16(logmax_den_im0,logmax_num_im0);
 
     // LLR of the fourth bit
     // Bit = 1
-    xmm0 = _mm_max_epi16(bit_met_p1_m3,bit_met_p3_m3);
-    xmm1 = _mm_max_epi16(bit_met_m1_m3,bit_met_m3_m3);
-    xmm2 = _mm_max_epi16(bit_met_p1_p3,bit_met_p3_p3);
-    xmm3 = _mm_max_epi16(bit_met_m1_p3,bit_met_m3_p3);
-    xmm4 = _mm_max_epi16(xmm0,xmm1);
-    xmm5 = _mm_max_epi16(xmm2,xmm3);
-    simde__m128i logmax_num_im1 = _mm_max_epi16(xmm4, xmm5);
+    xmm0 = simde_mm_max_epi16(bit_met_p1_m3, bit_met_p3_m3);
+    xmm1 = simde_mm_max_epi16(bit_met_m1_m3, bit_met_m3_m3);
+    xmm2 = simde_mm_max_epi16(bit_met_p1_p3, bit_met_p3_p3);
+    xmm3 = simde_mm_max_epi16(bit_met_m1_p3, bit_met_m3_p3);
+    xmm4 = simde_mm_max_epi16(xmm0, xmm1);
+    xmm5 = simde_mm_max_epi16(xmm2, xmm3);
+    simde__m128i logmax_num_im1 = simde_mm_max_epi16(xmm4, xmm5);
 
     // Bit = 0
-    xmm0 = _mm_max_epi16(bit_met_p1_m1,bit_met_p3_m1);
-    xmm1 = _mm_max_epi16(bit_met_m1_m1,bit_met_m3_m1);
-    xmm2 = _mm_max_epi16(bit_met_p1_p1,bit_met_p3_p1);
-    xmm3 = _mm_max_epi16(bit_met_m1_p1,bit_met_m3_p1);
-    xmm4 = _mm_max_epi16(xmm0,xmm1);
-    xmm5 = _mm_max_epi16(xmm2,xmm3);
-    simde__m128i logmax_den_im1 = _mm_max_epi16(xmm4, xmm5);
+    xmm0 = simde_mm_max_epi16(bit_met_p1_m1, bit_met_p3_m1);
+    xmm1 = simde_mm_max_epi16(bit_met_m1_m1, bit_met_m3_m1);
+    xmm2 = simde_mm_max_epi16(bit_met_p1_p1, bit_met_p3_p1);
+    xmm3 = simde_mm_max_epi16(bit_met_m1_p1, bit_met_m3_p1);
+    xmm4 = simde_mm_max_epi16(xmm0, xmm1);
+    xmm5 = simde_mm_max_epi16(xmm2, xmm3);
+    simde__m128i logmax_den_im1 = simde_mm_max_epi16(xmm4, xmm5);
 
     // LLR of fourth bit [L4(1), L4(2), L4(3), L4(4)]
-    y1i = _mm_subs_epi16(logmax_den_im1,logmax_num_im1);
+    y1i = simde_mm_subs_epi16(logmax_den_im1,logmax_num_im1);
 
     // Pack LLRs in output
     // [L1(1), L2(1), L1(2), L2(2), L1(3), L2(3), L1(4), L2(4)]
-    xmm0 = _mm_unpacklo_epi16(y0r,y1r);
+    xmm0 = simde_mm_unpacklo_epi16(y0r,y1r);
     // [L1(5), L2(5), L1(6), L2(6), L1(7), L2(7), L1(8), L2(8)]
-    xmm1 = _mm_unpackhi_epi16(y0r,y1r);
+    xmm1 = simde_mm_unpackhi_epi16(y0r,y1r);
     // [L3(1), L4(1), L3(2), L4(2), L3(3), L4(3), L3(4), L4(4)]
-    xmm2 = _mm_unpacklo_epi16(y0i,y1i);
+    xmm2 = simde_mm_unpacklo_epi16(y0i,y1i);
     // [L3(5), L4(5), L3(6), L4(6), L3(7), L4(7), L3(8), L4(8)]
-    xmm3 = _mm_unpackhi_epi16(y0i,y1i);
-
-    stream0_128i_out[2*i+0] = _mm_unpacklo_epi32(xmm0,xmm2); // 8LLRs, 2REs
-    stream0_128i_out[2*i+1] = _mm_unpackhi_epi32(xmm0,xmm2);
-    stream0_128i_out[2*i+2] = _mm_unpacklo_epi32(xmm1,xmm3);
-    stream0_128i_out[2*i+3] = _mm_unpackhi_epi32(xmm1,xmm3);
-#elif defined(__arm__) || defined(__aarch64__)
-
-#endif
+    xmm3 = simde_mm_unpackhi_epi16(y0i,y1i);
 
+    stream0_128i_out[2*i+0] = simde_mm_unpacklo_epi32(xmm0,xmm2); // 8LLRs, 2REs
+    stream0_128i_out[2*i+1] = simde_mm_unpackhi_epi32(xmm0,xmm2);
+    stream0_128i_out[2*i+2] = simde_mm_unpacklo_epi32(xmm1,xmm3);
+    stream0_128i_out[2*i+3] = simde_mm_unpackhi_epi32(xmm1,xmm3);
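+    // After the 32-bit interleave, each 64-bit half of a stored vector holds one
+    // RE's four LLRs in order [L1 L2 L3 L4] (8 LLRs / 2 REs per store, as noted).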
   }
-
-#if defined(__x86_64__) || defined(__i386__)
-  _mm_empty();
-  _m_empty();
-#endif
 }
 
 int dlsch_16qam_16qam_llr(LTE_DL_FRAME_PARMS *frame_parms,
@@ -2833,231 +2576,226 @@ void qam16_qam64(int16_t *stream0_in,
     stream0_out: output LLRs for 1st stream
   */
 
-#if defined(__x86_64__) || defined(__i386__)
-  __m128i *rho01_128i       = (__m128i *)rho01;
-  __m128i *stream0_128i_in  = (__m128i *)stream0_in;
-  __m128i *stream1_128i_in  = (__m128i *)stream1_in;
-  __m128i *stream0_128i_out = (__m128i *)stream0_out;
-  __m128i *ch_mag_128i      = (__m128i *)ch_mag;
-  __m128i *ch_mag_128i_i    = (__m128i *)ch_mag_i;
-
-
-  __m128i ONE_OVER_SQRT_2 = _mm_set1_epi16(23170); // round(1/sqrt(2)*2^15)
-  __m128i ONE_OVER_SQRT_10 = _mm_set1_epi16(20724); // round(1/sqrt(10)*2^16)
-  __m128i THREE_OVER_SQRT_10 = _mm_set1_epi16(31086); // round(3/sqrt(10)*2^15)
-  __m128i SQRT_10_OVER_FOUR = _mm_set1_epi16(25905); // round(sqrt(10)/4*2^15)
-  __m128i ONE_OVER_TWO_SQRT_10 = _mm_set1_epi16(10362); // round(1/2/sqrt(10)*2^16)
-  __m128i NINE_OVER_TWO_SQRT_10 = _mm_set1_epi16(23315); // round(9/2/sqrt(10)*2^14)
-  __m128i ONE_OVER_SQRT_2_42 = _mm_set1_epi16(3575); // round(1/sqrt(2*42)*2^15)
-  __m128i THREE_OVER_SQRT_2_42 = _mm_set1_epi16(10726); // round(3/sqrt(2*42)*2^15)
-  __m128i FIVE_OVER_SQRT_2_42 = _mm_set1_epi16(17876); // round(5/sqrt(2*42)*2^15)
-  __m128i SEVEN_OVER_SQRT_2_42 = _mm_set1_epi16(25027); // round(7/sqrt(2*42)*2^15)
-  __m128i SQRT_42_OVER_FOUR = _mm_set1_epi16(13272); // round(sqrt(42)/4*2^13), Q3.
-  __m128i ch_mag_des,ch_mag_int;
-  __m128i  y0r_over_sqrt10;
-  __m128i  y0i_over_sqrt10;
-  __m128i  y0r_three_over_sqrt10;
-  __m128i  y0i_three_over_sqrt10;
-  __m128i ch_mag_over_10;
-  __m128i ch_mag_over_2;
-  __m128i ch_mag_9_over_10;
-  __m128i ch_mag_int_with_sigma2;
-  __m128i two_ch_mag_int_with_sigma2;
-  __m128i three_ch_mag_int_with_sigma2;
-
-#elif defined(__arm__) || defined(__aarch64__)
+  simde__m128i *rho01_128i       = (simde__m128i *)rho01;
+  simde__m128i *stream0_128i_in  = (simde__m128i *)stream0_in;
+  simde__m128i *stream1_128i_in  = (simde__m128i *)stream1_in;
+  simde__m128i *stream0_128i_out = (simde__m128i *)stream0_out;
+  simde__m128i *ch_mag_128i      = (simde__m128i *)ch_mag;
+  simde__m128i *ch_mag_128i_i    = (simde__m128i *)ch_mag_i;
+
+
+  simde__m128i ONE_OVER_SQRT_2 = simde_mm_set1_epi16(23170); // round(1/sqrt(2)*2^15)
+  simde__m128i ONE_OVER_SQRT_10 = simde_mm_set1_epi16(20724); // round(1/sqrt(10)*2^16)
+  simde__m128i THREE_OVER_SQRT_10 = simde_mm_set1_epi16(31086); // round(3/sqrt(10)*2^15)
+  simde__m128i SQRT_10_OVER_FOUR = simde_mm_set1_epi16(25905); // round(sqrt(10)/4*2^15)
+  simde__m128i ONE_OVER_TWO_SQRT_10 = simde_mm_set1_epi16(10362); // round(1/2/sqrt(10)*2^16)
+  simde__m128i NINE_OVER_TWO_SQRT_10 = simde_mm_set1_epi16(23315); // round(9/2/sqrt(10)*2^14)
+  simde__m128i ONE_OVER_SQRT_2_42 = simde_mm_set1_epi16(3575); // round(1/sqrt(2*42)*2^15)
+  simde__m128i THREE_OVER_SQRT_2_42 = simde_mm_set1_epi16(10726); // round(3/sqrt(2*42)*2^15)
+  simde__m128i FIVE_OVER_SQRT_2_42 = simde_mm_set1_epi16(17876); // round(5/sqrt(2*42)*2^15)
+  simde__m128i SEVEN_OVER_SQRT_2_42 = simde_mm_set1_epi16(25027); // round(7/sqrt(2*42)*2^15)
+  simde__m128i SQRT_42_OVER_FOUR = simde_mm_set1_epi16(13272); // round(sqrt(42)/4*2^13), Q3.
+  simde__m128i ch_mag_des,ch_mag_int;
+  simde__m128i  y0r_over_sqrt10;
+  simde__m128i  y0i_over_sqrt10;
+  simde__m128i  y0r_three_over_sqrt10;
+  simde__m128i  y0i_three_over_sqrt10;
+  simde__m128i ch_mag_over_10;
+  simde__m128i ch_mag_over_2;
+  simde__m128i ch_mag_9_over_10;
+  simde__m128i ch_mag_int_with_sigma2;
+  simde__m128i two_ch_mag_int_with_sigma2;
+  simde__m128i three_ch_mag_int_with_sigma2;
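+  // The constants above are fixed point with the Q scaling noted per line (2^16,
+  // 2^15, 2^14, 2^13); simde_mm_mulhi_epi16 keeps the high 16 bits (>>16), so Q15
+  // products are typically re-scaled with a left shift of 1 and Q14 products with 2.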
 
-#endif
   int i;
 
   for (i=0; i<length>>2; i+=2) {
     // In one iteration, we deal with 8 REs
 
-#if defined(__x86_64__) || defined(__i386__)
     // Get rho
     simde__m128i xmm0 = rho01_128i[i];
     simde__m128i xmm1 = rho01_128i[i + 1];
-    xmm0 = _mm_shufflelo_epi16(xmm0,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm0 = _mm_shufflehi_epi16(xmm0,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm0 = _mm_shuffle_epi32(xmm0,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm1 = _mm_shufflelo_epi16(xmm1,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm1 = _mm_shufflehi_epi16(xmm1,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm1 = _mm_shuffle_epi32(xmm1,0xd8); //_MM_SHUFFLE(0,2,1,3));
+    xmm0 = simde_mm_shufflelo_epi16(xmm0, 0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
+    xmm0 = simde_mm_shufflehi_epi16(xmm0, 0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
+    xmm0 = simde_mm_shuffle_epi32(xmm0, 0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
+    xmm1 = simde_mm_shufflelo_epi16(xmm1, 0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
+    xmm1 = simde_mm_shufflehi_epi16(xmm1, 0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
+    xmm1 = simde_mm_shuffle_epi32(xmm1, 0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
     //xmm0 = [Re(0,1) Re(2,3) Im(0,1) Im(2,3)]
     //xmm1 = [Re(4,5) Re(6,7) Im(4,5) Im(6,7)]
-    simde__m128i xmm2 = _mm_unpacklo_epi64(xmm0, xmm1); // Re(rho)
-    simde__m128i xmm3 = _mm_unpackhi_epi64(xmm0, xmm1); // Im(rho)
-    simde__m128i rho_rpi = _mm_adds_epi16(xmm2, xmm3); // rho = Re(rho) + Im(rho)
-    simde__m128i rho_rmi = _mm_subs_epi16(xmm2, xmm3); // rho* = Re(rho) - Im(rho)
+    simde__m128i xmm2 = simde_mm_unpacklo_epi64(xmm0, xmm1); // Re(rho)
+    simde__m128i xmm3 = simde_mm_unpackhi_epi64(xmm0, xmm1); // Im(rho)
+    simde__m128i rho_rpi = simde_mm_adds_epi16(xmm2, xmm3); // rho = Re(rho) + Im(rho)
+    simde__m128i rho_rmi = simde_mm_subs_epi16(xmm2, xmm3); // rho* = Re(rho) - Im(rho)
 
     // Compute the different rhos
-    simde__m128i rho_rpi_1_1 = _mm_mulhi_epi16(rho_rpi, ONE_OVER_SQRT_10);
-    simde__m128i rho_rmi_1_1 = _mm_mulhi_epi16(rho_rmi, ONE_OVER_SQRT_10);
-    simde__m128i rho_rpi_3_3 = _mm_mulhi_epi16(rho_rpi, THREE_OVER_SQRT_10);
-    simde__m128i rho_rmi_3_3 = _mm_mulhi_epi16(rho_rmi, THREE_OVER_SQRT_10);
-    rho_rpi_3_3 = _mm_slli_epi16(rho_rpi_3_3,1);
-    rho_rmi_3_3 = _mm_slli_epi16(rho_rmi_3_3,1);
+    simde__m128i rho_rpi_1_1 = simde_mm_mulhi_epi16(rho_rpi, ONE_OVER_SQRT_10);
+    simde__m128i rho_rmi_1_1 = simde_mm_mulhi_epi16(rho_rmi, ONE_OVER_SQRT_10);
+    simde__m128i rho_rpi_3_3 = simde_mm_mulhi_epi16(rho_rpi, THREE_OVER_SQRT_10);
+    simde__m128i rho_rmi_3_3 = simde_mm_mulhi_epi16(rho_rmi, THREE_OVER_SQRT_10);
+    rho_rpi_3_3 = simde_mm_slli_epi16(rho_rpi_3_3, 1);
+    rho_rmi_3_3 = simde_mm_slli_epi16(rho_rmi_3_3, 1);
 
-    simde__m128i xmm4 = _mm_mulhi_epi16(xmm2, ONE_OVER_SQRT_10); // Re(rho)
-    simde__m128i xmm5 = _mm_mulhi_epi16(xmm3, THREE_OVER_SQRT_10); // Im(rho)
-    xmm5 = _mm_slli_epi16(xmm5,1);
+    simde__m128i xmm4 = simde_mm_mulhi_epi16(xmm2, ONE_OVER_SQRT_10); // Re(rho)
+    simde__m128i xmm5 = simde_mm_mulhi_epi16(xmm3, THREE_OVER_SQRT_10); // Im(rho)
+    xmm5 = simde_mm_slli_epi16(xmm5, 1);
 
-    simde__m128i rho_rpi_1_3 = _mm_adds_epi16(xmm4, xmm5);
-    simde__m128i rho_rmi_1_3 = _mm_subs_epi16(xmm4, xmm5);
+    simde__m128i rho_rpi_1_3 = simde_mm_adds_epi16(xmm4, xmm5);
+    simde__m128i rho_rmi_1_3 = simde_mm_subs_epi16(xmm4, xmm5);
 
-    simde__m128i xmm6 = _mm_mulhi_epi16(xmm2, THREE_OVER_SQRT_10); // Re(rho)
-    simde__m128i xmm7 = _mm_mulhi_epi16(xmm3, ONE_OVER_SQRT_10); // Im(rho)
-    xmm6 = _mm_slli_epi16(xmm6,1);
+    simde__m128i xmm6 = simde_mm_mulhi_epi16(xmm2, THREE_OVER_SQRT_10); // Re(rho)
+    simde__m128i xmm7 = simde_mm_mulhi_epi16(xmm3, ONE_OVER_SQRT_10); // Im(rho)
+    xmm6 = simde_mm_slli_epi16(xmm6, 1);
 
-    simde__m128i rho_rpi_3_1 = _mm_adds_epi16(xmm6, xmm7);
-    simde__m128i rho_rmi_3_1 = _mm_subs_epi16(xmm6, xmm7);
+    simde__m128i rho_rpi_3_1 = simde_mm_adds_epi16(xmm6, xmm7);
+    simde__m128i rho_rmi_3_1 = simde_mm_subs_epi16(xmm6, xmm7);
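+    // rho_rpi_a_b = (a*Re(rho) + b*Im(rho))/sqrt(10), and rho_rmi_a_b the same with
+    // -b: one correlation term per 16QAM interferer amplitude pair (a,b).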
 
     // Rearrange interfering MF output
     xmm0 = stream1_128i_in[i];
     xmm1 = stream1_128i_in[i+1];
-    xmm0 = _mm_shufflelo_epi16(xmm0,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm0 = _mm_shufflehi_epi16(xmm0,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm0 = _mm_shuffle_epi32(xmm0,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm1 = _mm_shufflelo_epi16(xmm1,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm1 = _mm_shufflehi_epi16(xmm1,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm1 = _mm_shuffle_epi32(xmm1,0xd8); //_MM_SHUFFLE(0,2,1,3));
+    xmm0 = simde_mm_shufflelo_epi16(xmm0,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
+    xmm0 = simde_mm_shufflehi_epi16(xmm0,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
+    xmm0 = simde_mm_shuffle_epi32(xmm0,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
+    xmm1 = simde_mm_shufflelo_epi16(xmm1,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
+    xmm1 = simde_mm_shufflehi_epi16(xmm1,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
+    xmm1 = simde_mm_shuffle_epi32(xmm1,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
     //xmm0 = [Re(0,1) Re(2,3) Im(0,1) Im(2,3)]
     //xmm1 = [Re(4,5) Re(6,7) Im(4,5) Im(6,7)]
-    simde__m128i y1r = _mm_unpacklo_epi64(xmm0, xmm1); //[y1r(1),y1r(2),y1r(3),y1r(4)]
-    simde__m128i y1i = _mm_unpackhi_epi64(xmm0, xmm1); //[y1i(1),y1i(2),y1i(3),y1i(4)]
-
-    xmm0 = _mm_setzero_si128(); // ZERO
-    xmm2 = _mm_subs_epi16(rho_rpi_1_1,y1r); // = [Re(rho)+ Im(rho)]/sqrt(10) - y1r
-    simde__m128i psi_r_p1_p1 = _mm_abs_epi16(xmm2); // = |[Re(rho)+ Im(rho)]/sqrt(10) - y1r|
-
-    xmm2= _mm_subs_epi16(rho_rmi_1_1,y1r);
-    simde__m128i psi_r_p1_m1 = _mm_abs_epi16(xmm2);
-    xmm2= _mm_subs_epi16(rho_rmi_1_1,y1i);
-    simde__m128i psi_i_p1_p1 = _mm_abs_epi16(xmm2);
-    xmm2= _mm_subs_epi16(rho_rpi_1_3,y1r);
-    simde__m128i psi_r_p1_p3 = _mm_abs_epi16(xmm2);
-    xmm2= _mm_subs_epi16(rho_rmi_1_3,y1r);
-    simde__m128i psi_r_p1_m3 = _mm_abs_epi16(xmm2);
-    xmm2= _mm_subs_epi16(rho_rmi_3_1,y1i);
-    simde__m128i psi_i_p1_p3 = _mm_abs_epi16(xmm2);
-    xmm2= _mm_subs_epi16(rho_rpi_3_1,y1r);
-    simde__m128i psi_r_p3_p1 = _mm_abs_epi16(xmm2);
-    xmm2= _mm_subs_epi16(rho_rmi_3_1,y1r);
-    simde__m128i psi_r_p3_m1 = _mm_abs_epi16(xmm2);
-    xmm2= _mm_subs_epi16(rho_rmi_1_3,y1i);
-    simde__m128i psi_i_p3_p1 = _mm_abs_epi16(xmm2);
-    xmm2= _mm_subs_epi16(rho_rpi_3_3,y1r);
-    simde__m128i psi_r_p3_p3 = _mm_abs_epi16(xmm2);
-    xmm2= _mm_subs_epi16(rho_rmi_3_3,y1r);
-    simde__m128i psi_r_p3_m3 = _mm_abs_epi16(xmm2);
-    xmm2= _mm_subs_epi16(rho_rmi_3_3,y1i);
-    simde__m128i psi_i_p3_p3 = _mm_abs_epi16(xmm2);
-    xmm2= _mm_subs_epi16(rho_rpi_1_1,y1i);
-    simde__m128i psi_i_m1_p1 = _mm_abs_epi16(xmm2);
-    xmm2= _mm_subs_epi16(rho_rpi_3_1,y1i);
-    simde__m128i psi_i_m1_p3 = _mm_abs_epi16(xmm2);
-    xmm2= _mm_subs_epi16(rho_rpi_1_3,y1i);
-    simde__m128i psi_i_m3_p1 = _mm_abs_epi16(xmm2);
-    xmm2= _mm_subs_epi16(rho_rpi_3_3,y1i);
-    simde__m128i psi_i_m3_p3 = _mm_abs_epi16(xmm2);
-    xmm2= _mm_adds_epi16(rho_rpi_1_1,y1i);
-    simde__m128i psi_i_p1_m1 = _mm_abs_epi16(xmm2);
-    xmm2= _mm_adds_epi16(rho_rpi_3_1,y1i);
-    simde__m128i psi_i_p1_m3 = _mm_abs_epi16(xmm2);
-    xmm2= _mm_adds_epi16(rho_rpi_1_3,y1i);
-    simde__m128i psi_i_p3_m1 = _mm_abs_epi16(xmm2);
-    xmm2= _mm_adds_epi16(rho_rpi_3_3,y1i);
-    simde__m128i psi_i_p3_m3 = _mm_abs_epi16(xmm2);
-    xmm2= _mm_adds_epi16(rho_rpi_1_1,y1r);
-    simde__m128i psi_r_m1_m1 = _mm_abs_epi16(xmm2);
-    xmm2= _mm_adds_epi16(rho_rpi_1_3,y1r);
-    simde__m128i psi_r_m1_m3 = _mm_abs_epi16(xmm2);
-    xmm2= _mm_adds_epi16(rho_rpi_3_1,y1r);
-    simde__m128i psi_r_m3_m1 = _mm_abs_epi16(xmm2);
-    xmm2= _mm_adds_epi16(rho_rpi_3_3,y1r);
-    simde__m128i psi_r_m3_m3 = _mm_abs_epi16(xmm2);
-    xmm2= _mm_adds_epi16(y1r,rho_rmi_1_1);
-    simde__m128i psi_r_m1_p1 = _mm_abs_epi16(xmm2);
-    xmm2= _mm_adds_epi16(y1r,rho_rmi_1_3);
-    simde__m128i psi_r_m1_p3 = _mm_abs_epi16(xmm2);
-    xmm2= _mm_adds_epi16(y1i,rho_rmi_1_1);
-    simde__m128i psi_i_m1_m1 = _mm_abs_epi16(xmm2);
-    xmm2= _mm_adds_epi16(y1i,rho_rmi_3_1);
-    simde__m128i psi_i_m1_m3 = _mm_abs_epi16(xmm2);
-    xmm2= _mm_adds_epi16(y1r,rho_rmi_3_1);
-    simde__m128i psi_r_m3_p1 = _mm_abs_epi16(xmm2);
-    xmm2= _mm_adds_epi16(y1r,rho_rmi_3_3);
-    simde__m128i psi_r_m3_p3 = _mm_abs_epi16(xmm2);
-    xmm2= _mm_adds_epi16(y1i,rho_rmi_1_3);
-    simde__m128i psi_i_m3_m1 = _mm_abs_epi16(xmm2);
-    xmm2= _mm_adds_epi16(y1i,rho_rmi_3_3);
-    simde__m128i psi_i_m3_m3 = _mm_abs_epi16(xmm2);
+    simde__m128i y1r = simde_mm_unpacklo_epi64(xmm0, xmm1); //[y1r(1),y1r(2),y1r(3),y1r(4)]
+    simde__m128i y1i = simde_mm_unpackhi_epi64(xmm0, xmm1); //[y1i(1),y1i(2),y1i(3),y1i(4)]
+
+    xmm0 = simde_mm_setzero_si128(); // ZERO
+    xmm2 = simde_mm_subs_epi16(rho_rpi_1_1, y1r); // = [Re(rho) + Im(rho)]/sqrt(10) - y1r
+    simde__m128i psi_r_p1_p1 = simde_mm_abs_epi16(xmm2); // = |[Re(rho) + Im(rho)]/sqrt(10) - y1r|
+
+    xmm2 = simde_mm_subs_epi16(rho_rmi_1_1, y1r);
+    simde__m128i psi_r_p1_m1 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rmi_1_1, y1i);
+    simde__m128i psi_i_p1_p1 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rpi_1_3, y1r);
+    simde__m128i psi_r_p1_p3 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rmi_1_3, y1r);
+    simde__m128i psi_r_p1_m3 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rmi_3_1, y1i);
+    simde__m128i psi_i_p1_p3 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rpi_3_1, y1r);
+    simde__m128i psi_r_p3_p1 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rmi_3_1, y1r);
+    simde__m128i psi_r_p3_m1 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rmi_1_3, y1i);
+    simde__m128i psi_i_p3_p1 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rpi_3_3, y1r);
+    simde__m128i psi_r_p3_p3 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rmi_3_3, y1r);
+    simde__m128i psi_r_p3_m3 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rmi_3_3, y1i);
+    simde__m128i psi_i_p3_p3 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rpi_1_1, y1i);
+    simde__m128i psi_i_m1_p1 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rpi_3_1, y1i);
+    simde__m128i psi_i_m1_p3 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rpi_1_3, y1i);
+    simde__m128i psi_i_m3_p1 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rpi_3_3, y1i);
+    simde__m128i psi_i_m3_p3 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(rho_rpi_1_1, y1i);
+    simde__m128i psi_i_p1_m1 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(rho_rpi_3_1, y1i);
+    simde__m128i psi_i_p1_m3 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(rho_rpi_1_3, y1i);
+    simde__m128i psi_i_p3_m1 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(rho_rpi_3_3, y1i);
+    simde__m128i psi_i_p3_m3 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(rho_rpi_1_1, y1r);
+    simde__m128i psi_r_m1_m1 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(rho_rpi_1_3, y1r);
+    simde__m128i psi_r_m1_m3 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(rho_rpi_3_1, y1r);
+    simde__m128i psi_r_m3_m1 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(rho_rpi_3_3, y1r);
+    simde__m128i psi_r_m3_m3 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(y1r, rho_rmi_1_1);
+    simde__m128i psi_r_m1_p1 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(y1r, rho_rmi_1_3);
+    simde__m128i psi_r_m1_p3 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(y1i, rho_rmi_1_1);
+    simde__m128i psi_i_m1_m1 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(y1i, rho_rmi_3_1);
+    simde__m128i psi_i_m1_m3 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(y1r, rho_rmi_3_1);
+    simde__m128i psi_r_m3_p1 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(y1r, rho_rmi_3_3);
+    simde__m128i psi_r_m3_p3 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(y1i, rho_rmi_1_3);
+    simde__m128i psi_i_m3_m1 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(y1i, rho_rmi_3_3);
+    simde__m128i psi_i_m3_m3 = simde_mm_abs_epi16(xmm2);
 
     // Rearrange desired MF output
     xmm0 = stream0_128i_in[i];
     xmm1 = stream0_128i_in[i+1];
-    xmm0 = _mm_shufflelo_epi16(xmm0,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm0 = _mm_shufflehi_epi16(xmm0,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm0 = _mm_shuffle_epi32(xmm0,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm1 = _mm_shufflelo_epi16(xmm1,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm1 = _mm_shufflehi_epi16(xmm1,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm1 = _mm_shuffle_epi32(xmm1,0xd8); //_MM_SHUFFLE(0,2,1,3));
+    xmm0 = simde_mm_shufflelo_epi16(xmm0,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
+    xmm0 = simde_mm_shufflehi_epi16(xmm0,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
+    xmm0 = simde_mm_shuffle_epi32(xmm0,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
+    xmm1 = simde_mm_shufflelo_epi16(xmm1,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
+    xmm1 = simde_mm_shufflehi_epi16(xmm1,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
+    xmm1 = simde_mm_shuffle_epi32(xmm1,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
     //xmm0 = [Re(0,1) Re(2,3) Im(0,1) Im(2,3)]
     //xmm1 = [Re(4,5) Re(6,7) Im(4,5) Im(6,7)]
-    simde__m128i y0r = _mm_unpacklo_epi64(xmm0, xmm1); // = [y0r(1),y0r(2),y0r(3),y0r(4)]
-    simde__m128i y0i = _mm_unpackhi_epi64(xmm0, xmm1);
+    simde__m128i y0r = simde_mm_unpacklo_epi64(xmm0, xmm1); // = [y0r(1),y0r(2),y0r(3),y0r(4)]
+    simde__m128i y0i = simde_mm_unpackhi_epi64(xmm0, xmm1);
 
     // Rearrange desired channel magnitudes
     xmm2 = ch_mag_128i[i]; // = [|h|^2(1),|h|^2(1),|h|^2(2),|h|^2(2)]*(2/sqrt(10))
     xmm3 = ch_mag_128i[i+1]; // = [|h|^2(3),|h|^2(3),|h|^2(4),|h|^2(4)]*(2/sqrt(10))
-    xmm2 = _mm_shufflelo_epi16(xmm2,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm2 = _mm_shufflehi_epi16(xmm2,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm2 = _mm_shuffle_epi32(xmm2,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm3 = _mm_shufflelo_epi16(xmm3,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm3 = _mm_shufflehi_epi16(xmm3,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm3 = _mm_shuffle_epi32(xmm3,0xd8); //_MM_SHUFFLE(0,2,1,3));
+    xmm2 = simde_mm_shufflelo_epi16(xmm2,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
+    xmm2 = simde_mm_shufflehi_epi16(xmm2,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
+    xmm2 = simde_mm_shuffle_epi32(xmm2,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
+    xmm3 = simde_mm_shufflelo_epi16(xmm3,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
+    xmm3 = simde_mm_shufflehi_epi16(xmm3,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
+    xmm3 = simde_mm_shuffle_epi32(xmm3,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
 
-    ch_mag_des = _mm_unpacklo_epi64(xmm2,xmm3); // = [|h|^2(1),|h|^2(2),|h|^2(3),|h|^2(4)]*(2/sqrt(10))
+    ch_mag_des = simde_mm_unpacklo_epi64(xmm2,xmm3); // = [|h|^2(1),|h|^2(2),|h|^2(3),|h|^2(4)]*(2/sqrt(10))
 
     // Rearrange interfering channel magnitudes
     xmm2 = ch_mag_128i_i[i];
     xmm3 = ch_mag_128i_i[i+1];
 
-    xmm2 = _mm_shufflelo_epi16(xmm2,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm2 = _mm_shufflehi_epi16(xmm2,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm2 = _mm_shuffle_epi32(xmm2,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm3 = _mm_shufflelo_epi16(xmm3,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm3 = _mm_shufflehi_epi16(xmm3,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm3 = _mm_shuffle_epi32(xmm3,0xd8); //_MM_SHUFFLE(0,2,1,3));
+    xmm2 = simde_mm_shufflelo_epi16(xmm2,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
+    xmm2 = simde_mm_shufflehi_epi16(xmm2,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
+    xmm2 = simde_mm_shuffle_epi32(xmm2,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
+    xmm3 = simde_mm_shufflelo_epi16(xmm3,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
+    xmm3 = simde_mm_shufflehi_epi16(xmm3,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
+    xmm3 = simde_mm_shuffle_epi32(xmm3,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
 
-    ch_mag_int  = _mm_unpacklo_epi64(xmm2,xmm3);
+    ch_mag_int  = simde_mm_unpacklo_epi64(xmm2,xmm3);
 
     // Scale MF output of desired signal
-    y0r_over_sqrt10 = _mm_mulhi_epi16(y0r,ONE_OVER_SQRT_10);
-    y0i_over_sqrt10 = _mm_mulhi_epi16(y0i,ONE_OVER_SQRT_10);
-    y0r_three_over_sqrt10 = _mm_mulhi_epi16(y0r,THREE_OVER_SQRT_10);
-    y0i_three_over_sqrt10 = _mm_mulhi_epi16(y0i,THREE_OVER_SQRT_10);
-    y0r_three_over_sqrt10 = _mm_slli_epi16(y0r_three_over_sqrt10,1);
-    y0i_three_over_sqrt10 = _mm_slli_epi16(y0i_three_over_sqrt10,1);
+    y0r_over_sqrt10 = simde_mm_mulhi_epi16(y0r,ONE_OVER_SQRT_10);
+    y0i_over_sqrt10 = simde_mm_mulhi_epi16(y0i,ONE_OVER_SQRT_10);
+    y0r_three_over_sqrt10 = simde_mm_mulhi_epi16(y0r,THREE_OVER_SQRT_10);
+    y0i_three_over_sqrt10 = simde_mm_mulhi_epi16(y0i,THREE_OVER_SQRT_10);
+    y0r_three_over_sqrt10 = simde_mm_slli_epi16(y0r_three_over_sqrt10,1);
+    y0i_three_over_sqrt10 = simde_mm_slli_epi16(y0i_three_over_sqrt10,1);
 
     // Compute necessary combination of required terms
-    simde__m128i y0_p_1_1 = _mm_adds_epi16(y0r_over_sqrt10, y0i_over_sqrt10);
-    simde__m128i y0_m_1_1 = _mm_subs_epi16(y0r_over_sqrt10, y0i_over_sqrt10);
+    simde__m128i y0_p_1_1 = simde_mm_adds_epi16(y0r_over_sqrt10, y0i_over_sqrt10);
+    simde__m128i y0_m_1_1 = simde_mm_subs_epi16(y0r_over_sqrt10, y0i_over_sqrt10);
 
-    simde__m128i y0_p_1_3 = _mm_adds_epi16(y0r_over_sqrt10, y0i_three_over_sqrt10);
-    simde__m128i y0_m_1_3 = _mm_subs_epi16(y0r_over_sqrt10, y0i_three_over_sqrt10);
+    simde__m128i y0_p_1_3 = simde_mm_adds_epi16(y0r_over_sqrt10, y0i_three_over_sqrt10);
+    simde__m128i y0_m_1_3 = simde_mm_subs_epi16(y0r_over_sqrt10, y0i_three_over_sqrt10);
 
-    simde__m128i y0_p_3_1 = _mm_adds_epi16(y0r_three_over_sqrt10, y0i_over_sqrt10);
-    simde__m128i y0_m_3_1 = _mm_subs_epi16(y0r_three_over_sqrt10, y0i_over_sqrt10);
+    simde__m128i y0_p_3_1 = simde_mm_adds_epi16(y0r_three_over_sqrt10, y0i_over_sqrt10);
+    simde__m128i y0_m_3_1 = simde_mm_subs_epi16(y0r_three_over_sqrt10, y0i_over_sqrt10);
 
-    simde__m128i y0_p_3_3 = _mm_adds_epi16(y0r_three_over_sqrt10, y0i_three_over_sqrt10);
-    simde__m128i y0_m_3_3 = _mm_subs_epi16(y0r_three_over_sqrt10, y0i_three_over_sqrt10);
+    simde__m128i y0_p_3_3 = simde_mm_adds_epi16(y0r_three_over_sqrt10, y0i_three_over_sqrt10);
+    simde__m128i y0_m_3_3 = simde_mm_subs_epi16(y0r_three_over_sqrt10, y0i_three_over_sqrt10);
 
     // Compute optimal interfering symbol magnitude
-    ch_mag_int_with_sigma2       = _mm_srai_epi16(ch_mag_int, 1); // *2
+    ch_mag_int_with_sigma2       = simde_mm_srai_epi16(ch_mag_int, 1); // *2
     two_ch_mag_int_with_sigma2   = ch_mag_int; // *4
     simde__m128i tmp_result, tmp_result2, tmp_result3, tmp_result4;
-    three_ch_mag_int_with_sigma2 = _mm_adds_epi16(ch_mag_int_with_sigma2, two_ch_mag_int_with_sigma2); // *6
+    three_ch_mag_int_with_sigma2 = simde_mm_adds_epi16(ch_mag_int_with_sigma2, two_ch_mag_int_with_sigma2); // *6
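+    // As the *2/*4/*6 tags and the ..._OVER_SQRT_2_42 constant names suggest, these
+    // multiples of the interfering channel magnitude presumably act as slicing
+    // thresholds inside interference_abs_64qam_epi16() when quantizing to the four
+    // 64QAM amplitude levels.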
 
     interference_abs_64qam_epi16(psi_r_p1_p1 ,ch_mag_int_with_sigma2, two_ch_mag_int_with_sigma2, three_ch_mag_int_with_sigma2, a_r_p1_p1,ONE_OVER_SQRT_2_42, THREE_OVER_SQRT_2_42,FIVE_OVER_SQRT_2_42,
                                  SEVEN_OVER_SQRT_2_42);
@@ -3143,38 +2881,38 @@ void qam16_qam64(int16_t *stream0_in,
     prodsum_psi_a_epi16(psi_r_m3_m3,a_r_m3_m3,psi_i_m3_m3,a_i_m3_m3,psi_a_m3_m3);
 
     // Multiply by sqrt(2)
-    psi_a_p1_p1 = _mm_mulhi_epi16(psi_a_p1_p1, ONE_OVER_SQRT_2);
-    psi_a_p1_p1 = _mm_slli_epi16(psi_a_p1_p1, 2);
-    psi_a_p1_p3 = _mm_mulhi_epi16(psi_a_p1_p3, ONE_OVER_SQRT_2);
-    psi_a_p1_p3 = _mm_slli_epi16(psi_a_p1_p3, 2);
-    psi_a_p3_p1 = _mm_mulhi_epi16(psi_a_p3_p1, ONE_OVER_SQRT_2);
-    psi_a_p3_p1 = _mm_slli_epi16(psi_a_p3_p1, 2);
-    psi_a_p3_p3 = _mm_mulhi_epi16(psi_a_p3_p3, ONE_OVER_SQRT_2);
-    psi_a_p3_p3 = _mm_slli_epi16(psi_a_p3_p3, 2);
-    psi_a_p1_m1 = _mm_mulhi_epi16(psi_a_p1_m1, ONE_OVER_SQRT_2);
-    psi_a_p1_m1 = _mm_slli_epi16(psi_a_p1_m1, 2);
-    psi_a_p1_m3 = _mm_mulhi_epi16(psi_a_p1_m3, ONE_OVER_SQRT_2);
-    psi_a_p1_m3 = _mm_slli_epi16(psi_a_p1_m3, 2);
-    psi_a_p3_m1 = _mm_mulhi_epi16(psi_a_p3_m1, ONE_OVER_SQRT_2);
-    psi_a_p3_m1 = _mm_slli_epi16(psi_a_p3_m1, 2);
-    psi_a_p3_m3 = _mm_mulhi_epi16(psi_a_p3_m3, ONE_OVER_SQRT_2);
-    psi_a_p3_m3 = _mm_slli_epi16(psi_a_p3_m3, 2);
-    psi_a_m1_p1 = _mm_mulhi_epi16(psi_a_m1_p1, ONE_OVER_SQRT_2);
-    psi_a_m1_p1 = _mm_slli_epi16(psi_a_m1_p1, 2);
-    psi_a_m1_p3 = _mm_mulhi_epi16(psi_a_m1_p3, ONE_OVER_SQRT_2);
-    psi_a_m1_p3 = _mm_slli_epi16(psi_a_m1_p3, 2);
-    psi_a_m3_p1 = _mm_mulhi_epi16(psi_a_m3_p1, ONE_OVER_SQRT_2);
-    psi_a_m3_p1 = _mm_slli_epi16(psi_a_m3_p1, 2);
-    psi_a_m3_p3 = _mm_mulhi_epi16(psi_a_m3_p3, ONE_OVER_SQRT_2);
-    psi_a_m3_p3 = _mm_slli_epi16(psi_a_m3_p3, 2);
-    psi_a_m1_m1 = _mm_mulhi_epi16(psi_a_m1_m1, ONE_OVER_SQRT_2);
-    psi_a_m1_m1 = _mm_slli_epi16(psi_a_m1_m1, 2);
-    psi_a_m1_m3 = _mm_mulhi_epi16(psi_a_m1_m3, ONE_OVER_SQRT_2);
-    psi_a_m1_m3 = _mm_slli_epi16(psi_a_m1_m3, 2);
-    psi_a_m3_m1 = _mm_mulhi_epi16(psi_a_m3_m1, ONE_OVER_SQRT_2);
-    psi_a_m3_m1 = _mm_slli_epi16(psi_a_m3_m1, 2);
-    psi_a_m3_m3 = _mm_mulhi_epi16(psi_a_m3_m3, ONE_OVER_SQRT_2);
-    psi_a_m3_m3 = _mm_slli_epi16(psi_a_m3_m3, 2);
+    psi_a_p1_p1 = simde_mm_mulhi_epi16(psi_a_p1_p1, ONE_OVER_SQRT_2);
+    psi_a_p1_p1 = simde_mm_slli_epi16(psi_a_p1_p1, 2);
+    psi_a_p1_p3 = simde_mm_mulhi_epi16(psi_a_p1_p3, ONE_OVER_SQRT_2);
+    psi_a_p1_p3 = simde_mm_slli_epi16(psi_a_p1_p3, 2);
+    psi_a_p3_p1 = simde_mm_mulhi_epi16(psi_a_p3_p1, ONE_OVER_SQRT_2);
+    psi_a_p3_p1 = simde_mm_slli_epi16(psi_a_p3_p1, 2);
+    psi_a_p3_p3 = simde_mm_mulhi_epi16(psi_a_p3_p3, ONE_OVER_SQRT_2);
+    psi_a_p3_p3 = simde_mm_slli_epi16(psi_a_p3_p3, 2);
+    psi_a_p1_m1 = simde_mm_mulhi_epi16(psi_a_p1_m1, ONE_OVER_SQRT_2);
+    psi_a_p1_m1 = simde_mm_slli_epi16(psi_a_p1_m1, 2);
+    psi_a_p1_m3 = simde_mm_mulhi_epi16(psi_a_p1_m3, ONE_OVER_SQRT_2);
+    psi_a_p1_m3 = simde_mm_slli_epi16(psi_a_p1_m3, 2);
+    psi_a_p3_m1 = simde_mm_mulhi_epi16(psi_a_p3_m1, ONE_OVER_SQRT_2);
+    psi_a_p3_m1 = simde_mm_slli_epi16(psi_a_p3_m1, 2);
+    psi_a_p3_m3 = simde_mm_mulhi_epi16(psi_a_p3_m3, ONE_OVER_SQRT_2);
+    psi_a_p3_m3 = simde_mm_slli_epi16(psi_a_p3_m3, 2);
+    psi_a_m1_p1 = simde_mm_mulhi_epi16(psi_a_m1_p1, ONE_OVER_SQRT_2);
+    psi_a_m1_p1 = simde_mm_slli_epi16(psi_a_m1_p1, 2);
+    psi_a_m1_p3 = simde_mm_mulhi_epi16(psi_a_m1_p3, ONE_OVER_SQRT_2);
+    psi_a_m1_p3 = simde_mm_slli_epi16(psi_a_m1_p3, 2);
+    psi_a_m3_p1 = simde_mm_mulhi_epi16(psi_a_m3_p1, ONE_OVER_SQRT_2);
+    psi_a_m3_p1 = simde_mm_slli_epi16(psi_a_m3_p1, 2);
+    psi_a_m3_p3 = simde_mm_mulhi_epi16(psi_a_m3_p3, ONE_OVER_SQRT_2);
+    psi_a_m3_p3 = simde_mm_slli_epi16(psi_a_m3_p3, 2);
+    psi_a_m1_m1 = simde_mm_mulhi_epi16(psi_a_m1_m1, ONE_OVER_SQRT_2);
+    psi_a_m1_m1 = simde_mm_slli_epi16(psi_a_m1_m1, 2);
+    psi_a_m1_m3 = simde_mm_mulhi_epi16(psi_a_m1_m3, ONE_OVER_SQRT_2);
+    psi_a_m1_m3 = simde_mm_slli_epi16(psi_a_m1_m3, 2);
+    psi_a_m3_m1 = simde_mm_mulhi_epi16(psi_a_m3_m1, ONE_OVER_SQRT_2);
+    psi_a_m3_m1 = simde_mm_slli_epi16(psi_a_m3_m1, 2);
+    psi_a_m3_m3 = simde_mm_mulhi_epi16(psi_a_m3_m3, ONE_OVER_SQRT_2);
+    psi_a_m3_m3 = simde_mm_slli_epi16(psi_a_m3_m3, 2);
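+    // Each pair above multiplies by ONE_OVER_SQRT_2 in Q15 via mulhi (>>16) and then
+    // shifts left by 2, i.e. a net multiply by sqrt(2) as the section comment says.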
 
     // squared interference magnitude times int. ch. power
     square_a_64qam_epi16(a_r_p1_p1,a_i_p1_p1,ch_mag_int,SQRT_42_OVER_FOUR,a_sq_p1_p1);
@@ -3195,189 +2933,180 @@ void qam16_qam64(int16_t *stream0_in,
     square_a_64qam_epi16(a_r_m3_m3,a_i_m3_m3,ch_mag_int,SQRT_42_OVER_FOUR,a_sq_m3_m3);
 
     // Computing different multiples of channel norms
-    ch_mag_over_10=_mm_mulhi_epi16(ch_mag_des, ONE_OVER_TWO_SQRT_10);
-    ch_mag_over_2=_mm_mulhi_epi16(ch_mag_des, SQRT_10_OVER_FOUR);
-    ch_mag_over_2=_mm_slli_epi16(ch_mag_over_2, 1);
-    ch_mag_9_over_10=_mm_mulhi_epi16(ch_mag_des, NINE_OVER_TWO_SQRT_10);
-    ch_mag_9_over_10=_mm_slli_epi16(ch_mag_9_over_10, 2);
+    ch_mag_over_10   = simde_mm_mulhi_epi16(ch_mag_des, ONE_OVER_TWO_SQRT_10);
+    ch_mag_over_2    = simde_mm_mulhi_epi16(ch_mag_des, SQRT_10_OVER_FOUR);
+    ch_mag_over_2    = simde_mm_slli_epi16(ch_mag_over_2, 1);
+    ch_mag_9_over_10 = simde_mm_mulhi_epi16(ch_mag_des, NINE_OVER_TWO_SQRT_10);
+    ch_mag_9_over_10 = simde_mm_slli_epi16(ch_mag_9_over_10, 2);
 
     // Computing Metrics
-    xmm0 = _mm_subs_epi16(psi_a_p1_p1,a_sq_p1_p1);
-    xmm1 = _mm_adds_epi16(xmm0,y0_p_1_1);
-    simde__m128i bit_met_p1_p1 = _mm_subs_epi16(xmm1, ch_mag_over_10);
+    xmm0 = simde_mm_subs_epi16(psi_a_p1_p1, a_sq_p1_p1);
+    xmm1 = simde_mm_adds_epi16(xmm0, y0_p_1_1);
+    simde__m128i bit_met_p1_p1 = simde_mm_subs_epi16(xmm1, ch_mag_over_10);
 
-    xmm0 = _mm_subs_epi16(psi_a_p1_p3,a_sq_p1_p3);
-    xmm1 = _mm_adds_epi16(xmm0,y0_p_1_3);
-    simde__m128i bit_met_p1_p3 = _mm_subs_epi16(xmm1, ch_mag_over_2);
+    xmm0 = simde_mm_subs_epi16(psi_a_p1_p3, a_sq_p1_p3);
+    xmm1 = simde_mm_adds_epi16(xmm0, y0_p_1_3);
+    simde__m128i bit_met_p1_p3 = simde_mm_subs_epi16(xmm1, ch_mag_over_2);
 
-    xmm0 = _mm_subs_epi16(psi_a_p1_m1,a_sq_p1_m1);
-    xmm1 = _mm_adds_epi16(xmm0,y0_m_1_1);
-    simde__m128i bit_met_p1_m1 = _mm_subs_epi16(xmm1, ch_mag_over_10);
+    xmm0 = simde_mm_subs_epi16(psi_a_p1_m1, a_sq_p1_m1);
+    xmm1 = simde_mm_adds_epi16(xmm0, y0_m_1_1);
+    simde__m128i bit_met_p1_m1 = simde_mm_subs_epi16(xmm1, ch_mag_over_10);
 
-    xmm0 = _mm_subs_epi16(psi_a_p1_m3,a_sq_p1_m3);
-    xmm1 = _mm_adds_epi16(xmm0,y0_m_1_3);
-    simde__m128i bit_met_p1_m3 = _mm_subs_epi16(xmm1, ch_mag_over_2);
+    xmm0 = simde_mm_subs_epi16(psi_a_p1_m3, a_sq_p1_m3);
+    xmm1 = simde_mm_adds_epi16(xmm0, y0_m_1_3);
+    simde__m128i bit_met_p1_m3 = simde_mm_subs_epi16(xmm1, ch_mag_over_2);
 
-    xmm0 = _mm_subs_epi16(psi_a_p3_p1,a_sq_p3_p1);
-    xmm1 = _mm_adds_epi16(xmm0,y0_p_3_1);
-    simde__m128i bit_met_p3_p1 = _mm_subs_epi16(xmm1, ch_mag_over_2);
+    xmm0 = simde_mm_subs_epi16(psi_a_p3_p1, a_sq_p3_p1);
+    xmm1 = simde_mm_adds_epi16(xmm0, y0_p_3_1);
+    simde__m128i bit_met_p3_p1 = simde_mm_subs_epi16(xmm1, ch_mag_over_2);
 
-    xmm0 = _mm_subs_epi16(psi_a_p3_p3,a_sq_p3_p3);
-    xmm1 = _mm_adds_epi16(xmm0,y0_p_3_3);
-    simde__m128i bit_met_p3_p3 = _mm_subs_epi16(xmm1, ch_mag_9_over_10);
+    xmm0 = simde_mm_subs_epi16(psi_a_p3_p3, a_sq_p3_p3);
+    xmm1 = simde_mm_adds_epi16(xmm0, y0_p_3_3);
+    simde__m128i bit_met_p3_p3 = simde_mm_subs_epi16(xmm1, ch_mag_9_over_10);
 
-    xmm0 = _mm_subs_epi16(psi_a_p3_m1,a_sq_p3_m1);
-    xmm1 = _mm_adds_epi16(xmm0,y0_m_3_1);
-    simde__m128i bit_met_p3_m1 = _mm_subs_epi16(xmm1, ch_mag_over_2);
+    xmm0 = simde_mm_subs_epi16(psi_a_p3_m1, a_sq_p3_m1);
+    xmm1 = simde_mm_adds_epi16(xmm0, y0_m_3_1);
+    simde__m128i bit_met_p3_m1 = simde_mm_subs_epi16(xmm1, ch_mag_over_2);
 
-    xmm0 = _mm_subs_epi16(psi_a_p3_m3,a_sq_p3_m3);
-    xmm1 = _mm_adds_epi16(xmm0,y0_m_3_3);
-    simde__m128i bit_met_p3_m3 = _mm_subs_epi16(xmm1, ch_mag_9_over_10);
+    xmm0 = simde_mm_subs_epi16(psi_a_p3_m3, a_sq_p3_m3);
+    xmm1 = simde_mm_adds_epi16(xmm0, y0_m_3_3);
+    simde__m128i bit_met_p3_m3 = simde_mm_subs_epi16(xmm1, ch_mag_9_over_10);
 
-    xmm0 = _mm_subs_epi16(psi_a_m1_p1,a_sq_m1_p1);
-    xmm1 = _mm_subs_epi16(xmm0,y0_m_1_1);
-    simde__m128i bit_met_m1_p1 = _mm_subs_epi16(xmm1, ch_mag_over_10);
+    xmm0 = simde_mm_subs_epi16(psi_a_m1_p1, a_sq_m1_p1);
+    xmm1 = simde_mm_subs_epi16(xmm0, y0_m_1_1);
+    simde__m128i bit_met_m1_p1 = simde_mm_subs_epi16(xmm1, ch_mag_over_10);
 
-    xmm0 = _mm_subs_epi16(psi_a_m1_p3,a_sq_m1_p3);
-    xmm1 = _mm_subs_epi16(xmm0,y0_m_1_3);
-    simde__m128i bit_met_m1_p3 = _mm_subs_epi16(xmm1, ch_mag_over_2);
+    xmm0 = simde_mm_subs_epi16(psi_a_m1_p3, a_sq_m1_p3);
+    xmm1 = simde_mm_subs_epi16(xmm0, y0_m_1_3);
+    simde__m128i bit_met_m1_p3 = simde_mm_subs_epi16(xmm1, ch_mag_over_2);
 
-    xmm0 = _mm_subs_epi16(psi_a_m1_m1,a_sq_m1_m1);
-    xmm1 = _mm_subs_epi16(xmm0,y0_p_1_1);
-    simde__m128i bit_met_m1_m1 = _mm_subs_epi16(xmm1, ch_mag_over_10);
+    xmm0 = simde_mm_subs_epi16(psi_a_m1_m1, a_sq_m1_m1);
+    xmm1 = simde_mm_subs_epi16(xmm0, y0_p_1_1);
+    simde__m128i bit_met_m1_m1 = simde_mm_subs_epi16(xmm1, ch_mag_over_10);
 
-    xmm0 = _mm_subs_epi16(psi_a_m1_m3,a_sq_m1_m3);
-    xmm1 = _mm_subs_epi16(xmm0,y0_p_1_3);
-    simde__m128i bit_met_m1_m3 = _mm_subs_epi16(xmm1, ch_mag_over_2);
+    xmm0 = simde_mm_subs_epi16(psi_a_m1_m3, a_sq_m1_m3);
+    xmm1 = simde_mm_subs_epi16(xmm0, y0_p_1_3);
+    simde__m128i bit_met_m1_m3 = simde_mm_subs_epi16(xmm1, ch_mag_over_2);
 
-    xmm0 = _mm_subs_epi16(psi_a_m3_p1,a_sq_m3_p1);
-    xmm1 = _mm_subs_epi16(xmm0,y0_m_3_1);
-    simde__m128i bit_met_m3_p1 = _mm_subs_epi16(xmm1, ch_mag_over_2);
+    xmm0 = simde_mm_subs_epi16(psi_a_m3_p1, a_sq_m3_p1);
+    xmm1 = simde_mm_subs_epi16(xmm0, y0_m_3_1);
+    simde__m128i bit_met_m3_p1 = simde_mm_subs_epi16(xmm1, ch_mag_over_2);
 
-    xmm0 = _mm_subs_epi16(psi_a_m3_p3,a_sq_m3_p3);
-    xmm1 = _mm_subs_epi16(xmm0,y0_m_3_3);
-    simde__m128i bit_met_m3_p3 = _mm_subs_epi16(xmm1, ch_mag_9_over_10);
+    xmm0 = simde_mm_subs_epi16(psi_a_m3_p3, a_sq_m3_p3);
+    xmm1 = simde_mm_subs_epi16(xmm0, y0_m_3_3);
+    simde__m128i bit_met_m3_p3 = simde_mm_subs_epi16(xmm1, ch_mag_9_over_10);
 
-    xmm0 = _mm_subs_epi16(psi_a_m3_m1,a_sq_m3_m1);
-    xmm1 = _mm_subs_epi16(xmm0,y0_p_3_1);
-    simde__m128i bit_met_m3_m1 = _mm_subs_epi16(xmm1, ch_mag_over_2);
+    xmm0 = simde_mm_subs_epi16(psi_a_m3_m1, a_sq_m3_m1);
+    xmm1 = simde_mm_subs_epi16(xmm0, y0_p_3_1);
+    simde__m128i bit_met_m3_m1 = simde_mm_subs_epi16(xmm1, ch_mag_over_2);
 
-    xmm0 = _mm_subs_epi16(psi_a_m3_m3,a_sq_m3_m3);
-    xmm1 = _mm_subs_epi16(xmm0,y0_p_3_3);
-    simde__m128i bit_met_m3_m3 = _mm_subs_epi16(xmm1, ch_mag_9_over_10);
+    xmm0 = simde_mm_subs_epi16(psi_a_m3_m3, a_sq_m3_m3);
+    xmm1 = simde_mm_subs_epi16(xmm0, y0_p_3_3);
+    simde__m128i bit_met_m3_m3 = simde_mm_subs_epi16(xmm1, ch_mag_9_over_10);
 
     // LLR of the first bit
     // Bit = 1
-    xmm0 = _mm_max_epi16(bit_met_m1_p1,bit_met_m1_p3);
-    xmm1 = _mm_max_epi16(bit_met_m1_m1,bit_met_m1_m3);
-    xmm2 = _mm_max_epi16(bit_met_m3_p1,bit_met_m3_p3);
-    xmm3 = _mm_max_epi16(bit_met_m3_m1,bit_met_m3_m3);
-    xmm4 = _mm_max_epi16(xmm0,xmm1);
-    xmm5 = _mm_max_epi16(xmm2,xmm3);
-    simde__m128i logmax_num_re0 = _mm_max_epi16(xmm4, xmm5);
+    xmm0 = simde_mm_max_epi16(bit_met_m1_p1, bit_met_m1_p3);
+    xmm1 = simde_mm_max_epi16(bit_met_m1_m1, bit_met_m1_m3);
+    xmm2 = simde_mm_max_epi16(bit_met_m3_p1, bit_met_m3_p3);
+    xmm3 = simde_mm_max_epi16(bit_met_m3_m1, bit_met_m3_m3);
+    xmm4 = simde_mm_max_epi16(xmm0, xmm1);
+    xmm5 = simde_mm_max_epi16(xmm2, xmm3);
+    simde__m128i logmax_num_re0 = simde_mm_max_epi16(xmm4, xmm5);
 
     // Bit = 0
-    xmm0 = _mm_max_epi16(bit_met_p1_p1,bit_met_p1_p3);
-    xmm1 = _mm_max_epi16(bit_met_p1_m1,bit_met_p1_m3);
-    xmm2 = _mm_max_epi16(bit_met_p3_p1,bit_met_p3_p3);
-    xmm3 = _mm_max_epi16(bit_met_p3_m1,bit_met_p3_m3);
-    xmm4 = _mm_max_epi16(xmm0,xmm1);
-    xmm5 = _mm_max_epi16(xmm2,xmm3);
-    simde__m128i logmax_den_re0 = _mm_max_epi16(xmm4, xmm5);
+    xmm0 = simde_mm_max_epi16(bit_met_p1_p1, bit_met_p1_p3);
+    xmm1 = simde_mm_max_epi16(bit_met_p1_m1, bit_met_p1_m3);
+    xmm2 = simde_mm_max_epi16(bit_met_p3_p1, bit_met_p3_p3);
+    xmm3 = simde_mm_max_epi16(bit_met_p3_m1, bit_met_p3_m3);
+    xmm4 = simde_mm_max_epi16(xmm0, xmm1);
+    xmm5 = simde_mm_max_epi16(xmm2, xmm3);
+    simde__m128i logmax_den_re0 = simde_mm_max_epi16(xmm4, xmm5);
 
     // LLR of first bit [L1(1), L1(2), L1(3), L1(4), L1(5), L1(6), L1(7), L1(8)]
-    y0r = _mm_subs_epi16(logmax_den_re0,logmax_num_re0);
+    y0r = simde_mm_subs_epi16(logmax_den_re0, logmax_num_re0);
 
     // LLR of the second bit
     // Bit = 1
-    xmm0 = _mm_max_epi16(bit_met_p1_m1,bit_met_p3_m1);
-    xmm1 = _mm_max_epi16(bit_met_m1_m1,bit_met_m3_m1);
-    xmm2 = _mm_max_epi16(bit_met_p1_m3,bit_met_p3_m3);
-    xmm3 = _mm_max_epi16(bit_met_m1_m3,bit_met_m3_m3);
-    xmm4 = _mm_max_epi16(xmm0,xmm1);
-    xmm5 = _mm_max_epi16(xmm2,xmm3);
-    simde__m128i logmax_num_re1 = _mm_max_epi16(xmm4, xmm5);
+    xmm0 = simde_mm_max_epi16(bit_met_p1_m1, bit_met_p3_m1);
+    xmm1 = simde_mm_max_epi16(bit_met_m1_m1, bit_met_m3_m1);
+    xmm2 = simde_mm_max_epi16(bit_met_p1_m3, bit_met_p3_m3);
+    xmm3 = simde_mm_max_epi16(bit_met_m1_m3, bit_met_m3_m3);
+    xmm4 = simde_mm_max_epi16(xmm0, xmm1);
+    xmm5 = simde_mm_max_epi16(xmm2, xmm3);
+    simde__m128i logmax_num_re1 = simde_mm_max_epi16(xmm4, xmm5);
 
     // Bit = 0
-    xmm0 = _mm_max_epi16(bit_met_p1_p1,bit_met_p3_p1);
-    xmm1 = _mm_max_epi16(bit_met_m1_p1,bit_met_m3_p1);
-    xmm2 = _mm_max_epi16(bit_met_p1_p3,bit_met_p3_p3);
-    xmm3 = _mm_max_epi16(bit_met_m1_p3,bit_met_m3_p3);
-    xmm4 = _mm_max_epi16(xmm0,xmm1);
-    xmm5 = _mm_max_epi16(xmm2,xmm3);
-    simde__m128i logmax_den_re1 = _mm_max_epi16(xmm4, xmm5);
+    xmm0 = simde_mm_max_epi16(bit_met_p1_p1, bit_met_p3_p1);
+    xmm1 = simde_mm_max_epi16(bit_met_m1_p1, bit_met_m3_p1);
+    xmm2 = simde_mm_max_epi16(bit_met_p1_p3, bit_met_p3_p3);
+    xmm3 = simde_mm_max_epi16(bit_met_m1_p3, bit_met_m3_p3);
+    xmm4 = simde_mm_max_epi16(xmm0, xmm1);
+    xmm5 = simde_mm_max_epi16(xmm2, xmm3);
+    simde__m128i logmax_den_re1 = simde_mm_max_epi16(xmm4, xmm5);
 
     // LLR of second bit [L2(1), L2(2), L2(3), L2(4)]
-    y1r = _mm_subs_epi16(logmax_den_re1,logmax_num_re1);
+    y1r = simde_mm_subs_epi16(logmax_den_re1, logmax_num_re1);
 
     // LLR of the third bit
     // Bit = 1
-    xmm0 = _mm_max_epi16(bit_met_m3_p1,bit_met_m3_p3);
-    xmm1 = _mm_max_epi16(bit_met_m3_m1,bit_met_m3_m3);
-    xmm2 = _mm_max_epi16(bit_met_p3_p1,bit_met_p3_p3);
-    xmm3 = _mm_max_epi16(bit_met_p3_m1,bit_met_p3_m3);
-    xmm4 = _mm_max_epi16(xmm0,xmm1);
-    xmm5 = _mm_max_epi16(xmm2,xmm3);
-    simde__m128i logmax_num_im0 = _mm_max_epi16(xmm4, xmm5);
+    xmm0 = simde_mm_max_epi16(bit_met_m3_p1, bit_met_m3_p3);
+    xmm1 = simde_mm_max_epi16(bit_met_m3_m1, bit_met_m3_m3);
+    xmm2 = simde_mm_max_epi16(bit_met_p3_p1, bit_met_p3_p3);
+    xmm3 = simde_mm_max_epi16(bit_met_p3_m1, bit_met_p3_m3);
+    xmm4 = simde_mm_max_epi16(xmm0, xmm1);
+    xmm5 = simde_mm_max_epi16(xmm2, xmm3);
+    simde__m128i logmax_num_im0 = simde_mm_max_epi16(xmm4, xmm5);
 
     // Bit = 0
-    xmm0 = _mm_max_epi16(bit_met_m1_p1,bit_met_m1_p3);
-    xmm1 = _mm_max_epi16(bit_met_m1_m1,bit_met_m1_m3);
-    xmm2 = _mm_max_epi16(bit_met_p1_p1,bit_met_p1_p3);
-    xmm3 = _mm_max_epi16(bit_met_p1_m1,bit_met_p1_m3);
-    xmm4 = _mm_max_epi16(xmm0,xmm1);
-    xmm5 = _mm_max_epi16(xmm2,xmm3);
-    simde__m128i logmax_den_im0 = _mm_max_epi16(xmm4, xmm5);
+    xmm0 = simde_mm_max_epi16(bit_met_m1_p1, bit_met_m1_p3);
+    xmm1 = simde_mm_max_epi16(bit_met_m1_m1, bit_met_m1_m3);
+    xmm2 = simde_mm_max_epi16(bit_met_p1_p1, bit_met_p1_p3);
+    xmm3 = simde_mm_max_epi16(bit_met_p1_m1, bit_met_p1_m3);
+    xmm4 = simde_mm_max_epi16(xmm0, xmm1);
+    xmm5 = simde_mm_max_epi16(xmm2, xmm3);
+    simde__m128i logmax_den_im0 = simde_mm_max_epi16(xmm4, xmm5);
 
     // LLR of third bit [L3(1), L3(2), L3(3), L3(4)]
-    y0i = _mm_subs_epi16(logmax_den_im0,logmax_num_im0);
+    y0i = simde_mm_subs_epi16(logmax_den_im0, logmax_num_im0);
 
     // LLR of the fourth bit
     // Bit = 1
-    xmm0 = _mm_max_epi16(bit_met_p1_m3,bit_met_p3_m3);
-    xmm1 = _mm_max_epi16(bit_met_m1_m3,bit_met_m3_m3);
-    xmm2 = _mm_max_epi16(bit_met_p1_p3,bit_met_p3_p3);
-    xmm3 = _mm_max_epi16(bit_met_m1_p3,bit_met_m3_p3);
-    xmm4 = _mm_max_epi16(xmm0,xmm1);
-    xmm5 = _mm_max_epi16(xmm2,xmm3);
-    simde__m128i logmax_num_im1 = _mm_max_epi16(xmm4, xmm5);
+    xmm0 = simde_mm_max_epi16(bit_met_p1_m3, bit_met_p3_m3);
+    xmm1 = simde_mm_max_epi16(bit_met_m1_m3, bit_met_m3_m3);
+    xmm2 = simde_mm_max_epi16(bit_met_p1_p3, bit_met_p3_p3);
+    xmm3 = simde_mm_max_epi16(bit_met_m1_p3, bit_met_m3_p3);
+    xmm4 = simde_mm_max_epi16(xmm0, xmm1);
+    xmm5 = simde_mm_max_epi16(xmm2, xmm3);
+    simde__m128i logmax_num_im1 = simde_mm_max_epi16(xmm4, xmm5);
 
     // Bit = 0
-    xmm0 = _mm_max_epi16(bit_met_p1_m1,bit_met_p3_m1);
-    xmm1 = _mm_max_epi16(bit_met_m1_m1,bit_met_m3_m1);
-    xmm2 = _mm_max_epi16(bit_met_p1_p1,bit_met_p3_p1);
-    xmm3 = _mm_max_epi16(bit_met_m1_p1,bit_met_m3_p1);
-    xmm4 = _mm_max_epi16(xmm0,xmm1);
-    xmm5 = _mm_max_epi16(xmm2,xmm3);
-    simde__m128i logmax_den_im1 = _mm_max_epi16(xmm4, xmm5);
+    xmm0 = simde_mm_max_epi16(bit_met_p1_m1, bit_met_p3_m1);
+    xmm1 = simde_mm_max_epi16(bit_met_m1_m1, bit_met_m3_m1);
+    xmm2 = simde_mm_max_epi16(bit_met_p1_p1, bit_met_p3_p1);
+    xmm3 = simde_mm_max_epi16(bit_met_m1_p1, bit_met_m3_p1);
+    xmm4 = simde_mm_max_epi16(xmm0, xmm1);
+    xmm5 = simde_mm_max_epi16(xmm2, xmm3);
+    simde__m128i logmax_den_im1 = simde_mm_max_epi16(xmm4, xmm5);
 
     // LLR of fourth bit [L4(1), L4(2), L4(3), L4(4)]
-    y1i = _mm_subs_epi16(logmax_den_im1,logmax_num_im1);
+    y1i = simde_mm_subs_epi16(logmax_den_im1, logmax_num_im1);
 
     // Pack LLRs in output
     // [L1(1), L2(1), L1(2), L2(2), L1(3), L2(3), L1(4), L2(4)]
-    xmm0 = _mm_unpacklo_epi16(y0r,y1r);
+    xmm0 = simde_mm_unpacklo_epi16(y0r, y1r);
     // [L1(5), L2(5), L1(6), L2(6), L1(7), L2(7), L1(8), L2(8)]
-    xmm1 = _mm_unpackhi_epi16(y0r,y1r);
+    xmm1 = simde_mm_unpackhi_epi16(y0r, y1r);
     // [L3(1), L4(1), L3(2), L4(2), L3(3), L4(3), L3(4), L4(4)]
-    xmm2 = _mm_unpacklo_epi16(y0i,y1i);
+    xmm2 = simde_mm_unpacklo_epi16(y0i, y1i);
     // [L3(5), L4(5), L3(6), L4(6), L3(7), L4(7), L3(8), L4(8)]
-    xmm3 = _mm_unpackhi_epi16(y0i,y1i);
-
-    stream0_128i_out[2*i+0] = _mm_unpacklo_epi32(xmm0,xmm2); // 8LLRs, 2REs
-    stream0_128i_out[2*i+1] = _mm_unpackhi_epi32(xmm0,xmm2);
-    stream0_128i_out[2*i+2] = _mm_unpacklo_epi32(xmm1,xmm3);
-    stream0_128i_out[2*i+3] = _mm_unpackhi_epi32(xmm1,xmm3);
-#elif defined(__arm__) || defined(__aarch64__)
-
-#endif
+    xmm3 = simde_mm_unpackhi_epi16(y0i, y1i);
 
+    stream0_128i_out[2*i+0] = simde_mm_unpacklo_epi32(xmm0, xmm2); // 8LLRs, 2REs
+    stream0_128i_out[2*i+1] = simde_mm_unpackhi_epi32(xmm0, xmm2);
+    stream0_128i_out[2*i+2] = simde_mm_unpacklo_epi32(xmm1, xmm3);
+    stream0_128i_out[2*i+3] = simde_mm_unpackhi_epi32(xmm1, xmm3);
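+    // Per iteration: 4 stores of 8 int16 LLRs = 32 LLRs, i.e. 8 resource
+    // elements at 4 bits each (cf. "8LLRs, 2REs" above).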
   }
-
-#if defined(__x86_64__) || defined(__i386__)
-  _mm_empty();
-  _m_empty();
-#endif
 }
 
 int dlsch_16qam_64qam_llr(LTE_DL_FRAME_PARMS *frame_parms,
@@ -3447,43 +3176,6 @@ int dlsch_16qam_64qam_llr(LTE_DL_FRAME_PARMS *frame_parms,
 // 64-QAM
 //----------------------------------------------------------------------------------------------
 
-/*
-__m128i ONE_OVER_SQRT_42 __attribute__((aligned(16)));
-__m128i THREE_OVER_SQRT_42 __attribute__((aligned(16)));
-__m128i FIVE_OVER_SQRT_42 __attribute__((aligned(16)));
-__m128i SEVEN_OVER_SQRT_42 __attribute__((aligned(16)));
-
-__m128i FORTYNINE_OVER_FOUR_SQRT_42 __attribute__((aligned(16)));
-__m128i THIRTYSEVEN_OVER_FOUR_SQRT_42 __attribute__((aligned(16)));
-__m128i TWENTYNINE_OVER_FOUR_SQRT_42 __attribute__((aligned(16)));
-__m128i TWENTYFIVE_OVER_FOUR_SQRT_42 __attribute__((aligned(16)));
-__m128i SEVENTEEN_OVER_FOUR_SQRT_42 __attribute__((aligned(16)));
-__m128i NINE_OVER_FOUR_SQRT_42 __attribute__((aligned(16)));
-__m128i THIRTEEN_OVER_FOUR_SQRT_42 __attribute__((aligned(16)));
-__m128i FIVE_OVER_FOUR_SQRT_42 __attribute__((aligned(16)));
-__m128i ONE_OVER_FOUR_SQRT_42 __attribute__((aligned(16)));
-
-__m128i  y0r_one_over_sqrt_21 __attribute__((aligned(16)));
-__m128i  y0r_three_over_sqrt_21 __attribute__((aligned(16)));
-__m128i  y0r_five_over_sqrt_21 __attribute__((aligned(16)));
-__m128i  y0r_seven_over_sqrt_21 __attribute__((aligned(16)));
-__m128i  y0i_one_over_sqrt_21 __attribute__((aligned(16)));
-__m128i  y0i_three_over_sqrt_21 __attribute__((aligned(16)));
-__m128i  y0i_five_over_sqrt_21 __attribute__((aligned(16)));
-__m128i  y0i_seven_over_sqrt_21 __attribute__((aligned(16)));
-
-__m128i ch_mag_98_over_42_with_sigma2 __attribute__((aligned(16)));
-__m128i ch_mag_74_over_42_with_sigma2 __attribute__((aligned(16)));
-__m128i ch_mag_58_over_42_with_sigma2 __attribute__((aligned(16)));
-__m128i ch_mag_50_over_42_with_sigma2 __attribute__((aligned(16)));
-__m128i ch_mag_34_over_42_with_sigma2 __attribute__((aligned(16)));
-__m128i ch_mag_18_over_42_with_sigma2 __attribute__((aligned(16)));
-__m128i ch_mag_26_over_42_with_sigma2 __attribute__((aligned(16)));
-__m128i ch_mag_10_over_42_with_sigma2 __attribute__((aligned(16)));
-__m128i ch_mag_2_over_42_with_sigma2 __attribute__((aligned(16)));
-
-*/
-
 void qam64_qpsk(int16_t *stream0_in,
                 int16_t *stream1_in,
                 int16_t *ch_mag,
@@ -3508,1371 +3200,1366 @@ void qam64_qpsk(int16_t *stream0_in,
     stream0_out: output LLRs for 1st stream
   */
 
-#if defined(__x86_64__) || defined(__i386__)
-  __m128i *rho01_128i      = (__m128i *)rho01;
-  __m128i *stream0_128i_in = (__m128i *)stream0_in;
-  __m128i *stream1_128i_in = (__m128i *)stream1_in;
-  __m128i *ch_mag_128i     = (__m128i *)ch_mag;
-
-
-  __m128i ONE_OVER_SQRT_42 = _mm_set1_epi16(10112); // round(1/sqrt(42)*2^16)
-  __m128i THREE_OVER_SQRT_42 = _mm_set1_epi16(30337); // round(3/sqrt(42)*2^16)
-  __m128i FIVE_OVER_SQRT_42 = _mm_set1_epi16(25281); // round(5/sqrt(42)*2^15)
-  __m128i SEVEN_OVER_SQRT_42 = _mm_set1_epi16(17697); // round(5/sqrt(42)*2^15)
-  __m128i ONE_OVER_SQRT_2 = _mm_set1_epi16(23170); // round(1/sqrt(2)*2^15)
-  __m128i FORTYNINE_OVER_FOUR_SQRT_42 = _mm_set1_epi16(30969); // round(49/(4*sqrt(42))*2^14), Q2.14
-  __m128i THIRTYSEVEN_OVER_FOUR_SQRT_42 = _mm_set1_epi16(23385); // round(37/(4*sqrt(42))*2^14), Q2.14
-  __m128i TWENTYFIVE_OVER_FOUR_SQRT_42 = _mm_set1_epi16(31601); // round(25/(4*sqrt(42))*2^15)
-  __m128i TWENTYNINE_OVER_FOUR_SQRT_42 = _mm_set1_epi16(18329); // round(29/(4*sqrt(42))*2^15), Q2.14
-  __m128i SEVENTEEN_OVER_FOUR_SQRT_42 = _mm_set1_epi16(21489); // round(17/(4*sqrt(42))*2^15)
-  __m128i NINE_OVER_FOUR_SQRT_42 = _mm_set1_epi16(11376); // round(9/(4*sqrt(42))*2^15)
-  __m128i THIRTEEN_OVER_FOUR_SQRT_42 = _mm_set1_epi16(16433); // round(13/(4*sqrt(42))*2^15)
-  __m128i FIVE_OVER_FOUR_SQRT_42 = _mm_set1_epi16(6320); // round(5/(4*sqrt(42))*2^15)
-  __m128i ONE_OVER_FOUR_SQRT_42 = _mm_set1_epi16(1264); // round(1/(4*sqrt(42))*2^15)
-
-
-  __m128i ch_mag_des;
-  __m128i ch_mag_98_over_42_with_sigma2;
-  __m128i ch_mag_74_over_42_with_sigma2;
-  __m128i ch_mag_58_over_42_with_sigma2;
-  __m128i ch_mag_50_over_42_with_sigma2;
-  __m128i ch_mag_34_over_42_with_sigma2;
-  __m128i ch_mag_18_over_42_with_sigma2;
-  __m128i ch_mag_26_over_42_with_sigma2;
-  __m128i ch_mag_10_over_42_with_sigma2;
-  __m128i ch_mag_2_over_42_with_sigma2;
-  __m128i  y0r_one_over_sqrt_21;
-  __m128i  y0r_three_over_sqrt_21;
-  __m128i  y0r_five_over_sqrt_21;
-  __m128i  y0r_seven_over_sqrt_21;
-  __m128i  y0i_one_over_sqrt_21;
-  __m128i  y0i_three_over_sqrt_21;
-  __m128i  y0i_five_over_sqrt_21;
-  __m128i  y0i_seven_over_sqrt_21;
-#elif defined(__arm__) || defined(__aarch64__)
-
-#endif
+  simde__m128i *rho01_128i      = (simde__m128i *)rho01;
+  simde__m128i *stream0_128i_in = (simde__m128i *)stream0_in;
+  simde__m128i *stream1_128i_in = (simde__m128i *)stream1_in;
+  simde__m128i *ch_mag_128i     = (simde__m128i *)ch_mag;
+
+
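+  // Fixed-point constants: each value is round(c * 2^k) with k = 16, 15 or 14,
+  // the largest scale at which the constant still fits in int16.
+  // simde_mm_mulhi_epi16 keeps the high 16 bits of the 32-bit product (an
+  // implicit >>16), so products involving the 2^15 / 2^14 entries are
+  // re-aligned further down with simde_mm_slli_epi16 shifts of 1 or 2.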
+  simde__m128i ONE_OVER_SQRT_42 = simde_mm_set1_epi16(10112); // round(1/sqrt(42)*2^16)
+  simde__m128i THREE_OVER_SQRT_42 = simde_mm_set1_epi16(30337); // round(3/sqrt(42)*2^16)
+  simde__m128i FIVE_OVER_SQRT_42 = simde_mm_set1_epi16(25281); // round(5/sqrt(42)*2^15)
+  simde__m128i SEVEN_OVER_SQRT_42 = simde_mm_set1_epi16(17697); // round(7/sqrt(42)*2^14), Q2.14
+  simde__m128i ONE_OVER_SQRT_2 = simde_mm_set1_epi16(23170); // round(1/sqrt(2)*2^15)
+  simde__m128i FORTYNINE_OVER_FOUR_SQRT_42 = simde_mm_set1_epi16(30969); // round(49/(4*sqrt(42))*2^14), Q2.14
+  simde__m128i THIRTYSEVEN_OVER_FOUR_SQRT_42 = simde_mm_set1_epi16(23385); // round(37/(4*sqrt(42))*2^14), Q2.14
+  simde__m128i TWENTYFIVE_OVER_FOUR_SQRT_42 = simde_mm_set1_epi16(31601); // round(25/(4*sqrt(42))*2^15)
+  simde__m128i TWENTYNINE_OVER_FOUR_SQRT_42 = simde_mm_set1_epi16(18329); // round(29/(4*sqrt(42))*2^14), Q2.14
+  simde__m128i SEVENTEEN_OVER_FOUR_SQRT_42 = simde_mm_set1_epi16(21489); // round(17/(4*sqrt(42))*2^15)
+  simde__m128i NINE_OVER_FOUR_SQRT_42 = simde_mm_set1_epi16(11376); // round(9/(4*sqrt(42))*2^15)
+  simde__m128i THIRTEEN_OVER_FOUR_SQRT_42 = simde_mm_set1_epi16(16433); // round(13/(4*sqrt(42))*2^15)
+  simde__m128i FIVE_OVER_FOUR_SQRT_42 = simde_mm_set1_epi16(6320); // round(5/(4*sqrt(42))*2^15)
+  simde__m128i ONE_OVER_FOUR_SQRT_42 = simde_mm_set1_epi16(1264); // round(1/(4*sqrt(42))*2^15)
+
+
+  simde__m128i ch_mag_des;
+  simde__m128i ch_mag_98_over_42_with_sigma2;
+  simde__m128i ch_mag_74_over_42_with_sigma2;
+  simde__m128i ch_mag_58_over_42_with_sigma2;
+  simde__m128i ch_mag_50_over_42_with_sigma2;
+  simde__m128i ch_mag_34_over_42_with_sigma2;
+  simde__m128i ch_mag_18_over_42_with_sigma2;
+  simde__m128i ch_mag_26_over_42_with_sigma2;
+  simde__m128i ch_mag_10_over_42_with_sigma2;
+  simde__m128i ch_mag_2_over_42_with_sigma2;
+  simde__m128i  y0r_one_over_sqrt_21;
+  simde__m128i  y0r_three_over_sqrt_21;
+  simde__m128i  y0r_five_over_sqrt_21;
+  simde__m128i  y0r_seven_over_sqrt_21;
+  simde__m128i  y0i_one_over_sqrt_21;
+  simde__m128i  y0i_three_over_sqrt_21;
+  simde__m128i  y0i_five_over_sqrt_21;
+  simde__m128i  y0i_seven_over_sqrt_21;
 
   int i,j;
 
   for (i=0; i<length>>2; i+=2) {
 
-#if defined(__x86_64) || defined(__i386__)
     // Get rho
     simde__m128i xmm0 = rho01_128i[i];
     simde__m128i xmm1 = rho01_128i[i + 1];
-    xmm0 = _mm_shufflelo_epi16(xmm0,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm0 = _mm_shufflehi_epi16(xmm0,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm0 = _mm_shuffle_epi32(xmm0,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm1 = _mm_shufflelo_epi16(xmm1,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm1 = _mm_shufflehi_epi16(xmm1,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm1 = _mm_shuffle_epi32(xmm1,0xd8); //_MM_SHUFFLE(0,2,1,3));
+    xmm0 = simde_mm_shufflelo_epi16(xmm0, 0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3)
+    xmm0 = simde_mm_shufflehi_epi16(xmm0, 0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3)
+    xmm0 = simde_mm_shuffle_epi32(xmm0, 0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3)
+    xmm1 = simde_mm_shufflelo_epi16(xmm1, 0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3)
+    xmm1 = simde_mm_shufflehi_epi16(xmm1, 0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3)
+    xmm1 = simde_mm_shuffle_epi32(xmm1, 0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3)
     //xmm0 = [Re(0,1) Re(2,3) Im(0,1) Im(2,3)]
     //xmm1 = [Re(4,5) Re(6,7) Im(4,5) Im(6,7)]
-    simde__m128i xmm2 = _mm_unpacklo_epi64(xmm0, xmm1); // Re(rho)
-    simde__m128i xmm3 = _mm_unpackhi_epi64(xmm0, xmm1); // Im(rho)
-    simde__m128i rho_rpi = _mm_adds_epi16(xmm2, xmm3); // rho = Re(rho) + Im(rho)
-    simde__m128i rho_rmi = _mm_subs_epi16(xmm2, xmm3); // rho* = Re(rho) - Im(rho)
+    simde__m128i xmm2 = simde_mm_unpacklo_epi64(xmm0, xmm1); // Re(rho)
+    simde__m128i xmm3 = simde_mm_unpackhi_epi64(xmm0, xmm1); // Im(rho)
+    simde__m128i rho_rpi = simde_mm_adds_epi16(xmm2, xmm3); // rho = Re(rho) + Im(rho)
+    simde__m128i rho_rmi = simde_mm_subs_epi16(xmm2, xmm3); // rho* = Re(rho) - Im(rho)
 
     // Compute the different rhos
-    simde__m128i rho_rpi_1_1 = _mm_mulhi_epi16(rho_rpi, ONE_OVER_SQRT_42);
-    simde__m128i rho_rmi_1_1 = _mm_mulhi_epi16(rho_rmi, ONE_OVER_SQRT_42);
-    simde__m128i rho_rpi_3_3 = _mm_mulhi_epi16(rho_rpi, THREE_OVER_SQRT_42);
-    simde__m128i rho_rmi_3_3 = _mm_mulhi_epi16(rho_rmi, THREE_OVER_SQRT_42);
-    simde__m128i rho_rpi_5_5 = _mm_mulhi_epi16(rho_rpi, FIVE_OVER_SQRT_42);
-    simde__m128i rho_rmi_5_5 = _mm_mulhi_epi16(rho_rmi, FIVE_OVER_SQRT_42);
-    simde__m128i rho_rpi_7_7 = _mm_mulhi_epi16(rho_rpi, SEVEN_OVER_SQRT_42);
-    simde__m128i rho_rmi_7_7 = _mm_mulhi_epi16(rho_rmi, SEVEN_OVER_SQRT_42);
-
-    rho_rpi_5_5 = _mm_slli_epi16(rho_rpi_5_5, 1);
-    rho_rmi_5_5 = _mm_slli_epi16(rho_rmi_5_5, 1);
-    rho_rpi_7_7 = _mm_slli_epi16(rho_rpi_7_7, 2);
-    rho_rmi_7_7 = _mm_slli_epi16(rho_rmi_7_7, 2);
-
-    simde__m128i xmm4 = _mm_mulhi_epi16(xmm2, ONE_OVER_SQRT_42);
-    simde__m128i xmm5 = _mm_mulhi_epi16(xmm3, ONE_OVER_SQRT_42);
-    simde__m128i xmm6 = _mm_mulhi_epi16(xmm3, THREE_OVER_SQRT_42);
-    simde__m128i xmm7 = _mm_mulhi_epi16(xmm3, FIVE_OVER_SQRT_42);
-    simde__m128i xmm8 = _mm_mulhi_epi16(xmm3, SEVEN_OVER_SQRT_42);
-    xmm7 = _mm_slli_epi16(xmm7, 1);
-    xmm8 = _mm_slli_epi16(xmm8, 2);
-
-    simde__m128i rho_rpi_1_3 = _mm_adds_epi16(xmm4, xmm6);
-    simde__m128i rho_rmi_1_3 = _mm_subs_epi16(xmm4, xmm6);
-    simde__m128i rho_rpi_1_5 = _mm_adds_epi16(xmm4, xmm7);
-    simde__m128i rho_rmi_1_5 = _mm_subs_epi16(xmm4, xmm7);
-    simde__m128i rho_rpi_1_7 = _mm_adds_epi16(xmm4, xmm8);
-    simde__m128i rho_rmi_1_7 = _mm_subs_epi16(xmm4, xmm8);
-
-    xmm4 = _mm_mulhi_epi16(xmm2, THREE_OVER_SQRT_42);
-    simde__m128i rho_rpi_3_1 = _mm_adds_epi16(xmm4, xmm5);
-    simde__m128i rho_rmi_3_1 = _mm_subs_epi16(xmm4, xmm5);
-    simde__m128i rho_rpi_3_5 = _mm_adds_epi16(xmm4, xmm7);
-    simde__m128i rho_rmi_3_5 = _mm_subs_epi16(xmm4, xmm7);
-    simde__m128i rho_rpi_3_7 = _mm_adds_epi16(xmm4, xmm8);
-    simde__m128i rho_rmi_3_7 = _mm_subs_epi16(xmm4, xmm8);
-
-    xmm4 = _mm_mulhi_epi16(xmm2, FIVE_OVER_SQRT_42);
-    xmm4 = _mm_slli_epi16(xmm4, 1);
-    simde__m128i rho_rpi_5_1 = _mm_adds_epi16(xmm4, xmm5);
-    simde__m128i rho_rmi_5_1 = _mm_subs_epi16(xmm4, xmm5);
-    simde__m128i rho_rpi_5_3 = _mm_adds_epi16(xmm4, xmm6);
-    simde__m128i rho_rmi_5_3 = _mm_subs_epi16(xmm4, xmm6);
-    simde__m128i rho_rpi_5_7 = _mm_adds_epi16(xmm4, xmm8);
-    simde__m128i rho_rmi_5_7 = _mm_subs_epi16(xmm4, xmm8);
-
-    xmm4 = _mm_mulhi_epi16(xmm2, SEVEN_OVER_SQRT_42);
-    xmm4 = _mm_slli_epi16(xmm4, 2);
-    simde__m128i rho_rpi_7_1 = _mm_adds_epi16(xmm4, xmm5);
-    simde__m128i rho_rmi_7_1 = _mm_subs_epi16(xmm4, xmm5);
-    simde__m128i rho_rpi_7_3 = _mm_adds_epi16(xmm4, xmm6);
-    simde__m128i rho_rmi_7_3 = _mm_subs_epi16(xmm4, xmm6);
-    simde__m128i rho_rpi_7_5 = _mm_adds_epi16(xmm4, xmm7);
-    simde__m128i rho_rmi_7_5 = _mm_subs_epi16(xmm4, xmm7);
+    simde__m128i rho_rpi_1_1 = simde_mm_mulhi_epi16(rho_rpi, ONE_OVER_SQRT_42);
+    simde__m128i rho_rmi_1_1 = simde_mm_mulhi_epi16(rho_rmi, ONE_OVER_SQRT_42);
+    simde__m128i rho_rpi_3_3 = simde_mm_mulhi_epi16(rho_rpi, THREE_OVER_SQRT_42);
+    simde__m128i rho_rmi_3_3 = simde_mm_mulhi_epi16(rho_rmi, THREE_OVER_SQRT_42);
+    simde__m128i rho_rpi_5_5 = simde_mm_mulhi_epi16(rho_rpi, FIVE_OVER_SQRT_42);
+    simde__m128i rho_rmi_5_5 = simde_mm_mulhi_epi16(rho_rmi, FIVE_OVER_SQRT_42);
+    simde__m128i rho_rpi_7_7 = simde_mm_mulhi_epi16(rho_rpi, SEVEN_OVER_SQRT_42);
+    simde__m128i rho_rmi_7_7 = simde_mm_mulhi_epi16(rho_rmi, SEVEN_OVER_SQRT_42);
+
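+    // 5/sqrt(42) and 7/sqrt(42) exceed int16 at scale 2^16, so their constants
+    // are stored at 2^15 and 2^14; shift the products left by 1 and 2 to
+    // restore the common scale.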
+    rho_rpi_5_5 = simde_mm_slli_epi16(rho_rpi_5_5, 1);
+    rho_rmi_5_5 = simde_mm_slli_epi16(rho_rmi_5_5, 1);
+    rho_rpi_7_7 = simde_mm_slli_epi16(rho_rpi_7_7, 2);
+    rho_rmi_7_7 = simde_mm_slli_epi16(rho_rmi_7_7, 2);
+
+    simde__m128i xmm4 = simde_mm_mulhi_epi16(xmm2, ONE_OVER_SQRT_42);
+    simde__m128i xmm5 = simde_mm_mulhi_epi16(xmm3, ONE_OVER_SQRT_42);
+    simde__m128i xmm6 = simde_mm_mulhi_epi16(xmm3, THREE_OVER_SQRT_42);
+    simde__m128i xmm7 = simde_mm_mulhi_epi16(xmm3, FIVE_OVER_SQRT_42);
+    simde__m128i xmm8 = simde_mm_mulhi_epi16(xmm3, SEVEN_OVER_SQRT_42);
+    xmm7 = simde_mm_slli_epi16(xmm7, 1);
+    xmm8 = simde_mm_slli_epi16(xmm8, 2);
+
+    simde__m128i rho_rpi_1_3 = simde_mm_adds_epi16(xmm4, xmm6);
+    simde__m128i rho_rmi_1_3 = simde_mm_subs_epi16(xmm4, xmm6);
+    simde__m128i rho_rpi_1_5 = simde_mm_adds_epi16(xmm4, xmm7);
+    simde__m128i rho_rmi_1_5 = simde_mm_subs_epi16(xmm4, xmm7);
+    simde__m128i rho_rpi_1_7 = simde_mm_adds_epi16(xmm4, xmm8);
+    simde__m128i rho_rmi_1_7 = simde_mm_subs_epi16(xmm4, xmm8);
+
+    xmm4 = simde_mm_mulhi_epi16(xmm2, THREE_OVER_SQRT_42);
+    simde__m128i rho_rpi_3_1 = simde_mm_adds_epi16(xmm4, xmm5);
+    simde__m128i rho_rmi_3_1 = simde_mm_subs_epi16(xmm4, xmm5);
+    simde__m128i rho_rpi_3_5 = simde_mm_adds_epi16(xmm4, xmm7);
+    simde__m128i rho_rmi_3_5 = simde_mm_subs_epi16(xmm4, xmm7);
+    simde__m128i rho_rpi_3_7 = simde_mm_adds_epi16(xmm4, xmm8);
+    simde__m128i rho_rmi_3_7 = simde_mm_subs_epi16(xmm4, xmm8);
+
+    xmm4 = simde_mm_mulhi_epi16(xmm2, FIVE_OVER_SQRT_42);
+    xmm4 = simde_mm_slli_epi16(xmm4, 1);
+    simde__m128i rho_rpi_5_1 = simde_mm_adds_epi16(xmm4, xmm5);
+    simde__m128i rho_rmi_5_1 = simde_mm_subs_epi16(xmm4, xmm5);
+    simde__m128i rho_rpi_5_3 = simde_mm_adds_epi16(xmm4, xmm6);
+    simde__m128i rho_rmi_5_3 = simde_mm_subs_epi16(xmm4, xmm6);
+    simde__m128i rho_rpi_5_7 = simde_mm_adds_epi16(xmm4, xmm8);
+    simde__m128i rho_rmi_5_7 = simde_mm_subs_epi16(xmm4, xmm8);
+
+    xmm4 = simde_mm_mulhi_epi16(xmm2, SEVEN_OVER_SQRT_42);
+    xmm4 = simde_mm_slli_epi16(xmm4, 2);
+    simde__m128i rho_rpi_7_1 = simde_mm_adds_epi16(xmm4, xmm5);
+    simde__m128i rho_rmi_7_1 = simde_mm_subs_epi16(xmm4, xmm5);
+    simde__m128i rho_rpi_7_3 = simde_mm_adds_epi16(xmm4, xmm6);
+    simde__m128i rho_rmi_7_3 = simde_mm_subs_epi16(xmm4, xmm6);
+    simde__m128i rho_rpi_7_5 = simde_mm_adds_epi16(xmm4, xmm7);
+    simde__m128i rho_rmi_7_5 = simde_mm_subs_epi16(xmm4, xmm7);
 
     // Rearrange interfering MF output
     xmm0 = stream1_128i_in[i];
     xmm1 = stream1_128i_in[i+1];
-    xmm0 = _mm_shufflelo_epi16(xmm0,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm0 = _mm_shufflehi_epi16(xmm0,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm0 = _mm_shuffle_epi32(xmm0,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm1 = _mm_shufflelo_epi16(xmm1,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm1 = _mm_shufflehi_epi16(xmm1,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm1 = _mm_shuffle_epi32(xmm1,0xd8); //_MM_SHUFFLE(0,2,1,3));
+    xmm0 = simde_mm_shufflelo_epi16(xmm0, 0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3)
+    xmm0 = simde_mm_shufflehi_epi16(xmm0, 0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3)
+    xmm0 = simde_mm_shuffle_epi32(xmm0, 0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3)
+    xmm1 = simde_mm_shufflelo_epi16(xmm1, 0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3)
+    xmm1 = simde_mm_shufflehi_epi16(xmm1, 0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3)
+    xmm1 = simde_mm_shuffle_epi32(xmm1, 0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3)
     //xmm0 = [Re(0,1) Re(2,3) Im(0,1) Im(2,3)]
     //xmm1 = [Re(4,5) Re(6,7) Im(4,5) Im(6,7)]
-    simde__m128i y1r = _mm_unpacklo_epi64(xmm0, xmm1); //[y1r(1),y1r(2),y1r(3),y1r(4)]
-    simde__m128i y1i = _mm_unpackhi_epi64(xmm0, xmm1); //[y1i(1),y1i(2),y1i(3),y1i(4)]
+    simde__m128i y1r = simde_mm_unpacklo_epi64(xmm0, xmm1); //[y1r(1),y1r(2),y1r(3),y1r(4)]
+    simde__m128i y1i = simde_mm_unpackhi_epi64(xmm0, xmm1); //[y1i(1),y1i(2),y1i(3),y1i(4)]
 
     // Psi_r calculation from rho_rpi or rho_rmi
-    xmm0 = _mm_setzero_si128(); // ZERO for abs_pi16
-    xmm2 = _mm_subs_epi16(rho_rpi_7_7, y1r);
-    simde__m128i psi_r_p7_p7 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_subs_epi16(rho_rpi_7_5, y1r);
-    simde__m128i psi_r_p7_p5 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_subs_epi16(rho_rpi_7_3, y1r);
-    simde__m128i psi_r_p7_p3 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_subs_epi16(rho_rpi_7_1, y1r);
-    simde__m128i psi_r_p7_p1 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_subs_epi16(rho_rmi_7_1, y1r);
-    simde__m128i psi_r_p7_m1 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_subs_epi16(rho_rmi_7_3, y1r);
-    simde__m128i psi_r_p7_m3 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_subs_epi16(rho_rmi_7_5, y1r);
-    simde__m128i psi_r_p7_m5 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_subs_epi16(rho_rmi_7_7, y1r);
-    simde__m128i psi_r_p7_m7 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_subs_epi16(rho_rpi_5_7, y1r);
-    simde__m128i psi_r_p5_p7 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_subs_epi16(rho_rpi_5_5, y1r);
-    simde__m128i psi_r_p5_p5 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_subs_epi16(rho_rpi_5_3, y1r);
-    simde__m128i psi_r_p5_p3 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_subs_epi16(rho_rpi_5_1, y1r);
-    simde__m128i psi_r_p5_p1 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_subs_epi16(rho_rmi_5_1, y1r);
-    simde__m128i psi_r_p5_m1 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_subs_epi16(rho_rmi_5_3, y1r);
-    simde__m128i psi_r_p5_m3 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_subs_epi16(rho_rmi_5_5, y1r);
-    simde__m128i psi_r_p5_m5 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_subs_epi16(rho_rmi_5_7, y1r);
-    simde__m128i psi_r_p5_m7 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_subs_epi16(rho_rpi_3_7, y1r);
-    simde__m128i psi_r_p3_p7 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_subs_epi16(rho_rpi_3_5, y1r);
-    simde__m128i psi_r_p3_p5 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_subs_epi16(rho_rpi_3_3, y1r);
-    simde__m128i psi_r_p3_p3 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_subs_epi16(rho_rpi_3_1, y1r);
-    simde__m128i psi_r_p3_p1 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_subs_epi16(rho_rmi_3_1, y1r);
-    simde__m128i psi_r_p3_m1 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_subs_epi16(rho_rmi_3_3, y1r);
-    simde__m128i psi_r_p3_m3 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_subs_epi16(rho_rmi_3_5, y1r);
-    simde__m128i psi_r_p3_m5 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_subs_epi16(rho_rmi_3_7, y1r);
-    simde__m128i psi_r_p3_m7 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_subs_epi16(rho_rpi_1_7, y1r);
-    simde__m128i psi_r_p1_p7 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_subs_epi16(rho_rpi_1_5, y1r);
-    simde__m128i psi_r_p1_p5 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_subs_epi16(rho_rpi_1_3, y1r);
-    simde__m128i psi_r_p1_p3 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_subs_epi16(rho_rpi_1_1, y1r);
-    simde__m128i psi_r_p1_p1 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_subs_epi16(rho_rmi_1_1, y1r);
-    simde__m128i psi_r_p1_m1 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_subs_epi16(rho_rmi_1_3, y1r);
-    simde__m128i psi_r_p1_m3 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_subs_epi16(rho_rmi_1_5, y1r);
-    simde__m128i psi_r_p1_m5 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_subs_epi16(rho_rmi_1_7, y1r);
-    simde__m128i psi_r_p1_m7 = _mm_abs_epi16(xmm2);
-
-    xmm2 = _mm_adds_epi16(rho_rmi_1_7, y1r);
-    simde__m128i psi_r_m1_p7 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_adds_epi16(rho_rmi_1_5, y1r);
-    simde__m128i psi_r_m1_p5 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_adds_epi16(rho_rmi_1_3, y1r);
-    simde__m128i psi_r_m1_p3 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_adds_epi16(rho_rmi_1_1, y1r);
-    simde__m128i psi_r_m1_p1 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_adds_epi16(rho_rpi_1_1, y1r);
-    simde__m128i psi_r_m1_m1 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_adds_epi16(rho_rpi_1_3, y1r);
-    simde__m128i psi_r_m1_m3 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_adds_epi16(rho_rpi_1_5, y1r);
-    simde__m128i psi_r_m1_m5 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_adds_epi16(rho_rpi_1_7, y1r);
-    simde__m128i psi_r_m1_m7 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_adds_epi16(rho_rmi_3_7, y1r);
-    simde__m128i psi_r_m3_p7 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_adds_epi16(rho_rmi_3_5, y1r);
-    simde__m128i psi_r_m3_p5 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_adds_epi16(rho_rmi_3_3, y1r);
-    simde__m128i psi_r_m3_p3 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_adds_epi16(rho_rmi_3_1, y1r);
-    simde__m128i psi_r_m3_p1 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_adds_epi16(rho_rpi_3_1, y1r);
-    simde__m128i psi_r_m3_m1 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_adds_epi16(rho_rpi_3_3, y1r);
-    simde__m128i psi_r_m3_m3 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_adds_epi16(rho_rpi_3_5, y1r);
-    simde__m128i psi_r_m3_m5 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_adds_epi16(rho_rpi_3_7, y1r);
-    simde__m128i psi_r_m3_m7 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_adds_epi16(rho_rmi_5_7, y1r);
-    simde__m128i psi_r_m5_p7 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_adds_epi16(rho_rmi_5_5, y1r);
-    simde__m128i psi_r_m5_p5 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_adds_epi16(rho_rmi_5_3, y1r);
-    simde__m128i psi_r_m5_p3 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_adds_epi16(rho_rmi_5_1, y1r);
-    simde__m128i psi_r_m5_p1 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_adds_epi16(rho_rpi_5_1, y1r);
-    simde__m128i psi_r_m5_m1 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_adds_epi16(rho_rpi_5_3, y1r);
-    simde__m128i psi_r_m5_m3 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_adds_epi16(rho_rpi_5_5, y1r);
-    simde__m128i psi_r_m5_m5 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_adds_epi16(rho_rpi_5_7, y1r);
-    simde__m128i psi_r_m5_m7 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_adds_epi16(rho_rmi_7_7, y1r);
-    simde__m128i psi_r_m7_p7 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_adds_epi16(rho_rmi_7_5, y1r);
-    simde__m128i psi_r_m7_p5 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_adds_epi16(rho_rmi_7_3, y1r);
-    simde__m128i psi_r_m7_p3 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_adds_epi16(rho_rmi_7_1, y1r);
-    simde__m128i psi_r_m7_p1 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_adds_epi16(rho_rpi_7_1, y1r);
-    simde__m128i psi_r_m7_m1 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_adds_epi16(rho_rpi_7_3, y1r);
-    simde__m128i psi_r_m7_m3 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_adds_epi16(rho_rpi_7_5, y1r);
-    simde__m128i psi_r_m7_m5 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_adds_epi16(rho_rpi_7_7, y1r);
-    simde__m128i psi_r_m7_m7 = _mm_abs_epi16(xmm2);
+    xmm0 = simde_mm_setzero_si128(); // legacy zero for the old abs_pi16 emulation; simde_mm_abs_epi16 does not use it
+    xmm2 = simde_mm_subs_epi16(rho_rpi_7_7, y1r);
+    simde__m128i psi_r_p7_p7 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rpi_7_5, y1r);
+    simde__m128i psi_r_p7_p5 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rpi_7_3, y1r);
+    simde__m128i psi_r_p7_p3 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rpi_7_1, y1r);
+    simde__m128i psi_r_p7_p1 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rmi_7_1, y1r);
+    simde__m128i psi_r_p7_m1 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rmi_7_3, y1r);
+    simde__m128i psi_r_p7_m3 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rmi_7_5, y1r);
+    simde__m128i psi_r_p7_m5 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rmi_7_7, y1r);
+    simde__m128i psi_r_p7_m7 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rpi_5_7, y1r);
+    simde__m128i psi_r_p5_p7 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rpi_5_5, y1r);
+    simde__m128i psi_r_p5_p5 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rpi_5_3, y1r);
+    simde__m128i psi_r_p5_p3 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rpi_5_1, y1r);
+    simde__m128i psi_r_p5_p1 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rmi_5_1, y1r);
+    simde__m128i psi_r_p5_m1 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rmi_5_3, y1r);
+    simde__m128i psi_r_p5_m3 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rmi_5_5, y1r);
+    simde__m128i psi_r_p5_m5 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rmi_5_7, y1r);
+    simde__m128i psi_r_p5_m7 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rpi_3_7, y1r);
+    simde__m128i psi_r_p3_p7 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rpi_3_5, y1r);
+    simde__m128i psi_r_p3_p5 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rpi_3_3, y1r);
+    simde__m128i psi_r_p3_p3 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rpi_3_1, y1r);
+    simde__m128i psi_r_p3_p1 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rmi_3_1, y1r);
+    simde__m128i psi_r_p3_m1 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rmi_3_3, y1r);
+    simde__m128i psi_r_p3_m3 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rmi_3_5, y1r);
+    simde__m128i psi_r_p3_m5 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rmi_3_7, y1r);
+    simde__m128i psi_r_p3_m7 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rpi_1_7, y1r);
+    simde__m128i psi_r_p1_p7 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rpi_1_5, y1r);
+    simde__m128i psi_r_p1_p5 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rpi_1_3, y1r);
+    simde__m128i psi_r_p1_p3 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rpi_1_1, y1r);
+    simde__m128i psi_r_p1_p1 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rmi_1_1, y1r);
+    simde__m128i psi_r_p1_m1 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rmi_1_3, y1r);
+    simde__m128i psi_r_p1_m3 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rmi_1_5, y1r);
+    simde__m128i psi_r_p1_m5 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rmi_1_7, y1r);
+    simde__m128i psi_r_p1_m7 = simde_mm_abs_epi16(xmm2);
+
+    xmm2 = simde_mm_adds_epi16(rho_rmi_1_7, y1r);
+    simde__m128i psi_r_m1_p7 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(rho_rmi_1_5, y1r);
+    simde__m128i psi_r_m1_p5 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(rho_rmi_1_3, y1r);
+    simde__m128i psi_r_m1_p3 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(rho_rmi_1_1, y1r);
+    simde__m128i psi_r_m1_p1 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(rho_rpi_1_1, y1r);
+    simde__m128i psi_r_m1_m1 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(rho_rpi_1_3, y1r);
+    simde__m128i psi_r_m1_m3 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(rho_rpi_1_5, y1r);
+    simde__m128i psi_r_m1_m5 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(rho_rpi_1_7, y1r);
+    simde__m128i psi_r_m1_m7 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(rho_rmi_3_7, y1r);
+    simde__m128i psi_r_m3_p7 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(rho_rmi_3_5, y1r);
+    simde__m128i psi_r_m3_p5 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(rho_rmi_3_3, y1r);
+    simde__m128i psi_r_m3_p3 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(rho_rmi_3_1, y1r);
+    simde__m128i psi_r_m3_p1 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(rho_rpi_3_1, y1r);
+    simde__m128i psi_r_m3_m1 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(rho_rpi_3_3, y1r);
+    simde__m128i psi_r_m3_m3 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(rho_rpi_3_5, y1r);
+    simde__m128i psi_r_m3_m5 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(rho_rpi_3_7, y1r);
+    simde__m128i psi_r_m3_m7 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(rho_rmi_5_7, y1r);
+    simde__m128i psi_r_m5_p7 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(rho_rmi_5_5, y1r);
+    simde__m128i psi_r_m5_p5 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(rho_rmi_5_3, y1r);
+    simde__m128i psi_r_m5_p3 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(rho_rmi_5_1, y1r);
+    simde__m128i psi_r_m5_p1 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(rho_rpi_5_1, y1r);
+    simde__m128i psi_r_m5_m1 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(rho_rpi_5_3, y1r);
+    simde__m128i psi_r_m5_m3 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(rho_rpi_5_5, y1r);
+    simde__m128i psi_r_m5_m5 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(rho_rpi_5_7, y1r);
+    simde__m128i psi_r_m5_m7 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(rho_rmi_7_7, y1r);
+    simde__m128i psi_r_m7_p7 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(rho_rmi_7_5, y1r);
+    simde__m128i psi_r_m7_p5 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(rho_rmi_7_3, y1r);
+    simde__m128i psi_r_m7_p3 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(rho_rmi_7_1, y1r);
+    simde__m128i psi_r_m7_p1 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(rho_rpi_7_1, y1r);
+    simde__m128i psi_r_m7_m1 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(rho_rpi_7_3, y1r);
+    simde__m128i psi_r_m7_m3 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(rho_rpi_7_5, y1r);
+    simde__m128i psi_r_m7_m5 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(rho_rpi_7_7, y1r);
+    simde__m128i psi_r_m7_m7 = simde_mm_abs_epi16(xmm2);
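+    // Each psi_r_a_b above = |Re(conj(rho) * x) - y1r| for the 64-QAM
+    // hypothesis x = (a + jb)/sqrt(42), a, b in {+-1, +-3, +-5, +-7}; the
+    // psi_i block below computes the matching imaginary parts against y1i.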
 
     // Psi_i calculation from rho_rpi or rho_rmi
-    xmm2 = _mm_subs_epi16(rho_rmi_7_7, y1i);
-    simde__m128i psi_i_p7_p7 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_subs_epi16(rho_rmi_5_7, y1i);
-    simde__m128i psi_i_p7_p5 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_subs_epi16(rho_rmi_3_7, y1i);
-    simde__m128i psi_i_p7_p3 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_subs_epi16(rho_rmi_1_7, y1i);
-    simde__m128i psi_i_p7_p1 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_adds_epi16(rho_rpi_1_7, y1i);
-    simde__m128i psi_i_p7_m1 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_adds_epi16(rho_rpi_3_7, y1i);
-    simde__m128i psi_i_p7_m3 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_adds_epi16(rho_rpi_5_7, y1i);
-    simde__m128i psi_i_p7_m5 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_adds_epi16(rho_rpi_7_7, y1i);
-    simde__m128i psi_i_p7_m7 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_subs_epi16(rho_rmi_7_5, y1i);
-    simde__m128i psi_i_p5_p7 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_subs_epi16(rho_rmi_5_5, y1i);
-    simde__m128i psi_i_p5_p5 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_subs_epi16(rho_rmi_3_5, y1i);
-    simde__m128i psi_i_p5_p3 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_subs_epi16(rho_rmi_1_5, y1i);
-    simde__m128i psi_i_p5_p1 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_adds_epi16(rho_rpi_1_5, y1i);
-    simde__m128i psi_i_p5_m1 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_adds_epi16(rho_rpi_3_5, y1i);
-    simde__m128i psi_i_p5_m3 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_adds_epi16(rho_rpi_5_5, y1i);
-    simde__m128i psi_i_p5_m5 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_adds_epi16(rho_rpi_7_5, y1i);
-    simde__m128i psi_i_p5_m7 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_subs_epi16(rho_rmi_7_3, y1i);
-    simde__m128i psi_i_p3_p7 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_subs_epi16(rho_rmi_5_3, y1i);
-    simde__m128i psi_i_p3_p5 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_subs_epi16(rho_rmi_3_3, y1i);
-    simde__m128i psi_i_p3_p3 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_subs_epi16(rho_rmi_1_3, y1i);
-    simde__m128i psi_i_p3_p1 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_adds_epi16(rho_rpi_1_3, y1i);
-    simde__m128i psi_i_p3_m1 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_adds_epi16(rho_rpi_3_3, y1i);
-    simde__m128i psi_i_p3_m3 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_adds_epi16(rho_rpi_5_3, y1i);
-    simde__m128i psi_i_p3_m5 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_adds_epi16(rho_rpi_7_3, y1i);
-    simde__m128i psi_i_p3_m7 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_subs_epi16(rho_rmi_7_1, y1i);
-    simde__m128i psi_i_p1_p7 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_subs_epi16(rho_rmi_5_1, y1i);
-    simde__m128i psi_i_p1_p5 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_subs_epi16(rho_rmi_3_1, y1i);
-    simde__m128i psi_i_p1_p3 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_subs_epi16(rho_rmi_1_1, y1i);
-    simde__m128i psi_i_p1_p1 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_adds_epi16(rho_rpi_1_1, y1i);
-    simde__m128i psi_i_p1_m1 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_adds_epi16(rho_rpi_3_1, y1i);
-    simde__m128i psi_i_p1_m3 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_adds_epi16(rho_rpi_5_1, y1i);
-    simde__m128i psi_i_p1_m5 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_adds_epi16(rho_rpi_7_1, y1i);
-    simde__m128i psi_i_p1_m7 = _mm_abs_epi16(xmm2);
-
-    xmm2 = _mm_subs_epi16(rho_rpi_7_1, y1i);
-    simde__m128i psi_i_m1_p7 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_subs_epi16(rho_rpi_5_1, y1i);
-    simde__m128i psi_i_m1_p5 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_subs_epi16(rho_rpi_3_1, y1i);
-    simde__m128i psi_i_m1_p3 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_subs_epi16(rho_rpi_1_1, y1i);
-    simde__m128i psi_i_m1_p1 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_adds_epi16(rho_rmi_1_1, y1i);
-    simde__m128i psi_i_m1_m1 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_adds_epi16(rho_rmi_3_1, y1i);
-    simde__m128i psi_i_m1_m3 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_adds_epi16(rho_rmi_5_1, y1i);
-    simde__m128i psi_i_m1_m5 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_adds_epi16(rho_rmi_7_1, y1i);
-    simde__m128i psi_i_m1_m7 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_subs_epi16(rho_rpi_7_3, y1i);
-    simde__m128i psi_i_m3_p7 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_subs_epi16(rho_rpi_5_3, y1i);
-    simde__m128i psi_i_m3_p5 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_subs_epi16(rho_rpi_3_3, y1i);
-    simde__m128i psi_i_m3_p3 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_subs_epi16(rho_rpi_1_3, y1i);
-    simde__m128i psi_i_m3_p1 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_adds_epi16(rho_rmi_1_3, y1i);
-    simde__m128i psi_i_m3_m1 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_adds_epi16(rho_rmi_3_3, y1i);
-    simde__m128i psi_i_m3_m3 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_adds_epi16(rho_rmi_5_3, y1i);
-    simde__m128i psi_i_m3_m5 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_adds_epi16(rho_rmi_7_3, y1i);
-    simde__m128i psi_i_m3_m7 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_subs_epi16(rho_rpi_7_5, y1i);
-    simde__m128i psi_i_m5_p7 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_subs_epi16(rho_rpi_5_5, y1i);
-    simde__m128i psi_i_m5_p5 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_subs_epi16(rho_rpi_3_5, y1i);
-    simde__m128i psi_i_m5_p3 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_subs_epi16(rho_rpi_1_5, y1i);
-    simde__m128i psi_i_m5_p1 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_adds_epi16(rho_rmi_1_5, y1i);
-    simde__m128i psi_i_m5_m1 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_adds_epi16(rho_rmi_3_5, y1i);
-    simde__m128i psi_i_m5_m3 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_adds_epi16(rho_rmi_5_5, y1i);
-    simde__m128i psi_i_m5_m5 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_adds_epi16(rho_rmi_7_5, y1i);
-    simde__m128i psi_i_m5_m7 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_subs_epi16(rho_rpi_7_7, y1i);
-    simde__m128i psi_i_m7_p7 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_subs_epi16(rho_rpi_5_7, y1i);
-    simde__m128i psi_i_m7_p5 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_subs_epi16(rho_rpi_3_7, y1i);
-    simde__m128i psi_i_m7_p3 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_subs_epi16(rho_rpi_1_7, y1i);
-    simde__m128i psi_i_m7_p1 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_adds_epi16(rho_rmi_1_7, y1i);
-    simde__m128i psi_i_m7_m1 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_adds_epi16(rho_rmi_3_7, y1i);
-    simde__m128i psi_i_m7_m3 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_adds_epi16(rho_rmi_5_7, y1i);
-    simde__m128i psi_i_m7_m5 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_adds_epi16(rho_rmi_7_7, y1i);
-    simde__m128i psi_i_m7_m7 = _mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rmi_7_7, y1i);
+    simde__m128i psi_i_p7_p7 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rmi_5_7, y1i);
+    simde__m128i psi_i_p7_p5 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rmi_3_7, y1i);
+    simde__m128i psi_i_p7_p3 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rmi_1_7, y1i);
+    simde__m128i psi_i_p7_p1 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(rho_rpi_1_7, y1i);
+    simde__m128i psi_i_p7_m1 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(rho_rpi_3_7, y1i);
+    simde__m128i psi_i_p7_m3 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(rho_rpi_5_7, y1i);
+    simde__m128i psi_i_p7_m5 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(rho_rpi_7_7, y1i);
+    simde__m128i psi_i_p7_m7 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rmi_7_5, y1i);
+    simde__m128i psi_i_p5_p7 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rmi_5_5, y1i);
+    simde__m128i psi_i_p5_p5 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rmi_3_5, y1i);
+    simde__m128i psi_i_p5_p3 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rmi_1_5, y1i);
+    simde__m128i psi_i_p5_p1 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(rho_rpi_1_5, y1i);
+    simde__m128i psi_i_p5_m1 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(rho_rpi_3_5, y1i);
+    simde__m128i psi_i_p5_m3 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(rho_rpi_5_5, y1i);
+    simde__m128i psi_i_p5_m5 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(rho_rpi_7_5, y1i);
+    simde__m128i psi_i_p5_m7 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rmi_7_3, y1i);
+    simde__m128i psi_i_p3_p7 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rmi_5_3, y1i);
+    simde__m128i psi_i_p3_p5 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rmi_3_3, y1i);
+    simde__m128i psi_i_p3_p3 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rmi_1_3, y1i);
+    simde__m128i psi_i_p3_p1 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(rho_rpi_1_3, y1i);
+    simde__m128i psi_i_p3_m1 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(rho_rpi_3_3, y1i);
+    simde__m128i psi_i_p3_m3 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(rho_rpi_5_3, y1i);
+    simde__m128i psi_i_p3_m5 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(rho_rpi_7_3, y1i);
+    simde__m128i psi_i_p3_m7 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rmi_7_1, y1i);
+    simde__m128i psi_i_p1_p7 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rmi_5_1, y1i);
+    simde__m128i psi_i_p1_p5 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rmi_3_1, y1i);
+    simde__m128i psi_i_p1_p3 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rmi_1_1, y1i);
+    simde__m128i psi_i_p1_p1 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(rho_rpi_1_1, y1i);
+    simde__m128i psi_i_p1_m1 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(rho_rpi_3_1, y1i);
+    simde__m128i psi_i_p1_m3 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(rho_rpi_5_1, y1i);
+    simde__m128i psi_i_p1_m5 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(rho_rpi_7_1, y1i);
+    simde__m128i psi_i_p1_m7 = simde_mm_abs_epi16(xmm2);
+
+    xmm2 = simde_mm_subs_epi16(rho_rpi_7_1, y1i);
+    simde__m128i psi_i_m1_p7 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rpi_5_1, y1i);
+    simde__m128i psi_i_m1_p5 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rpi_3_1, y1i);
+    simde__m128i psi_i_m1_p3 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rpi_1_1, y1i);
+    simde__m128i psi_i_m1_p1 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(rho_rmi_1_1, y1i);
+    simde__m128i psi_i_m1_m1 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(rho_rmi_3_1, y1i);
+    simde__m128i psi_i_m1_m3 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(rho_rmi_5_1, y1i);
+    simde__m128i psi_i_m1_m5 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(rho_rmi_7_1, y1i);
+    simde__m128i psi_i_m1_m7 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rpi_7_3, y1i);
+    simde__m128i psi_i_m3_p7 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rpi_5_3, y1i);
+    simde__m128i psi_i_m3_p5 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rpi_3_3, y1i);
+    simde__m128i psi_i_m3_p3 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rpi_1_3, y1i);
+    simde__m128i psi_i_m3_p1 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(rho_rmi_1_3, y1i);
+    simde__m128i psi_i_m3_m1 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(rho_rmi_3_3, y1i);
+    simde__m128i psi_i_m3_m3 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(rho_rmi_5_3, y1i);
+    simde__m128i psi_i_m3_m5 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(rho_rmi_7_3, y1i);
+    simde__m128i psi_i_m3_m7 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rpi_7_5, y1i);
+    simde__m128i psi_i_m5_p7 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rpi_5_5, y1i);
+    simde__m128i psi_i_m5_p5 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rpi_3_5, y1i);
+    simde__m128i psi_i_m5_p3 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rpi_1_5, y1i);
+    simde__m128i psi_i_m5_p1 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(rho_rmi_1_5, y1i);
+    simde__m128i psi_i_m5_m1 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(rho_rmi_3_5, y1i);
+    simde__m128i psi_i_m5_m3 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(rho_rmi_5_5, y1i);
+    simde__m128i psi_i_m5_m5 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(rho_rmi_7_5, y1i);
+    simde__m128i psi_i_m5_m7 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rpi_7_7, y1i);
+    simde__m128i psi_i_m7_p7 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rpi_5_7, y1i);
+    simde__m128i psi_i_m7_p5 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rpi_3_7, y1i);
+    simde__m128i psi_i_m7_p3 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rpi_1_7, y1i);
+    simde__m128i psi_i_m7_p1 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(rho_rmi_1_7, y1i);
+    simde__m128i psi_i_m7_m1 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(rho_rmi_3_7, y1i);
+    simde__m128i psi_i_m7_m3 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(rho_rmi_5_7, y1i);
+    simde__m128i psi_i_m7_m5 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(rho_rmi_7_7, y1i);
+    simde__m128i psi_i_m7_m7 = simde_mm_abs_epi16(xmm2);
 
     // Rearrange desired MF output
     xmm0 = stream0_128i_in[i];
     xmm1 = stream0_128i_in[i+1];
-    xmm0 = _mm_shufflelo_epi16(xmm0,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm0 = _mm_shufflehi_epi16(xmm0,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm0 = _mm_shuffle_epi32(xmm0,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm1 = _mm_shufflelo_epi16(xmm1,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm1 = _mm_shufflehi_epi16(xmm1,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm1 = _mm_shuffle_epi32(xmm1,0xd8); //_MM_SHUFFLE(0,2,1,3));
+    xmm0 = simde_mm_shufflelo_epi16(xmm0, 0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3)
+    xmm0 = simde_mm_shufflehi_epi16(xmm0, 0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3)
+    xmm0 = simde_mm_shuffle_epi32(xmm0, 0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3)
+    xmm1 = simde_mm_shufflelo_epi16(xmm1, 0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3)
+    xmm1 = simde_mm_shufflehi_epi16(xmm1, 0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3)
+    xmm1 = simde_mm_shuffle_epi32(xmm1, 0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3)
     //xmm0 = [Re(0,1) Re(2,3) Im(0,1) Im(2,3)]
     //xmm1 = [Re(4,5) Re(6,7) Im(4,5) Im(6,7)]
-    simde__m128i y0r = _mm_unpacklo_epi64(xmm0, xmm1); // = [y0r(1),y0r(2),y0r(3),y0r(4)]
-    simde__m128i y0i = _mm_unpackhi_epi64(xmm0, xmm1);
+    simde__m128i y0r = simde_mm_unpacklo_epi64(xmm0, xmm1); // = [y0r(1),y0r(2),y0r(3),y0r(4)]
+    simde__m128i y0i = simde_mm_unpackhi_epi64(xmm0, xmm1);
 
     // Rearrange desired channel magnitudes
     xmm2 = ch_mag_128i[i]; // = [|h|^2(1),|h|^2(1),|h|^2(2),|h|^2(2)]*(2/sqrt(10))
     xmm3 = ch_mag_128i[i+1]; // = [|h|^2(3),|h|^2(3),|h|^2(4),|h|^2(4)]*(2/sqrt(10))
-    xmm2 = _mm_shufflelo_epi16(xmm2,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm2 = _mm_shufflehi_epi16(xmm2,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm2 = _mm_shuffle_epi32(xmm2,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm3 = _mm_shufflelo_epi16(xmm3,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm3 = _mm_shufflehi_epi16(xmm3,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm3 = _mm_shuffle_epi32(xmm3,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    ch_mag_des = _mm_unpacklo_epi64(xmm2,xmm3);
-
-    y0r_one_over_sqrt_21   = _mm_mulhi_epi16(y0r, ONE_OVER_SQRT_42);
-    y0r_three_over_sqrt_21 = _mm_mulhi_epi16(y0r, THREE_OVER_SQRT_42);
-    y0r_five_over_sqrt_21  = _mm_mulhi_epi16(y0r, FIVE_OVER_SQRT_42);
-    y0r_five_over_sqrt_21  = _mm_slli_epi16(y0r_five_over_sqrt_21, 1);
-    y0r_seven_over_sqrt_21 = _mm_mulhi_epi16(y0r, SEVEN_OVER_SQRT_42);
-    y0r_seven_over_sqrt_21 = _mm_slli_epi16(y0r_seven_over_sqrt_21, 2); // Q2.14
-
-    y0i_one_over_sqrt_21   = _mm_mulhi_epi16(y0i, ONE_OVER_SQRT_42);
-    y0i_three_over_sqrt_21 = _mm_mulhi_epi16(y0i, THREE_OVER_SQRT_42);
-    y0i_five_over_sqrt_21  = _mm_mulhi_epi16(y0i, FIVE_OVER_SQRT_42);
-    y0i_five_over_sqrt_21  = _mm_slli_epi16(y0i_five_over_sqrt_21, 1);
-    y0i_seven_over_sqrt_21 = _mm_mulhi_epi16(y0i, SEVEN_OVER_SQRT_42);
-    y0i_seven_over_sqrt_21 = _mm_slli_epi16(y0i_seven_over_sqrt_21, 2); // Q2.14
-
-    simde__m128i y0_p_7_1 = _mm_adds_epi16(y0r_seven_over_sqrt_21, y0i_one_over_sqrt_21);
-    simde__m128i y0_p_7_3 = _mm_adds_epi16(y0r_seven_over_sqrt_21, y0i_three_over_sqrt_21);
-    simde__m128i y0_p_7_5 = _mm_adds_epi16(y0r_seven_over_sqrt_21, y0i_five_over_sqrt_21);
-    simde__m128i y0_p_7_7 = _mm_adds_epi16(y0r_seven_over_sqrt_21, y0i_seven_over_sqrt_21);
-    simde__m128i y0_p_5_1 = _mm_adds_epi16(y0r_five_over_sqrt_21, y0i_one_over_sqrt_21);
-    simde__m128i y0_p_5_3 = _mm_adds_epi16(y0r_five_over_sqrt_21, y0i_three_over_sqrt_21);
-    simde__m128i y0_p_5_5 = _mm_adds_epi16(y0r_five_over_sqrt_21, y0i_five_over_sqrt_21);
-    simde__m128i y0_p_5_7 = _mm_adds_epi16(y0r_five_over_sqrt_21, y0i_seven_over_sqrt_21);
-    simde__m128i y0_p_3_1 = _mm_adds_epi16(y0r_three_over_sqrt_21, y0i_one_over_sqrt_21);
-    simde__m128i y0_p_3_3 = _mm_adds_epi16(y0r_three_over_sqrt_21, y0i_three_over_sqrt_21);
-    simde__m128i y0_p_3_5 = _mm_adds_epi16(y0r_three_over_sqrt_21, y0i_five_over_sqrt_21);
-    simde__m128i y0_p_3_7 = _mm_adds_epi16(y0r_three_over_sqrt_21, y0i_seven_over_sqrt_21);
-    simde__m128i y0_p_1_1 = _mm_adds_epi16(y0r_one_over_sqrt_21, y0i_one_over_sqrt_21);
-    simde__m128i y0_p_1_3 = _mm_adds_epi16(y0r_one_over_sqrt_21, y0i_three_over_sqrt_21);
-    simde__m128i y0_p_1_5 = _mm_adds_epi16(y0r_one_over_sqrt_21, y0i_five_over_sqrt_21);
-    simde__m128i y0_p_1_7 = _mm_adds_epi16(y0r_one_over_sqrt_21, y0i_seven_over_sqrt_21);
-
-    simde__m128i y0_m_1_1 = _mm_subs_epi16(y0r_one_over_sqrt_21, y0i_one_over_sqrt_21);
-    simde__m128i y0_m_1_3 = _mm_subs_epi16(y0r_one_over_sqrt_21, y0i_three_over_sqrt_21);
-    simde__m128i y0_m_1_5 = _mm_subs_epi16(y0r_one_over_sqrt_21, y0i_five_over_sqrt_21);
-    simde__m128i y0_m_1_7 = _mm_subs_epi16(y0r_one_over_sqrt_21, y0i_seven_over_sqrt_21);
-    simde__m128i y0_m_3_1 = _mm_subs_epi16(y0r_three_over_sqrt_21, y0i_one_over_sqrt_21);
-    simde__m128i y0_m_3_3 = _mm_subs_epi16(y0r_three_over_sqrt_21, y0i_three_over_sqrt_21);
-    simde__m128i y0_m_3_5 = _mm_subs_epi16(y0r_three_over_sqrt_21, y0i_five_over_sqrt_21);
-    simde__m128i y0_m_3_7 = _mm_subs_epi16(y0r_three_over_sqrt_21, y0i_seven_over_sqrt_21);
-    simde__m128i y0_m_5_1 = _mm_subs_epi16(y0r_five_over_sqrt_21, y0i_one_over_sqrt_21);
-    simde__m128i y0_m_5_3 = _mm_subs_epi16(y0r_five_over_sqrt_21, y0i_three_over_sqrt_21);
-    simde__m128i y0_m_5_5 = _mm_subs_epi16(y0r_five_over_sqrt_21, y0i_five_over_sqrt_21);
-    simde__m128i y0_m_5_7 = _mm_subs_epi16(y0r_five_over_sqrt_21, y0i_seven_over_sqrt_21);
-    simde__m128i y0_m_7_1 = _mm_subs_epi16(y0r_seven_over_sqrt_21, y0i_one_over_sqrt_21);
-    simde__m128i y0_m_7_3 = _mm_subs_epi16(y0r_seven_over_sqrt_21, y0i_three_over_sqrt_21);
-    simde__m128i y0_m_7_5 = _mm_subs_epi16(y0r_seven_over_sqrt_21, y0i_five_over_sqrt_21);
-    simde__m128i y0_m_7_7 = _mm_subs_epi16(y0r_seven_over_sqrt_21, y0i_seven_over_sqrt_21);
+    xmm2 = simde_mm_shufflelo_epi16(xmm2, 0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3)
+    xmm2 = simde_mm_shufflehi_epi16(xmm2, 0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3)
+    xmm2 = simde_mm_shuffle_epi32(xmm2, 0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3)
+    xmm3 = simde_mm_shufflelo_epi16(xmm3, 0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3)
+    xmm3 = simde_mm_shufflehi_epi16(xmm3, 0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3)
+    xmm3 = simde_mm_shuffle_epi32(xmm3, 0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3)
+    ch_mag_des = simde_mm_unpacklo_epi64(xmm2, xmm3);
+
+    y0r_one_over_sqrt_21   = simde_mm_mulhi_epi16(y0r, ONE_OVER_SQRT_42);
+    y0r_three_over_sqrt_21 = simde_mm_mulhi_epi16(y0r, THREE_OVER_SQRT_42);
+    y0r_five_over_sqrt_21  = simde_mm_mulhi_epi16(y0r, FIVE_OVER_SQRT_42);
+    y0r_five_over_sqrt_21  = simde_mm_slli_epi16(y0r_five_over_sqrt_21, 1);
+    y0r_seven_over_sqrt_21 = simde_mm_mulhi_epi16(y0r, SEVEN_OVER_SQRT_42);
+    y0r_seven_over_sqrt_21 = simde_mm_slli_epi16(y0r_seven_over_sqrt_21, 2); // Q2.14
+
+    y0i_one_over_sqrt_21   = simde_mm_mulhi_epi16(y0i, ONE_OVER_SQRT_42);
+    y0i_three_over_sqrt_21 = simde_mm_mulhi_epi16(y0i, THREE_OVER_SQRT_42);
+    y0i_five_over_sqrt_21  = simde_mm_mulhi_epi16(y0i, FIVE_OVER_SQRT_42);
+    y0i_five_over_sqrt_21  = simde_mm_slli_epi16(y0i_five_over_sqrt_21, 1);
+    y0i_seven_over_sqrt_21 = simde_mm_mulhi_epi16(y0i, SEVEN_OVER_SQRT_42);
+    y0i_seven_over_sqrt_21 = simde_mm_slli_epi16(y0i_seven_over_sqrt_21, 2); // Q2.14
+
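+    // y0_p_x_y = (x*y0r + y*y0i)/sqrt(42) = Re{y0 * conj(s)} for the 64-QAM point
+    // s = (x + jy)/sqrt(42); the y0_m_x_y variants (y0r*x - y0i*y) cover the points
+    // with negative imaginary part.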
+    simde__m128i y0_p_7_1 = simde_mm_adds_epi16(y0r_seven_over_sqrt_21, y0i_one_over_sqrt_21);
+    simde__m128i y0_p_7_3 = simde_mm_adds_epi16(y0r_seven_over_sqrt_21, y0i_three_over_sqrt_21);
+    simde__m128i y0_p_7_5 = simde_mm_adds_epi16(y0r_seven_over_sqrt_21, y0i_five_over_sqrt_21);
+    simde__m128i y0_p_7_7 = simde_mm_adds_epi16(y0r_seven_over_sqrt_21, y0i_seven_over_sqrt_21);
+    simde__m128i y0_p_5_1 = simde_mm_adds_epi16(y0r_five_over_sqrt_21, y0i_one_over_sqrt_21);
+    simde__m128i y0_p_5_3 = simde_mm_adds_epi16(y0r_five_over_sqrt_21, y0i_three_over_sqrt_21);
+    simde__m128i y0_p_5_5 = simde_mm_adds_epi16(y0r_five_over_sqrt_21, y0i_five_over_sqrt_21);
+    simde__m128i y0_p_5_7 = simde_mm_adds_epi16(y0r_five_over_sqrt_21, y0i_seven_over_sqrt_21);
+    simde__m128i y0_p_3_1 = simde_mm_adds_epi16(y0r_three_over_sqrt_21, y0i_one_over_sqrt_21);
+    simde__m128i y0_p_3_3 = simde_mm_adds_epi16(y0r_three_over_sqrt_21, y0i_three_over_sqrt_21);
+    simde__m128i y0_p_3_5 = simde_mm_adds_epi16(y0r_three_over_sqrt_21, y0i_five_over_sqrt_21);
+    simde__m128i y0_p_3_7 = simde_mm_adds_epi16(y0r_three_over_sqrt_21, y0i_seven_over_sqrt_21);
+    simde__m128i y0_p_1_1 = simde_mm_adds_epi16(y0r_one_over_sqrt_21, y0i_one_over_sqrt_21);
+    simde__m128i y0_p_1_3 = simde_mm_adds_epi16(y0r_one_over_sqrt_21, y0i_three_over_sqrt_21);
+    simde__m128i y0_p_1_5 = simde_mm_adds_epi16(y0r_one_over_sqrt_21, y0i_five_over_sqrt_21);
+    simde__m128i y0_p_1_7 = simde_mm_adds_epi16(y0r_one_over_sqrt_21, y0i_seven_over_sqrt_21);
+
+    simde__m128i y0_m_1_1 = simde_mm_subs_epi16(y0r_one_over_sqrt_21, y0i_one_over_sqrt_21);
+    simde__m128i y0_m_1_3 = simde_mm_subs_epi16(y0r_one_over_sqrt_21, y0i_three_over_sqrt_21);
+    simde__m128i y0_m_1_5 = simde_mm_subs_epi16(y0r_one_over_sqrt_21, y0i_five_over_sqrt_21);
+    simde__m128i y0_m_1_7 = simde_mm_subs_epi16(y0r_one_over_sqrt_21, y0i_seven_over_sqrt_21);
+    simde__m128i y0_m_3_1 = simde_mm_subs_epi16(y0r_three_over_sqrt_21, y0i_one_over_sqrt_21);
+    simde__m128i y0_m_3_3 = simde_mm_subs_epi16(y0r_three_over_sqrt_21, y0i_three_over_sqrt_21);
+    simde__m128i y0_m_3_5 = simde_mm_subs_epi16(y0r_three_over_sqrt_21, y0i_five_over_sqrt_21);
+    simde__m128i y0_m_3_7 = simde_mm_subs_epi16(y0r_three_over_sqrt_21, y0i_seven_over_sqrt_21);
+    simde__m128i y0_m_5_1 = simde_mm_subs_epi16(y0r_five_over_sqrt_21, y0i_one_over_sqrt_21);
+    simde__m128i y0_m_5_3 = simde_mm_subs_epi16(y0r_five_over_sqrt_21, y0i_three_over_sqrt_21);
+    simde__m128i y0_m_5_5 = simde_mm_subs_epi16(y0r_five_over_sqrt_21, y0i_five_over_sqrt_21);
+    simde__m128i y0_m_5_7 = simde_mm_subs_epi16(y0r_five_over_sqrt_21, y0i_seven_over_sqrt_21);
+    simde__m128i y0_m_7_1 = simde_mm_subs_epi16(y0r_seven_over_sqrt_21, y0i_one_over_sqrt_21);
+    simde__m128i y0_m_7_3 = simde_mm_subs_epi16(y0r_seven_over_sqrt_21, y0i_three_over_sqrt_21);
+    simde__m128i y0_m_7_5 = simde_mm_subs_epi16(y0r_seven_over_sqrt_21, y0i_five_over_sqrt_21);
+    simde__m128i y0_m_7_7 = simde_mm_subs_epi16(y0r_seven_over_sqrt_21, y0i_seven_over_sqrt_21);
 
     // divide by sqrt(2)
-    psi_r_p7_p7 = _mm_mulhi_epi16(psi_r_p7_p7, ONE_OVER_SQRT_2);
-    psi_r_p7_p7 = _mm_slli_epi16(psi_r_p7_p7, 1);
-    psi_r_p7_p5 = _mm_mulhi_epi16(psi_r_p7_p5, ONE_OVER_SQRT_2);
-    psi_r_p7_p5 = _mm_slli_epi16(psi_r_p7_p5, 1);
-    psi_r_p7_p3 = _mm_mulhi_epi16(psi_r_p7_p3, ONE_OVER_SQRT_2);
-    psi_r_p7_p3 = _mm_slli_epi16(psi_r_p7_p3, 1);
-    psi_r_p7_p1 = _mm_mulhi_epi16(psi_r_p7_p1, ONE_OVER_SQRT_2);
-    psi_r_p7_p1 = _mm_slli_epi16(psi_r_p7_p1, 1);
-    psi_r_p7_m1 = _mm_mulhi_epi16(psi_r_p7_m1, ONE_OVER_SQRT_2);
-    psi_r_p7_m1 = _mm_slli_epi16(psi_r_p7_m1, 1);
-    psi_r_p7_m3 = _mm_mulhi_epi16(psi_r_p7_m3, ONE_OVER_SQRT_2);
-    psi_r_p7_m3 = _mm_slli_epi16(psi_r_p7_m3, 1);
-    psi_r_p7_m5 = _mm_mulhi_epi16(psi_r_p7_m5, ONE_OVER_SQRT_2);
-    psi_r_p7_m5 = _mm_slli_epi16(psi_r_p7_m5, 1);
-    psi_r_p7_m7 = _mm_mulhi_epi16(psi_r_p7_m7, ONE_OVER_SQRT_2);
-    psi_r_p7_m7 = _mm_slli_epi16(psi_r_p7_m7, 1);
-    psi_r_p5_p7 = _mm_mulhi_epi16(psi_r_p5_p7, ONE_OVER_SQRT_2);
-    psi_r_p5_p7 = _mm_slli_epi16(psi_r_p5_p7, 1);
-    psi_r_p5_p5 = _mm_mulhi_epi16(psi_r_p5_p5, ONE_OVER_SQRT_2);
-    psi_r_p5_p5 = _mm_slli_epi16(psi_r_p5_p5, 1);
-    psi_r_p5_p3 = _mm_mulhi_epi16(psi_r_p5_p3, ONE_OVER_SQRT_2);
-    psi_r_p5_p3 = _mm_slli_epi16(psi_r_p5_p3, 1);
-    psi_r_p5_p1 = _mm_mulhi_epi16(psi_r_p5_p1, ONE_OVER_SQRT_2);
-    psi_r_p5_p1 = _mm_slli_epi16(psi_r_p5_p1, 1);
-    psi_r_p5_m1 = _mm_mulhi_epi16(psi_r_p5_m1, ONE_OVER_SQRT_2);
-    psi_r_p5_m1 = _mm_slli_epi16(psi_r_p5_m1, 1);
-    psi_r_p5_m3 = _mm_mulhi_epi16(psi_r_p5_m3, ONE_OVER_SQRT_2);
-    psi_r_p5_m3 = _mm_slli_epi16(psi_r_p5_m3, 1);
-    psi_r_p5_m5 = _mm_mulhi_epi16(psi_r_p5_m5, ONE_OVER_SQRT_2);
-    psi_r_p5_m5 = _mm_slli_epi16(psi_r_p5_m5, 1);
-    psi_r_p5_m7 = _mm_mulhi_epi16(psi_r_p5_m7, ONE_OVER_SQRT_2);
-    psi_r_p5_m7 = _mm_slli_epi16(psi_r_p5_m7, 1);
-    psi_r_p3_p7 = _mm_mulhi_epi16(psi_r_p3_p7, ONE_OVER_SQRT_2);
-    psi_r_p3_p7 = _mm_slli_epi16(psi_r_p3_p7, 1);
-    psi_r_p3_p5 = _mm_mulhi_epi16(psi_r_p3_p5, ONE_OVER_SQRT_2);
-    psi_r_p3_p5 = _mm_slli_epi16(psi_r_p3_p5, 1);
-    psi_r_p3_p3 = _mm_mulhi_epi16(psi_r_p3_p3, ONE_OVER_SQRT_2);
-    psi_r_p3_p3 = _mm_slli_epi16(psi_r_p3_p3, 1);
-    psi_r_p3_p1 = _mm_mulhi_epi16(psi_r_p3_p1, ONE_OVER_SQRT_2);
-    psi_r_p3_p1 = _mm_slli_epi16(psi_r_p3_p1, 1);
-    psi_r_p3_m1 = _mm_mulhi_epi16(psi_r_p3_m1, ONE_OVER_SQRT_2);
-    psi_r_p3_m1 = _mm_slli_epi16(psi_r_p3_m1, 1);
-    psi_r_p3_m3 = _mm_mulhi_epi16(psi_r_p3_m3, ONE_OVER_SQRT_2);
-    psi_r_p3_m3 = _mm_slli_epi16(psi_r_p3_m3, 1);
-    psi_r_p3_m5 = _mm_mulhi_epi16(psi_r_p3_m5, ONE_OVER_SQRT_2);
-    psi_r_p3_m5 = _mm_slli_epi16(psi_r_p3_m5, 1);
-    psi_r_p3_m7 = _mm_mulhi_epi16(psi_r_p3_m7, ONE_OVER_SQRT_2);
-    psi_r_p3_m7 = _mm_slli_epi16(psi_r_p3_m7, 1);
-    psi_r_p1_p7 = _mm_mulhi_epi16(psi_r_p1_p7, ONE_OVER_SQRT_2);
-    psi_r_p1_p7 = _mm_slli_epi16(psi_r_p1_p7, 1);
-    psi_r_p1_p5 = _mm_mulhi_epi16(psi_r_p1_p5, ONE_OVER_SQRT_2);
-    psi_r_p1_p5 = _mm_slli_epi16(psi_r_p1_p5, 1);
-    psi_r_p1_p3 = _mm_mulhi_epi16(psi_r_p1_p3, ONE_OVER_SQRT_2);
-    psi_r_p1_p3 = _mm_slli_epi16(psi_r_p1_p3, 1);
-    psi_r_p1_p1 = _mm_mulhi_epi16(psi_r_p1_p1, ONE_OVER_SQRT_2);
-    psi_r_p1_p1 = _mm_slli_epi16(psi_r_p1_p1, 1);
-    psi_r_p1_m1 = _mm_mulhi_epi16(psi_r_p1_m1, ONE_OVER_SQRT_2);
-    psi_r_p1_m1 = _mm_slli_epi16(psi_r_p1_m1, 1);
-    psi_r_p1_m3 = _mm_mulhi_epi16(psi_r_p1_m3, ONE_OVER_SQRT_2);
-    psi_r_p1_m3 = _mm_slli_epi16(psi_r_p1_m3, 1);
-    psi_r_p1_m5 = _mm_mulhi_epi16(psi_r_p1_m5, ONE_OVER_SQRT_2);
-    psi_r_p1_m5 = _mm_slli_epi16(psi_r_p1_m5, 1);
-    psi_r_p1_m7 = _mm_mulhi_epi16(psi_r_p1_m7, ONE_OVER_SQRT_2);
-    psi_r_p1_m7 = _mm_slli_epi16(psi_r_p1_m7, 1);
-    psi_r_m1_p7 = _mm_mulhi_epi16(psi_r_m1_p7, ONE_OVER_SQRT_2);
-    psi_r_m1_p7 = _mm_slli_epi16(psi_r_m1_p7, 1);
-    psi_r_m1_p5 = _mm_mulhi_epi16(psi_r_m1_p5, ONE_OVER_SQRT_2);
-    psi_r_m1_p5 = _mm_slli_epi16(psi_r_m1_p5, 1);
-    psi_r_m1_p3 = _mm_mulhi_epi16(psi_r_m1_p3, ONE_OVER_SQRT_2);
-    psi_r_m1_p3 = _mm_slli_epi16(psi_r_m1_p3, 1);
-    psi_r_m1_p1 = _mm_mulhi_epi16(psi_r_m1_p1, ONE_OVER_SQRT_2);
-    psi_r_m1_p1 = _mm_slli_epi16(psi_r_m1_p1, 1);
-    psi_r_m1_m1 = _mm_mulhi_epi16(psi_r_m1_m1, ONE_OVER_SQRT_2);
-    psi_r_m1_m1 = _mm_slli_epi16(psi_r_m1_m1, 1);
-    psi_r_m1_m3 = _mm_mulhi_epi16(psi_r_m1_m3, ONE_OVER_SQRT_2);
-    psi_r_m1_m3 = _mm_slli_epi16(psi_r_m1_m3, 1);
-    psi_r_m1_m5 = _mm_mulhi_epi16(psi_r_m1_m5, ONE_OVER_SQRT_2);
-    psi_r_m1_m5 = _mm_slli_epi16(psi_r_m1_m5, 1);
-    psi_r_m1_m7 = _mm_mulhi_epi16(psi_r_m1_m7, ONE_OVER_SQRT_2);
-    psi_r_m1_m7 = _mm_slli_epi16(psi_r_m1_m7, 1);
-    psi_r_m3_p7 = _mm_mulhi_epi16(psi_r_m3_p7, ONE_OVER_SQRT_2);
-    psi_r_m3_p7 = _mm_slli_epi16(psi_r_m3_p7, 1);
-    psi_r_m3_p5 = _mm_mulhi_epi16(psi_r_m3_p5, ONE_OVER_SQRT_2);
-    psi_r_m3_p5 = _mm_slli_epi16(psi_r_m3_p5, 1);
-    psi_r_m3_p3 = _mm_mulhi_epi16(psi_r_m3_p3, ONE_OVER_SQRT_2);
-    psi_r_m3_p3 = _mm_slli_epi16(psi_r_m3_p3, 1);
-    psi_r_m3_p1 = _mm_mulhi_epi16(psi_r_m3_p1, ONE_OVER_SQRT_2);
-    psi_r_m3_p1 = _mm_slli_epi16(psi_r_m3_p1, 1);
-    psi_r_m3_m1 = _mm_mulhi_epi16(psi_r_m3_m1, ONE_OVER_SQRT_2);
-    psi_r_m3_m1 = _mm_slli_epi16(psi_r_m3_m1, 1);
-    psi_r_m3_m3 = _mm_mulhi_epi16(psi_r_m3_m3, ONE_OVER_SQRT_2);
-    psi_r_m3_m3 = _mm_slli_epi16(psi_r_m3_m3, 1);
-    psi_r_m3_m5 = _mm_mulhi_epi16(psi_r_m3_m5, ONE_OVER_SQRT_2);
-    psi_r_m3_m5 = _mm_slli_epi16(psi_r_m3_m5, 1);
-    psi_r_m3_m7 = _mm_mulhi_epi16(psi_r_m3_m7, ONE_OVER_SQRT_2);
-    psi_r_m3_m7 = _mm_slli_epi16(psi_r_m3_m7, 1);
-    psi_r_m5_p7 = _mm_mulhi_epi16(psi_r_m5_p7, ONE_OVER_SQRT_2);
-    psi_r_m5_p7 = _mm_slli_epi16(psi_r_m5_p7, 1);
-    psi_r_m5_p5 = _mm_mulhi_epi16(psi_r_m5_p5, ONE_OVER_SQRT_2);
-    psi_r_m5_p5 = _mm_slli_epi16(psi_r_m5_p5, 1);
-    psi_r_m5_p3 = _mm_mulhi_epi16(psi_r_m5_p3, ONE_OVER_SQRT_2);
-    psi_r_m5_p3 = _mm_slli_epi16(psi_r_m5_p3, 1);
-    psi_r_m5_p1 = _mm_mulhi_epi16(psi_r_m5_p1, ONE_OVER_SQRT_2);
-    psi_r_m5_p1 = _mm_slli_epi16(psi_r_m5_p1, 1);
-    psi_r_m5_m1 = _mm_mulhi_epi16(psi_r_m5_m1, ONE_OVER_SQRT_2);
-    psi_r_m5_m1 = _mm_slli_epi16(psi_r_m5_m1, 1);
-    psi_r_m5_m3 = _mm_mulhi_epi16(psi_r_m5_m3, ONE_OVER_SQRT_2);
-    psi_r_m5_m3 = _mm_slli_epi16(psi_r_m5_m3, 1);
-    psi_r_m5_m5 = _mm_mulhi_epi16(psi_r_m5_m5, ONE_OVER_SQRT_2);
-    psi_r_m5_m5 = _mm_slli_epi16(psi_r_m5_m5, 1);
-    psi_r_m5_m7 = _mm_mulhi_epi16(psi_r_m5_m7, ONE_OVER_SQRT_2);
-    psi_r_m5_m7 = _mm_slli_epi16(psi_r_m5_m7, 1);
-    psi_r_m7_p7 = _mm_mulhi_epi16(psi_r_m7_p7, ONE_OVER_SQRT_2);
-    psi_r_m7_p7 = _mm_slli_epi16(psi_r_m7_p7, 1);
-    psi_r_m7_p5 = _mm_mulhi_epi16(psi_r_m7_p5, ONE_OVER_SQRT_2);
-    psi_r_m7_p5 = _mm_slli_epi16(psi_r_m7_p5, 1);
-    psi_r_m7_p3 = _mm_mulhi_epi16(psi_r_m7_p3, ONE_OVER_SQRT_2);
-    psi_r_m7_p3 = _mm_slli_epi16(psi_r_m7_p3, 1);
-    psi_r_m7_p1 = _mm_mulhi_epi16(psi_r_m7_p1, ONE_OVER_SQRT_2);
-    psi_r_m7_p1 = _mm_slli_epi16(psi_r_m7_p1, 1);
-    psi_r_m7_m1 = _mm_mulhi_epi16(psi_r_m7_m1, ONE_OVER_SQRT_2);
-    psi_r_m7_m1 = _mm_slli_epi16(psi_r_m7_m1, 1);
-    psi_r_m7_m3 = _mm_mulhi_epi16(psi_r_m7_m3, ONE_OVER_SQRT_2);
-    psi_r_m7_m3 = _mm_slli_epi16(psi_r_m7_m3, 1);
-    psi_r_m7_m5 = _mm_mulhi_epi16(psi_r_m7_m5, ONE_OVER_SQRT_2);
-    psi_r_m7_m5 = _mm_slli_epi16(psi_r_m7_m5, 1);
-    psi_r_m7_m7 = _mm_mulhi_epi16(psi_r_m7_m7, ONE_OVER_SQRT_2);
-    psi_r_m7_m7 = _mm_slli_epi16(psi_r_m7_m7, 1);
-
-    psi_i_p7_p7 = _mm_mulhi_epi16(psi_i_p7_p7, ONE_OVER_SQRT_2);
-    psi_i_p7_p7 = _mm_slli_epi16(psi_i_p7_p7, 1);
-    psi_i_p7_p5 = _mm_mulhi_epi16(psi_i_p7_p5, ONE_OVER_SQRT_2);
-    psi_i_p7_p5 = _mm_slli_epi16(psi_i_p7_p5, 1);
-    psi_i_p7_p3 = _mm_mulhi_epi16(psi_i_p7_p3, ONE_OVER_SQRT_2);
-    psi_i_p7_p3 = _mm_slli_epi16(psi_i_p7_p3, 1);
-    psi_i_p7_p1 = _mm_mulhi_epi16(psi_i_p7_p1, ONE_OVER_SQRT_2);
-    psi_i_p7_p1 = _mm_slli_epi16(psi_i_p7_p1, 1);
-    psi_i_p7_m1 = _mm_mulhi_epi16(psi_i_p7_m1, ONE_OVER_SQRT_2);
-    psi_i_p7_m1 = _mm_slli_epi16(psi_i_p7_m1, 1);
-    psi_i_p7_m3 = _mm_mulhi_epi16(psi_i_p7_m3, ONE_OVER_SQRT_2);
-    psi_i_p7_m3 = _mm_slli_epi16(psi_i_p7_m3, 1);
-    psi_i_p7_m5 = _mm_mulhi_epi16(psi_i_p7_m5, ONE_OVER_SQRT_2);
-    psi_i_p7_m5 = _mm_slli_epi16(psi_i_p7_m5, 1);
-    psi_i_p7_m7 = _mm_mulhi_epi16(psi_i_p7_m7, ONE_OVER_SQRT_2);
-    psi_i_p7_m7 = _mm_slli_epi16(psi_i_p7_m7, 1);
-    psi_i_p5_p7 = _mm_mulhi_epi16(psi_i_p5_p7, ONE_OVER_SQRT_2);
-    psi_i_p5_p7 = _mm_slli_epi16(psi_i_p5_p7, 1);
-    psi_i_p5_p5 = _mm_mulhi_epi16(psi_i_p5_p5, ONE_OVER_SQRT_2);
-    psi_i_p5_p5 = _mm_slli_epi16(psi_i_p5_p5, 1);
-    psi_i_p5_p3 = _mm_mulhi_epi16(psi_i_p5_p3, ONE_OVER_SQRT_2);
-    psi_i_p5_p3 = _mm_slli_epi16(psi_i_p5_p3, 1);
-    psi_i_p5_p1 = _mm_mulhi_epi16(psi_i_p5_p1, ONE_OVER_SQRT_2);
-    psi_i_p5_p1 = _mm_slli_epi16(psi_i_p5_p1, 1);
-    psi_i_p5_m1 = _mm_mulhi_epi16(psi_i_p5_m1, ONE_OVER_SQRT_2);
-    psi_i_p5_m1 = _mm_slli_epi16(psi_i_p5_m1, 1);
-    psi_i_p5_m3 = _mm_mulhi_epi16(psi_i_p5_m3, ONE_OVER_SQRT_2);
-    psi_i_p5_m3 = _mm_slli_epi16(psi_i_p5_m3, 1);
-    psi_i_p5_m5 = _mm_mulhi_epi16(psi_i_p5_m5, ONE_OVER_SQRT_2);
-    psi_i_p5_m5 = _mm_slli_epi16(psi_i_p5_m5, 1);
-    psi_i_p5_m7 = _mm_mulhi_epi16(psi_i_p5_m7, ONE_OVER_SQRT_2);
-    psi_i_p5_m7 = _mm_slli_epi16(psi_i_p5_m7, 1);
-    psi_i_p3_p7 = _mm_mulhi_epi16(psi_i_p3_p7, ONE_OVER_SQRT_2);
-    psi_i_p3_p7 = _mm_slli_epi16(psi_i_p3_p7, 1);
-    psi_i_p3_p5 = _mm_mulhi_epi16(psi_i_p3_p5, ONE_OVER_SQRT_2);
-    psi_i_p3_p5 = _mm_slli_epi16(psi_i_p3_p5, 1);
-    psi_i_p3_p3 = _mm_mulhi_epi16(psi_i_p3_p3, ONE_OVER_SQRT_2);
-    psi_i_p3_p3 = _mm_slli_epi16(psi_i_p3_p3, 1);
-    psi_i_p3_p1 = _mm_mulhi_epi16(psi_i_p3_p1, ONE_OVER_SQRT_2);
-    psi_i_p3_p1 = _mm_slli_epi16(psi_i_p3_p1, 1);
-    psi_i_p3_m1 = _mm_mulhi_epi16(psi_i_p3_m1, ONE_OVER_SQRT_2);
-    psi_i_p3_m1 = _mm_slli_epi16(psi_i_p3_m1, 1);
-    psi_i_p3_m3 = _mm_mulhi_epi16(psi_i_p3_m3, ONE_OVER_SQRT_2);
-    psi_i_p3_m3 = _mm_slli_epi16(psi_i_p3_m3, 1);
-    psi_i_p3_m5 = _mm_mulhi_epi16(psi_i_p3_m5, ONE_OVER_SQRT_2);
-    psi_i_p3_m5 = _mm_slli_epi16(psi_i_p3_m5, 1);
-    psi_i_p3_m7 = _mm_mulhi_epi16(psi_i_p3_m7, ONE_OVER_SQRT_2);
-    psi_i_p3_m7 = _mm_slli_epi16(psi_i_p3_m7, 1);
-    psi_i_p1_p7 = _mm_mulhi_epi16(psi_i_p1_p7, ONE_OVER_SQRT_2);
-    psi_i_p1_p7 = _mm_slli_epi16(psi_i_p1_p7, 1);
-    psi_i_p1_p5 = _mm_mulhi_epi16(psi_i_p1_p5, ONE_OVER_SQRT_2);
-    psi_i_p1_p5 = _mm_slli_epi16(psi_i_p1_p5, 1);
-    psi_i_p1_p3 = _mm_mulhi_epi16(psi_i_p1_p3, ONE_OVER_SQRT_2);
-    psi_i_p1_p3 = _mm_slli_epi16(psi_i_p1_p3, 1);
-    psi_i_p1_p1 = _mm_mulhi_epi16(psi_i_p1_p1, ONE_OVER_SQRT_2);
-    psi_i_p1_p1 = _mm_slli_epi16(psi_i_p1_p1, 1);
-    psi_i_p1_m1 = _mm_mulhi_epi16(psi_i_p1_m1, ONE_OVER_SQRT_2);
-    psi_i_p1_m1 = _mm_slli_epi16(psi_i_p1_m1, 1);
-    psi_i_p1_m3 = _mm_mulhi_epi16(psi_i_p1_m3, ONE_OVER_SQRT_2);
-    psi_i_p1_m3 = _mm_slli_epi16(psi_i_p1_m3, 1);
-    psi_i_p1_m5 = _mm_mulhi_epi16(psi_i_p1_m5, ONE_OVER_SQRT_2);
-    psi_i_p1_m5 = _mm_slli_epi16(psi_i_p1_m5, 1);
-    psi_i_p1_m7 = _mm_mulhi_epi16(psi_i_p1_m7, ONE_OVER_SQRT_2);
-    psi_i_p1_m7 = _mm_slli_epi16(psi_i_p1_m7, 1);
-    psi_i_m1_p7 = _mm_mulhi_epi16(psi_i_m1_p7, ONE_OVER_SQRT_2);
-    psi_i_m1_p7 = _mm_slli_epi16(psi_i_m1_p7, 1);
-    psi_i_m1_p5 = _mm_mulhi_epi16(psi_i_m1_p5, ONE_OVER_SQRT_2);
-    psi_i_m1_p5 = _mm_slli_epi16(psi_i_m1_p5, 1);
-    psi_i_m1_p3 = _mm_mulhi_epi16(psi_i_m1_p3, ONE_OVER_SQRT_2);
-    psi_i_m1_p3 = _mm_slli_epi16(psi_i_m1_p3, 1);
-    psi_i_m1_p1 = _mm_mulhi_epi16(psi_i_m1_p1, ONE_OVER_SQRT_2);
-    psi_i_m1_p1 = _mm_slli_epi16(psi_i_m1_p1, 1);
-    psi_i_m1_m1 = _mm_mulhi_epi16(psi_i_m1_m1, ONE_OVER_SQRT_2);
-    psi_i_m1_m1 = _mm_slli_epi16(psi_i_m1_m1, 1);
-    psi_i_m1_m3 = _mm_mulhi_epi16(psi_i_m1_m3, ONE_OVER_SQRT_2);
-    psi_i_m1_m3 = _mm_slli_epi16(psi_i_m1_m3, 1);
-    psi_i_m1_m5 = _mm_mulhi_epi16(psi_i_m1_m5, ONE_OVER_SQRT_2);
-    psi_i_m1_m5 = _mm_slli_epi16(psi_i_m1_m5, 1);
-    psi_i_m1_m7 = _mm_mulhi_epi16(psi_i_m1_m7, ONE_OVER_SQRT_2);
-    psi_i_m1_m7 = _mm_slli_epi16(psi_i_m1_m7, 1);
-    psi_i_m3_p7 = _mm_mulhi_epi16(psi_i_m3_p7, ONE_OVER_SQRT_2);
-    psi_i_m3_p7 = _mm_slli_epi16(psi_i_m3_p7, 1);
-    psi_i_m3_p5 = _mm_mulhi_epi16(psi_i_m3_p5, ONE_OVER_SQRT_2);
-    psi_i_m3_p5 = _mm_slli_epi16(psi_i_m3_p5, 1);
-    psi_i_m3_p3 = _mm_mulhi_epi16(psi_i_m3_p3, ONE_OVER_SQRT_2);
-    psi_i_m3_p3 = _mm_slli_epi16(psi_i_m3_p3, 1);
-    psi_i_m3_p1 = _mm_mulhi_epi16(psi_i_m3_p1, ONE_OVER_SQRT_2);
-    psi_i_m3_p1 = _mm_slli_epi16(psi_i_m3_p1, 1);
-    psi_i_m3_m1 = _mm_mulhi_epi16(psi_i_m3_m1, ONE_OVER_SQRT_2);
-    psi_i_m3_m1 = _mm_slli_epi16(psi_i_m3_m1, 1);
-    psi_i_m3_m3 = _mm_mulhi_epi16(psi_i_m3_m3, ONE_OVER_SQRT_2);
-    psi_i_m3_m3 = _mm_slli_epi16(psi_i_m3_m3, 1);
-    psi_i_m3_m5 = _mm_mulhi_epi16(psi_i_m3_m5, ONE_OVER_SQRT_2);
-    psi_i_m3_m5 = _mm_slli_epi16(psi_i_m3_m5, 1);
-    psi_i_m3_m7 = _mm_mulhi_epi16(psi_i_m3_m7, ONE_OVER_SQRT_2);
-    psi_i_m3_m7 = _mm_slli_epi16(psi_i_m3_m7, 1);
-    psi_i_m5_p7 = _mm_mulhi_epi16(psi_i_m5_p7, ONE_OVER_SQRT_2);
-    psi_i_m5_p7 = _mm_slli_epi16(psi_i_m5_p7, 1);
-    psi_i_m5_p5 = _mm_mulhi_epi16(psi_i_m5_p5, ONE_OVER_SQRT_2);
-    psi_i_m5_p5 = _mm_slli_epi16(psi_i_m5_p5, 1);
-    psi_i_m5_p3 = _mm_mulhi_epi16(psi_i_m5_p3, ONE_OVER_SQRT_2);
-    psi_i_m5_p3 = _mm_slli_epi16(psi_i_m5_p3, 1);
-    psi_i_m5_p1 = _mm_mulhi_epi16(psi_i_m5_p1, ONE_OVER_SQRT_2);
-    psi_i_m5_p1 = _mm_slli_epi16(psi_i_m5_p1, 1);
-    psi_i_m5_m1 = _mm_mulhi_epi16(psi_i_m5_m1, ONE_OVER_SQRT_2);
-    psi_i_m5_m1 = _mm_slli_epi16(psi_i_m5_m1, 1);
-    psi_i_m5_m3 = _mm_mulhi_epi16(psi_i_m5_m3, ONE_OVER_SQRT_2);
-    psi_i_m5_m3 = _mm_slli_epi16(psi_i_m5_m3, 1);
-    psi_i_m5_m5 = _mm_mulhi_epi16(psi_i_m5_m5, ONE_OVER_SQRT_2);
-    psi_i_m5_m5 = _mm_slli_epi16(psi_i_m5_m5, 1);
-    psi_i_m5_m7 = _mm_mulhi_epi16(psi_i_m5_m7, ONE_OVER_SQRT_2);
-    psi_i_m5_m7 = _mm_slli_epi16(psi_i_m5_m7, 1);
-    psi_i_m7_p7 = _mm_mulhi_epi16(psi_i_m7_p7, ONE_OVER_SQRT_2);
-    psi_i_m7_p7 = _mm_slli_epi16(psi_i_m7_p7, 1);
-    psi_i_m7_p5 = _mm_mulhi_epi16(psi_i_m7_p5, ONE_OVER_SQRT_2);
-    psi_i_m7_p5 = _mm_slli_epi16(psi_i_m7_p5, 1);
-    psi_i_m7_p3 = _mm_mulhi_epi16(psi_i_m7_p3, ONE_OVER_SQRT_2);
-    psi_i_m7_p3 = _mm_slli_epi16(psi_i_m7_p3, 1);
-    psi_i_m7_p1 = _mm_mulhi_epi16(psi_i_m7_p1, ONE_OVER_SQRT_2);
-    psi_i_m7_p1 = _mm_slli_epi16(psi_i_m7_p1, 1);
-    psi_i_m7_m1 = _mm_mulhi_epi16(psi_i_m7_m1, ONE_OVER_SQRT_2);
-    psi_i_m7_m1 = _mm_slli_epi16(psi_i_m7_m1, 1);
-    psi_i_m7_m3 = _mm_mulhi_epi16(psi_i_m7_m3, ONE_OVER_SQRT_2);
-    psi_i_m7_m3 = _mm_slli_epi16(psi_i_m7_m3, 1);
-    psi_i_m7_m5 = _mm_mulhi_epi16(psi_i_m7_m5, ONE_OVER_SQRT_2);
-    psi_i_m7_m5 = _mm_slli_epi16(psi_i_m7_m5, 1);
-    psi_i_m7_m7 = _mm_mulhi_epi16(psi_i_m7_m7, ONE_OVER_SQRT_2);
-    psi_i_m7_m7 = _mm_slli_epi16(psi_i_m7_m7, 1);
-
-    simde__m128i psi_a_p7_p7 = _mm_adds_epi16(psi_r_p7_p7, psi_i_p7_p7);
-    simde__m128i psi_a_p7_p5 = _mm_adds_epi16(psi_r_p7_p5, psi_i_p7_p5);
-    simde__m128i psi_a_p7_p3 = _mm_adds_epi16(psi_r_p7_p3, psi_i_p7_p3);
-    simde__m128i psi_a_p7_p1 = _mm_adds_epi16(psi_r_p7_p1, psi_i_p7_p1);
-    simde__m128i psi_a_p7_m1 = _mm_adds_epi16(psi_r_p7_m1, psi_i_p7_m1);
-    simde__m128i psi_a_p7_m3 = _mm_adds_epi16(psi_r_p7_m3, psi_i_p7_m3);
-    simde__m128i psi_a_p7_m5 = _mm_adds_epi16(psi_r_p7_m5, psi_i_p7_m5);
-    simde__m128i psi_a_p7_m7 = _mm_adds_epi16(psi_r_p7_m7, psi_i_p7_m7);
-    simde__m128i psi_a_p5_p7 = _mm_adds_epi16(psi_r_p5_p7, psi_i_p5_p7);
-    simde__m128i psi_a_p5_p5 = _mm_adds_epi16(psi_r_p5_p5, psi_i_p5_p5);
-    simde__m128i psi_a_p5_p3 = _mm_adds_epi16(psi_r_p5_p3, psi_i_p5_p3);
-    simde__m128i psi_a_p5_p1 = _mm_adds_epi16(psi_r_p5_p1, psi_i_p5_p1);
-    simde__m128i psi_a_p5_m1 = _mm_adds_epi16(psi_r_p5_m1, psi_i_p5_m1);
-    simde__m128i psi_a_p5_m3 = _mm_adds_epi16(psi_r_p5_m3, psi_i_p5_m3);
-    simde__m128i psi_a_p5_m5 = _mm_adds_epi16(psi_r_p5_m5, psi_i_p5_m5);
-    simde__m128i psi_a_p5_m7 = _mm_adds_epi16(psi_r_p5_m7, psi_i_p5_m7);
-    simde__m128i psi_a_p3_p7 = _mm_adds_epi16(psi_r_p3_p7, psi_i_p3_p7);
-    simde__m128i psi_a_p3_p5 = _mm_adds_epi16(psi_r_p3_p5, psi_i_p3_p5);
-    simde__m128i psi_a_p3_p3 = _mm_adds_epi16(psi_r_p3_p3, psi_i_p3_p3);
-    simde__m128i psi_a_p3_p1 = _mm_adds_epi16(psi_r_p3_p1, psi_i_p3_p1);
-    simde__m128i psi_a_p3_m1 = _mm_adds_epi16(psi_r_p3_m1, psi_i_p3_m1);
-    simde__m128i psi_a_p3_m3 = _mm_adds_epi16(psi_r_p3_m3, psi_i_p3_m3);
-    simde__m128i psi_a_p3_m5 = _mm_adds_epi16(psi_r_p3_m5, psi_i_p3_m5);
-    simde__m128i psi_a_p3_m7 = _mm_adds_epi16(psi_r_p3_m7, psi_i_p3_m7);
-    simde__m128i psi_a_p1_p7 = _mm_adds_epi16(psi_r_p1_p7, psi_i_p1_p7);
-    simde__m128i psi_a_p1_p5 = _mm_adds_epi16(psi_r_p1_p5, psi_i_p1_p5);
-    simde__m128i psi_a_p1_p3 = _mm_adds_epi16(psi_r_p1_p3, psi_i_p1_p3);
-    simde__m128i psi_a_p1_p1 = _mm_adds_epi16(psi_r_p1_p1, psi_i_p1_p1);
-    simde__m128i psi_a_p1_m1 = _mm_adds_epi16(psi_r_p1_m1, psi_i_p1_m1);
-    simde__m128i psi_a_p1_m3 = _mm_adds_epi16(psi_r_p1_m3, psi_i_p1_m3);
-    simde__m128i psi_a_p1_m5 = _mm_adds_epi16(psi_r_p1_m5, psi_i_p1_m5);
-    simde__m128i psi_a_p1_m7 = _mm_adds_epi16(psi_r_p1_m7, psi_i_p1_m7);
-    simde__m128i psi_a_m1_p7 = _mm_adds_epi16(psi_r_m1_p7, psi_i_m1_p7);
-    simde__m128i psi_a_m1_p5 = _mm_adds_epi16(psi_r_m1_p5, psi_i_m1_p5);
-    simde__m128i psi_a_m1_p3 = _mm_adds_epi16(psi_r_m1_p3, psi_i_m1_p3);
-    simde__m128i psi_a_m1_p1 = _mm_adds_epi16(psi_r_m1_p1, psi_i_m1_p1);
-    simde__m128i psi_a_m1_m1 = _mm_adds_epi16(psi_r_m1_m1, psi_i_m1_m1);
-    simde__m128i psi_a_m1_m3 = _mm_adds_epi16(psi_r_m1_m3, psi_i_m1_m3);
-    simde__m128i psi_a_m1_m5 = _mm_adds_epi16(psi_r_m1_m5, psi_i_m1_m5);
-    simde__m128i psi_a_m1_m7 = _mm_adds_epi16(psi_r_m1_m7, psi_i_m1_m7);
-    simde__m128i psi_a_m3_p7 = _mm_adds_epi16(psi_r_m3_p7, psi_i_m3_p7);
-    simde__m128i psi_a_m3_p5 = _mm_adds_epi16(psi_r_m3_p5, psi_i_m3_p5);
-    simde__m128i psi_a_m3_p3 = _mm_adds_epi16(psi_r_m3_p3, psi_i_m3_p3);
-    simde__m128i psi_a_m3_p1 = _mm_adds_epi16(psi_r_m3_p1, psi_i_m3_p1);
-    simde__m128i psi_a_m3_m1 = _mm_adds_epi16(psi_r_m3_m1, psi_i_m3_m1);
-    simde__m128i psi_a_m3_m3 = _mm_adds_epi16(psi_r_m3_m3, psi_i_m3_m3);
-    simde__m128i psi_a_m3_m5 = _mm_adds_epi16(psi_r_m3_m5, psi_i_m3_m5);
-    simde__m128i psi_a_m3_m7 = _mm_adds_epi16(psi_r_m3_m7, psi_i_m3_m7);
-    simde__m128i psi_a_m5_p7 = _mm_adds_epi16(psi_r_m5_p7, psi_i_m5_p7);
-    simde__m128i psi_a_m5_p5 = _mm_adds_epi16(psi_r_m5_p5, psi_i_m5_p5);
-    simde__m128i psi_a_m5_p3 = _mm_adds_epi16(psi_r_m5_p3, psi_i_m5_p3);
-    simde__m128i psi_a_m5_p1 = _mm_adds_epi16(psi_r_m5_p1, psi_i_m5_p1);
-    simde__m128i psi_a_m5_m1 = _mm_adds_epi16(psi_r_m5_m1, psi_i_m5_m1);
-    simde__m128i psi_a_m5_m3 = _mm_adds_epi16(psi_r_m5_m3, psi_i_m5_m3);
-    simde__m128i psi_a_m5_m5 = _mm_adds_epi16(psi_r_m5_m5, psi_i_m5_m5);
-    simde__m128i psi_a_m5_m7 = _mm_adds_epi16(psi_r_m5_m7, psi_i_m5_m7);
-    simde__m128i psi_a_m7_p7 = _mm_adds_epi16(psi_r_m7_p7, psi_i_m7_p7);
-    simde__m128i psi_a_m7_p5 = _mm_adds_epi16(psi_r_m7_p5, psi_i_m7_p5);
-    simde__m128i psi_a_m7_p3 = _mm_adds_epi16(psi_r_m7_p3, psi_i_m7_p3);
-    simde__m128i psi_a_m7_p1 = _mm_adds_epi16(psi_r_m7_p1, psi_i_m7_p1);
-    simde__m128i psi_a_m7_m1 = _mm_adds_epi16(psi_r_m7_m1, psi_i_m7_m1);
-    simde__m128i psi_a_m7_m3 = _mm_adds_epi16(psi_r_m7_m3, psi_i_m7_m3);
-    simde__m128i psi_a_m7_m5 = _mm_adds_epi16(psi_r_m7_m5, psi_i_m7_m5);
-    simde__m128i psi_a_m7_m7 = _mm_adds_epi16(psi_r_m7_m7, psi_i_m7_m7);
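+    // Fixed-point divide by sqrt(2): mulhi by ONE_OVER_SQRT_2 (assumed to be the
+    // Q1.15 constant round(2^15/sqrt(2))) yields psi/(2*sqrt(2)); the shift left by
+    // 1 restores the factor of 2.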
+    psi_r_p7_p7 = simde_mm_mulhi_epi16(psi_r_p7_p7, ONE_OVER_SQRT_2);
+    psi_r_p7_p7 = simde_mm_slli_epi16(psi_r_p7_p7, 1);
+    psi_r_p7_p5 = simde_mm_mulhi_epi16(psi_r_p7_p5, ONE_OVER_SQRT_2);
+    psi_r_p7_p5 = simde_mm_slli_epi16(psi_r_p7_p5, 1);
+    psi_r_p7_p3 = simde_mm_mulhi_epi16(psi_r_p7_p3, ONE_OVER_SQRT_2);
+    psi_r_p7_p3 = simde_mm_slli_epi16(psi_r_p7_p3, 1);
+    psi_r_p7_p1 = simde_mm_mulhi_epi16(psi_r_p7_p1, ONE_OVER_SQRT_2);
+    psi_r_p7_p1 = simde_mm_slli_epi16(psi_r_p7_p1, 1);
+    psi_r_p7_m1 = simde_mm_mulhi_epi16(psi_r_p7_m1, ONE_OVER_SQRT_2);
+    psi_r_p7_m1 = simde_mm_slli_epi16(psi_r_p7_m1, 1);
+    psi_r_p7_m3 = simde_mm_mulhi_epi16(psi_r_p7_m3, ONE_OVER_SQRT_2);
+    psi_r_p7_m3 = simde_mm_slli_epi16(psi_r_p7_m3, 1);
+    psi_r_p7_m5 = simde_mm_mulhi_epi16(psi_r_p7_m5, ONE_OVER_SQRT_2);
+    psi_r_p7_m5 = simde_mm_slli_epi16(psi_r_p7_m5, 1);
+    psi_r_p7_m7 = simde_mm_mulhi_epi16(psi_r_p7_m7, ONE_OVER_SQRT_2);
+    psi_r_p7_m7 = simde_mm_slli_epi16(psi_r_p7_m7, 1);
+    psi_r_p5_p7 = simde_mm_mulhi_epi16(psi_r_p5_p7, ONE_OVER_SQRT_2);
+    psi_r_p5_p7 = simde_mm_slli_epi16(psi_r_p5_p7, 1);
+    psi_r_p5_p5 = simde_mm_mulhi_epi16(psi_r_p5_p5, ONE_OVER_SQRT_2);
+    psi_r_p5_p5 = simde_mm_slli_epi16(psi_r_p5_p5, 1);
+    psi_r_p5_p3 = simde_mm_mulhi_epi16(psi_r_p5_p3, ONE_OVER_SQRT_2);
+    psi_r_p5_p3 = simde_mm_slli_epi16(psi_r_p5_p3, 1);
+    psi_r_p5_p1 = simde_mm_mulhi_epi16(psi_r_p5_p1, ONE_OVER_SQRT_2);
+    psi_r_p5_p1 = simde_mm_slli_epi16(psi_r_p5_p1, 1);
+    psi_r_p5_m1 = simde_mm_mulhi_epi16(psi_r_p5_m1, ONE_OVER_SQRT_2);
+    psi_r_p5_m1 = simde_mm_slli_epi16(psi_r_p5_m1, 1);
+    psi_r_p5_m3 = simde_mm_mulhi_epi16(psi_r_p5_m3, ONE_OVER_SQRT_2);
+    psi_r_p5_m3 = simde_mm_slli_epi16(psi_r_p5_m3, 1);
+    psi_r_p5_m5 = simde_mm_mulhi_epi16(psi_r_p5_m5, ONE_OVER_SQRT_2);
+    psi_r_p5_m5 = simde_mm_slli_epi16(psi_r_p5_m5, 1);
+    psi_r_p5_m7 = simde_mm_mulhi_epi16(psi_r_p5_m7, ONE_OVER_SQRT_2);
+    psi_r_p5_m7 = simde_mm_slli_epi16(psi_r_p5_m7, 1);
+    psi_r_p3_p7 = simde_mm_mulhi_epi16(psi_r_p3_p7, ONE_OVER_SQRT_2);
+    psi_r_p3_p7 = simde_mm_slli_epi16(psi_r_p3_p7, 1);
+    psi_r_p3_p5 = simde_mm_mulhi_epi16(psi_r_p3_p5, ONE_OVER_SQRT_2);
+    psi_r_p3_p5 = simde_mm_slli_epi16(psi_r_p3_p5, 1);
+    psi_r_p3_p3 = simde_mm_mulhi_epi16(psi_r_p3_p3, ONE_OVER_SQRT_2);
+    psi_r_p3_p3 = simde_mm_slli_epi16(psi_r_p3_p3, 1);
+    psi_r_p3_p1 = simde_mm_mulhi_epi16(psi_r_p3_p1, ONE_OVER_SQRT_2);
+    psi_r_p3_p1 = simde_mm_slli_epi16(psi_r_p3_p1, 1);
+    psi_r_p3_m1 = simde_mm_mulhi_epi16(psi_r_p3_m1, ONE_OVER_SQRT_2);
+    psi_r_p3_m1 = simde_mm_slli_epi16(psi_r_p3_m1, 1);
+    psi_r_p3_m3 = simde_mm_mulhi_epi16(psi_r_p3_m3, ONE_OVER_SQRT_2);
+    psi_r_p3_m3 = simde_mm_slli_epi16(psi_r_p3_m3, 1);
+    psi_r_p3_m5 = simde_mm_mulhi_epi16(psi_r_p3_m5, ONE_OVER_SQRT_2);
+    psi_r_p3_m5 = simde_mm_slli_epi16(psi_r_p3_m5, 1);
+    psi_r_p3_m7 = simde_mm_mulhi_epi16(psi_r_p3_m7, ONE_OVER_SQRT_2);
+    psi_r_p3_m7 = simde_mm_slli_epi16(psi_r_p3_m7, 1);
+    psi_r_p1_p7 = simde_mm_mulhi_epi16(psi_r_p1_p7, ONE_OVER_SQRT_2);
+    psi_r_p1_p7 = simde_mm_slli_epi16(psi_r_p1_p7, 1);
+    psi_r_p1_p5 = simde_mm_mulhi_epi16(psi_r_p1_p5, ONE_OVER_SQRT_2);
+    psi_r_p1_p5 = simde_mm_slli_epi16(psi_r_p1_p5, 1);
+    psi_r_p1_p3 = simde_mm_mulhi_epi16(psi_r_p1_p3, ONE_OVER_SQRT_2);
+    psi_r_p1_p3 = simde_mm_slli_epi16(psi_r_p1_p3, 1);
+    psi_r_p1_p1 = simde_mm_mulhi_epi16(psi_r_p1_p1, ONE_OVER_SQRT_2);
+    psi_r_p1_p1 = simde_mm_slli_epi16(psi_r_p1_p1, 1);
+    psi_r_p1_m1 = simde_mm_mulhi_epi16(psi_r_p1_m1, ONE_OVER_SQRT_2);
+    psi_r_p1_m1 = simde_mm_slli_epi16(psi_r_p1_m1, 1);
+    psi_r_p1_m3 = simde_mm_mulhi_epi16(psi_r_p1_m3, ONE_OVER_SQRT_2);
+    psi_r_p1_m3 = simde_mm_slli_epi16(psi_r_p1_m3, 1);
+    psi_r_p1_m5 = simde_mm_mulhi_epi16(psi_r_p1_m5, ONE_OVER_SQRT_2);
+    psi_r_p1_m5 = simde_mm_slli_epi16(psi_r_p1_m5, 1);
+    psi_r_p1_m7 = simde_mm_mulhi_epi16(psi_r_p1_m7, ONE_OVER_SQRT_2);
+    psi_r_p1_m7 = simde_mm_slli_epi16(psi_r_p1_m7, 1);
+    psi_r_m1_p7 = simde_mm_mulhi_epi16(psi_r_m1_p7, ONE_OVER_SQRT_2);
+    psi_r_m1_p7 = simde_mm_slli_epi16(psi_r_m1_p7, 1);
+    psi_r_m1_p5 = simde_mm_mulhi_epi16(psi_r_m1_p5, ONE_OVER_SQRT_2);
+    psi_r_m1_p5 = simde_mm_slli_epi16(psi_r_m1_p5, 1);
+    psi_r_m1_p3 = simde_mm_mulhi_epi16(psi_r_m1_p3, ONE_OVER_SQRT_2);
+    psi_r_m1_p3 = simde_mm_slli_epi16(psi_r_m1_p3, 1);
+    psi_r_m1_p1 = simde_mm_mulhi_epi16(psi_r_m1_p1, ONE_OVER_SQRT_2);
+    psi_r_m1_p1 = simde_mm_slli_epi16(psi_r_m1_p1, 1);
+    psi_r_m1_m1 = simde_mm_mulhi_epi16(psi_r_m1_m1, ONE_OVER_SQRT_2);
+    psi_r_m1_m1 = simde_mm_slli_epi16(psi_r_m1_m1, 1);
+    psi_r_m1_m3 = simde_mm_mulhi_epi16(psi_r_m1_m3, ONE_OVER_SQRT_2);
+    psi_r_m1_m3 = simde_mm_slli_epi16(psi_r_m1_m3, 1);
+    psi_r_m1_m5 = simde_mm_mulhi_epi16(psi_r_m1_m5, ONE_OVER_SQRT_2);
+    psi_r_m1_m5 = simde_mm_slli_epi16(psi_r_m1_m5, 1);
+    psi_r_m1_m7 = simde_mm_mulhi_epi16(psi_r_m1_m7, ONE_OVER_SQRT_2);
+    psi_r_m1_m7 = simde_mm_slli_epi16(psi_r_m1_m7, 1);
+    psi_r_m3_p7 = simde_mm_mulhi_epi16(psi_r_m3_p7, ONE_OVER_SQRT_2);
+    psi_r_m3_p7 = simde_mm_slli_epi16(psi_r_m3_p7, 1);
+    psi_r_m3_p5 = simde_mm_mulhi_epi16(psi_r_m3_p5, ONE_OVER_SQRT_2);
+    psi_r_m3_p5 = simde_mm_slli_epi16(psi_r_m3_p5, 1);
+    psi_r_m3_p3 = simde_mm_mulhi_epi16(psi_r_m3_p3, ONE_OVER_SQRT_2);
+    psi_r_m3_p3 = simde_mm_slli_epi16(psi_r_m3_p3, 1);
+    psi_r_m3_p1 = simde_mm_mulhi_epi16(psi_r_m3_p1, ONE_OVER_SQRT_2);
+    psi_r_m3_p1 = simde_mm_slli_epi16(psi_r_m3_p1, 1);
+    psi_r_m3_m1 = simde_mm_mulhi_epi16(psi_r_m3_m1, ONE_OVER_SQRT_2);
+    psi_r_m3_m1 = simde_mm_slli_epi16(psi_r_m3_m1, 1);
+    psi_r_m3_m3 = simde_mm_mulhi_epi16(psi_r_m3_m3, ONE_OVER_SQRT_2);
+    psi_r_m3_m3 = simde_mm_slli_epi16(psi_r_m3_m3, 1);
+    psi_r_m3_m5 = simde_mm_mulhi_epi16(psi_r_m3_m5, ONE_OVER_SQRT_2);
+    psi_r_m3_m5 = simde_mm_slli_epi16(psi_r_m3_m5, 1);
+    psi_r_m3_m7 = simde_mm_mulhi_epi16(psi_r_m3_m7, ONE_OVER_SQRT_2);
+    psi_r_m3_m7 = simde_mm_slli_epi16(psi_r_m3_m7, 1);
+    psi_r_m5_p7 = simde_mm_mulhi_epi16(psi_r_m5_p7, ONE_OVER_SQRT_2);
+    psi_r_m5_p7 = simde_mm_slli_epi16(psi_r_m5_p7, 1);
+    psi_r_m5_p5 = simde_mm_mulhi_epi16(psi_r_m5_p5, ONE_OVER_SQRT_2);
+    psi_r_m5_p5 = simde_mm_slli_epi16(psi_r_m5_p5, 1);
+    psi_r_m5_p3 = simde_mm_mulhi_epi16(psi_r_m5_p3, ONE_OVER_SQRT_2);
+    psi_r_m5_p3 = simde_mm_slli_epi16(psi_r_m5_p3, 1);
+    psi_r_m5_p1 = simde_mm_mulhi_epi16(psi_r_m5_p1, ONE_OVER_SQRT_2);
+    psi_r_m5_p1 = simde_mm_slli_epi16(psi_r_m5_p1, 1);
+    psi_r_m5_m1 = simde_mm_mulhi_epi16(psi_r_m5_m1, ONE_OVER_SQRT_2);
+    psi_r_m5_m1 = simde_mm_slli_epi16(psi_r_m5_m1, 1);
+    psi_r_m5_m3 = simde_mm_mulhi_epi16(psi_r_m5_m3, ONE_OVER_SQRT_2);
+    psi_r_m5_m3 = simde_mm_slli_epi16(psi_r_m5_m3, 1);
+    psi_r_m5_m5 = simde_mm_mulhi_epi16(psi_r_m5_m5, ONE_OVER_SQRT_2);
+    psi_r_m5_m5 = simde_mm_slli_epi16(psi_r_m5_m5, 1);
+    psi_r_m5_m7 = simde_mm_mulhi_epi16(psi_r_m5_m7, ONE_OVER_SQRT_2);
+    psi_r_m5_m7 = simde_mm_slli_epi16(psi_r_m5_m7, 1);
+    psi_r_m7_p7 = simde_mm_mulhi_epi16(psi_r_m7_p7, ONE_OVER_SQRT_2);
+    psi_r_m7_p7 = simde_mm_slli_epi16(psi_r_m7_p7, 1);
+    psi_r_m7_p5 = simde_mm_mulhi_epi16(psi_r_m7_p5, ONE_OVER_SQRT_2);
+    psi_r_m7_p5 = simde_mm_slli_epi16(psi_r_m7_p5, 1);
+    psi_r_m7_p3 = simde_mm_mulhi_epi16(psi_r_m7_p3, ONE_OVER_SQRT_2);
+    psi_r_m7_p3 = simde_mm_slli_epi16(psi_r_m7_p3, 1);
+    psi_r_m7_p1 = simde_mm_mulhi_epi16(psi_r_m7_p1, ONE_OVER_SQRT_2);
+    psi_r_m7_p1 = simde_mm_slli_epi16(psi_r_m7_p1, 1);
+    psi_r_m7_m1 = simde_mm_mulhi_epi16(psi_r_m7_m1, ONE_OVER_SQRT_2);
+    psi_r_m7_m1 = simde_mm_slli_epi16(psi_r_m7_m1, 1);
+    psi_r_m7_m3 = simde_mm_mulhi_epi16(psi_r_m7_m3, ONE_OVER_SQRT_2);
+    psi_r_m7_m3 = simde_mm_slli_epi16(psi_r_m7_m3, 1);
+    psi_r_m7_m5 = simde_mm_mulhi_epi16(psi_r_m7_m5, ONE_OVER_SQRT_2);
+    psi_r_m7_m5 = simde_mm_slli_epi16(psi_r_m7_m5, 1);
+    psi_r_m7_m7 = simde_mm_mulhi_epi16(psi_r_m7_m7, ONE_OVER_SQRT_2);
+    psi_r_m7_m7 = simde_mm_slli_epi16(psi_r_m7_m7, 1);
+
+    psi_i_p7_p7 = simde_mm_mulhi_epi16(psi_i_p7_p7, ONE_OVER_SQRT_2);
+    psi_i_p7_p7 = simde_mm_slli_epi16(psi_i_p7_p7, 1);
+    psi_i_p7_p5 = simde_mm_mulhi_epi16(psi_i_p7_p5, ONE_OVER_SQRT_2);
+    psi_i_p7_p5 = simde_mm_slli_epi16(psi_i_p7_p5, 1);
+    psi_i_p7_p3 = simde_mm_mulhi_epi16(psi_i_p7_p3, ONE_OVER_SQRT_2);
+    psi_i_p7_p3 = simde_mm_slli_epi16(psi_i_p7_p3, 1);
+    psi_i_p7_p1 = simde_mm_mulhi_epi16(psi_i_p7_p1, ONE_OVER_SQRT_2);
+    psi_i_p7_p1 = simde_mm_slli_epi16(psi_i_p7_p1, 1);
+    psi_i_p7_m1 = simde_mm_mulhi_epi16(psi_i_p7_m1, ONE_OVER_SQRT_2);
+    psi_i_p7_m1 = simde_mm_slli_epi16(psi_i_p7_m1, 1);
+    psi_i_p7_m3 = simde_mm_mulhi_epi16(psi_i_p7_m3, ONE_OVER_SQRT_2);
+    psi_i_p7_m3 = simde_mm_slli_epi16(psi_i_p7_m3, 1);
+    psi_i_p7_m5 = simde_mm_mulhi_epi16(psi_i_p7_m5, ONE_OVER_SQRT_2);
+    psi_i_p7_m5 = simde_mm_slli_epi16(psi_i_p7_m5, 1);
+    psi_i_p7_m7 = simde_mm_mulhi_epi16(psi_i_p7_m7, ONE_OVER_SQRT_2);
+    psi_i_p7_m7 = simde_mm_slli_epi16(psi_i_p7_m7, 1);
+    psi_i_p5_p7 = simde_mm_mulhi_epi16(psi_i_p5_p7, ONE_OVER_SQRT_2);
+    psi_i_p5_p7 = simde_mm_slli_epi16(psi_i_p5_p7, 1);
+    psi_i_p5_p5 = simde_mm_mulhi_epi16(psi_i_p5_p5, ONE_OVER_SQRT_2);
+    psi_i_p5_p5 = simde_mm_slli_epi16(psi_i_p5_p5, 1);
+    psi_i_p5_p3 = simde_mm_mulhi_epi16(psi_i_p5_p3, ONE_OVER_SQRT_2);
+    psi_i_p5_p3 = simde_mm_slli_epi16(psi_i_p5_p3, 1);
+    psi_i_p5_p1 = simde_mm_mulhi_epi16(psi_i_p5_p1, ONE_OVER_SQRT_2);
+    psi_i_p5_p1 = simde_mm_slli_epi16(psi_i_p5_p1, 1);
+    psi_i_p5_m1 = simde_mm_mulhi_epi16(psi_i_p5_m1, ONE_OVER_SQRT_2);
+    psi_i_p5_m1 = simde_mm_slli_epi16(psi_i_p5_m1, 1);
+    psi_i_p5_m3 = simde_mm_mulhi_epi16(psi_i_p5_m3, ONE_OVER_SQRT_2);
+    psi_i_p5_m3 = simde_mm_slli_epi16(psi_i_p5_m3, 1);
+    psi_i_p5_m5 = simde_mm_mulhi_epi16(psi_i_p5_m5, ONE_OVER_SQRT_2);
+    psi_i_p5_m5 = simde_mm_slli_epi16(psi_i_p5_m5, 1);
+    psi_i_p5_m7 = simde_mm_mulhi_epi16(psi_i_p5_m7, ONE_OVER_SQRT_2);
+    psi_i_p5_m7 = simde_mm_slli_epi16(psi_i_p5_m7, 1);
+    psi_i_p3_p7 = simde_mm_mulhi_epi16(psi_i_p3_p7, ONE_OVER_SQRT_2);
+    psi_i_p3_p7 = simde_mm_slli_epi16(psi_i_p3_p7, 1);
+    psi_i_p3_p5 = simde_mm_mulhi_epi16(psi_i_p3_p5, ONE_OVER_SQRT_2);
+    psi_i_p3_p5 = simde_mm_slli_epi16(psi_i_p3_p5, 1);
+    psi_i_p3_p3 = simde_mm_mulhi_epi16(psi_i_p3_p3, ONE_OVER_SQRT_2);
+    psi_i_p3_p3 = simde_mm_slli_epi16(psi_i_p3_p3, 1);
+    psi_i_p3_p1 = simde_mm_mulhi_epi16(psi_i_p3_p1, ONE_OVER_SQRT_2);
+    psi_i_p3_p1 = simde_mm_slli_epi16(psi_i_p3_p1, 1);
+    psi_i_p3_m1 = simde_mm_mulhi_epi16(psi_i_p3_m1, ONE_OVER_SQRT_2);
+    psi_i_p3_m1 = simde_mm_slli_epi16(psi_i_p3_m1, 1);
+    psi_i_p3_m3 = simde_mm_mulhi_epi16(psi_i_p3_m3, ONE_OVER_SQRT_2);
+    psi_i_p3_m3 = simde_mm_slli_epi16(psi_i_p3_m3, 1);
+    psi_i_p3_m5 = simde_mm_mulhi_epi16(psi_i_p3_m5, ONE_OVER_SQRT_2);
+    psi_i_p3_m5 = simde_mm_slli_epi16(psi_i_p3_m5, 1);
+    psi_i_p3_m7 = simde_mm_mulhi_epi16(psi_i_p3_m7, ONE_OVER_SQRT_2);
+    psi_i_p3_m7 = simde_mm_slli_epi16(psi_i_p3_m7, 1);
+    psi_i_p1_p7 = simde_mm_mulhi_epi16(psi_i_p1_p7, ONE_OVER_SQRT_2);
+    psi_i_p1_p7 = simde_mm_slli_epi16(psi_i_p1_p7, 1);
+    psi_i_p1_p5 = simde_mm_mulhi_epi16(psi_i_p1_p5, ONE_OVER_SQRT_2);
+    psi_i_p1_p5 = simde_mm_slli_epi16(psi_i_p1_p5, 1);
+    psi_i_p1_p3 = simde_mm_mulhi_epi16(psi_i_p1_p3, ONE_OVER_SQRT_2);
+    psi_i_p1_p3 = simde_mm_slli_epi16(psi_i_p1_p3, 1);
+    psi_i_p1_p1 = simde_mm_mulhi_epi16(psi_i_p1_p1, ONE_OVER_SQRT_2);
+    psi_i_p1_p1 = simde_mm_slli_epi16(psi_i_p1_p1, 1);
+    psi_i_p1_m1 = simde_mm_mulhi_epi16(psi_i_p1_m1, ONE_OVER_SQRT_2);
+    psi_i_p1_m1 = simde_mm_slli_epi16(psi_i_p1_m1, 1);
+    psi_i_p1_m3 = simde_mm_mulhi_epi16(psi_i_p1_m3, ONE_OVER_SQRT_2);
+    psi_i_p1_m3 = simde_mm_slli_epi16(psi_i_p1_m3, 1);
+    psi_i_p1_m5 = simde_mm_mulhi_epi16(psi_i_p1_m5, ONE_OVER_SQRT_2);
+    psi_i_p1_m5 = simde_mm_slli_epi16(psi_i_p1_m5, 1);
+    psi_i_p1_m7 = simde_mm_mulhi_epi16(psi_i_p1_m7, ONE_OVER_SQRT_2);
+    psi_i_p1_m7 = simde_mm_slli_epi16(psi_i_p1_m7, 1);
+    psi_i_m1_p7 = simde_mm_mulhi_epi16(psi_i_m1_p7, ONE_OVER_SQRT_2);
+    psi_i_m1_p7 = simde_mm_slli_epi16(psi_i_m1_p7, 1);
+    psi_i_m1_p5 = simde_mm_mulhi_epi16(psi_i_m1_p5, ONE_OVER_SQRT_2);
+    psi_i_m1_p5 = simde_mm_slli_epi16(psi_i_m1_p5, 1);
+    psi_i_m1_p3 = simde_mm_mulhi_epi16(psi_i_m1_p3, ONE_OVER_SQRT_2);
+    psi_i_m1_p3 = simde_mm_slli_epi16(psi_i_m1_p3, 1);
+    psi_i_m1_p1 = simde_mm_mulhi_epi16(psi_i_m1_p1, ONE_OVER_SQRT_2);
+    psi_i_m1_p1 = simde_mm_slli_epi16(psi_i_m1_p1, 1);
+    psi_i_m1_m1 = simde_mm_mulhi_epi16(psi_i_m1_m1, ONE_OVER_SQRT_2);
+    psi_i_m1_m1 = simde_mm_slli_epi16(psi_i_m1_m1, 1);
+    psi_i_m1_m3 = simde_mm_mulhi_epi16(psi_i_m1_m3, ONE_OVER_SQRT_2);
+    psi_i_m1_m3 = simde_mm_slli_epi16(psi_i_m1_m3, 1);
+    psi_i_m1_m5 = simde_mm_mulhi_epi16(psi_i_m1_m5, ONE_OVER_SQRT_2);
+    psi_i_m1_m5 = simde_mm_slli_epi16(psi_i_m1_m5, 1);
+    psi_i_m1_m7 = simde_mm_mulhi_epi16(psi_i_m1_m7, ONE_OVER_SQRT_2);
+    psi_i_m1_m7 = simde_mm_slli_epi16(psi_i_m1_m7, 1);
+    psi_i_m3_p7 = simde_mm_mulhi_epi16(psi_i_m3_p7, ONE_OVER_SQRT_2);
+    psi_i_m3_p7 = simde_mm_slli_epi16(psi_i_m3_p7, 1);
+    psi_i_m3_p5 = simde_mm_mulhi_epi16(psi_i_m3_p5, ONE_OVER_SQRT_2);
+    psi_i_m3_p5 = simde_mm_slli_epi16(psi_i_m3_p5, 1);
+    psi_i_m3_p3 = simde_mm_mulhi_epi16(psi_i_m3_p3, ONE_OVER_SQRT_2);
+    psi_i_m3_p3 = simde_mm_slli_epi16(psi_i_m3_p3, 1);
+    psi_i_m3_p1 = simde_mm_mulhi_epi16(psi_i_m3_p1, ONE_OVER_SQRT_2);
+    psi_i_m3_p1 = simde_mm_slli_epi16(psi_i_m3_p1, 1);
+    psi_i_m3_m1 = simde_mm_mulhi_epi16(psi_i_m3_m1, ONE_OVER_SQRT_2);
+    psi_i_m3_m1 = simde_mm_slli_epi16(psi_i_m3_m1, 1);
+    psi_i_m3_m3 = simde_mm_mulhi_epi16(psi_i_m3_m3, ONE_OVER_SQRT_2);
+    psi_i_m3_m3 = simde_mm_slli_epi16(psi_i_m3_m3, 1);
+    psi_i_m3_m5 = simde_mm_mulhi_epi16(psi_i_m3_m5, ONE_OVER_SQRT_2);
+    psi_i_m3_m5 = simde_mm_slli_epi16(psi_i_m3_m5, 1);
+    psi_i_m3_m7 = simde_mm_mulhi_epi16(psi_i_m3_m7, ONE_OVER_SQRT_2);
+    psi_i_m3_m7 = simde_mm_slli_epi16(psi_i_m3_m7, 1);
+    psi_i_m5_p7 = simde_mm_mulhi_epi16(psi_i_m5_p7, ONE_OVER_SQRT_2);
+    psi_i_m5_p7 = simde_mm_slli_epi16(psi_i_m5_p7, 1);
+    psi_i_m5_p5 = simde_mm_mulhi_epi16(psi_i_m5_p5, ONE_OVER_SQRT_2);
+    psi_i_m5_p5 = simde_mm_slli_epi16(psi_i_m5_p5, 1);
+    psi_i_m5_p3 = simde_mm_mulhi_epi16(psi_i_m5_p3, ONE_OVER_SQRT_2);
+    psi_i_m5_p3 = simde_mm_slli_epi16(psi_i_m5_p3, 1);
+    psi_i_m5_p1 = simde_mm_mulhi_epi16(psi_i_m5_p1, ONE_OVER_SQRT_2);
+    psi_i_m5_p1 = simde_mm_slli_epi16(psi_i_m5_p1, 1);
+    psi_i_m5_m1 = simde_mm_mulhi_epi16(psi_i_m5_m1, ONE_OVER_SQRT_2);
+    psi_i_m5_m1 = simde_mm_slli_epi16(psi_i_m5_m1, 1);
+    psi_i_m5_m3 = simde_mm_mulhi_epi16(psi_i_m5_m3, ONE_OVER_SQRT_2);
+    psi_i_m5_m3 = simde_mm_slli_epi16(psi_i_m5_m3, 1);
+    psi_i_m5_m5 = simde_mm_mulhi_epi16(psi_i_m5_m5, ONE_OVER_SQRT_2);
+    psi_i_m5_m5 = simde_mm_slli_epi16(psi_i_m5_m5, 1);
+    psi_i_m5_m7 = simde_mm_mulhi_epi16(psi_i_m5_m7, ONE_OVER_SQRT_2);
+    psi_i_m5_m7 = simde_mm_slli_epi16(psi_i_m5_m7, 1);
+    psi_i_m7_p7 = simde_mm_mulhi_epi16(psi_i_m7_p7, ONE_OVER_SQRT_2);
+    psi_i_m7_p7 = simde_mm_slli_epi16(psi_i_m7_p7, 1);
+    psi_i_m7_p5 = simde_mm_mulhi_epi16(psi_i_m7_p5, ONE_OVER_SQRT_2);
+    psi_i_m7_p5 = simde_mm_slli_epi16(psi_i_m7_p5, 1);
+    psi_i_m7_p3 = simde_mm_mulhi_epi16(psi_i_m7_p3, ONE_OVER_SQRT_2);
+    psi_i_m7_p3 = simde_mm_slli_epi16(psi_i_m7_p3, 1);
+    psi_i_m7_p1 = simde_mm_mulhi_epi16(psi_i_m7_p1, ONE_OVER_SQRT_2);
+    psi_i_m7_p1 = simde_mm_slli_epi16(psi_i_m7_p1, 1);
+    psi_i_m7_m1 = simde_mm_mulhi_epi16(psi_i_m7_m1, ONE_OVER_SQRT_2);
+    psi_i_m7_m1 = simde_mm_slli_epi16(psi_i_m7_m1, 1);
+    psi_i_m7_m3 = simde_mm_mulhi_epi16(psi_i_m7_m3, ONE_OVER_SQRT_2);
+    psi_i_m7_m3 = simde_mm_slli_epi16(psi_i_m7_m3, 1);
+    psi_i_m7_m5 = simde_mm_mulhi_epi16(psi_i_m7_m5, ONE_OVER_SQRT_2);
+    psi_i_m7_m5 = simde_mm_slli_epi16(psi_i_m7_m5, 1);
+    psi_i_m7_m7 = simde_mm_mulhi_epi16(psi_i_m7_m7, ONE_OVER_SQRT_2);
+    psi_i_m7_m7 = simde_mm_slli_epi16(psi_i_m7_m7, 1);
+
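+    // psi_a_x_y = psi_r_x_y + psi_i_x_y: total interference metric for hypothesis
+    // (x,y), combining the real and imaginary terms scaled above.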
+    simde__m128i psi_a_p7_p7 = simde_mm_adds_epi16(psi_r_p7_p7, psi_i_p7_p7);
+    simde__m128i psi_a_p7_p5 = simde_mm_adds_epi16(psi_r_p7_p5, psi_i_p7_p5);
+    simde__m128i psi_a_p7_p3 = simde_mm_adds_epi16(psi_r_p7_p3, psi_i_p7_p3);
+    simde__m128i psi_a_p7_p1 = simde_mm_adds_epi16(psi_r_p7_p1, psi_i_p7_p1);
+    simde__m128i psi_a_p7_m1 = simde_mm_adds_epi16(psi_r_p7_m1, psi_i_p7_m1);
+    simde__m128i psi_a_p7_m3 = simde_mm_adds_epi16(psi_r_p7_m3, psi_i_p7_m3);
+    simde__m128i psi_a_p7_m5 = simde_mm_adds_epi16(psi_r_p7_m5, psi_i_p7_m5);
+    simde__m128i psi_a_p7_m7 = simde_mm_adds_epi16(psi_r_p7_m7, psi_i_p7_m7);
+    simde__m128i psi_a_p5_p7 = simde_mm_adds_epi16(psi_r_p5_p7, psi_i_p5_p7);
+    simde__m128i psi_a_p5_p5 = simde_mm_adds_epi16(psi_r_p5_p5, psi_i_p5_p5);
+    simde__m128i psi_a_p5_p3 = simde_mm_adds_epi16(psi_r_p5_p3, psi_i_p5_p3);
+    simde__m128i psi_a_p5_p1 = simde_mm_adds_epi16(psi_r_p5_p1, psi_i_p5_p1);
+    simde__m128i psi_a_p5_m1 = simde_mm_adds_epi16(psi_r_p5_m1, psi_i_p5_m1);
+    simde__m128i psi_a_p5_m3 = simde_mm_adds_epi16(psi_r_p5_m3, psi_i_p5_m3);
+    simde__m128i psi_a_p5_m5 = simde_mm_adds_epi16(psi_r_p5_m5, psi_i_p5_m5);
+    simde__m128i psi_a_p5_m7 = simde_mm_adds_epi16(psi_r_p5_m7, psi_i_p5_m7);
+    simde__m128i psi_a_p3_p7 = simde_mm_adds_epi16(psi_r_p3_p7, psi_i_p3_p7);
+    simde__m128i psi_a_p3_p5 = simde_mm_adds_epi16(psi_r_p3_p5, psi_i_p3_p5);
+    simde__m128i psi_a_p3_p3 = simde_mm_adds_epi16(psi_r_p3_p3, psi_i_p3_p3);
+    simde__m128i psi_a_p3_p1 = simde_mm_adds_epi16(psi_r_p3_p1, psi_i_p3_p1);
+    simde__m128i psi_a_p3_m1 = simde_mm_adds_epi16(psi_r_p3_m1, psi_i_p3_m1);
+    simde__m128i psi_a_p3_m3 = simde_mm_adds_epi16(psi_r_p3_m3, psi_i_p3_m3);
+    simde__m128i psi_a_p3_m5 = simde_mm_adds_epi16(psi_r_p3_m5, psi_i_p3_m5);
+    simde__m128i psi_a_p3_m7 = simde_mm_adds_epi16(psi_r_p3_m7, psi_i_p3_m7);
+    simde__m128i psi_a_p1_p7 = simde_mm_adds_epi16(psi_r_p1_p7, psi_i_p1_p7);
+    simde__m128i psi_a_p1_p5 = simde_mm_adds_epi16(psi_r_p1_p5, psi_i_p1_p5);
+    simde__m128i psi_a_p1_p3 = simde_mm_adds_epi16(psi_r_p1_p3, psi_i_p1_p3);
+    simde__m128i psi_a_p1_p1 = simde_mm_adds_epi16(psi_r_p1_p1, psi_i_p1_p1);
+    simde__m128i psi_a_p1_m1 = simde_mm_adds_epi16(psi_r_p1_m1, psi_i_p1_m1);
+    simde__m128i psi_a_p1_m3 = simde_mm_adds_epi16(psi_r_p1_m3, psi_i_p1_m3);
+    simde__m128i psi_a_p1_m5 = simde_mm_adds_epi16(psi_r_p1_m5, psi_i_p1_m5);
+    simde__m128i psi_a_p1_m7 = simde_mm_adds_epi16(psi_r_p1_m7, psi_i_p1_m7);
+    simde__m128i psi_a_m1_p7 = simde_mm_adds_epi16(psi_r_m1_p7, psi_i_m1_p7);
+    simde__m128i psi_a_m1_p5 = simde_mm_adds_epi16(psi_r_m1_p5, psi_i_m1_p5);
+    simde__m128i psi_a_m1_p3 = simde_mm_adds_epi16(psi_r_m1_p3, psi_i_m1_p3);
+    simde__m128i psi_a_m1_p1 = simde_mm_adds_epi16(psi_r_m1_p1, psi_i_m1_p1);
+    simde__m128i psi_a_m1_m1 = simde_mm_adds_epi16(psi_r_m1_m1, psi_i_m1_m1);
+    simde__m128i psi_a_m1_m3 = simde_mm_adds_epi16(psi_r_m1_m3, psi_i_m1_m3);
+    simde__m128i psi_a_m1_m5 = simde_mm_adds_epi16(psi_r_m1_m5, psi_i_m1_m5);
+    simde__m128i psi_a_m1_m7 = simde_mm_adds_epi16(psi_r_m1_m7, psi_i_m1_m7);
+    simde__m128i psi_a_m3_p7 = simde_mm_adds_epi16(psi_r_m3_p7, psi_i_m3_p7);
+    simde__m128i psi_a_m3_p5 = simde_mm_adds_epi16(psi_r_m3_p5, psi_i_m3_p5);
+    simde__m128i psi_a_m3_p3 = simde_mm_adds_epi16(psi_r_m3_p3, psi_i_m3_p3);
+    simde__m128i psi_a_m3_p1 = simde_mm_adds_epi16(psi_r_m3_p1, psi_i_m3_p1);
+    simde__m128i psi_a_m3_m1 = simde_mm_adds_epi16(psi_r_m3_m1, psi_i_m3_m1);
+    simde__m128i psi_a_m3_m3 = simde_mm_adds_epi16(psi_r_m3_m3, psi_i_m3_m3);
+    simde__m128i psi_a_m3_m5 = simde_mm_adds_epi16(psi_r_m3_m5, psi_i_m3_m5);
+    simde__m128i psi_a_m3_m7 = simde_mm_adds_epi16(psi_r_m3_m7, psi_i_m3_m7);
+    simde__m128i psi_a_m5_p7 = simde_mm_adds_epi16(psi_r_m5_p7, psi_i_m5_p7);
+    simde__m128i psi_a_m5_p5 = simde_mm_adds_epi16(psi_r_m5_p5, psi_i_m5_p5);
+    simde__m128i psi_a_m5_p3 = simde_mm_adds_epi16(psi_r_m5_p3, psi_i_m5_p3);
+    simde__m128i psi_a_m5_p1 = simde_mm_adds_epi16(psi_r_m5_p1, psi_i_m5_p1);
+    simde__m128i psi_a_m5_m1 = simde_mm_adds_epi16(psi_r_m5_m1, psi_i_m5_m1);
+    simde__m128i psi_a_m5_m3 = simde_mm_adds_epi16(psi_r_m5_m3, psi_i_m5_m3);
+    simde__m128i psi_a_m5_m5 = simde_mm_adds_epi16(psi_r_m5_m5, psi_i_m5_m5);
+    simde__m128i psi_a_m5_m7 = simde_mm_adds_epi16(psi_r_m5_m7, psi_i_m5_m7);
+    simde__m128i psi_a_m7_p7 = simde_mm_adds_epi16(psi_r_m7_p7, psi_i_m7_p7);
+    simde__m128i psi_a_m7_p5 = simde_mm_adds_epi16(psi_r_m7_p5, psi_i_m7_p5);
+    simde__m128i psi_a_m7_p3 = simde_mm_adds_epi16(psi_r_m7_p3, psi_i_m7_p3);
+    simde__m128i psi_a_m7_p1 = simde_mm_adds_epi16(psi_r_m7_p1, psi_i_m7_p1);
+    simde__m128i psi_a_m7_m1 = simde_mm_adds_epi16(psi_r_m7_m1, psi_i_m7_m1);
+    simde__m128i psi_a_m7_m3 = simde_mm_adds_epi16(psi_r_m7_m3, psi_i_m7_m3);
+    simde__m128i psi_a_m7_m5 = simde_mm_adds_epi16(psi_r_m7_m5, psi_i_m7_m5);
+    simde__m128i psi_a_m7_m7 = simde_mm_adds_epi16(psi_r_m7_m7, psi_i_m7_m7);
 
     // Computing different multiples of ||h0||^2
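+    // For a 64-QAM point s = (x + jy)/sqrt(42), |s|^2 = (x^2 + y^2)/42; the factors
+    // below are 2/42 (1,1), 10/42 (1,3), 26/42 (1,5), 50/42 (1,7)=(5,5), 18/42 (3,3),
+    // 34/42 (3,5), 58/42 (3,7), 74/42 (5,7), 98/42 (7,7), each applied to ch_mag_des
+    // with the same mulhi-plus-shift fixed-point pattern as above.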
     // x=1, y=1
-    ch_mag_2_over_42_with_sigma2 = _mm_mulhi_epi16(ch_mag_des,ONE_OVER_FOUR_SQRT_42);
-    ch_mag_2_over_42_with_sigma2 = _mm_slli_epi16(ch_mag_2_over_42_with_sigma2,1);
+    ch_mag_2_over_42_with_sigma2 = simde_mm_mulhi_epi16(ch_mag_des,ONE_OVER_FOUR_SQRT_42);
+    ch_mag_2_over_42_with_sigma2 = simde_mm_slli_epi16(ch_mag_2_over_42_with_sigma2,1);
     // x=1, y=3
-    ch_mag_10_over_42_with_sigma2 = _mm_mulhi_epi16(ch_mag_des,FIVE_OVER_FOUR_SQRT_42);
-    ch_mag_10_over_42_with_sigma2 = _mm_slli_epi16(ch_mag_10_over_42_with_sigma2,1);
+    ch_mag_10_over_42_with_sigma2 = simde_mm_mulhi_epi16(ch_mag_des,FIVE_OVER_FOUR_SQRT_42);
+    ch_mag_10_over_42_with_sigma2 = simde_mm_slli_epi16(ch_mag_10_over_42_with_sigma2,1);
     // x=1, y=5
-    ch_mag_26_over_42_with_sigma2 = _mm_mulhi_epi16(ch_mag_des,THIRTEEN_OVER_FOUR_SQRT_42);
-    ch_mag_26_over_42_with_sigma2 = _mm_slli_epi16(ch_mag_26_over_42_with_sigma2,1);
+    ch_mag_26_over_42_with_sigma2 = simde_mm_mulhi_epi16(ch_mag_des,THIRTEEN_OVER_FOUR_SQRT_42);
+    ch_mag_26_over_42_with_sigma2 = simde_mm_slli_epi16(ch_mag_26_over_42_with_sigma2,1);
     // x=1, y=7
-    ch_mag_50_over_42_with_sigma2 = _mm_mulhi_epi16(ch_mag_des,TWENTYFIVE_OVER_FOUR_SQRT_42);
-    ch_mag_50_over_42_with_sigma2 = _mm_slli_epi16(ch_mag_50_over_42_with_sigma2,1);
+    ch_mag_50_over_42_with_sigma2 = simde_mm_mulhi_epi16(ch_mag_des,TWENTYFIVE_OVER_FOUR_SQRT_42);
+    ch_mag_50_over_42_with_sigma2 = simde_mm_slli_epi16(ch_mag_50_over_42_with_sigma2,1);
     // x=3, y=3
-    ch_mag_18_over_42_with_sigma2 = _mm_mulhi_epi16(ch_mag_des,NINE_OVER_FOUR_SQRT_42);
-    ch_mag_18_over_42_with_sigma2 = _mm_slli_epi16(ch_mag_18_over_42_with_sigma2,1);
+    ch_mag_18_over_42_with_sigma2 = simde_mm_mulhi_epi16(ch_mag_des,NINE_OVER_FOUR_SQRT_42);
+    ch_mag_18_over_42_with_sigma2 = simde_mm_slli_epi16(ch_mag_18_over_42_with_sigma2,1);
     // x=3, y=5
-    ch_mag_34_over_42_with_sigma2 = _mm_mulhi_epi16(ch_mag_des,SEVENTEEN_OVER_FOUR_SQRT_42);
-    ch_mag_34_over_42_with_sigma2 = _mm_slli_epi16(ch_mag_34_over_42_with_sigma2,1);
+    ch_mag_34_over_42_with_sigma2 = simde_mm_mulhi_epi16(ch_mag_des,SEVENTEEN_OVER_FOUR_SQRT_42);
+    ch_mag_34_over_42_with_sigma2 = simde_mm_slli_epi16(ch_mag_34_over_42_with_sigma2,1);
     // x=3, y=7
-    ch_mag_58_over_42_with_sigma2 = _mm_mulhi_epi16(ch_mag_des,TWENTYNINE_OVER_FOUR_SQRT_42);
-    ch_mag_58_over_42_with_sigma2 = _mm_slli_epi16(ch_mag_58_over_42_with_sigma2,2);
+    ch_mag_58_over_42_with_sigma2 = simde_mm_mulhi_epi16(ch_mag_des,TWENTYNINE_OVER_FOUR_SQRT_42);
+    ch_mag_58_over_42_with_sigma2 = simde_mm_slli_epi16(ch_mag_58_over_42_with_sigma2,2);
     // x=5, y=5
-    ch_mag_50_over_42_with_sigma2 = _mm_mulhi_epi16(ch_mag_des,TWENTYFIVE_OVER_FOUR_SQRT_42);
-    ch_mag_50_over_42_with_sigma2 = _mm_slli_epi16(ch_mag_50_over_42_with_sigma2,1);
+    ch_mag_50_over_42_with_sigma2 = simde_mm_mulhi_epi16(ch_mag_des,TWENTYFIVE_OVER_FOUR_SQRT_42);
+    ch_mag_50_over_42_with_sigma2 = simde_mm_slli_epi16(ch_mag_50_over_42_with_sigma2,1);
     // x=5, y=7
-    ch_mag_74_over_42_with_sigma2 = _mm_mulhi_epi16(ch_mag_des,THIRTYSEVEN_OVER_FOUR_SQRT_42);
-    ch_mag_74_over_42_with_sigma2 = _mm_slli_epi16(ch_mag_74_over_42_with_sigma2,2);
+    ch_mag_74_over_42_with_sigma2 = simde_mm_mulhi_epi16(ch_mag_des,THIRTYSEVEN_OVER_FOUR_SQRT_42);
+    ch_mag_74_over_42_with_sigma2 = simde_mm_slli_epi16(ch_mag_74_over_42_with_sigma2,2);
     // x=7, y=7
-    ch_mag_98_over_42_with_sigma2 = _mm_mulhi_epi16(ch_mag_des,FORTYNINE_OVER_FOUR_SQRT_42);
-    ch_mag_98_over_42_with_sigma2 = _mm_slli_epi16(ch_mag_98_over_42_with_sigma2,2);
+    ch_mag_98_over_42_with_sigma2 = simde_mm_mulhi_epi16(ch_mag_des,FORTYNINE_OVER_FOUR_SQRT_42);
+    ch_mag_98_over_42_with_sigma2 = simde_mm_slli_epi16(ch_mag_98_over_42_with_sigma2,2);
 
     // Computing Metrics
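+    // Per-hypothesis max-log metric: bit_met_x_y = psi_a_x_y + Re{y0*conj(s)}
+    // - (x^2+y^2)/42 * ||h0||^2, i.e. -|y0 - h0*s|^2 expanded and stripped of terms
+    // common to all hypotheses; the subs_epi16 forms below reuse y0_m/y0_p with the
+    // sign flipped for negative-x points.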
-    xmm1 = _mm_adds_epi16(psi_a_p7_p7, y0_p_7_7);
-    simde__m128i bit_met_p7_p7 = _mm_subs_epi16(xmm1, ch_mag_98_over_42_with_sigma2);
-    xmm1 = _mm_adds_epi16(psi_a_p7_p5, y0_p_7_5);
-    simde__m128i bit_met_p7_p5 = _mm_subs_epi16(xmm1, ch_mag_74_over_42_with_sigma2);
-    xmm1 = _mm_adds_epi16(psi_a_p7_p3, y0_p_7_3);
-    simde__m128i bit_met_p7_p3 = _mm_subs_epi16(xmm1, ch_mag_58_over_42_with_sigma2);
-    xmm1 = _mm_adds_epi16(psi_a_p7_p1, y0_p_7_1);
-    simde__m128i bit_met_p7_p1 = _mm_subs_epi16(xmm1, ch_mag_50_over_42_with_sigma2);
-    xmm1 = _mm_adds_epi16(psi_a_p7_m1, y0_m_7_1);
-    simde__m128i bit_met_p7_m1 = _mm_subs_epi16(xmm1, ch_mag_50_over_42_with_sigma2);
-    xmm1 = _mm_adds_epi16(psi_a_p7_m3, y0_m_7_3);
-    simde__m128i bit_met_p7_m3 = _mm_subs_epi16(xmm1, ch_mag_58_over_42_with_sigma2);
-    xmm1 = _mm_adds_epi16(psi_a_p7_m5, y0_m_7_5);
-    simde__m128i bit_met_p7_m5 = _mm_subs_epi16(xmm1, ch_mag_74_over_42_with_sigma2);
-    xmm1 = _mm_adds_epi16(psi_a_p7_m7, y0_m_7_7);
-    simde__m128i bit_met_p7_m7 = _mm_subs_epi16(xmm1, ch_mag_98_over_42_with_sigma2);
-    xmm1 = _mm_adds_epi16(psi_a_p5_p7, y0_p_5_7);
-    simde__m128i bit_met_p5_p7 = _mm_subs_epi16(xmm1, ch_mag_74_over_42_with_sigma2);
-    xmm1 = _mm_adds_epi16(psi_a_p5_p5, y0_p_5_5);
-    simde__m128i bit_met_p5_p5 = _mm_subs_epi16(xmm1, ch_mag_50_over_42_with_sigma2);
-    xmm1 = _mm_adds_epi16(psi_a_p5_p3, y0_p_5_3);
-    simde__m128i bit_met_p5_p3 = _mm_subs_epi16(xmm1, ch_mag_34_over_42_with_sigma2);
-    xmm1 = _mm_adds_epi16(psi_a_p5_p1, y0_p_5_1);
-    simde__m128i bit_met_p5_p1 = _mm_subs_epi16(xmm1, ch_mag_26_over_42_with_sigma2);
-    xmm1 = _mm_adds_epi16(psi_a_p5_m1, y0_m_5_1);
-    simde__m128i bit_met_p5_m1 = _mm_subs_epi16(xmm1, ch_mag_26_over_42_with_sigma2);
-    xmm1 = _mm_adds_epi16(psi_a_p5_m3, y0_m_5_3);
-    simde__m128i bit_met_p5_m3 = _mm_subs_epi16(xmm1, ch_mag_34_over_42_with_sigma2);
-    xmm1 = _mm_adds_epi16(psi_a_p5_m5, y0_m_5_5);
-    simde__m128i bit_met_p5_m5 = _mm_subs_epi16(xmm1, ch_mag_50_over_42_with_sigma2);
-    xmm1 = _mm_adds_epi16(psi_a_p5_m7, y0_m_5_7);
-    simde__m128i bit_met_p5_m7 = _mm_subs_epi16(xmm1, ch_mag_74_over_42_with_sigma2);
-    xmm1 = _mm_adds_epi16(psi_a_p3_p7, y0_p_3_7);
-    simde__m128i bit_met_p3_p7 = _mm_subs_epi16(xmm1, ch_mag_58_over_42_with_sigma2);
-    xmm1 = _mm_adds_epi16(psi_a_p3_p5, y0_p_3_5);
-    simde__m128i bit_met_p3_p5 = _mm_subs_epi16(xmm1, ch_mag_34_over_42_with_sigma2);
-    xmm1 = _mm_adds_epi16(psi_a_p3_p3, y0_p_3_3);
-    simde__m128i bit_met_p3_p3 = _mm_subs_epi16(xmm1, ch_mag_18_over_42_with_sigma2);
-    xmm1 = _mm_adds_epi16(psi_a_p3_p1, y0_p_3_1);
-    simde__m128i bit_met_p3_p1 = _mm_subs_epi16(xmm1, ch_mag_10_over_42_with_sigma2);
-    xmm1 = _mm_adds_epi16(psi_a_p3_m1, y0_m_3_1);
-    simde__m128i bit_met_p3_m1 = _mm_subs_epi16(xmm1, ch_mag_10_over_42_with_sigma2);
-    xmm1 = _mm_adds_epi16(psi_a_p3_m3, y0_m_3_3);
-    simde__m128i bit_met_p3_m3 = _mm_subs_epi16(xmm1, ch_mag_18_over_42_with_sigma2);
-    xmm1 = _mm_adds_epi16(psi_a_p3_m5, y0_m_3_5);
-    simde__m128i bit_met_p3_m5 = _mm_subs_epi16(xmm1, ch_mag_34_over_42_with_sigma2);
-    xmm1 = _mm_adds_epi16(psi_a_p3_m7, y0_m_3_7);
-    simde__m128i bit_met_p3_m7 = _mm_subs_epi16(xmm1, ch_mag_58_over_42_with_sigma2);
-    xmm1 = _mm_adds_epi16(psi_a_p1_p7, y0_p_1_7);
-    simde__m128i bit_met_p1_p7 = _mm_subs_epi16(xmm1, ch_mag_50_over_42_with_sigma2);
-    xmm1 = _mm_adds_epi16(psi_a_p1_p5, y0_p_1_5);
-    simde__m128i bit_met_p1_p5 = _mm_subs_epi16(xmm1, ch_mag_26_over_42_with_sigma2);
-    xmm1 = _mm_adds_epi16(psi_a_p1_p3, y0_p_1_3);
-    simde__m128i bit_met_p1_p3 = _mm_subs_epi16(xmm1, ch_mag_10_over_42_with_sigma2);
-    xmm1 = _mm_adds_epi16(psi_a_p1_p1, y0_p_1_1);
-    simde__m128i bit_met_p1_p1 = _mm_subs_epi16(xmm1, ch_mag_2_over_42_with_sigma2);
-    xmm1 = _mm_adds_epi16(psi_a_p1_m1, y0_m_1_1);
-    simde__m128i bit_met_p1_m1 = _mm_subs_epi16(xmm1, ch_mag_2_over_42_with_sigma2);
-    xmm1 = _mm_adds_epi16(psi_a_p1_m3, y0_m_1_3);
-    simde__m128i bit_met_p1_m3 = _mm_subs_epi16(xmm1, ch_mag_10_over_42_with_sigma2);
-    xmm1 = _mm_adds_epi16(psi_a_p1_m5, y0_m_1_5);
-    simde__m128i bit_met_p1_m5 = _mm_subs_epi16(xmm1, ch_mag_26_over_42_with_sigma2);
-    xmm1 = _mm_adds_epi16(psi_a_p1_m7, y0_m_1_7);
-    simde__m128i bit_met_p1_m7 = _mm_subs_epi16(xmm1, ch_mag_50_over_42_with_sigma2);
-
-    xmm1 = _mm_subs_epi16(psi_a_m1_p7, y0_m_1_7);
-    simde__m128i bit_met_m1_p7 = _mm_subs_epi16(xmm1, ch_mag_50_over_42_with_sigma2);
-    xmm1 = _mm_subs_epi16(psi_a_m1_p5, y0_m_1_5);
-    simde__m128i bit_met_m1_p5 = _mm_subs_epi16(xmm1, ch_mag_26_over_42_with_sigma2);
-    xmm1 = _mm_subs_epi16(psi_a_m1_p3, y0_m_1_3);
-    simde__m128i bit_met_m1_p3 = _mm_subs_epi16(xmm1, ch_mag_10_over_42_with_sigma2);
-    xmm1 = _mm_subs_epi16(psi_a_m1_p1, y0_m_1_1);
-    simde__m128i bit_met_m1_p1 = _mm_subs_epi16(xmm1, ch_mag_2_over_42_with_sigma2);
-    xmm1 = _mm_subs_epi16(psi_a_m1_m1, y0_p_1_1);
-    simde__m128i bit_met_m1_m1 = _mm_subs_epi16(xmm1, ch_mag_2_over_42_with_sigma2);
-    xmm1 = _mm_subs_epi16(psi_a_m1_m3, y0_p_1_3);
-    simde__m128i bit_met_m1_m3 = _mm_subs_epi16(xmm1, ch_mag_10_over_42_with_sigma2);
-    xmm1 = _mm_subs_epi16(psi_a_m1_m5, y0_p_1_5);
-    simde__m128i bit_met_m1_m5 = _mm_subs_epi16(xmm1, ch_mag_26_over_42_with_sigma2);
-    xmm1 = _mm_subs_epi16(psi_a_m1_m7, y0_p_1_7);
-    simde__m128i bit_met_m1_m7 = _mm_subs_epi16(xmm1, ch_mag_50_over_42_with_sigma2);
-    xmm1 = _mm_subs_epi16(psi_a_m3_p7, y0_m_3_7);
-    simde__m128i bit_met_m3_p7 = _mm_subs_epi16(xmm1, ch_mag_58_over_42_with_sigma2);
-    xmm1 = _mm_subs_epi16(psi_a_m3_p5, y0_m_3_5);
-    simde__m128i bit_met_m3_p5 = _mm_subs_epi16(xmm1, ch_mag_34_over_42_with_sigma2);
-    xmm1 = _mm_subs_epi16(psi_a_m3_p3, y0_m_3_3);
-    simde__m128i bit_met_m3_p3 = _mm_subs_epi16(xmm1, ch_mag_18_over_42_with_sigma2);
-    xmm1 = _mm_subs_epi16(psi_a_m3_p1, y0_m_3_1);
-    simde__m128i bit_met_m3_p1 = _mm_subs_epi16(xmm1, ch_mag_10_over_42_with_sigma2);
-    xmm1 = _mm_subs_epi16(psi_a_m3_m1, y0_p_3_1);
-    simde__m128i bit_met_m3_m1 = _mm_subs_epi16(xmm1, ch_mag_10_over_42_with_sigma2);
-    xmm1 = _mm_subs_epi16(psi_a_m3_m3, y0_p_3_3);
-    simde__m128i bit_met_m3_m3 = _mm_subs_epi16(xmm1, ch_mag_18_over_42_with_sigma2);
-    xmm1 = _mm_subs_epi16(psi_a_m3_m5, y0_p_3_5);
-    simde__m128i bit_met_m3_m5 = _mm_subs_epi16(xmm1, ch_mag_34_over_42_with_sigma2);
-    xmm1 = _mm_subs_epi16(psi_a_m3_m7, y0_p_3_7);
-    simde__m128i bit_met_m3_m7 = _mm_subs_epi16(xmm1, ch_mag_58_over_42_with_sigma2);
-    xmm1 = _mm_subs_epi16(psi_a_m5_p7, y0_m_5_7);
-    simde__m128i bit_met_m5_p7 = _mm_subs_epi16(xmm1, ch_mag_74_over_42_with_sigma2);
-    xmm1 = _mm_subs_epi16(psi_a_m5_p5, y0_m_5_5);
-    simde__m128i bit_met_m5_p5 = _mm_subs_epi16(xmm1, ch_mag_50_over_42_with_sigma2);
-    xmm1 = _mm_subs_epi16(psi_a_m5_p3, y0_m_5_3);
-    simde__m128i bit_met_m5_p3 = _mm_subs_epi16(xmm1, ch_mag_34_over_42_with_sigma2);
-    xmm1 = _mm_subs_epi16(psi_a_m5_p1, y0_m_5_1);
-    simde__m128i bit_met_m5_p1 = _mm_subs_epi16(xmm1, ch_mag_26_over_42_with_sigma2);
-    xmm1 = _mm_subs_epi16(psi_a_m5_m1, y0_p_5_1);
-    simde__m128i bit_met_m5_m1 = _mm_subs_epi16(xmm1, ch_mag_26_over_42_with_sigma2);
-    xmm1 = _mm_subs_epi16(psi_a_m5_m3, y0_p_5_3);
-    simde__m128i bit_met_m5_m3 = _mm_subs_epi16(xmm1, ch_mag_34_over_42_with_sigma2);
-    xmm1 = _mm_subs_epi16(psi_a_m5_m5, y0_p_5_5);
-    simde__m128i bit_met_m5_m5 = _mm_subs_epi16(xmm1, ch_mag_50_over_42_with_sigma2);
-    xmm1 = _mm_subs_epi16(psi_a_m5_m7, y0_p_5_7);
-    simde__m128i bit_met_m5_m7 = _mm_subs_epi16(xmm1, ch_mag_74_over_42_with_sigma2);
-    xmm1 = _mm_subs_epi16(psi_a_m7_p7, y0_m_7_7);
-    simde__m128i bit_met_m7_p7 = _mm_subs_epi16(xmm1, ch_mag_98_over_42_with_sigma2);
-    xmm1 = _mm_subs_epi16(psi_a_m7_p5, y0_m_7_5);
-    simde__m128i bit_met_m7_p5 = _mm_subs_epi16(xmm1, ch_mag_74_over_42_with_sigma2);
-    xmm1 = _mm_subs_epi16(psi_a_m7_p3, y0_m_7_3);
-    simde__m128i bit_met_m7_p3 = _mm_subs_epi16(xmm1, ch_mag_58_over_42_with_sigma2);
-    xmm1 = _mm_subs_epi16(psi_a_m7_p1, y0_m_7_1);
-    simde__m128i bit_met_m7_p1 = _mm_subs_epi16(xmm1, ch_mag_50_over_42_with_sigma2);
-    xmm1 = _mm_subs_epi16(psi_a_m7_m1, y0_p_7_1);
-    simde__m128i bit_met_m7_m1 = _mm_subs_epi16(xmm1, ch_mag_50_over_42_with_sigma2);
-    xmm1 = _mm_subs_epi16(psi_a_m7_m3, y0_p_7_3);
-    simde__m128i bit_met_m7_m3 = _mm_subs_epi16(xmm1, ch_mag_58_over_42_with_sigma2);
-    xmm1 = _mm_subs_epi16(psi_a_m7_m5, y0_p_7_5);
-    simde__m128i bit_met_m7_m5 = _mm_subs_epi16(xmm1, ch_mag_74_over_42_with_sigma2);
-    xmm1 = _mm_subs_epi16(psi_a_m7_m7, y0_p_7_7);
-    simde__m128i bit_met_m7_m7 = _mm_subs_epi16(xmm1, ch_mag_98_over_42_with_sigma2);
+    xmm1 = simde_mm_adds_epi16(psi_a_p7_p7, y0_p_7_7);
+    simde__m128i bit_met_p7_p7 = simde_mm_subs_epi16(xmm1, ch_mag_98_over_42_with_sigma2);
+    xmm1 = simde_mm_adds_epi16(psi_a_p7_p5, y0_p_7_5);
+    simde__m128i bit_met_p7_p5 = simde_mm_subs_epi16(xmm1, ch_mag_74_over_42_with_sigma2);
+    xmm1 = simde_mm_adds_epi16(psi_a_p7_p3, y0_p_7_3);
+    simde__m128i bit_met_p7_p3 = simde_mm_subs_epi16(xmm1, ch_mag_58_over_42_with_sigma2);
+    xmm1 = simde_mm_adds_epi16(psi_a_p7_p1, y0_p_7_1);
+    simde__m128i bit_met_p7_p1 = simde_mm_subs_epi16(xmm1, ch_mag_50_over_42_with_sigma2);
+    xmm1 = simde_mm_adds_epi16(psi_a_p7_m1, y0_m_7_1);
+    simde__m128i bit_met_p7_m1 = simde_mm_subs_epi16(xmm1, ch_mag_50_over_42_with_sigma2);
+    xmm1 = simde_mm_adds_epi16(psi_a_p7_m3, y0_m_7_3);
+    simde__m128i bit_met_p7_m3 = simde_mm_subs_epi16(xmm1, ch_mag_58_over_42_with_sigma2);
+    xmm1 = simde_mm_adds_epi16(psi_a_p7_m5, y0_m_7_5);
+    simde__m128i bit_met_p7_m5 = simde_mm_subs_epi16(xmm1, ch_mag_74_over_42_with_sigma2);
+    xmm1 = simde_mm_adds_epi16(psi_a_p7_m7, y0_m_7_7);
+    simde__m128i bit_met_p7_m7 = simde_mm_subs_epi16(xmm1, ch_mag_98_over_42_with_sigma2);
+    xmm1 = simde_mm_adds_epi16(psi_a_p5_p7, y0_p_5_7);
+    simde__m128i bit_met_p5_p7 = simde_mm_subs_epi16(xmm1, ch_mag_74_over_42_with_sigma2);
+    xmm1 = simde_mm_adds_epi16(psi_a_p5_p5, y0_p_5_5);
+    simde__m128i bit_met_p5_p5 = simde_mm_subs_epi16(xmm1, ch_mag_50_over_42_with_sigma2);
+    xmm1 = simde_mm_adds_epi16(psi_a_p5_p3, y0_p_5_3);
+    simde__m128i bit_met_p5_p3 = simde_mm_subs_epi16(xmm1, ch_mag_34_over_42_with_sigma2);
+    xmm1 = simde_mm_adds_epi16(psi_a_p5_p1, y0_p_5_1);
+    simde__m128i bit_met_p5_p1 = simde_mm_subs_epi16(xmm1, ch_mag_26_over_42_with_sigma2);
+    xmm1 = simde_mm_adds_epi16(psi_a_p5_m1, y0_m_5_1);
+    simde__m128i bit_met_p5_m1 = simde_mm_subs_epi16(xmm1, ch_mag_26_over_42_with_sigma2);
+    xmm1 = simde_mm_adds_epi16(psi_a_p5_m3, y0_m_5_3);
+    simde__m128i bit_met_p5_m3 = simde_mm_subs_epi16(xmm1, ch_mag_34_over_42_with_sigma2);
+    xmm1 = simde_mm_adds_epi16(psi_a_p5_m5, y0_m_5_5);
+    simde__m128i bit_met_p5_m5 = simde_mm_subs_epi16(xmm1, ch_mag_50_over_42_with_sigma2);
+    xmm1 = simde_mm_adds_epi16(psi_a_p5_m7, y0_m_5_7);
+    simde__m128i bit_met_p5_m7 = simde_mm_subs_epi16(xmm1, ch_mag_74_over_42_with_sigma2);
+    xmm1 = simde_mm_adds_epi16(psi_a_p3_p7, y0_p_3_7);
+    simde__m128i bit_met_p3_p7 = simde_mm_subs_epi16(xmm1, ch_mag_58_over_42_with_sigma2);
+    xmm1 = simde_mm_adds_epi16(psi_a_p3_p5, y0_p_3_5);
+    simde__m128i bit_met_p3_p5 = simde_mm_subs_epi16(xmm1, ch_mag_34_over_42_with_sigma2);
+    xmm1 = simde_mm_adds_epi16(psi_a_p3_p3, y0_p_3_3);
+    simde__m128i bit_met_p3_p3 = simde_mm_subs_epi16(xmm1, ch_mag_18_over_42_with_sigma2);
+    xmm1 = simde_mm_adds_epi16(psi_a_p3_p1, y0_p_3_1);
+    simde__m128i bit_met_p3_p1 = simde_mm_subs_epi16(xmm1, ch_mag_10_over_42_with_sigma2);
+    xmm1 = simde_mm_adds_epi16(psi_a_p3_m1, y0_m_3_1);
+    simde__m128i bit_met_p3_m1 = simde_mm_subs_epi16(xmm1, ch_mag_10_over_42_with_sigma2);
+    xmm1 = simde_mm_adds_epi16(psi_a_p3_m3, y0_m_3_3);
+    simde__m128i bit_met_p3_m3 = simde_mm_subs_epi16(xmm1, ch_mag_18_over_42_with_sigma2);
+    xmm1 = simde_mm_adds_epi16(psi_a_p3_m5, y0_m_3_5);
+    simde__m128i bit_met_p3_m5 = simde_mm_subs_epi16(xmm1, ch_mag_34_over_42_with_sigma2);
+    xmm1 = simde_mm_adds_epi16(psi_a_p3_m7, y0_m_3_7);
+    simde__m128i bit_met_p3_m7 = simde_mm_subs_epi16(xmm1, ch_mag_58_over_42_with_sigma2);
+    xmm1 = simde_mm_adds_epi16(psi_a_p1_p7, y0_p_1_7);
+    simde__m128i bit_met_p1_p7 = simde_mm_subs_epi16(xmm1, ch_mag_50_over_42_with_sigma2);
+    xmm1 = simde_mm_adds_epi16(psi_a_p1_p5, y0_p_1_5);
+    simde__m128i bit_met_p1_p5 = simde_mm_subs_epi16(xmm1, ch_mag_26_over_42_with_sigma2);
+    xmm1 = simde_mm_adds_epi16(psi_a_p1_p3, y0_p_1_3);
+    simde__m128i bit_met_p1_p3 = simde_mm_subs_epi16(xmm1, ch_mag_10_over_42_with_sigma2);
+    xmm1 = simde_mm_adds_epi16(psi_a_p1_p1, y0_p_1_1);
+    simde__m128i bit_met_p1_p1 = simde_mm_subs_epi16(xmm1, ch_mag_2_over_42_with_sigma2);
+    xmm1 = simde_mm_adds_epi16(psi_a_p1_m1, y0_m_1_1);
+    simde__m128i bit_met_p1_m1 = simde_mm_subs_epi16(xmm1, ch_mag_2_over_42_with_sigma2);
+    xmm1 = simde_mm_adds_epi16(psi_a_p1_m3, y0_m_1_3);
+    simde__m128i bit_met_p1_m3 = simde_mm_subs_epi16(xmm1, ch_mag_10_over_42_with_sigma2);
+    xmm1 = simde_mm_adds_epi16(psi_a_p1_m5, y0_m_1_5);
+    simde__m128i bit_met_p1_m5 = simde_mm_subs_epi16(xmm1, ch_mag_26_over_42_with_sigma2);
+    xmm1 = simde_mm_adds_epi16(psi_a_p1_m7, y0_m_1_7);
+    simde__m128i bit_met_p1_m7 = simde_mm_subs_epi16(xmm1, ch_mag_50_over_42_with_sigma2);
+
+    xmm1 = simde_mm_subs_epi16(psi_a_m1_p7, y0_m_1_7);
+    simde__m128i bit_met_m1_p7 = simde_mm_subs_epi16(xmm1, ch_mag_50_over_42_with_sigma2);
+    xmm1 = simde_mm_subs_epi16(psi_a_m1_p5, y0_m_1_5);
+    simde__m128i bit_met_m1_p5 = simde_mm_subs_epi16(xmm1, ch_mag_26_over_42_with_sigma2);
+    xmm1 = simde_mm_subs_epi16(psi_a_m1_p3, y0_m_1_3);
+    simde__m128i bit_met_m1_p3 = simde_mm_subs_epi16(xmm1, ch_mag_10_over_42_with_sigma2);
+    xmm1 = simde_mm_subs_epi16(psi_a_m1_p1, y0_m_1_1);
+    simde__m128i bit_met_m1_p1 = simde_mm_subs_epi16(xmm1, ch_mag_2_over_42_with_sigma2);
+    xmm1 = simde_mm_subs_epi16(psi_a_m1_m1, y0_p_1_1);
+    simde__m128i bit_met_m1_m1 = simde_mm_subs_epi16(xmm1, ch_mag_2_over_42_with_sigma2);
+    xmm1 = simde_mm_subs_epi16(psi_a_m1_m3, y0_p_1_3);
+    simde__m128i bit_met_m1_m3 = simde_mm_subs_epi16(xmm1, ch_mag_10_over_42_with_sigma2);
+    xmm1 = simde_mm_subs_epi16(psi_a_m1_m5, y0_p_1_5);
+    simde__m128i bit_met_m1_m5 = simde_mm_subs_epi16(xmm1, ch_mag_26_over_42_with_sigma2);
+    xmm1 = simde_mm_subs_epi16(psi_a_m1_m7, y0_p_1_7);
+    simde__m128i bit_met_m1_m7 = simde_mm_subs_epi16(xmm1, ch_mag_50_over_42_with_sigma2);
+    xmm1 = simde_mm_subs_epi16(psi_a_m3_p7, y0_m_3_7);
+    simde__m128i bit_met_m3_p7 = simde_mm_subs_epi16(xmm1, ch_mag_58_over_42_with_sigma2);
+    xmm1 = simde_mm_subs_epi16(psi_a_m3_p5, y0_m_3_5);
+    simde__m128i bit_met_m3_p5 = simde_mm_subs_epi16(xmm1, ch_mag_34_over_42_with_sigma2);
+    xmm1 = simde_mm_subs_epi16(psi_a_m3_p3, y0_m_3_3);
+    simde__m128i bit_met_m3_p3 = simde_mm_subs_epi16(xmm1, ch_mag_18_over_42_with_sigma2);
+    xmm1 = simde_mm_subs_epi16(psi_a_m3_p1, y0_m_3_1);
+    simde__m128i bit_met_m3_p1 = simde_mm_subs_epi16(xmm1, ch_mag_10_over_42_with_sigma2);
+    xmm1 = simde_mm_subs_epi16(psi_a_m3_m1, y0_p_3_1);
+    simde__m128i bit_met_m3_m1 = simde_mm_subs_epi16(xmm1, ch_mag_10_over_42_with_sigma2);
+    xmm1 = simde_mm_subs_epi16(psi_a_m3_m3, y0_p_3_3);
+    simde__m128i bit_met_m3_m3 = simde_mm_subs_epi16(xmm1, ch_mag_18_over_42_with_sigma2);
+    xmm1 = simde_mm_subs_epi16(psi_a_m3_m5, y0_p_3_5);
+    simde__m128i bit_met_m3_m5 = simde_mm_subs_epi16(xmm1, ch_mag_34_over_42_with_sigma2);
+    xmm1 = simde_mm_subs_epi16(psi_a_m3_m7, y0_p_3_7);
+    simde__m128i bit_met_m3_m7 = simde_mm_subs_epi16(xmm1, ch_mag_58_over_42_with_sigma2);
+    xmm1 = simde_mm_subs_epi16(psi_a_m5_p7, y0_m_5_7);
+    simde__m128i bit_met_m5_p7 = simde_mm_subs_epi16(xmm1, ch_mag_74_over_42_with_sigma2);
+    xmm1 = simde_mm_subs_epi16(psi_a_m5_p5, y0_m_5_5);
+    simde__m128i bit_met_m5_p5 = simde_mm_subs_epi16(xmm1, ch_mag_50_over_42_with_sigma2);
+    xmm1 = simde_mm_subs_epi16(psi_a_m5_p3, y0_m_5_3);
+    simde__m128i bit_met_m5_p3 = simde_mm_subs_epi16(xmm1, ch_mag_34_over_42_with_sigma2);
+    xmm1 = simde_mm_subs_epi16(psi_a_m5_p1, y0_m_5_1);
+    simde__m128i bit_met_m5_p1 = simde_mm_subs_epi16(xmm1, ch_mag_26_over_42_with_sigma2);
+    xmm1 = simde_mm_subs_epi16(psi_a_m5_m1, y0_p_5_1);
+    simde__m128i bit_met_m5_m1 = simde_mm_subs_epi16(xmm1, ch_mag_26_over_42_with_sigma2);
+    xmm1 = simde_mm_subs_epi16(psi_a_m5_m3, y0_p_5_3);
+    simde__m128i bit_met_m5_m3 = simde_mm_subs_epi16(xmm1, ch_mag_34_over_42_with_sigma2);
+    xmm1 = simde_mm_subs_epi16(psi_a_m5_m5, y0_p_5_5);
+    simde__m128i bit_met_m5_m5 = simde_mm_subs_epi16(xmm1, ch_mag_50_over_42_with_sigma2);
+    xmm1 = simde_mm_subs_epi16(psi_a_m5_m7, y0_p_5_7);
+    simde__m128i bit_met_m5_m7 = simde_mm_subs_epi16(xmm1, ch_mag_74_over_42_with_sigma2);
+    xmm1 = simde_mm_subs_epi16(psi_a_m7_p7, y0_m_7_7);
+    simde__m128i bit_met_m7_p7 = simde_mm_subs_epi16(xmm1, ch_mag_98_over_42_with_sigma2);
+    xmm1 = simde_mm_subs_epi16(psi_a_m7_p5, y0_m_7_5);
+    simde__m128i bit_met_m7_p5 = simde_mm_subs_epi16(xmm1, ch_mag_74_over_42_with_sigma2);
+    xmm1 = simde_mm_subs_epi16(psi_a_m7_p3, y0_m_7_3);
+    simde__m128i bit_met_m7_p3 = simde_mm_subs_epi16(xmm1, ch_mag_58_over_42_with_sigma2);
+    xmm1 = simde_mm_subs_epi16(psi_a_m7_p1, y0_m_7_1);
+    simde__m128i bit_met_m7_p1 = simde_mm_subs_epi16(xmm1, ch_mag_50_over_42_with_sigma2);
+    xmm1 = simde_mm_subs_epi16(psi_a_m7_m1, y0_p_7_1);
+    simde__m128i bit_met_m7_m1 = simde_mm_subs_epi16(xmm1, ch_mag_50_over_42_with_sigma2);
+    xmm1 = simde_mm_subs_epi16(psi_a_m7_m3, y0_p_7_3);
+    simde__m128i bit_met_m7_m3 = simde_mm_subs_epi16(xmm1, ch_mag_58_over_42_with_sigma2);
+    xmm1 = simde_mm_subs_epi16(psi_a_m7_m5, y0_p_7_5);
+    simde__m128i bit_met_m7_m5 = simde_mm_subs_epi16(xmm1, ch_mag_74_over_42_with_sigma2);
+    xmm1 = simde_mm_subs_epi16(psi_a_m7_m7, y0_p_7_7);
+    simde__m128i bit_met_m7_m7 = simde_mm_subs_epi16(xmm1, ch_mag_98_over_42_with_sigma2);
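+
+    // All six detections below implement the same max-log rule: bit_met_a_b
+    // is the branch metric of the candidate 64-QAM point with real level a
+    // and imaginary level b (a, b in {+-1,+-3,+-5,+-7}), and each LLR is
+    // max{metrics with bit=0} - max{metrics with bit=1}. Following the LTE
+    // Gray mapping, bits 1/2 split on the sign of the real/imaginary level
+    // (the m* vs p* index), bits 3/4 on its coarse magnitude ({1,3} vs
+    // {5,7}) and bits 5/6 on its fine magnitude ({3,5} vs {1,7}), so each
+    // block takes maxima over a different partition of the same 64 metrics.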
 
     // Detection for 1st bit (LTE mapping)
     // bit = 1
-    xmm0 = _mm_max_epi16(bit_met_m7_p7, bit_met_m7_p5);
-    xmm1 = _mm_max_epi16(bit_met_m7_p3, bit_met_m7_p1);
-    xmm2 = _mm_max_epi16(bit_met_m7_m1, bit_met_m7_m3);
-    xmm3 = _mm_max_epi16(bit_met_m7_m5, bit_met_m7_m7);
-    xmm4 = _mm_max_epi16(xmm0, xmm1);
-    xmm5 = _mm_max_epi16(xmm2, xmm3);
-    simde__m128i logmax_den_re0 = _mm_max_epi16(xmm4, xmm5);
-    xmm0 = _mm_max_epi16(bit_met_m5_p7, bit_met_m5_p5);
-    xmm1 = _mm_max_epi16(bit_met_m5_p3, bit_met_m5_p1);
-    xmm2 = _mm_max_epi16(bit_met_m5_m1, bit_met_m5_m3);
-    xmm3 = _mm_max_epi16(bit_met_m5_m5, bit_met_m5_m7);
-    xmm4 = _mm_max_epi16(xmm0, xmm1);
-    xmm5 = _mm_max_epi16(xmm2, xmm3);
-    logmax_den_re0 = _mm_max_epi16(logmax_den_re0, xmm4);
-    logmax_den_re0 = _mm_max_epi16(logmax_den_re0, xmm5);
-    xmm0 = _mm_max_epi16(bit_met_m3_p7, bit_met_m3_p5);
-    xmm1 = _mm_max_epi16(bit_met_m3_p3, bit_met_m3_p1);
-    xmm2 = _mm_max_epi16(bit_met_m3_m1, bit_met_m3_m3);
-    xmm3 = _mm_max_epi16(bit_met_m3_m5, bit_met_m3_m7);
-    xmm4 = _mm_max_epi16(xmm0, xmm1);
-    xmm5 = _mm_max_epi16(xmm2, xmm3);
-    logmax_den_re0 = _mm_max_epi16(logmax_den_re0, xmm4);
-    logmax_den_re0 = _mm_max_epi16(logmax_den_re0, xmm5);
-    xmm0 = _mm_max_epi16(bit_met_m1_p7, bit_met_m1_p5);
-    xmm1 = _mm_max_epi16(bit_met_m1_p3, bit_met_m1_p1);
-    xmm2 = _mm_max_epi16(bit_met_m1_m1, bit_met_m1_m3);
-    xmm3 = _mm_max_epi16(bit_met_m1_m5, bit_met_m1_m7);
-    xmm4 = _mm_max_epi16(xmm0, xmm1);
-    xmm5 = _mm_max_epi16(xmm2, xmm3);
-    logmax_den_re0 = _mm_max_epi16(logmax_den_re0, xmm4);
-    logmax_den_re0 = _mm_max_epi16(logmax_den_re0, xmm5);
+    xmm0 = simde_mm_max_epi16(bit_met_m7_p7, bit_met_m7_p5);
+    xmm1 = simde_mm_max_epi16(bit_met_m7_p3, bit_met_m7_p1);
+    xmm2 = simde_mm_max_epi16(bit_met_m7_m1, bit_met_m7_m3);
+    xmm3 = simde_mm_max_epi16(bit_met_m7_m5, bit_met_m7_m7);
+    xmm4 = simde_mm_max_epi16(xmm0, xmm1);
+    xmm5 = simde_mm_max_epi16(xmm2, xmm3);
+    simde__m128i logmax_den_re0 = simde_mm_max_epi16(xmm4, xmm5);
+    xmm0 = simde_mm_max_epi16(bit_met_m5_p7, bit_met_m5_p5);
+    xmm1 = simde_mm_max_epi16(bit_met_m5_p3, bit_met_m5_p1);
+    xmm2 = simde_mm_max_epi16(bit_met_m5_m1, bit_met_m5_m3);
+    xmm3 = simde_mm_max_epi16(bit_met_m5_m5, bit_met_m5_m7);
+    xmm4 = simde_mm_max_epi16(xmm0, xmm1);
+    xmm5 = simde_mm_max_epi16(xmm2, xmm3);
+    logmax_den_re0 = simde_mm_max_epi16(logmax_den_re0, xmm4);
+    logmax_den_re0 = simde_mm_max_epi16(logmax_den_re0, xmm5);
+    xmm0 = simde_mm_max_epi16(bit_met_m3_p7, bit_met_m3_p5);
+    xmm1 = simde_mm_max_epi16(bit_met_m3_p3, bit_met_m3_p1);
+    xmm2 = simde_mm_max_epi16(bit_met_m3_m1, bit_met_m3_m3);
+    xmm3 = simde_mm_max_epi16(bit_met_m3_m5, bit_met_m3_m7);
+    xmm4 = simde_mm_max_epi16(xmm0, xmm1);
+    xmm5 = simde_mm_max_epi16(xmm2, xmm3);
+    logmax_den_re0 = simde_mm_max_epi16(logmax_den_re0, xmm4);
+    logmax_den_re0 = simde_mm_max_epi16(logmax_den_re0, xmm5);
+    xmm0 = simde_mm_max_epi16(bit_met_m1_p7, bit_met_m1_p5);
+    xmm1 = simde_mm_max_epi16(bit_met_m1_p3, bit_met_m1_p1);
+    xmm2 = simde_mm_max_epi16(bit_met_m1_m1, bit_met_m1_m3);
+    xmm3 = simde_mm_max_epi16(bit_met_m1_m5, bit_met_m1_m7);
+    xmm4 = simde_mm_max_epi16(xmm0, xmm1);
+    xmm5 = simde_mm_max_epi16(xmm2, xmm3);
+    logmax_den_re0 = simde_mm_max_epi16(logmax_den_re0, xmm4);
+    logmax_den_re0 = simde_mm_max_epi16(logmax_den_re0, xmm5);
 
     // bit = 0
-    xmm0 = _mm_max_epi16(bit_met_p7_p7, bit_met_p7_p5);
-    xmm1 = _mm_max_epi16(bit_met_p7_p3, bit_met_p7_p1);
-    xmm2 = _mm_max_epi16(bit_met_p7_m1, bit_met_p7_m3);
-    xmm3 = _mm_max_epi16(bit_met_p7_m5, bit_met_p7_m7);
-    xmm4 = _mm_max_epi16(xmm0, xmm1);
-    xmm5 = _mm_max_epi16(xmm2, xmm3);
-    simde__m128i logmax_num_re0 = _mm_max_epi16(xmm4, xmm5);
-    xmm0 = _mm_max_epi16(bit_met_p5_p7, bit_met_p5_p5);
-    xmm1 = _mm_max_epi16(bit_met_p5_p3, bit_met_p5_p1);
-    xmm2 = _mm_max_epi16(bit_met_p5_m1, bit_met_p5_m3);
-    xmm3 = _mm_max_epi16(bit_met_p5_m5, bit_met_p5_m7);
-    xmm4 = _mm_max_epi16(xmm0, xmm1);
-    xmm5 = _mm_max_epi16(xmm2, xmm3);
-    logmax_num_re0 = _mm_max_epi16(logmax_num_re0, xmm4);
-    logmax_num_re0 = _mm_max_epi16(logmax_num_re0, xmm5);
-    xmm0 = _mm_max_epi16(bit_met_p3_p7, bit_met_p3_p5);
-    xmm1 = _mm_max_epi16(bit_met_p3_p3, bit_met_p3_p1);
-    xmm2 = _mm_max_epi16(bit_met_p3_m1, bit_met_p3_m3);
-    xmm3 = _mm_max_epi16(bit_met_p3_m5, bit_met_p3_m7);
-    xmm4 = _mm_max_epi16(xmm0, xmm1);
-    xmm5 = _mm_max_epi16(xmm2, xmm3);
-    logmax_num_re0 = _mm_max_epi16(logmax_num_re0, xmm4);
-    logmax_num_re0 = _mm_max_epi16(logmax_num_re0, xmm5);
-    xmm0 = _mm_max_epi16(bit_met_p1_p7, bit_met_p1_p5);
-    xmm1 = _mm_max_epi16(bit_met_p1_p3, bit_met_p1_p1);
-    xmm2 = _mm_max_epi16(bit_met_p1_m1, bit_met_p1_m3);
-    xmm3 = _mm_max_epi16(bit_met_p1_m5, bit_met_p1_m7);
-    xmm4 = _mm_max_epi16(xmm0, xmm1);
-    xmm5 = _mm_max_epi16(xmm2, xmm3);
-    logmax_num_re0 = _mm_max_epi16(logmax_num_re0, xmm4);
-    logmax_num_re0 = _mm_max_epi16(logmax_num_re0, xmm5);
-
-    y0r = _mm_subs_epi16(logmax_num_re0, logmax_den_re0);
+    xmm0 = simde_mm_max_epi16(bit_met_p7_p7, bit_met_p7_p5);
+    xmm1 = simde_mm_max_epi16(bit_met_p7_p3, bit_met_p7_p1);
+    xmm2 = simde_mm_max_epi16(bit_met_p7_m1, bit_met_p7_m3);
+    xmm3 = simde_mm_max_epi16(bit_met_p7_m5, bit_met_p7_m7);
+    xmm4 = simde_mm_max_epi16(xmm0, xmm1);
+    xmm5 = simde_mm_max_epi16(xmm2, xmm3);
+    simde__m128i logmax_num_re0 = simde_mm_max_epi16(xmm4, xmm5);
+    xmm0 = simde_mm_max_epi16(bit_met_p5_p7, bit_met_p5_p5);
+    xmm1 = simde_mm_max_epi16(bit_met_p5_p3, bit_met_p5_p1);
+    xmm2 = simde_mm_max_epi16(bit_met_p5_m1, bit_met_p5_m3);
+    xmm3 = simde_mm_max_epi16(bit_met_p5_m5, bit_met_p5_m7);
+    xmm4 = simde_mm_max_epi16(xmm0, xmm1);
+    xmm5 = simde_mm_max_epi16(xmm2, xmm3);
+    logmax_num_re0 = simde_mm_max_epi16(logmax_num_re0, xmm4);
+    logmax_num_re0 = simde_mm_max_epi16(logmax_num_re0, xmm5);
+    xmm0 = simde_mm_max_epi16(bit_met_p3_p7, bit_met_p3_p5);
+    xmm1 = simde_mm_max_epi16(bit_met_p3_p3, bit_met_p3_p1);
+    xmm2 = simde_mm_max_epi16(bit_met_p3_m1, bit_met_p3_m3);
+    xmm3 = simde_mm_max_epi16(bit_met_p3_m5, bit_met_p3_m7);
+    xmm4 = simde_mm_max_epi16(xmm0, xmm1);
+    xmm5 = simde_mm_max_epi16(xmm2, xmm3);
+    logmax_num_re0 = simde_mm_max_epi16(logmax_num_re0, xmm4);
+    logmax_num_re0 = simde_mm_max_epi16(logmax_num_re0, xmm5);
+    xmm0 = simde_mm_max_epi16(bit_met_p1_p7, bit_met_p1_p5);
+    xmm1 = simde_mm_max_epi16(bit_met_p1_p3, bit_met_p1_p1);
+    xmm2 = simde_mm_max_epi16(bit_met_p1_m1, bit_met_p1_m3);
+    xmm3 = simde_mm_max_epi16(bit_met_p1_m5, bit_met_p1_m7);
+    xmm4 = simde_mm_max_epi16(xmm0, xmm1);
+    xmm5 = simde_mm_max_epi16(xmm2, xmm3);
+    logmax_num_re0 = simde_mm_max_epi16(logmax_num_re0, xmm4);
+    logmax_num_re0 = simde_mm_max_epi16(logmax_num_re0, xmm5);
+
+    y0r = simde_mm_subs_epi16(logmax_num_re0, logmax_den_re0);
 
     // Detection for 2nd bit (LTE mapping)
     // bit = 1
-    xmm0 = _mm_max_epi16(bit_met_p7_m1, bit_met_p5_m1);
-    xmm1 = _mm_max_epi16(bit_met_p3_m1, bit_met_p1_m1);
-    xmm2 = _mm_max_epi16(bit_met_m1_m1, bit_met_m3_m1);
-    xmm3 = _mm_max_epi16(bit_met_m5_m1, bit_met_m7_m1);
-    xmm4 = _mm_max_epi16(xmm0, xmm1);
-    xmm5 = _mm_max_epi16(xmm2, xmm3);
-    logmax_den_re0 = _mm_max_epi16(xmm4, xmm5);
-    xmm0 = _mm_max_epi16(bit_met_p7_m3, bit_met_p5_m3);
-    xmm1 = _mm_max_epi16(bit_met_p3_m3, bit_met_p1_m3);
-    xmm2 = _mm_max_epi16(bit_met_m1_m3, bit_met_m3_m3);
-    xmm3 = _mm_max_epi16(bit_met_m5_m3, bit_met_m7_m3);
-    xmm4 = _mm_max_epi16(xmm0, xmm1);
-    xmm5 = _mm_max_epi16(xmm2, xmm3);
-    logmax_den_re0 = _mm_max_epi16(logmax_den_re0, xmm4);
-    logmax_den_re0 = _mm_max_epi16(logmax_den_re0, xmm5);
-    xmm0 = _mm_max_epi16(bit_met_p7_m5, bit_met_p5_m5);
-    xmm1 = _mm_max_epi16(bit_met_p3_m5, bit_met_p1_m5);
-    xmm2 = _mm_max_epi16(bit_met_m1_m5, bit_met_m3_m5);
-    xmm3 = _mm_max_epi16(bit_met_m5_m5, bit_met_m7_m5);
-    xmm4 = _mm_max_epi16(xmm0, xmm1);
-    xmm5 = _mm_max_epi16(xmm2, xmm3);
-    logmax_den_re0 = _mm_max_epi16(logmax_den_re0, xmm4);
-    logmax_den_re0 = _mm_max_epi16(logmax_den_re0, xmm5);
-    xmm0 = _mm_max_epi16(bit_met_p7_m7, bit_met_p5_m7);
-    xmm1 = _mm_max_epi16(bit_met_p3_m7, bit_met_p1_m7);
-    xmm2 = _mm_max_epi16(bit_met_m1_m7, bit_met_m3_m7);
-    xmm3 = _mm_max_epi16(bit_met_m5_m7, bit_met_m7_m7);
-    xmm4 = _mm_max_epi16(xmm0, xmm1);
-    xmm5 = _mm_max_epi16(xmm2, xmm3);
-    logmax_den_re0 = _mm_max_epi16(logmax_den_re0, xmm4);
-    logmax_den_re0 = _mm_max_epi16(logmax_den_re0, xmm5);
+    xmm0 = simde_mm_max_epi16(bit_met_p7_m1, bit_met_p5_m1);
+    xmm1 = simde_mm_max_epi16(bit_met_p3_m1, bit_met_p1_m1);
+    xmm2 = simde_mm_max_epi16(bit_met_m1_m1, bit_met_m3_m1);
+    xmm3 = simde_mm_max_epi16(bit_met_m5_m1, bit_met_m7_m1);
+    xmm4 = simde_mm_max_epi16(xmm0, xmm1);
+    xmm5 = simde_mm_max_epi16(xmm2, xmm3);
+    logmax_den_re0 = simde_mm_max_epi16(xmm4, xmm5);
+    xmm0 = simde_mm_max_epi16(bit_met_p7_m3, bit_met_p5_m3);
+    xmm1 = simde_mm_max_epi16(bit_met_p3_m3, bit_met_p1_m3);
+    xmm2 = simde_mm_max_epi16(bit_met_m1_m3, bit_met_m3_m3);
+    xmm3 = simde_mm_max_epi16(bit_met_m5_m3, bit_met_m7_m3);
+    xmm4 = simde_mm_max_epi16(xmm0, xmm1);
+    xmm5 = simde_mm_max_epi16(xmm2, xmm3);
+    logmax_den_re0 = simde_mm_max_epi16(logmax_den_re0, xmm4);
+    logmax_den_re0 = simde_mm_max_epi16(logmax_den_re0, xmm5);
+    xmm0 = simde_mm_max_epi16(bit_met_p7_m5, bit_met_p5_m5);
+    xmm1 = simde_mm_max_epi16(bit_met_p3_m5, bit_met_p1_m5);
+    xmm2 = simde_mm_max_epi16(bit_met_m1_m5, bit_met_m3_m5);
+    xmm3 = simde_mm_max_epi16(bit_met_m5_m5, bit_met_m7_m5);
+    xmm4 = simde_mm_max_epi16(xmm0, xmm1);
+    xmm5 = simde_mm_max_epi16(xmm2, xmm3);
+    logmax_den_re0 = simde_mm_max_epi16(logmax_den_re0, xmm4);
+    logmax_den_re0 = simde_mm_max_epi16(logmax_den_re0, xmm5);
+    xmm0 = simde_mm_max_epi16(bit_met_p7_m7, bit_met_p5_m7);
+    xmm1 = simde_mm_max_epi16(bit_met_p3_m7, bit_met_p1_m7);
+    xmm2 = simde_mm_max_epi16(bit_met_m1_m7, bit_met_m3_m7);
+    xmm3 = simde_mm_max_epi16(bit_met_m5_m7, bit_met_m7_m7);
+    xmm4 = simde_mm_max_epi16(xmm0, xmm1);
+    xmm5 = simde_mm_max_epi16(xmm2, xmm3);
+    logmax_den_re0 = simde_mm_max_epi16(logmax_den_re0, xmm4);
+    logmax_den_re0 = simde_mm_max_epi16(logmax_den_re0, xmm5);
 
     // bit = 0
-    xmm0 = _mm_max_epi16(bit_met_p7_p1, bit_met_p5_p1);
-    xmm1 = _mm_max_epi16(bit_met_p3_p1, bit_met_p1_p1);
-    xmm2 = _mm_max_epi16(bit_met_m1_p1, bit_met_m3_p1);
-    xmm3 = _mm_max_epi16(bit_met_m5_p1, bit_met_m7_p1);
-    xmm4 = _mm_max_epi16(xmm0, xmm1);
-    xmm5 = _mm_max_epi16(xmm2, xmm3);
-    logmax_num_re0 = _mm_max_epi16(xmm4, xmm5);
-    xmm0 = _mm_max_epi16(bit_met_p7_p3, bit_met_p5_p3);
-    xmm1 = _mm_max_epi16(bit_met_p3_p3, bit_met_p1_p3);
-    xmm2 = _mm_max_epi16(bit_met_m1_p3, bit_met_m3_p3);
-    xmm3 = _mm_max_epi16(bit_met_m5_p3, bit_met_m7_p3);
-    xmm4 = _mm_max_epi16(xmm0, xmm1);
-    xmm5 = _mm_max_epi16(xmm2, xmm3);
-    logmax_num_re0 = _mm_max_epi16(logmax_num_re0, xmm4);
-    logmax_num_re0 = _mm_max_epi16(logmax_num_re0, xmm5);
-    xmm0 = _mm_max_epi16(bit_met_p7_p5, bit_met_p5_p5);
-    xmm1 = _mm_max_epi16(bit_met_p3_p5, bit_met_p1_p5);
-    xmm2 = _mm_max_epi16(bit_met_m1_p5, bit_met_m3_p5);
-    xmm3 = _mm_max_epi16(bit_met_m5_p5, bit_met_m7_p5);
-    xmm4 = _mm_max_epi16(xmm0, xmm1);
-    xmm5 = _mm_max_epi16(xmm2, xmm3);
-    logmax_num_re0 = _mm_max_epi16(logmax_num_re0, xmm4);
-    logmax_num_re0 = _mm_max_epi16(logmax_num_re0, xmm5);
-    xmm0 = _mm_max_epi16(bit_met_p7_p7, bit_met_p5_p7);
-    xmm1 = _mm_max_epi16(bit_met_p3_p7, bit_met_p1_p7);
-    xmm2 = _mm_max_epi16(bit_met_m1_p7, bit_met_m3_p7);
-    xmm3 = _mm_max_epi16(bit_met_m5_p7, bit_met_m7_p7);
-    xmm4 = _mm_max_epi16(xmm0, xmm1);
-    xmm5 = _mm_max_epi16(xmm2, xmm3);
-    logmax_num_re0 = _mm_max_epi16(logmax_num_re0, xmm4);
-    logmax_num_re0 = _mm_max_epi16(logmax_num_re0, xmm5);
-
-    y1r = _mm_subs_epi16(logmax_num_re0, logmax_den_re0);
+    xmm0 = simde_mm_max_epi16(bit_met_p7_p1, bit_met_p5_p1);
+    xmm1 = simde_mm_max_epi16(bit_met_p3_p1, bit_met_p1_p1);
+    xmm2 = simde_mm_max_epi16(bit_met_m1_p1, bit_met_m3_p1);
+    xmm3 = simde_mm_max_epi16(bit_met_m5_p1, bit_met_m7_p1);
+    xmm4 = simde_mm_max_epi16(xmm0, xmm1);
+    xmm5 = simde_mm_max_epi16(xmm2, xmm3);
+    logmax_num_re0 = simde_mm_max_epi16(xmm4, xmm5);
+    xmm0 = simde_mm_max_epi16(bit_met_p7_p3, bit_met_p5_p3);
+    xmm1 = simde_mm_max_epi16(bit_met_p3_p3, bit_met_p1_p3);
+    xmm2 = simde_mm_max_epi16(bit_met_m1_p3, bit_met_m3_p3);
+    xmm3 = simde_mm_max_epi16(bit_met_m5_p3, bit_met_m7_p3);
+    xmm4 = simde_mm_max_epi16(xmm0, xmm1);
+    xmm5 = simde_mm_max_epi16(xmm2, xmm3);
+    logmax_num_re0 = simde_mm_max_epi16(logmax_num_re0, xmm4);
+    logmax_num_re0 = simde_mm_max_epi16(logmax_num_re0, xmm5);
+    xmm0 = simde_mm_max_epi16(bit_met_p7_p5, bit_met_p5_p5);
+    xmm1 = simde_mm_max_epi16(bit_met_p3_p5, bit_met_p1_p5);
+    xmm2 = simde_mm_max_epi16(bit_met_m1_p5, bit_met_m3_p5);
+    xmm3 = simde_mm_max_epi16(bit_met_m5_p5, bit_met_m7_p5);
+    xmm4 = simde_mm_max_epi16(xmm0, xmm1);
+    xmm5 = simde_mm_max_epi16(xmm2, xmm3);
+    logmax_num_re0 = simde_mm_max_epi16(logmax_num_re0, xmm4);
+    logmax_num_re0 = simde_mm_max_epi16(logmax_num_re0, xmm5);
+    xmm0 = simde_mm_max_epi16(bit_met_p7_p7, bit_met_p5_p7);
+    xmm1 = simde_mm_max_epi16(bit_met_p3_p7, bit_met_p1_p7);
+    xmm2 = simde_mm_max_epi16(bit_met_m1_p7, bit_met_m3_p7);
+    xmm3 = simde_mm_max_epi16(bit_met_m5_p7, bit_met_m7_p7);
+    xmm4 = simde_mm_max_epi16(xmm0, xmm1);
+    xmm5 = simde_mm_max_epi16(xmm2, xmm3);
+    logmax_num_re0 = simde_mm_max_epi16(logmax_num_re0, xmm4);
+    logmax_num_re0 = simde_mm_max_epi16(logmax_num_re0, xmm5);
+
+    y1r = simde_mm_subs_epi16(logmax_num_re0, logmax_den_re0);
 
     // Detection for 3rd bit (LTE mapping)
-    xmm0 = _mm_max_epi16(bit_met_m7_m7, bit_met_m7_m5);
-    xmm1 = _mm_max_epi16(bit_met_m7_m3, bit_met_m7_m1);
-    xmm2 = _mm_max_epi16(bit_met_m7_p1, bit_met_m7_p3);
-    xmm3 = _mm_max_epi16(bit_met_m7_p5, bit_met_m7_p7);
-    xmm4 = _mm_max_epi16(xmm0, xmm1);
-    xmm5 = _mm_max_epi16(xmm2, xmm3);
-    logmax_den_re0 = _mm_max_epi16(xmm4, xmm5);
-    xmm0 = _mm_max_epi16(bit_met_m5_m7, bit_met_m5_m5);
-    xmm1 = _mm_max_epi16(bit_met_m5_m3, bit_met_m5_m1);
-    xmm2 = _mm_max_epi16(bit_met_m5_p1, bit_met_m5_p3);
-    xmm3 = _mm_max_epi16(bit_met_m5_p5, bit_met_m5_p7);
-    xmm4 = _mm_max_epi16(xmm0, xmm1);
-    xmm5 = _mm_max_epi16(xmm2, xmm3);
-    logmax_den_re0 = _mm_max_epi16(logmax_den_re0, xmm4);
-    logmax_den_re0 = _mm_max_epi16(logmax_den_re0, xmm5);
-    xmm0 = _mm_max_epi16(bit_met_p5_m7, bit_met_p5_m5);
-    xmm1 = _mm_max_epi16(bit_met_p5_m3, bit_met_p5_m1);
-    xmm2 = _mm_max_epi16(bit_met_p5_p1, bit_met_p5_p3);
-    xmm3 = _mm_max_epi16(bit_met_p5_p5, bit_met_p5_p7);
-    xmm4 = _mm_max_epi16(xmm0, xmm1);
-    xmm5 = _mm_max_epi16(xmm2, xmm3);
-    logmax_den_re0 = _mm_max_epi16(logmax_den_re0, xmm4);
-    logmax_den_re0 = _mm_max_epi16(logmax_den_re0, xmm5);
-    xmm0 = _mm_max_epi16(bit_met_p7_m7, bit_met_p7_m5);
-    xmm1 = _mm_max_epi16(bit_met_p7_m3, bit_met_p7_m1);
-    xmm2 = _mm_max_epi16(bit_met_p7_p1, bit_met_p7_p3);
-    xmm3 = _mm_max_epi16(bit_met_p7_p5, bit_met_p7_p7);
-    xmm4 = _mm_max_epi16(xmm0, xmm1);
-    xmm5 = _mm_max_epi16(xmm2, xmm3);
-    logmax_den_re0 = _mm_max_epi16(logmax_den_re0, xmm4);
-    logmax_den_re0 = _mm_max_epi16(logmax_den_re0, xmm5);
-
-    xmm0 = _mm_max_epi16(bit_met_m3_m7, bit_met_m3_m5);
-    xmm1 = _mm_max_epi16(bit_met_m3_m3, bit_met_m3_m1);
-    xmm2 = _mm_max_epi16(bit_met_m3_p1, bit_met_m3_p3);
-    xmm3 = _mm_max_epi16(bit_met_m3_p5, bit_met_m3_p7);
-    xmm4 = _mm_max_epi16(xmm0, xmm1);
-    xmm5 = _mm_max_epi16(xmm2, xmm3);
-    logmax_num_re0 = _mm_max_epi16(xmm4, xmm5);
-    xmm0 = _mm_max_epi16(bit_met_m1_m7, bit_met_m1_m5);
-    xmm1 = _mm_max_epi16(bit_met_m1_m3, bit_met_m1_m1);
-    xmm2 = _mm_max_epi16(bit_met_m1_p1, bit_met_m1_p3);
-    xmm3 = _mm_max_epi16(bit_met_m1_p5, bit_met_m1_p7);
-    xmm4 = _mm_max_epi16(xmm0, xmm1);
-    xmm5 = _mm_max_epi16(xmm2, xmm3);
-    logmax_num_re0 = _mm_max_epi16(logmax_num_re0, xmm4);
-    logmax_num_re0 = _mm_max_epi16(logmax_num_re0, xmm5);
-    xmm0 = _mm_max_epi16(bit_met_p1_m7, bit_met_p1_m5);
-    xmm1 = _mm_max_epi16(bit_met_p1_m3, bit_met_p1_m1);
-    xmm2 = _mm_max_epi16(bit_met_p1_p1, bit_met_p1_p3);
-    xmm3 = _mm_max_epi16(bit_met_p1_p5, bit_met_p1_p7);
-    xmm4 = _mm_max_epi16(xmm0, xmm1);
-    xmm5 = _mm_max_epi16(xmm2, xmm3);
-    logmax_num_re0 = _mm_max_epi16(logmax_num_re0, xmm4);
-    logmax_num_re0 = _mm_max_epi16(logmax_num_re0, xmm5);
-    xmm0 = _mm_max_epi16(bit_met_p3_m7, bit_met_p3_m5);
-    xmm1 = _mm_max_epi16(bit_met_p3_m3, bit_met_p3_m1);
-    xmm2 = _mm_max_epi16(bit_met_p3_p1, bit_met_p3_p3);
-    xmm3 = _mm_max_epi16(bit_met_p3_p5, bit_met_p3_p7);
-    xmm4 = _mm_max_epi16(xmm0, xmm1);
-    xmm5 = _mm_max_epi16(xmm2, xmm3);
-    logmax_num_re0 = _mm_max_epi16(logmax_num_re0, xmm4);
-    logmax_num_re0 = _mm_max_epi16(logmax_num_re0, xmm5);
-
-    simde__m128i y2r = _mm_subs_epi16(logmax_num_re0, logmax_den_re0);
+    xmm0 = simde_mm_max_epi16(bit_met_m7_m7, bit_met_m7_m5);
+    xmm1 = simde_mm_max_epi16(bit_met_m7_m3, bit_met_m7_m1);
+    xmm2 = simde_mm_max_epi16(bit_met_m7_p1, bit_met_m7_p3);
+    xmm3 = simde_mm_max_epi16(bit_met_m7_p5, bit_met_m7_p7);
+    xmm4 = simde_mm_max_epi16(xmm0, xmm1);
+    xmm5 = simde_mm_max_epi16(xmm2, xmm3);
+    logmax_den_re0 = simde_mm_max_epi16(xmm4, xmm5);
+    xmm0 = simde_mm_max_epi16(bit_met_m5_m7, bit_met_m5_m5);
+    xmm1 = simde_mm_max_epi16(bit_met_m5_m3, bit_met_m5_m1);
+    xmm2 = simde_mm_max_epi16(bit_met_m5_p1, bit_met_m5_p3);
+    xmm3 = simde_mm_max_epi16(bit_met_m5_p5, bit_met_m5_p7);
+    xmm4 = simde_mm_max_epi16(xmm0, xmm1);
+    xmm5 = simde_mm_max_epi16(xmm2, xmm3);
+    logmax_den_re0 = simde_mm_max_epi16(logmax_den_re0, xmm4);
+    logmax_den_re0 = simde_mm_max_epi16(logmax_den_re0, xmm5);
+    xmm0 = simde_mm_max_epi16(bit_met_p5_m7, bit_met_p5_m5);
+    xmm1 = simde_mm_max_epi16(bit_met_p5_m3, bit_met_p5_m1);
+    xmm2 = simde_mm_max_epi16(bit_met_p5_p1, bit_met_p5_p3);
+    xmm3 = simde_mm_max_epi16(bit_met_p5_p5, bit_met_p5_p7);
+    xmm4 = simde_mm_max_epi16(xmm0, xmm1);
+    xmm5 = simde_mm_max_epi16(xmm2, xmm3);
+    logmax_den_re0 = simde_mm_max_epi16(logmax_den_re0, xmm4);
+    logmax_den_re0 = simde_mm_max_epi16(logmax_den_re0, xmm5);
+    xmm0 = simde_mm_max_epi16(bit_met_p7_m7, bit_met_p7_m5);
+    xmm1 = simde_mm_max_epi16(bit_met_p7_m3, bit_met_p7_m1);
+    xmm2 = simde_mm_max_epi16(bit_met_p7_p1, bit_met_p7_p3);
+    xmm3 = simde_mm_max_epi16(bit_met_p7_p5, bit_met_p7_p7);
+    xmm4 = simde_mm_max_epi16(xmm0, xmm1);
+    xmm5 = simde_mm_max_epi16(xmm2, xmm3);
+    logmax_den_re0 = simde_mm_max_epi16(logmax_den_re0, xmm4);
+    logmax_den_re0 = simde_mm_max_epi16(logmax_den_re0, xmm5);
+
+    xmm0 = simde_mm_max_epi16(bit_met_m3_m7, bit_met_m3_m5);
+    xmm1 = simde_mm_max_epi16(bit_met_m3_m3, bit_met_m3_m1);
+    xmm2 = simde_mm_max_epi16(bit_met_m3_p1, bit_met_m3_p3);
+    xmm3 = simde_mm_max_epi16(bit_met_m3_p5, bit_met_m3_p7);
+    xmm4 = simde_mm_max_epi16(xmm0, xmm1);
+    xmm5 = simde_mm_max_epi16(xmm2, xmm3);
+    logmax_num_re0 = simde_mm_max_epi16(xmm4, xmm5);
+    xmm0 = simde_mm_max_epi16(bit_met_m1_m7, bit_met_m1_m5);
+    xmm1 = simde_mm_max_epi16(bit_met_m1_m3, bit_met_m1_m1);
+    xmm2 = simde_mm_max_epi16(bit_met_m1_p1, bit_met_m1_p3);
+    xmm3 = simde_mm_max_epi16(bit_met_m1_p5, bit_met_m1_p7);
+    xmm4 = simde_mm_max_epi16(xmm0, xmm1);
+    xmm5 = simde_mm_max_epi16(xmm2, xmm3);
+    logmax_num_re0 = simde_mm_max_epi16(logmax_num_re0, xmm4);
+    logmax_num_re0 = simde_mm_max_epi16(logmax_num_re0, xmm5);
+    xmm0 = simde_mm_max_epi16(bit_met_p1_m7, bit_met_p1_m5);
+    xmm1 = simde_mm_max_epi16(bit_met_p1_m3, bit_met_p1_m1);
+    xmm2 = simde_mm_max_epi16(bit_met_p1_p1, bit_met_p1_p3);
+    xmm3 = simde_mm_max_epi16(bit_met_p1_p5, bit_met_p1_p7);
+    xmm4 = simde_mm_max_epi16(xmm0, xmm1);
+    xmm5 = simde_mm_max_epi16(xmm2, xmm3);
+    logmax_num_re0 = simde_mm_max_epi16(logmax_num_re0, xmm4);
+    logmax_num_re0 = simde_mm_max_epi16(logmax_num_re0, xmm5);
+    xmm0 = simde_mm_max_epi16(bit_met_p3_m7, bit_met_p3_m5);
+    xmm1 = simde_mm_max_epi16(bit_met_p3_m3, bit_met_p3_m1);
+    xmm2 = simde_mm_max_epi16(bit_met_p3_p1, bit_met_p3_p3);
+    xmm3 = simde_mm_max_epi16(bit_met_p3_p5, bit_met_p3_p7);
+    xmm4 = simde_mm_max_epi16(xmm0, xmm1);
+    xmm5 = simde_mm_max_epi16(xmm2, xmm3);
+    logmax_num_re0 = simde_mm_max_epi16(logmax_num_re0, xmm4);
+    logmax_num_re0 = simde_mm_max_epi16(logmax_num_re0, xmm5);
+
+    simde__m128i y2r = simde_mm_subs_epi16(logmax_num_re0, logmax_den_re0);
 
     // Detection for 4th bit (LTE mapping)
-    xmm0 = _mm_max_epi16(bit_met_p7_p7, bit_met_p5_p7);
-    xmm1 = _mm_max_epi16(bit_met_p3_p7, bit_met_p1_p7);
-    xmm2 = _mm_max_epi16(bit_met_m1_p7, bit_met_m3_p7);
-    xmm3 = _mm_max_epi16(bit_met_m5_p7, bit_met_m7_p7);
-    xmm4 = _mm_max_epi16(xmm0, xmm1);
-    xmm5 = _mm_max_epi16(xmm2, xmm3);
-    logmax_den_re0 = _mm_max_epi16(xmm4, xmm5);
-    xmm0 = _mm_max_epi16(bit_met_p7_p5, bit_met_p5_p5);
-    xmm1 = _mm_max_epi16(bit_met_p3_p5, bit_met_p1_p5);
-    xmm2 = _mm_max_epi16(bit_met_m1_p5, bit_met_m3_p5);
-    xmm3 = _mm_max_epi16(bit_met_m5_p5, bit_met_m5_p5);
-    xmm4 = _mm_max_epi16(xmm0, xmm1);
-    xmm5 = _mm_max_epi16(xmm2, xmm3);
-    logmax_den_re0 = _mm_max_epi16(logmax_den_re0, xmm4);
-    logmax_den_re0 = _mm_max_epi16(logmax_den_re0, xmm5);
-    xmm0 = _mm_max_epi16(bit_met_p7_m5, bit_met_p5_m5);
-    xmm1 = _mm_max_epi16(bit_met_p3_m5, bit_met_p1_m5);
-    xmm2 = _mm_max_epi16(bit_met_m1_m5, bit_met_m3_m5);
-    xmm3 = _mm_max_epi16(bit_met_m5_m5, bit_met_m7_m5);
-    xmm4 = _mm_max_epi16(xmm0, xmm1);
-    xmm5 = _mm_max_epi16(xmm2, xmm3);
-    logmax_den_re0 = _mm_max_epi16(logmax_den_re0, xmm4);
-    logmax_den_re0 = _mm_max_epi16(logmax_den_re0, xmm5);
-    xmm0 = _mm_max_epi16(bit_met_p7_m7, bit_met_p5_m7);
-    xmm1 = _mm_max_epi16(bit_met_p3_m7, bit_met_p1_m7);
-    xmm2 = _mm_max_epi16(bit_met_m1_m7, bit_met_m3_m7);
-    xmm3 = _mm_max_epi16(bit_met_m5_m7, bit_met_m7_m7);
-    xmm4 = _mm_max_epi16(xmm0, xmm1);
-    xmm5 = _mm_max_epi16(xmm2, xmm3);
-    logmax_den_re0 = _mm_max_epi16(logmax_den_re0, xmm4);
-    logmax_den_re0 = _mm_max_epi16(logmax_den_re0, xmm5);
-
-    xmm0 = _mm_max_epi16(bit_met_p7_m1, bit_met_p5_m1);
-    xmm1 = _mm_max_epi16(bit_met_p3_m1, bit_met_p1_m1);
-    xmm2 = _mm_max_epi16(bit_met_m1_m1, bit_met_m3_m1);
-    xmm3 = _mm_max_epi16(bit_met_m5_m1, bit_met_m7_m1);
-    xmm4 = _mm_max_epi16(xmm0, xmm1);
-    xmm5 = _mm_max_epi16(xmm2, xmm3);
-    logmax_num_re0 = _mm_max_epi16(xmm4, xmm5);
-    xmm0 = _mm_max_epi16(bit_met_p7_m3, bit_met_p5_m3);
-    xmm1 = _mm_max_epi16(bit_met_p3_m3, bit_met_p1_m3);
-    xmm2 = _mm_max_epi16(bit_met_m1_m3, bit_met_m3_m3);
-    xmm3 = _mm_max_epi16(bit_met_m5_m3, bit_met_m7_m3);
-    xmm4 = _mm_max_epi16(xmm0, xmm1);
-    xmm5 = _mm_max_epi16(xmm2, xmm3);
-    logmax_num_re0 = _mm_max_epi16(logmax_num_re0, xmm4);
-    logmax_num_re0 = _mm_max_epi16(logmax_num_re0, xmm5);
-    xmm0 = _mm_max_epi16(bit_met_p7_p1, bit_met_p5_p1);
-    xmm1 = _mm_max_epi16(bit_met_p3_p1, bit_met_p1_p1);
-    xmm2 = _mm_max_epi16(bit_met_m1_p1, bit_met_m3_p1);
-    xmm3 = _mm_max_epi16(bit_met_m5_p1, bit_met_m7_p1);
-    xmm4 = _mm_max_epi16(xmm0, xmm1);
-    xmm5 = _mm_max_epi16(xmm2, xmm3);
-    logmax_num_re0 = _mm_max_epi16(logmax_num_re0, xmm4);
-    logmax_num_re0 = _mm_max_epi16(logmax_num_re0, xmm5);
-    xmm0 = _mm_max_epi16(bit_met_p7_p3, bit_met_p5_p3);
-    xmm1 = _mm_max_epi16(bit_met_p3_p3, bit_met_p1_p3);
-    xmm2 = _mm_max_epi16(bit_met_m1_p3, bit_met_m3_p3);
-    xmm3 = _mm_max_epi16(bit_met_m5_p3, bit_met_m7_p3);
-    xmm4 = _mm_max_epi16(xmm0, xmm1);
-    xmm5 = _mm_max_epi16(xmm2, xmm3);
-    logmax_num_re0 = _mm_max_epi16(logmax_num_re0, xmm4);
-    logmax_num_re0 = _mm_max_epi16(logmax_num_re0, xmm5);
-
-    y0i = _mm_subs_epi16(logmax_num_re0, logmax_den_re0);
+    xmm0 = simde_mm_max_epi16(bit_met_p7_p7, bit_met_p5_p7);
+    xmm1 = simde_mm_max_epi16(bit_met_p3_p7, bit_met_p1_p7);
+    xmm2 = simde_mm_max_epi16(bit_met_m1_p7, bit_met_m3_p7);
+    xmm3 = simde_mm_max_epi16(bit_met_m5_p7, bit_met_m7_p7);
+    xmm4 = simde_mm_max_epi16(xmm0, xmm1);
+    xmm5 = simde_mm_max_epi16(xmm2, xmm3);
+    logmax_den_re0 = simde_mm_max_epi16(xmm4, xmm5);
+    xmm0 = simde_mm_max_epi16(bit_met_p7_p5, bit_met_p5_p5);
+    xmm1 = simde_mm_max_epi16(bit_met_p3_p5, bit_met_p1_p5);
+    xmm2 = simde_mm_max_epi16(bit_met_m1_p5, bit_met_m3_p5);
+    xmm3 = simde_mm_max_epi16(bit_met_m5_p5, bit_met_m7_p5); // fixed: the source duplicated bit_met_m5_p5, dropping the m7 row from this max
+    xmm4 = simde_mm_max_epi16(xmm0, xmm1);
+    xmm5 = simde_mm_max_epi16(xmm2, xmm3);
+    logmax_den_re0 = simde_mm_max_epi16(logmax_den_re0, xmm4);
+    logmax_den_re0 = simde_mm_max_epi16(logmax_den_re0, xmm5);
+    xmm0 = simde_mm_max_epi16(bit_met_p7_m5, bit_met_p5_m5);
+    xmm1 = simde_mm_max_epi16(bit_met_p3_m5, bit_met_p1_m5);
+    xmm2 = simde_mm_max_epi16(bit_met_m1_m5, bit_met_m3_m5);
+    xmm3 = simde_mm_max_epi16(bit_met_m5_m5, bit_met_m7_m5);
+    xmm4 = simde_mm_max_epi16(xmm0, xmm1);
+    xmm5 = simde_mm_max_epi16(xmm2, xmm3);
+    logmax_den_re0 = simde_mm_max_epi16(logmax_den_re0, xmm4);
+    logmax_den_re0 = simde_mm_max_epi16(logmax_den_re0, xmm5);
+    xmm0 = simde_mm_max_epi16(bit_met_p7_m7, bit_met_p5_m7);
+    xmm1 = simde_mm_max_epi16(bit_met_p3_m7, bit_met_p1_m7);
+    xmm2 = simde_mm_max_epi16(bit_met_m1_m7, bit_met_m3_m7);
+    xmm3 = simde_mm_max_epi16(bit_met_m5_m7, bit_met_m7_m7);
+    xmm4 = simde_mm_max_epi16(xmm0, xmm1);
+    xmm5 = simde_mm_max_epi16(xmm2, xmm3);
+    logmax_den_re0 = simde_mm_max_epi16(logmax_den_re0, xmm4);
+    logmax_den_re0 = simde_mm_max_epi16(logmax_den_re0, xmm5);
+
+    xmm0 = simde_mm_max_epi16(bit_met_p7_m1, bit_met_p5_m1);
+    xmm1 = simde_mm_max_epi16(bit_met_p3_m1, bit_met_p1_m1);
+    xmm2 = simde_mm_max_epi16(bit_met_m1_m1, bit_met_m3_m1);
+    xmm3 = simde_mm_max_epi16(bit_met_m5_m1, bit_met_m7_m1);
+    xmm4 = simde_mm_max_epi16(xmm0, xmm1);
+    xmm5 = simde_mm_max_epi16(xmm2, xmm3);
+    logmax_num_re0 = simde_mm_max_epi16(xmm4, xmm5);
+    xmm0 = simde_mm_max_epi16(bit_met_p7_m3, bit_met_p5_m3);
+    xmm1 = simde_mm_max_epi16(bit_met_p3_m3, bit_met_p1_m3);
+    xmm2 = simde_mm_max_epi16(bit_met_m1_m3, bit_met_m3_m3);
+    xmm3 = simde_mm_max_epi16(bit_met_m5_m3, bit_met_m7_m3);
+    xmm4 = simde_mm_max_epi16(xmm0, xmm1);
+    xmm5 = simde_mm_max_epi16(xmm2, xmm3);
+    logmax_num_re0 = simde_mm_max_epi16(logmax_num_re0, xmm4);
+    logmax_num_re0 = simde_mm_max_epi16(logmax_num_re0, xmm5);
+    xmm0 = simde_mm_max_epi16(bit_met_p7_p1, bit_met_p5_p1);
+    xmm1 = simde_mm_max_epi16(bit_met_p3_p1, bit_met_p1_p1);
+    xmm2 = simde_mm_max_epi16(bit_met_m1_p1, bit_met_m3_p1);
+    xmm3 = simde_mm_max_epi16(bit_met_m5_p1, bit_met_m7_p1);
+    xmm4 = simde_mm_max_epi16(xmm0, xmm1);
+    xmm5 = simde_mm_max_epi16(xmm2, xmm3);
+    logmax_num_re0 = simde_mm_max_epi16(logmax_num_re0, xmm4);
+    logmax_num_re0 = simde_mm_max_epi16(logmax_num_re0, xmm5);
+    xmm0 = simde_mm_max_epi16(bit_met_p7_p3, bit_met_p5_p3);
+    xmm1 = simde_mm_max_epi16(bit_met_p3_p3, bit_met_p1_p3);
+    xmm2 = simde_mm_max_epi16(bit_met_m1_p3, bit_met_m3_p3);
+    xmm3 = simde_mm_max_epi16(bit_met_m5_p3, bit_met_m7_p3);
+    xmm4 = simde_mm_max_epi16(xmm0, xmm1);
+    xmm5 = simde_mm_max_epi16(xmm2, xmm3);
+    logmax_num_re0 = simde_mm_max_epi16(logmax_num_re0, xmm4);
+    logmax_num_re0 = simde_mm_max_epi16(logmax_num_re0, xmm5);
+
+    y0i = simde_mm_subs_epi16(logmax_num_re0, logmax_den_re0);
 
 
     // Detection for 5th bit (LTE mapping)
-    xmm0 = _mm_max_epi16(bit_met_m7_m7, bit_met_m7_m5);
-    xmm1 = _mm_max_epi16(bit_met_m7_m3, bit_met_m7_m1);
-    xmm2 = _mm_max_epi16(bit_met_m7_p1, bit_met_m7_p3);
-    xmm3 = _mm_max_epi16(bit_met_m7_p5, bit_met_m7_p7);
-    xmm4 = _mm_max_epi16(xmm0, xmm1);
-    xmm5 = _mm_max_epi16(xmm2, xmm3);
-    logmax_den_re0 = _mm_max_epi16(xmm4, xmm5);
-    xmm0 = _mm_max_epi16(bit_met_m1_m7, bit_met_m1_m5);
-    xmm1 = _mm_max_epi16(bit_met_m1_m3, bit_met_m1_m1);
-    xmm2 = _mm_max_epi16(bit_met_m1_p1, bit_met_m1_p3);
-    xmm3 = _mm_max_epi16(bit_met_m1_p5, bit_met_m1_p7);
-    xmm4 = _mm_max_epi16(xmm0, xmm1);
-    xmm5 = _mm_max_epi16(xmm2, xmm3);
-    logmax_den_re0 = _mm_max_epi16(logmax_den_re0, xmm4);
-    logmax_den_re0 = _mm_max_epi16(logmax_den_re0, xmm5);
-    xmm0 = _mm_max_epi16(bit_met_p1_m7, bit_met_p1_m5);
-    xmm1 = _mm_max_epi16(bit_met_p1_m3, bit_met_p1_m1);
-    xmm2 = _mm_max_epi16(bit_met_p1_p1, bit_met_p1_p3);
-    xmm3 = _mm_max_epi16(bit_met_p1_p5, bit_met_p1_p7);
-    xmm4 = _mm_max_epi16(xmm0, xmm1);
-    xmm5 = _mm_max_epi16(xmm2, xmm3);
-    logmax_den_re0 = _mm_max_epi16(logmax_den_re0, xmm4);
-    logmax_den_re0 = _mm_max_epi16(logmax_den_re0, xmm5);
-    xmm0 = _mm_max_epi16(bit_met_p7_m7, bit_met_p7_m5);
-    xmm1 = _mm_max_epi16(bit_met_p7_m3, bit_met_p7_m1);
-    xmm2 = _mm_max_epi16(bit_met_p7_p1, bit_met_p7_p3);
-    xmm3 = _mm_max_epi16(bit_met_p7_p5, bit_met_p7_p7);
-    xmm4 = _mm_max_epi16(xmm0, xmm1);
-    xmm5 = _mm_max_epi16(xmm2, xmm3);
-    logmax_den_re0 = _mm_max_epi16(logmax_den_re0, xmm4);
-    logmax_den_re0 = _mm_max_epi16(logmax_den_re0, xmm5);
-
-    xmm0 = _mm_max_epi16(bit_met_m5_m7, bit_met_m5_m5);
-    xmm1 = _mm_max_epi16(bit_met_m5_m3, bit_met_m5_m1);
-    xmm2 = _mm_max_epi16(bit_met_m5_p1, bit_met_m5_p3);
-    xmm3 = _mm_max_epi16(bit_met_m5_p5, bit_met_m5_p7);
-    xmm4 = _mm_max_epi16(xmm0, xmm1);
-    xmm5 = _mm_max_epi16(xmm2, xmm3);
-    logmax_num_re0 = _mm_max_epi16(xmm4, xmm5);
-    xmm0 = _mm_max_epi16(bit_met_m3_m7, bit_met_m3_m5);
-    xmm1 = _mm_max_epi16(bit_met_m3_m3, bit_met_m3_m1);
-    xmm2 = _mm_max_epi16(bit_met_m3_p1, bit_met_m3_p3);
-    xmm3 = _mm_max_epi16(bit_met_m3_p5, bit_met_m3_p7);
-    xmm4 = _mm_max_epi16(xmm0, xmm1);
-    xmm5 = _mm_max_epi16(xmm2, xmm3);
-    logmax_num_re0 = _mm_max_epi16(logmax_num_re0, xmm4);
-    logmax_num_re0 = _mm_max_epi16(logmax_num_re0, xmm5);
-    xmm0 = _mm_max_epi16(bit_met_p3_m7, bit_met_p3_m5);
-    xmm1 = _mm_max_epi16(bit_met_p3_m3, bit_met_p3_m1);
-    xmm2 = _mm_max_epi16(bit_met_p3_p1, bit_met_p3_p3);
-    xmm3 = _mm_max_epi16(bit_met_p3_p5, bit_met_p3_p7);
-    xmm4 = _mm_max_epi16(xmm0, xmm1);
-    xmm5 = _mm_max_epi16(xmm2, xmm3);
-    logmax_num_re0 = _mm_max_epi16(logmax_num_re0, xmm4);
-    logmax_num_re0 = _mm_max_epi16(logmax_num_re0, xmm5);
-    xmm0 = _mm_max_epi16(bit_met_p5_m7, bit_met_p5_m5);
-    xmm1 = _mm_max_epi16(bit_met_p5_m3, bit_met_p5_m1);
-    xmm2 = _mm_max_epi16(bit_met_p5_p1, bit_met_p5_p3);
-    xmm3 = _mm_max_epi16(bit_met_p5_p5, bit_met_p5_p7);
-    xmm4 = _mm_max_epi16(xmm0, xmm1);
-    xmm5 = _mm_max_epi16(xmm2, xmm3);
-    logmax_num_re0 = _mm_max_epi16(logmax_num_re0, xmm4);
-    logmax_num_re0 = _mm_max_epi16(logmax_num_re0, xmm5);
-
-    y1i = _mm_subs_epi16(logmax_num_re0, logmax_den_re0);
+    xmm0 = simde_mm_max_epi16(bit_met_m7_m7, bit_met_m7_m5);
+    xmm1 = simde_mm_max_epi16(bit_met_m7_m3, bit_met_m7_m1);
+    xmm2 = simde_mm_max_epi16(bit_met_m7_p1, bit_met_m7_p3);
+    xmm3 = simde_mm_max_epi16(bit_met_m7_p5, bit_met_m7_p7);
+    xmm4 = simde_mm_max_epi16(xmm0, xmm1);
+    xmm5 = simde_mm_max_epi16(xmm2, xmm3);
+    logmax_den_re0 = simde_mm_max_epi16(xmm4, xmm5);
+    xmm0 = simde_mm_max_epi16(bit_met_m1_m7, bit_met_m1_m5);
+    xmm1 = simde_mm_max_epi16(bit_met_m1_m3, bit_met_m1_m1);
+    xmm2 = simde_mm_max_epi16(bit_met_m1_p1, bit_met_m1_p3);
+    xmm3 = simde_mm_max_epi16(bit_met_m1_p5, bit_met_m1_p7);
+    xmm4 = simde_mm_max_epi16(xmm0, xmm1);
+    xmm5 = simde_mm_max_epi16(xmm2, xmm3);
+    logmax_den_re0 = simde_mm_max_epi16(logmax_den_re0, xmm4);
+    logmax_den_re0 = simde_mm_max_epi16(logmax_den_re0, xmm5);
+    xmm0 = simde_mm_max_epi16(bit_met_p1_m7, bit_met_p1_m5);
+    xmm1 = simde_mm_max_epi16(bit_met_p1_m3, bit_met_p1_m1);
+    xmm2 = simde_mm_max_epi16(bit_met_p1_p1, bit_met_p1_p3);
+    xmm3 = simde_mm_max_epi16(bit_met_p1_p5, bit_met_p1_p7);
+    xmm4 = simde_mm_max_epi16(xmm0, xmm1);
+    xmm5 = simde_mm_max_epi16(xmm2, xmm3);
+    logmax_den_re0 = simde_mm_max_epi16(logmax_den_re0, xmm4);
+    logmax_den_re0 = simde_mm_max_epi16(logmax_den_re0, xmm5);
+    xmm0 = simde_mm_max_epi16(bit_met_p7_m7, bit_met_p7_m5);
+    xmm1 = simde_mm_max_epi16(bit_met_p7_m3, bit_met_p7_m1);
+    xmm2 = simde_mm_max_epi16(bit_met_p7_p1, bit_met_p7_p3);
+    xmm3 = simde_mm_max_epi16(bit_met_p7_p5, bit_met_p7_p7);
+    xmm4 = simde_mm_max_epi16(xmm0, xmm1);
+    xmm5 = simde_mm_max_epi16(xmm2, xmm3);
+    logmax_den_re0 = simde_mm_max_epi16(logmax_den_re0, xmm4);
+    logmax_den_re0 = simde_mm_max_epi16(logmax_den_re0, xmm5);
+
+    xmm0 = simde_mm_max_epi16(bit_met_m5_m7, bit_met_m5_m5);
+    xmm1 = simde_mm_max_epi16(bit_met_m5_m3, bit_met_m5_m1);
+    xmm2 = simde_mm_max_epi16(bit_met_m5_p1, bit_met_m5_p3);
+    xmm3 = simde_mm_max_epi16(bit_met_m5_p5, bit_met_m5_p7);
+    xmm4 = simde_mm_max_epi16(xmm0, xmm1);
+    xmm5 = simde_mm_max_epi16(xmm2, xmm3);
+    logmax_num_re0 = simde_mm_max_epi16(xmm4, xmm5);
+    xmm0 = simde_mm_max_epi16(bit_met_m3_m7, bit_met_m3_m5);
+    xmm1 = simde_mm_max_epi16(bit_met_m3_m3, bit_met_m3_m1);
+    xmm2 = simde_mm_max_epi16(bit_met_m3_p1, bit_met_m3_p3);
+    xmm3 = simde_mm_max_epi16(bit_met_m3_p5, bit_met_m3_p7);
+    xmm4 = simde_mm_max_epi16(xmm0, xmm1);
+    xmm5 = simde_mm_max_epi16(xmm2, xmm3);
+    logmax_num_re0 = simde_mm_max_epi16(logmax_num_re0, xmm4);
+    logmax_num_re0 = simde_mm_max_epi16(logmax_num_re0, xmm5);
+    xmm0 = simde_mm_max_epi16(bit_met_p3_m7, bit_met_p3_m5);
+    xmm1 = simde_mm_max_epi16(bit_met_p3_m3, bit_met_p3_m1);
+    xmm2 = simde_mm_max_epi16(bit_met_p3_p1, bit_met_p3_p3);
+    xmm3 = simde_mm_max_epi16(bit_met_p3_p5, bit_met_p3_p7);
+    xmm4 = simde_mm_max_epi16(xmm0, xmm1);
+    xmm5 = simde_mm_max_epi16(xmm2, xmm3);
+    logmax_num_re0 = simde_mm_max_epi16(logmax_num_re0, xmm4);
+    logmax_num_re0 = simde_mm_max_epi16(logmax_num_re0, xmm5);
+    xmm0 = simde_mm_max_epi16(bit_met_p5_m7, bit_met_p5_m5);
+    xmm1 = simde_mm_max_epi16(bit_met_p5_m3, bit_met_p5_m1);
+    xmm2 = simde_mm_max_epi16(bit_met_p5_p1, bit_met_p5_p3);
+    xmm3 = simde_mm_max_epi16(bit_met_p5_p5, bit_met_p5_p7);
+    xmm4 = simde_mm_max_epi16(xmm0, xmm1);
+    xmm5 = simde_mm_max_epi16(xmm2, xmm3);
+    logmax_num_re0 = simde_mm_max_epi16(logmax_num_re0, xmm4);
+    logmax_num_re0 = simde_mm_max_epi16(logmax_num_re0, xmm5);
+
+    y1i = simde_mm_subs_epi16(logmax_num_re0, logmax_den_re0);
 
     // Detection for 6th bit (LTE mapping)
-    xmm0 = _mm_max_epi16(bit_met_p7_p7, bit_met_p5_p7);
-    xmm1 = _mm_max_epi16(bit_met_p3_p7, bit_met_p1_p7);
-    xmm2 = _mm_max_epi16(bit_met_m1_p7, bit_met_m3_p7);
-    xmm3 = _mm_max_epi16(bit_met_m5_p7, bit_met_m7_p7);
-    xmm4 = _mm_max_epi16(xmm0, xmm1);
-    xmm5 = _mm_max_epi16(xmm2, xmm3);
-    logmax_den_re0 = _mm_max_epi16(xmm4, xmm5);
-    xmm0 = _mm_max_epi16(bit_met_p7_p1, bit_met_p5_p1);
-    xmm1 = _mm_max_epi16(bit_met_p3_p1, bit_met_p1_p1);
-    xmm2 = _mm_max_epi16(bit_met_m1_p1, bit_met_m3_p1);
-    xmm3 = _mm_max_epi16(bit_met_m5_p1, bit_met_m5_p1);
-    xmm4 = _mm_max_epi16(xmm0, xmm1);
-    xmm5 = _mm_max_epi16(xmm2, xmm3);
-    logmax_den_re0 = _mm_max_epi16(logmax_den_re0, xmm4);
-    logmax_den_re0 = _mm_max_epi16(logmax_den_re0, xmm5);
-    xmm0 = _mm_max_epi16(bit_met_p7_m1, bit_met_p5_m1);
-    xmm1 = _mm_max_epi16(bit_met_p3_m1, bit_met_p1_m1);
-    xmm2 = _mm_max_epi16(bit_met_m1_m1, bit_met_m3_m1);
-    xmm3 = _mm_max_epi16(bit_met_m5_m1, bit_met_m7_m1);
-    xmm4 = _mm_max_epi16(xmm0, xmm1);
-    xmm5 = _mm_max_epi16(xmm2, xmm3);
-    logmax_den_re0 = _mm_max_epi16(logmax_den_re0, xmm4);
-    logmax_den_re0 = _mm_max_epi16(logmax_den_re0, xmm5);
-    xmm0 = _mm_max_epi16(bit_met_p7_m7, bit_met_p5_m7);
-    xmm1 = _mm_max_epi16(bit_met_p3_m7, bit_met_p1_m7);
-    xmm2 = _mm_max_epi16(bit_met_m1_m7, bit_met_m3_m7);
-    xmm3 = _mm_max_epi16(bit_met_m5_m7, bit_met_m7_m7);
-    xmm4 = _mm_max_epi16(xmm0, xmm1);
-    xmm5 = _mm_max_epi16(xmm2, xmm3);
-    logmax_den_re0 = _mm_max_epi16(logmax_den_re0, xmm4);
-    logmax_den_re0 = _mm_max_epi16(logmax_den_re0, xmm5);
-
-    xmm0 = _mm_max_epi16(bit_met_p7_m5, bit_met_p5_m5);
-    xmm1 = _mm_max_epi16(bit_met_p3_m5, bit_met_p1_m5);
-    xmm2 = _mm_max_epi16(bit_met_m1_m5, bit_met_m3_m5);
-    xmm3 = _mm_max_epi16(bit_met_m5_m5, bit_met_m7_m5);
-    xmm4 = _mm_max_epi16(xmm0, xmm1);
-    xmm5 = _mm_max_epi16(xmm2, xmm3);
-    logmax_num_re0 = _mm_max_epi16(xmm4, xmm5);
-    xmm0 = _mm_max_epi16(bit_met_p7_m3, bit_met_p5_m3);
-    xmm1 = _mm_max_epi16(bit_met_p3_m3, bit_met_p1_m3);
-    xmm2 = _mm_max_epi16(bit_met_m1_m3, bit_met_m3_m3);
-    xmm3 = _mm_max_epi16(bit_met_m5_m3, bit_met_m7_m3);
-    xmm4 = _mm_max_epi16(xmm0, xmm1);
-    xmm5 = _mm_max_epi16(xmm2, xmm3);
-    logmax_num_re0 = _mm_max_epi16(logmax_num_re0, xmm4);
-    logmax_num_re0 = _mm_max_epi16(logmax_num_re0, xmm5);
-    xmm0 = _mm_max_epi16(bit_met_p7_p3, bit_met_p5_p3);
-    xmm1 = _mm_max_epi16(bit_met_p3_p3, bit_met_p1_p3);
-    xmm2 = _mm_max_epi16(bit_met_m1_p3, bit_met_m3_p3);
-    xmm3 = _mm_max_epi16(bit_met_m5_p3, bit_met_m7_p3);
-    xmm4 = _mm_max_epi16(xmm0, xmm1);
-    xmm5 = _mm_max_epi16(xmm2, xmm3);
-    logmax_num_re0 = _mm_max_epi16(logmax_num_re0, xmm4);
-    logmax_num_re0 = _mm_max_epi16(logmax_num_re0, xmm5);
-    xmm0 = _mm_max_epi16(bit_met_p7_p5, bit_met_p5_p5);
-    xmm1 = _mm_max_epi16(bit_met_p3_p5, bit_met_p1_p5);
-    xmm2 = _mm_max_epi16(bit_met_m1_p5, bit_met_m3_p5);
-    xmm3 = _mm_max_epi16(bit_met_m5_p5, bit_met_m7_p5);
-    xmm4 = _mm_max_epi16(xmm0, xmm1);
-    xmm5 = _mm_max_epi16(xmm2, xmm3);
-    logmax_num_re0 = _mm_max_epi16(logmax_num_re0, xmm4);
-    logmax_num_re0 = _mm_max_epi16(logmax_num_re0, xmm5);
-
-    simde__m128i y2i = _mm_subs_epi16(logmax_num_re0, logmax_den_re0);
+    xmm0 = simde_mm_max_epi16(bit_met_p7_p7, bit_met_p5_p7);
+    xmm1 = simde_mm_max_epi16(bit_met_p3_p7, bit_met_p1_p7);
+    xmm2 = simde_mm_max_epi16(bit_met_m1_p7, bit_met_m3_p7);
+    xmm3 = simde_mm_max_epi16(bit_met_m5_p7, bit_met_m7_p7);
+    xmm4 = simde_mm_max_epi16(xmm0, xmm1);
+    xmm5 = simde_mm_max_epi16(xmm2, xmm3);
+    logmax_den_re0 = simde_mm_max_epi16(xmm4, xmm5);
+    xmm0 = simde_mm_max_epi16(bit_met_p7_p1, bit_met_p5_p1);
+    xmm1 = simde_mm_max_epi16(bit_met_p3_p1, bit_met_p1_p1);
+    xmm2 = simde_mm_max_epi16(bit_met_m1_p1, bit_met_m3_p1);
+    xmm3 = simde_mm_max_epi16(bit_met_m5_p1, bit_met_m7_p1); // fixed: the source duplicated bit_met_m5_p1, dropping the m7 row from this max
+    xmm4 = simde_mm_max_epi16(xmm0, xmm1);
+    xmm5 = simde_mm_max_epi16(xmm2, xmm3);
+    logmax_den_re0 = simde_mm_max_epi16(logmax_den_re0, xmm4);
+    logmax_den_re0 = simde_mm_max_epi16(logmax_den_re0, xmm5);
+    xmm0 = simde_mm_max_epi16(bit_met_p7_m1, bit_met_p5_m1);
+    xmm1 = simde_mm_max_epi16(bit_met_p3_m1, bit_met_p1_m1);
+    xmm2 = simde_mm_max_epi16(bit_met_m1_m1, bit_met_m3_m1);
+    xmm3 = simde_mm_max_epi16(bit_met_m5_m1, bit_met_m7_m1);
+    xmm4 = simde_mm_max_epi16(xmm0, xmm1);
+    xmm5 = simde_mm_max_epi16(xmm2, xmm3);
+    logmax_den_re0 = simde_mm_max_epi16(logmax_den_re0, xmm4);
+    logmax_den_re0 = simde_mm_max_epi16(logmax_den_re0, xmm5);
+    xmm0 = simde_mm_max_epi16(bit_met_p7_m7, bit_met_p5_m7);
+    xmm1 = simde_mm_max_epi16(bit_met_p3_m7, bit_met_p1_m7);
+    xmm2 = simde_mm_max_epi16(bit_met_m1_m7, bit_met_m3_m7);
+    xmm3 = simde_mm_max_epi16(bit_met_m5_m7, bit_met_m7_m7);
+    xmm4 = simde_mm_max_epi16(xmm0, xmm1);
+    xmm5 = simde_mm_max_epi16(xmm2, xmm3);
+    logmax_den_re0 = simde_mm_max_epi16(logmax_den_re0, xmm4);
+    logmax_den_re0 = simde_mm_max_epi16(logmax_den_re0, xmm5);
+
+    xmm0 = simde_mm_max_epi16(bit_met_p7_m5, bit_met_p5_m5);
+    xmm1 = simde_mm_max_epi16(bit_met_p3_m5, bit_met_p1_m5);
+    xmm2 = simde_mm_max_epi16(bit_met_m1_m5, bit_met_m3_m5);
+    xmm3 = simde_mm_max_epi16(bit_met_m5_m5, bit_met_m7_m5);
+    xmm4 = simde_mm_max_epi16(xmm0, xmm1);
+    xmm5 = simde_mm_max_epi16(xmm2, xmm3);
+    logmax_num_re0 = simde_mm_max_epi16(xmm4, xmm5);
+    xmm0 = simde_mm_max_epi16(bit_met_p7_m3, bit_met_p5_m3);
+    xmm1 = simde_mm_max_epi16(bit_met_p3_m3, bit_met_p1_m3);
+    xmm2 = simde_mm_max_epi16(bit_met_m1_m3, bit_met_m3_m3);
+    xmm3 = simde_mm_max_epi16(bit_met_m5_m3, bit_met_m7_m3);
+    xmm4 = simde_mm_max_epi16(xmm0, xmm1);
+    xmm5 = simde_mm_max_epi16(xmm2, xmm3);
+    logmax_num_re0 = simde_mm_max_epi16(logmax_num_re0, xmm4);
+    logmax_num_re0 = simde_mm_max_epi16(logmax_num_re0, xmm5);
+    xmm0 = simde_mm_max_epi16(bit_met_p7_p3, bit_met_p5_p3);
+    xmm1 = simde_mm_max_epi16(bit_met_p3_p3, bit_met_p1_p3);
+    xmm2 = simde_mm_max_epi16(bit_met_m1_p3, bit_met_m3_p3);
+    xmm3 = simde_mm_max_epi16(bit_met_m5_p3, bit_met_m7_p3);
+    xmm4 = simde_mm_max_epi16(xmm0, xmm1);
+    xmm5 = simde_mm_max_epi16(xmm2, xmm3);
+    logmax_num_re0 = simde_mm_max_epi16(logmax_num_re0, xmm4);
+    logmax_num_re0 = simde_mm_max_epi16(logmax_num_re0, xmm5);
+    xmm0 = simde_mm_max_epi16(bit_met_p7_p5, bit_met_p5_p5);
+    xmm1 = simde_mm_max_epi16(bit_met_p3_p5, bit_met_p1_p5);
+    xmm2 = simde_mm_max_epi16(bit_met_m1_p5, bit_met_m3_p5);
+    xmm3 = simde_mm_max_epi16(bit_met_m5_p5, bit_met_m7_p5);
+    xmm4 = simde_mm_max_epi16(xmm0, xmm1);
+    xmm5 = simde_mm_max_epi16(xmm2, xmm3);
+    logmax_num_re0 = simde_mm_max_epi16(logmax_num_re0, xmm4);
+    logmax_num_re0 = simde_mm_max_epi16(logmax_num_re0, xmm5);
+
+    simde__m128i y2i = simde_mm_subs_epi16(logmax_num_re0, logmax_den_re0);
 
     // map to output stream, difficult to do in SIMD since we have 6 16bit LLRs
     // RE 1
@@ -4932,15 +4619,7 @@ void qam64_qpsk(int16_t *stream0_in,
     stream0_out[j + 45] = ((short *)&y0i)[7];
     stream0_out[j + 46] = ((short *)&y1i)[7];
     stream0_out[j + 47] = ((short *)&y2i)[7];
-#elif defined(__arm__) || defined(__aarch64__)
-
-#endif
   }
-
-#if defined(__x86_64__) || defined(__i386__)
-  _mm_empty();
-  _m_empty();
-#endif
 }
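
Aside: every "Detection for ... bit" block in the hunk above performs the same
max-log reduction, LLR(bit) = max over the 32 bit_met_ metrics where the bit
is 0 minus the max over the 32 where it is 1. A minimal scalar sketch of that
rule, for reference only -- the function and predicate names here are
hypothetical and not part of this patch:

    #include <stdint.h>

    /* Max-log LLR over an 8x8 grid of branch metrics indexed by the real
     * (r) and imaginary (i) 64-QAM level, mirroring the SIMD reduction
     * above without vectorization. */
    static int16_t maxlog_llr(const int16_t metric[8][8],
                              int (*bit_is_one)(int r, int i))
    {
      int32_t max0 = INT32_MIN, max1 = INT32_MIN;
      for (int r = 0; r < 8; r++) {
        for (int i = 0; i < 8; i++) {
          int32_t m = metric[r][i];
          if (bit_is_one(r, i)) {
            if (m > max1) max1 = m;
          } else {
            if (m > max0) max0 = m;
          }
        }
      }
      int32_t llr = max0 - max1;
      if (llr > INT16_MAX) llr = INT16_MAX; /* emulate the saturating */
      if (llr < INT16_MIN) llr = INT16_MIN; /* subs_epi16 of the SIMD path */
      return (int16_t)llr;
    }

    /* Example predicate, 1st bit (LTE mapping): 1 when the real level is
     * negative, i.e. the bit_met_m*_* rows (indices 0..3 here). */
    static int first_bit_is_one(int r, int i) { (void)i; return r < 4; }

On the deletion above: _mm_empty()/_m_empty() clear legacy MMX state, which
this function never touches (it is SSE2-class throughout, mapped to NEON by
SIMDE on ARM), so the calls -- unavailable on ARM in any case -- can simply
go, along with the empty __arm__ branch.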
 
 
@@ -5026,494 +4705,489 @@ void qam64_qam16(short *stream0_in,
     stream0_out: output LLRs for 1st stream
   */
 
-#if defined(__x86_64__) || defined(__i386__)
-
-  __m128i *rho01_128i      = (__m128i *)rho01;
-  __m128i *stream0_128i_in = (__m128i *)stream0_in;
-  __m128i *stream1_128i_in = (__m128i *)stream1_in;
-  __m128i *ch_mag_128i     = (__m128i *)ch_mag;
-  __m128i *ch_mag_128i_i   = (__m128i *)ch_mag_i;
-
-  __m128i ONE_OVER_SQRT_42 = _mm_set1_epi16(10112); // round(1/sqrt(42)*2^16)
-  __m128i THREE_OVER_SQRT_42 = _mm_set1_epi16(30337); // round(3/sqrt(42)*2^16)
-  __m128i FIVE_OVER_SQRT_42 = _mm_set1_epi16(25281); // round(5/sqrt(42)*2^15)
-  __m128i SEVEN_OVER_SQRT_42 = _mm_set1_epi16(17697); // round(5/sqrt(42)*2^15)
-  __m128i FORTYNINE_OVER_FOUR_SQRT_42 = _mm_set1_epi16(30969); // round(49/(4*sqrt(42))*2^14), Q2.14
-  __m128i THIRTYSEVEN_OVER_FOUR_SQRT_42 = _mm_set1_epi16(23385); // round(37/(4*sqrt(42))*2^14), Q2.14
-  __m128i TWENTYFIVE_OVER_FOUR_SQRT_42 = _mm_set1_epi16(31601); // round(25/(4*sqrt(42))*2^15)
-  __m128i TWENTYNINE_OVER_FOUR_SQRT_42 = _mm_set1_epi16(18329); // round(29/(4*sqrt(42))*2^15), Q2.14
-  __m128i SEVENTEEN_OVER_FOUR_SQRT_42 = _mm_set1_epi16(21489); // round(17/(4*sqrt(42))*2^15)
-  __m128i NINE_OVER_FOUR_SQRT_42 = _mm_set1_epi16(11376); // round(9/(4*sqrt(42))*2^15)
-  __m128i THIRTEEN_OVER_FOUR_SQRT_42 = _mm_set1_epi16(16433); // round(13/(4*sqrt(42))*2^15)
-  __m128i FIVE_OVER_FOUR_SQRT_42 = _mm_set1_epi16(6320); // round(5/(4*sqrt(42))*2^15)
-  __m128i ONE_OVER_FOUR_SQRT_42 = _mm_set1_epi16(1264); // round(1/(4*sqrt(42))*2^15)
-  __m128i ONE_OVER_SQRT_10_Q15 = _mm_set1_epi16(10362); // round(1/sqrt(10)*2^15)
-  __m128i THREE_OVER_SQRT_10 = _mm_set1_epi16(31086); // round(3/sqrt(10)*2^15)
-  __m128i SQRT_10_OVER_FOUR = _mm_set1_epi16(25905); // round(sqrt(10)/4*2^15)
-
-
-  __m128i ch_mag_int;
-  __m128i ch_mag_des;
-  __m128i ch_mag_98_over_42_with_sigma2;
-  __m128i ch_mag_74_over_42_with_sigma2;
-  __m128i ch_mag_58_over_42_with_sigma2;
-  __m128i ch_mag_50_over_42_with_sigma2;
-  __m128i ch_mag_34_over_42_with_sigma2;
-  __m128i ch_mag_18_over_42_with_sigma2;
-  __m128i ch_mag_26_over_42_with_sigma2;
-  __m128i ch_mag_10_over_42_with_sigma2;
-  __m128i ch_mag_2_over_42_with_sigma2;
-  __m128i  y0r_one_over_sqrt_21;
-  __m128i  y0r_three_over_sqrt_21;
-  __m128i  y0r_five_over_sqrt_21;
-  __m128i  y0r_seven_over_sqrt_21;
-  __m128i  y0i_one_over_sqrt_21;
-  __m128i  y0i_three_over_sqrt_21;
-  __m128i  y0i_five_over_sqrt_21;
-  __m128i  y0i_seven_over_sqrt_21;
-
-#elif defined(__arm__) || defined(__aarch64__)
 
-#endif
+  simde__m128i *rho01_128i      = (simde__m128i *)rho01;
+  simde__m128i *stream0_128i_in = (simde__m128i *)stream0_in;
+  simde__m128i *stream1_128i_in = (simde__m128i *)stream1_in;
+  simde__m128i *ch_mag_128i     = (simde__m128i *)ch_mag;
+  simde__m128i *ch_mag_128i_i   = (simde__m128i *)ch_mag_i;
+
+  simde__m128i ONE_OVER_SQRT_42 = simde_mm_set1_epi16(10112); // round(1/sqrt(42)*2^16)
+  simde__m128i THREE_OVER_SQRT_42 = simde_mm_set1_epi16(30337); // round(3/sqrt(42)*2^16)
+  simde__m128i FIVE_OVER_SQRT_42 = simde_mm_set1_epi16(25281); // round(5/sqrt(42)*2^15)
+  simde__m128i SEVEN_OVER_SQRT_42 = simde_mm_set1_epi16(17697); // round(7/sqrt(42)*2^14), Q2.14
+  simde__m128i FORTYNINE_OVER_FOUR_SQRT_42 = simde_mm_set1_epi16(30969); // round(49/(4*sqrt(42))*2^14), Q2.14
+  simde__m128i THIRTYSEVEN_OVER_FOUR_SQRT_42 = simde_mm_set1_epi16(23385); // round(37/(4*sqrt(42))*2^14), Q2.14
+  simde__m128i TWENTYFIVE_OVER_FOUR_SQRT_42 = simde_mm_set1_epi16(31601); // round(25/(4*sqrt(42))*2^15)
+  simde__m128i TWENTYNINE_OVER_FOUR_SQRT_42 = simde_mm_set1_epi16(18329); // round(29/(4*sqrt(42))*2^14), Q2.14
+  simde__m128i SEVENTEEN_OVER_FOUR_SQRT_42 = simde_mm_set1_epi16(21489); // round(17/(4*sqrt(42))*2^15)
+  simde__m128i NINE_OVER_FOUR_SQRT_42 = simde_mm_set1_epi16(11376); // round(9/(4*sqrt(42))*2^15)
+  simde__m128i THIRTEEN_OVER_FOUR_SQRT_42 = simde_mm_set1_epi16(16433); // round(13/(4*sqrt(42))*2^15)
+  simde__m128i FIVE_OVER_FOUR_SQRT_42 = simde_mm_set1_epi16(6320); // round(5/(4*sqrt(42))*2^15)
+  simde__m128i ONE_OVER_FOUR_SQRT_42 = simde_mm_set1_epi16(1264); // round(1/(4*sqrt(42))*2^15)
+  simde__m128i ONE_OVER_SQRT_10_Q15 = simde_mm_set1_epi16(10362); // round(1/sqrt(10)*2^15)
+  simde__m128i THREE_OVER_SQRT_10 = simde_mm_set1_epi16(31086); // round(3/sqrt(10)*2^15)
+  simde__m128i SQRT_10_OVER_FOUR = simde_mm_set1_epi16(25905); // round(sqrt(10)/4*2^15)
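+
+  // The constants above are fixed-point encodings: each is round(c * 2^k)
+  // with k chosen as large as possible while the result still fits a signed
+  // 16-bit lane. For example, 1/sqrt(42) = 0.154303 gives
+  // round(0.154303 * 2^16) = 10112 (k = 16), whereas 7/sqrt(42) = 1.080123
+  // only fits at k = 14: round(1.080123 * 2^14) = 17697 (Q2.14).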
+
+
+  simde__m128i ch_mag_int;
+  simde__m128i ch_mag_des;
+  simde__m128i ch_mag_98_over_42_with_sigma2;
+  simde__m128i ch_mag_74_over_42_with_sigma2;
+  simde__m128i ch_mag_58_over_42_with_sigma2;
+  simde__m128i ch_mag_50_over_42_with_sigma2;
+  simde__m128i ch_mag_34_over_42_with_sigma2;
+  simde__m128i ch_mag_18_over_42_with_sigma2;
+  simde__m128i ch_mag_26_over_42_with_sigma2;
+  simde__m128i ch_mag_10_over_42_with_sigma2;
+  simde__m128i ch_mag_2_over_42_with_sigma2;
+  simde__m128i  y0r_one_over_sqrt_21;
+  simde__m128i  y0r_three_over_sqrt_21;
+  simde__m128i  y0r_five_over_sqrt_21;
+  simde__m128i  y0r_seven_over_sqrt_21;
+  simde__m128i  y0i_one_over_sqrt_21;
+  simde__m128i  y0i_three_over_sqrt_21;
+  simde__m128i  y0i_five_over_sqrt_21;
+  simde__m128i  y0i_seven_over_sqrt_21;
+
   int i,j;
 
 
 
   for (i=0; i<length>>2; i+=2) {
 
-#if defined(__x86_64__) || defined(__i386__)
     // Get rho
     simde__m128i xmm0 = rho01_128i[i];
     simde__m128i xmm1 = rho01_128i[i + 1];
-    xmm0 = _mm_shufflelo_epi16(xmm0,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm0 = _mm_shufflehi_epi16(xmm0,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm0 = _mm_shuffle_epi32(xmm0,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm1 = _mm_shufflelo_epi16(xmm1,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm1 = _mm_shufflehi_epi16(xmm1,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm1 = _mm_shuffle_epi32(xmm1,0xd8); //_MM_SHUFFLE(0,2,1,3));
+    xmm0 = simde_mm_shufflelo_epi16(xmm0, 0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
+    xmm0 = simde_mm_shufflehi_epi16(xmm0, 0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
+    xmm0 = simde_mm_shuffle_epi32(xmm0, 0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
+    xmm1 = simde_mm_shufflelo_epi16(xmm1, 0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
+    xmm1 = simde_mm_shufflehi_epi16(xmm1, 0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
+    xmm1 = simde_mm_shuffle_epi32(xmm1, 0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
     //xmm0 = [Re(0,1) Re(2,3) Im(0,1) Im(2,3)]
     //xmm1 = [Re(4,5) Re(6,7) Im(4,5) Im(6,7)]
-    simde__m128i xmm2 = _mm_unpacklo_epi64(xmm0, xmm1); // Re(rho)
-    simde__m128i xmm3 = _mm_unpackhi_epi64(xmm0, xmm1); // Im(rho)
-    simde__m128i rho_rpi = _mm_adds_epi16(xmm2, xmm3); // rho = Re(rho) + Im(rho)
-    simde__m128i rho_rmi = _mm_subs_epi16(xmm2, xmm3); // rho* = Re(rho) - Im(rho)
+    simde__m128i xmm2 = simde_mm_unpacklo_epi64(xmm0, xmm1); // Re(rho)
+    simde__m128i xmm3 = simde_mm_unpackhi_epi64(xmm0, xmm1); // Im(rho)
+    simde__m128i rho_rpi = simde_mm_adds_epi16(xmm2, xmm3); // rho = Re(rho) + Im(rho)
+    simde__m128i rho_rmi = simde_mm_subs_epi16(xmm2, xmm3); // rho* = Re(rho) - Im(rho)
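+    // The three 0xd8 shuffles de-interleave each vector: per 64-bit half,
+    // [Re Im Re Im] becomes [Re Re Im Im], and the final dword shuffle
+    // groups the halves so unpacklo/unpackhi_epi64 yield pure Re(rho) and
+    // Im(rho) vectors. rho_rpi and rho_rmi then hold Re(rho) +/- Im(rho),
+    // the two projections all psi terms below are built from.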
 
     // Compute the different rhos
-    simde__m128i rho_rpi_1_1 = _mm_mulhi_epi16(rho_rpi, ONE_OVER_SQRT_42);
-    simde__m128i rho_rmi_1_1 = _mm_mulhi_epi16(rho_rmi, ONE_OVER_SQRT_42);
-    simde__m128i rho_rpi_3_3 = _mm_mulhi_epi16(rho_rpi, THREE_OVER_SQRT_42);
-    simde__m128i rho_rmi_3_3 = _mm_mulhi_epi16(rho_rmi, THREE_OVER_SQRT_42);
-    simde__m128i rho_rpi_5_5 = _mm_mulhi_epi16(rho_rpi, FIVE_OVER_SQRT_42);
-    simde__m128i rho_rmi_5_5 = _mm_mulhi_epi16(rho_rmi, FIVE_OVER_SQRT_42);
-    simde__m128i rho_rpi_7_7 = _mm_mulhi_epi16(rho_rpi, SEVEN_OVER_SQRT_42);
-    simde__m128i rho_rmi_7_7 = _mm_mulhi_epi16(rho_rmi, SEVEN_OVER_SQRT_42);
-
-    rho_rpi_5_5 = _mm_slli_epi16(rho_rpi_5_5, 1);
-    rho_rmi_5_5 = _mm_slli_epi16(rho_rmi_5_5, 1);
-    rho_rpi_7_7 = _mm_slli_epi16(rho_rpi_7_7, 2);
-    rho_rmi_7_7 = _mm_slli_epi16(rho_rmi_7_7, 2);
-
-    simde__m128i xmm4 = _mm_mulhi_epi16(xmm2, ONE_OVER_SQRT_42);
-    simde__m128i xmm5 = _mm_mulhi_epi16(xmm3, ONE_OVER_SQRT_42);
-    simde__m128i xmm6 = _mm_mulhi_epi16(xmm3, THREE_OVER_SQRT_42);
-    simde__m128i xmm7 = _mm_mulhi_epi16(xmm3, FIVE_OVER_SQRT_42);
-    simde__m128i xmm8 = _mm_mulhi_epi16(xmm3, SEVEN_OVER_SQRT_42);
-    xmm7 = _mm_slli_epi16(xmm7, 1);
-    xmm8 = _mm_slli_epi16(xmm8, 2);
-
-    simde__m128i rho_rpi_1_3 = _mm_adds_epi16(xmm4, xmm6);
-    simde__m128i rho_rmi_1_3 = _mm_subs_epi16(xmm4, xmm6);
-    simde__m128i rho_rpi_1_5 = _mm_adds_epi16(xmm4, xmm7);
-    simde__m128i rho_rmi_1_5 = _mm_subs_epi16(xmm4, xmm7);
-    simde__m128i rho_rpi_1_7 = _mm_adds_epi16(xmm4, xmm8);
-    simde__m128i rho_rmi_1_7 = _mm_subs_epi16(xmm4, xmm8);
-
-    xmm4 = _mm_mulhi_epi16(xmm2, THREE_OVER_SQRT_42);
-    simde__m128i rho_rpi_3_1 = _mm_adds_epi16(xmm4, xmm5);
-    simde__m128i rho_rmi_3_1 = _mm_subs_epi16(xmm4, xmm5);
-    simde__m128i rho_rpi_3_5 = _mm_adds_epi16(xmm4, xmm7);
-    simde__m128i rho_rmi_3_5 = _mm_subs_epi16(xmm4, xmm7);
-    simde__m128i rho_rpi_3_7 = _mm_adds_epi16(xmm4, xmm8);
-    simde__m128i rho_rmi_3_7 = _mm_subs_epi16(xmm4, xmm8);
-
-    xmm4 = _mm_mulhi_epi16(xmm2, FIVE_OVER_SQRT_42);
-    xmm4 = _mm_slli_epi16(xmm4, 1);
-    simde__m128i rho_rpi_5_1 = _mm_adds_epi16(xmm4, xmm5);
-    simde__m128i rho_rmi_5_1 = _mm_subs_epi16(xmm4, xmm5);
-    simde__m128i rho_rpi_5_3 = _mm_adds_epi16(xmm4, xmm6);
-    simde__m128i rho_rmi_5_3 = _mm_subs_epi16(xmm4, xmm6);
-    simde__m128i rho_rpi_5_7 = _mm_adds_epi16(xmm4, xmm8);
-    simde__m128i rho_rmi_5_7 = _mm_subs_epi16(xmm4, xmm8);
-
-    xmm4 = _mm_mulhi_epi16(xmm2, SEVEN_OVER_SQRT_42);
-    xmm4 = _mm_slli_epi16(xmm4, 2);
-    simde__m128i rho_rpi_7_1 = _mm_adds_epi16(xmm4, xmm5);
-    simde__m128i rho_rmi_7_1 = _mm_subs_epi16(xmm4, xmm5);
-    simde__m128i rho_rpi_7_3 = _mm_adds_epi16(xmm4, xmm6);
-    simde__m128i rho_rmi_7_3 = _mm_subs_epi16(xmm4, xmm6);
-    simde__m128i rho_rpi_7_5 = _mm_adds_epi16(xmm4, xmm7);
-    simde__m128i rho_rmi_7_5 = _mm_subs_epi16(xmm4, xmm7);
+    simde__m128i rho_rpi_1_1 = simde_mm_mulhi_epi16(rho_rpi, ONE_OVER_SQRT_42);
+    simde__m128i rho_rmi_1_1 = simde_mm_mulhi_epi16(rho_rmi, ONE_OVER_SQRT_42);
+    simde__m128i rho_rpi_3_3 = simde_mm_mulhi_epi16(rho_rpi, THREE_OVER_SQRT_42);
+    simde__m128i rho_rmi_3_3 = simde_mm_mulhi_epi16(rho_rmi, THREE_OVER_SQRT_42);
+    simde__m128i rho_rpi_5_5 = simde_mm_mulhi_epi16(rho_rpi, FIVE_OVER_SQRT_42);
+    simde__m128i rho_rmi_5_5 = simde_mm_mulhi_epi16(rho_rmi, FIVE_OVER_SQRT_42);
+    simde__m128i rho_rpi_7_7 = simde_mm_mulhi_epi16(rho_rpi, SEVEN_OVER_SQRT_42);
+    simde__m128i rho_rmi_7_7 = simde_mm_mulhi_epi16(rho_rmi, SEVEN_OVER_SQRT_42);
+
+    rho_rpi_5_5 = simde_mm_slli_epi16(rho_rpi_5_5, 1);
+    rho_rmi_5_5 = simde_mm_slli_epi16(rho_rmi_5_5, 1);
+    rho_rpi_7_7 = simde_mm_slli_epi16(rho_rpi_7_7, 2);
+    rho_rmi_7_7 = simde_mm_slli_epi16(rho_rmi_7_7, 2);
+
+    simde__m128i xmm4 = simde_mm_mulhi_epi16(xmm2, ONE_OVER_SQRT_42);
+    simde__m128i xmm5 = simde_mm_mulhi_epi16(xmm3, ONE_OVER_SQRT_42);
+    simde__m128i xmm6 = simde_mm_mulhi_epi16(xmm3, THREE_OVER_SQRT_42);
+    simde__m128i xmm7 = simde_mm_mulhi_epi16(xmm3, FIVE_OVER_SQRT_42);
+    simde__m128i xmm8 = simde_mm_mulhi_epi16(xmm3, SEVEN_OVER_SQRT_42);
+    xmm7 = simde_mm_slli_epi16(xmm7, 1);
+    xmm8 = simde_mm_slli_epi16(xmm8, 2);
+
+    simde__m128i rho_rpi_1_3 = simde_mm_adds_epi16(xmm4, xmm6);
+    simde__m128i rho_rmi_1_3 = simde_mm_subs_epi16(xmm4, xmm6);
+    simde__m128i rho_rpi_1_5 = simde_mm_adds_epi16(xmm4, xmm7);
+    simde__m128i rho_rmi_1_5 = simde_mm_subs_epi16(xmm4, xmm7);
+    simde__m128i rho_rpi_1_7 = simde_mm_adds_epi16(xmm4, xmm8);
+    simde__m128i rho_rmi_1_7 = simde_mm_subs_epi16(xmm4, xmm8);
+
+    xmm4 = simde_mm_mulhi_epi16(xmm2, THREE_OVER_SQRT_42);
+    simde__m128i rho_rpi_3_1 = simde_mm_adds_epi16(xmm4, xmm5);
+    simde__m128i rho_rmi_3_1 = simde_mm_subs_epi16(xmm4, xmm5);
+    simde__m128i rho_rpi_3_5 = simde_mm_adds_epi16(xmm4, xmm7);
+    simde__m128i rho_rmi_3_5 = simde_mm_subs_epi16(xmm4, xmm7);
+    simde__m128i rho_rpi_3_7 = simde_mm_adds_epi16(xmm4, xmm8);
+    simde__m128i rho_rmi_3_7 = simde_mm_subs_epi16(xmm4, xmm8);
+
+    xmm4 = simde_mm_mulhi_epi16(xmm2, FIVE_OVER_SQRT_42);
+    xmm4 = simde_mm_slli_epi16(xmm4, 1);
+    simde__m128i rho_rpi_5_1 = simde_mm_adds_epi16(xmm4, xmm5);
+    simde__m128i rho_rmi_5_1 = simde_mm_subs_epi16(xmm4, xmm5);
+    simde__m128i rho_rpi_5_3 = simde_mm_adds_epi16(xmm4, xmm6);
+    simde__m128i rho_rmi_5_3 = simde_mm_subs_epi16(xmm4, xmm6);
+    simde__m128i rho_rpi_5_7 = simde_mm_adds_epi16(xmm4, xmm8);
+    simde__m128i rho_rmi_5_7 = simde_mm_subs_epi16(xmm4, xmm8);
+
+    xmm4 = simde_mm_mulhi_epi16(xmm2, SEVEN_OVER_SQRT_42);
+    xmm4 = simde_mm_slli_epi16(xmm4, 2);
+    simde__m128i rho_rpi_7_1 = simde_mm_adds_epi16(xmm4, xmm5);
+    simde__m128i rho_rmi_7_1 = simde_mm_subs_epi16(xmm4, xmm5);
+    simde__m128i rho_rpi_7_3 = simde_mm_adds_epi16(xmm4, xmm6);
+    simde__m128i rho_rmi_7_3 = simde_mm_subs_epi16(xmm4, xmm6);
+    simde__m128i rho_rpi_7_5 = simde_mm_adds_epi16(xmm4, xmm7);
+    simde__m128i rho_rmi_7_5 = simde_mm_subs_epi16(xmm4, xmm7);
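+
+    // simde_mm_mulhi_epi16(a, b) keeps the high 16 bits of the signed
+    // 32-bit product, i.e. (a*b) >> 16, so multiplying by round(c * 2^16)
+    // scales a by c directly. Constants that would overflow int16 at 2^16
+    // scale are stored smaller and corrected afterwards: e.g.
+    // 5/sqrt(42) * 2^16 = 50562 > 32767, so 25281 (2^15 scale) is used and
+    // the product is shifted left by 1; 7/sqrt(42) uses 2^14 scale and a
+    // shift by 2.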
 
     // Rearrange interfering MF output
     xmm0 = stream1_128i_in[i];
     xmm1 = stream1_128i_in[i+1];
-    xmm0 = _mm_shufflelo_epi16(xmm0,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm0 = _mm_shufflehi_epi16(xmm0,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm0 = _mm_shuffle_epi32(xmm0,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm1 = _mm_shufflelo_epi16(xmm1,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm1 = _mm_shufflehi_epi16(xmm1,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm1 = _mm_shuffle_epi32(xmm1,0xd8); //_MM_SHUFFLE(0,2,1,3));
+    xmm0 = simde_mm_shufflelo_epi16(xmm0,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
+    xmm0 = simde_mm_shufflehi_epi16(xmm0,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
+    xmm0 = simde_mm_shuffle_epi32(xmm0,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
+    xmm1 = simde_mm_shufflelo_epi16(xmm1,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
+    xmm1 = simde_mm_shufflehi_epi16(xmm1,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
+    xmm1 = simde_mm_shuffle_epi32(xmm1,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
     //xmm0 = [Re(0,1) Re(2,3) Im(0,1) Im(2,3)]
     //xmm1 = [Re(4,5) Re(6,7) Im(4,5) Im(6,7)]
-    simde__m128i y1r = _mm_unpacklo_epi64(xmm0, xmm1); //[y1r(1),y1r(2),y1r(3),y1r(4)]
-    simde__m128i y1i = _mm_unpackhi_epi64(xmm0, xmm1); //[y1i(1),y1i(2),y1i(3),y1i(4)]
+    simde__m128i y1r = simde_mm_unpacklo_epi64(xmm0, xmm1); //[y1r(1),y1r(2),y1r(3),y1r(4)]
+    simde__m128i y1i = simde_mm_unpackhi_epi64(xmm0, xmm1); //[y1i(1),y1i(2),y1i(3),y1i(4)]
 
     // Psi_r calculation from rho_rpi or rho_rmi
-    xmm0 = _mm_setzero_si128(); // ZERO for abs_pi16
-    xmm2 = _mm_subs_epi16(rho_rpi_7_7, y1r);
-    simde__m128i psi_r_p7_p7 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_subs_epi16(rho_rpi_7_5, y1r);
-    simde__m128i psi_r_p7_p5 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_subs_epi16(rho_rpi_7_3, y1r);
-    simde__m128i psi_r_p7_p3 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_subs_epi16(rho_rpi_7_1, y1r);
-    simde__m128i psi_r_p7_p1 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_subs_epi16(rho_rmi_7_1, y1r);
-    simde__m128i psi_r_p7_m1 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_subs_epi16(rho_rmi_7_3, y1r);
-    simde__m128i psi_r_p7_m3 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_subs_epi16(rho_rmi_7_5, y1r);
-    simde__m128i psi_r_p7_m5 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_subs_epi16(rho_rmi_7_7, y1r);
-    simde__m128i psi_r_p7_m7 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_subs_epi16(rho_rpi_5_7, y1r);
-    simde__m128i psi_r_p5_p7 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_subs_epi16(rho_rpi_5_5, y1r);
-    simde__m128i psi_r_p5_p5 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_subs_epi16(rho_rpi_5_3, y1r);
-    simde__m128i psi_r_p5_p3 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_subs_epi16(rho_rpi_5_1, y1r);
-    simde__m128i psi_r_p5_p1 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_subs_epi16(rho_rmi_5_1, y1r);
-    simde__m128i psi_r_p5_m1 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_subs_epi16(rho_rmi_5_3, y1r);
-    simde__m128i psi_r_p5_m3 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_subs_epi16(rho_rmi_5_5, y1r);
-    simde__m128i psi_r_p5_m5 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_subs_epi16(rho_rmi_5_7, y1r);
-    simde__m128i psi_r_p5_m7 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_subs_epi16(rho_rpi_3_7, y1r);
-    simde__m128i psi_r_p3_p7 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_subs_epi16(rho_rpi_3_5, y1r);
-    simde__m128i psi_r_p3_p5 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_subs_epi16(rho_rpi_3_3, y1r);
-    simde__m128i psi_r_p3_p3 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_subs_epi16(rho_rpi_3_1, y1r);
-    simde__m128i psi_r_p3_p1 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_subs_epi16(rho_rmi_3_1, y1r);
-    simde__m128i psi_r_p3_m1 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_subs_epi16(rho_rmi_3_3, y1r);
-    simde__m128i psi_r_p3_m3 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_subs_epi16(rho_rmi_3_5, y1r);
-    simde__m128i psi_r_p3_m5 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_subs_epi16(rho_rmi_3_7, y1r);
-    simde__m128i psi_r_p3_m7 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_subs_epi16(rho_rpi_1_7, y1r);
-    simde__m128i psi_r_p1_p7 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_subs_epi16(rho_rpi_1_5, y1r);
-    simde__m128i psi_r_p1_p5 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_subs_epi16(rho_rpi_1_3, y1r);
-    simde__m128i psi_r_p1_p3 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_subs_epi16(rho_rpi_1_1, y1r);
-    simde__m128i psi_r_p1_p1 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_subs_epi16(rho_rmi_1_1, y1r);
-    simde__m128i psi_r_p1_m1 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_subs_epi16(rho_rmi_1_3, y1r);
-    simde__m128i psi_r_p1_m3 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_subs_epi16(rho_rmi_1_5, y1r);
-    simde__m128i psi_r_p1_m5 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_subs_epi16(rho_rmi_1_7, y1r);
-    simde__m128i psi_r_p1_m7 = _mm_abs_epi16(xmm2);
-
-    xmm2 = _mm_adds_epi16(rho_rmi_1_7, y1r);
-    simde__m128i psi_r_m1_p7 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_adds_epi16(rho_rmi_1_5, y1r);
-    simde__m128i psi_r_m1_p5 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_adds_epi16(rho_rmi_1_3, y1r);
-    simde__m128i psi_r_m1_p3 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_adds_epi16(rho_rmi_1_1, y1r);
-    simde__m128i psi_r_m1_p1 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_adds_epi16(rho_rpi_1_1, y1r);
-    simde__m128i psi_r_m1_m1 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_adds_epi16(rho_rpi_1_3, y1r);
-    simde__m128i psi_r_m1_m3 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_adds_epi16(rho_rpi_1_5, y1r);
-    simde__m128i psi_r_m1_m5 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_adds_epi16(rho_rpi_1_7, y1r);
-    simde__m128i psi_r_m1_m7 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_adds_epi16(rho_rmi_3_7, y1r);
-    simde__m128i psi_r_m3_p7 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_adds_epi16(rho_rmi_3_5, y1r);
-    simde__m128i psi_r_m3_p5 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_adds_epi16(rho_rmi_3_3, y1r);
-    simde__m128i psi_r_m3_p3 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_adds_epi16(rho_rmi_3_1, y1r);
-    simde__m128i psi_r_m3_p1 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_adds_epi16(rho_rpi_3_1, y1r);
-    simde__m128i psi_r_m3_m1 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_adds_epi16(rho_rpi_3_3, y1r);
-    simde__m128i psi_r_m3_m3 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_adds_epi16(rho_rpi_3_5, y1r);
-    simde__m128i psi_r_m3_m5 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_adds_epi16(rho_rpi_3_7, y1r);
-    simde__m128i psi_r_m3_m7 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_adds_epi16(rho_rmi_5_7, y1r);
-    simde__m128i psi_r_m5_p7 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_adds_epi16(rho_rmi_5_5, y1r);
-    simde__m128i psi_r_m5_p5 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_adds_epi16(rho_rmi_5_3, y1r);
-    simde__m128i psi_r_m5_p3 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_adds_epi16(rho_rmi_5_1, y1r);
-    simde__m128i psi_r_m5_p1 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_adds_epi16(rho_rpi_5_1, y1r);
-    simde__m128i psi_r_m5_m1 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_adds_epi16(rho_rpi_5_3, y1r);
-    simde__m128i psi_r_m5_m3 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_adds_epi16(rho_rpi_5_5, y1r);
-    simde__m128i psi_r_m5_m5 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_adds_epi16(rho_rpi_5_7, y1r);
-    simde__m128i psi_r_m5_m7 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_adds_epi16(rho_rmi_7_7, y1r);
-    simde__m128i psi_r_m7_p7 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_adds_epi16(rho_rmi_7_5, y1r);
-    simde__m128i psi_r_m7_p5 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_adds_epi16(rho_rmi_7_3, y1r);
-    simde__m128i psi_r_m7_p3 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_adds_epi16(rho_rmi_7_1, y1r);
-    simde__m128i psi_r_m7_p1 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_adds_epi16(rho_rpi_7_1, y1r);
-    simde__m128i psi_r_m7_m1 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_adds_epi16(rho_rpi_7_3, y1r);
-    simde__m128i psi_r_m7_m3 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_adds_epi16(rho_rpi_7_5, y1r);
-    simde__m128i psi_r_m7_m5 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_adds_epi16(rho_rpi_7_7, y1r);
-    simde__m128i psi_r_m7_m7 = _mm_abs_epi16(xmm2);
+    xmm0 = simde_mm_setzero_si128(); // ZERO for abs_epi16
+    xmm2 = simde_mm_subs_epi16(rho_rpi_7_7, y1r);
+    simde__m128i psi_r_p7_p7 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rpi_7_5, y1r);
+    simde__m128i psi_r_p7_p5 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rpi_7_3, y1r);
+    simde__m128i psi_r_p7_p3 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rpi_7_1, y1r);
+    simde__m128i psi_r_p7_p1 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rmi_7_1, y1r);
+    simde__m128i psi_r_p7_m1 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rmi_7_3, y1r);
+    simde__m128i psi_r_p7_m3 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rmi_7_5, y1r);
+    simde__m128i psi_r_p7_m5 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rmi_7_7, y1r);
+    simde__m128i psi_r_p7_m7 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rpi_5_7, y1r);
+    simde__m128i psi_r_p5_p7 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rpi_5_5, y1r);
+    simde__m128i psi_r_p5_p5 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rpi_5_3, y1r);
+    simde__m128i psi_r_p5_p3 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rpi_5_1, y1r);
+    simde__m128i psi_r_p5_p1 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rmi_5_1, y1r);
+    simde__m128i psi_r_p5_m1 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rmi_5_3, y1r);
+    simde__m128i psi_r_p5_m3 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rmi_5_5, y1r);
+    simde__m128i psi_r_p5_m5 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rmi_5_7, y1r);
+    simde__m128i psi_r_p5_m7 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rpi_3_7, y1r);
+    simde__m128i psi_r_p3_p7 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rpi_3_5, y1r);
+    simde__m128i psi_r_p3_p5 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rpi_3_3, y1r);
+    simde__m128i psi_r_p3_p3 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rpi_3_1, y1r);
+    simde__m128i psi_r_p3_p1 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rmi_3_1, y1r);
+    simde__m128i psi_r_p3_m1 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rmi_3_3, y1r);
+    simde__m128i psi_r_p3_m3 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rmi_3_5, y1r);
+    simde__m128i psi_r_p3_m5 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rmi_3_7, y1r);
+    simde__m128i psi_r_p3_m7 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rpi_1_7, y1r);
+    simde__m128i psi_r_p1_p7 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rpi_1_5, y1r);
+    simde__m128i psi_r_p1_p5 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rpi_1_3, y1r);
+    simde__m128i psi_r_p1_p3 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rpi_1_1, y1r);
+    simde__m128i psi_r_p1_p1 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rmi_1_1, y1r);
+    simde__m128i psi_r_p1_m1 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rmi_1_3, y1r);
+    simde__m128i psi_r_p1_m3 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rmi_1_5, y1r);
+    simde__m128i psi_r_p1_m5 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rmi_1_7, y1r);
+    simde__m128i psi_r_p1_m7 = simde_mm_abs_epi16(xmm2);
+
+    xmm2 = simde_mm_adds_epi16(rho_rmi_1_7, y1r);
+    simde__m128i psi_r_m1_p7 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(rho_rmi_1_5, y1r);
+    simde__m128i psi_r_m1_p5 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(rho_rmi_1_3, y1r);
+    simde__m128i psi_r_m1_p3 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(rho_rmi_1_1, y1r);
+    simde__m128i psi_r_m1_p1 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(rho_rpi_1_1, y1r);
+    simde__m128i psi_r_m1_m1 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(rho_rpi_1_3, y1r);
+    simde__m128i psi_r_m1_m3 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(rho_rpi_1_5, y1r);
+    simde__m128i psi_r_m1_m5 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(rho_rpi_1_7, y1r);
+    simde__m128i psi_r_m1_m7 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(rho_rmi_3_7, y1r);
+    simde__m128i psi_r_m3_p7 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(rho_rmi_3_5, y1r);
+    simde__m128i psi_r_m3_p5 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(rho_rmi_3_3, y1r);
+    simde__m128i psi_r_m3_p3 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(rho_rmi_3_1, y1r);
+    simde__m128i psi_r_m3_p1 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(rho_rpi_3_1, y1r);
+    simde__m128i psi_r_m3_m1 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(rho_rpi_3_3, y1r);
+    simde__m128i psi_r_m3_m3 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(rho_rpi_3_5, y1r);
+    simde__m128i psi_r_m3_m5 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(rho_rpi_3_7, y1r);
+    simde__m128i psi_r_m3_m7 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(rho_rmi_5_7, y1r);
+    simde__m128i psi_r_m5_p7 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(rho_rmi_5_5, y1r);
+    simde__m128i psi_r_m5_p5 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(rho_rmi_5_3, y1r);
+    simde__m128i psi_r_m5_p3 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(rho_rmi_5_1, y1r);
+    simde__m128i psi_r_m5_p1 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(rho_rpi_5_1, y1r);
+    simde__m128i psi_r_m5_m1 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(rho_rpi_5_3, y1r);
+    simde__m128i psi_r_m5_m3 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(rho_rpi_5_5, y1r);
+    simde__m128i psi_r_m5_m5 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(rho_rpi_5_7, y1r);
+    simde__m128i psi_r_m5_m7 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(rho_rmi_7_7, y1r);
+    simde__m128i psi_r_m7_p7 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(rho_rmi_7_5, y1r);
+    simde__m128i psi_r_m7_p5 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(rho_rmi_7_3, y1r);
+    simde__m128i psi_r_m7_p3 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(rho_rmi_7_1, y1r);
+    simde__m128i psi_r_m7_p1 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(rho_rpi_7_1, y1r);
+    simde__m128i psi_r_m7_m1 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(rho_rpi_7_3, y1r);
+    simde__m128i psi_r_m7_m3 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(rho_rpi_7_5, y1r);
+    simde__m128i psi_r_m7_m5 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(rho_rpi_7_7, y1r);
+    simde__m128i psi_r_m7_m7 = simde_mm_abs_epi16(xmm2);
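+
+    // Each psi_r term is the distance |rho-projection - y1r| for one
+    // candidate interferer point (x + jy)/sqrt(42): subs handles the
+    // positive-x hypotheses, while the negative-x hypotheses reuse the same
+    // rho tables with the sign folded into an add, since |v| = |-v|.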
 
     // Psi_i calculation from rho_rpi or rho_rmi
-    xmm2 = _mm_subs_epi16(rho_rmi_7_7, y1i);
-    simde__m128i psi_i_p7_p7 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_subs_epi16(rho_rmi_5_7, y1i);
-    simde__m128i psi_i_p7_p5 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_subs_epi16(rho_rmi_3_7, y1i);
-    simde__m128i psi_i_p7_p3 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_subs_epi16(rho_rmi_1_7, y1i);
-    simde__m128i psi_i_p7_p1 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_adds_epi16(rho_rpi_1_7, y1i);
-    simde__m128i psi_i_p7_m1 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_adds_epi16(rho_rpi_3_7, y1i);
-    simde__m128i psi_i_p7_m3 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_adds_epi16(rho_rpi_5_7, y1i);
-    simde__m128i psi_i_p7_m5 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_adds_epi16(rho_rpi_7_7, y1i);
-    simde__m128i psi_i_p7_m7 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_subs_epi16(rho_rmi_7_5, y1i);
-    simde__m128i psi_i_p5_p7 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_subs_epi16(rho_rmi_5_5, y1i);
-    simde__m128i psi_i_p5_p5 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_subs_epi16(rho_rmi_3_5, y1i);
-    simde__m128i psi_i_p5_p3 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_subs_epi16(rho_rmi_1_5, y1i);
-    simde__m128i psi_i_p5_p1 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_adds_epi16(rho_rpi_1_5, y1i);
-    simde__m128i psi_i_p5_m1 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_adds_epi16(rho_rpi_3_5, y1i);
-    simde__m128i psi_i_p5_m3 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_adds_epi16(rho_rpi_5_5, y1i);
-    simde__m128i psi_i_p5_m5 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_adds_epi16(rho_rpi_7_5, y1i);
-    simde__m128i psi_i_p5_m7 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_subs_epi16(rho_rmi_7_3, y1i);
-    simde__m128i psi_i_p3_p7 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_subs_epi16(rho_rmi_5_3, y1i);
-    simde__m128i psi_i_p3_p5 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_subs_epi16(rho_rmi_3_3, y1i);
-    simde__m128i psi_i_p3_p3 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_subs_epi16(rho_rmi_1_3, y1i);
-    simde__m128i psi_i_p3_p1 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_adds_epi16(rho_rpi_1_3, y1i);
-    simde__m128i psi_i_p3_m1 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_adds_epi16(rho_rpi_3_3, y1i);
-    simde__m128i psi_i_p3_m3 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_adds_epi16(rho_rpi_5_3, y1i);
-    simde__m128i psi_i_p3_m5 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_adds_epi16(rho_rpi_7_3, y1i);
-    simde__m128i psi_i_p3_m7 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_subs_epi16(rho_rmi_7_1, y1i);
-    simde__m128i psi_i_p1_p7 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_subs_epi16(rho_rmi_5_1, y1i);
-    simde__m128i psi_i_p1_p5 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_subs_epi16(rho_rmi_3_1, y1i);
-    simde__m128i psi_i_p1_p3 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_subs_epi16(rho_rmi_1_1, y1i);
-    simde__m128i psi_i_p1_p1 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_adds_epi16(rho_rpi_1_1, y1i);
-    simde__m128i psi_i_p1_m1 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_adds_epi16(rho_rpi_3_1, y1i);
-    simde__m128i psi_i_p1_m3 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_adds_epi16(rho_rpi_5_1, y1i);
-    simde__m128i psi_i_p1_m5 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_adds_epi16(rho_rpi_7_1, y1i);
-    simde__m128i psi_i_p1_m7 = _mm_abs_epi16(xmm2);
-
-    xmm2 = _mm_subs_epi16(rho_rpi_7_1, y1i);
-    simde__m128i psi_i_m1_p7 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_subs_epi16(rho_rpi_5_1, y1i);
-    simde__m128i psi_i_m1_p5 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_subs_epi16(rho_rpi_3_1, y1i);
-    simde__m128i psi_i_m1_p3 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_subs_epi16(rho_rpi_1_1, y1i);
-    simde__m128i psi_i_m1_p1 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_adds_epi16(rho_rmi_1_1, y1i);
-    simde__m128i psi_i_m1_m1 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_adds_epi16(rho_rmi_3_1, y1i);
-    simde__m128i psi_i_m1_m3 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_adds_epi16(rho_rmi_5_1, y1i);
-    simde__m128i psi_i_m1_m5 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_adds_epi16(rho_rmi_7_1, y1i);
-    simde__m128i psi_i_m1_m7 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_subs_epi16(rho_rpi_7_3, y1i);
-    simde__m128i psi_i_m3_p7 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_subs_epi16(rho_rpi_5_3, y1i);
-    simde__m128i psi_i_m3_p5 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_subs_epi16(rho_rpi_3_3, y1i);
-    simde__m128i psi_i_m3_p3 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_subs_epi16(rho_rpi_1_3, y1i);
-    simde__m128i psi_i_m3_p1 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_adds_epi16(rho_rmi_1_3, y1i);
-    simde__m128i psi_i_m3_m1 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_adds_epi16(rho_rmi_3_3, y1i);
-    simde__m128i psi_i_m3_m3 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_adds_epi16(rho_rmi_5_3, y1i);
-    simde__m128i psi_i_m3_m5 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_adds_epi16(rho_rmi_7_3, y1i);
-    simde__m128i psi_i_m3_m7 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_subs_epi16(rho_rpi_7_5, y1i);
-    simde__m128i psi_i_m5_p7 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_subs_epi16(rho_rpi_5_5, y1i);
-    simde__m128i psi_i_m5_p5 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_subs_epi16(rho_rpi_3_5, y1i);
-    simde__m128i psi_i_m5_p3 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_subs_epi16(rho_rpi_1_5, y1i);
-    simde__m128i psi_i_m5_p1 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_adds_epi16(rho_rmi_1_5, y1i);
-    simde__m128i psi_i_m5_m1 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_adds_epi16(rho_rmi_3_5, y1i);
-    simde__m128i psi_i_m5_m3 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_adds_epi16(rho_rmi_5_5, y1i);
-    simde__m128i psi_i_m5_m5 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_adds_epi16(rho_rmi_7_5, y1i);
-    simde__m128i psi_i_m5_m7 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_subs_epi16(rho_rpi_7_7, y1i);
-    simde__m128i psi_i_m7_p7 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_subs_epi16(rho_rpi_5_7, y1i);
-    simde__m128i psi_i_m7_p5 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_subs_epi16(rho_rpi_3_7, y1i);
-    simde__m128i psi_i_m7_p3 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_subs_epi16(rho_rpi_1_7, y1i);
-    simde__m128i psi_i_m7_p1 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_adds_epi16(rho_rmi_1_7, y1i);
-    simde__m128i psi_i_m7_m1 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_adds_epi16(rho_rmi_3_7, y1i);
-    simde__m128i psi_i_m7_m3 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_adds_epi16(rho_rmi_5_7, y1i);
-    simde__m128i psi_i_m7_m5 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_adds_epi16(rho_rmi_7_7, y1i);
-    simde__m128i psi_i_m7_m7 = _mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rmi_7_7, y1i);
+    simde__m128i psi_i_p7_p7 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rmi_5_7, y1i);
+    simde__m128i psi_i_p7_p5 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rmi_3_7, y1i);
+    simde__m128i psi_i_p7_p3 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rmi_1_7, y1i);
+    simde__m128i psi_i_p7_p1 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(rho_rpi_1_7, y1i);
+    simde__m128i psi_i_p7_m1 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(rho_rpi_3_7, y1i);
+    simde__m128i psi_i_p7_m3 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(rho_rpi_5_7, y1i);
+    simde__m128i psi_i_p7_m5 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(rho_rpi_7_7, y1i);
+    simde__m128i psi_i_p7_m7 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rmi_7_5, y1i);
+    simde__m128i psi_i_p5_p7 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rmi_5_5, y1i);
+    simde__m128i psi_i_p5_p5 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rmi_3_5, y1i);
+    simde__m128i psi_i_p5_p3 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rmi_1_5, y1i);
+    simde__m128i psi_i_p5_p1 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(rho_rpi_1_5, y1i);
+    simde__m128i psi_i_p5_m1 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(rho_rpi_3_5, y1i);
+    simde__m128i psi_i_p5_m3 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(rho_rpi_5_5, y1i);
+    simde__m128i psi_i_p5_m5 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(rho_rpi_7_5, y1i);
+    simde__m128i psi_i_p5_m7 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rmi_7_3, y1i);
+    simde__m128i psi_i_p3_p7 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rmi_5_3, y1i);
+    simde__m128i psi_i_p3_p5 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rmi_3_3, y1i);
+    simde__m128i psi_i_p3_p3 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rmi_1_3, y1i);
+    simde__m128i psi_i_p3_p1 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(rho_rpi_1_3, y1i);
+    simde__m128i psi_i_p3_m1 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(rho_rpi_3_3, y1i);
+    simde__m128i psi_i_p3_m3 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(rho_rpi_5_3, y1i);
+    simde__m128i psi_i_p3_m5 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(rho_rpi_7_3, y1i);
+    simde__m128i psi_i_p3_m7 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rmi_7_1, y1i);
+    simde__m128i psi_i_p1_p7 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rmi_5_1, y1i);
+    simde__m128i psi_i_p1_p5 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rmi_3_1, y1i);
+    simde__m128i psi_i_p1_p3 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rmi_1_1, y1i);
+    simde__m128i psi_i_p1_p1 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(rho_rpi_1_1, y1i);
+    simde__m128i psi_i_p1_m1 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(rho_rpi_3_1, y1i);
+    simde__m128i psi_i_p1_m3 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(rho_rpi_5_1, y1i);
+    simde__m128i psi_i_p1_m5 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(rho_rpi_7_1, y1i);
+    simde__m128i psi_i_p1_m7 = simde_mm_abs_epi16(xmm2);
+
+    xmm2 = simde_mm_subs_epi16(rho_rpi_7_1, y1i);
+    simde__m128i psi_i_m1_p7 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rpi_5_1, y1i);
+    simde__m128i psi_i_m1_p5 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rpi_3_1, y1i);
+    simde__m128i psi_i_m1_p3 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rpi_1_1, y1i);
+    simde__m128i psi_i_m1_p1 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(rho_rmi_1_1, y1i);
+    simde__m128i psi_i_m1_m1 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(rho_rmi_3_1, y1i);
+    simde__m128i psi_i_m1_m3 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(rho_rmi_5_1, y1i);
+    simde__m128i psi_i_m1_m5 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(rho_rmi_7_1, y1i);
+    simde__m128i psi_i_m1_m7 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rpi_7_3, y1i);
+    simde__m128i psi_i_m3_p7 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rpi_5_3, y1i);
+    simde__m128i psi_i_m3_p5 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rpi_3_3, y1i);
+    simde__m128i psi_i_m3_p3 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rpi_1_3, y1i);
+    simde__m128i psi_i_m3_p1 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(rho_rmi_1_3, y1i);
+    simde__m128i psi_i_m3_m1 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(rho_rmi_3_3, y1i);
+    simde__m128i psi_i_m3_m3 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(rho_rmi_5_3, y1i);
+    simde__m128i psi_i_m3_m5 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(rho_rmi_7_3, y1i);
+    simde__m128i psi_i_m3_m7 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rpi_7_5, y1i);
+    simde__m128i psi_i_m5_p7 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rpi_5_5, y1i);
+    simde__m128i psi_i_m5_p5 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rpi_3_5, y1i);
+    simde__m128i psi_i_m5_p3 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rpi_1_5, y1i);
+    simde__m128i psi_i_m5_p1 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(rho_rmi_1_5, y1i);
+    simde__m128i psi_i_m5_m1 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(rho_rmi_3_5, y1i);
+    simde__m128i psi_i_m5_m3 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(rho_rmi_5_5, y1i);
+    simde__m128i psi_i_m5_m5 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(rho_rmi_7_5, y1i);
+    simde__m128i psi_i_m5_m7 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rpi_7_7, y1i);
+    simde__m128i psi_i_m7_p7 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rpi_5_7, y1i);
+    simde__m128i psi_i_m7_p5 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rpi_3_7, y1i);
+    simde__m128i psi_i_m7_p3 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rpi_1_7, y1i);
+    simde__m128i psi_i_m7_p1 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(rho_rmi_1_7, y1i);
+    simde__m128i psi_i_m7_m1 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(rho_rmi_3_7, y1i);
+    simde__m128i psi_i_m7_m3 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(rho_rmi_5_7, y1i);
+    simde__m128i psi_i_m7_m5 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(rho_rmi_7_7, y1i);
+    simde__m128i psi_i_m7_m7 = simde_mm_abs_epi16(xmm2);
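+
+    // The imaginary branch mirrors psi_r with the (x, y) indices swapped
+    // and the rho_rpi/rho_rmi roles exchanged, again folding hypothesis
+    // signs into adds/subs through |v| = |-v|.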
 
     // Rearrange desired MF output
     xmm0 = stream0_128i_in[i];
     xmm1 = stream0_128i_in[i+1];
-    xmm0 = _mm_shufflelo_epi16(xmm0,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm0 = _mm_shufflehi_epi16(xmm0,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm0 = _mm_shuffle_epi32(xmm0,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm1 = _mm_shufflelo_epi16(xmm1,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm1 = _mm_shufflehi_epi16(xmm1,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm1 = _mm_shuffle_epi32(xmm1,0xd8); //_MM_SHUFFLE(0,2,1,3));
+    xmm0 = simde_mm_shufflelo_epi16(xmm0,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
+    xmm0 = simde_mm_shufflehi_epi16(xmm0,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
+    xmm0 = simde_mm_shuffle_epi32(xmm0,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
+    xmm1 = simde_mm_shufflelo_epi16(xmm1,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
+    xmm1 = simde_mm_shufflehi_epi16(xmm1,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
+    xmm1 = simde_mm_shuffle_epi32(xmm1,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
     //xmm0 = [Re(0,1) Re(2,3) Im(0,1) Im(2,3)]
     //xmm1 = [Re(4,5) Re(6,7) Im(4,5) Im(6,7)]
-    simde__m128i y0r = _mm_unpacklo_epi64(xmm0, xmm1); // = [y0r(1),y0r(2),y0r(3),y0r(4)]
-    simde__m128i y0i = _mm_unpackhi_epi64(xmm0, xmm1);
+    simde__m128i y0r = simde_mm_unpacklo_epi64(xmm0, xmm1); // = [y0r(1),y0r(2),y0r(3),y0r(4)]
+    simde__m128i y0i = simde_mm_unpackhi_epi64(xmm0, xmm1);
 
     // Rearrange desired channel magnitudes
     xmm2 = ch_mag_128i[i]; // = [|h|^2(1),|h|^2(1),|h|^2(2),|h|^2(2)]*(2/sqrt(10))
     xmm3 = ch_mag_128i[i+1]; // = [|h|^2(3),|h|^2(3),|h|^2(4),|h|^2(4)]*(2/sqrt(10))
-    xmm2 = _mm_shufflelo_epi16(xmm2,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm2 = _mm_shufflehi_epi16(xmm2,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm2 = _mm_shuffle_epi32(xmm2,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm3 = _mm_shufflelo_epi16(xmm3,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm3 = _mm_shufflehi_epi16(xmm3,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm3 = _mm_shuffle_epi32(xmm3,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    ch_mag_des = _mm_unpacklo_epi64(xmm2,xmm3);
+    xmm2 = simde_mm_shufflelo_epi16(xmm2,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
+    xmm2 = simde_mm_shufflehi_epi16(xmm2,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
+    xmm2 = simde_mm_shuffle_epi32(xmm2,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
+    xmm3 = simde_mm_shufflelo_epi16(xmm3,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
+    xmm3 = simde_mm_shufflehi_epi16(xmm3,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
+    xmm3 = simde_mm_shuffle_epi32(xmm3,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
+    ch_mag_des = simde_mm_unpacklo_epi64(xmm2,xmm3);
 
     // Rearrange interfering channel magnitudes
     xmm2 = ch_mag_128i_i[i];
     xmm3 = ch_mag_128i_i[i+1];
-    xmm2 = _mm_shufflelo_epi16(xmm2,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm2 = _mm_shufflehi_epi16(xmm2,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm2 = _mm_shuffle_epi32(xmm2,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm3 = _mm_shufflelo_epi16(xmm3,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm3 = _mm_shufflehi_epi16(xmm3,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm3 = _mm_shuffle_epi32(xmm3,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    ch_mag_int  = _mm_unpacklo_epi64(xmm2,xmm3);
-
-    y0r_one_over_sqrt_21   = _mm_mulhi_epi16(y0r, ONE_OVER_SQRT_42);
-    y0r_three_over_sqrt_21 = _mm_mulhi_epi16(y0r, THREE_OVER_SQRT_42);
-    y0r_five_over_sqrt_21  = _mm_mulhi_epi16(y0r, FIVE_OVER_SQRT_42);
-    y0r_five_over_sqrt_21  = _mm_slli_epi16(y0r_five_over_sqrt_21, 1);
-    y0r_seven_over_sqrt_21 = _mm_mulhi_epi16(y0r, SEVEN_OVER_SQRT_42);
-    y0r_seven_over_sqrt_21 = _mm_slli_epi16(y0r_seven_over_sqrt_21, 2); // Q2.14
-
-    y0i_one_over_sqrt_21   = _mm_mulhi_epi16(y0i, ONE_OVER_SQRT_42);
-    y0i_three_over_sqrt_21 = _mm_mulhi_epi16(y0i, THREE_OVER_SQRT_42);
-    y0i_five_over_sqrt_21  = _mm_mulhi_epi16(y0i, FIVE_OVER_SQRT_42);
-    y0i_five_over_sqrt_21  = _mm_slli_epi16(y0i_five_over_sqrt_21, 1);
-    y0i_seven_over_sqrt_21 = _mm_mulhi_epi16(y0i, SEVEN_OVER_SQRT_42);
-    y0i_seven_over_sqrt_21 = _mm_slli_epi16(y0i_seven_over_sqrt_21, 2); // Q2.14
-
-    simde__m128i y0_p_7_1 = _mm_adds_epi16(y0r_seven_over_sqrt_21, y0i_one_over_sqrt_21);
-    simde__m128i y0_p_7_3 = _mm_adds_epi16(y0r_seven_over_sqrt_21, y0i_three_over_sqrt_21);
-    simde__m128i y0_p_7_5 = _mm_adds_epi16(y0r_seven_over_sqrt_21, y0i_five_over_sqrt_21);
-    simde__m128i y0_p_7_7 = _mm_adds_epi16(y0r_seven_over_sqrt_21, y0i_seven_over_sqrt_21);
-    simde__m128i y0_p_5_1 = _mm_adds_epi16(y0r_five_over_sqrt_21, y0i_one_over_sqrt_21);
-    simde__m128i y0_p_5_3 = _mm_adds_epi16(y0r_five_over_sqrt_21, y0i_three_over_sqrt_21);
-    simde__m128i y0_p_5_5 = _mm_adds_epi16(y0r_five_over_sqrt_21, y0i_five_over_sqrt_21);
-    simde__m128i y0_p_5_7 = _mm_adds_epi16(y0r_five_over_sqrt_21, y0i_seven_over_sqrt_21);
-    simde__m128i y0_p_3_1 = _mm_adds_epi16(y0r_three_over_sqrt_21, y0i_one_over_sqrt_21);
-    simde__m128i y0_p_3_3 = _mm_adds_epi16(y0r_three_over_sqrt_21, y0i_three_over_sqrt_21);
-    simde__m128i y0_p_3_5 = _mm_adds_epi16(y0r_three_over_sqrt_21, y0i_five_over_sqrt_21);
-    simde__m128i y0_p_3_7 = _mm_adds_epi16(y0r_three_over_sqrt_21, y0i_seven_over_sqrt_21);
-    simde__m128i y0_p_1_1 = _mm_adds_epi16(y0r_one_over_sqrt_21, y0i_one_over_sqrt_21);
-    simde__m128i y0_p_1_3 = _mm_adds_epi16(y0r_one_over_sqrt_21, y0i_three_over_sqrt_21);
-    simde__m128i y0_p_1_5 = _mm_adds_epi16(y0r_one_over_sqrt_21, y0i_five_over_sqrt_21);
-    simde__m128i y0_p_1_7 = _mm_adds_epi16(y0r_one_over_sqrt_21, y0i_seven_over_sqrt_21);
-
-    simde__m128i y0_m_1_1 = _mm_subs_epi16(y0r_one_over_sqrt_21, y0i_one_over_sqrt_21);
-    simde__m128i y0_m_1_3 = _mm_subs_epi16(y0r_one_over_sqrt_21, y0i_three_over_sqrt_21);
-    simde__m128i y0_m_1_5 = _mm_subs_epi16(y0r_one_over_sqrt_21, y0i_five_over_sqrt_21);
-    simde__m128i y0_m_1_7 = _mm_subs_epi16(y0r_one_over_sqrt_21, y0i_seven_over_sqrt_21);
-    simde__m128i y0_m_3_1 = _mm_subs_epi16(y0r_three_over_sqrt_21, y0i_one_over_sqrt_21);
-    simde__m128i y0_m_3_3 = _mm_subs_epi16(y0r_three_over_sqrt_21, y0i_three_over_sqrt_21);
-    simde__m128i y0_m_3_5 = _mm_subs_epi16(y0r_three_over_sqrt_21, y0i_five_over_sqrt_21);
-    simde__m128i y0_m_3_7 = _mm_subs_epi16(y0r_three_over_sqrt_21, y0i_seven_over_sqrt_21);
-    simde__m128i y0_m_5_1 = _mm_subs_epi16(y0r_five_over_sqrt_21, y0i_one_over_sqrt_21);
-    simde__m128i y0_m_5_3 = _mm_subs_epi16(y0r_five_over_sqrt_21, y0i_three_over_sqrt_21);
-    simde__m128i y0_m_5_5 = _mm_subs_epi16(y0r_five_over_sqrt_21, y0i_five_over_sqrt_21);
-    simde__m128i y0_m_5_7 = _mm_subs_epi16(y0r_five_over_sqrt_21, y0i_seven_over_sqrt_21);
-    simde__m128i y0_m_7_1 = _mm_subs_epi16(y0r_seven_over_sqrt_21, y0i_one_over_sqrt_21);
-    simde__m128i y0_m_7_3 = _mm_subs_epi16(y0r_seven_over_sqrt_21, y0i_three_over_sqrt_21);
-    simde__m128i y0_m_7_5 = _mm_subs_epi16(y0r_seven_over_sqrt_21, y0i_five_over_sqrt_21);
-    simde__m128i y0_m_7_7 = _mm_subs_epi16(y0r_seven_over_sqrt_21, y0i_seven_over_sqrt_21);
+    xmm2 = simde_mm_shufflelo_epi16(xmm2,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
+    xmm2 = simde_mm_shufflehi_epi16(xmm2,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
+    xmm2 = simde_mm_shuffle_epi32(xmm2,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
+    xmm3 = simde_mm_shufflelo_epi16(xmm3,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
+    xmm3 = simde_mm_shufflehi_epi16(xmm3,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
+    xmm3 = simde_mm_shuffle_epi32(xmm3,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
+    ch_mag_int  = simde_mm_unpacklo_epi64(xmm2,xmm3);
+
+    y0r_one_over_sqrt_21   = simde_mm_mulhi_epi16(y0r, ONE_OVER_SQRT_42);
+    y0r_three_over_sqrt_21 = simde_mm_mulhi_epi16(y0r, THREE_OVER_SQRT_42);
+    y0r_five_over_sqrt_21  = simde_mm_mulhi_epi16(y0r, FIVE_OVER_SQRT_42);
+    y0r_five_over_sqrt_21  = simde_mm_slli_epi16(y0r_five_over_sqrt_21, 1);
+    y0r_seven_over_sqrt_21 = simde_mm_mulhi_epi16(y0r, SEVEN_OVER_SQRT_42);
+    y0r_seven_over_sqrt_21 = simde_mm_slli_epi16(y0r_seven_over_sqrt_21, 2); // Q2.14
+
+    y0i_one_over_sqrt_21   = simde_mm_mulhi_epi16(y0i, ONE_OVER_SQRT_42);
+    y0i_three_over_sqrt_21 = simde_mm_mulhi_epi16(y0i, THREE_OVER_SQRT_42);
+    y0i_five_over_sqrt_21  = simde_mm_mulhi_epi16(y0i, FIVE_OVER_SQRT_42);
+    y0i_five_over_sqrt_21  = simde_mm_slli_epi16(y0i_five_over_sqrt_21, 1);
+    y0i_seven_over_sqrt_21 = simde_mm_mulhi_epi16(y0i, SEVEN_OVER_SQRT_42);
+    y0i_seven_over_sqrt_21 = simde_mm_slli_epi16(y0i_seven_over_sqrt_21, 2); // Q2.14
+
+    simde__m128i y0_p_7_1 = simde_mm_adds_epi16(y0r_seven_over_sqrt_21, y0i_one_over_sqrt_21);
+    simde__m128i y0_p_7_3 = simde_mm_adds_epi16(y0r_seven_over_sqrt_21, y0i_three_over_sqrt_21);
+    simde__m128i y0_p_7_5 = simde_mm_adds_epi16(y0r_seven_over_sqrt_21, y0i_five_over_sqrt_21);
+    simde__m128i y0_p_7_7 = simde_mm_adds_epi16(y0r_seven_over_sqrt_21, y0i_seven_over_sqrt_21);
+    simde__m128i y0_p_5_1 = simde_mm_adds_epi16(y0r_five_over_sqrt_21, y0i_one_over_sqrt_21);
+    simde__m128i y0_p_5_3 = simde_mm_adds_epi16(y0r_five_over_sqrt_21, y0i_three_over_sqrt_21);
+    simde__m128i y0_p_5_5 = simde_mm_adds_epi16(y0r_five_over_sqrt_21, y0i_five_over_sqrt_21);
+    simde__m128i y0_p_5_7 = simde_mm_adds_epi16(y0r_five_over_sqrt_21, y0i_seven_over_sqrt_21);
+    simde__m128i y0_p_3_1 = simde_mm_adds_epi16(y0r_three_over_sqrt_21, y0i_one_over_sqrt_21);
+    simde__m128i y0_p_3_3 = simde_mm_adds_epi16(y0r_three_over_sqrt_21, y0i_three_over_sqrt_21);
+    simde__m128i y0_p_3_5 = simde_mm_adds_epi16(y0r_three_over_sqrt_21, y0i_five_over_sqrt_21);
+    simde__m128i y0_p_3_7 = simde_mm_adds_epi16(y0r_three_over_sqrt_21, y0i_seven_over_sqrt_21);
+    simde__m128i y0_p_1_1 = simde_mm_adds_epi16(y0r_one_over_sqrt_21, y0i_one_over_sqrt_21);
+    simde__m128i y0_p_1_3 = simde_mm_adds_epi16(y0r_one_over_sqrt_21, y0i_three_over_sqrt_21);
+    simde__m128i y0_p_1_5 = simde_mm_adds_epi16(y0r_one_over_sqrt_21, y0i_five_over_sqrt_21);
+    simde__m128i y0_p_1_7 = simde_mm_adds_epi16(y0r_one_over_sqrt_21, y0i_seven_over_sqrt_21);
+
+    simde__m128i y0_m_1_1 = simde_mm_subs_epi16(y0r_one_over_sqrt_21, y0i_one_over_sqrt_21);
+    simde__m128i y0_m_1_3 = simde_mm_subs_epi16(y0r_one_over_sqrt_21, y0i_three_over_sqrt_21);
+    simde__m128i y0_m_1_5 = simde_mm_subs_epi16(y0r_one_over_sqrt_21, y0i_five_over_sqrt_21);
+    simde__m128i y0_m_1_7 = simde_mm_subs_epi16(y0r_one_over_sqrt_21, y0i_seven_over_sqrt_21);
+    simde__m128i y0_m_3_1 = simde_mm_subs_epi16(y0r_three_over_sqrt_21, y0i_one_over_sqrt_21);
+    simde__m128i y0_m_3_3 = simde_mm_subs_epi16(y0r_three_over_sqrt_21, y0i_three_over_sqrt_21);
+    simde__m128i y0_m_3_5 = simde_mm_subs_epi16(y0r_three_over_sqrt_21, y0i_five_over_sqrt_21);
+    simde__m128i y0_m_3_7 = simde_mm_subs_epi16(y0r_three_over_sqrt_21, y0i_seven_over_sqrt_21);
+    simde__m128i y0_m_5_1 = simde_mm_subs_epi16(y0r_five_over_sqrt_21, y0i_one_over_sqrt_21);
+    simde__m128i y0_m_5_3 = simde_mm_subs_epi16(y0r_five_over_sqrt_21, y0i_three_over_sqrt_21);
+    simde__m128i y0_m_5_5 = simde_mm_subs_epi16(y0r_five_over_sqrt_21, y0i_five_over_sqrt_21);
+    simde__m128i y0_m_5_7 = simde_mm_subs_epi16(y0r_five_over_sqrt_21, y0i_seven_over_sqrt_21);
+    simde__m128i y0_m_7_1 = simde_mm_subs_epi16(y0r_seven_over_sqrt_21, y0i_one_over_sqrt_21);
+    simde__m128i y0_m_7_3 = simde_mm_subs_epi16(y0r_seven_over_sqrt_21, y0i_three_over_sqrt_21);
+    simde__m128i y0_m_7_5 = simde_mm_subs_epi16(y0r_seven_over_sqrt_21, y0i_five_over_sqrt_21);
+    simde__m128i y0_m_7_7 = simde_mm_subs_epi16(y0r_seven_over_sqrt_21, y0i_seven_over_sqrt_21);
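+    // y0_p_x_y and y0_m_x_y hold (x*y0r +/- y*y0i)/sqrt(42): the desired
+    // stream's matched-filter correlation with each 64-QAM point
+    // (x + jy)/sqrt(42), assembled from the scaled y0r/y0i multiples above.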
     simde__m128i tmp_result, tmp_result2;
     interference_abs_epi16(psi_r_p7_p7, ch_mag_int, a_r_p7_p7, ONE_OVER_SQRT_10_Q15, THREE_OVER_SQRT_10);
     interference_abs_epi16(psi_r_p7_p5, ch_mag_int, a_r_p7_p5, ONE_OVER_SQRT_10_Q15, THREE_OVER_SQRT_10);
@@ -5779,637 +5453,637 @@ void qam64_qam16(short *stream0_in,
 
     // Computing different multiples of ||h0||^2
     // x=1, y=1
-    ch_mag_2_over_42_with_sigma2 = _mm_mulhi_epi16(ch_mag_des,ONE_OVER_FOUR_SQRT_42);
-    ch_mag_2_over_42_with_sigma2 = _mm_slli_epi16(ch_mag_2_over_42_with_sigma2,1);
+    ch_mag_2_over_42_with_sigma2 = simde_mm_mulhi_epi16(ch_mag_des,ONE_OVER_FOUR_SQRT_42);
+    ch_mag_2_over_42_with_sigma2 = simde_mm_slli_epi16(ch_mag_2_over_42_with_sigma2,1);
     // x=1, y=3
-    ch_mag_10_over_42_with_sigma2 = _mm_mulhi_epi16(ch_mag_des,FIVE_OVER_FOUR_SQRT_42);
-    ch_mag_10_over_42_with_sigma2 = _mm_slli_epi16(ch_mag_10_over_42_with_sigma2,1);
+    ch_mag_10_over_42_with_sigma2 = simde_mm_mulhi_epi16(ch_mag_des,FIVE_OVER_FOUR_SQRT_42);
+    ch_mag_10_over_42_with_sigma2 = simde_mm_slli_epi16(ch_mag_10_over_42_with_sigma2,1);
     // x=1, y=5
-    ch_mag_26_over_42_with_sigma2 = _mm_mulhi_epi16(ch_mag_des,THIRTEEN_OVER_FOUR_SQRT_42);
-    ch_mag_26_over_42_with_sigma2 = _mm_slli_epi16(ch_mag_26_over_42_with_sigma2,1);
+    ch_mag_26_over_42_with_sigma2 = simde_mm_mulhi_epi16(ch_mag_des,THIRTEEN_OVER_FOUR_SQRT_42);
+    ch_mag_26_over_42_with_sigma2 = simde_mm_slli_epi16(ch_mag_26_over_42_with_sigma2,1);
     // x=1, y=7
-    ch_mag_50_over_42_with_sigma2 = _mm_mulhi_epi16(ch_mag_des,TWENTYFIVE_OVER_FOUR_SQRT_42);
-    ch_mag_50_over_42_with_sigma2 = _mm_slli_epi16(ch_mag_50_over_42_with_sigma2,1);
+    ch_mag_50_over_42_with_sigma2 = simde_mm_mulhi_epi16(ch_mag_des,TWENTYFIVE_OVER_FOUR_SQRT_42);
+    ch_mag_50_over_42_with_sigma2 = simde_mm_slli_epi16(ch_mag_50_over_42_with_sigma2,1);
     // x=3, y=3
-    ch_mag_18_over_42_with_sigma2 = _mm_mulhi_epi16(ch_mag_des,NINE_OVER_FOUR_SQRT_42);
-    ch_mag_18_over_42_with_sigma2 = _mm_slli_epi16(ch_mag_18_over_42_with_sigma2,1);
+    ch_mag_18_over_42_with_sigma2 = simde_mm_mulhi_epi16(ch_mag_des,NINE_OVER_FOUR_SQRT_42);
+    ch_mag_18_over_42_with_sigma2 = simde_mm_slli_epi16(ch_mag_18_over_42_with_sigma2,1);
     // x=3, y=5
-    ch_mag_34_over_42_with_sigma2 = _mm_mulhi_epi16(ch_mag_des,SEVENTEEN_OVER_FOUR_SQRT_42);
-    ch_mag_34_over_42_with_sigma2 = _mm_slli_epi16(ch_mag_34_over_42_with_sigma2,1);
+    ch_mag_34_over_42_with_sigma2 = simde_mm_mulhi_epi16(ch_mag_des,SEVENTEEN_OVER_FOUR_SQRT_42);
+    ch_mag_34_over_42_with_sigma2 = simde_mm_slli_epi16(ch_mag_34_over_42_with_sigma2,1);
     // x=3, y=7
-    ch_mag_58_over_42_with_sigma2 = _mm_mulhi_epi16(ch_mag_des,TWENTYNINE_OVER_FOUR_SQRT_42);
-    ch_mag_58_over_42_with_sigma2 = _mm_slli_epi16(ch_mag_58_over_42_with_sigma2,2);
+    ch_mag_58_over_42_with_sigma2 = simde_mm_mulhi_epi16(ch_mag_des,TWENTYNINE_OVER_FOUR_SQRT_42);
+    ch_mag_58_over_42_with_sigma2 = simde_mm_slli_epi16(ch_mag_58_over_42_with_sigma2,2);
     // x=5, y=5
-    ch_mag_50_over_42_with_sigma2 = _mm_mulhi_epi16(ch_mag_des,TWENTYFIVE_OVER_FOUR_SQRT_42);
-    ch_mag_50_over_42_with_sigma2 = _mm_slli_epi16(ch_mag_50_over_42_with_sigma2,1);
+    ch_mag_50_over_42_with_sigma2 = simde_mm_mulhi_epi16(ch_mag_des,TWENTYFIVE_OVER_FOUR_SQRT_42);
+    ch_mag_50_over_42_with_sigma2 = simde_mm_slli_epi16(ch_mag_50_over_42_with_sigma2,1);
     // x=5, y=7
-    ch_mag_74_over_42_with_sigma2 = _mm_mulhi_epi16(ch_mag_des,THIRTYSEVEN_OVER_FOUR_SQRT_42);
-    ch_mag_74_over_42_with_sigma2 = _mm_slli_epi16(ch_mag_74_over_42_with_sigma2,2);
+    ch_mag_74_over_42_with_sigma2 = simde_mm_mulhi_epi16(ch_mag_des,THIRTYSEVEN_OVER_FOUR_SQRT_42);
+    ch_mag_74_over_42_with_sigma2 = simde_mm_slli_epi16(ch_mag_74_over_42_with_sigma2,2);
     // x=7, y=7
-    ch_mag_98_over_42_with_sigma2 = _mm_mulhi_epi16(ch_mag_des,FORTYNINE_OVER_FOUR_SQRT_42);
-    ch_mag_98_over_42_with_sigma2 = _mm_slli_epi16(ch_mag_98_over_42_with_sigma2,2);
+    ch_mag_98_over_42_with_sigma2 = simde_mm_mulhi_epi16(ch_mag_des,FORTYNINE_OVER_FOUR_SQRT_42);
+    ch_mag_98_over_42_with_sigma2 = simde_mm_slli_epi16(ch_mag_98_over_42_with_sigma2,2);
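+
+    // The N in ch_mag_N_over_42_with_sigma2 is x^2 + y^2 for the (x, y)
+    // pair in the preceding comment, so each value scales ||h0||^2 in
+    // proportion to the hypothesis energy. As with the rho multiples,
+    // constants stored at 2^14 scale are shifted left by 2 and those at
+    // 2^15 by 1 to bring all terms to a common scale.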
 
     // Computing Metrics
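+    // Every bit metric below follows the same max-log pattern: take the
+    // interference-adjusted term (psi_a minus the a^2 energy), add the
+    // desired-stream correlation y0 for the hypothesis (subtract it for the
+    // negative-x hypotheses), then subtract the matching ||h0||^2 multiple,
+    // saturating at each step.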
-    xmm0 = _mm_subs_epi16(psi_a_p7_p7, a_sq_p7_p7);
-    xmm1 = _mm_adds_epi16(xmm0, y0_p_7_7);
-    simde__m128i bit_met_p7_p7 = _mm_subs_epi16(xmm1, ch_mag_98_over_42_with_sigma2);
-    xmm0 = _mm_subs_epi16(psi_a_p7_p5, a_sq_p7_p5);
-    xmm1 = _mm_adds_epi16(xmm0, y0_p_7_5);
-    simde__m128i bit_met_p7_p5 = _mm_subs_epi16(xmm1, ch_mag_74_over_42_with_sigma2);
-    xmm0 = _mm_subs_epi16(psi_a_p7_p3, a_sq_p7_p3);
-    xmm1 = _mm_adds_epi16(xmm0, y0_p_7_3);
-    simde__m128i bit_met_p7_p3 = _mm_subs_epi16(xmm1, ch_mag_58_over_42_with_sigma2);
-    xmm0 = _mm_subs_epi16(psi_a_p7_p1, a_sq_p7_p1);
-    xmm1 = _mm_adds_epi16(xmm0, y0_p_7_1);
-    simde__m128i bit_met_p7_p1 = _mm_subs_epi16(xmm1, ch_mag_50_over_42_with_sigma2);
-    xmm0 = _mm_subs_epi16(psi_a_p7_m1, a_sq_p7_m1);
-    xmm1 = _mm_adds_epi16(xmm0, y0_m_7_1);
-    simde__m128i bit_met_p7_m1 = _mm_subs_epi16(xmm1, ch_mag_50_over_42_with_sigma2);
-    xmm0 = _mm_subs_epi16(psi_a_p7_m3, a_sq_p7_m3);
-    xmm1 = _mm_adds_epi16(xmm0, y0_m_7_3);
-    simde__m128i bit_met_p7_m3 = _mm_subs_epi16(xmm1, ch_mag_58_over_42_with_sigma2);
-    xmm0 = _mm_subs_epi16(psi_a_p7_m5, a_sq_p7_m5);
-    xmm1 = _mm_adds_epi16(xmm0, y0_m_7_5);
-    simde__m128i bit_met_p7_m5 = _mm_subs_epi16(xmm1, ch_mag_74_over_42_with_sigma2);
-    xmm0 = _mm_subs_epi16(psi_a_p7_m7, a_sq_p7_m7);
-    xmm1 = _mm_adds_epi16(xmm0, y0_m_7_7);
-    simde__m128i bit_met_p7_m7 = _mm_subs_epi16(xmm1, ch_mag_98_over_42_with_sigma2);
-    xmm0 = _mm_subs_epi16(psi_a_p5_p7, a_sq_p5_p7);
-    xmm1 = _mm_adds_epi16(xmm0, y0_p_5_7);
-    simde__m128i bit_met_p5_p7 = _mm_subs_epi16(xmm1, ch_mag_74_over_42_with_sigma2);
-    xmm0 = _mm_subs_epi16(psi_a_p5_p5, a_sq_p5_p5);
-    xmm1 = _mm_adds_epi16(xmm0, y0_p_5_5);
-    simde__m128i bit_met_p5_p5 = _mm_subs_epi16(xmm1, ch_mag_50_over_42_with_sigma2);
-    xmm0 = _mm_subs_epi16(psi_a_p5_p3, a_sq_p5_p3);
-    xmm1 = _mm_adds_epi16(xmm0, y0_p_5_3);
-    simde__m128i bit_met_p5_p3 = _mm_subs_epi16(xmm1, ch_mag_34_over_42_with_sigma2);
-    xmm0 = _mm_subs_epi16(psi_a_p5_p1, a_sq_p5_p1);
-    xmm1 = _mm_adds_epi16(xmm0, y0_p_5_1);
-    simde__m128i bit_met_p5_p1 = _mm_subs_epi16(xmm1, ch_mag_26_over_42_with_sigma2);
-    xmm0 = _mm_subs_epi16(psi_a_p5_m1, a_sq_p5_m1);
-    xmm1 = _mm_adds_epi16(xmm0, y0_m_5_1);
-    simde__m128i bit_met_p5_m1 = _mm_subs_epi16(xmm1, ch_mag_26_over_42_with_sigma2);
-    xmm0 = _mm_subs_epi16(psi_a_p5_m3, a_sq_p5_m3);
-    xmm1 = _mm_adds_epi16(xmm0, y0_m_5_3);
-    simde__m128i bit_met_p5_m3 = _mm_subs_epi16(xmm1, ch_mag_34_over_42_with_sigma2);
-    xmm0 = _mm_subs_epi16(psi_a_p5_m5, a_sq_p5_m5);
-    xmm1 = _mm_adds_epi16(xmm0, y0_m_5_5);
-    simde__m128i bit_met_p5_m5 = _mm_subs_epi16(xmm1, ch_mag_50_over_42_with_sigma2);
-    xmm0 = _mm_subs_epi16(psi_a_p5_m7, a_sq_p5_m7);
-    xmm1 = _mm_adds_epi16(xmm0, y0_m_5_7);
-    simde__m128i bit_met_p5_m7 = _mm_subs_epi16(xmm1, ch_mag_74_over_42_with_sigma2);
-    xmm0 = _mm_subs_epi16(psi_a_p3_p7, a_sq_p3_p7);
-    xmm1 = _mm_adds_epi16(xmm0, y0_p_3_7);
-    simde__m128i bit_met_p3_p7 = _mm_subs_epi16(xmm1, ch_mag_58_over_42_with_sigma2);
-    xmm0 = _mm_subs_epi16(psi_a_p3_p5, a_sq_p3_p5);
-    xmm1 = _mm_adds_epi16(xmm0, y0_p_3_5);
-    simde__m128i bit_met_p3_p5 = _mm_subs_epi16(xmm1, ch_mag_34_over_42_with_sigma2);
-    xmm0 = _mm_subs_epi16(psi_a_p3_p3, a_sq_p3_p3);
-    xmm1 = _mm_adds_epi16(xmm0, y0_p_3_3);
-    simde__m128i bit_met_p3_p3 = _mm_subs_epi16(xmm1, ch_mag_18_over_42_with_sigma2);
-    xmm0 = _mm_subs_epi16(psi_a_p3_p1, a_sq_p3_p1);
-    xmm1 = _mm_adds_epi16(xmm0, y0_p_3_1);
-    simde__m128i bit_met_p3_p1 = _mm_subs_epi16(xmm1, ch_mag_10_over_42_with_sigma2);
-    xmm0 = _mm_subs_epi16(psi_a_p3_m1, a_sq_p3_m1);
-    xmm1 = _mm_adds_epi16(xmm0, y0_m_3_1);
-    simde__m128i bit_met_p3_m1 = _mm_subs_epi16(xmm1, ch_mag_10_over_42_with_sigma2);
-    xmm0 = _mm_subs_epi16(psi_a_p3_m3, a_sq_p3_m3);
-    xmm1 = _mm_adds_epi16(xmm0, y0_m_3_3);
-    simde__m128i bit_met_p3_m3 = _mm_subs_epi16(xmm1, ch_mag_18_over_42_with_sigma2);
-    xmm0 = _mm_subs_epi16(psi_a_p3_m5, a_sq_p3_m5);
-    xmm1 = _mm_adds_epi16(xmm0, y0_m_3_5);
-    simde__m128i bit_met_p3_m5 = _mm_subs_epi16(xmm1, ch_mag_34_over_42_with_sigma2);
-    xmm0 = _mm_subs_epi16(psi_a_p3_m7, a_sq_p3_m7);
-    xmm1 = _mm_adds_epi16(xmm0, y0_m_3_7);
-    simde__m128i bit_met_p3_m7 = _mm_subs_epi16(xmm1, ch_mag_58_over_42_with_sigma2);
-    xmm0 = _mm_subs_epi16(psi_a_p1_p7, a_sq_p1_p7);
-    xmm1 = _mm_adds_epi16(xmm0, y0_p_1_7);
-    simde__m128i bit_met_p1_p7 = _mm_subs_epi16(xmm1, ch_mag_50_over_42_with_sigma2);
-    xmm0 = _mm_subs_epi16(psi_a_p1_p5, a_sq_p1_p5);
-    xmm1 = _mm_adds_epi16(xmm0, y0_p_1_5);
-    simde__m128i bit_met_p1_p5 = _mm_subs_epi16(xmm1, ch_mag_26_over_42_with_sigma2);
-    xmm0 = _mm_subs_epi16(psi_a_p1_p3, a_sq_p1_p3);
-    xmm1 = _mm_adds_epi16(xmm0, y0_p_1_3);
-    simde__m128i bit_met_p1_p3 = _mm_subs_epi16(xmm1, ch_mag_10_over_42_with_sigma2);
-    xmm0 = _mm_subs_epi16(psi_a_p1_p1, a_sq_p1_p1);
-    xmm1 = _mm_adds_epi16(xmm0, y0_p_1_1);
-    simde__m128i bit_met_p1_p1 = _mm_subs_epi16(xmm1, ch_mag_2_over_42_with_sigma2);
-    xmm0 = _mm_subs_epi16(psi_a_p1_m1, a_sq_p1_m1);
-    xmm1 = _mm_adds_epi16(xmm0, y0_m_1_1);
-    simde__m128i bit_met_p1_m1 = _mm_subs_epi16(xmm1, ch_mag_2_over_42_with_sigma2);
-    xmm0 = _mm_subs_epi16(psi_a_p1_m3, a_sq_p1_m3);
-    xmm1 = _mm_adds_epi16(xmm0, y0_m_1_3);
-    simde__m128i bit_met_p1_m3 = _mm_subs_epi16(xmm1, ch_mag_10_over_42_with_sigma2);
-    xmm0 = _mm_subs_epi16(psi_a_p1_m5, a_sq_p1_m5);
-    xmm1 = _mm_adds_epi16(xmm0, y0_m_1_5);
-    simde__m128i bit_met_p1_m5 = _mm_subs_epi16(xmm1, ch_mag_26_over_42_with_sigma2);
-    xmm0 = _mm_subs_epi16(psi_a_p1_m7, a_sq_p1_m7);
-    xmm1 = _mm_adds_epi16(xmm0, y0_m_1_7);
-    simde__m128i bit_met_p1_m7 = _mm_subs_epi16(xmm1, ch_mag_50_over_42_with_sigma2);
-
-    xmm0 = _mm_subs_epi16(psi_a_m1_p7, a_sq_m1_p7);
-    xmm1 = _mm_subs_epi16(xmm0, y0_m_1_7);
-    simde__m128i bit_met_m1_p7 = _mm_subs_epi16(xmm1, ch_mag_50_over_42_with_sigma2);
-    xmm0 = _mm_subs_epi16(psi_a_m1_p5, a_sq_m1_p5);
-    xmm1 = _mm_subs_epi16(xmm0, y0_m_1_5);
-    simde__m128i bit_met_m1_p5 = _mm_subs_epi16(xmm1, ch_mag_26_over_42_with_sigma2);
-    xmm0 = _mm_subs_epi16(psi_a_m1_p3, a_sq_m1_p3);
-    xmm1 = _mm_subs_epi16(xmm0, y0_m_1_3);
-    simde__m128i bit_met_m1_p3 = _mm_subs_epi16(xmm1, ch_mag_10_over_42_with_sigma2);
-    xmm0 = _mm_subs_epi16(psi_a_m1_p1, a_sq_m1_p1);
-    xmm1 = _mm_subs_epi16(xmm0, y0_m_1_1);
-    simde__m128i bit_met_m1_p1 = _mm_subs_epi16(xmm1, ch_mag_2_over_42_with_sigma2);
-    xmm0 = _mm_subs_epi16(psi_a_m1_m1, a_sq_m1_m1);
-    xmm1 = _mm_subs_epi16(xmm0, y0_p_1_1);
-    simde__m128i bit_met_m1_m1 = _mm_subs_epi16(xmm1, ch_mag_2_over_42_with_sigma2);
-    xmm0 = _mm_subs_epi16(psi_a_m1_m3, a_sq_m1_m3);
-    xmm1 = _mm_subs_epi16(xmm0, y0_p_1_3);
-    simde__m128i bit_met_m1_m3 = _mm_subs_epi16(xmm1, ch_mag_10_over_42_with_sigma2);
-    xmm0 = _mm_subs_epi16(psi_a_m1_m5, a_sq_m1_m5);
-    xmm1 = _mm_subs_epi16(xmm0, y0_p_1_5);
-    simde__m128i bit_met_m1_m5 = _mm_subs_epi16(xmm1, ch_mag_26_over_42_with_sigma2);
-    xmm0 = _mm_subs_epi16(psi_a_m1_m7, a_sq_m1_m7);
-    xmm1 = _mm_subs_epi16(xmm0, y0_p_1_7);
-    simde__m128i bit_met_m1_m7 = _mm_subs_epi16(xmm1, ch_mag_50_over_42_with_sigma2);
-    xmm0 = _mm_subs_epi16(psi_a_m3_p7, a_sq_m3_p7);
-    xmm1 = _mm_subs_epi16(xmm0, y0_m_3_7);
-    simde__m128i bit_met_m3_p7 = _mm_subs_epi16(xmm1, ch_mag_58_over_42_with_sigma2);
-    xmm0 = _mm_subs_epi16(psi_a_m3_p5, a_sq_m3_p5);
-    xmm1 = _mm_subs_epi16(xmm0, y0_m_3_5);
-    simde__m128i bit_met_m3_p5 = _mm_subs_epi16(xmm1, ch_mag_34_over_42_with_sigma2);
-    xmm0 = _mm_subs_epi16(psi_a_m3_p3, a_sq_m3_p3);
-    xmm1 = _mm_subs_epi16(xmm0, y0_m_3_3);
-    simde__m128i bit_met_m3_p3 = _mm_subs_epi16(xmm1, ch_mag_18_over_42_with_sigma2);
-    xmm0 = _mm_subs_epi16(psi_a_m3_p1, a_sq_m3_p1);
-    xmm1 = _mm_subs_epi16(xmm0, y0_m_3_1);
-    simde__m128i bit_met_m3_p1 = _mm_subs_epi16(xmm1, ch_mag_10_over_42_with_sigma2);
-    xmm0 = _mm_subs_epi16(psi_a_m3_m1, a_sq_m3_m1);
-    xmm1 = _mm_subs_epi16(xmm0, y0_p_3_1);
-    simde__m128i bit_met_m3_m1 = _mm_subs_epi16(xmm1, ch_mag_10_over_42_with_sigma2);
-    xmm0 = _mm_subs_epi16(psi_a_m3_m3, a_sq_m3_m3);
-    xmm1 = _mm_subs_epi16(xmm0, y0_p_3_3);
-    simde__m128i bit_met_m3_m3 = _mm_subs_epi16(xmm1, ch_mag_18_over_42_with_sigma2);
-    xmm0 = _mm_subs_epi16(psi_a_m3_m5, a_sq_m3_m5);
-    xmm1 = _mm_subs_epi16(xmm0, y0_p_3_5);
-    simde__m128i bit_met_m3_m5 = _mm_subs_epi16(xmm1, ch_mag_34_over_42_with_sigma2);
-    xmm0 = _mm_subs_epi16(psi_a_m3_m7, a_sq_m3_m7);
-    xmm1 = _mm_subs_epi16(xmm0, y0_p_3_7);
-    simde__m128i bit_met_m3_m7 = _mm_subs_epi16(xmm1, ch_mag_58_over_42_with_sigma2);
-    xmm0 = _mm_subs_epi16(psi_a_m5_p7, a_sq_m5_p7);
-    xmm1 = _mm_subs_epi16(xmm0, y0_m_5_7);
-    simde__m128i bit_met_m5_p7 = _mm_subs_epi16(xmm1, ch_mag_74_over_42_with_sigma2);
-    xmm0 = _mm_subs_epi16(psi_a_m5_p5, a_sq_m5_p5);
-    xmm1 = _mm_subs_epi16(xmm0, y0_m_5_5);
-    simde__m128i bit_met_m5_p5 = _mm_subs_epi16(xmm1, ch_mag_50_over_42_with_sigma2);
-    xmm0 = _mm_subs_epi16(psi_a_m5_p3, a_sq_m5_p3);
-    xmm1 = _mm_subs_epi16(xmm0, y0_m_5_3);
-    simde__m128i bit_met_m5_p3 = _mm_subs_epi16(xmm1, ch_mag_34_over_42_with_sigma2);
-    xmm0 = _mm_subs_epi16(psi_a_m5_p1, a_sq_m5_p1);
-    xmm1 = _mm_subs_epi16(xmm0, y0_m_5_1);
-    simde__m128i bit_met_m5_p1 = _mm_subs_epi16(xmm1, ch_mag_26_over_42_with_sigma2);
-    xmm0 = _mm_subs_epi16(psi_a_m5_m1, a_sq_m5_m1);
-    xmm1 = _mm_subs_epi16(xmm0, y0_p_5_1);
-    simde__m128i bit_met_m5_m1 = _mm_subs_epi16(xmm1, ch_mag_26_over_42_with_sigma2);
-    xmm0 = _mm_subs_epi16(psi_a_m5_m3, a_sq_m5_m3);
-    xmm1 = _mm_subs_epi16(xmm0, y0_p_5_3);
-    simde__m128i bit_met_m5_m3 = _mm_subs_epi16(xmm1, ch_mag_34_over_42_with_sigma2);
-    xmm0 = _mm_subs_epi16(psi_a_m5_m5, a_sq_m5_m5);
-    xmm1 = _mm_subs_epi16(xmm0, y0_p_5_5);
-    simde__m128i bit_met_m5_m5 = _mm_subs_epi16(xmm1, ch_mag_50_over_42_with_sigma2);
-    xmm0 = _mm_subs_epi16(psi_a_m5_m7, a_sq_m5_m7);
-    xmm1 = _mm_subs_epi16(xmm0, y0_p_5_7);
-    simde__m128i bit_met_m5_m7 = _mm_subs_epi16(xmm1, ch_mag_74_over_42_with_sigma2);
-    xmm0 = _mm_subs_epi16(psi_a_m7_p7, a_sq_m7_p7);
-    xmm1 = _mm_subs_epi16(xmm0, y0_m_7_7);
-    simde__m128i bit_met_m7_p7 = _mm_subs_epi16(xmm1, ch_mag_98_over_42_with_sigma2);
-    xmm0 = _mm_subs_epi16(psi_a_m7_p5, a_sq_m7_p5);
-    xmm1 = _mm_subs_epi16(xmm0, y0_m_7_5);
-    simde__m128i bit_met_m7_p5 = _mm_subs_epi16(xmm1, ch_mag_74_over_42_with_sigma2);
-    xmm0 = _mm_subs_epi16(psi_a_m7_p3, a_sq_m7_p3);
-    xmm1 = _mm_subs_epi16(xmm0, y0_m_7_3);
-    simde__m128i bit_met_m7_p3 = _mm_subs_epi16(xmm1, ch_mag_58_over_42_with_sigma2);
-    xmm0 = _mm_subs_epi16(psi_a_m7_p1, a_sq_m7_p1);
-    xmm1 = _mm_subs_epi16(xmm0, y0_m_7_1);
-    simde__m128i bit_met_m7_p1 = _mm_subs_epi16(xmm1, ch_mag_50_over_42_with_sigma2);
-    xmm0 = _mm_subs_epi16(psi_a_m7_m1, a_sq_m7_m1);
-    xmm1 = _mm_subs_epi16(xmm0, y0_p_7_1);
-    simde__m128i bit_met_m7_m1 = _mm_subs_epi16(xmm1, ch_mag_50_over_42_with_sigma2);
-    xmm0 = _mm_subs_epi16(psi_a_m7_m3, a_sq_m7_m3);
-    xmm1 = _mm_subs_epi16(xmm0, y0_p_7_3);
-    simde__m128i bit_met_m7_m3 = _mm_subs_epi16(xmm1, ch_mag_58_over_42_with_sigma2);
-    xmm0 = _mm_subs_epi16(psi_a_m7_m5, a_sq_m7_m5);
-    xmm1 = _mm_subs_epi16(xmm0, y0_p_7_5);
-    simde__m128i bit_met_m7_m5 = _mm_subs_epi16(xmm1, ch_mag_74_over_42_with_sigma2);
-    xmm0 = _mm_subs_epi16(psi_a_m7_m7, a_sq_m7_m7);
-    xmm1 = _mm_subs_epi16(xmm0, y0_p_7_7);
-    simde__m128i bit_met_m7_m7 = _mm_subs_epi16(xmm1, ch_mag_98_over_42_with_sigma2);
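+    // Per-candidate max-log metric, following the variable naming:
+    // (psi_a - a^2) + y0 - ch_mag*(xR^2 + xI^2)/42 (noise-scaled), computed
+    // for all 64 (xR, xI) pairs with saturating 16-bit arithmetic.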
+    xmm0 = simde_mm_subs_epi16(psi_a_p7_p7, a_sq_p7_p7);
+    xmm1 = simde_mm_adds_epi16(xmm0, y0_p_7_7);
+    simde__m128i bit_met_p7_p7 = simde_mm_subs_epi16(xmm1, ch_mag_98_over_42_with_sigma2);
+    xmm0 = simde_mm_subs_epi16(psi_a_p7_p5, a_sq_p7_p5);
+    xmm1 = simde_mm_adds_epi16(xmm0, y0_p_7_5);
+    simde__m128i bit_met_p7_p5 = simde_mm_subs_epi16(xmm1, ch_mag_74_over_42_with_sigma2);
+    xmm0 = simde_mm_subs_epi16(psi_a_p7_p3, a_sq_p7_p3);
+    xmm1 = simde_mm_adds_epi16(xmm0, y0_p_7_3);
+    simde__m128i bit_met_p7_p3 = simde_mm_subs_epi16(xmm1, ch_mag_58_over_42_with_sigma2);
+    xmm0 = simde_mm_subs_epi16(psi_a_p7_p1, a_sq_p7_p1);
+    xmm1 = simde_mm_adds_epi16(xmm0, y0_p_7_1);
+    simde__m128i bit_met_p7_p1 = simde_mm_subs_epi16(xmm1, ch_mag_50_over_42_with_sigma2);
+    xmm0 = simde_mm_subs_epi16(psi_a_p7_m1, a_sq_p7_m1);
+    xmm1 = simde_mm_adds_epi16(xmm0, y0_m_7_1);
+    simde__m128i bit_met_p7_m1 = simde_mm_subs_epi16(xmm1, ch_mag_50_over_42_with_sigma2);
+    xmm0 = simde_mm_subs_epi16(psi_a_p7_m3, a_sq_p7_m3);
+    xmm1 = simde_mm_adds_epi16(xmm0, y0_m_7_3);
+    simde__m128i bit_met_p7_m3 = simde_mm_subs_epi16(xmm1, ch_mag_58_over_42_with_sigma2);
+    xmm0 = simde_mm_subs_epi16(psi_a_p7_m5, a_sq_p7_m5);
+    xmm1 = simde_mm_adds_epi16(xmm0, y0_m_7_5);
+    simde__m128i bit_met_p7_m5 = simde_mm_subs_epi16(xmm1, ch_mag_74_over_42_with_sigma2);
+    xmm0 = simde_mm_subs_epi16(psi_a_p7_m7, a_sq_p7_m7);
+    xmm1 = simde_mm_adds_epi16(xmm0, y0_m_7_7);
+    simde__m128i bit_met_p7_m7 = simde_mm_subs_epi16(xmm1, ch_mag_98_over_42_with_sigma2);
+    xmm0 = simde_mm_subs_epi16(psi_a_p5_p7, a_sq_p5_p7);
+    xmm1 = simde_mm_adds_epi16(xmm0, y0_p_5_7);
+    simde__m128i bit_met_p5_p7 = simde_mm_subs_epi16(xmm1, ch_mag_74_over_42_with_sigma2);
+    xmm0 = simde_mm_subs_epi16(psi_a_p5_p5, a_sq_p5_p5);
+    xmm1 = simde_mm_adds_epi16(xmm0, y0_p_5_5);
+    simde__m128i bit_met_p5_p5 = simde_mm_subs_epi16(xmm1, ch_mag_50_over_42_with_sigma2);
+    xmm0 = simde_mm_subs_epi16(psi_a_p5_p3, a_sq_p5_p3);
+    xmm1 = simde_mm_adds_epi16(xmm0, y0_p_5_3);
+    simde__m128i bit_met_p5_p3 = simde_mm_subs_epi16(xmm1, ch_mag_34_over_42_with_sigma2);
+    xmm0 = simde_mm_subs_epi16(psi_a_p5_p1, a_sq_p5_p1);
+    xmm1 = simde_mm_adds_epi16(xmm0, y0_p_5_1);
+    simde__m128i bit_met_p5_p1 = simde_mm_subs_epi16(xmm1, ch_mag_26_over_42_with_sigma2);
+    xmm0 = simde_mm_subs_epi16(psi_a_p5_m1, a_sq_p5_m1);
+    xmm1 = simde_mm_adds_epi16(xmm0, y0_m_5_1);
+    simde__m128i bit_met_p5_m1 = simde_mm_subs_epi16(xmm1, ch_mag_26_over_42_with_sigma2);
+    xmm0 = simde_mm_subs_epi16(psi_a_p5_m3, a_sq_p5_m3);
+    xmm1 = simde_mm_adds_epi16(xmm0, y0_m_5_3);
+    simde__m128i bit_met_p5_m3 = simde_mm_subs_epi16(xmm1, ch_mag_34_over_42_with_sigma2);
+    xmm0 = simde_mm_subs_epi16(psi_a_p5_m5, a_sq_p5_m5);
+    xmm1 = simde_mm_adds_epi16(xmm0, y0_m_5_5);
+    simde__m128i bit_met_p5_m5 = simde_mm_subs_epi16(xmm1, ch_mag_50_over_42_with_sigma2);
+    xmm0 = simde_mm_subs_epi16(psi_a_p5_m7, a_sq_p5_m7);
+    xmm1 = simde_mm_adds_epi16(xmm0, y0_m_5_7);
+    simde__m128i bit_met_p5_m7 = simde_mm_subs_epi16(xmm1, ch_mag_74_over_42_with_sigma2);
+    xmm0 = simde_mm_subs_epi16(psi_a_p3_p7, a_sq_p3_p7);
+    xmm1 = simde_mm_adds_epi16(xmm0, y0_p_3_7);
+    simde__m128i bit_met_p3_p7 = simde_mm_subs_epi16(xmm1, ch_mag_58_over_42_with_sigma2);
+    xmm0 = simde_mm_subs_epi16(psi_a_p3_p5, a_sq_p3_p5);
+    xmm1 = simde_mm_adds_epi16(xmm0, y0_p_3_5);
+    simde__m128i bit_met_p3_p5 = simde_mm_subs_epi16(xmm1, ch_mag_34_over_42_with_sigma2);
+    xmm0 = simde_mm_subs_epi16(psi_a_p3_p3, a_sq_p3_p3);
+    xmm1 = simde_mm_adds_epi16(xmm0, y0_p_3_3);
+    simde__m128i bit_met_p3_p3 = simde_mm_subs_epi16(xmm1, ch_mag_18_over_42_with_sigma2);
+    xmm0 = simde_mm_subs_epi16(psi_a_p3_p1, a_sq_p3_p1);
+    xmm1 = simde_mm_adds_epi16(xmm0, y0_p_3_1);
+    simde__m128i bit_met_p3_p1 = simde_mm_subs_epi16(xmm1, ch_mag_10_over_42_with_sigma2);
+    xmm0 = simde_mm_subs_epi16(psi_a_p3_m1, a_sq_p3_m1);
+    xmm1 = simde_mm_adds_epi16(xmm0, y0_m_3_1);
+    simde__m128i bit_met_p3_m1 = simde_mm_subs_epi16(xmm1, ch_mag_10_over_42_with_sigma2);
+    xmm0 = simde_mm_subs_epi16(psi_a_p3_m3, a_sq_p3_m3);
+    xmm1 = simde_mm_adds_epi16(xmm0, y0_m_3_3);
+    simde__m128i bit_met_p3_m3 = simde_mm_subs_epi16(xmm1, ch_mag_18_over_42_with_sigma2);
+    xmm0 = simde_mm_subs_epi16(psi_a_p3_m5, a_sq_p3_m5);
+    xmm1 = simde_mm_adds_epi16(xmm0, y0_m_3_5);
+    simde__m128i bit_met_p3_m5 = simde_mm_subs_epi16(xmm1, ch_mag_34_over_42_with_sigma2);
+    xmm0 = simde_mm_subs_epi16(psi_a_p3_m7, a_sq_p3_m7);
+    xmm1 = simde_mm_adds_epi16(xmm0, y0_m_3_7);
+    simde__m128i bit_met_p3_m7 = simde_mm_subs_epi16(xmm1, ch_mag_58_over_42_with_sigma2);
+    xmm0 = simde_mm_subs_epi16(psi_a_p1_p7, a_sq_p1_p7);
+    xmm1 = simde_mm_adds_epi16(xmm0, y0_p_1_7);
+    simde__m128i bit_met_p1_p7 = simde_mm_subs_epi16(xmm1, ch_mag_50_over_42_with_sigma2);
+    xmm0 = simde_mm_subs_epi16(psi_a_p1_p5, a_sq_p1_p5);
+    xmm1 = simde_mm_adds_epi16(xmm0, y0_p_1_5);
+    simde__m128i bit_met_p1_p5 = simde_mm_subs_epi16(xmm1, ch_mag_26_over_42_with_sigma2);
+    xmm0 = simde_mm_subs_epi16(psi_a_p1_p3, a_sq_p1_p3);
+    xmm1 = simde_mm_adds_epi16(xmm0, y0_p_1_3);
+    simde__m128i bit_met_p1_p3 = simde_mm_subs_epi16(xmm1, ch_mag_10_over_42_with_sigma2);
+    xmm0 = simde_mm_subs_epi16(psi_a_p1_p1, a_sq_p1_p1);
+    xmm1 = simde_mm_adds_epi16(xmm0, y0_p_1_1);
+    simde__m128i bit_met_p1_p1 = simde_mm_subs_epi16(xmm1, ch_mag_2_over_42_with_sigma2);
+    xmm0 = simde_mm_subs_epi16(psi_a_p1_m1, a_sq_p1_m1);
+    xmm1 = simde_mm_adds_epi16(xmm0, y0_m_1_1);
+    simde__m128i bit_met_p1_m1 = simde_mm_subs_epi16(xmm1, ch_mag_2_over_42_with_sigma2);
+    xmm0 = simde_mm_subs_epi16(psi_a_p1_m3, a_sq_p1_m3);
+    xmm1 = simde_mm_adds_epi16(xmm0, y0_m_1_3);
+    simde__m128i bit_met_p1_m3 = simde_mm_subs_epi16(xmm1, ch_mag_10_over_42_with_sigma2);
+    xmm0 = simde_mm_subs_epi16(psi_a_p1_m5, a_sq_p1_m5);
+    xmm1 = simde_mm_adds_epi16(xmm0, y0_m_1_5);
+    simde__m128i bit_met_p1_m5 = simde_mm_subs_epi16(xmm1, ch_mag_26_over_42_with_sigma2);
+    xmm0 = simde_mm_subs_epi16(psi_a_p1_m7, a_sq_p1_m7);
+    xmm1 = simde_mm_adds_epi16(xmm0, y0_m_1_7);
+    simde__m128i bit_met_p1_m7 = simde_mm_subs_epi16(xmm1, ch_mag_50_over_42_with_sigma2);
+
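+    // For candidates with negative xR the y0 cross term flips sign, so the
+    // metric uses subs rather than adds below.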
+    xmm0 = simde_mm_subs_epi16(psi_a_m1_p7, a_sq_m1_p7);
+    xmm1 = simde_mm_subs_epi16(xmm0, y0_m_1_7);
+    simde__m128i bit_met_m1_p7 = simde_mm_subs_epi16(xmm1, ch_mag_50_over_42_with_sigma2);
+    xmm0 = simde_mm_subs_epi16(psi_a_m1_p5, a_sq_m1_p5);
+    xmm1 = simde_mm_subs_epi16(xmm0, y0_m_1_5);
+    simde__m128i bit_met_m1_p5 = simde_mm_subs_epi16(xmm1, ch_mag_26_over_42_with_sigma2);
+    xmm0 = simde_mm_subs_epi16(psi_a_m1_p3, a_sq_m1_p3);
+    xmm1 = simde_mm_subs_epi16(xmm0, y0_m_1_3);
+    simde__m128i bit_met_m1_p3 = simde_mm_subs_epi16(xmm1, ch_mag_10_over_42_with_sigma2);
+    xmm0 = simde_mm_subs_epi16(psi_a_m1_p1, a_sq_m1_p1);
+    xmm1 = simde_mm_subs_epi16(xmm0, y0_m_1_1);
+    simde__m128i bit_met_m1_p1 = simde_mm_subs_epi16(xmm1, ch_mag_2_over_42_with_sigma2);
+    xmm0 = simde_mm_subs_epi16(psi_a_m1_m1, a_sq_m1_m1);
+    xmm1 = simde_mm_subs_epi16(xmm0, y0_p_1_1);
+    simde__m128i bit_met_m1_m1 = simde_mm_subs_epi16(xmm1, ch_mag_2_over_42_with_sigma2);
+    xmm0 = simde_mm_subs_epi16(psi_a_m1_m3, a_sq_m1_m3);
+    xmm1 = simde_mm_subs_epi16(xmm0, y0_p_1_3);
+    simde__m128i bit_met_m1_m3 = simde_mm_subs_epi16(xmm1, ch_mag_10_over_42_with_sigma2);
+    xmm0 = simde_mm_subs_epi16(psi_a_m1_m5, a_sq_m1_m5);
+    xmm1 = simde_mm_subs_epi16(xmm0, y0_p_1_5);
+    simde__m128i bit_met_m1_m5 = simde_mm_subs_epi16(xmm1, ch_mag_26_over_42_with_sigma2);
+    xmm0 = simde_mm_subs_epi16(psi_a_m1_m7, a_sq_m1_m7);
+    xmm1 = simde_mm_subs_epi16(xmm0, y0_p_1_7);
+    simde__m128i bit_met_m1_m7 = simde_mm_subs_epi16(xmm1, ch_mag_50_over_42_with_sigma2);
+    xmm0 = simde_mm_subs_epi16(psi_a_m3_p7, a_sq_m3_p7);
+    xmm1 = simde_mm_subs_epi16(xmm0, y0_m_3_7);
+    simde__m128i bit_met_m3_p7 = simde_mm_subs_epi16(xmm1, ch_mag_58_over_42_with_sigma2);
+    xmm0 = simde_mm_subs_epi16(psi_a_m3_p5, a_sq_m3_p5);
+    xmm1 = simde_mm_subs_epi16(xmm0, y0_m_3_5);
+    simde__m128i bit_met_m3_p5 = simde_mm_subs_epi16(xmm1, ch_mag_34_over_42_with_sigma2);
+    xmm0 = simde_mm_subs_epi16(psi_a_m3_p3, a_sq_m3_p3);
+    xmm1 = simde_mm_subs_epi16(xmm0, y0_m_3_3);
+    simde__m128i bit_met_m3_p3 = simde_mm_subs_epi16(xmm1, ch_mag_18_over_42_with_sigma2);
+    xmm0 = simde_mm_subs_epi16(psi_a_m3_p1, a_sq_m3_p1);
+    xmm1 = simde_mm_subs_epi16(xmm0, y0_m_3_1);
+    simde__m128i bit_met_m3_p1 = simde_mm_subs_epi16(xmm1, ch_mag_10_over_42_with_sigma2);
+    xmm0 = simde_mm_subs_epi16(psi_a_m3_m1, a_sq_m3_m1);
+    xmm1 = simde_mm_subs_epi16(xmm0, y0_p_3_1);
+    simde__m128i bit_met_m3_m1 = simde_mm_subs_epi16(xmm1, ch_mag_10_over_42_with_sigma2);
+    xmm0 = simde_mm_subs_epi16(psi_a_m3_m3, a_sq_m3_m3);
+    xmm1 = simde_mm_subs_epi16(xmm0, y0_p_3_3);
+    simde__m128i bit_met_m3_m3 = simde_mm_subs_epi16(xmm1, ch_mag_18_over_42_with_sigma2);
+    xmm0 = simde_mm_subs_epi16(psi_a_m3_m5, a_sq_m3_m5);
+    xmm1 = simde_mm_subs_epi16(xmm0, y0_p_3_5);
+    simde__m128i bit_met_m3_m5 = simde_mm_subs_epi16(xmm1, ch_mag_34_over_42_with_sigma2);
+    xmm0 = simde_mm_subs_epi16(psi_a_m3_m7, a_sq_m3_m7);
+    xmm1 = simde_mm_subs_epi16(xmm0, y0_p_3_7);
+    simde__m128i bit_met_m3_m7 = simde_mm_subs_epi16(xmm1, ch_mag_58_over_42_with_sigma2);
+    xmm0 = simde_mm_subs_epi16(psi_a_m5_p7, a_sq_m5_p7);
+    xmm1 = simde_mm_subs_epi16(xmm0, y0_m_5_7);
+    simde__m128i bit_met_m5_p7 = simde_mm_subs_epi16(xmm1, ch_mag_74_over_42_with_sigma2);
+    xmm0 = simde_mm_subs_epi16(psi_a_m5_p5, a_sq_m5_p5);
+    xmm1 = simde_mm_subs_epi16(xmm0, y0_m_5_5);
+    simde__m128i bit_met_m5_p5 = simde_mm_subs_epi16(xmm1, ch_mag_50_over_42_with_sigma2);
+    xmm0 = simde_mm_subs_epi16(psi_a_m5_p3, a_sq_m5_p3);
+    xmm1 = simde_mm_subs_epi16(xmm0, y0_m_5_3);
+    simde__m128i bit_met_m5_p3 = simde_mm_subs_epi16(xmm1, ch_mag_34_over_42_with_sigma2);
+    xmm0 = simde_mm_subs_epi16(psi_a_m5_p1, a_sq_m5_p1);
+    xmm1 = simde_mm_subs_epi16(xmm0, y0_m_5_1);
+    simde__m128i bit_met_m5_p1 = simde_mm_subs_epi16(xmm1, ch_mag_26_over_42_with_sigma2);
+    xmm0 = simde_mm_subs_epi16(psi_a_m5_m1, a_sq_m5_m1);
+    xmm1 = simde_mm_subs_epi16(xmm0, y0_p_5_1);
+    simde__m128i bit_met_m5_m1 = simde_mm_subs_epi16(xmm1, ch_mag_26_over_42_with_sigma2);
+    xmm0 = simde_mm_subs_epi16(psi_a_m5_m3, a_sq_m5_m3);
+    xmm1 = simde_mm_subs_epi16(xmm0, y0_p_5_3);
+    simde__m128i bit_met_m5_m3 = simde_mm_subs_epi16(xmm1, ch_mag_34_over_42_with_sigma2);
+    xmm0 = simde_mm_subs_epi16(psi_a_m5_m5, a_sq_m5_m5);
+    xmm1 = simde_mm_subs_epi16(xmm0, y0_p_5_5);
+    simde__m128i bit_met_m5_m5 = simde_mm_subs_epi16(xmm1, ch_mag_50_over_42_with_sigma2);
+    xmm0 = simde_mm_subs_epi16(psi_a_m5_m7, a_sq_m5_m7);
+    xmm1 = simde_mm_subs_epi16(xmm0, y0_p_5_7);
+    simde__m128i bit_met_m5_m7 = simde_mm_subs_epi16(xmm1, ch_mag_74_over_42_with_sigma2);
+    xmm0 = simde_mm_subs_epi16(psi_a_m7_p7, a_sq_m7_p7);
+    xmm1 = simde_mm_subs_epi16(xmm0, y0_m_7_7);
+    simde__m128i bit_met_m7_p7 = simde_mm_subs_epi16(xmm1, ch_mag_98_over_42_with_sigma2);
+    xmm0 = simde_mm_subs_epi16(psi_a_m7_p5, a_sq_m7_p5);
+    xmm1 = simde_mm_subs_epi16(xmm0, y0_m_7_5);
+    simde__m128i bit_met_m7_p5 = simde_mm_subs_epi16(xmm1, ch_mag_74_over_42_with_sigma2);
+    xmm0 = simde_mm_subs_epi16(psi_a_m7_p3, a_sq_m7_p3);
+    xmm1 = simde_mm_subs_epi16(xmm0, y0_m_7_3);
+    simde__m128i bit_met_m7_p3 = simde_mm_subs_epi16(xmm1, ch_mag_58_over_42_with_sigma2);
+    xmm0 = simde_mm_subs_epi16(psi_a_m7_p1, a_sq_m7_p1);
+    xmm1 = simde_mm_subs_epi16(xmm0, y0_m_7_1);
+    simde__m128i bit_met_m7_p1 = simde_mm_subs_epi16(xmm1, ch_mag_50_over_42_with_sigma2);
+    xmm0 = simde_mm_subs_epi16(psi_a_m7_m1, a_sq_m7_m1);
+    xmm1 = simde_mm_subs_epi16(xmm0, y0_p_7_1);
+    simde__m128i bit_met_m7_m1 = simde_mm_subs_epi16(xmm1, ch_mag_50_over_42_with_sigma2);
+    xmm0 = simde_mm_subs_epi16(psi_a_m7_m3, a_sq_m7_m3);
+    xmm1 = simde_mm_subs_epi16(xmm0, y0_p_7_3);
+    simde__m128i bit_met_m7_m3 = simde_mm_subs_epi16(xmm1, ch_mag_58_over_42_with_sigma2);
+    xmm0 = simde_mm_subs_epi16(psi_a_m7_m5, a_sq_m7_m5);
+    xmm1 = simde_mm_subs_epi16(xmm0, y0_p_7_5);
+    simde__m128i bit_met_m7_m5 = simde_mm_subs_epi16(xmm1, ch_mag_74_over_42_with_sigma2);
+    xmm0 = simde_mm_subs_epi16(psi_a_m7_m7, a_sq_m7_m7);
+    xmm1 = simde_mm_subs_epi16(xmm0, y0_p_7_7);
+    simde__m128i bit_met_m7_m7 = simde_mm_subs_epi16(xmm1, ch_mag_98_over_42_with_sigma2);
 
     // Detection for 1st bit (LTE mapping)
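+    // Max-log LLR per bit: take the max metric over the 32 candidates with
+    // bit = 1 and over the 32 with bit = 0, then subtract.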
     // bit = 1
-    xmm0 = _mm_max_epi16(bit_met_m7_p7, bit_met_m7_p5);
-    xmm1 = _mm_max_epi16(bit_met_m7_p3, bit_met_m7_p1);
-    xmm2 = _mm_max_epi16(bit_met_m7_m1, bit_met_m7_m3);
-    xmm3 = _mm_max_epi16(bit_met_m7_m5, bit_met_m7_m7);
-    xmm4 = _mm_max_epi16(xmm0, xmm1);
-    xmm5 = _mm_max_epi16(xmm2, xmm3);
-    simde__m128i logmax_den_re0 = _mm_max_epi16(xmm4, xmm5);
-    xmm0 = _mm_max_epi16(bit_met_m5_p7, bit_met_m5_p5);
-    xmm1 = _mm_max_epi16(bit_met_m5_p3, bit_met_m5_p1);
-    xmm2 = _mm_max_epi16(bit_met_m5_m1, bit_met_m5_m3);
-    xmm3 = _mm_max_epi16(bit_met_m5_m5, bit_met_m5_m7);
-    xmm4 = _mm_max_epi16(xmm0, xmm1);
-    xmm5 = _mm_max_epi16(xmm2, xmm3);
-    logmax_den_re0 = _mm_max_epi16(logmax_den_re0, xmm4);
-    logmax_den_re0 = _mm_max_epi16(logmax_den_re0, xmm5);
-    xmm0 = _mm_max_epi16(bit_met_m3_p7, bit_met_m3_p5);
-    xmm1 = _mm_max_epi16(bit_met_m3_p3, bit_met_m3_p1);
-    xmm2 = _mm_max_epi16(bit_met_m3_m1, bit_met_m3_m3);
-    xmm3 = _mm_max_epi16(bit_met_m3_m5, bit_met_m3_m7);
-    xmm4 = _mm_max_epi16(xmm0, xmm1);
-    xmm5 = _mm_max_epi16(xmm2, xmm3);
-    logmax_den_re0 = _mm_max_epi16(logmax_den_re0, xmm4);
-    logmax_den_re0 = _mm_max_epi16(logmax_den_re0, xmm5);
-    xmm0 = _mm_max_epi16(bit_met_m1_p7, bit_met_m1_p5);
-    xmm1 = _mm_max_epi16(bit_met_m1_p3, bit_met_m1_p1);
-    xmm2 = _mm_max_epi16(bit_met_m1_m1, bit_met_m1_m3);
-    xmm3 = _mm_max_epi16(bit_met_m1_m5, bit_met_m1_m7);
-    xmm4 = _mm_max_epi16(xmm0, xmm1);
-    xmm5 = _mm_max_epi16(xmm2, xmm3);
-    logmax_den_re0 = _mm_max_epi16(logmax_den_re0, xmm4);
-    logmax_den_re0 = _mm_max_epi16(logmax_den_re0, xmm5);
+    xmm0 = simde_mm_max_epi16(bit_met_m7_p7, bit_met_m7_p5);
+    xmm1 = simde_mm_max_epi16(bit_met_m7_p3, bit_met_m7_p1);
+    xmm2 = simde_mm_max_epi16(bit_met_m7_m1, bit_met_m7_m3);
+    xmm3 = simde_mm_max_epi16(bit_met_m7_m5, bit_met_m7_m7);
+    xmm4 = simde_mm_max_epi16(xmm0, xmm1);
+    xmm5 = simde_mm_max_epi16(xmm2, xmm3);
+    simde__m128i logmax_den_re0 = simde_mm_max_epi16(xmm4, xmm5);
+    xmm0 = simde_mm_max_epi16(bit_met_m5_p7, bit_met_m5_p5);
+    xmm1 = simde_mm_max_epi16(bit_met_m5_p3, bit_met_m5_p1);
+    xmm2 = simde_mm_max_epi16(bit_met_m5_m1, bit_met_m5_m3);
+    xmm3 = simde_mm_max_epi16(bit_met_m5_m5, bit_met_m5_m7);
+    xmm4 = simde_mm_max_epi16(xmm0, xmm1);
+    xmm5 = simde_mm_max_epi16(xmm2, xmm3);
+    logmax_den_re0 = simde_mm_max_epi16(logmax_den_re0, xmm4);
+    logmax_den_re0 = simde_mm_max_epi16(logmax_den_re0, xmm5);
+    xmm0 = simde_mm_max_epi16(bit_met_m3_p7, bit_met_m3_p5);
+    xmm1 = simde_mm_max_epi16(bit_met_m3_p3, bit_met_m3_p1);
+    xmm2 = simde_mm_max_epi16(bit_met_m3_m1, bit_met_m3_m3);
+    xmm3 = simde_mm_max_epi16(bit_met_m3_m5, bit_met_m3_m7);
+    xmm4 = simde_mm_max_epi16(xmm0, xmm1);
+    xmm5 = simde_mm_max_epi16(xmm2, xmm3);
+    logmax_den_re0 = simde_mm_max_epi16(logmax_den_re0, xmm4);
+    logmax_den_re0 = simde_mm_max_epi16(logmax_den_re0, xmm5);
+    xmm0 = simde_mm_max_epi16(bit_met_m1_p7, bit_met_m1_p5);
+    xmm1 = simde_mm_max_epi16(bit_met_m1_p3, bit_met_m1_p1);
+    xmm2 = simde_mm_max_epi16(bit_met_m1_m1, bit_met_m1_m3);
+    xmm3 = simde_mm_max_epi16(bit_met_m1_m5, bit_met_m1_m7);
+    xmm4 = simde_mm_max_epi16(xmm0, xmm1);
+    xmm5 = simde_mm_max_epi16(xmm2, xmm3);
+    logmax_den_re0 = simde_mm_max_epi16(logmax_den_re0, xmm4);
+    logmax_den_re0 = simde_mm_max_epi16(logmax_den_re0, xmm5);
 
     // bit = 0
-    xmm0 = _mm_max_epi16(bit_met_p7_p7, bit_met_p7_p5);
-    xmm1 = _mm_max_epi16(bit_met_p7_p3, bit_met_p7_p1);
-    xmm2 = _mm_max_epi16(bit_met_p7_m1, bit_met_p7_m3);
-    xmm3 = _mm_max_epi16(bit_met_p7_m5, bit_met_p7_m7);
-    xmm4 = _mm_max_epi16(xmm0, xmm1);
-    xmm5 = _mm_max_epi16(xmm2, xmm3);
-    simde__m128i logmax_num_re0 = _mm_max_epi16(xmm4, xmm5);
-    xmm0 = _mm_max_epi16(bit_met_p5_p7, bit_met_p5_p5);
-    xmm1 = _mm_max_epi16(bit_met_p5_p3, bit_met_p5_p1);
-    xmm2 = _mm_max_epi16(bit_met_p5_m1, bit_met_p5_m3);
-    xmm3 = _mm_max_epi16(bit_met_p5_m5, bit_met_p5_m7);
-    xmm4 = _mm_max_epi16(xmm0, xmm1);
-    xmm5 = _mm_max_epi16(xmm2, xmm3);
-    logmax_num_re0 = _mm_max_epi16(logmax_num_re0, xmm4);
-    logmax_num_re0 = _mm_max_epi16(logmax_num_re0, xmm5);
-    xmm0 = _mm_max_epi16(bit_met_p3_p7, bit_met_p3_p5);
-    xmm1 = _mm_max_epi16(bit_met_p3_p3, bit_met_p3_p1);
-    xmm2 = _mm_max_epi16(bit_met_p3_m1, bit_met_p3_m3);
-    xmm3 = _mm_max_epi16(bit_met_p3_m5, bit_met_p3_m7);
-    xmm4 = _mm_max_epi16(xmm0, xmm1);
-    xmm5 = _mm_max_epi16(xmm2, xmm3);
-    logmax_num_re0 = _mm_max_epi16(logmax_num_re0, xmm4);
-    logmax_num_re0 = _mm_max_epi16(logmax_num_re0, xmm5);
-    xmm0 = _mm_max_epi16(bit_met_p1_p7, bit_met_p1_p5);
-    xmm1 = _mm_max_epi16(bit_met_p1_p3, bit_met_p1_p1);
-    xmm2 = _mm_max_epi16(bit_met_p1_m1, bit_met_p1_m3);
-    xmm3 = _mm_max_epi16(bit_met_p1_m5, bit_met_p1_m7);
-    xmm4 = _mm_max_epi16(xmm0, xmm1);
-    xmm5 = _mm_max_epi16(xmm2, xmm3);
-    logmax_num_re0 = _mm_max_epi16(logmax_num_re0, xmm4);
-    logmax_num_re0 = _mm_max_epi16(logmax_num_re0, xmm5);
-
-    y0r = _mm_subs_epi16(logmax_num_re0, logmax_den_re0);
+    xmm0 = simde_mm_max_epi16(bit_met_p7_p7, bit_met_p7_p5);
+    xmm1 = simde_mm_max_epi16(bit_met_p7_p3, bit_met_p7_p1);
+    xmm2 = simde_mm_max_epi16(bit_met_p7_m1, bit_met_p7_m3);
+    xmm3 = simde_mm_max_epi16(bit_met_p7_m5, bit_met_p7_m7);
+    xmm4 = simde_mm_max_epi16(xmm0, xmm1);
+    xmm5 = simde_mm_max_epi16(xmm2, xmm3);
+    simde__m128i logmax_num_re0 = simde_mm_max_epi16(xmm4, xmm5);
+    xmm0 = simde_mm_max_epi16(bit_met_p5_p7, bit_met_p5_p5);
+    xmm1 = simde_mm_max_epi16(bit_met_p5_p3, bit_met_p5_p1);
+    xmm2 = simde_mm_max_epi16(bit_met_p5_m1, bit_met_p5_m3);
+    xmm3 = simde_mm_max_epi16(bit_met_p5_m5, bit_met_p5_m7);
+    xmm4 = simde_mm_max_epi16(xmm0, xmm1);
+    xmm5 = simde_mm_max_epi16(xmm2, xmm3);
+    logmax_num_re0 = simde_mm_max_epi16(logmax_num_re0, xmm4);
+    logmax_num_re0 = simde_mm_max_epi16(logmax_num_re0, xmm5);
+    xmm0 = simde_mm_max_epi16(bit_met_p3_p7, bit_met_p3_p5);
+    xmm1 = simde_mm_max_epi16(bit_met_p3_p3, bit_met_p3_p1);
+    xmm2 = simde_mm_max_epi16(bit_met_p3_m1, bit_met_p3_m3);
+    xmm3 = simde_mm_max_epi16(bit_met_p3_m5, bit_met_p3_m7);
+    xmm4 = simde_mm_max_epi16(xmm0, xmm1);
+    xmm5 = simde_mm_max_epi16(xmm2, xmm3);
+    logmax_num_re0 = simde_mm_max_epi16(logmax_num_re0, xmm4);
+    logmax_num_re0 = simde_mm_max_epi16(logmax_num_re0, xmm5);
+    xmm0 = simde_mm_max_epi16(bit_met_p1_p7, bit_met_p1_p5);
+    xmm1 = simde_mm_max_epi16(bit_met_p1_p3, bit_met_p1_p1);
+    xmm2 = simde_mm_max_epi16(bit_met_p1_m1, bit_met_p1_m3);
+    xmm3 = simde_mm_max_epi16(bit_met_p1_m5, bit_met_p1_m7);
+    xmm4 = simde_mm_max_epi16(xmm0, xmm1);
+    xmm5 = simde_mm_max_epi16(xmm2, xmm3);
+    logmax_num_re0 = simde_mm_max_epi16(logmax_num_re0, xmm4);
+    logmax_num_re0 = simde_mm_max_epi16(logmax_num_re0, xmm5);
+
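+    // Saturating difference of the two hypothesis maxima gives the 1st-bit LLR.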
+    y0r = simde_mm_subs_epi16(logmax_num_re0, logmax_den_re0);
 
     // Detection for 2nd bit (LTE mapping)
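+    // The 2nd bit is the sign of xI, so the max now runs over columns
+    // (fixed xI) instead of rows.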
     // bit = 1
-    xmm0 = _mm_max_epi16(bit_met_p7_m1, bit_met_p5_m1);
-    xmm1 = _mm_max_epi16(bit_met_p3_m1, bit_met_p1_m1);
-    xmm2 = _mm_max_epi16(bit_met_m1_m1, bit_met_m3_m1);
-    xmm3 = _mm_max_epi16(bit_met_m5_m1, bit_met_m7_m1);
-    xmm4 = _mm_max_epi16(xmm0, xmm1);
-    xmm5 = _mm_max_epi16(xmm2, xmm3);
-    logmax_den_re0 = _mm_max_epi16(xmm4, xmm5);
-    xmm0 = _mm_max_epi16(bit_met_p7_m3, bit_met_p5_m3);
-    xmm1 = _mm_max_epi16(bit_met_p3_m3, bit_met_p1_m3);
-    xmm2 = _mm_max_epi16(bit_met_m1_m3, bit_met_m3_m3);
-    xmm3 = _mm_max_epi16(bit_met_m5_m3, bit_met_m7_m3);
-    xmm4 = _mm_max_epi16(xmm0, xmm1);
-    xmm5 = _mm_max_epi16(xmm2, xmm3);
-    logmax_den_re0 = _mm_max_epi16(logmax_den_re0, xmm4);
-    logmax_den_re0 = _mm_max_epi16(logmax_den_re0, xmm5);
-    xmm0 = _mm_max_epi16(bit_met_p7_m5, bit_met_p5_m5);
-    xmm1 = _mm_max_epi16(bit_met_p3_m5, bit_met_p1_m5);
-    xmm2 = _mm_max_epi16(bit_met_m1_m5, bit_met_m3_m5);
-    xmm3 = _mm_max_epi16(bit_met_m5_m5, bit_met_m7_m5);
-    xmm4 = _mm_max_epi16(xmm0, xmm1);
-    xmm5 = _mm_max_epi16(xmm2, xmm3);
-    logmax_den_re0 = _mm_max_epi16(logmax_den_re0, xmm4);
-    logmax_den_re0 = _mm_max_epi16(logmax_den_re0, xmm5);
-    xmm0 = _mm_max_epi16(bit_met_p7_m7, bit_met_p5_m7);
-    xmm1 = _mm_max_epi16(bit_met_p3_m7, bit_met_p1_m7);
-    xmm2 = _mm_max_epi16(bit_met_m1_m7, bit_met_m3_m7);
-    xmm3 = _mm_max_epi16(bit_met_m5_m7, bit_met_m7_m7);
-    xmm4 = _mm_max_epi16(xmm0, xmm1);
-    xmm5 = _mm_max_epi16(xmm2, xmm3);
-    logmax_den_re0 = _mm_max_epi16(logmax_den_re0, xmm4);
-    logmax_den_re0 = _mm_max_epi16(logmax_den_re0, xmm5);
+    xmm0 = simde_mm_max_epi16(bit_met_p7_m1, bit_met_p5_m1);
+    xmm1 = simde_mm_max_epi16(bit_met_p3_m1, bit_met_p1_m1);
+    xmm2 = simde_mm_max_epi16(bit_met_m1_m1, bit_met_m3_m1);
+    xmm3 = simde_mm_max_epi16(bit_met_m5_m1, bit_met_m7_m1);
+    xmm4 = simde_mm_max_epi16(xmm0, xmm1);
+    xmm5 = simde_mm_max_epi16(xmm2, xmm3);
+    logmax_den_re0 = simde_mm_max_epi16(xmm4, xmm5);
+    xmm0 = simde_mm_max_epi16(bit_met_p7_m3, bit_met_p5_m3);
+    xmm1 = simde_mm_max_epi16(bit_met_p3_m3, bit_met_p1_m3);
+    xmm2 = simde_mm_max_epi16(bit_met_m1_m3, bit_met_m3_m3);
+    xmm3 = simde_mm_max_epi16(bit_met_m5_m3, bit_met_m7_m3);
+    xmm4 = simde_mm_max_epi16(xmm0, xmm1);
+    xmm5 = simde_mm_max_epi16(xmm2, xmm3);
+    logmax_den_re0 = simde_mm_max_epi16(logmax_den_re0, xmm4);
+    logmax_den_re0 = simde_mm_max_epi16(logmax_den_re0, xmm5);
+    xmm0 = simde_mm_max_epi16(bit_met_p7_m5, bit_met_p5_m5);
+    xmm1 = simde_mm_max_epi16(bit_met_p3_m5, bit_met_p1_m5);
+    xmm2 = simde_mm_max_epi16(bit_met_m1_m5, bit_met_m3_m5);
+    xmm3 = simde_mm_max_epi16(bit_met_m5_m5, bit_met_m7_m5);
+    xmm4 = simde_mm_max_epi16(xmm0, xmm1);
+    xmm5 = simde_mm_max_epi16(xmm2, xmm3);
+    logmax_den_re0 = simde_mm_max_epi16(logmax_den_re0, xmm4);
+    logmax_den_re0 = simde_mm_max_epi16(logmax_den_re0, xmm5);
+    xmm0 = simde_mm_max_epi16(bit_met_p7_m7, bit_met_p5_m7);
+    xmm1 = simde_mm_max_epi16(bit_met_p3_m7, bit_met_p1_m7);
+    xmm2 = simde_mm_max_epi16(bit_met_m1_m7, bit_met_m3_m7);
+    xmm3 = simde_mm_max_epi16(bit_met_m5_m7, bit_met_m7_m7);
+    xmm4 = simde_mm_max_epi16(xmm0, xmm1);
+    xmm5 = simde_mm_max_epi16(xmm2, xmm3);
+    logmax_den_re0 = simde_mm_max_epi16(logmax_den_re0, xmm4);
+    logmax_den_re0 = simde_mm_max_epi16(logmax_den_re0, xmm5);
 
     // bit = 0
-    xmm0 = _mm_max_epi16(bit_met_p7_p1, bit_met_p5_p1);
-    xmm1 = _mm_max_epi16(bit_met_p3_p1, bit_met_p1_p1);
-    xmm2 = _mm_max_epi16(bit_met_m1_p1, bit_met_m3_p1);
-    xmm3 = _mm_max_epi16(bit_met_m5_p1, bit_met_m7_p1);
-    xmm4 = _mm_max_epi16(xmm0, xmm1);
-    xmm5 = _mm_max_epi16(xmm2, xmm3);
-    logmax_num_re0 = _mm_max_epi16(xmm4, xmm5);
-    xmm0 = _mm_max_epi16(bit_met_p7_p3, bit_met_p5_p3);
-    xmm1 = _mm_max_epi16(bit_met_p3_p3, bit_met_p1_p3);
-    xmm2 = _mm_max_epi16(bit_met_m1_p3, bit_met_m3_p3);
-    xmm3 = _mm_max_epi16(bit_met_m5_p3, bit_met_m7_p3);
-    xmm4 = _mm_max_epi16(xmm0, xmm1);
-    xmm5 = _mm_max_epi16(xmm2, xmm3);
-    logmax_num_re0 = _mm_max_epi16(logmax_num_re0, xmm4);
-    logmax_num_re0 = _mm_max_epi16(logmax_num_re0, xmm5);
-    xmm0 = _mm_max_epi16(bit_met_p7_p5, bit_met_p5_p5);
-    xmm1 = _mm_max_epi16(bit_met_p3_p5, bit_met_p1_p5);
-    xmm2 = _mm_max_epi16(bit_met_m1_p5, bit_met_m3_p5);
-    xmm3 = _mm_max_epi16(bit_met_m5_p5, bit_met_m7_p5);
-    xmm4 = _mm_max_epi16(xmm0, xmm1);
-    xmm5 = _mm_max_epi16(xmm2, xmm3);
-    logmax_num_re0 = _mm_max_epi16(logmax_num_re0, xmm4);
-    logmax_num_re0 = _mm_max_epi16(logmax_num_re0, xmm5);
-    xmm0 = _mm_max_epi16(bit_met_p7_p7, bit_met_p5_p7);
-    xmm1 = _mm_max_epi16(bit_met_p3_p7, bit_met_p1_p7);
-    xmm2 = _mm_max_epi16(bit_met_m1_p7, bit_met_m3_p7);
-    xmm3 = _mm_max_epi16(bit_met_m5_p7, bit_met_m7_p7);
-    xmm4 = _mm_max_epi16(xmm0, xmm1);
-    xmm5 = _mm_max_epi16(xmm2, xmm3);
-    logmax_num_re0 = _mm_max_epi16(logmax_num_re0, xmm4);
-    logmax_num_re0 = _mm_max_epi16(logmax_num_re0, xmm5);
-
-    y1r = _mm_subs_epi16(logmax_num_re0, logmax_den_re0);
+    xmm0 = simde_mm_max_epi16(bit_met_p7_p1, bit_met_p5_p1);
+    xmm1 = simde_mm_max_epi16(bit_met_p3_p1, bit_met_p1_p1);
+    xmm2 = simde_mm_max_epi16(bit_met_m1_p1, bit_met_m3_p1);
+    xmm3 = simde_mm_max_epi16(bit_met_m5_p1, bit_met_m7_p1);
+    xmm4 = simde_mm_max_epi16(xmm0, xmm1);
+    xmm5 = simde_mm_max_epi16(xmm2, xmm3);
+    logmax_num_re0 = simde_mm_max_epi16(xmm4, xmm5);
+    xmm0 = simde_mm_max_epi16(bit_met_p7_p3, bit_met_p5_p3);
+    xmm1 = simde_mm_max_epi16(bit_met_p3_p3, bit_met_p1_p3);
+    xmm2 = simde_mm_max_epi16(bit_met_m1_p3, bit_met_m3_p3);
+    xmm3 = simde_mm_max_epi16(bit_met_m5_p3, bit_met_m7_p3);
+    xmm4 = simde_mm_max_epi16(xmm0, xmm1);
+    xmm5 = simde_mm_max_epi16(xmm2, xmm3);
+    logmax_num_re0 = simde_mm_max_epi16(logmax_num_re0, xmm4);
+    logmax_num_re0 = simde_mm_max_epi16(logmax_num_re0, xmm5);
+    xmm0 = simde_mm_max_epi16(bit_met_p7_p5, bit_met_p5_p5);
+    xmm1 = simde_mm_max_epi16(bit_met_p3_p5, bit_met_p1_p5);
+    xmm2 = simde_mm_max_epi16(bit_met_m1_p5, bit_met_m3_p5);
+    xmm3 = simde_mm_max_epi16(bit_met_m5_p5, bit_met_m7_p5);
+    xmm4 = simde_mm_max_epi16(xmm0, xmm1);
+    xmm5 = simde_mm_max_epi16(xmm2, xmm3);
+    logmax_num_re0 = simde_mm_max_epi16(logmax_num_re0, xmm4);
+    logmax_num_re0 = simde_mm_max_epi16(logmax_num_re0, xmm5);
+    xmm0 = simde_mm_max_epi16(bit_met_p7_p7, bit_met_p5_p7);
+    xmm1 = simde_mm_max_epi16(bit_met_p3_p7, bit_met_p1_p7);
+    xmm2 = simde_mm_max_epi16(bit_met_m1_p7, bit_met_m3_p7);
+    xmm3 = simde_mm_max_epi16(bit_met_m5_p7, bit_met_m7_p7);
+    xmm4 = simde_mm_max_epi16(xmm0, xmm1);
+    xmm5 = simde_mm_max_epi16(xmm2, xmm3);
+    logmax_num_re0 = simde_mm_max_epi16(logmax_num_re0, xmm4);
+    logmax_num_re0 = simde_mm_max_epi16(logmax_num_re0, xmm5);
+
+    y1r = simde_mm_subs_epi16(logmax_num_re0, logmax_den_re0);
 
     // Detection for 3rd bit (LTE mapping)
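+    // The 3rd bit is decided by |xR|: metrics with |xR| in {5, 7} feed one
+    // hypothesis, |xR| in {1, 3} the other.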
-    xmm0 = _mm_max_epi16(bit_met_m7_m7, bit_met_m7_m5);
-    xmm1 = _mm_max_epi16(bit_met_m7_m3, bit_met_m7_m1);
-    xmm2 = _mm_max_epi16(bit_met_m7_p1, bit_met_m7_p3);
-    xmm3 = _mm_max_epi16(bit_met_m7_p5, bit_met_m7_p7);
-    xmm4 = _mm_max_epi16(xmm0, xmm1);
-    xmm5 = _mm_max_epi16(xmm2, xmm3);
-    logmax_den_re0 = _mm_max_epi16(xmm4, xmm5);
-    xmm0 = _mm_max_epi16(bit_met_m5_m7, bit_met_m5_m5);
-    xmm1 = _mm_max_epi16(bit_met_m5_m3, bit_met_m5_m1);
-    xmm2 = _mm_max_epi16(bit_met_m5_p1, bit_met_m5_p3);
-    xmm3 = _mm_max_epi16(bit_met_m5_p5, bit_met_m5_p7);
-    xmm4 = _mm_max_epi16(xmm0, xmm1);
-    xmm5 = _mm_max_epi16(xmm2, xmm3);
-    logmax_den_re0 = _mm_max_epi16(logmax_den_re0, xmm4);
-    logmax_den_re0 = _mm_max_epi16(logmax_den_re0, xmm5);
-    xmm0 = _mm_max_epi16(bit_met_p5_m7, bit_met_p5_m5);
-    xmm1 = _mm_max_epi16(bit_met_p5_m3, bit_met_p5_m1);
-    xmm2 = _mm_max_epi16(bit_met_p5_p1, bit_met_p5_p3);
-    xmm3 = _mm_max_epi16(bit_met_p5_p5, bit_met_p5_p7);
-    xmm4 = _mm_max_epi16(xmm0, xmm1);
-    xmm5 = _mm_max_epi16(xmm2, xmm3);
-    logmax_den_re0 = _mm_max_epi16(logmax_den_re0, xmm4);
-    logmax_den_re0 = _mm_max_epi16(logmax_den_re0, xmm5);
-    xmm0 = _mm_max_epi16(bit_met_p7_m7, bit_met_p7_m5);
-    xmm1 = _mm_max_epi16(bit_met_p7_m3, bit_met_p7_m1);
-    xmm2 = _mm_max_epi16(bit_met_p7_p1, bit_met_p7_p3);
-    xmm3 = _mm_max_epi16(bit_met_p7_p5, bit_met_p7_p7);
-    xmm4 = _mm_max_epi16(xmm0, xmm1);
-    xmm5 = _mm_max_epi16(xmm2, xmm3);
-    logmax_den_re0 = _mm_max_epi16(logmax_den_re0, xmm4);
-    logmax_den_re0 = _mm_max_epi16(logmax_den_re0, xmm5);
-
-    xmm0 = _mm_max_epi16(bit_met_m3_m7, bit_met_m3_m5);
-    xmm1 = _mm_max_epi16(bit_met_m3_m3, bit_met_m3_m1);
-    xmm2 = _mm_max_epi16(bit_met_m3_p1, bit_met_m3_p3);
-    xmm3 = _mm_max_epi16(bit_met_m3_p5, bit_met_m3_p7);
-    xmm4 = _mm_max_epi16(xmm0, xmm1);
-    xmm5 = _mm_max_epi16(xmm2, xmm3);
-    logmax_num_re0 = _mm_max_epi16(xmm4, xmm5);
-    xmm0 = _mm_max_epi16(bit_met_m1_m7, bit_met_m1_m5);
-    xmm1 = _mm_max_epi16(bit_met_m1_m3, bit_met_m1_m1);
-    xmm2 = _mm_max_epi16(bit_met_m1_p1, bit_met_m1_p3);
-    xmm3 = _mm_max_epi16(bit_met_m1_p5, bit_met_m1_p7);
-    xmm4 = _mm_max_epi16(xmm0, xmm1);
-    xmm5 = _mm_max_epi16(xmm2, xmm3);
-    logmax_num_re0 = _mm_max_epi16(logmax_num_re0, xmm4);
-    logmax_num_re0 = _mm_max_epi16(logmax_num_re0, xmm5);
-    xmm0 = _mm_max_epi16(bit_met_p1_m7, bit_met_p1_m5);
-    xmm1 = _mm_max_epi16(bit_met_p1_m3, bit_met_p1_m1);
-    xmm2 = _mm_max_epi16(bit_met_p1_p1, bit_met_p1_p3);
-    xmm3 = _mm_max_epi16(bit_met_p1_p5, bit_met_p1_p7);
-    xmm4 = _mm_max_epi16(xmm0, xmm1);
-    xmm5 = _mm_max_epi16(xmm2, xmm3);
-    logmax_num_re0 = _mm_max_epi16(logmax_num_re0, xmm4);
-    logmax_num_re0 = _mm_max_epi16(logmax_num_re0, xmm5);
-    xmm0 = _mm_max_epi16(bit_met_p3_m7, bit_met_p3_m5);
-    xmm1 = _mm_max_epi16(bit_met_p3_m3, bit_met_p3_m1);
-    xmm2 = _mm_max_epi16(bit_met_p3_p1, bit_met_p3_p3);
-    xmm3 = _mm_max_epi16(bit_met_p3_p5, bit_met_p3_p7);
-    xmm4 = _mm_max_epi16(xmm0, xmm1);
-    xmm5 = _mm_max_epi16(xmm2, xmm3);
-    logmax_num_re0 = _mm_max_epi16(logmax_num_re0, xmm4);
-    logmax_num_re0 = _mm_max_epi16(logmax_num_re0, xmm5);
-
-    simde__m128i y2r = _mm_subs_epi16(logmax_num_re0, logmax_den_re0);
+    xmm0 = simde_mm_max_epi16(bit_met_m7_m7, bit_met_m7_m5);
+    xmm1 = simde_mm_max_epi16(bit_met_m7_m3, bit_met_m7_m1);
+    xmm2 = simde_mm_max_epi16(bit_met_m7_p1, bit_met_m7_p3);
+    xmm3 = simde_mm_max_epi16(bit_met_m7_p5, bit_met_m7_p7);
+    xmm4 = simde_mm_max_epi16(xmm0, xmm1);
+    xmm5 = simde_mm_max_epi16(xmm2, xmm3);
+    logmax_den_re0 = simde_mm_max_epi16(xmm4, xmm5);
+    xmm0 = simde_mm_max_epi16(bit_met_m5_m7, bit_met_m5_m5);
+    xmm1 = simde_mm_max_epi16(bit_met_m5_m3, bit_met_m5_m1);
+    xmm2 = simde_mm_max_epi16(bit_met_m5_p1, bit_met_m5_p3);
+    xmm3 = simde_mm_max_epi16(bit_met_m5_p5, bit_met_m5_p7);
+    xmm4 = simde_mm_max_epi16(xmm0, xmm1);
+    xmm5 = simde_mm_max_epi16(xmm2, xmm3);
+    logmax_den_re0 = simde_mm_max_epi16(logmax_den_re0, xmm4);
+    logmax_den_re0 = simde_mm_max_epi16(logmax_den_re0, xmm5);
+    xmm0 = simde_mm_max_epi16(bit_met_p5_m7, bit_met_p5_m5);
+    xmm1 = simde_mm_max_epi16(bit_met_p5_m3, bit_met_p5_m1);
+    xmm2 = simde_mm_max_epi16(bit_met_p5_p1, bit_met_p5_p3);
+    xmm3 = simde_mm_max_epi16(bit_met_p5_p5, bit_met_p5_p7);
+    xmm4 = simde_mm_max_epi16(xmm0, xmm1);
+    xmm5 = simde_mm_max_epi16(xmm2, xmm3);
+    logmax_den_re0 = simde_mm_max_epi16(logmax_den_re0, xmm4);
+    logmax_den_re0 = simde_mm_max_epi16(logmax_den_re0, xmm5);
+    xmm0 = simde_mm_max_epi16(bit_met_p7_m7, bit_met_p7_m5);
+    xmm1 = simde_mm_max_epi16(bit_met_p7_m3, bit_met_p7_m1);
+    xmm2 = simde_mm_max_epi16(bit_met_p7_p1, bit_met_p7_p3);
+    xmm3 = simde_mm_max_epi16(bit_met_p7_p5, bit_met_p7_p7);
+    xmm4 = simde_mm_max_epi16(xmm0, xmm1);
+    xmm5 = simde_mm_max_epi16(xmm2, xmm3);
+    logmax_den_re0 = simde_mm_max_epi16(logmax_den_re0, xmm4);
+    logmax_den_re0 = simde_mm_max_epi16(logmax_den_re0, xmm5);
+
+    xmm0 = simde_mm_max_epi16(bit_met_m3_m7, bit_met_m3_m5);
+    xmm1 = simde_mm_max_epi16(bit_met_m3_m3, bit_met_m3_m1);
+    xmm2 = simde_mm_max_epi16(bit_met_m3_p1, bit_met_m3_p3);
+    xmm3 = simde_mm_max_epi16(bit_met_m3_p5, bit_met_m3_p7);
+    xmm4 = simde_mm_max_epi16(xmm0, xmm1);
+    xmm5 = simde_mm_max_epi16(xmm2, xmm3);
+    logmax_num_re0 = simde_mm_max_epi16(xmm4, xmm5);
+    xmm0 = simde_mm_max_epi16(bit_met_m1_m7, bit_met_m1_m5);
+    xmm1 = simde_mm_max_epi16(bit_met_m1_m3, bit_met_m1_m1);
+    xmm2 = simde_mm_max_epi16(bit_met_m1_p1, bit_met_m1_p3);
+    xmm3 = simde_mm_max_epi16(bit_met_m1_p5, bit_met_m1_p7);
+    xmm4 = simde_mm_max_epi16(xmm0, xmm1);
+    xmm5 = simde_mm_max_epi16(xmm2, xmm3);
+    logmax_num_re0 = simde_mm_max_epi16(logmax_num_re0, xmm4);
+    logmax_num_re0 = simde_mm_max_epi16(logmax_num_re0, xmm5);
+    xmm0 = simde_mm_max_epi16(bit_met_p1_m7, bit_met_p1_m5);
+    xmm1 = simde_mm_max_epi16(bit_met_p1_m3, bit_met_p1_m1);
+    xmm2 = simde_mm_max_epi16(bit_met_p1_p1, bit_met_p1_p3);
+    xmm3 = simde_mm_max_epi16(bit_met_p1_p5, bit_met_p1_p7);
+    xmm4 = simde_mm_max_epi16(xmm0, xmm1);
+    xmm5 = simde_mm_max_epi16(xmm2, xmm3);
+    logmax_num_re0 = simde_mm_max_epi16(logmax_num_re0, xmm4);
+    logmax_num_re0 = simde_mm_max_epi16(logmax_num_re0, xmm5);
+    xmm0 = simde_mm_max_epi16(bit_met_p3_m7, bit_met_p3_m5);
+    xmm1 = simde_mm_max_epi16(bit_met_p3_m3, bit_met_p3_m1);
+    xmm2 = simde_mm_max_epi16(bit_met_p3_p1, bit_met_p3_p3);
+    xmm3 = simde_mm_max_epi16(bit_met_p3_p5, bit_met_p3_p7);
+    xmm4 = simde_mm_max_epi16(xmm0, xmm1);
+    xmm5 = simde_mm_max_epi16(xmm2, xmm3);
+    logmax_num_re0 = simde_mm_max_epi16(logmax_num_re0, xmm4);
+    logmax_num_re0 = simde_mm_max_epi16(logmax_num_re0, xmm5);
+
+    simde__m128i y2r = simde_mm_subs_epi16(logmax_num_re0, logmax_den_re0);
 
     // Detection for 4th bit (LTE mapping)
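+    // The 4th bit is the imaginary-axis analogue of the 3rd: |xI| in {5, 7}
+    // versus |xI| in {1, 3}.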
-    xmm0 = _mm_max_epi16(bit_met_p7_p7, bit_met_p5_p7);
-    xmm1 = _mm_max_epi16(bit_met_p3_p7, bit_met_p1_p7);
-    xmm2 = _mm_max_epi16(bit_met_m1_p7, bit_met_m3_p7);
-    xmm3 = _mm_max_epi16(bit_met_m5_p7, bit_met_m7_p7);
-    xmm4 = _mm_max_epi16(xmm0, xmm1);
-    xmm5 = _mm_max_epi16(xmm2, xmm3);
-    logmax_den_re0 = _mm_max_epi16(xmm4, xmm5);
-    xmm0 = _mm_max_epi16(bit_met_p7_p5, bit_met_p5_p5);
-    xmm1 = _mm_max_epi16(bit_met_p3_p5, bit_met_p1_p5);
-    xmm2 = _mm_max_epi16(bit_met_m1_p5, bit_met_m3_p5);
-    xmm3 = _mm_max_epi16(bit_met_m5_p5, bit_met_m5_p5);
-    xmm4 = _mm_max_epi16(xmm0, xmm1);
-    xmm5 = _mm_max_epi16(xmm2, xmm3);
-    logmax_den_re0 = _mm_max_epi16(logmax_den_re0, xmm4);
-    logmax_den_re0 = _mm_max_epi16(logmax_den_re0, xmm5);
-    xmm0 = _mm_max_epi16(bit_met_p7_m5, bit_met_p5_m5);
-    xmm1 = _mm_max_epi16(bit_met_p3_m5, bit_met_p1_m5);
-    xmm2 = _mm_max_epi16(bit_met_m1_m5, bit_met_m3_m5);
-    xmm3 = _mm_max_epi16(bit_met_m5_m5, bit_met_m7_m5);
-    xmm4 = _mm_max_epi16(xmm0, xmm1);
-    xmm5 = _mm_max_epi16(xmm2, xmm3);
-    logmax_den_re0 = _mm_max_epi16(logmax_den_re0, xmm4);
-    logmax_den_re0 = _mm_max_epi16(logmax_den_re0, xmm5);
-    xmm0 = _mm_max_epi16(bit_met_p7_m7, bit_met_p5_m7);
-    xmm1 = _mm_max_epi16(bit_met_p3_m7, bit_met_p1_m7);
-    xmm2 = _mm_max_epi16(bit_met_m1_m7, bit_met_m3_m7);
-    xmm3 = _mm_max_epi16(bit_met_m5_m7, bit_met_m7_m7);
-    xmm4 = _mm_max_epi16(xmm0, xmm1);
-    xmm5 = _mm_max_epi16(xmm2, xmm3);
-    logmax_den_re0 = _mm_max_epi16(logmax_den_re0, xmm4);
-    logmax_den_re0 = _mm_max_epi16(logmax_den_re0, xmm5);
-
-    xmm0 = _mm_max_epi16(bit_met_p7_m1, bit_met_p5_m1);
-    xmm1 = _mm_max_epi16(bit_met_p3_m1, bit_met_p1_m1);
-    xmm2 = _mm_max_epi16(bit_met_m1_m1, bit_met_m3_m1);
-    xmm3 = _mm_max_epi16(bit_met_m5_m1, bit_met_m7_m1);
-    xmm4 = _mm_max_epi16(xmm0, xmm1);
-    xmm5 = _mm_max_epi16(xmm2, xmm3);
-    logmax_num_re0 = _mm_max_epi16(xmm4, xmm5);
-    xmm0 = _mm_max_epi16(bit_met_p7_m3, bit_met_p5_m3);
-    xmm1 = _mm_max_epi16(bit_met_p3_m3, bit_met_p1_m3);
-    xmm2 = _mm_max_epi16(bit_met_m1_m3, bit_met_m3_m3);
-    xmm3 = _mm_max_epi16(bit_met_m5_m3, bit_met_m7_m3);
-    xmm4 = _mm_max_epi16(xmm0, xmm1);
-    xmm5 = _mm_max_epi16(xmm2, xmm3);
-    logmax_num_re0 = _mm_max_epi16(logmax_num_re0, xmm4);
-    logmax_num_re0 = _mm_max_epi16(logmax_num_re0, xmm5);
-    xmm0 = _mm_max_epi16(bit_met_p7_p1, bit_met_p5_p1);
-    xmm1 = _mm_max_epi16(bit_met_p3_p1, bit_met_p1_p1);
-    xmm2 = _mm_max_epi16(bit_met_m1_p1, bit_met_m3_p1);
-    xmm3 = _mm_max_epi16(bit_met_m5_p1, bit_met_m7_p1);
-    xmm4 = _mm_max_epi16(xmm0, xmm1);
-    xmm5 = _mm_max_epi16(xmm2, xmm3);
-    logmax_num_re0 = _mm_max_epi16(logmax_num_re0, xmm4);
-    logmax_num_re0 = _mm_max_epi16(logmax_num_re0, xmm5);
-    xmm0 = _mm_max_epi16(bit_met_p7_p3, bit_met_p5_p3);
-    xmm1 = _mm_max_epi16(bit_met_p3_p3, bit_met_p1_p3);
-    xmm2 = _mm_max_epi16(bit_met_m1_p3, bit_met_m3_p3);
-    xmm3 = _mm_max_epi16(bit_met_m5_p3, bit_met_m7_p3);
-    xmm4 = _mm_max_epi16(xmm0, xmm1);
-    xmm5 = _mm_max_epi16(xmm2, xmm3);
-    logmax_num_re0 = _mm_max_epi16(logmax_num_re0, xmm4);
-    logmax_num_re0 = _mm_max_epi16(logmax_num_re0, xmm5);
-
-    y0i = _mm_subs_epi16(logmax_num_re0, logmax_den_re0);
+    xmm0 = simde_mm_max_epi16(bit_met_p7_p7, bit_met_p5_p7);
+    xmm1 = simde_mm_max_epi16(bit_met_p3_p7, bit_met_p1_p7);
+    xmm2 = simde_mm_max_epi16(bit_met_m1_p7, bit_met_m3_p7);
+    xmm3 = simde_mm_max_epi16(bit_met_m5_p7, bit_met_m7_p7);
+    xmm4 = simde_mm_max_epi16(xmm0, xmm1);
+    xmm5 = simde_mm_max_epi16(xmm2, xmm3);
+    logmax_den_re0 = simde_mm_max_epi16(xmm4, xmm5);
+    xmm0 = simde_mm_max_epi16(bit_met_p7_p5, bit_met_p5_p5);
+    xmm1 = simde_mm_max_epi16(bit_met_p3_p5, bit_met_p1_p5);
+    xmm2 = simde_mm_max_epi16(bit_met_m1_p5, bit_met_m3_p5);
+    xmm3 = simde_mm_max_epi16(bit_met_m5_p5, bit_met_m7_p5); // pair m5 with m7, as in the other max groups
+    xmm4 = simde_mm_max_epi16(xmm0, xmm1);
+    xmm5 = simde_mm_max_epi16(xmm2, xmm3);
+    logmax_den_re0 = simde_mm_max_epi16(logmax_den_re0, xmm4);
+    logmax_den_re0 = simde_mm_max_epi16(logmax_den_re0, xmm5);
+    xmm0 = simde_mm_max_epi16(bit_met_p7_m5, bit_met_p5_m5);
+    xmm1 = simde_mm_max_epi16(bit_met_p3_m5, bit_met_p1_m5);
+    xmm2 = simde_mm_max_epi16(bit_met_m1_m5, bit_met_m3_m5);
+    xmm3 = simde_mm_max_epi16(bit_met_m5_m5, bit_met_m7_m5);
+    xmm4 = simde_mm_max_epi16(xmm0, xmm1);
+    xmm5 = simde_mm_max_epi16(xmm2, xmm3);
+    logmax_den_re0 = simde_mm_max_epi16(logmax_den_re0, xmm4);
+    logmax_den_re0 = simde_mm_max_epi16(logmax_den_re0, xmm5);
+    xmm0 = simde_mm_max_epi16(bit_met_p7_m7, bit_met_p5_m7);
+    xmm1 = simde_mm_max_epi16(bit_met_p3_m7, bit_met_p1_m7);
+    xmm2 = simde_mm_max_epi16(bit_met_m1_m7, bit_met_m3_m7);
+    xmm3 = simde_mm_max_epi16(bit_met_m5_m7, bit_met_m7_m7);
+    xmm4 = simde_mm_max_epi16(xmm0, xmm1);
+    xmm5 = simde_mm_max_epi16(xmm2, xmm3);
+    logmax_den_re0 = simde_mm_max_epi16(logmax_den_re0, xmm4);
+    logmax_den_re0 = simde_mm_max_epi16(logmax_den_re0, xmm5);
+
+    xmm0 = simde_mm_max_epi16(bit_met_p7_m1, bit_met_p5_m1);
+    xmm1 = simde_mm_max_epi16(bit_met_p3_m1, bit_met_p1_m1);
+    xmm2 = simde_mm_max_epi16(bit_met_m1_m1, bit_met_m3_m1);
+    xmm3 = simde_mm_max_epi16(bit_met_m5_m1, bit_met_m7_m1);
+    xmm4 = simde_mm_max_epi16(xmm0, xmm1);
+    xmm5 = simde_mm_max_epi16(xmm2, xmm3);
+    logmax_num_re0 = simde_mm_max_epi16(xmm4, xmm5);
+    xmm0 = simde_mm_max_epi16(bit_met_p7_m3, bit_met_p5_m3);
+    xmm1 = simde_mm_max_epi16(bit_met_p3_m3, bit_met_p1_m3);
+    xmm2 = simde_mm_max_epi16(bit_met_m1_m3, bit_met_m3_m3);
+    xmm3 = simde_mm_max_epi16(bit_met_m5_m3, bit_met_m7_m3);
+    xmm4 = simde_mm_max_epi16(xmm0, xmm1);
+    xmm5 = simde_mm_max_epi16(xmm2, xmm3);
+    logmax_num_re0 = simde_mm_max_epi16(logmax_num_re0, xmm4);
+    logmax_num_re0 = simde_mm_max_epi16(logmax_num_re0, xmm5);
+    xmm0 = simde_mm_max_epi16(bit_met_p7_p1, bit_met_p5_p1);
+    xmm1 = simde_mm_max_epi16(bit_met_p3_p1, bit_met_p1_p1);
+    xmm2 = simde_mm_max_epi16(bit_met_m1_p1, bit_met_m3_p1);
+    xmm3 = simde_mm_max_epi16(bit_met_m5_p1, bit_met_m7_p1);
+    xmm4 = simde_mm_max_epi16(xmm0, xmm1);
+    xmm5 = simde_mm_max_epi16(xmm2, xmm3);
+    logmax_num_re0 = simde_mm_max_epi16(logmax_num_re0, xmm4);
+    logmax_num_re0 = simde_mm_max_epi16(logmax_num_re0, xmm5);
+    xmm0 = simde_mm_max_epi16(bit_met_p7_p3, bit_met_p5_p3);
+    xmm1 = simde_mm_max_epi16(bit_met_p3_p3, bit_met_p1_p3);
+    xmm2 = simde_mm_max_epi16(bit_met_m1_p3, bit_met_m3_p3);
+    xmm3 = simde_mm_max_epi16(bit_met_m5_p3, bit_met_m7_p3);
+    xmm4 = simde_mm_max_epi16(xmm0, xmm1);
+    xmm5 = simde_mm_max_epi16(xmm2, xmm3);
+    logmax_num_re0 = simde_mm_max_epi16(logmax_num_re0, xmm4);
+    logmax_num_re0 = simde_mm_max_epi16(logmax_num_re0, xmm5);
+
+    y0i = simde_mm_subs_epi16(logmax_num_re0, logmax_den_re0);
 
 
     // Detection for 5th bit (LTE mapping)
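+    // The 5th bit splits |xR| into {1, 7} versus {3, 5}.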
-    xmm0 = _mm_max_epi16(bit_met_m7_m7, bit_met_m7_m5);
-    xmm1 = _mm_max_epi16(bit_met_m7_m3, bit_met_m7_m1);
-    xmm2 = _mm_max_epi16(bit_met_m7_p1, bit_met_m7_p3);
-    xmm3 = _mm_max_epi16(bit_met_m7_p5, bit_met_m7_p7);
-    xmm4 = _mm_max_epi16(xmm0, xmm1);
-    xmm5 = _mm_max_epi16(xmm2, xmm3);
-    logmax_den_re0 = _mm_max_epi16(xmm4, xmm5);
-    xmm0 = _mm_max_epi16(bit_met_m1_m7, bit_met_m1_m5);
-    xmm1 = _mm_max_epi16(bit_met_m1_m3, bit_met_m1_m1);
-    xmm2 = _mm_max_epi16(bit_met_m1_p1, bit_met_m1_p3);
-    xmm3 = _mm_max_epi16(bit_met_m1_p5, bit_met_m1_p7);
-    xmm4 = _mm_max_epi16(xmm0, xmm1);
-    xmm5 = _mm_max_epi16(xmm2, xmm3);
-    logmax_den_re0 = _mm_max_epi16(logmax_den_re0, xmm4);
-    logmax_den_re0 = _mm_max_epi16(logmax_den_re0, xmm5);
-    xmm0 = _mm_max_epi16(bit_met_p1_m7, bit_met_p1_m5);
-    xmm1 = _mm_max_epi16(bit_met_p1_m3, bit_met_p1_m1);
-    xmm2 = _mm_max_epi16(bit_met_p1_p1, bit_met_p1_p3);
-    xmm3 = _mm_max_epi16(bit_met_p1_p5, bit_met_p1_p7);
-    xmm4 = _mm_max_epi16(xmm0, xmm1);
-    xmm5 = _mm_max_epi16(xmm2, xmm3);
-    logmax_den_re0 = _mm_max_epi16(logmax_den_re0, xmm4);
-    logmax_den_re0 = _mm_max_epi16(logmax_den_re0, xmm5);
-    xmm0 = _mm_max_epi16(bit_met_p7_m7, bit_met_p7_m5);
-    xmm1 = _mm_max_epi16(bit_met_p7_m3, bit_met_p7_m1);
-    xmm2 = _mm_max_epi16(bit_met_p7_p1, bit_met_p7_p3);
-    xmm3 = _mm_max_epi16(bit_met_p7_p5, bit_met_p7_p7);
-    xmm4 = _mm_max_epi16(xmm0, xmm1);
-    xmm5 = _mm_max_epi16(xmm2, xmm3);
-    logmax_den_re0 = _mm_max_epi16(logmax_den_re0, xmm4);
-    logmax_den_re0 = _mm_max_epi16(logmax_den_re0, xmm5);
-
-    xmm0 = _mm_max_epi16(bit_met_m5_m7, bit_met_m5_m5);
-    xmm1 = _mm_max_epi16(bit_met_m5_m3, bit_met_m5_m1);
-    xmm2 = _mm_max_epi16(bit_met_m5_p1, bit_met_m5_p3);
-    xmm3 = _mm_max_epi16(bit_met_m5_p5, bit_met_m5_p7);
-    xmm4 = _mm_max_epi16(xmm0, xmm1);
-    xmm5 = _mm_max_epi16(xmm2, xmm3);
-    logmax_num_re0 = _mm_max_epi16(xmm4, xmm5);
-    xmm0 = _mm_max_epi16(bit_met_m3_m7, bit_met_m3_m5);
-    xmm1 = _mm_max_epi16(bit_met_m3_m3, bit_met_m3_m1);
-    xmm2 = _mm_max_epi16(bit_met_m3_p1, bit_met_m3_p3);
-    xmm3 = _mm_max_epi16(bit_met_m3_p5, bit_met_m3_p7);
-    xmm4 = _mm_max_epi16(xmm0, xmm1);
-    xmm5 = _mm_max_epi16(xmm2, xmm3);
-    logmax_num_re0 = _mm_max_epi16(logmax_num_re0, xmm4);
-    logmax_num_re0 = _mm_max_epi16(logmax_num_re0, xmm5);
-    xmm0 = _mm_max_epi16(bit_met_p3_m7, bit_met_p3_m5);
-    xmm1 = _mm_max_epi16(bit_met_p3_m3, bit_met_p3_m1);
-    xmm2 = _mm_max_epi16(bit_met_p3_p1, bit_met_p3_p3);
-    xmm3 = _mm_max_epi16(bit_met_p3_p5, bit_met_p3_p7);
-    xmm4 = _mm_max_epi16(xmm0, xmm1);
-    xmm5 = _mm_max_epi16(xmm2, xmm3);
-    logmax_num_re0 = _mm_max_epi16(logmax_num_re0, xmm4);
-    logmax_num_re0 = _mm_max_epi16(logmax_num_re0, xmm5);
-    xmm0 = _mm_max_epi16(bit_met_p5_m7, bit_met_p5_m5);
-    xmm1 = _mm_max_epi16(bit_met_p5_m3, bit_met_p5_m1);
-    xmm2 = _mm_max_epi16(bit_met_p5_p1, bit_met_p5_p3);
-    xmm3 = _mm_max_epi16(bit_met_p5_p5, bit_met_p5_p7);
-    xmm4 = _mm_max_epi16(xmm0, xmm1);
-    xmm5 = _mm_max_epi16(xmm2, xmm3);
-    logmax_num_re0 = _mm_max_epi16(logmax_num_re0, xmm4);
-    logmax_num_re0 = _mm_max_epi16(logmax_num_re0, xmm5);
-
-    y1i = _mm_subs_epi16(logmax_num_re0, logmax_den_re0);
+    xmm0 = simde_mm_max_epi16(bit_met_m7_m7, bit_met_m7_m5);
+    xmm1 = simde_mm_max_epi16(bit_met_m7_m3, bit_met_m7_m1);
+    xmm2 = simde_mm_max_epi16(bit_met_m7_p1, bit_met_m7_p3);
+    xmm3 = simde_mm_max_epi16(bit_met_m7_p5, bit_met_m7_p7);
+    xmm4 = simde_mm_max_epi16(xmm0, xmm1);
+    xmm5 = simde_mm_max_epi16(xmm2, xmm3);
+    logmax_den_re0 = simde_mm_max_epi16(xmm4, xmm5);
+    xmm0 = simde_mm_max_epi16(bit_met_m1_m7, bit_met_m1_m5);
+    xmm1 = simde_mm_max_epi16(bit_met_m1_m3, bit_met_m1_m1);
+    xmm2 = simde_mm_max_epi16(bit_met_m1_p1, bit_met_m1_p3);
+    xmm3 = simde_mm_max_epi16(bit_met_m1_p5, bit_met_m1_p7);
+    xmm4 = simde_mm_max_epi16(xmm0, xmm1);
+    xmm5 = simde_mm_max_epi16(xmm2, xmm3);
+    logmax_den_re0 = simde_mm_max_epi16(logmax_den_re0, xmm4);
+    logmax_den_re0 = simde_mm_max_epi16(logmax_den_re0, xmm5);
+    xmm0 = simde_mm_max_epi16(bit_met_p1_m7, bit_met_p1_m5);
+    xmm1 = simde_mm_max_epi16(bit_met_p1_m3, bit_met_p1_m1);
+    xmm2 = simde_mm_max_epi16(bit_met_p1_p1, bit_met_p1_p3);
+    xmm3 = simde_mm_max_epi16(bit_met_p1_p5, bit_met_p1_p7);
+    xmm4 = simde_mm_max_epi16(xmm0, xmm1);
+    xmm5 = simde_mm_max_epi16(xmm2, xmm3);
+    logmax_den_re0 = simde_mm_max_epi16(logmax_den_re0, xmm4);
+    logmax_den_re0 = simde_mm_max_epi16(logmax_den_re0, xmm5);
+    xmm0 = simde_mm_max_epi16(bit_met_p7_m7, bit_met_p7_m5);
+    xmm1 = simde_mm_max_epi16(bit_met_p7_m3, bit_met_p7_m1);
+    xmm2 = simde_mm_max_epi16(bit_met_p7_p1, bit_met_p7_p3);
+    xmm3 = simde_mm_max_epi16(bit_met_p7_p5, bit_met_p7_p7);
+    xmm4 = simde_mm_max_epi16(xmm0, xmm1);
+    xmm5 = simde_mm_max_epi16(xmm2, xmm3);
+    logmax_den_re0 = simde_mm_max_epi16(logmax_den_re0, xmm4);
+    logmax_den_re0 = simde_mm_max_epi16(logmax_den_re0, xmm5);
+
+    xmm0 = simde_mm_max_epi16(bit_met_m5_m7, bit_met_m5_m5);
+    xmm1 = simde_mm_max_epi16(bit_met_m5_m3, bit_met_m5_m1);
+    xmm2 = simde_mm_max_epi16(bit_met_m5_p1, bit_met_m5_p3);
+    xmm3 = simde_mm_max_epi16(bit_met_m5_p5, bit_met_m5_p7);
+    xmm4 = simde_mm_max_epi16(xmm0, xmm1);
+    xmm5 = simde_mm_max_epi16(xmm2, xmm3);
+    logmax_num_re0 = simde_mm_max_epi16(xmm4, xmm5);
+    xmm0 = simde_mm_max_epi16(bit_met_m3_m7, bit_met_m3_m5);
+    xmm1 = simde_mm_max_epi16(bit_met_m3_m3, bit_met_m3_m1);
+    xmm2 = simde_mm_max_epi16(bit_met_m3_p1, bit_met_m3_p3);
+    xmm3 = simde_mm_max_epi16(bit_met_m3_p5, bit_met_m3_p7);
+    xmm4 = simde_mm_max_epi16(xmm0, xmm1);
+    xmm5 = simde_mm_max_epi16(xmm2, xmm3);
+    logmax_num_re0 = simde_mm_max_epi16(logmax_num_re0, xmm4);
+    logmax_num_re0 = simde_mm_max_epi16(logmax_num_re0, xmm5);
+    xmm0 = simde_mm_max_epi16(bit_met_p3_m7, bit_met_p3_m5);
+    xmm1 = simde_mm_max_epi16(bit_met_p3_m3, bit_met_p3_m1);
+    xmm2 = simde_mm_max_epi16(bit_met_p3_p1, bit_met_p3_p3);
+    xmm3 = simde_mm_max_epi16(bit_met_p3_p5, bit_met_p3_p7);
+    xmm4 = simde_mm_max_epi16(xmm0, xmm1);
+    xmm5 = simde_mm_max_epi16(xmm2, xmm3);
+    logmax_num_re0 = simde_mm_max_epi16(logmax_num_re0, xmm4);
+    logmax_num_re0 = simde_mm_max_epi16(logmax_num_re0, xmm5);
+    xmm0 = simde_mm_max_epi16(bit_met_p5_m7, bit_met_p5_m5);
+    xmm1 = simde_mm_max_epi16(bit_met_p5_m3, bit_met_p5_m1);
+    xmm2 = simde_mm_max_epi16(bit_met_p5_p1, bit_met_p5_p3);
+    xmm3 = simde_mm_max_epi16(bit_met_p5_p5, bit_met_p5_p7);
+    xmm4 = simde_mm_max_epi16(xmm0, xmm1);
+    xmm5 = simde_mm_max_epi16(xmm2, xmm3);
+    logmax_num_re0 = simde_mm_max_epi16(logmax_num_re0, xmm4);
+    logmax_num_re0 = simde_mm_max_epi16(logmax_num_re0, xmm5);
+
+    y1i = simde_mm_subs_epi16(logmax_num_re0, logmax_den_re0);
 
     // Detection for 6th bit (LTE mapping)
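+    // The 6th bit splits |xI| into {1, 7} versus {3, 5}, completing the
+    // Gray-mapped 64-QAM demapping.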
-    xmm0 = _mm_max_epi16(bit_met_p7_p7, bit_met_p5_p7);
-    xmm1 = _mm_max_epi16(bit_met_p3_p7, bit_met_p1_p7);
-    xmm2 = _mm_max_epi16(bit_met_m1_p7, bit_met_m3_p7);
-    xmm3 = _mm_max_epi16(bit_met_m5_p7, bit_met_m7_p7);
-    xmm4 = _mm_max_epi16(xmm0, xmm1);
-    xmm5 = _mm_max_epi16(xmm2, xmm3);
-    logmax_den_re0 = _mm_max_epi16(xmm4, xmm5);
-    xmm0 = _mm_max_epi16(bit_met_p7_p1, bit_met_p5_p1);
-    xmm1 = _mm_max_epi16(bit_met_p3_p1, bit_met_p1_p1);
-    xmm2 = _mm_max_epi16(bit_met_m1_p1, bit_met_m3_p1);
-    xmm3 = _mm_max_epi16(bit_met_m5_p1, bit_met_m5_p1);
-    xmm4 = _mm_max_epi16(xmm0, xmm1);
-    xmm5 = _mm_max_epi16(xmm2, xmm3);
-    logmax_den_re0 = _mm_max_epi16(logmax_den_re0, xmm4);
-    logmax_den_re0 = _mm_max_epi16(logmax_den_re0, xmm5);
-    xmm0 = _mm_max_epi16(bit_met_p7_m1, bit_met_p5_m1);
-    xmm1 = _mm_max_epi16(bit_met_p3_m1, bit_met_p1_m1);
-    xmm2 = _mm_max_epi16(bit_met_m1_m1, bit_met_m3_m1);
-    xmm3 = _mm_max_epi16(bit_met_m5_m1, bit_met_m7_m1);
-    xmm4 = _mm_max_epi16(xmm0, xmm1);
-    xmm5 = _mm_max_epi16(xmm2, xmm3);
-    logmax_den_re0 = _mm_max_epi16(logmax_den_re0, xmm4);
-    logmax_den_re0 = _mm_max_epi16(logmax_den_re0, xmm5);
-    xmm0 = _mm_max_epi16(bit_met_p7_m7, bit_met_p5_m7);
-    xmm1 = _mm_max_epi16(bit_met_p3_m7, bit_met_p1_m7);
-    xmm2 = _mm_max_epi16(bit_met_m1_m7, bit_met_m3_m7);
-    xmm3 = _mm_max_epi16(bit_met_m5_m7, bit_met_m7_m7);
-    xmm4 = _mm_max_epi16(xmm0, xmm1);
-    xmm5 = _mm_max_epi16(xmm2, xmm3);
-    logmax_den_re0 = _mm_max_epi16(logmax_den_re0, xmm4);
-    logmax_den_re0 = _mm_max_epi16(logmax_den_re0, xmm5);
-
-    xmm0 = _mm_max_epi16(bit_met_p7_m5, bit_met_p5_m5);
-    xmm1 = _mm_max_epi16(bit_met_p3_m5, bit_met_p1_m5);
-    xmm2 = _mm_max_epi16(bit_met_m1_m5, bit_met_m3_m5);
-    xmm3 = _mm_max_epi16(bit_met_m5_m5, bit_met_m7_m5);
-    xmm4 = _mm_max_epi16(xmm0, xmm1);
-    xmm5 = _mm_max_epi16(xmm2, xmm3);
-    logmax_num_re0 = _mm_max_epi16(xmm4, xmm5);
-    xmm0 = _mm_max_epi16(bit_met_p7_m3, bit_met_p5_m3);
-    xmm1 = _mm_max_epi16(bit_met_p3_m3, bit_met_p1_m3);
-    xmm2 = _mm_max_epi16(bit_met_m1_m3, bit_met_m3_m3);
-    xmm3 = _mm_max_epi16(bit_met_m5_m3, bit_met_m7_m3);
-    xmm4 = _mm_max_epi16(xmm0, xmm1);
-    xmm5 = _mm_max_epi16(xmm2, xmm3);
-    logmax_num_re0 = _mm_max_epi16(logmax_num_re0, xmm4);
-    logmax_num_re0 = _mm_max_epi16(logmax_num_re0, xmm5);
-    xmm0 = _mm_max_epi16(bit_met_p7_p3, bit_met_p5_p3);
-    xmm1 = _mm_max_epi16(bit_met_p3_p3, bit_met_p1_p3);
-    xmm2 = _mm_max_epi16(bit_met_m1_p3, bit_met_m3_p3);
-    xmm3 = _mm_max_epi16(bit_met_m5_p3, bit_met_m7_p3);
-    xmm4 = _mm_max_epi16(xmm0, xmm1);
-    xmm5 = _mm_max_epi16(xmm2, xmm3);
-    logmax_num_re0 = _mm_max_epi16(logmax_num_re0, xmm4);
-    logmax_num_re0 = _mm_max_epi16(logmax_num_re0, xmm5);
-    xmm0 = _mm_max_epi16(bit_met_p7_p5, bit_met_p5_p5);
-    xmm1 = _mm_max_epi16(bit_met_p3_p5, bit_met_p1_p5);
-    xmm2 = _mm_max_epi16(bit_met_m1_p5, bit_met_m3_p5);
-    xmm3 = _mm_max_epi16(bit_met_m5_p5, bit_met_m7_p5);
-    xmm4 = _mm_max_epi16(xmm0, xmm1);
-    xmm5 = _mm_max_epi16(xmm2, xmm3);
-    logmax_num_re0 = _mm_max_epi16(logmax_num_re0, xmm4);
-    logmax_num_re0 = _mm_max_epi16(logmax_num_re0, xmm5);
-
-    simde__m128i y2i = _mm_subs_epi16(logmax_num_re0, logmax_den_re0);
+    xmm0 = simde_mm_max_epi16(bit_met_p7_p7, bit_met_p5_p7);
+    xmm1 = simde_mm_max_epi16(bit_met_p3_p7, bit_met_p1_p7);
+    xmm2 = simde_mm_max_epi16(bit_met_m1_p7, bit_met_m3_p7);
+    xmm3 = simde_mm_max_epi16(bit_met_m5_p7, bit_met_m7_p7);
+    xmm4 = simde_mm_max_epi16(xmm0, xmm1);
+    xmm5 = simde_mm_max_epi16(xmm2, xmm3);
+    logmax_den_re0 = simde_mm_max_epi16(xmm4, xmm5);
+    xmm0 = simde_mm_max_epi16(bit_met_p7_p1, bit_met_p5_p1);
+    xmm1 = simde_mm_max_epi16(bit_met_p3_p1, bit_met_p1_p1);
+    xmm2 = simde_mm_max_epi16(bit_met_m1_p1, bit_met_m3_p1);
+    xmm3 = simde_mm_max_epi16(bit_met_m5_p1, bit_met_m7_p1); // m5/m7 pair, matching the other bit columns
+    xmm4 = simde_mm_max_epi16(xmm0, xmm1);
+    xmm5 = simde_mm_max_epi16(xmm2, xmm3);
+    logmax_den_re0 = simde_mm_max_epi16(logmax_den_re0, xmm4);
+    logmax_den_re0 = simde_mm_max_epi16(logmax_den_re0, xmm5);
+    xmm0 = simde_mm_max_epi16(bit_met_p7_m1, bit_met_p5_m1);
+    xmm1 = simde_mm_max_epi16(bit_met_p3_m1, bit_met_p1_m1);
+    xmm2 = simde_mm_max_epi16(bit_met_m1_m1, bit_met_m3_m1);
+    xmm3 = simde_mm_max_epi16(bit_met_m5_m1, bit_met_m7_m1);
+    xmm4 = simde_mm_max_epi16(xmm0, xmm1);
+    xmm5 = simde_mm_max_epi16(xmm2, xmm3);
+    logmax_den_re0 = simde_mm_max_epi16(logmax_den_re0, xmm4);
+    logmax_den_re0 = simde_mm_max_epi16(logmax_den_re0, xmm5);
+    xmm0 = simde_mm_max_epi16(bit_met_p7_m7, bit_met_p5_m7);
+    xmm1 = simde_mm_max_epi16(bit_met_p3_m7, bit_met_p1_m7);
+    xmm2 = simde_mm_max_epi16(bit_met_m1_m7, bit_met_m3_m7);
+    xmm3 = simde_mm_max_epi16(bit_met_m5_m7, bit_met_m7_m7);
+    xmm4 = simde_mm_max_epi16(xmm0, xmm1);
+    xmm5 = simde_mm_max_epi16(xmm2, xmm3);
+    logmax_den_re0 = simde_mm_max_epi16(logmax_den_re0, xmm4);
+    logmax_den_re0 = simde_mm_max_epi16(logmax_den_re0, xmm5);
+
+    xmm0 = simde_mm_max_epi16(bit_met_p7_m5, bit_met_p5_m5);
+    xmm1 = simde_mm_max_epi16(bit_met_p3_m5, bit_met_p1_m5);
+    xmm2 = simde_mm_max_epi16(bit_met_m1_m5, bit_met_m3_m5);
+    xmm3 = simde_mm_max_epi16(bit_met_m5_m5, bit_met_m7_m5);
+    xmm4 = simde_mm_max_epi16(xmm0, xmm1);
+    xmm5 = simde_mm_max_epi16(xmm2, xmm3);
+    logmax_num_re0 = simde_mm_max_epi16(xmm4, xmm5);
+    xmm0 = simde_mm_max_epi16(bit_met_p7_m3, bit_met_p5_m3);
+    xmm1 = simde_mm_max_epi16(bit_met_p3_m3, bit_met_p1_m3);
+    xmm2 = simde_mm_max_epi16(bit_met_m1_m3, bit_met_m3_m3);
+    xmm3 = simde_mm_max_epi16(bit_met_m5_m3, bit_met_m7_m3);
+    xmm4 = simde_mm_max_epi16(xmm0, xmm1);
+    xmm5 = simde_mm_max_epi16(xmm2, xmm3);
+    logmax_num_re0 = simde_mm_max_epi16(logmax_num_re0, xmm4);
+    logmax_num_re0 = simde_mm_max_epi16(logmax_num_re0, xmm5);
+    xmm0 = simde_mm_max_epi16(bit_met_p7_p3, bit_met_p5_p3);
+    xmm1 = simde_mm_max_epi16(bit_met_p3_p3, bit_met_p1_p3);
+    xmm2 = simde_mm_max_epi16(bit_met_m1_p3, bit_met_m3_p3);
+    xmm3 = simde_mm_max_epi16(bit_met_m5_p3, bit_met_m7_p3);
+    xmm4 = simde_mm_max_epi16(xmm0, xmm1);
+    xmm5 = simde_mm_max_epi16(xmm2, xmm3);
+    logmax_num_re0 = simde_mm_max_epi16(logmax_num_re0, xmm4);
+    logmax_num_re0 = simde_mm_max_epi16(logmax_num_re0, xmm5);
+    xmm0 = simde_mm_max_epi16(bit_met_p7_p5, bit_met_p5_p5);
+    xmm1 = simde_mm_max_epi16(bit_met_p3_p5, bit_met_p1_p5);
+    xmm2 = simde_mm_max_epi16(bit_met_m1_p5, bit_met_m3_p5);
+    xmm3 = simde_mm_max_epi16(bit_met_m5_p5, bit_met_m7_p5);
+    xmm4 = simde_mm_max_epi16(xmm0, xmm1);
+    xmm5 = simde_mm_max_epi16(xmm2, xmm3);
+    logmax_num_re0 = simde_mm_max_epi16(logmax_num_re0, xmm4);
+    logmax_num_re0 = simde_mm_max_epi16(logmax_num_re0, xmm5);
+
+    simde__m128i y2i = simde_mm_subs_epi16(logmax_num_re0, logmax_den_re0);
 
     // map to output stream, difficult to do in SIMD since we have 6 16bit LLRs
     // RE 1
@@ -6469,17 +6143,7 @@ void qam64_qam16(short *stream0_in,
     stream0_out[j + 45] = ((short *)&y0i)[7];
     stream0_out[j + 46] = ((short *)&y1i)[7];
     stream0_out[j + 47] = ((short *)&y2i)[7];
-
-#elif defined(__arm__) || defined(__aarch64__)
-
-#endif
   }
-
-#if defined(__x86_64__) || defined(__i386__)
-  _mm_empty();
-  _m_empty();
-#endif
-
 }
 
 
@@ -6566,504 +6230,497 @@ void qam64_qam64(short *stream0_in,
     stream0_out: output LLRs for 1st stream
   */
 
-#if defined(__x86_64__) || defined(__i386__)
-
-  __m128i *rho01_128i      = (__m128i *)rho01;
-  __m128i *stream0_128i_in = (__m128i *)stream0_in;
-  __m128i *stream1_128i_in = (__m128i *)stream1_in;
-  __m128i *ch_mag_128i     = (__m128i *)ch_mag;
-  __m128i *ch_mag_128i_i   = (__m128i *)ch_mag_i;
-
-  __m128i ONE_OVER_SQRT_42 = _mm_set1_epi16(10112); // round(1/sqrt(42)*2^16)
-  __m128i THREE_OVER_SQRT_42 = _mm_set1_epi16(30337); // round(3/sqrt(42)*2^16)
-  __m128i FIVE_OVER_SQRT_42 = _mm_set1_epi16(25281); // round(5/sqrt(42)*2^15)
-  __m128i SEVEN_OVER_SQRT_42 = _mm_set1_epi16(17697); // round(7/sqrt(42)*2^14) Q2.14
-  __m128i ONE_OVER_SQRT_2 = _mm_set1_epi16(23170); // round(1/sqrt(2)*2^15)
-  __m128i ONE_OVER_SQRT_2_42 = _mm_set1_epi16(3575); // round(1/sqrt(2*42)*2^15)
-  __m128i THREE_OVER_SQRT_2_42 = _mm_set1_epi16(10726); // round(3/sqrt(2*42)*2^15)
-  __m128i FIVE_OVER_SQRT_2_42 = _mm_set1_epi16(17876); // round(5/sqrt(2*42)*2^15)
-  __m128i SEVEN_OVER_SQRT_2_42 = _mm_set1_epi16(25027); // round(7/sqrt(2*42)*2^15)
-  __m128i FORTYNINE_OVER_FOUR_SQRT_42 = _mm_set1_epi16(30969); // round(49/(4*sqrt(42))*2^14), Q2.14
-  __m128i THIRTYSEVEN_OVER_FOUR_SQRT_42 = _mm_set1_epi16(23385); // round(37/(4*sqrt(42))*2^14), Q2.14
-  __m128i TWENTYFIVE_OVER_FOUR_SQRT_42 = _mm_set1_epi16(31601); // round(25/(4*sqrt(42))*2^15)
-  __m128i TWENTYNINE_OVER_FOUR_SQRT_42 = _mm_set1_epi16(18329); // round(29/(4*sqrt(42))*2^15), Q2.14
-  __m128i SEVENTEEN_OVER_FOUR_SQRT_42 = _mm_set1_epi16(21489); // round(17/(4*sqrt(42))*2^15)
-  __m128i NINE_OVER_FOUR_SQRT_42 = _mm_set1_epi16(11376); // round(9/(4*sqrt(42))*2^15)
-  __m128i THIRTEEN_OVER_FOUR_SQRT_42 = _mm_set1_epi16(16433); // round(13/(4*sqrt(42))*2^15)
-  __m128i FIVE_OVER_FOUR_SQRT_42 = _mm_set1_epi16(6320); // round(5/(4*sqrt(42))*2^15)
-  __m128i ONE_OVER_FOUR_SQRT_42 = _mm_set1_epi16(1264); // round(1/(4*sqrt(42))*2^15)
-  __m128i SQRT_42_OVER_FOUR = _mm_set1_epi16(13272); // round(sqrt(42)/4*2^13), Q3.12
-
-  __m128i ch_mag_des;
-  __m128i ch_mag_int;
-  __m128i ch_mag_98_over_42_with_sigma2;
-  __m128i ch_mag_74_over_42_with_sigma2;
-  __m128i ch_mag_58_over_42_with_sigma2;
-  __m128i ch_mag_50_over_42_with_sigma2;
-  __m128i ch_mag_34_over_42_with_sigma2;
-  __m128i ch_mag_18_over_42_with_sigma2;
-  __m128i ch_mag_26_over_42_with_sigma2;
-  __m128i ch_mag_10_over_42_with_sigma2;
-  __m128i ch_mag_2_over_42_with_sigma2;
-  __m128i  y0r_one_over_sqrt_21;
-  __m128i  y0r_three_over_sqrt_21;
-  __m128i  y0r_five_over_sqrt_21;
-  __m128i  y0r_seven_over_sqrt_21;
-  __m128i  y0i_one_over_sqrt_21;
-  __m128i  y0i_three_over_sqrt_21;
-  __m128i  y0i_five_over_sqrt_21;
-  __m128i  y0i_seven_over_sqrt_21;
-  __m128i ch_mag_int_with_sigma2;
-  __m128i two_ch_mag_int_with_sigma2;
-  __m128i three_ch_mag_int_with_sigma2;
-#elif defined(__arm__) || defined(__aarch64__)
-
-#endif
+  simde__m128i *rho01_128i      = (simde__m128i *)rho01;
+  simde__m128i *stream0_128i_in = (simde__m128i *)stream0_in;
+  simde__m128i *stream1_128i_in = (simde__m128i *)stream1_in;
+  simde__m128i *ch_mag_128i     = (simde__m128i *)ch_mag;
+  simde__m128i *ch_mag_128i_i   = (simde__m128i *)ch_mag_i;
+
+  simde__m128i ONE_OVER_SQRT_42 = simde_mm_set1_epi16(10112); // round(1/sqrt(42)*2^16)
+  simde__m128i THREE_OVER_SQRT_42 = simde_mm_set1_epi16(30337); // round(3/sqrt(42)*2^16)
+  simde__m128i FIVE_OVER_SQRT_42 = simde_mm_set1_epi16(25281); // round(5/sqrt(42)*2^15)
+  simde__m128i SEVEN_OVER_SQRT_42 = simde_mm_set1_epi16(17697); // round(7/sqrt(42)*2^14) Q2.14
+  simde__m128i ONE_OVER_SQRT_2 = simde_mm_set1_epi16(23170); // round(1/sqrt(2)*2^15)
+  simde__m128i ONE_OVER_SQRT_2_42 = simde_mm_set1_epi16(3575); // round(1/sqrt(2*42)*2^15)
+  simde__m128i THREE_OVER_SQRT_2_42 = simde_mm_set1_epi16(10726); // round(3/sqrt(2*42)*2^15)
+  simde__m128i FIVE_OVER_SQRT_2_42 = simde_mm_set1_epi16(17876); // round(5/sqrt(2*42)*2^15)
+  simde__m128i SEVEN_OVER_SQRT_2_42 = simde_mm_set1_epi16(25027); // round(7/sqrt(2*42)*2^15)
+  simde__m128i FORTYNINE_OVER_FOUR_SQRT_42 = simde_mm_set1_epi16(30969); // round(49/(4*sqrt(42))*2^14), Q2.14
+  simde__m128i THIRTYSEVEN_OVER_FOUR_SQRT_42 = simde_mm_set1_epi16(23385); // round(37/(4*sqrt(42))*2^14), Q2.14
+  simde__m128i TWENTYFIVE_OVER_FOUR_SQRT_42 = simde_mm_set1_epi16(31601); // round(25/(4*sqrt(42))*2^15)
+  simde__m128i TWENTYNINE_OVER_FOUR_SQRT_42 = simde_mm_set1_epi16(18329); // round(29/(4*sqrt(42))*2^15), Q2.14
+  simde__m128i SEVENTEEN_OVER_FOUR_SQRT_42 = simde_mm_set1_epi16(21489); // round(17/(4*sqrt(42))*2^15)
+  simde__m128i NINE_OVER_FOUR_SQRT_42 = simde_mm_set1_epi16(11376); // round(9/(4*sqrt(42))*2^15)
+  simde__m128i THIRTEEN_OVER_FOUR_SQRT_42 = simde_mm_set1_epi16(16433); // round(13/(4*sqrt(42))*2^15)
+  simde__m128i FIVE_OVER_FOUR_SQRT_42 = simde_mm_set1_epi16(6320); // round(5/(4*sqrt(42))*2^15)
+  simde__m128i ONE_OVER_FOUR_SQRT_42 = simde_mm_set1_epi16(1264); // round(1/(4*sqrt(42))*2^15)
+  simde__m128i SQRT_42_OVER_FOUR = simde_mm_set1_epi16(13272); // round(sqrt(42)/4*2^13), Q3.12
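+  // Fixed-point note (a sketch, inferred from the round(...) comments above):
+  // simde_mm_mulhi_epi16(x, K) computes (x*K)>>16, so a constant stored as
+  // round(c*2^16) multiplies by c directly, while constants that only fit as
+  // round(c*2^15) or round(c*2^14) leave the product scaled by 1/2 or 1/4 and
+  // are compensated by the simde_mm_slli_epi16(.., 1) / (.., 2) shifts below.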
+
+  simde__m128i ch_mag_des;
+  simde__m128i ch_mag_int;
+  simde__m128i ch_mag_98_over_42_with_sigma2;
+  simde__m128i ch_mag_74_over_42_with_sigma2;
+  simde__m128i ch_mag_58_over_42_with_sigma2;
+  simde__m128i ch_mag_50_over_42_with_sigma2;
+  simde__m128i ch_mag_34_over_42_with_sigma2;
+  simde__m128i ch_mag_18_over_42_with_sigma2;
+  simde__m128i ch_mag_26_over_42_with_sigma2;
+  simde__m128i ch_mag_10_over_42_with_sigma2;
+  simde__m128i ch_mag_2_over_42_with_sigma2;
+  simde__m128i  y0r_one_over_sqrt_21;
+  simde__m128i  y0r_three_over_sqrt_21;
+  simde__m128i  y0r_five_over_sqrt_21;
+  simde__m128i  y0r_seven_over_sqrt_21;
+  simde__m128i  y0i_one_over_sqrt_21;
+  simde__m128i  y0i_three_over_sqrt_21;
+  simde__m128i  y0i_five_over_sqrt_21;
+  simde__m128i  y0i_seven_over_sqrt_21;
+  simde__m128i ch_mag_int_with_sigma2;
+  simde__m128i two_ch_mag_int_with_sigma2;
+  simde__m128i three_ch_mag_int_with_sigma2;
 
   int i,j;
 
 
   for (i=0; i<length>>2; i+=2) {
 
-#if defined(__x86_64__) || defined(__i386__)
-
     // Get rho
     simde__m128i xmm0 = rho01_128i[i];
     simde__m128i xmm1 = rho01_128i[i + 1];
-    xmm0 = _mm_shufflelo_epi16(xmm0,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm0 = _mm_shufflehi_epi16(xmm0,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm0 = _mm_shuffle_epi32(xmm0,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm1 = _mm_shufflelo_epi16(xmm1,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm1 = _mm_shufflehi_epi16(xmm1,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm1 = _mm_shuffle_epi32(xmm1,0xd8); //_MM_SHUFFLE(0,2,1,3));
+    xmm0 = simde_mm_shufflelo_epi16(xmm0, 0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
+    xmm0 = simde_mm_shufflehi_epi16(xmm0, 0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
+    xmm0 = simde_mm_shuffle_epi32(xmm0, 0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
+    xmm1 = simde_mm_shufflelo_epi16(xmm1, 0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
+    xmm1 = simde_mm_shufflehi_epi16(xmm1, 0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
+    xmm1 = simde_mm_shuffle_epi32(xmm1, 0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
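+    // 0xd8 selects lanes (0,2,1,3): applied at 16-bit then 32-bit granularity it
+    // groups the real parts in the low 64 bits and the imaginary parts in the high
+    // 64 bits of each register, ready for the unpacklo/unpackhi_epi64 split below.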
     //xmm0 = [Re(0,1) Re(2,3) Im(0,1) Im(2,3)]
     //xmm1 = [Re(4,5) Re(6,7) Im(4,5) Im(6,7)]
-    simde__m128i xmm2 = _mm_unpacklo_epi64(xmm0, xmm1); // Re(rho)
-    simde__m128i xmm3 = _mm_unpackhi_epi64(xmm0, xmm1); // Im(rho)
-    simde__m128i rho_rpi = _mm_adds_epi16(xmm2, xmm3); // rho = Re(rho) + Im(rho)
-    simde__m128i rho_rmi = _mm_subs_epi16(xmm2, xmm3); // rho* = Re(rho) - Im(rho)
+    simde__m128i xmm2 = simde_mm_unpacklo_epi64(xmm0, xmm1); // Re(rho)
+    simde__m128i xmm3 = simde_mm_unpackhi_epi64(xmm0, xmm1); // Im(rho)
+    simde__m128i rho_rpi = simde_mm_adds_epi16(xmm2, xmm3); // rho = Re(rho) + Im(rho)
+    simde__m128i rho_rmi = simde_mm_subs_epi16(xmm2, xmm3); // rho* = Re(rho) - Im(rho)
 
     // Compute the different rhos
-    simde__m128i rho_rpi_1_1 = _mm_mulhi_epi16(rho_rpi, ONE_OVER_SQRT_42);
-    simde__m128i rho_rmi_1_1 = _mm_mulhi_epi16(rho_rmi, ONE_OVER_SQRT_42);
-    simde__m128i rho_rpi_3_3 = _mm_mulhi_epi16(rho_rpi, THREE_OVER_SQRT_42);
-    simde__m128i rho_rmi_3_3 = _mm_mulhi_epi16(rho_rmi, THREE_OVER_SQRT_42);
-    simde__m128i rho_rpi_5_5 = _mm_mulhi_epi16(rho_rpi, FIVE_OVER_SQRT_42);
-    simde__m128i rho_rmi_5_5 = _mm_mulhi_epi16(rho_rmi, FIVE_OVER_SQRT_42);
-    simde__m128i rho_rpi_7_7 = _mm_mulhi_epi16(rho_rpi, SEVEN_OVER_SQRT_42);
-    simde__m128i rho_rmi_7_7 = _mm_mulhi_epi16(rho_rmi, SEVEN_OVER_SQRT_42);
-
-    rho_rpi_5_5 = _mm_slli_epi16(rho_rpi_5_5, 1);
-    rho_rmi_5_5 = _mm_slli_epi16(rho_rmi_5_5, 1);
-    rho_rpi_7_7 = _mm_slli_epi16(rho_rpi_7_7, 2);
-    rho_rmi_7_7 = _mm_slli_epi16(rho_rmi_7_7, 2);
-
-    simde__m128i xmm4 = _mm_mulhi_epi16(xmm2, ONE_OVER_SQRT_42);
-    simde__m128i xmm5 = _mm_mulhi_epi16(xmm3, ONE_OVER_SQRT_42);
-    simde__m128i xmm6 = _mm_mulhi_epi16(xmm3, THREE_OVER_SQRT_42);
-    simde__m128i xmm7 = _mm_mulhi_epi16(xmm3, FIVE_OVER_SQRT_42);
-    simde__m128i xmm8 = _mm_mulhi_epi16(xmm3, SEVEN_OVER_SQRT_42);
-    xmm7 = _mm_slli_epi16(xmm7, 1);
-    xmm8 = _mm_slli_epi16(xmm8, 2);
-
-    simde__m128i rho_rpi_1_3 = _mm_adds_epi16(xmm4, xmm6);
-    simde__m128i rho_rmi_1_3 = _mm_subs_epi16(xmm4, xmm6);
-    simde__m128i rho_rpi_1_5 = _mm_adds_epi16(xmm4, xmm7);
-    simde__m128i rho_rmi_1_5 = _mm_subs_epi16(xmm4, xmm7);
-    simde__m128i rho_rpi_1_7 = _mm_adds_epi16(xmm4, xmm8);
-    simde__m128i rho_rmi_1_7 = _mm_subs_epi16(xmm4, xmm8);
-
-    xmm4 = _mm_mulhi_epi16(xmm2, THREE_OVER_SQRT_42);
-    simde__m128i rho_rpi_3_1 = _mm_adds_epi16(xmm4, xmm5);
-    simde__m128i rho_rmi_3_1 = _mm_subs_epi16(xmm4, xmm5);
-    simde__m128i rho_rpi_3_5 = _mm_adds_epi16(xmm4, xmm7);
-    simde__m128i rho_rmi_3_5 = _mm_subs_epi16(xmm4, xmm7);
-    simde__m128i rho_rpi_3_7 = _mm_adds_epi16(xmm4, xmm8);
-    simde__m128i rho_rmi_3_7 = _mm_subs_epi16(xmm4, xmm8);
-
-    xmm4 = _mm_mulhi_epi16(xmm2, FIVE_OVER_SQRT_42);
-    xmm4 = _mm_slli_epi16(xmm4, 1);
-    simde__m128i rho_rpi_5_1 = _mm_adds_epi16(xmm4, xmm5);
-    simde__m128i rho_rmi_5_1 = _mm_subs_epi16(xmm4, xmm5);
-    simde__m128i rho_rpi_5_3 = _mm_adds_epi16(xmm4, xmm6);
-    simde__m128i rho_rmi_5_3 = _mm_subs_epi16(xmm4, xmm6);
-    simde__m128i rho_rpi_5_7 = _mm_adds_epi16(xmm4, xmm8);
-    simde__m128i rho_rmi_5_7 = _mm_subs_epi16(xmm4, xmm8);
-
-    xmm4 = _mm_mulhi_epi16(xmm2, SEVEN_OVER_SQRT_42);
-    xmm4 = _mm_slli_epi16(xmm4, 2);
-    simde__m128i rho_rpi_7_1 = _mm_adds_epi16(xmm4, xmm5);
-    simde__m128i rho_rmi_7_1 = _mm_subs_epi16(xmm4, xmm5);
-    simde__m128i rho_rpi_7_3 = _mm_adds_epi16(xmm4, xmm6);
-    simde__m128i rho_rmi_7_3 = _mm_subs_epi16(xmm4, xmm6);
-    simde__m128i rho_rpi_7_5 = _mm_adds_epi16(xmm4, xmm7);
-    simde__m128i rho_rmi_7_5 = _mm_subs_epi16(xmm4, xmm7);
+    simde__m128i rho_rpi_1_1 = simde_mm_mulhi_epi16(rho_rpi, ONE_OVER_SQRT_42);
+    simde__m128i rho_rmi_1_1 = simde_mm_mulhi_epi16(rho_rmi, ONE_OVER_SQRT_42);
+    simde__m128i rho_rpi_3_3 = simde_mm_mulhi_epi16(rho_rpi, THREE_OVER_SQRT_42);
+    simde__m128i rho_rmi_3_3 = simde_mm_mulhi_epi16(rho_rmi, THREE_OVER_SQRT_42);
+    simde__m128i rho_rpi_5_5 = simde_mm_mulhi_epi16(rho_rpi, FIVE_OVER_SQRT_42);
+    simde__m128i rho_rmi_5_5 = simde_mm_mulhi_epi16(rho_rmi, FIVE_OVER_SQRT_42);
+    simde__m128i rho_rpi_7_7 = simde_mm_mulhi_epi16(rho_rpi, SEVEN_OVER_SQRT_42);
+    simde__m128i rho_rmi_7_7 = simde_mm_mulhi_epi16(rho_rmi, SEVEN_OVER_SQRT_42);
+
+    rho_rpi_5_5 = simde_mm_slli_epi16(rho_rpi_5_5, 1);
+    rho_rmi_5_5 = simde_mm_slli_epi16(rho_rmi_5_5, 1);
+    rho_rpi_7_7 = simde_mm_slli_epi16(rho_rpi_7_7, 2);
+    rho_rmi_7_7 = simde_mm_slli_epi16(rho_rmi_7_7, 2);
+
+    simde__m128i xmm4 = simde_mm_mulhi_epi16(xmm2, ONE_OVER_SQRT_42);
+    simde__m128i xmm5 = simde_mm_mulhi_epi16(xmm3, ONE_OVER_SQRT_42);
+    simde__m128i xmm6 = simde_mm_mulhi_epi16(xmm3, THREE_OVER_SQRT_42);
+    simde__m128i xmm7 = simde_mm_mulhi_epi16(xmm3, FIVE_OVER_SQRT_42);
+    simde__m128i xmm8 = simde_mm_mulhi_epi16(xmm3, SEVEN_OVER_SQRT_42);
+    xmm7 = simde_mm_slli_epi16(xmm7, 1);
+    xmm8 = simde_mm_slli_epi16(xmm8, 2);
+
+    simde__m128i rho_rpi_1_3 = simde_mm_adds_epi16(xmm4, xmm6);
+    simde__m128i rho_rmi_1_3 = simde_mm_subs_epi16(xmm4, xmm6);
+    simde__m128i rho_rpi_1_5 = simde_mm_adds_epi16(xmm4, xmm7);
+    simde__m128i rho_rmi_1_5 = simde_mm_subs_epi16(xmm4, xmm7);
+    simde__m128i rho_rpi_1_7 = simde_mm_adds_epi16(xmm4, xmm8);
+    simde__m128i rho_rmi_1_7 = simde_mm_subs_epi16(xmm4, xmm8);
+
+    xmm4 = simde_mm_mulhi_epi16(xmm2, THREE_OVER_SQRT_42);
+    simde__m128i rho_rpi_3_1 = simde_mm_adds_epi16(xmm4, xmm5);
+    simde__m128i rho_rmi_3_1 = simde_mm_subs_epi16(xmm4, xmm5);
+    simde__m128i rho_rpi_3_5 = simde_mm_adds_epi16(xmm4, xmm7);
+    simde__m128i rho_rmi_3_5 = simde_mm_subs_epi16(xmm4, xmm7);
+    simde__m128i rho_rpi_3_7 = simde_mm_adds_epi16(xmm4, xmm8);
+    simde__m128i rho_rmi_3_7 = simde_mm_subs_epi16(xmm4, xmm8);
+
+    xmm4 = simde_mm_mulhi_epi16(xmm2, FIVE_OVER_SQRT_42);
+    xmm4 = simde_mm_slli_epi16(xmm4, 1);
+    simde__m128i rho_rpi_5_1 = simde_mm_adds_epi16(xmm4, xmm5);
+    simde__m128i rho_rmi_5_1 = simde_mm_subs_epi16(xmm4, xmm5);
+    simde__m128i rho_rpi_5_3 = simde_mm_adds_epi16(xmm4, xmm6);
+    simde__m128i rho_rmi_5_3 = simde_mm_subs_epi16(xmm4, xmm6);
+    simde__m128i rho_rpi_5_7 = simde_mm_adds_epi16(xmm4, xmm8);
+    simde__m128i rho_rmi_5_7 = simde_mm_subs_epi16(xmm4, xmm8);
+
+    xmm4 = simde_mm_mulhi_epi16(xmm2, SEVEN_OVER_SQRT_42);
+    xmm4 = simde_mm_slli_epi16(xmm4, 2);
+    simde__m128i rho_rpi_7_1 = simde_mm_adds_epi16(xmm4, xmm5);
+    simde__m128i rho_rmi_7_1 = simde_mm_subs_epi16(xmm4, xmm5);
+    simde__m128i rho_rpi_7_3 = simde_mm_adds_epi16(xmm4, xmm6);
+    simde__m128i rho_rmi_7_3 = simde_mm_subs_epi16(xmm4, xmm6);
+    simde__m128i rho_rpi_7_5 = simde_mm_adds_epi16(xmm4, xmm7);
+    simde__m128i rho_rmi_7_5 = simde_mm_subs_epi16(xmm4, xmm7);
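+    // Summary: rho_rpi_a_b = (a*Re(rho) + b*Im(rho))/sqrt(42) and
+    // rho_rmi_a_b = (a*Re(rho) - b*Im(rho))/sqrt(42) for a,b in {1,3,5,7},
+    // assembled from the shared partial products xmm4..xmm8, with the diagonal
+    // a==b cases computed directly from rho_rpi/rho_rmi.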
 
     // Rearrange interfering MF output
     xmm0 = stream1_128i_in[i];
     xmm1 = stream1_128i_in[i+1];
-    xmm0 = _mm_shufflelo_epi16(xmm0,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm0 = _mm_shufflehi_epi16(xmm0,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm0 = _mm_shuffle_epi32(xmm0,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm1 = _mm_shufflelo_epi16(xmm1,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm1 = _mm_shufflehi_epi16(xmm1,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm1 = _mm_shuffle_epi32(xmm1,0xd8); //_MM_SHUFFLE(0,2,1,3));
+    xmm0 = simde_mm_shufflelo_epi16(xmm0,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
+    xmm0 = simde_mm_shufflehi_epi16(xmm0,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
+    xmm0 = simde_mm_shuffle_epi32(xmm0,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
+    xmm1 = simde_mm_shufflelo_epi16(xmm1,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
+    xmm1 = simde_mm_shufflehi_epi16(xmm1,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
+    xmm1 = simde_mm_shuffle_epi32(xmm1,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
     //xmm0 = [Re(0,1) Re(2,3) Im(0,1) Im(2,3)]
     //xmm1 = [Re(4,5) Re(6,7) Im(4,5) Im(6,7)]
-    simde__m128i y1r = _mm_unpacklo_epi64(xmm0, xmm1); //[y1r(1),y1r(2),y1r(3),y1r(4)]
-    simde__m128i y1i = _mm_unpackhi_epi64(xmm0, xmm1); //[y1i(1),y1i(2),y1i(3),y1i(4)]
+    simde__m128i y1r = simde_mm_unpacklo_epi64(xmm0, xmm1); //[y1r(1),y1r(2),y1r(3),y1r(4)]
+    simde__m128i y1i = simde_mm_unpackhi_epi64(xmm0, xmm1); //[y1i(1),y1i(2),y1i(3),y1i(4)]
 
     // Psi_r calculation from rho_rpi or rho_rmi
-    xmm0 = _mm_setzero_si128(); // ZERO for abs_pi16
-    xmm2 = _mm_subs_epi16(rho_rpi_7_7, y1r);
-    simde__m128i psi_r_p7_p7 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_subs_epi16(rho_rpi_7_5, y1r);
-    simde__m128i psi_r_p7_p5 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_subs_epi16(rho_rpi_7_3, y1r);
-    simde__m128i psi_r_p7_p3 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_subs_epi16(rho_rpi_7_1, y1r);
-    simde__m128i psi_r_p7_p1 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_subs_epi16(rho_rmi_7_1, y1r);
-    simde__m128i psi_r_p7_m1 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_subs_epi16(rho_rmi_7_3, y1r);
-    simde__m128i psi_r_p7_m3 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_subs_epi16(rho_rmi_7_5, y1r);
-    simde__m128i psi_r_p7_m5 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_subs_epi16(rho_rmi_7_7, y1r);
-    simde__m128i psi_r_p7_m7 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_subs_epi16(rho_rpi_5_7, y1r);
-    simde__m128i psi_r_p5_p7 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_subs_epi16(rho_rpi_5_5, y1r);
-    simde__m128i psi_r_p5_p5 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_subs_epi16(rho_rpi_5_3, y1r);
-    simde__m128i psi_r_p5_p3 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_subs_epi16(rho_rpi_5_1, y1r);
-    simde__m128i psi_r_p5_p1 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_subs_epi16(rho_rmi_5_1, y1r);
-    simde__m128i psi_r_p5_m1 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_subs_epi16(rho_rmi_5_3, y1r);
-    simde__m128i psi_r_p5_m3 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_subs_epi16(rho_rmi_5_5, y1r);
-    simde__m128i psi_r_p5_m5 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_subs_epi16(rho_rmi_5_7, y1r);
-    simde__m128i psi_r_p5_m7 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_subs_epi16(rho_rpi_3_7, y1r);
-    simde__m128i psi_r_p3_p7 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_subs_epi16(rho_rpi_3_5, y1r);
-    simde__m128i psi_r_p3_p5 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_subs_epi16(rho_rpi_3_3, y1r);
-    simde__m128i psi_r_p3_p3 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_subs_epi16(rho_rpi_3_1, y1r);
-    simde__m128i psi_r_p3_p1 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_subs_epi16(rho_rmi_3_1, y1r);
-    simde__m128i psi_r_p3_m1 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_subs_epi16(rho_rmi_3_3, y1r);
-    simde__m128i psi_r_p3_m3 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_subs_epi16(rho_rmi_3_5, y1r);
-    simde__m128i psi_r_p3_m5 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_subs_epi16(rho_rmi_3_7, y1r);
-    simde__m128i psi_r_p3_m7 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_subs_epi16(rho_rpi_1_7, y1r);
-    simde__m128i psi_r_p1_p7 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_subs_epi16(rho_rpi_1_5, y1r);
-    simde__m128i psi_r_p1_p5 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_subs_epi16(rho_rpi_1_3, y1r);
-    simde__m128i psi_r_p1_p3 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_subs_epi16(rho_rpi_1_1, y1r);
-    simde__m128i psi_r_p1_p1 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_subs_epi16(rho_rmi_1_1, y1r);
-    simde__m128i psi_r_p1_m1 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_subs_epi16(rho_rmi_1_3, y1r);
-    simde__m128i psi_r_p1_m3 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_subs_epi16(rho_rmi_1_5, y1r);
-    simde__m128i psi_r_p1_m5 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_subs_epi16(rho_rmi_1_7, y1r);
-    simde__m128i psi_r_p1_m7 = _mm_abs_epi16(xmm2);
-
-    xmm2 = _mm_adds_epi16(rho_rmi_1_7, y1r);
-    simde__m128i psi_r_m1_p7 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_adds_epi16(rho_rmi_1_5, y1r);
-    simde__m128i psi_r_m1_p5 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_adds_epi16(rho_rmi_1_3, y1r);
-    simde__m128i psi_r_m1_p3 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_adds_epi16(rho_rmi_1_1, y1r);
-    simde__m128i psi_r_m1_p1 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_adds_epi16(rho_rpi_1_1, y1r);
-    simde__m128i psi_r_m1_m1 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_adds_epi16(rho_rpi_1_3, y1r);
-    simde__m128i psi_r_m1_m3 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_adds_epi16(rho_rpi_1_5, y1r);
-    simde__m128i psi_r_m1_m5 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_adds_epi16(rho_rpi_1_7, y1r);
-    simde__m128i psi_r_m1_m7 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_adds_epi16(rho_rmi_3_7, y1r);
-    simde__m128i psi_r_m3_p7 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_adds_epi16(rho_rmi_3_5, y1r);
-    simde__m128i psi_r_m3_p5 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_adds_epi16(rho_rmi_3_3, y1r);
-    simde__m128i psi_r_m3_p3 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_adds_epi16(rho_rmi_3_1, y1r);
-    simde__m128i psi_r_m3_p1 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_adds_epi16(rho_rpi_3_1, y1r);
-    simde__m128i psi_r_m3_m1 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_adds_epi16(rho_rpi_3_3, y1r);
-    simde__m128i psi_r_m3_m3 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_adds_epi16(rho_rpi_3_5, y1r);
-    simde__m128i psi_r_m3_m5 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_adds_epi16(rho_rpi_3_7, y1r);
-    simde__m128i psi_r_m3_m7 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_adds_epi16(rho_rmi_5_7, y1r);
-    simde__m128i psi_r_m5_p7 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_adds_epi16(rho_rmi_5_5, y1r);
-    simde__m128i psi_r_m5_p5 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_adds_epi16(rho_rmi_5_3, y1r);
-    simde__m128i psi_r_m5_p3 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_adds_epi16(rho_rmi_5_1, y1r);
-    simde__m128i psi_r_m5_p1 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_adds_epi16(rho_rpi_5_1, y1r);
-    simde__m128i psi_r_m5_m1 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_adds_epi16(rho_rpi_5_3, y1r);
-    simde__m128i psi_r_m5_m3 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_adds_epi16(rho_rpi_5_5, y1r);
-    simde__m128i psi_r_m5_m5 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_adds_epi16(rho_rpi_5_7, y1r);
-    simde__m128i psi_r_m5_m7 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_adds_epi16(rho_rmi_7_7, y1r);
-    simde__m128i psi_r_m7_p7 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_adds_epi16(rho_rmi_7_5, y1r);
-    simde__m128i psi_r_m7_p5 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_adds_epi16(rho_rmi_7_3, y1r);
-    simde__m128i psi_r_m7_p3 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_adds_epi16(rho_rmi_7_1, y1r);
-    simde__m128i psi_r_m7_p1 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_adds_epi16(rho_rpi_7_1, y1r);
-    simde__m128i psi_r_m7_m1 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_adds_epi16(rho_rpi_7_3, y1r);
-    simde__m128i psi_r_m7_m3 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_adds_epi16(rho_rpi_7_5, y1r);
-    simde__m128i psi_r_m7_m5 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_adds_epi16(rho_rpi_7_7, y1r);
-    simde__m128i psi_r_m7_m7 = _mm_abs_epi16(xmm2);
+    xmm0 = simde_mm_setzero_si128(); // ZERO for abs_epi16 (not consumed by simde_mm_abs_epi16)
+    xmm2 = simde_mm_subs_epi16(rho_rpi_7_7, y1r);
+    simde__m128i psi_r_p7_p7 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rpi_7_5, y1r);
+    simde__m128i psi_r_p7_p5 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rpi_7_3, y1r);
+    simde__m128i psi_r_p7_p3 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rpi_7_1, y1r);
+    simde__m128i psi_r_p7_p1 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rmi_7_1, y1r);
+    simde__m128i psi_r_p7_m1 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rmi_7_3, y1r);
+    simde__m128i psi_r_p7_m3 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rmi_7_5, y1r);
+    simde__m128i psi_r_p7_m5 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rmi_7_7, y1r);
+    simde__m128i psi_r_p7_m7 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rpi_5_7, y1r);
+    simde__m128i psi_r_p5_p7 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rpi_5_5, y1r);
+    simde__m128i psi_r_p5_p5 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rpi_5_3, y1r);
+    simde__m128i psi_r_p5_p3 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rpi_5_1, y1r);
+    simde__m128i psi_r_p5_p1 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rmi_5_1, y1r);
+    simde__m128i psi_r_p5_m1 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rmi_5_3, y1r);
+    simde__m128i psi_r_p5_m3 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rmi_5_5, y1r);
+    simde__m128i psi_r_p5_m5 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rmi_5_7, y1r);
+    simde__m128i psi_r_p5_m7 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rpi_3_7, y1r);
+    simde__m128i psi_r_p3_p7 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rpi_3_5, y1r);
+    simde__m128i psi_r_p3_p5 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rpi_3_3, y1r);
+    simde__m128i psi_r_p3_p3 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rpi_3_1, y1r);
+    simde__m128i psi_r_p3_p1 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rmi_3_1, y1r);
+    simde__m128i psi_r_p3_m1 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rmi_3_3, y1r);
+    simde__m128i psi_r_p3_m3 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rmi_3_5, y1r);
+    simde__m128i psi_r_p3_m5 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rmi_3_7, y1r);
+    simde__m128i psi_r_p3_m7 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rpi_1_7, y1r);
+    simde__m128i psi_r_p1_p7 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rpi_1_5, y1r);
+    simde__m128i psi_r_p1_p5 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rpi_1_3, y1r);
+    simde__m128i psi_r_p1_p3 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rpi_1_1, y1r);
+    simde__m128i psi_r_p1_p1 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rmi_1_1, y1r);
+    simde__m128i psi_r_p1_m1 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rmi_1_3, y1r);
+    simde__m128i psi_r_p1_m3 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rmi_1_5, y1r);
+    simde__m128i psi_r_p1_m5 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rmi_1_7, y1r);
+    simde__m128i psi_r_p1_m7 = simde_mm_abs_epi16(xmm2);
+
+    xmm2 = simde_mm_adds_epi16(rho_rmi_1_7, y1r);
+    simde__m128i psi_r_m1_p7 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(rho_rmi_1_5, y1r);
+    simde__m128i psi_r_m1_p5 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(rho_rmi_1_3, y1r);
+    simde__m128i psi_r_m1_p3 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(rho_rmi_1_1, y1r);
+    simde__m128i psi_r_m1_p1 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(rho_rpi_1_1, y1r);
+    simde__m128i psi_r_m1_m1 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(rho_rpi_1_3, y1r);
+    simde__m128i psi_r_m1_m3 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(rho_rpi_1_5, y1r);
+    simde__m128i psi_r_m1_m5 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(rho_rpi_1_7, y1r);
+    simde__m128i psi_r_m1_m7 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(rho_rmi_3_7, y1r);
+    simde__m128i psi_r_m3_p7 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(rho_rmi_3_5, y1r);
+    simde__m128i psi_r_m3_p5 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(rho_rmi_3_3, y1r);
+    simde__m128i psi_r_m3_p3 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(rho_rmi_3_1, y1r);
+    simde__m128i psi_r_m3_p1 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(rho_rpi_3_1, y1r);
+    simde__m128i psi_r_m3_m1 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(rho_rpi_3_3, y1r);
+    simde__m128i psi_r_m3_m3 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(rho_rpi_3_5, y1r);
+    simde__m128i psi_r_m3_m5 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(rho_rpi_3_7, y1r);
+    simde__m128i psi_r_m3_m7 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(rho_rmi_5_7, y1r);
+    simde__m128i psi_r_m5_p7 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(rho_rmi_5_5, y1r);
+    simde__m128i psi_r_m5_p5 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(rho_rmi_5_3, y1r);
+    simde__m128i psi_r_m5_p3 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(rho_rmi_5_1, y1r);
+    simde__m128i psi_r_m5_p1 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(rho_rpi_5_1, y1r);
+    simde__m128i psi_r_m5_m1 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(rho_rpi_5_3, y1r);
+    simde__m128i psi_r_m5_m3 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(rho_rpi_5_5, y1r);
+    simde__m128i psi_r_m5_m5 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(rho_rpi_5_7, y1r);
+    simde__m128i psi_r_m5_m7 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(rho_rmi_7_7, y1r);
+    simde__m128i psi_r_m7_p7 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(rho_rmi_7_5, y1r);
+    simde__m128i psi_r_m7_p5 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(rho_rmi_7_3, y1r);
+    simde__m128i psi_r_m7_p3 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(rho_rmi_7_1, y1r);
+    simde__m128i psi_r_m7_p1 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(rho_rpi_7_1, y1r);
+    simde__m128i psi_r_m7_m1 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(rho_rpi_7_3, y1r);
+    simde__m128i psi_r_m7_m3 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(rho_rpi_7_5, y1r);
+    simde__m128i psi_r_m7_m5 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(rho_rpi_7_7, y1r);
+    simde__m128i psi_r_m7_m7 = simde_mm_abs_epi16(xmm2);
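+    // Each psi_r_{a}_{b} above is |(a*Re(rho) + b*Im(rho))/sqrt(42) - y1r| for the
+    // interferer hypothesis a+jb; negative a or b is folded into the choice of
+    // rho_rpi vs rho_rmi and of adds vs subs against y1r (|x| = |-x|).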
 
     // Psi_i calculation from rho_rpi or rho_rmi
-    xmm2 = _mm_subs_epi16(rho_rmi_7_7, y1i);
-    simde__m128i psi_i_p7_p7 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_subs_epi16(rho_rmi_5_7, y1i);
-    simde__m128i psi_i_p7_p5 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_subs_epi16(rho_rmi_3_7, y1i);
-    simde__m128i psi_i_p7_p3 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_subs_epi16(rho_rmi_1_7, y1i);
-    simde__m128i psi_i_p7_p1 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_adds_epi16(rho_rpi_1_7, y1i);
-    simde__m128i psi_i_p7_m1 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_adds_epi16(rho_rpi_3_7, y1i);
-    simde__m128i psi_i_p7_m3 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_adds_epi16(rho_rpi_5_7, y1i);
-    simde__m128i psi_i_p7_m5 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_adds_epi16(rho_rpi_7_7, y1i);
-    simde__m128i psi_i_p7_m7 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_subs_epi16(rho_rmi_7_5, y1i);
-    simde__m128i psi_i_p5_p7 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_subs_epi16(rho_rmi_5_5, y1i);
-    simde__m128i psi_i_p5_p5 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_subs_epi16(rho_rmi_3_5, y1i);
-    simde__m128i psi_i_p5_p3 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_subs_epi16(rho_rmi_1_5, y1i);
-    simde__m128i psi_i_p5_p1 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_adds_epi16(rho_rpi_1_5, y1i);
-    simde__m128i psi_i_p5_m1 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_adds_epi16(rho_rpi_3_5, y1i);
-    simde__m128i psi_i_p5_m3 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_adds_epi16(rho_rpi_5_5, y1i);
-    simde__m128i psi_i_p5_m5 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_adds_epi16(rho_rpi_7_5, y1i);
-    simde__m128i psi_i_p5_m7 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_subs_epi16(rho_rmi_7_3, y1i);
-    simde__m128i psi_i_p3_p7 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_subs_epi16(rho_rmi_5_3, y1i);
-    simde__m128i psi_i_p3_p5 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_subs_epi16(rho_rmi_3_3, y1i);
-    simde__m128i psi_i_p3_p3 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_subs_epi16(rho_rmi_1_3, y1i);
-    simde__m128i psi_i_p3_p1 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_adds_epi16(rho_rpi_1_3, y1i);
-    simde__m128i psi_i_p3_m1 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_adds_epi16(rho_rpi_3_3, y1i);
-    simde__m128i psi_i_p3_m3 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_adds_epi16(rho_rpi_5_3, y1i);
-    simde__m128i psi_i_p3_m5 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_adds_epi16(rho_rpi_7_3, y1i);
-    simde__m128i psi_i_p3_m7 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_subs_epi16(rho_rmi_7_1, y1i);
-    simde__m128i psi_i_p1_p7 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_subs_epi16(rho_rmi_5_1, y1i);
-    simde__m128i psi_i_p1_p5 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_subs_epi16(rho_rmi_3_1, y1i);
-    simde__m128i psi_i_p1_p3 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_subs_epi16(rho_rmi_1_1, y1i);
-    simde__m128i psi_i_p1_p1 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_adds_epi16(rho_rpi_1_1, y1i);
-    simde__m128i psi_i_p1_m1 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_adds_epi16(rho_rpi_3_1, y1i);
-    simde__m128i psi_i_p1_m3 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_adds_epi16(rho_rpi_5_1, y1i);
-    simde__m128i psi_i_p1_m5 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_adds_epi16(rho_rpi_7_1, y1i);
-    simde__m128i psi_i_p1_m7 = _mm_abs_epi16(xmm2);
-
-    xmm2 = _mm_subs_epi16(rho_rpi_7_1, y1i);
-    simde__m128i psi_i_m1_p7 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_subs_epi16(rho_rpi_5_1, y1i);
-    simde__m128i psi_i_m1_p5 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_subs_epi16(rho_rpi_3_1, y1i);
-    simde__m128i psi_i_m1_p3 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_subs_epi16(rho_rpi_1_1, y1i);
-    simde__m128i psi_i_m1_p1 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_adds_epi16(rho_rmi_1_1, y1i);
-    simde__m128i psi_i_m1_m1 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_adds_epi16(rho_rmi_3_1, y1i);
-    simde__m128i psi_i_m1_m3 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_adds_epi16(rho_rmi_5_1, y1i);
-    simde__m128i psi_i_m1_m5 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_adds_epi16(rho_rmi_7_1, y1i);
-    simde__m128i psi_i_m1_m7 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_subs_epi16(rho_rpi_7_3, y1i);
-    simde__m128i psi_i_m3_p7 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_subs_epi16(rho_rpi_5_3, y1i);
-    simde__m128i psi_i_m3_p5 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_subs_epi16(rho_rpi_3_3, y1i);
-    simde__m128i psi_i_m3_p3 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_subs_epi16(rho_rpi_1_3, y1i);
-    simde__m128i psi_i_m3_p1 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_adds_epi16(rho_rmi_1_3, y1i);
-    simde__m128i psi_i_m3_m1 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_adds_epi16(rho_rmi_3_3, y1i);
-    simde__m128i psi_i_m3_m3 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_adds_epi16(rho_rmi_5_3, y1i);
-    simde__m128i psi_i_m3_m5 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_adds_epi16(rho_rmi_7_3, y1i);
-    simde__m128i psi_i_m3_m7 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_subs_epi16(rho_rpi_7_5, y1i);
-    simde__m128i psi_i_m5_p7 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_subs_epi16(rho_rpi_5_5, y1i);
-    simde__m128i psi_i_m5_p5 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_subs_epi16(rho_rpi_3_5, y1i);
-    simde__m128i psi_i_m5_p3 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_subs_epi16(rho_rpi_1_5, y1i);
-    simde__m128i psi_i_m5_p1 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_adds_epi16(rho_rmi_1_5, y1i);
-    simde__m128i psi_i_m5_m1 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_adds_epi16(rho_rmi_3_5, y1i);
-    simde__m128i psi_i_m5_m3 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_adds_epi16(rho_rmi_5_5, y1i);
-    simde__m128i psi_i_m5_m5 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_adds_epi16(rho_rmi_7_5, y1i);
-    simde__m128i psi_i_m5_m7 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_subs_epi16(rho_rpi_7_7, y1i);
-    simde__m128i psi_i_m7_p7 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_subs_epi16(rho_rpi_5_7, y1i);
-    simde__m128i psi_i_m7_p5 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_subs_epi16(rho_rpi_3_7, y1i);
-    simde__m128i psi_i_m7_p3 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_subs_epi16(rho_rpi_1_7, y1i);
-    simde__m128i psi_i_m7_p1 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_adds_epi16(rho_rmi_1_7, y1i);
-    simde__m128i psi_i_m7_m1 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_adds_epi16(rho_rmi_3_7, y1i);
-    simde__m128i psi_i_m7_m3 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_adds_epi16(rho_rmi_5_7, y1i);
-    simde__m128i psi_i_m7_m5 = _mm_abs_epi16(xmm2);
-    xmm2 = _mm_adds_epi16(rho_rmi_7_7, y1i);
-    simde__m128i psi_i_m7_m7 = _mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rmi_7_7, y1i);
+    simde__m128i psi_i_p7_p7 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rmi_5_7, y1i);
+    simde__m128i psi_i_p7_p5 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rmi_3_7, y1i);
+    simde__m128i psi_i_p7_p3 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rmi_1_7, y1i);
+    simde__m128i psi_i_p7_p1 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(rho_rpi_1_7, y1i);
+    simde__m128i psi_i_p7_m1 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(rho_rpi_3_7, y1i);
+    simde__m128i psi_i_p7_m3 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(rho_rpi_5_7, y1i);
+    simde__m128i psi_i_p7_m5 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(rho_rpi_7_7, y1i);
+    simde__m128i psi_i_p7_m7 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rmi_7_5, y1i);
+    simde__m128i psi_i_p5_p7 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rmi_5_5, y1i);
+    simde__m128i psi_i_p5_p5 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rmi_3_5, y1i);
+    simde__m128i psi_i_p5_p3 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rmi_1_5, y1i);
+    simde__m128i psi_i_p5_p1 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(rho_rpi_1_5, y1i);
+    simde__m128i psi_i_p5_m1 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(rho_rpi_3_5, y1i);
+    simde__m128i psi_i_p5_m3 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(rho_rpi_5_5, y1i);
+    simde__m128i psi_i_p5_m5 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(rho_rpi_7_5, y1i);
+    simde__m128i psi_i_p5_m7 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rmi_7_3, y1i);
+    simde__m128i psi_i_p3_p7 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rmi_5_3, y1i);
+    simde__m128i psi_i_p3_p5 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rmi_3_3, y1i);
+    simde__m128i psi_i_p3_p3 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rmi_1_3, y1i);
+    simde__m128i psi_i_p3_p1 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(rho_rpi_1_3, y1i);
+    simde__m128i psi_i_p3_m1 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(rho_rpi_3_3, y1i);
+    simde__m128i psi_i_p3_m3 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(rho_rpi_5_3, y1i);
+    simde__m128i psi_i_p3_m5 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(rho_rpi_7_3, y1i);
+    simde__m128i psi_i_p3_m7 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rmi_7_1, y1i);
+    simde__m128i psi_i_p1_p7 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rmi_5_1, y1i);
+    simde__m128i psi_i_p1_p5 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rmi_3_1, y1i);
+    simde__m128i psi_i_p1_p3 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rmi_1_1, y1i);
+    simde__m128i psi_i_p1_p1 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(rho_rpi_1_1, y1i);
+    simde__m128i psi_i_p1_m1 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(rho_rpi_3_1, y1i);
+    simde__m128i psi_i_p1_m3 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(rho_rpi_5_1, y1i);
+    simde__m128i psi_i_p1_m5 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(rho_rpi_7_1, y1i);
+    simde__m128i psi_i_p1_m7 = simde_mm_abs_epi16(xmm2);
+
+    xmm2 = simde_mm_subs_epi16(rho_rpi_7_1, y1i);
+    simde__m128i psi_i_m1_p7 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rpi_5_1, y1i);
+    simde__m128i psi_i_m1_p5 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rpi_3_1, y1i);
+    simde__m128i psi_i_m1_p3 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rpi_1_1, y1i);
+    simde__m128i psi_i_m1_p1 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(rho_rmi_1_1, y1i);
+    simde__m128i psi_i_m1_m1 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(rho_rmi_3_1, y1i);
+    simde__m128i psi_i_m1_m3 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(rho_rmi_5_1, y1i);
+    simde__m128i psi_i_m1_m5 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(rho_rmi_7_1, y1i);
+    simde__m128i psi_i_m1_m7 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rpi_7_3, y1i);
+    simde__m128i psi_i_m3_p7 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rpi_5_3, y1i);
+    simde__m128i psi_i_m3_p5 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rpi_3_3, y1i);
+    simde__m128i psi_i_m3_p3 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rpi_1_3, y1i);
+    simde__m128i psi_i_m3_p1 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(rho_rmi_1_3, y1i);
+    simde__m128i psi_i_m3_m1 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(rho_rmi_3_3, y1i);
+    simde__m128i psi_i_m3_m3 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(rho_rmi_5_3, y1i);
+    simde__m128i psi_i_m3_m5 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(rho_rmi_7_3, y1i);
+    simde__m128i psi_i_m3_m7 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rpi_7_5, y1i);
+    simde__m128i psi_i_m5_p7 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rpi_5_5, y1i);
+    simde__m128i psi_i_m5_p5 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rpi_3_5, y1i);
+    simde__m128i psi_i_m5_p3 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rpi_1_5, y1i);
+    simde__m128i psi_i_m5_p1 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(rho_rmi_1_5, y1i);
+    simde__m128i psi_i_m5_m1 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(rho_rmi_3_5, y1i);
+    simde__m128i psi_i_m5_m3 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(rho_rmi_5_5, y1i);
+    simde__m128i psi_i_m5_m5 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(rho_rmi_7_5, y1i);
+    simde__m128i psi_i_m5_m7 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rpi_7_7, y1i);
+    simde__m128i psi_i_m7_p7 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rpi_5_7, y1i);
+    simde__m128i psi_i_m7_p5 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rpi_3_7, y1i);
+    simde__m128i psi_i_m7_p3 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_subs_epi16(rho_rpi_1_7, y1i);
+    simde__m128i psi_i_m7_p1 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(rho_rmi_1_7, y1i);
+    simde__m128i psi_i_m7_m1 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(rho_rmi_3_7, y1i);
+    simde__m128i psi_i_m7_m3 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(rho_rmi_5_7, y1i);
+    simde__m128i psi_i_m7_m5 = simde_mm_abs_epi16(xmm2);
+    xmm2 = simde_mm_adds_epi16(rho_rmi_7_7, y1i);
+    simde__m128i psi_i_m7_m7 = simde_mm_abs_epi16(xmm2);
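+    // psi_i_{a}_{b} mirrors psi_r with the a/b indices swapped in the rho terms and
+    // y1i in place of y1r, so the same rho_rpi/rho_rmi precomputations are reused.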
 
     // Rearrange desired MF output
     xmm0 = stream0_128i_in[i];
     xmm1 = stream0_128i_in[i+1];
-    xmm0 = _mm_shufflelo_epi16(xmm0,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm0 = _mm_shufflehi_epi16(xmm0,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm0 = _mm_shuffle_epi32(xmm0,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm1 = _mm_shufflelo_epi16(xmm1,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm1 = _mm_shufflehi_epi16(xmm1,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm1 = _mm_shuffle_epi32(xmm1,0xd8); //_MM_SHUFFLE(0,2,1,3));
+    xmm0 = simde_mm_shufflelo_epi16(xmm0,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
+    xmm0 = simde_mm_shufflehi_epi16(xmm0,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
+    xmm0 = simde_mm_shuffle_epi32(xmm0,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
+    xmm1 = simde_mm_shufflelo_epi16(xmm1,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
+    xmm1 = simde_mm_shufflehi_epi16(xmm1,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
+    xmm1 = simde_mm_shuffle_epi32(xmm1,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
     //xmm0 = [Re(0,1) Re(2,3) Im(0,1) Im(2,3)]
     //xmm1 = [Re(4,5) Re(6,7) Im(4,5) Im(6,7)]
-    simde__m128i y0r = _mm_unpacklo_epi64(xmm0, xmm1); // = [y0r(1),y0r(2),y0r(3),y0r(4)]
-    simde__m128i y0i = _mm_unpackhi_epi64(xmm0, xmm1);
+    simde__m128i y0r = simde_mm_unpacklo_epi64(xmm0, xmm1); // = [y0r(1),y0r(2),y0r(3),y0r(4)]
+    simde__m128i y0i = simde_mm_unpackhi_epi64(xmm0, xmm1);
 
     // Rearrange desired channel magnitudes
     xmm2 = ch_mag_128i[i]; // = [|h|^2(1),|h|^2(1),|h|^2(2),|h|^2(2)]*(2/sqrt(10))
     xmm3 = ch_mag_128i[i+1]; // = [|h|^2(3),|h|^2(3),|h|^2(4),|h|^2(4)]*(2/sqrt(10))
-    xmm2 = _mm_shufflelo_epi16(xmm2,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm2 = _mm_shufflehi_epi16(xmm2,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm2 = _mm_shuffle_epi32(xmm2,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm3 = _mm_shufflelo_epi16(xmm3,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm3 = _mm_shufflehi_epi16(xmm3,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm3 = _mm_shuffle_epi32(xmm3,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    ch_mag_des = _mm_unpacklo_epi64(xmm2,xmm3);
+    xmm2 = simde_mm_shufflelo_epi16(xmm2,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
+    xmm2 = simde_mm_shufflehi_epi16(xmm2,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
+    xmm2 = simde_mm_shuffle_epi32(xmm2,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
+    xmm3 = simde_mm_shufflelo_epi16(xmm3,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
+    xmm3 = simde_mm_shufflehi_epi16(xmm3,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
+    xmm3 = simde_mm_shuffle_epi32(xmm3,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
+    ch_mag_des = simde_mm_unpacklo_epi64(xmm2,xmm3);
 
     // Rearrange interfering channel magnitudes
     xmm2 = ch_mag_128i_i[i];
     xmm3 = ch_mag_128i_i[i+1];
-    xmm2 = _mm_shufflelo_epi16(xmm2,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm2 = _mm_shufflehi_epi16(xmm2,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm2 = _mm_shuffle_epi32(xmm2,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm3 = _mm_shufflelo_epi16(xmm3,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm3 = _mm_shufflehi_epi16(xmm3,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm3 = _mm_shuffle_epi32(xmm3,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    ch_mag_int  = _mm_unpacklo_epi64(xmm2,xmm3);
-
-    y0r_one_over_sqrt_21   = _mm_mulhi_epi16(y0r, ONE_OVER_SQRT_42);
-    y0r_three_over_sqrt_21 = _mm_mulhi_epi16(y0r, THREE_OVER_SQRT_42);
-    y0r_five_over_sqrt_21  = _mm_mulhi_epi16(y0r, FIVE_OVER_SQRT_42);
-    y0r_five_over_sqrt_21  = _mm_slli_epi16(y0r_five_over_sqrt_21, 1);
-    y0r_seven_over_sqrt_21 = _mm_mulhi_epi16(y0r, SEVEN_OVER_SQRT_42);
-    y0r_seven_over_sqrt_21 = _mm_slli_epi16(y0r_seven_over_sqrt_21, 2); // Q2.14
-
-    y0i_one_over_sqrt_21   = _mm_mulhi_epi16(y0i, ONE_OVER_SQRT_42);
-    y0i_three_over_sqrt_21 = _mm_mulhi_epi16(y0i, THREE_OVER_SQRT_42);
-    y0i_five_over_sqrt_21  = _mm_mulhi_epi16(y0i, FIVE_OVER_SQRT_42);
-    y0i_five_over_sqrt_21  = _mm_slli_epi16(y0i_five_over_sqrt_21, 1);
-    y0i_seven_over_sqrt_21 = _mm_mulhi_epi16(y0i, SEVEN_OVER_SQRT_42);
-    y0i_seven_over_sqrt_21 = _mm_slli_epi16(y0i_seven_over_sqrt_21, 2); // Q2.14
-
-    simde__m128i y0_p_7_1 = _mm_adds_epi16(y0r_seven_over_sqrt_21, y0i_one_over_sqrt_21);
-    simde__m128i y0_p_7_3 = _mm_adds_epi16(y0r_seven_over_sqrt_21, y0i_three_over_sqrt_21);
-    simde__m128i y0_p_7_5 = _mm_adds_epi16(y0r_seven_over_sqrt_21, y0i_five_over_sqrt_21);
-    simde__m128i y0_p_7_7 = _mm_adds_epi16(y0r_seven_over_sqrt_21, y0i_seven_over_sqrt_21);
-    simde__m128i y0_p_5_1 = _mm_adds_epi16(y0r_five_over_sqrt_21, y0i_one_over_sqrt_21);
-    simde__m128i y0_p_5_3 = _mm_adds_epi16(y0r_five_over_sqrt_21, y0i_three_over_sqrt_21);
-    simde__m128i y0_p_5_5 = _mm_adds_epi16(y0r_five_over_sqrt_21, y0i_five_over_sqrt_21);
-    simde__m128i y0_p_5_7 = _mm_adds_epi16(y0r_five_over_sqrt_21, y0i_seven_over_sqrt_21);
-    simde__m128i y0_p_3_1 = _mm_adds_epi16(y0r_three_over_sqrt_21, y0i_one_over_sqrt_21);
-    simde__m128i y0_p_3_3 = _mm_adds_epi16(y0r_three_over_sqrt_21, y0i_three_over_sqrt_21);
-    simde__m128i y0_p_3_5 = _mm_adds_epi16(y0r_three_over_sqrt_21, y0i_five_over_sqrt_21);
-    simde__m128i y0_p_3_7 = _mm_adds_epi16(y0r_three_over_sqrt_21, y0i_seven_over_sqrt_21);
-    simde__m128i y0_p_1_1 = _mm_adds_epi16(y0r_one_over_sqrt_21, y0i_one_over_sqrt_21);
-    simde__m128i y0_p_1_3 = _mm_adds_epi16(y0r_one_over_sqrt_21, y0i_three_over_sqrt_21);
-    simde__m128i y0_p_1_5 = _mm_adds_epi16(y0r_one_over_sqrt_21, y0i_five_over_sqrt_21);
-    simde__m128i y0_p_1_7 = _mm_adds_epi16(y0r_one_over_sqrt_21, y0i_seven_over_sqrt_21);
-
-    simde__m128i y0_m_1_1 = _mm_subs_epi16(y0r_one_over_sqrt_21, y0i_one_over_sqrt_21);
-    simde__m128i y0_m_1_3 = _mm_subs_epi16(y0r_one_over_sqrt_21, y0i_three_over_sqrt_21);
-    simde__m128i y0_m_1_5 = _mm_subs_epi16(y0r_one_over_sqrt_21, y0i_five_over_sqrt_21);
-    simde__m128i y0_m_1_7 = _mm_subs_epi16(y0r_one_over_sqrt_21, y0i_seven_over_sqrt_21);
-    simde__m128i y0_m_3_1 = _mm_subs_epi16(y0r_three_over_sqrt_21, y0i_one_over_sqrt_21);
-    simde__m128i y0_m_3_3 = _mm_subs_epi16(y0r_three_over_sqrt_21, y0i_three_over_sqrt_21);
-    simde__m128i y0_m_3_5 = _mm_subs_epi16(y0r_three_over_sqrt_21, y0i_five_over_sqrt_21);
-    simde__m128i y0_m_3_7 = _mm_subs_epi16(y0r_three_over_sqrt_21, y0i_seven_over_sqrt_21);
-    simde__m128i y0_m_5_1 = _mm_subs_epi16(y0r_five_over_sqrt_21, y0i_one_over_sqrt_21);
-    simde__m128i y0_m_5_3 = _mm_subs_epi16(y0r_five_over_sqrt_21, y0i_three_over_sqrt_21);
-    simde__m128i y0_m_5_5 = _mm_subs_epi16(y0r_five_over_sqrt_21, y0i_five_over_sqrt_21);
-    simde__m128i y0_m_5_7 = _mm_subs_epi16(y0r_five_over_sqrt_21, y0i_seven_over_sqrt_21);
-    simde__m128i y0_m_7_1 = _mm_subs_epi16(y0r_seven_over_sqrt_21, y0i_one_over_sqrt_21);
-    simde__m128i y0_m_7_3 = _mm_subs_epi16(y0r_seven_over_sqrt_21, y0i_three_over_sqrt_21);
-    simde__m128i y0_m_7_5 = _mm_subs_epi16(y0r_seven_over_sqrt_21, y0i_five_over_sqrt_21);
-    simde__m128i y0_m_7_7 = _mm_subs_epi16(y0r_seven_over_sqrt_21, y0i_seven_over_sqrt_21);
+    xmm2 = simde_mm_shufflelo_epi16(xmm2,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
+    xmm2 = simde_mm_shufflehi_epi16(xmm2,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
+    xmm2 = simde_mm_shuffle_epi32(xmm2,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
+    xmm3 = simde_mm_shufflelo_epi16(xmm3,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
+    xmm3 = simde_mm_shufflehi_epi16(xmm3,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
+    xmm3 = simde_mm_shuffle_epi32(xmm3,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
+    ch_mag_int  = simde_mm_unpacklo_epi64(xmm2,xmm3);
+
+    y0r_one_over_sqrt_21   = simde_mm_mulhi_epi16(y0r, ONE_OVER_SQRT_42);
+    y0r_three_over_sqrt_21 = simde_mm_mulhi_epi16(y0r, THREE_OVER_SQRT_42);
+    y0r_five_over_sqrt_21  = simde_mm_mulhi_epi16(y0r, FIVE_OVER_SQRT_42);
+    y0r_five_over_sqrt_21  = simde_mm_slli_epi16(y0r_five_over_sqrt_21, 1);
+    y0r_seven_over_sqrt_21 = simde_mm_mulhi_epi16(y0r, SEVEN_OVER_SQRT_42);
+    y0r_seven_over_sqrt_21 = simde_mm_slli_epi16(y0r_seven_over_sqrt_21, 2); // Q2.14
+
+    y0i_one_over_sqrt_21   = simde_mm_mulhi_epi16(y0i, ONE_OVER_SQRT_42);
+    y0i_three_over_sqrt_21 = simde_mm_mulhi_epi16(y0i, THREE_OVER_SQRT_42);
+    y0i_five_over_sqrt_21  = simde_mm_mulhi_epi16(y0i, FIVE_OVER_SQRT_42);
+    y0i_five_over_sqrt_21  = simde_mm_slli_epi16(y0i_five_over_sqrt_21, 1);
+    y0i_seven_over_sqrt_21 = simde_mm_mulhi_epi16(y0i, SEVEN_OVER_SQRT_42);
+    y0i_seven_over_sqrt_21 = simde_mm_slli_epi16(y0i_seven_over_sqrt_21, 2); // Q2.14
+
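+    // candidate inner products for every 64-QAM amplitude pair (x,y) in {1,3,5,7}^2:
+    // y0_p_x_y = x*y0r + y*y0i and y0_m_x_y = x*y0r - y*y0i (in normalized units)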
+    simde__m128i y0_p_7_1 = simde_mm_adds_epi16(y0r_seven_over_sqrt_21, y0i_one_over_sqrt_21);
+    simde__m128i y0_p_7_3 = simde_mm_adds_epi16(y0r_seven_over_sqrt_21, y0i_three_over_sqrt_21);
+    simde__m128i y0_p_7_5 = simde_mm_adds_epi16(y0r_seven_over_sqrt_21, y0i_five_over_sqrt_21);
+    simde__m128i y0_p_7_7 = simde_mm_adds_epi16(y0r_seven_over_sqrt_21, y0i_seven_over_sqrt_21);
+    simde__m128i y0_p_5_1 = simde_mm_adds_epi16(y0r_five_over_sqrt_21, y0i_one_over_sqrt_21);
+    simde__m128i y0_p_5_3 = simde_mm_adds_epi16(y0r_five_over_sqrt_21, y0i_three_over_sqrt_21);
+    simde__m128i y0_p_5_5 = simde_mm_adds_epi16(y0r_five_over_sqrt_21, y0i_five_over_sqrt_21);
+    simde__m128i y0_p_5_7 = simde_mm_adds_epi16(y0r_five_over_sqrt_21, y0i_seven_over_sqrt_21);
+    simde__m128i y0_p_3_1 = simde_mm_adds_epi16(y0r_three_over_sqrt_21, y0i_one_over_sqrt_21);
+    simde__m128i y0_p_3_3 = simde_mm_adds_epi16(y0r_three_over_sqrt_21, y0i_three_over_sqrt_21);
+    simde__m128i y0_p_3_5 = simde_mm_adds_epi16(y0r_three_over_sqrt_21, y0i_five_over_sqrt_21);
+    simde__m128i y0_p_3_7 = simde_mm_adds_epi16(y0r_three_over_sqrt_21, y0i_seven_over_sqrt_21);
+    simde__m128i y0_p_1_1 = simde_mm_adds_epi16(y0r_one_over_sqrt_21, y0i_one_over_sqrt_21);
+    simde__m128i y0_p_1_3 = simde_mm_adds_epi16(y0r_one_over_sqrt_21, y0i_three_over_sqrt_21);
+    simde__m128i y0_p_1_5 = simde_mm_adds_epi16(y0r_one_over_sqrt_21, y0i_five_over_sqrt_21);
+    simde__m128i y0_p_1_7 = simde_mm_adds_epi16(y0r_one_over_sqrt_21, y0i_seven_over_sqrt_21);
+
+    simde__m128i y0_m_1_1 = simde_mm_subs_epi16(y0r_one_over_sqrt_21, y0i_one_over_sqrt_21);
+    simde__m128i y0_m_1_3 = simde_mm_subs_epi16(y0r_one_over_sqrt_21, y0i_three_over_sqrt_21);
+    simde__m128i y0_m_1_5 = simde_mm_subs_epi16(y0r_one_over_sqrt_21, y0i_five_over_sqrt_21);
+    simde__m128i y0_m_1_7 = simde_mm_subs_epi16(y0r_one_over_sqrt_21, y0i_seven_over_sqrt_21);
+    simde__m128i y0_m_3_1 = simde_mm_subs_epi16(y0r_three_over_sqrt_21, y0i_one_over_sqrt_21);
+    simde__m128i y0_m_3_3 = simde_mm_subs_epi16(y0r_three_over_sqrt_21, y0i_three_over_sqrt_21);
+    simde__m128i y0_m_3_5 = simde_mm_subs_epi16(y0r_three_over_sqrt_21, y0i_five_over_sqrt_21);
+    simde__m128i y0_m_3_7 = simde_mm_subs_epi16(y0r_three_over_sqrt_21, y0i_seven_over_sqrt_21);
+    simde__m128i y0_m_5_1 = simde_mm_subs_epi16(y0r_five_over_sqrt_21, y0i_one_over_sqrt_21);
+    simde__m128i y0_m_5_3 = simde_mm_subs_epi16(y0r_five_over_sqrt_21, y0i_three_over_sqrt_21);
+    simde__m128i y0_m_5_5 = simde_mm_subs_epi16(y0r_five_over_sqrt_21, y0i_five_over_sqrt_21);
+    simde__m128i y0_m_5_7 = simde_mm_subs_epi16(y0r_five_over_sqrt_21, y0i_seven_over_sqrt_21);
+    simde__m128i y0_m_7_1 = simde_mm_subs_epi16(y0r_seven_over_sqrt_21, y0i_one_over_sqrt_21);
+    simde__m128i y0_m_7_3 = simde_mm_subs_epi16(y0r_seven_over_sqrt_21, y0i_three_over_sqrt_21);
+    simde__m128i y0_m_7_5 = simde_mm_subs_epi16(y0r_seven_over_sqrt_21, y0i_five_over_sqrt_21);
+    simde__m128i y0_m_7_7 = simde_mm_subs_epi16(y0r_seven_over_sqrt_21, y0i_seven_over_sqrt_21);
 
     // Detection of interference term
-    ch_mag_int_with_sigma2       = _mm_srai_epi16(ch_mag_int, 1); // *2
+    ch_mag_int_with_sigma2       = simde_mm_srai_epi16(ch_mag_int, 1); // *2
     two_ch_mag_int_with_sigma2   = ch_mag_int; // *4
-    three_ch_mag_int_with_sigma2 = _mm_adds_epi16(ch_mag_int_with_sigma2, two_ch_mag_int_with_sigma2); // *6
+    three_ch_mag_int_with_sigma2 = simde_mm_adds_epi16(ch_mag_int_with_sigma2, two_ch_mag_int_with_sigma2); // *6
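+    // the *2/*4/*6 labels are relative to ch_mag_int>>2: the srai by 1 gives the 2x
+    // term, ch_mag_int itself the 4x term, and their saturated sum the 6x term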
     simde__m128i tmp_result, tmp_result2, tmp_result3, tmp_result4;
     interference_abs_64qam_epi16(psi_r_p7_p7, ch_mag_int_with_sigma2, two_ch_mag_int_with_sigma2, three_ch_mag_int_with_sigma2, a_r_p7_p7, ONE_OVER_SQRT_2_42, THREE_OVER_SQRT_2_42, FIVE_OVER_SQRT_2_42,
                                  SEVEN_OVER_SQRT_2_42);
@@ -7390,134 +7047,134 @@ void qam64_qam64(short *stream0_in,
     prodsum_psi_a_epi16(psi_r_m7_m7, a_r_m7_m7, psi_i_m7_m7, a_i_m7_m7, psi_a_m7_m7);
 
     // Multiply by sqrt(2)
-    psi_a_p7_p7 = _mm_mulhi_epi16(psi_a_p7_p7, ONE_OVER_SQRT_2);
-    psi_a_p7_p7 = _mm_slli_epi16(psi_a_p7_p7, 2);
-    psi_a_p7_p5 = _mm_mulhi_epi16(psi_a_p7_p5, ONE_OVER_SQRT_2);
-    psi_a_p7_p5 = _mm_slli_epi16(psi_a_p7_p5, 2);
-    psi_a_p7_p3 = _mm_mulhi_epi16(psi_a_p7_p3, ONE_OVER_SQRT_2);
-    psi_a_p7_p3 = _mm_slli_epi16(psi_a_p7_p3, 2);
-    psi_a_p7_p1 = _mm_mulhi_epi16(psi_a_p7_p1, ONE_OVER_SQRT_2);
-    psi_a_p7_p1 = _mm_slli_epi16(psi_a_p7_p1, 2);
-    psi_a_p7_m1 = _mm_mulhi_epi16(psi_a_p7_m1, ONE_OVER_SQRT_2);
-    psi_a_p7_m1 = _mm_slli_epi16(psi_a_p7_m1, 2);
-    psi_a_p7_m3 = _mm_mulhi_epi16(psi_a_p7_m3, ONE_OVER_SQRT_2);
-    psi_a_p7_m3 = _mm_slli_epi16(psi_a_p7_m3, 2);
-    psi_a_p7_m5 = _mm_mulhi_epi16(psi_a_p7_m5, ONE_OVER_SQRT_2);
-    psi_a_p7_m5 = _mm_slli_epi16(psi_a_p7_m5, 2);
-    psi_a_p7_m7 = _mm_mulhi_epi16(psi_a_p7_m7, ONE_OVER_SQRT_2);
-    psi_a_p7_m7 = _mm_slli_epi16(psi_a_p7_m7, 2);
-    psi_a_p5_p7 = _mm_mulhi_epi16(psi_a_p5_p7, ONE_OVER_SQRT_2);
-    psi_a_p5_p7 = _mm_slli_epi16(psi_a_p5_p7, 2);
-    psi_a_p5_p5 = _mm_mulhi_epi16(psi_a_p5_p5, ONE_OVER_SQRT_2);
-    psi_a_p5_p5 = _mm_slli_epi16(psi_a_p5_p5, 2);
-    psi_a_p5_p3 = _mm_mulhi_epi16(psi_a_p5_p3, ONE_OVER_SQRT_2);
-    psi_a_p5_p3 = _mm_slli_epi16(psi_a_p5_p3, 2);
-    psi_a_p5_p1 = _mm_mulhi_epi16(psi_a_p5_p1, ONE_OVER_SQRT_2);
-    psi_a_p5_p1 = _mm_slli_epi16(psi_a_p5_p1, 2);
-    psi_a_p5_m1 = _mm_mulhi_epi16(psi_a_p5_m1, ONE_OVER_SQRT_2);
-    psi_a_p5_m1 = _mm_slli_epi16(psi_a_p5_m1, 2);
-    psi_a_p5_m3 = _mm_mulhi_epi16(psi_a_p5_m3, ONE_OVER_SQRT_2);
-    psi_a_p5_m3 = _mm_slli_epi16(psi_a_p5_m3, 2);
-    psi_a_p5_m5 = _mm_mulhi_epi16(psi_a_p5_m5, ONE_OVER_SQRT_2);
-    psi_a_p5_m5 = _mm_slli_epi16(psi_a_p5_m5, 2);
-    psi_a_p5_m7 = _mm_mulhi_epi16(psi_a_p5_m7, ONE_OVER_SQRT_2);
-    psi_a_p5_m7 = _mm_slli_epi16(psi_a_p5_m7, 2);
-    psi_a_p3_p7 = _mm_mulhi_epi16(psi_a_p3_p7, ONE_OVER_SQRT_2);
-    psi_a_p3_p7 = _mm_slli_epi16(psi_a_p3_p7, 2);
-    psi_a_p3_p5 = _mm_mulhi_epi16(psi_a_p3_p5, ONE_OVER_SQRT_2);
-    psi_a_p3_p5 = _mm_slli_epi16(psi_a_p3_p5, 2);
-    psi_a_p3_p3 = _mm_mulhi_epi16(psi_a_p3_p3, ONE_OVER_SQRT_2);
-    psi_a_p3_p3 = _mm_slli_epi16(psi_a_p3_p3, 2);
-    psi_a_p3_p1 = _mm_mulhi_epi16(psi_a_p3_p1, ONE_OVER_SQRT_2);
-    psi_a_p3_p1 = _mm_slli_epi16(psi_a_p3_p1, 2);
-    psi_a_p3_m1 = _mm_mulhi_epi16(psi_a_p3_m1, ONE_OVER_SQRT_2);
-    psi_a_p3_m1 = _mm_slli_epi16(psi_a_p3_m1, 2);
-    psi_a_p3_m3 = _mm_mulhi_epi16(psi_a_p3_m3, ONE_OVER_SQRT_2);
-    psi_a_p3_m3 = _mm_slli_epi16(psi_a_p3_m3, 2);
-    psi_a_p3_m5 = _mm_mulhi_epi16(psi_a_p3_m5, ONE_OVER_SQRT_2);
-    psi_a_p3_m5 = _mm_slli_epi16(psi_a_p3_m5, 2);
-    psi_a_p3_m7 = _mm_mulhi_epi16(psi_a_p3_m7, ONE_OVER_SQRT_2);
-    psi_a_p3_m7 = _mm_slli_epi16(psi_a_p3_m7, 2);
-    psi_a_p1_p7 = _mm_mulhi_epi16(psi_a_p1_p7, ONE_OVER_SQRT_2);
-    psi_a_p1_p7 = _mm_slli_epi16(psi_a_p1_p7, 2);
-    psi_a_p1_p5 = _mm_mulhi_epi16(psi_a_p1_p5, ONE_OVER_SQRT_2);
-    psi_a_p1_p5 = _mm_slli_epi16(psi_a_p1_p5, 2);
-    psi_a_p1_p3 = _mm_mulhi_epi16(psi_a_p1_p3, ONE_OVER_SQRT_2);
-    psi_a_p1_p3 = _mm_slli_epi16(psi_a_p1_p3, 2);
-    psi_a_p1_p1 = _mm_mulhi_epi16(psi_a_p1_p1, ONE_OVER_SQRT_2);
-    psi_a_p1_p1 = _mm_slli_epi16(psi_a_p1_p1, 2);
-    psi_a_p1_m1 = _mm_mulhi_epi16(psi_a_p1_m1, ONE_OVER_SQRT_2);
-    psi_a_p1_m1 = _mm_slli_epi16(psi_a_p1_m1, 2);
-    psi_a_p1_m3 = _mm_mulhi_epi16(psi_a_p1_m3, ONE_OVER_SQRT_2);
-    psi_a_p1_m3 = _mm_slli_epi16(psi_a_p1_m3, 2);
-    psi_a_p1_m5 = _mm_mulhi_epi16(psi_a_p1_m5, ONE_OVER_SQRT_2);
-    psi_a_p1_m5 = _mm_slli_epi16(psi_a_p1_m5, 2);
-    psi_a_p1_m7 = _mm_mulhi_epi16(psi_a_p1_m7, ONE_OVER_SQRT_2);
-    psi_a_p1_m7 = _mm_slli_epi16(psi_a_p1_m7, 2);
-    psi_a_m1_p7 = _mm_mulhi_epi16(psi_a_m1_p7, ONE_OVER_SQRT_2);
-    psi_a_m1_p7 = _mm_slli_epi16(psi_a_m1_p7, 2);
-    psi_a_m1_p5 = _mm_mulhi_epi16(psi_a_m1_p5, ONE_OVER_SQRT_2);
-    psi_a_m1_p5 = _mm_slli_epi16(psi_a_m1_p5, 2);
-    psi_a_m1_p3 = _mm_mulhi_epi16(psi_a_m1_p3, ONE_OVER_SQRT_2);
-    psi_a_m1_p3 = _mm_slli_epi16(psi_a_m1_p3, 2);
-    psi_a_m1_p1 = _mm_mulhi_epi16(psi_a_m1_p1, ONE_OVER_SQRT_2);
-    psi_a_m1_p1 = _mm_slli_epi16(psi_a_m1_p1, 2);
-    psi_a_m1_m1 = _mm_mulhi_epi16(psi_a_m1_m1, ONE_OVER_SQRT_2);
-    psi_a_m1_m1 = _mm_slli_epi16(psi_a_m1_m1, 2);
-    psi_a_m1_m3 = _mm_mulhi_epi16(psi_a_m1_m3, ONE_OVER_SQRT_2);
-    psi_a_m1_m3 = _mm_slli_epi16(psi_a_m1_m3, 2);
-    psi_a_m1_m5 = _mm_mulhi_epi16(psi_a_m1_m5, ONE_OVER_SQRT_2);
-    psi_a_m1_m5 = _mm_slli_epi16(psi_a_m1_m5, 2);
-    psi_a_m1_m7 = _mm_mulhi_epi16(psi_a_m1_m7, ONE_OVER_SQRT_2);
-    psi_a_m1_m7 = _mm_slli_epi16(psi_a_m1_m7, 2);
-    psi_a_m3_p7 = _mm_mulhi_epi16(psi_a_m3_p7, ONE_OVER_SQRT_2);
-    psi_a_m3_p7 = _mm_slli_epi16(psi_a_m3_p7, 2);
-    psi_a_m3_p5 = _mm_mulhi_epi16(psi_a_m3_p5, ONE_OVER_SQRT_2);
-    psi_a_m3_p5 = _mm_slli_epi16(psi_a_m3_p5, 2);
-    psi_a_m3_p3 = _mm_mulhi_epi16(psi_a_m3_p3, ONE_OVER_SQRT_2);
-    psi_a_m3_p3 = _mm_slli_epi16(psi_a_m3_p3, 2);
-    psi_a_m3_p1 = _mm_mulhi_epi16(psi_a_m3_p1, ONE_OVER_SQRT_2);
-    psi_a_m3_p1 = _mm_slli_epi16(psi_a_m3_p1, 2);
-    psi_a_m3_m1 = _mm_mulhi_epi16(psi_a_m3_m1, ONE_OVER_SQRT_2);
-    psi_a_m3_m1 = _mm_slli_epi16(psi_a_m3_m1, 2);
-    psi_a_m3_m3 = _mm_mulhi_epi16(psi_a_m3_m3, ONE_OVER_SQRT_2);
-    psi_a_m3_m3 = _mm_slli_epi16(psi_a_m3_m3, 2);
-    psi_a_m3_m5 = _mm_mulhi_epi16(psi_a_m3_m5, ONE_OVER_SQRT_2);
-    psi_a_m3_m5 = _mm_slli_epi16(psi_a_m3_m5, 2);
-    psi_a_m3_m7 = _mm_mulhi_epi16(psi_a_m3_m7, ONE_OVER_SQRT_2);
-    psi_a_m3_m7 = _mm_slli_epi16(psi_a_m3_m7, 2);
-    psi_a_m5_p7 = _mm_mulhi_epi16(psi_a_m5_p7, ONE_OVER_SQRT_2);
-    psi_a_m5_p7 = _mm_slli_epi16(psi_a_m5_p7, 2);
-    psi_a_m5_p5 = _mm_mulhi_epi16(psi_a_m5_p5, ONE_OVER_SQRT_2);
-    psi_a_m5_p5 = _mm_slli_epi16(psi_a_m5_p5, 2);
-    psi_a_m5_p3 = _mm_mulhi_epi16(psi_a_m5_p3, ONE_OVER_SQRT_2);
-    psi_a_m5_p3 = _mm_slli_epi16(psi_a_m5_p3, 2);
-    psi_a_m5_p1 = _mm_mulhi_epi16(psi_a_m5_p1, ONE_OVER_SQRT_2);
-    psi_a_m5_p1 = _mm_slli_epi16(psi_a_m5_p1, 2);
-    psi_a_m5_m1 = _mm_mulhi_epi16(psi_a_m5_m1, ONE_OVER_SQRT_2);
-    psi_a_m5_m1 = _mm_slli_epi16(psi_a_m5_m1, 2);
-    psi_a_m5_m3 = _mm_mulhi_epi16(psi_a_m5_m3, ONE_OVER_SQRT_2);
-    psi_a_m5_m3 = _mm_slli_epi16(psi_a_m5_m3, 2);
-    psi_a_m5_m5 = _mm_mulhi_epi16(psi_a_m5_m5, ONE_OVER_SQRT_2);
-    psi_a_m5_m5 = _mm_slli_epi16(psi_a_m5_m5, 2);
-    psi_a_m5_m7 = _mm_mulhi_epi16(psi_a_m5_m7, ONE_OVER_SQRT_2);
-    psi_a_m5_m7 = _mm_slli_epi16(psi_a_m5_m7, 2);
-    psi_a_m7_p7 = _mm_mulhi_epi16(psi_a_m7_p7, ONE_OVER_SQRT_2);
-    psi_a_m7_p7 = _mm_slli_epi16(psi_a_m7_p7, 2);
-    psi_a_m7_p5 = _mm_mulhi_epi16(psi_a_m7_p5, ONE_OVER_SQRT_2);
-    psi_a_m7_p5 = _mm_slli_epi16(psi_a_m7_p5, 2);
-    psi_a_m7_p3 = _mm_mulhi_epi16(psi_a_m7_p3, ONE_OVER_SQRT_2);
-    psi_a_m7_p3 = _mm_slli_epi16(psi_a_m7_p3, 2);
-    psi_a_m7_p1 = _mm_mulhi_epi16(psi_a_m7_p1, ONE_OVER_SQRT_2);
-    psi_a_m7_p1 = _mm_slli_epi16(psi_a_m7_p1, 2);
-    psi_a_m7_m1 = _mm_mulhi_epi16(psi_a_m7_m1, ONE_OVER_SQRT_2);
-    psi_a_m7_m1 = _mm_slli_epi16(psi_a_m7_m1, 2);
-    psi_a_m7_m3 = _mm_mulhi_epi16(psi_a_m7_m3, ONE_OVER_SQRT_2);
-    psi_a_m7_m3 = _mm_slli_epi16(psi_a_m7_m3, 2);
-    psi_a_m7_m5 = _mm_mulhi_epi16(psi_a_m7_m5, ONE_OVER_SQRT_2);
-    psi_a_m7_m5 = _mm_slli_epi16(psi_a_m7_m5, 2);
-    psi_a_m7_m7 = _mm_mulhi_epi16(psi_a_m7_m7, ONE_OVER_SQRT_2);
-    psi_a_m7_m7 = _mm_slli_epi16(psi_a_m7_m7, 2);
+    psi_a_p7_p7 = simde_mm_mulhi_epi16(psi_a_p7_p7, ONE_OVER_SQRT_2);
+    psi_a_p7_p7 = simde_mm_slli_epi16(psi_a_p7_p7, 2);
+    psi_a_p7_p5 = simde_mm_mulhi_epi16(psi_a_p7_p5, ONE_OVER_SQRT_2);
+    psi_a_p7_p5 = simde_mm_slli_epi16(psi_a_p7_p5, 2);
+    psi_a_p7_p3 = simde_mm_mulhi_epi16(psi_a_p7_p3, ONE_OVER_SQRT_2);
+    psi_a_p7_p3 = simde_mm_slli_epi16(psi_a_p7_p3, 2);
+    psi_a_p7_p1 = simde_mm_mulhi_epi16(psi_a_p7_p1, ONE_OVER_SQRT_2);
+    psi_a_p7_p1 = simde_mm_slli_epi16(psi_a_p7_p1, 2);
+    psi_a_p7_m1 = simde_mm_mulhi_epi16(psi_a_p7_m1, ONE_OVER_SQRT_2);
+    psi_a_p7_m1 = simde_mm_slli_epi16(psi_a_p7_m1, 2);
+    psi_a_p7_m3 = simde_mm_mulhi_epi16(psi_a_p7_m3, ONE_OVER_SQRT_2);
+    psi_a_p7_m3 = simde_mm_slli_epi16(psi_a_p7_m3, 2);
+    psi_a_p7_m5 = simde_mm_mulhi_epi16(psi_a_p7_m5, ONE_OVER_SQRT_2);
+    psi_a_p7_m5 = simde_mm_slli_epi16(psi_a_p7_m5, 2);
+    psi_a_p7_m7 = simde_mm_mulhi_epi16(psi_a_p7_m7, ONE_OVER_SQRT_2);
+    psi_a_p7_m7 = simde_mm_slli_epi16(psi_a_p7_m7, 2);
+    psi_a_p5_p7 = simde_mm_mulhi_epi16(psi_a_p5_p7, ONE_OVER_SQRT_2);
+    psi_a_p5_p7 = simde_mm_slli_epi16(psi_a_p5_p7, 2);
+    psi_a_p5_p5 = simde_mm_mulhi_epi16(psi_a_p5_p5, ONE_OVER_SQRT_2);
+    psi_a_p5_p5 = simde_mm_slli_epi16(psi_a_p5_p5, 2);
+    psi_a_p5_p3 = simde_mm_mulhi_epi16(psi_a_p5_p3, ONE_OVER_SQRT_2);
+    psi_a_p5_p3 = simde_mm_slli_epi16(psi_a_p5_p3, 2);
+    psi_a_p5_p1 = simde_mm_mulhi_epi16(psi_a_p5_p1, ONE_OVER_SQRT_2);
+    psi_a_p5_p1 = simde_mm_slli_epi16(psi_a_p5_p1, 2);
+    psi_a_p5_m1 = simde_mm_mulhi_epi16(psi_a_p5_m1, ONE_OVER_SQRT_2);
+    psi_a_p5_m1 = simde_mm_slli_epi16(psi_a_p5_m1, 2);
+    psi_a_p5_m3 = simde_mm_mulhi_epi16(psi_a_p5_m3, ONE_OVER_SQRT_2);
+    psi_a_p5_m3 = simde_mm_slli_epi16(psi_a_p5_m3, 2);
+    psi_a_p5_m5 = simde_mm_mulhi_epi16(psi_a_p5_m5, ONE_OVER_SQRT_2);
+    psi_a_p5_m5 = simde_mm_slli_epi16(psi_a_p5_m5, 2);
+    psi_a_p5_m7 = simde_mm_mulhi_epi16(psi_a_p5_m7, ONE_OVER_SQRT_2);
+    psi_a_p5_m7 = simde_mm_slli_epi16(psi_a_p5_m7, 2);
+    psi_a_p3_p7 = simde_mm_mulhi_epi16(psi_a_p3_p7, ONE_OVER_SQRT_2);
+    psi_a_p3_p7 = simde_mm_slli_epi16(psi_a_p3_p7, 2);
+    psi_a_p3_p5 = simde_mm_mulhi_epi16(psi_a_p3_p5, ONE_OVER_SQRT_2);
+    psi_a_p3_p5 = simde_mm_slli_epi16(psi_a_p3_p5, 2);
+    psi_a_p3_p3 = simde_mm_mulhi_epi16(psi_a_p3_p3, ONE_OVER_SQRT_2);
+    psi_a_p3_p3 = simde_mm_slli_epi16(psi_a_p3_p3, 2);
+    psi_a_p3_p1 = simde_mm_mulhi_epi16(psi_a_p3_p1, ONE_OVER_SQRT_2);
+    psi_a_p3_p1 = simde_mm_slli_epi16(psi_a_p3_p1, 2);
+    psi_a_p3_m1 = simde_mm_mulhi_epi16(psi_a_p3_m1, ONE_OVER_SQRT_2);
+    psi_a_p3_m1 = simde_mm_slli_epi16(psi_a_p3_m1, 2);
+    psi_a_p3_m3 = simde_mm_mulhi_epi16(psi_a_p3_m3, ONE_OVER_SQRT_2);
+    psi_a_p3_m3 = simde_mm_slli_epi16(psi_a_p3_m3, 2);
+    psi_a_p3_m5 = simde_mm_mulhi_epi16(psi_a_p3_m5, ONE_OVER_SQRT_2);
+    psi_a_p3_m5 = simde_mm_slli_epi16(psi_a_p3_m5, 2);
+    psi_a_p3_m7 = simde_mm_mulhi_epi16(psi_a_p3_m7, ONE_OVER_SQRT_2);
+    psi_a_p3_m7 = simde_mm_slli_epi16(psi_a_p3_m7, 2);
+    psi_a_p1_p7 = simde_mm_mulhi_epi16(psi_a_p1_p7, ONE_OVER_SQRT_2);
+    psi_a_p1_p7 = simde_mm_slli_epi16(psi_a_p1_p7, 2);
+    psi_a_p1_p5 = simde_mm_mulhi_epi16(psi_a_p1_p5, ONE_OVER_SQRT_2);
+    psi_a_p1_p5 = simde_mm_slli_epi16(psi_a_p1_p5, 2);
+    psi_a_p1_p3 = simde_mm_mulhi_epi16(psi_a_p1_p3, ONE_OVER_SQRT_2);
+    psi_a_p1_p3 = simde_mm_slli_epi16(psi_a_p1_p3, 2);
+    psi_a_p1_p1 = simde_mm_mulhi_epi16(psi_a_p1_p1, ONE_OVER_SQRT_2);
+    psi_a_p1_p1 = simde_mm_slli_epi16(psi_a_p1_p1, 2);
+    psi_a_p1_m1 = simde_mm_mulhi_epi16(psi_a_p1_m1, ONE_OVER_SQRT_2);
+    psi_a_p1_m1 = simde_mm_slli_epi16(psi_a_p1_m1, 2);
+    psi_a_p1_m3 = simde_mm_mulhi_epi16(psi_a_p1_m3, ONE_OVER_SQRT_2);
+    psi_a_p1_m3 = simde_mm_slli_epi16(psi_a_p1_m3, 2);
+    psi_a_p1_m5 = simde_mm_mulhi_epi16(psi_a_p1_m5, ONE_OVER_SQRT_2);
+    psi_a_p1_m5 = simde_mm_slli_epi16(psi_a_p1_m5, 2);
+    psi_a_p1_m7 = simde_mm_mulhi_epi16(psi_a_p1_m7, ONE_OVER_SQRT_2);
+    psi_a_p1_m7 = simde_mm_slli_epi16(psi_a_p1_m7, 2);
+    psi_a_m1_p7 = simde_mm_mulhi_epi16(psi_a_m1_p7, ONE_OVER_SQRT_2);
+    psi_a_m1_p7 = simde_mm_slli_epi16(psi_a_m1_p7, 2);
+    psi_a_m1_p5 = simde_mm_mulhi_epi16(psi_a_m1_p5, ONE_OVER_SQRT_2);
+    psi_a_m1_p5 = simde_mm_slli_epi16(psi_a_m1_p5, 2);
+    psi_a_m1_p3 = simde_mm_mulhi_epi16(psi_a_m1_p3, ONE_OVER_SQRT_2);
+    psi_a_m1_p3 = simde_mm_slli_epi16(psi_a_m1_p3, 2);
+    psi_a_m1_p1 = simde_mm_mulhi_epi16(psi_a_m1_p1, ONE_OVER_SQRT_2);
+    psi_a_m1_p1 = simde_mm_slli_epi16(psi_a_m1_p1, 2);
+    psi_a_m1_m1 = simde_mm_mulhi_epi16(psi_a_m1_m1, ONE_OVER_SQRT_2);
+    psi_a_m1_m1 = simde_mm_slli_epi16(psi_a_m1_m1, 2);
+    psi_a_m1_m3 = simde_mm_mulhi_epi16(psi_a_m1_m3, ONE_OVER_SQRT_2);
+    psi_a_m1_m3 = simde_mm_slli_epi16(psi_a_m1_m3, 2);
+    psi_a_m1_m5 = simde_mm_mulhi_epi16(psi_a_m1_m5, ONE_OVER_SQRT_2);
+    psi_a_m1_m5 = simde_mm_slli_epi16(psi_a_m1_m5, 2);
+    psi_a_m1_m7 = simde_mm_mulhi_epi16(psi_a_m1_m7, ONE_OVER_SQRT_2);
+    psi_a_m1_m7 = simde_mm_slli_epi16(psi_a_m1_m7, 2);
+    psi_a_m3_p7 = simde_mm_mulhi_epi16(psi_a_m3_p7, ONE_OVER_SQRT_2);
+    psi_a_m3_p7 = simde_mm_slli_epi16(psi_a_m3_p7, 2);
+    psi_a_m3_p5 = simde_mm_mulhi_epi16(psi_a_m3_p5, ONE_OVER_SQRT_2);
+    psi_a_m3_p5 = simde_mm_slli_epi16(psi_a_m3_p5, 2);
+    psi_a_m3_p3 = simde_mm_mulhi_epi16(psi_a_m3_p3, ONE_OVER_SQRT_2);
+    psi_a_m3_p3 = simde_mm_slli_epi16(psi_a_m3_p3, 2);
+    psi_a_m3_p1 = simde_mm_mulhi_epi16(psi_a_m3_p1, ONE_OVER_SQRT_2);
+    psi_a_m3_p1 = simde_mm_slli_epi16(psi_a_m3_p1, 2);
+    psi_a_m3_m1 = simde_mm_mulhi_epi16(psi_a_m3_m1, ONE_OVER_SQRT_2);
+    psi_a_m3_m1 = simde_mm_slli_epi16(psi_a_m3_m1, 2);
+    psi_a_m3_m3 = simde_mm_mulhi_epi16(psi_a_m3_m3, ONE_OVER_SQRT_2);
+    psi_a_m3_m3 = simde_mm_slli_epi16(psi_a_m3_m3, 2);
+    psi_a_m3_m5 = simde_mm_mulhi_epi16(psi_a_m3_m5, ONE_OVER_SQRT_2);
+    psi_a_m3_m5 = simde_mm_slli_epi16(psi_a_m3_m5, 2);
+    psi_a_m3_m7 = simde_mm_mulhi_epi16(psi_a_m3_m7, ONE_OVER_SQRT_2);
+    psi_a_m3_m7 = simde_mm_slli_epi16(psi_a_m3_m7, 2);
+    psi_a_m5_p7 = simde_mm_mulhi_epi16(psi_a_m5_p7, ONE_OVER_SQRT_2);
+    psi_a_m5_p7 = simde_mm_slli_epi16(psi_a_m5_p7, 2);
+    psi_a_m5_p5 = simde_mm_mulhi_epi16(psi_a_m5_p5, ONE_OVER_SQRT_2);
+    psi_a_m5_p5 = simde_mm_slli_epi16(psi_a_m5_p5, 2);
+    psi_a_m5_p3 = simde_mm_mulhi_epi16(psi_a_m5_p3, ONE_OVER_SQRT_2);
+    psi_a_m5_p3 = simde_mm_slli_epi16(psi_a_m5_p3, 2);
+    psi_a_m5_p1 = simde_mm_mulhi_epi16(psi_a_m5_p1, ONE_OVER_SQRT_2);
+    psi_a_m5_p1 = simde_mm_slli_epi16(psi_a_m5_p1, 2);
+    psi_a_m5_m1 = simde_mm_mulhi_epi16(psi_a_m5_m1, ONE_OVER_SQRT_2);
+    psi_a_m5_m1 = simde_mm_slli_epi16(psi_a_m5_m1, 2);
+    psi_a_m5_m3 = simde_mm_mulhi_epi16(psi_a_m5_m3, ONE_OVER_SQRT_2);
+    psi_a_m5_m3 = simde_mm_slli_epi16(psi_a_m5_m3, 2);
+    psi_a_m5_m5 = simde_mm_mulhi_epi16(psi_a_m5_m5, ONE_OVER_SQRT_2);
+    psi_a_m5_m5 = simde_mm_slli_epi16(psi_a_m5_m5, 2);
+    psi_a_m5_m7 = simde_mm_mulhi_epi16(psi_a_m5_m7, ONE_OVER_SQRT_2);
+    psi_a_m5_m7 = simde_mm_slli_epi16(psi_a_m5_m7, 2);
+    psi_a_m7_p7 = simde_mm_mulhi_epi16(psi_a_m7_p7, ONE_OVER_SQRT_2);
+    psi_a_m7_p7 = simde_mm_slli_epi16(psi_a_m7_p7, 2);
+    psi_a_m7_p5 = simde_mm_mulhi_epi16(psi_a_m7_p5, ONE_OVER_SQRT_2);
+    psi_a_m7_p5 = simde_mm_slli_epi16(psi_a_m7_p5, 2);
+    psi_a_m7_p3 = simde_mm_mulhi_epi16(psi_a_m7_p3, ONE_OVER_SQRT_2);
+    psi_a_m7_p3 = simde_mm_slli_epi16(psi_a_m7_p3, 2);
+    psi_a_m7_p1 = simde_mm_mulhi_epi16(psi_a_m7_p1, ONE_OVER_SQRT_2);
+    psi_a_m7_p1 = simde_mm_slli_epi16(psi_a_m7_p1, 2);
+    psi_a_m7_m1 = simde_mm_mulhi_epi16(psi_a_m7_m1, ONE_OVER_SQRT_2);
+    psi_a_m7_m1 = simde_mm_slli_epi16(psi_a_m7_m1, 2);
+    psi_a_m7_m3 = simde_mm_mulhi_epi16(psi_a_m7_m3, ONE_OVER_SQRT_2);
+    psi_a_m7_m3 = simde_mm_slli_epi16(psi_a_m7_m3, 2);
+    psi_a_m7_m5 = simde_mm_mulhi_epi16(psi_a_m7_m5, ONE_OVER_SQRT_2);
+    psi_a_m7_m5 = simde_mm_slli_epi16(psi_a_m7_m5, 2);
+    psi_a_m7_m7 = simde_mm_mulhi_epi16(psi_a_m7_m7, ONE_OVER_SQRT_2);
+    psi_a_m7_m7 = simde_mm_slli_epi16(psi_a_m7_m7, 2);
 
     // Calculation of a group of two terms in the bit metric involving squares of interference
     square_a_64qam_epi16(a_r_p7_p7, a_i_p7_p7, ch_mag_int, SQRT_42_OVER_FOUR, a_sq_p7_p7);
@@ -7587,637 +7244,637 @@ void qam64_qam64(short *stream0_in,
 
     // Computing different multiples of ||h0||^2
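     // i.e. ch_mag_<k>_over_42_with_sigma2 = (x^2+y^2)/42 * ||h0||^2 with k = x^2+y^2;
     // (x,y)=(1,7) and (5,5) both give k=50, hence the repeated computation below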
     // x=1, y=1
-    ch_mag_2_over_42_with_sigma2 = _mm_mulhi_epi16(ch_mag_des,ONE_OVER_FOUR_SQRT_42);
-    ch_mag_2_over_42_with_sigma2 = _mm_slli_epi16(ch_mag_2_over_42_with_sigma2,1);
+    ch_mag_2_over_42_with_sigma2 = simde_mm_mulhi_epi16(ch_mag_des,ONE_OVER_FOUR_SQRT_42);
+    ch_mag_2_over_42_with_sigma2 = simde_mm_slli_epi16(ch_mag_2_over_42_with_sigma2,1);
     // x=1, y=3
-    ch_mag_10_over_42_with_sigma2 = _mm_mulhi_epi16(ch_mag_des,FIVE_OVER_FOUR_SQRT_42);
-    ch_mag_10_over_42_with_sigma2 = _mm_slli_epi16(ch_mag_10_over_42_with_sigma2,1);
+    ch_mag_10_over_42_with_sigma2 = simde_mm_mulhi_epi16(ch_mag_des,FIVE_OVER_FOUR_SQRT_42);
+    ch_mag_10_over_42_with_sigma2 = simde_mm_slli_epi16(ch_mag_10_over_42_with_sigma2,1);
     // x=1, y=5
-    ch_mag_26_over_42_with_sigma2 = _mm_mulhi_epi16(ch_mag_des,THIRTEEN_OVER_FOUR_SQRT_42);
-    ch_mag_26_over_42_with_sigma2 = _mm_slli_epi16(ch_mag_26_over_42_with_sigma2,1);
+    ch_mag_26_over_42_with_sigma2 = simde_mm_mulhi_epi16(ch_mag_des,THIRTEEN_OVER_FOUR_SQRT_42);
+    ch_mag_26_over_42_with_sigma2 = simde_mm_slli_epi16(ch_mag_26_over_42_with_sigma2,1);
     // x=1, y=7
-    ch_mag_50_over_42_with_sigma2 = _mm_mulhi_epi16(ch_mag_des,TWENTYFIVE_OVER_FOUR_SQRT_42);
-    ch_mag_50_over_42_with_sigma2 = _mm_slli_epi16(ch_mag_50_over_42_with_sigma2,1);
+    ch_mag_50_over_42_with_sigma2 = simde_mm_mulhi_epi16(ch_mag_des,TWENTYFIVE_OVER_FOUR_SQRT_42);
+    ch_mag_50_over_42_with_sigma2 = simde_mm_slli_epi16(ch_mag_50_over_42_with_sigma2,1);
     // x=3, y=3
-    ch_mag_18_over_42_with_sigma2 = _mm_mulhi_epi16(ch_mag_des,NINE_OVER_FOUR_SQRT_42);
-    ch_mag_18_over_42_with_sigma2 = _mm_slli_epi16(ch_mag_18_over_42_with_sigma2,1);
+    ch_mag_18_over_42_with_sigma2 = simde_mm_mulhi_epi16(ch_mag_des,NINE_OVER_FOUR_SQRT_42);
+    ch_mag_18_over_42_with_sigma2 = simde_mm_slli_epi16(ch_mag_18_over_42_with_sigma2,1);
     // x=3, y=5
-    ch_mag_34_over_42_with_sigma2 = _mm_mulhi_epi16(ch_mag_des,SEVENTEEN_OVER_FOUR_SQRT_42);
-    ch_mag_34_over_42_with_sigma2 = _mm_slli_epi16(ch_mag_34_over_42_with_sigma2,1);
+    ch_mag_34_over_42_with_sigma2 = simde_mm_mulhi_epi16(ch_mag_des,SEVENTEEN_OVER_FOUR_SQRT_42);
+    ch_mag_34_over_42_with_sigma2 = simde_mm_slli_epi16(ch_mag_34_over_42_with_sigma2,1);
     // x=3, y=7
-    ch_mag_58_over_42_with_sigma2 = _mm_mulhi_epi16(ch_mag_des,TWENTYNINE_OVER_FOUR_SQRT_42);
-    ch_mag_58_over_42_with_sigma2 = _mm_slli_epi16(ch_mag_58_over_42_with_sigma2,2);
+    ch_mag_58_over_42_with_sigma2 = simde_mm_mulhi_epi16(ch_mag_des,TWENTYNINE_OVER_FOUR_SQRT_42);
+    ch_mag_58_over_42_with_sigma2 = simde_mm_slli_epi16(ch_mag_58_over_42_with_sigma2,2);
     // x=5, y=5
-    ch_mag_50_over_42_with_sigma2 = _mm_mulhi_epi16(ch_mag_des,TWENTYFIVE_OVER_FOUR_SQRT_42);
-    ch_mag_50_over_42_with_sigma2 = _mm_slli_epi16(ch_mag_50_over_42_with_sigma2,1);
+    ch_mag_50_over_42_with_sigma2 = simde_mm_mulhi_epi16(ch_mag_des,TWENTYFIVE_OVER_FOUR_SQRT_42);
+    ch_mag_50_over_42_with_sigma2 = simde_mm_slli_epi16(ch_mag_50_over_42_with_sigma2,1);
     // x=5, y=7
-    ch_mag_74_over_42_with_sigma2 = _mm_mulhi_epi16(ch_mag_des,THIRTYSEVEN_OVER_FOUR_SQRT_42);
-    ch_mag_74_over_42_with_sigma2 = _mm_slli_epi16(ch_mag_74_over_42_with_sigma2,2);
+    ch_mag_74_over_42_with_sigma2 = simde_mm_mulhi_epi16(ch_mag_des,THIRTYSEVEN_OVER_FOUR_SQRT_42);
+    ch_mag_74_over_42_with_sigma2 = simde_mm_slli_epi16(ch_mag_74_over_42_with_sigma2,2);
     // x=7, y=7
-    ch_mag_98_over_42_with_sigma2 = _mm_mulhi_epi16(ch_mag_des,FORTYNINE_OVER_FOUR_SQRT_42);
-    ch_mag_98_over_42_with_sigma2 = _mm_slli_epi16(ch_mag_98_over_42_with_sigma2,2);
+    ch_mag_98_over_42_with_sigma2 = simde_mm_mulhi_epi16(ch_mag_des,FORTYNINE_OVER_FOUR_SQRT_42);
+    ch_mag_98_over_42_with_sigma2 = simde_mm_slli_epi16(ch_mag_98_over_42_with_sigma2,2);
 
     // Computing Metrics
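     // per-candidate max-log metric: bit_met_x_y = psi_a_x_y - a_sq_x_y
     //   + (x*y0r + y*y0i) - ch_mag_(x^2+y^2)_over_42_with_sigma2,
     // where the y0 term carries the signs of the hypothesised real/imaginary parts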
-    xmm0 = _mm_subs_epi16(psi_a_p7_p7, a_sq_p7_p7);
-    xmm1 = _mm_adds_epi16(xmm0, y0_p_7_7);
-    simde__m128i bit_met_p7_p7 = _mm_subs_epi16(xmm1, ch_mag_98_over_42_with_sigma2);
-    xmm0 = _mm_subs_epi16(psi_a_p7_p5, a_sq_p7_p5);
-    xmm1 = _mm_adds_epi16(xmm0, y0_p_7_5);
-    simde__m128i bit_met_p7_p5 = _mm_subs_epi16(xmm1, ch_mag_74_over_42_with_sigma2);
-    xmm0 = _mm_subs_epi16(psi_a_p7_p3, a_sq_p7_p3);
-    xmm1 = _mm_adds_epi16(xmm0, y0_p_7_3);
-    simde__m128i bit_met_p7_p3 = _mm_subs_epi16(xmm1, ch_mag_58_over_42_with_sigma2);
-    xmm0 = _mm_subs_epi16(psi_a_p7_p1, a_sq_p7_p1);
-    xmm1 = _mm_adds_epi16(xmm0, y0_p_7_1);
-    simde__m128i bit_met_p7_p1 = _mm_subs_epi16(xmm1, ch_mag_50_over_42_with_sigma2);
-    xmm0 = _mm_subs_epi16(psi_a_p7_m1, a_sq_p7_m1);
-    xmm1 = _mm_adds_epi16(xmm0, y0_m_7_1);
-    simde__m128i bit_met_p7_m1 = _mm_subs_epi16(xmm1, ch_mag_50_over_42_with_sigma2);
-    xmm0 = _mm_subs_epi16(psi_a_p7_m3, a_sq_p7_m3);
-    xmm1 = _mm_adds_epi16(xmm0, y0_m_7_3);
-    simde__m128i bit_met_p7_m3 = _mm_subs_epi16(xmm1, ch_mag_58_over_42_with_sigma2);
-    xmm0 = _mm_subs_epi16(psi_a_p7_m5, a_sq_p7_m5);
-    xmm1 = _mm_adds_epi16(xmm0, y0_m_7_5);
-    simde__m128i bit_met_p7_m5 = _mm_subs_epi16(xmm1, ch_mag_74_over_42_with_sigma2);
-    xmm0 = _mm_subs_epi16(psi_a_p7_m7, a_sq_p7_m7);
-    xmm1 = _mm_adds_epi16(xmm0, y0_m_7_7);
-    simde__m128i bit_met_p7_m7 = _mm_subs_epi16(xmm1, ch_mag_98_over_42_with_sigma2);
-    xmm0 = _mm_subs_epi16(psi_a_p5_p7, a_sq_p5_p7);
-    xmm1 = _mm_adds_epi16(xmm0, y0_p_5_7);
-    simde__m128i bit_met_p5_p7 = _mm_subs_epi16(xmm1, ch_mag_74_over_42_with_sigma2);
-    xmm0 = _mm_subs_epi16(psi_a_p5_p5, a_sq_p5_p5);
-    xmm1 = _mm_adds_epi16(xmm0, y0_p_5_5);
-    simde__m128i bit_met_p5_p5 = _mm_subs_epi16(xmm1, ch_mag_50_over_42_with_sigma2);
-    xmm0 = _mm_subs_epi16(psi_a_p5_p3, a_sq_p5_p3);
-    xmm1 = _mm_adds_epi16(xmm0, y0_p_5_3);
-    simde__m128i bit_met_p5_p3 = _mm_subs_epi16(xmm1, ch_mag_34_over_42_with_sigma2);
-    xmm0 = _mm_subs_epi16(psi_a_p5_p1, a_sq_p5_p1);
-    xmm1 = _mm_adds_epi16(xmm0, y0_p_5_1);
-    simde__m128i bit_met_p5_p1 = _mm_subs_epi16(xmm1, ch_mag_26_over_42_with_sigma2);
-    xmm0 = _mm_subs_epi16(psi_a_p5_m1, a_sq_p5_m1);
-    xmm1 = _mm_adds_epi16(xmm0, y0_m_5_1);
-    simde__m128i bit_met_p5_m1 = _mm_subs_epi16(xmm1, ch_mag_26_over_42_with_sigma2);
-    xmm0 = _mm_subs_epi16(psi_a_p5_m3, a_sq_p5_m3);
-    xmm1 = _mm_adds_epi16(xmm0, y0_m_5_3);
-    simde__m128i bit_met_p5_m3 = _mm_subs_epi16(xmm1, ch_mag_34_over_42_with_sigma2);
-    xmm0 = _mm_subs_epi16(psi_a_p5_m5, a_sq_p5_m5);
-    xmm1 = _mm_adds_epi16(xmm0, y0_m_5_5);
-    simde__m128i bit_met_p5_m5 = _mm_subs_epi16(xmm1, ch_mag_50_over_42_with_sigma2);
-    xmm0 = _mm_subs_epi16(psi_a_p5_m7, a_sq_p5_m7);
-    xmm1 = _mm_adds_epi16(xmm0, y0_m_5_7);
-    simde__m128i bit_met_p5_m7 = _mm_subs_epi16(xmm1, ch_mag_74_over_42_with_sigma2);
-    xmm0 = _mm_subs_epi16(psi_a_p3_p7, a_sq_p3_p7);
-    xmm1 = _mm_adds_epi16(xmm0, y0_p_3_7);
-    simde__m128i bit_met_p3_p7 = _mm_subs_epi16(xmm1, ch_mag_58_over_42_with_sigma2);
-    xmm0 = _mm_subs_epi16(psi_a_p3_p5, a_sq_p3_p5);
-    xmm1 = _mm_adds_epi16(xmm0, y0_p_3_5);
-    simde__m128i bit_met_p3_p5 = _mm_subs_epi16(xmm1, ch_mag_34_over_42_with_sigma2);
-    xmm0 = _mm_subs_epi16(psi_a_p3_p3, a_sq_p3_p3);
-    xmm1 = _mm_adds_epi16(xmm0, y0_p_3_3);
-    simde__m128i bit_met_p3_p3 = _mm_subs_epi16(xmm1, ch_mag_18_over_42_with_sigma2);
-    xmm0 = _mm_subs_epi16(psi_a_p3_p1, a_sq_p3_p1);
-    xmm1 = _mm_adds_epi16(xmm0, y0_p_3_1);
-    simde__m128i bit_met_p3_p1 = _mm_subs_epi16(xmm1, ch_mag_10_over_42_with_sigma2);
-    xmm0 = _mm_subs_epi16(psi_a_p3_m1, a_sq_p3_m1);
-    xmm1 = _mm_adds_epi16(xmm0, y0_m_3_1);
-    simde__m128i bit_met_p3_m1 = _mm_subs_epi16(xmm1, ch_mag_10_over_42_with_sigma2);
-    xmm0 = _mm_subs_epi16(psi_a_p3_m3, a_sq_p3_m3);
-    xmm1 = _mm_adds_epi16(xmm0, y0_m_3_3);
-    simde__m128i bit_met_p3_m3 = _mm_subs_epi16(xmm1, ch_mag_18_over_42_with_sigma2);
-    xmm0 = _mm_subs_epi16(psi_a_p3_m5, a_sq_p3_m5);
-    xmm1 = _mm_adds_epi16(xmm0, y0_m_3_5);
-    simde__m128i bit_met_p3_m5 = _mm_subs_epi16(xmm1, ch_mag_34_over_42_with_sigma2);
-    xmm0 = _mm_subs_epi16(psi_a_p3_m7, a_sq_p3_m7);
-    xmm1 = _mm_adds_epi16(xmm0, y0_m_3_7);
-    simde__m128i bit_met_p3_m7 = _mm_subs_epi16(xmm1, ch_mag_58_over_42_with_sigma2);
-    xmm0 = _mm_subs_epi16(psi_a_p1_p7, a_sq_p1_p7);
-    xmm1 = _mm_adds_epi16(xmm0, y0_p_1_7);
-    simde__m128i bit_met_p1_p7 = _mm_subs_epi16(xmm1, ch_mag_50_over_42_with_sigma2);
-    xmm0 = _mm_subs_epi16(psi_a_p1_p5, a_sq_p1_p5);
-    xmm1 = _mm_adds_epi16(xmm0, y0_p_1_5);
-    simde__m128i bit_met_p1_p5 = _mm_subs_epi16(xmm1, ch_mag_26_over_42_with_sigma2);
-    xmm0 = _mm_subs_epi16(psi_a_p1_p3, a_sq_p1_p3);
-    xmm1 = _mm_adds_epi16(xmm0, y0_p_1_3);
-    simde__m128i bit_met_p1_p3 = _mm_subs_epi16(xmm1, ch_mag_10_over_42_with_sigma2);
-    xmm0 = _mm_subs_epi16(psi_a_p1_p1, a_sq_p1_p1);
-    xmm1 = _mm_adds_epi16(xmm0, y0_p_1_1);
-    simde__m128i bit_met_p1_p1 = _mm_subs_epi16(xmm1, ch_mag_2_over_42_with_sigma2);
-    xmm0 = _mm_subs_epi16(psi_a_p1_m1, a_sq_p1_m1);
-    xmm1 = _mm_adds_epi16(xmm0, y0_m_1_1);
-    simde__m128i bit_met_p1_m1 = _mm_subs_epi16(xmm1, ch_mag_2_over_42_with_sigma2);
-    xmm0 = _mm_subs_epi16(psi_a_p1_m3, a_sq_p1_m3);
-    xmm1 = _mm_adds_epi16(xmm0, y0_m_1_3);
-    simde__m128i bit_met_p1_m3 = _mm_subs_epi16(xmm1, ch_mag_10_over_42_with_sigma2);
-    xmm0 = _mm_subs_epi16(psi_a_p1_m5, a_sq_p1_m5);
-    xmm1 = _mm_adds_epi16(xmm0, y0_m_1_5);
-    simde__m128i bit_met_p1_m5 = _mm_subs_epi16(xmm1, ch_mag_26_over_42_with_sigma2);
-    xmm0 = _mm_subs_epi16(psi_a_p1_m7, a_sq_p1_m7);
-    xmm1 = _mm_adds_epi16(xmm0, y0_m_1_7);
-    simde__m128i bit_met_p1_m7 = _mm_subs_epi16(xmm1, ch_mag_50_over_42_with_sigma2);
-
-    xmm0 = _mm_subs_epi16(psi_a_m1_p7, a_sq_m1_p7);
-    xmm1 = _mm_subs_epi16(xmm0, y0_m_1_7);
-    simde__m128i bit_met_m1_p7 = _mm_subs_epi16(xmm1, ch_mag_50_over_42_with_sigma2);
-    xmm0 = _mm_subs_epi16(psi_a_m1_p5, a_sq_m1_p5);
-    xmm1 = _mm_subs_epi16(xmm0, y0_m_1_5);
-    simde__m128i bit_met_m1_p5 = _mm_subs_epi16(xmm1, ch_mag_26_over_42_with_sigma2);
-    xmm0 = _mm_subs_epi16(psi_a_m1_p3, a_sq_m1_p3);
-    xmm1 = _mm_subs_epi16(xmm0, y0_m_1_3);
-    simde__m128i bit_met_m1_p3 = _mm_subs_epi16(xmm1, ch_mag_10_over_42_with_sigma2);
-    xmm0 = _mm_subs_epi16(psi_a_m1_p1, a_sq_m1_p1);
-    xmm1 = _mm_subs_epi16(xmm0, y0_m_1_1);
-    simde__m128i bit_met_m1_p1 = _mm_subs_epi16(xmm1, ch_mag_2_over_42_with_sigma2);
-    xmm0 = _mm_subs_epi16(psi_a_m1_m1, a_sq_m1_m1);
-    xmm1 = _mm_subs_epi16(xmm0, y0_p_1_1);
-    simde__m128i bit_met_m1_m1 = _mm_subs_epi16(xmm1, ch_mag_2_over_42_with_sigma2);
-    xmm0 = _mm_subs_epi16(psi_a_m1_m3, a_sq_m1_m3);
-    xmm1 = _mm_subs_epi16(xmm0, y0_p_1_3);
-    simde__m128i bit_met_m1_m3 = _mm_subs_epi16(xmm1, ch_mag_10_over_42_with_sigma2);
-    xmm0 = _mm_subs_epi16(psi_a_m1_m5, a_sq_m1_m5);
-    xmm1 = _mm_subs_epi16(xmm0, y0_p_1_5);
-    simde__m128i bit_met_m1_m5 = _mm_subs_epi16(xmm1, ch_mag_26_over_42_with_sigma2);
-    xmm0 = _mm_subs_epi16(psi_a_m1_m7, a_sq_m1_m7);
-    xmm1 = _mm_subs_epi16(xmm0, y0_p_1_7);
-    simde__m128i bit_met_m1_m7 = _mm_subs_epi16(xmm1, ch_mag_50_over_42_with_sigma2);
-    xmm0 = _mm_subs_epi16(psi_a_m3_p7, a_sq_m3_p7);
-    xmm1 = _mm_subs_epi16(xmm0, y0_m_3_7);
-    simde__m128i bit_met_m3_p7 = _mm_subs_epi16(xmm1, ch_mag_58_over_42_with_sigma2);
-    xmm0 = _mm_subs_epi16(psi_a_m3_p5, a_sq_m3_p5);
-    xmm1 = _mm_subs_epi16(xmm0, y0_m_3_5);
-    simde__m128i bit_met_m3_p5 = _mm_subs_epi16(xmm1, ch_mag_34_over_42_with_sigma2);
-    xmm0 = _mm_subs_epi16(psi_a_m3_p3, a_sq_m3_p3);
-    xmm1 = _mm_subs_epi16(xmm0, y0_m_3_3);
-    simde__m128i bit_met_m3_p3 = _mm_subs_epi16(xmm1, ch_mag_18_over_42_with_sigma2);
-    xmm0 = _mm_subs_epi16(psi_a_m3_p1, a_sq_m3_p1);
-    xmm1 = _mm_subs_epi16(xmm0, y0_m_3_1);
-    simde__m128i bit_met_m3_p1 = _mm_subs_epi16(xmm1, ch_mag_10_over_42_with_sigma2);
-    xmm0 = _mm_subs_epi16(psi_a_m3_m1, a_sq_m3_m1);
-    xmm1 = _mm_subs_epi16(xmm0, y0_p_3_1);
-    simde__m128i bit_met_m3_m1 = _mm_subs_epi16(xmm1, ch_mag_10_over_42_with_sigma2);
-    xmm0 = _mm_subs_epi16(psi_a_m3_m3, a_sq_m3_m3);
-    xmm1 = _mm_subs_epi16(xmm0, y0_p_3_3);
-    simde__m128i bit_met_m3_m3 = _mm_subs_epi16(xmm1, ch_mag_18_over_42_with_sigma2);
-    xmm0 = _mm_subs_epi16(psi_a_m3_m5, a_sq_m3_m5);
-    xmm1 = _mm_subs_epi16(xmm0, y0_p_3_5);
-    simde__m128i bit_met_m3_m5 = _mm_subs_epi16(xmm1, ch_mag_34_over_42_with_sigma2);
-    xmm0 = _mm_subs_epi16(psi_a_m3_m7, a_sq_m3_m7);
-    xmm1 = _mm_subs_epi16(xmm0, y0_p_3_7);
-    simde__m128i bit_met_m3_m7 = _mm_subs_epi16(xmm1, ch_mag_58_over_42_with_sigma2);
-    xmm0 = _mm_subs_epi16(psi_a_m5_p7, a_sq_m5_p7);
-    xmm1 = _mm_subs_epi16(xmm0, y0_m_5_7);
-    simde__m128i bit_met_m5_p7 = _mm_subs_epi16(xmm1, ch_mag_74_over_42_with_sigma2);
-    xmm0 = _mm_subs_epi16(psi_a_m5_p5, a_sq_m5_p5);
-    xmm1 = _mm_subs_epi16(xmm0, y0_m_5_5);
-    simde__m128i bit_met_m5_p5 = _mm_subs_epi16(xmm1, ch_mag_50_over_42_with_sigma2);
-    xmm0 = _mm_subs_epi16(psi_a_m5_p3, a_sq_m5_p3);
-    xmm1 = _mm_subs_epi16(xmm0, y0_m_5_3);
-    simde__m128i bit_met_m5_p3 = _mm_subs_epi16(xmm1, ch_mag_34_over_42_with_sigma2);
-    xmm0 = _mm_subs_epi16(psi_a_m5_p1, a_sq_m5_p1);
-    xmm1 = _mm_subs_epi16(xmm0, y0_m_5_1);
-    simde__m128i bit_met_m5_p1 = _mm_subs_epi16(xmm1, ch_mag_26_over_42_with_sigma2);
-    xmm0 = _mm_subs_epi16(psi_a_m5_m1, a_sq_m5_m1);
-    xmm1 = _mm_subs_epi16(xmm0, y0_p_5_1);
-    simde__m128i bit_met_m5_m1 = _mm_subs_epi16(xmm1, ch_mag_26_over_42_with_sigma2);
-    xmm0 = _mm_subs_epi16(psi_a_m5_m3, a_sq_m5_m3);
-    xmm1 = _mm_subs_epi16(xmm0, y0_p_5_3);
-    simde__m128i bit_met_m5_m3 = _mm_subs_epi16(xmm1, ch_mag_34_over_42_with_sigma2);
-    xmm0 = _mm_subs_epi16(psi_a_m5_m5, a_sq_m5_m5);
-    xmm1 = _mm_subs_epi16(xmm0, y0_p_5_5);
-    simde__m128i bit_met_m5_m5 = _mm_subs_epi16(xmm1, ch_mag_50_over_42_with_sigma2);
-    xmm0 = _mm_subs_epi16(psi_a_m5_m7, a_sq_m5_m7);
-    xmm1 = _mm_subs_epi16(xmm0, y0_p_5_7);
-    simde__m128i bit_met_m5_m7 = _mm_subs_epi16(xmm1, ch_mag_74_over_42_with_sigma2);
-    xmm0 = _mm_subs_epi16(psi_a_m7_p7, a_sq_m7_p7);
-    xmm1 = _mm_subs_epi16(xmm0, y0_m_7_7);
-    simde__m128i bit_met_m7_p7 = _mm_subs_epi16(xmm1, ch_mag_98_over_42_with_sigma2);
-    xmm0 = _mm_subs_epi16(psi_a_m7_p5, a_sq_m7_p5);
-    xmm1 = _mm_subs_epi16(xmm0, y0_m_7_5);
-    simde__m128i bit_met_m7_p5 = _mm_subs_epi16(xmm1, ch_mag_74_over_42_with_sigma2);
-    xmm0 = _mm_subs_epi16(psi_a_m7_p3, a_sq_m7_p3);
-    xmm1 = _mm_subs_epi16(xmm0, y0_m_7_3);
-    simde__m128i bit_met_m7_p3 = _mm_subs_epi16(xmm1, ch_mag_58_over_42_with_sigma2);
-    xmm0 = _mm_subs_epi16(psi_a_m7_p1, a_sq_m7_p1);
-    xmm1 = _mm_subs_epi16(xmm0, y0_m_7_1);
-    simde__m128i bit_met_m7_p1 = _mm_subs_epi16(xmm1, ch_mag_50_over_42_with_sigma2);
-    xmm0 = _mm_subs_epi16(psi_a_m7_m1, a_sq_m7_m1);
-    xmm1 = _mm_subs_epi16(xmm0, y0_p_7_1);
-    simde__m128i bit_met_m7_m1 = _mm_subs_epi16(xmm1, ch_mag_50_over_42_with_sigma2);
-    xmm0 = _mm_subs_epi16(psi_a_m7_m3, a_sq_m7_m3);
-    xmm1 = _mm_subs_epi16(xmm0, y0_p_7_3);
-    simde__m128i bit_met_m7_m3 = _mm_subs_epi16(xmm1, ch_mag_58_over_42_with_sigma2);
-    xmm0 = _mm_subs_epi16(psi_a_m7_m5, a_sq_m7_m5);
-    xmm1 = _mm_subs_epi16(xmm0, y0_p_7_5);
-    simde__m128i bit_met_m7_m5 = _mm_subs_epi16(xmm1, ch_mag_74_over_42_with_sigma2);
-    xmm0 = _mm_subs_epi16(psi_a_m7_m7, a_sq_m7_m7);
-    xmm1 = _mm_subs_epi16(xmm0, y0_p_7_7);
-    simde__m128i bit_met_m7_m7 = _mm_subs_epi16(xmm1, ch_mag_98_over_42_with_sigma2);
+    xmm0 = simde_mm_subs_epi16(psi_a_p7_p7, a_sq_p7_p7);
+    xmm1 = simde_mm_adds_epi16(xmm0, y0_p_7_7);
+    simde__m128i bit_met_p7_p7 = simde_mm_subs_epi16(xmm1, ch_mag_98_over_42_with_sigma2);
+    xmm0 = simde_mm_subs_epi16(psi_a_p7_p5, a_sq_p7_p5);
+    xmm1 = simde_mm_adds_epi16(xmm0, y0_p_7_5);
+    simde__m128i bit_met_p7_p5 = simde_mm_subs_epi16(xmm1, ch_mag_74_over_42_with_sigma2);
+    xmm0 = simde_mm_subs_epi16(psi_a_p7_p3, a_sq_p7_p3);
+    xmm1 = simde_mm_adds_epi16(xmm0, y0_p_7_3);
+    simde__m128i bit_met_p7_p3 = simde_mm_subs_epi16(xmm1, ch_mag_58_over_42_with_sigma2);
+    xmm0 = simde_mm_subs_epi16(psi_a_p7_p1, a_sq_p7_p1);
+    xmm1 = simde_mm_adds_epi16(xmm0, y0_p_7_1);
+    simde__m128i bit_met_p7_p1 = simde_mm_subs_epi16(xmm1, ch_mag_50_over_42_with_sigma2);
+    xmm0 = simde_mm_subs_epi16(psi_a_p7_m1, a_sq_p7_m1);
+    xmm1 = simde_mm_adds_epi16(xmm0, y0_m_7_1);
+    simde__m128i bit_met_p7_m1 = simde_mm_subs_epi16(xmm1, ch_mag_50_over_42_with_sigma2);
+    xmm0 = simde_mm_subs_epi16(psi_a_p7_m3, a_sq_p7_m3);
+    xmm1 = simde_mm_adds_epi16(xmm0, y0_m_7_3);
+    simde__m128i bit_met_p7_m3 = simde_mm_subs_epi16(xmm1, ch_mag_58_over_42_with_sigma2);
+    xmm0 = simde_mm_subs_epi16(psi_a_p7_m5, a_sq_p7_m5);
+    xmm1 = simde_mm_adds_epi16(xmm0, y0_m_7_5);
+    simde__m128i bit_met_p7_m5 = simde_mm_subs_epi16(xmm1, ch_mag_74_over_42_with_sigma2);
+    xmm0 = simde_mm_subs_epi16(psi_a_p7_m7, a_sq_p7_m7);
+    xmm1 = simde_mm_adds_epi16(xmm0, y0_m_7_7);
+    simde__m128i bit_met_p7_m7 = simde_mm_subs_epi16(xmm1, ch_mag_98_over_42_with_sigma2);
+    xmm0 = simde_mm_subs_epi16(psi_a_p5_p7, a_sq_p5_p7);
+    xmm1 = simde_mm_adds_epi16(xmm0, y0_p_5_7);
+    simde__m128i bit_met_p5_p7 = simde_mm_subs_epi16(xmm1, ch_mag_74_over_42_with_sigma2);
+    xmm0 = simde_mm_subs_epi16(psi_a_p5_p5, a_sq_p5_p5);
+    xmm1 = simde_mm_adds_epi16(xmm0, y0_p_5_5);
+    simde__m128i bit_met_p5_p5 = simde_mm_subs_epi16(xmm1, ch_mag_50_over_42_with_sigma2);
+    xmm0 = simde_mm_subs_epi16(psi_a_p5_p3, a_sq_p5_p3);
+    xmm1 = simde_mm_adds_epi16(xmm0, y0_p_5_3);
+    simde__m128i bit_met_p5_p3 = simde_mm_subs_epi16(xmm1, ch_mag_34_over_42_with_sigma2);
+    xmm0 = simde_mm_subs_epi16(psi_a_p5_p1, a_sq_p5_p1);
+    xmm1 = simde_mm_adds_epi16(xmm0, y0_p_5_1);
+    simde__m128i bit_met_p5_p1 = simde_mm_subs_epi16(xmm1, ch_mag_26_over_42_with_sigma2);
+    xmm0 = simde_mm_subs_epi16(psi_a_p5_m1, a_sq_p5_m1);
+    xmm1 = simde_mm_adds_epi16(xmm0, y0_m_5_1);
+    simde__m128i bit_met_p5_m1 = simde_mm_subs_epi16(xmm1, ch_mag_26_over_42_with_sigma2);
+    xmm0 = simde_mm_subs_epi16(psi_a_p5_m3, a_sq_p5_m3);
+    xmm1 = simde_mm_adds_epi16(xmm0, y0_m_5_3);
+    simde__m128i bit_met_p5_m3 = simde_mm_subs_epi16(xmm1, ch_mag_34_over_42_with_sigma2);
+    xmm0 = simde_mm_subs_epi16(psi_a_p5_m5, a_sq_p5_m5);
+    xmm1 = simde_mm_adds_epi16(xmm0, y0_m_5_5);
+    simde__m128i bit_met_p5_m5 = simde_mm_subs_epi16(xmm1, ch_mag_50_over_42_with_sigma2);
+    xmm0 = simde_mm_subs_epi16(psi_a_p5_m7, a_sq_p5_m7);
+    xmm1 = simde_mm_adds_epi16(xmm0, y0_m_5_7);
+    simde__m128i bit_met_p5_m7 = simde_mm_subs_epi16(xmm1, ch_mag_74_over_42_with_sigma2);
+    xmm0 = simde_mm_subs_epi16(psi_a_p3_p7, a_sq_p3_p7);
+    xmm1 = simde_mm_adds_epi16(xmm0, y0_p_3_7);
+    simde__m128i bit_met_p3_p7 = simde_mm_subs_epi16(xmm1, ch_mag_58_over_42_with_sigma2);
+    xmm0 = simde_mm_subs_epi16(psi_a_p3_p5, a_sq_p3_p5);
+    xmm1 = simde_mm_adds_epi16(xmm0, y0_p_3_5);
+    simde__m128i bit_met_p3_p5 = simde_mm_subs_epi16(xmm1, ch_mag_34_over_42_with_sigma2);
+    xmm0 = simde_mm_subs_epi16(psi_a_p3_p3, a_sq_p3_p3);
+    xmm1 = simde_mm_adds_epi16(xmm0, y0_p_3_3);
+    simde__m128i bit_met_p3_p3 = simde_mm_subs_epi16(xmm1, ch_mag_18_over_42_with_sigma2);
+    xmm0 = simde_mm_subs_epi16(psi_a_p3_p1, a_sq_p3_p1);
+    xmm1 = simde_mm_adds_epi16(xmm0, y0_p_3_1);
+    simde__m128i bit_met_p3_p1 = simde_mm_subs_epi16(xmm1, ch_mag_10_over_42_with_sigma2);
+    xmm0 = simde_mm_subs_epi16(psi_a_p3_m1, a_sq_p3_m1);
+    xmm1 = simde_mm_adds_epi16(xmm0, y0_m_3_1);
+    simde__m128i bit_met_p3_m1 = simde_mm_subs_epi16(xmm1, ch_mag_10_over_42_with_sigma2);
+    xmm0 = simde_mm_subs_epi16(psi_a_p3_m3, a_sq_p3_m3);
+    xmm1 = simde_mm_adds_epi16(xmm0, y0_m_3_3);
+    simde__m128i bit_met_p3_m3 = simde_mm_subs_epi16(xmm1, ch_mag_18_over_42_with_sigma2);
+    xmm0 = simde_mm_subs_epi16(psi_a_p3_m5, a_sq_p3_m5);
+    xmm1 = simde_mm_adds_epi16(xmm0, y0_m_3_5);
+    simde__m128i bit_met_p3_m5 = simde_mm_subs_epi16(xmm1, ch_mag_34_over_42_with_sigma2);
+    xmm0 = simde_mm_subs_epi16(psi_a_p3_m7, a_sq_p3_m7);
+    xmm1 = simde_mm_adds_epi16(xmm0, y0_m_3_7);
+    simde__m128i bit_met_p3_m7 = simde_mm_subs_epi16(xmm1, ch_mag_58_over_42_with_sigma2);
+    xmm0 = simde_mm_subs_epi16(psi_a_p1_p7, a_sq_p1_p7);
+    xmm1 = simde_mm_adds_epi16(xmm0, y0_p_1_7);
+    simde__m128i bit_met_p1_p7 = simde_mm_subs_epi16(xmm1, ch_mag_50_over_42_with_sigma2);
+    xmm0 = simde_mm_subs_epi16(psi_a_p1_p5, a_sq_p1_p5);
+    xmm1 = simde_mm_adds_epi16(xmm0, y0_p_1_5);
+    simde__m128i bit_met_p1_p5 = simde_mm_subs_epi16(xmm1, ch_mag_26_over_42_with_sigma2);
+    xmm0 = simde_mm_subs_epi16(psi_a_p1_p3, a_sq_p1_p3);
+    xmm1 = simde_mm_adds_epi16(xmm0, y0_p_1_3);
+    simde__m128i bit_met_p1_p3 = simde_mm_subs_epi16(xmm1, ch_mag_10_over_42_with_sigma2);
+    xmm0 = simde_mm_subs_epi16(psi_a_p1_p1, a_sq_p1_p1);
+    xmm1 = simde_mm_adds_epi16(xmm0, y0_p_1_1);
+    simde__m128i bit_met_p1_p1 = simde_mm_subs_epi16(xmm1, ch_mag_2_over_42_with_sigma2);
+    xmm0 = simde_mm_subs_epi16(psi_a_p1_m1, a_sq_p1_m1);
+    xmm1 = simde_mm_adds_epi16(xmm0, y0_m_1_1);
+    simde__m128i bit_met_p1_m1 = simde_mm_subs_epi16(xmm1, ch_mag_2_over_42_with_sigma2);
+    xmm0 = simde_mm_subs_epi16(psi_a_p1_m3, a_sq_p1_m3);
+    xmm1 = simde_mm_adds_epi16(xmm0, y0_m_1_3);
+    simde__m128i bit_met_p1_m3 = simde_mm_subs_epi16(xmm1, ch_mag_10_over_42_with_sigma2);
+    xmm0 = simde_mm_subs_epi16(psi_a_p1_m5, a_sq_p1_m5);
+    xmm1 = simde_mm_adds_epi16(xmm0, y0_m_1_5);
+    simde__m128i bit_met_p1_m5 = simde_mm_subs_epi16(xmm1, ch_mag_26_over_42_with_sigma2);
+    xmm0 = simde_mm_subs_epi16(psi_a_p1_m7, a_sq_p1_m7);
+    xmm1 = simde_mm_adds_epi16(xmm0, y0_m_1_7);
+    simde__m128i bit_met_p1_m7 = simde_mm_subs_epi16(xmm1, ch_mag_50_over_42_with_sigma2);
+
+    xmm0 = simde_mm_subs_epi16(psi_a_m1_p7, a_sq_m1_p7);
+    xmm1 = simde_mm_subs_epi16(xmm0, y0_m_1_7);
+    simde__m128i bit_met_m1_p7 = simde_mm_subs_epi16(xmm1, ch_mag_50_over_42_with_sigma2);
+    xmm0 = simde_mm_subs_epi16(psi_a_m1_p5, a_sq_m1_p5);
+    xmm1 = simde_mm_subs_epi16(xmm0, y0_m_1_5);
+    simde__m128i bit_met_m1_p5 = simde_mm_subs_epi16(xmm1, ch_mag_26_over_42_with_sigma2);
+    xmm0 = simde_mm_subs_epi16(psi_a_m1_p3, a_sq_m1_p3);
+    xmm1 = simde_mm_subs_epi16(xmm0, y0_m_1_3);
+    simde__m128i bit_met_m1_p3 = simde_mm_subs_epi16(xmm1, ch_mag_10_over_42_with_sigma2);
+    xmm0 = simde_mm_subs_epi16(psi_a_m1_p1, a_sq_m1_p1);
+    xmm1 = simde_mm_subs_epi16(xmm0, y0_m_1_1);
+    simde__m128i bit_met_m1_p1 = simde_mm_subs_epi16(xmm1, ch_mag_2_over_42_with_sigma2);
+    xmm0 = simde_mm_subs_epi16(psi_a_m1_m1, a_sq_m1_m1);
+    xmm1 = simde_mm_subs_epi16(xmm0, y0_p_1_1);
+    simde__m128i bit_met_m1_m1 = simde_mm_subs_epi16(xmm1, ch_mag_2_over_42_with_sigma2);
+    xmm0 = simde_mm_subs_epi16(psi_a_m1_m3, a_sq_m1_m3);
+    xmm1 = simde_mm_subs_epi16(xmm0, y0_p_1_3);
+    simde__m128i bit_met_m1_m3 = simde_mm_subs_epi16(xmm1, ch_mag_10_over_42_with_sigma2);
+    xmm0 = simde_mm_subs_epi16(psi_a_m1_m5, a_sq_m1_m5);
+    xmm1 = simde_mm_subs_epi16(xmm0, y0_p_1_5);
+    simde__m128i bit_met_m1_m5 = simde_mm_subs_epi16(xmm1, ch_mag_26_over_42_with_sigma2);
+    xmm0 = simde_mm_subs_epi16(psi_a_m1_m7, a_sq_m1_m7);
+    xmm1 = simde_mm_subs_epi16(xmm0, y0_p_1_7);
+    simde__m128i bit_met_m1_m7 = simde_mm_subs_epi16(xmm1, ch_mag_50_over_42_with_sigma2);
+    xmm0 = simde_mm_subs_epi16(psi_a_m3_p7, a_sq_m3_p7);
+    xmm1 = simde_mm_subs_epi16(xmm0, y0_m_3_7);
+    simde__m128i bit_met_m3_p7 = simde_mm_subs_epi16(xmm1, ch_mag_58_over_42_with_sigma2);
+    xmm0 = simde_mm_subs_epi16(psi_a_m3_p5, a_sq_m3_p5);
+    xmm1 = simde_mm_subs_epi16(xmm0, y0_m_3_5);
+    simde__m128i bit_met_m3_p5 = simde_mm_subs_epi16(xmm1, ch_mag_34_over_42_with_sigma2);
+    xmm0 = simde_mm_subs_epi16(psi_a_m3_p3, a_sq_m3_p3);
+    xmm1 = simde_mm_subs_epi16(xmm0, y0_m_3_3);
+    simde__m128i bit_met_m3_p3 = simde_mm_subs_epi16(xmm1, ch_mag_18_over_42_with_sigma2);
+    xmm0 = simde_mm_subs_epi16(psi_a_m3_p1, a_sq_m3_p1);
+    xmm1 = simde_mm_subs_epi16(xmm0, y0_m_3_1);
+    simde__m128i bit_met_m3_p1 = simde_mm_subs_epi16(xmm1, ch_mag_10_over_42_with_sigma2);
+    xmm0 = simde_mm_subs_epi16(psi_a_m3_m1, a_sq_m3_m1);
+    xmm1 = simde_mm_subs_epi16(xmm0, y0_p_3_1);
+    simde__m128i bit_met_m3_m1 = simde_mm_subs_epi16(xmm1, ch_mag_10_over_42_with_sigma2);
+    xmm0 = simde_mm_subs_epi16(psi_a_m3_m3, a_sq_m3_m3);
+    xmm1 = simde_mm_subs_epi16(xmm0, y0_p_3_3);
+    simde__m128i bit_met_m3_m3 = simde_mm_subs_epi16(xmm1, ch_mag_18_over_42_with_sigma2);
+    xmm0 = simde_mm_subs_epi16(psi_a_m3_m5, a_sq_m3_m5);
+    xmm1 = simde_mm_subs_epi16(xmm0, y0_p_3_5);
+    simde__m128i bit_met_m3_m5 = simde_mm_subs_epi16(xmm1, ch_mag_34_over_42_with_sigma2);
+    xmm0 = simde_mm_subs_epi16(psi_a_m3_m7, a_sq_m3_m7);
+    xmm1 = simde_mm_subs_epi16(xmm0, y0_p_3_7);
+    simde__m128i bit_met_m3_m7 = simde_mm_subs_epi16(xmm1, ch_mag_58_over_42_with_sigma2);
+    xmm0 = simde_mm_subs_epi16(psi_a_m5_p7, a_sq_m5_p7);
+    xmm1 = simde_mm_subs_epi16(xmm0, y0_m_5_7);
+    simde__m128i bit_met_m5_p7 = simde_mm_subs_epi16(xmm1, ch_mag_74_over_42_with_sigma2);
+    xmm0 = simde_mm_subs_epi16(psi_a_m5_p5, a_sq_m5_p5);
+    xmm1 = simde_mm_subs_epi16(xmm0, y0_m_5_5);
+    simde__m128i bit_met_m5_p5 = simde_mm_subs_epi16(xmm1, ch_mag_50_over_42_with_sigma2);
+    xmm0 = simde_mm_subs_epi16(psi_a_m5_p3, a_sq_m5_p3);
+    xmm1 = simde_mm_subs_epi16(xmm0, y0_m_5_3);
+    simde__m128i bit_met_m5_p3 = simde_mm_subs_epi16(xmm1, ch_mag_34_over_42_with_sigma2);
+    xmm0 = simde_mm_subs_epi16(psi_a_m5_p1, a_sq_m5_p1);
+    xmm1 = simde_mm_subs_epi16(xmm0, y0_m_5_1);
+    simde__m128i bit_met_m5_p1 = simde_mm_subs_epi16(xmm1, ch_mag_26_over_42_with_sigma2);
+    xmm0 = simde_mm_subs_epi16(psi_a_m5_m1, a_sq_m5_m1);
+    xmm1 = simde_mm_subs_epi16(xmm0, y0_p_5_1);
+    simde__m128i bit_met_m5_m1 = simde_mm_subs_epi16(xmm1, ch_mag_26_over_42_with_sigma2);
+    xmm0 = simde_mm_subs_epi16(psi_a_m5_m3, a_sq_m5_m3);
+    xmm1 = simde_mm_subs_epi16(xmm0, y0_p_5_3);
+    simde__m128i bit_met_m5_m3 = simde_mm_subs_epi16(xmm1, ch_mag_34_over_42_with_sigma2);
+    xmm0 = simde_mm_subs_epi16(psi_a_m5_m5, a_sq_m5_m5);
+    xmm1 = simde_mm_subs_epi16(xmm0, y0_p_5_5);
+    simde__m128i bit_met_m5_m5 = simde_mm_subs_epi16(xmm1, ch_mag_50_over_42_with_sigma2);
+    xmm0 = simde_mm_subs_epi16(psi_a_m5_m7, a_sq_m5_m7);
+    xmm1 = simde_mm_subs_epi16(xmm0, y0_p_5_7);
+    simde__m128i bit_met_m5_m7 = simde_mm_subs_epi16(xmm1, ch_mag_74_over_42_with_sigma2);
+    xmm0 = simde_mm_subs_epi16(psi_a_m7_p7, a_sq_m7_p7);
+    xmm1 = simde_mm_subs_epi16(xmm0, y0_m_7_7);
+    simde__m128i bit_met_m7_p7 = simde_mm_subs_epi16(xmm1, ch_mag_98_over_42_with_sigma2);
+    xmm0 = simde_mm_subs_epi16(psi_a_m7_p5, a_sq_m7_p5);
+    xmm1 = simde_mm_subs_epi16(xmm0, y0_m_7_5);
+    simde__m128i bit_met_m7_p5 = simde_mm_subs_epi16(xmm1, ch_mag_74_over_42_with_sigma2);
+    xmm0 = simde_mm_subs_epi16(psi_a_m7_p3, a_sq_m7_p3);
+    xmm1 = simde_mm_subs_epi16(xmm0, y0_m_7_3);
+    simde__m128i bit_met_m7_p3 = simde_mm_subs_epi16(xmm1, ch_mag_58_over_42_with_sigma2);
+    xmm0 = simde_mm_subs_epi16(psi_a_m7_p1, a_sq_m7_p1);
+    xmm1 = simde_mm_subs_epi16(xmm0, y0_m_7_1);
+    simde__m128i bit_met_m7_p1 = simde_mm_subs_epi16(xmm1, ch_mag_50_over_42_with_sigma2);
+    xmm0 = simde_mm_subs_epi16(psi_a_m7_m1, a_sq_m7_m1);
+    xmm1 = simde_mm_subs_epi16(xmm0, y0_p_7_1);
+    simde__m128i bit_met_m7_m1 = simde_mm_subs_epi16(xmm1, ch_mag_50_over_42_with_sigma2);
+    xmm0 = simde_mm_subs_epi16(psi_a_m7_m3, a_sq_m7_m3);
+    xmm1 = simde_mm_subs_epi16(xmm0, y0_p_7_3);
+    simde__m128i bit_met_m7_m3 = simde_mm_subs_epi16(xmm1, ch_mag_58_over_42_with_sigma2);
+    xmm0 = simde_mm_subs_epi16(psi_a_m7_m5, a_sq_m7_m5);
+    xmm1 = simde_mm_subs_epi16(xmm0, y0_p_7_5);
+    simde__m128i bit_met_m7_m5 = simde_mm_subs_epi16(xmm1, ch_mag_74_over_42_with_sigma2);
+    xmm0 = simde_mm_subs_epi16(psi_a_m7_m7, a_sq_m7_m7);
+    xmm1 = simde_mm_subs_epi16(xmm0, y0_p_7_7);
+    simde__m128i bit_met_m7_m7 = simde_mm_subs_epi16(xmm1, ch_mag_98_over_42_with_sigma2);
 
     // Detection for 1st bit (LTE mapping)
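     // the first bit is decided by the sign of the real part: bit=1 collects the
     // hypotheses with negative real amplitude (m7..m1), bit=0 the positive ones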
     // bit = 1
-    xmm0 = _mm_max_epi16(bit_met_m7_p7, bit_met_m7_p5);
-    xmm1 = _mm_max_epi16(bit_met_m7_p3, bit_met_m7_p1);
-    xmm2 = _mm_max_epi16(bit_met_m7_m1, bit_met_m7_m3);
-    xmm3 = _mm_max_epi16(bit_met_m7_m5, bit_met_m7_m7);
-    xmm4 = _mm_max_epi16(xmm0, xmm1);
-    xmm5 = _mm_max_epi16(xmm2, xmm3);
-    simde__m128i logmax_den_re0 = _mm_max_epi16(xmm4, xmm5);
-    xmm0 = _mm_max_epi16(bit_met_m5_p7, bit_met_m5_p5);
-    xmm1 = _mm_max_epi16(bit_met_m5_p3, bit_met_m5_p1);
-    xmm2 = _mm_max_epi16(bit_met_m5_m1, bit_met_m5_m3);
-    xmm3 = _mm_max_epi16(bit_met_m5_m5, bit_met_m5_m7);
-    xmm4 = _mm_max_epi16(xmm0, xmm1);
-    xmm5 = _mm_max_epi16(xmm2, xmm3);
-    logmax_den_re0 = _mm_max_epi16(logmax_den_re0, xmm4);
-    logmax_den_re0 = _mm_max_epi16(logmax_den_re0, xmm5);
-    xmm0 = _mm_max_epi16(bit_met_m3_p7, bit_met_m3_p5);
-    xmm1 = _mm_max_epi16(bit_met_m3_p3, bit_met_m3_p1);
-    xmm2 = _mm_max_epi16(bit_met_m3_m1, bit_met_m3_m3);
-    xmm3 = _mm_max_epi16(bit_met_m3_m5, bit_met_m3_m7);
-    xmm4 = _mm_max_epi16(xmm0, xmm1);
-    xmm5 = _mm_max_epi16(xmm2, xmm3);
-    logmax_den_re0 = _mm_max_epi16(logmax_den_re0, xmm4);
-    logmax_den_re0 = _mm_max_epi16(logmax_den_re0, xmm5);
-    xmm0 = _mm_max_epi16(bit_met_m1_p7, bit_met_m1_p5);
-    xmm1 = _mm_max_epi16(bit_met_m1_p3, bit_met_m1_p1);
-    xmm2 = _mm_max_epi16(bit_met_m1_m1, bit_met_m1_m3);
-    xmm3 = _mm_max_epi16(bit_met_m1_m5, bit_met_m1_m7);
-    xmm4 = _mm_max_epi16(xmm0, xmm1);
-    xmm5 = _mm_max_epi16(xmm2, xmm3);
-    logmax_den_re0 = _mm_max_epi16(logmax_den_re0, xmm4);
-    logmax_den_re0 = _mm_max_epi16(logmax_den_re0, xmm5);
+    xmm0 = simde_mm_max_epi16(bit_met_m7_p7, bit_met_m7_p5);
+    xmm1 = simde_mm_max_epi16(bit_met_m7_p3, bit_met_m7_p1);
+    xmm2 = simde_mm_max_epi16(bit_met_m7_m1, bit_met_m7_m3);
+    xmm3 = simde_mm_max_epi16(bit_met_m7_m5, bit_met_m7_m7);
+    xmm4 = simde_mm_max_epi16(xmm0, xmm1);
+    xmm5 = simde_mm_max_epi16(xmm2, xmm3);
+    simde__m128i logmax_den_re0 = simde_mm_max_epi16(xmm4, xmm5);
+    xmm0 = simde_mm_max_epi16(bit_met_m5_p7, bit_met_m5_p5);
+    xmm1 = simde_mm_max_epi16(bit_met_m5_p3, bit_met_m5_p1);
+    xmm2 = simde_mm_max_epi16(bit_met_m5_m1, bit_met_m5_m3);
+    xmm3 = simde_mm_max_epi16(bit_met_m5_m5, bit_met_m5_m7);
+    xmm4 = simde_mm_max_epi16(xmm0, xmm1);
+    xmm5 = simde_mm_max_epi16(xmm2, xmm3);
+    logmax_den_re0 = simde_mm_max_epi16(logmax_den_re0, xmm4);
+    logmax_den_re0 = simde_mm_max_epi16(logmax_den_re0, xmm5);
+    xmm0 = simde_mm_max_epi16(bit_met_m3_p7, bit_met_m3_p5);
+    xmm1 = simde_mm_max_epi16(bit_met_m3_p3, bit_met_m3_p1);
+    xmm2 = simde_mm_max_epi16(bit_met_m3_m1, bit_met_m3_m3);
+    xmm3 = simde_mm_max_epi16(bit_met_m3_m5, bit_met_m3_m7);
+    xmm4 = simde_mm_max_epi16(xmm0, xmm1);
+    xmm5 = simde_mm_max_epi16(xmm2, xmm3);
+    logmax_den_re0 = simde_mm_max_epi16(logmax_den_re0, xmm4);
+    logmax_den_re0 = simde_mm_max_epi16(logmax_den_re0, xmm5);
+    xmm0 = simde_mm_max_epi16(bit_met_m1_p7, bit_met_m1_p5);
+    xmm1 = simde_mm_max_epi16(bit_met_m1_p3, bit_met_m1_p1);
+    xmm2 = simde_mm_max_epi16(bit_met_m1_m1, bit_met_m1_m3);
+    xmm3 = simde_mm_max_epi16(bit_met_m1_m5, bit_met_m1_m7);
+    xmm4 = simde_mm_max_epi16(xmm0, xmm1);
+    xmm5 = simde_mm_max_epi16(xmm2, xmm3);
+    logmax_den_re0 = simde_mm_max_epi16(logmax_den_re0, xmm4);
+    logmax_den_re0 = simde_mm_max_epi16(logmax_den_re0, xmm5);
 
     // bit = 0
-    xmm0 = _mm_max_epi16(bit_met_p7_p7, bit_met_p7_p5);
-    xmm1 = _mm_max_epi16(bit_met_p7_p3, bit_met_p7_p1);
-    xmm2 = _mm_max_epi16(bit_met_p7_m1, bit_met_p7_m3);
-    xmm3 = _mm_max_epi16(bit_met_p7_m5, bit_met_p7_m7);
-    xmm4 = _mm_max_epi16(xmm0, xmm1);
-    xmm5 = _mm_max_epi16(xmm2, xmm3);
-    simde__m128i logmax_num_re0 = _mm_max_epi16(xmm4, xmm5);
-    xmm0 = _mm_max_epi16(bit_met_p5_p7, bit_met_p5_p5);
-    xmm1 = _mm_max_epi16(bit_met_p5_p3, bit_met_p5_p1);
-    xmm2 = _mm_max_epi16(bit_met_p5_m1, bit_met_p5_m3);
-    xmm3 = _mm_max_epi16(bit_met_p5_m5, bit_met_p5_m7);
-    xmm4 = _mm_max_epi16(xmm0, xmm1);
-    xmm5 = _mm_max_epi16(xmm2, xmm3);
-    logmax_num_re0 = _mm_max_epi16(logmax_num_re0, xmm4);
-    logmax_num_re0 = _mm_max_epi16(logmax_num_re0, xmm5);
-    xmm0 = _mm_max_epi16(bit_met_p3_p7, bit_met_p3_p5);
-    xmm1 = _mm_max_epi16(bit_met_p3_p3, bit_met_p3_p1);
-    xmm2 = _mm_max_epi16(bit_met_p3_m1, bit_met_p3_m3);
-    xmm3 = _mm_max_epi16(bit_met_p3_m5, bit_met_p3_m7);
-    xmm4 = _mm_max_epi16(xmm0, xmm1);
-    xmm5 = _mm_max_epi16(xmm2, xmm3);
-    logmax_num_re0 = _mm_max_epi16(logmax_num_re0, xmm4);
-    logmax_num_re0 = _mm_max_epi16(logmax_num_re0, xmm5);
-    xmm0 = _mm_max_epi16(bit_met_p1_p7, bit_met_p1_p5);
-    xmm1 = _mm_max_epi16(bit_met_p1_p3, bit_met_p1_p1);
-    xmm2 = _mm_max_epi16(bit_met_p1_m1, bit_met_p1_m3);
-    xmm3 = _mm_max_epi16(bit_met_p1_m5, bit_met_p1_m7);
-    xmm4 = _mm_max_epi16(xmm0, xmm1);
-    xmm5 = _mm_max_epi16(xmm2, xmm3);
-    logmax_num_re0 = _mm_max_epi16(logmax_num_re0, xmm4);
-    logmax_num_re0 = _mm_max_epi16(logmax_num_re0, xmm5);
-
-    y0r = _mm_subs_epi16(logmax_num_re0, logmax_den_re0);
+    xmm0 = simde_mm_max_epi16(bit_met_p7_p7, bit_met_p7_p5);
+    xmm1 = simde_mm_max_epi16(bit_met_p7_p3, bit_met_p7_p1);
+    xmm2 = simde_mm_max_epi16(bit_met_p7_m1, bit_met_p7_m3);
+    xmm3 = simde_mm_max_epi16(bit_met_p7_m5, bit_met_p7_m7);
+    xmm4 = simde_mm_max_epi16(xmm0, xmm1);
+    xmm5 = simde_mm_max_epi16(xmm2, xmm3);
+    simde__m128i logmax_num_re0 = simde_mm_max_epi16(xmm4, xmm5);
+    xmm0 = simde_mm_max_epi16(bit_met_p5_p7, bit_met_p5_p5);
+    xmm1 = simde_mm_max_epi16(bit_met_p5_p3, bit_met_p5_p1);
+    xmm2 = simde_mm_max_epi16(bit_met_p5_m1, bit_met_p5_m3);
+    xmm3 = simde_mm_max_epi16(bit_met_p5_m5, bit_met_p5_m7);
+    xmm4 = simde_mm_max_epi16(xmm0, xmm1);
+    xmm5 = simde_mm_max_epi16(xmm2, xmm3);
+    logmax_num_re0 = simde_mm_max_epi16(logmax_num_re0, xmm4);
+    logmax_num_re0 = simde_mm_max_epi16(logmax_num_re0, xmm5);
+    xmm0 = simde_mm_max_epi16(bit_met_p3_p7, bit_met_p3_p5);
+    xmm1 = simde_mm_max_epi16(bit_met_p3_p3, bit_met_p3_p1);
+    xmm2 = simde_mm_max_epi16(bit_met_p3_m1, bit_met_p3_m3);
+    xmm3 = simde_mm_max_epi16(bit_met_p3_m5, bit_met_p3_m7);
+    xmm4 = simde_mm_max_epi16(xmm0, xmm1);
+    xmm5 = simde_mm_max_epi16(xmm2, xmm3);
+    logmax_num_re0 = simde_mm_max_epi16(logmax_num_re0, xmm4);
+    logmax_num_re0 = simde_mm_max_epi16(logmax_num_re0, xmm5);
+    xmm0 = simde_mm_max_epi16(bit_met_p1_p7, bit_met_p1_p5);
+    xmm1 = simde_mm_max_epi16(bit_met_p1_p3, bit_met_p1_p1);
+    xmm2 = simde_mm_max_epi16(bit_met_p1_m1, bit_met_p1_m3);
+    xmm3 = simde_mm_max_epi16(bit_met_p1_m5, bit_met_p1_m7);
+    xmm4 = simde_mm_max_epi16(xmm0, xmm1);
+    xmm5 = simde_mm_max_epi16(xmm2, xmm3);
+    logmax_num_re0 = simde_mm_max_epi16(logmax_num_re0, xmm4);
+    logmax_num_re0 = simde_mm_max_epi16(logmax_num_re0, xmm5);
+
+    y0r = simde_mm_subs_epi16(logmax_num_re0, logmax_den_re0);
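+    // max-log LLR of the first bit: best bit=0 metric minus best bit=1 metric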
 
     // Detection for 2nd bit (LTE mapping)
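     // here the hypotheses are grouped by the sign of the imaginary part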
     // bit = 1
-    xmm0 = _mm_max_epi16(bit_met_p7_m1, bit_met_p5_m1);
-    xmm1 = _mm_max_epi16(bit_met_p3_m1, bit_met_p1_m1);
-    xmm2 = _mm_max_epi16(bit_met_m1_m1, bit_met_m3_m1);
-    xmm3 = _mm_max_epi16(bit_met_m5_m1, bit_met_m7_m1);
-    xmm4 = _mm_max_epi16(xmm0, xmm1);
-    xmm5 = _mm_max_epi16(xmm2, xmm3);
-    logmax_den_re0 = _mm_max_epi16(xmm4, xmm5);
-    xmm0 = _mm_max_epi16(bit_met_p7_m3, bit_met_p5_m3);
-    xmm1 = _mm_max_epi16(bit_met_p3_m3, bit_met_p1_m3);
-    xmm2 = _mm_max_epi16(bit_met_m1_m3, bit_met_m3_m3);
-    xmm3 = _mm_max_epi16(bit_met_m5_m3, bit_met_m7_m3);
-    xmm4 = _mm_max_epi16(xmm0, xmm1);
-    xmm5 = _mm_max_epi16(xmm2, xmm3);
-    logmax_den_re0 = _mm_max_epi16(logmax_den_re0, xmm4);
-    logmax_den_re0 = _mm_max_epi16(logmax_den_re0, xmm5);
-    xmm0 = _mm_max_epi16(bit_met_p7_m5, bit_met_p5_m5);
-    xmm1 = _mm_max_epi16(bit_met_p3_m5, bit_met_p1_m5);
-    xmm2 = _mm_max_epi16(bit_met_m1_m5, bit_met_m3_m5);
-    xmm3 = _mm_max_epi16(bit_met_m5_m5, bit_met_m7_m5);
-    xmm4 = _mm_max_epi16(xmm0, xmm1);
-    xmm5 = _mm_max_epi16(xmm2, xmm3);
-    logmax_den_re0 = _mm_max_epi16(logmax_den_re0, xmm4);
-    logmax_den_re0 = _mm_max_epi16(logmax_den_re0, xmm5);
-    xmm0 = _mm_max_epi16(bit_met_p7_m7, bit_met_p5_m7);
-    xmm1 = _mm_max_epi16(bit_met_p3_m7, bit_met_p1_m7);
-    xmm2 = _mm_max_epi16(bit_met_m1_m7, bit_met_m3_m7);
-    xmm3 = _mm_max_epi16(bit_met_m5_m7, bit_met_m7_m7);
-    xmm4 = _mm_max_epi16(xmm0, xmm1);
-    xmm5 = _mm_max_epi16(xmm2, xmm3);
-    logmax_den_re0 = _mm_max_epi16(logmax_den_re0, xmm4);
-    logmax_den_re0 = _mm_max_epi16(logmax_den_re0, xmm5);
+    xmm0 = simde_mm_max_epi16(bit_met_p7_m1, bit_met_p5_m1);
+    xmm1 = simde_mm_max_epi16(bit_met_p3_m1, bit_met_p1_m1);
+    xmm2 = simde_mm_max_epi16(bit_met_m1_m1, bit_met_m3_m1);
+    xmm3 = simde_mm_max_epi16(bit_met_m5_m1, bit_met_m7_m1);
+    xmm4 = simde_mm_max_epi16(xmm0, xmm1);
+    xmm5 = simde_mm_max_epi16(xmm2, xmm3);
+    logmax_den_re0 = simde_mm_max_epi16(xmm4, xmm5);
+    xmm0 = simde_mm_max_epi16(bit_met_p7_m3, bit_met_p5_m3);
+    xmm1 = simde_mm_max_epi16(bit_met_p3_m3, bit_met_p1_m3);
+    xmm2 = simde_mm_max_epi16(bit_met_m1_m3, bit_met_m3_m3);
+    xmm3 = simde_mm_max_epi16(bit_met_m5_m3, bit_met_m7_m3);
+    xmm4 = simde_mm_max_epi16(xmm0, xmm1);
+    xmm5 = simde_mm_max_epi16(xmm2, xmm3);
+    logmax_den_re0 = simde_mm_max_epi16(logmax_den_re0, xmm4);
+    logmax_den_re0 = simde_mm_max_epi16(logmax_den_re0, xmm5);
+    xmm0 = simde_mm_max_epi16(bit_met_p7_m5, bit_met_p5_m5);
+    xmm1 = simde_mm_max_epi16(bit_met_p3_m5, bit_met_p1_m5);
+    xmm2 = simde_mm_max_epi16(bit_met_m1_m5, bit_met_m3_m5);
+    xmm3 = simde_mm_max_epi16(bit_met_m5_m5, bit_met_m7_m5);
+    xmm4 = simde_mm_max_epi16(xmm0, xmm1);
+    xmm5 = simde_mm_max_epi16(xmm2, xmm3);
+    logmax_den_re0 = simde_mm_max_epi16(logmax_den_re0, xmm4);
+    logmax_den_re0 = simde_mm_max_epi16(logmax_den_re0, xmm5);
+    xmm0 = simde_mm_max_epi16(bit_met_p7_m7, bit_met_p5_m7);
+    xmm1 = simde_mm_max_epi16(bit_met_p3_m7, bit_met_p1_m7);
+    xmm2 = simde_mm_max_epi16(bit_met_m1_m7, bit_met_m3_m7);
+    xmm3 = simde_mm_max_epi16(bit_met_m5_m7, bit_met_m7_m7);
+    xmm4 = simde_mm_max_epi16(xmm0, xmm1);
+    xmm5 = simde_mm_max_epi16(xmm2, xmm3);
+    logmax_den_re0 = simde_mm_max_epi16(logmax_den_re0, xmm4);
+    logmax_den_re0 = simde_mm_max_epi16(logmax_den_re0, xmm5);
 
     // bit = 0
-    xmm0 = _mm_max_epi16(bit_met_p7_p1, bit_met_p5_p1);
-    xmm1 = _mm_max_epi16(bit_met_p3_p1, bit_met_p1_p1);
-    xmm2 = _mm_max_epi16(bit_met_m1_p1, bit_met_m3_p1);
-    xmm3 = _mm_max_epi16(bit_met_m5_p1, bit_met_m7_p1);
-    xmm4 = _mm_max_epi16(xmm0, xmm1);
-    xmm5 = _mm_max_epi16(xmm2, xmm3);
-    logmax_num_re0 = _mm_max_epi16(xmm4, xmm5);
-    xmm0 = _mm_max_epi16(bit_met_p7_p3, bit_met_p5_p3);
-    xmm1 = _mm_max_epi16(bit_met_p3_p3, bit_met_p1_p3);
-    xmm2 = _mm_max_epi16(bit_met_m1_p3, bit_met_m3_p3);
-    xmm3 = _mm_max_epi16(bit_met_m5_p3, bit_met_m7_p3);
-    xmm4 = _mm_max_epi16(xmm0, xmm1);
-    xmm5 = _mm_max_epi16(xmm2, xmm3);
-    logmax_num_re0 = _mm_max_epi16(logmax_num_re0, xmm4);
-    logmax_num_re0 = _mm_max_epi16(logmax_num_re0, xmm5);
-    xmm0 = _mm_max_epi16(bit_met_p7_p5, bit_met_p5_p5);
-    xmm1 = _mm_max_epi16(bit_met_p3_p5, bit_met_p1_p5);
-    xmm2 = _mm_max_epi16(bit_met_m1_p5, bit_met_m3_p5);
-    xmm3 = _mm_max_epi16(bit_met_m5_p5, bit_met_m7_p5);
-    xmm4 = _mm_max_epi16(xmm0, xmm1);
-    xmm5 = _mm_max_epi16(xmm2, xmm3);
-    logmax_num_re0 = _mm_max_epi16(logmax_num_re0, xmm4);
-    logmax_num_re0 = _mm_max_epi16(logmax_num_re0, xmm5);
-    xmm0 = _mm_max_epi16(bit_met_p7_p7, bit_met_p5_p7);
-    xmm1 = _mm_max_epi16(bit_met_p3_p7, bit_met_p1_p7);
-    xmm2 = _mm_max_epi16(bit_met_m1_p7, bit_met_m3_p7);
-    xmm3 = _mm_max_epi16(bit_met_m5_p7, bit_met_m7_p7);
-    xmm4 = _mm_max_epi16(xmm0, xmm1);
-    xmm5 = _mm_max_epi16(xmm2, xmm3);
-    logmax_num_re0 = _mm_max_epi16(logmax_num_re0, xmm4);
-    logmax_num_re0 = _mm_max_epi16(logmax_num_re0, xmm5);
-
-    y1r = _mm_subs_epi16(logmax_num_re0, logmax_den_re0);
+    xmm0 = simde_mm_max_epi16(bit_met_p7_p1, bit_met_p5_p1);
+    xmm1 = simde_mm_max_epi16(bit_met_p3_p1, bit_met_p1_p1);
+    xmm2 = simde_mm_max_epi16(bit_met_m1_p1, bit_met_m3_p1);
+    xmm3 = simde_mm_max_epi16(bit_met_m5_p1, bit_met_m7_p1);
+    xmm4 = simde_mm_max_epi16(xmm0, xmm1);
+    xmm5 = simde_mm_max_epi16(xmm2, xmm3);
+    logmax_num_re0 = simde_mm_max_epi16(xmm4, xmm5);
+    xmm0 = simde_mm_max_epi16(bit_met_p7_p3, bit_met_p5_p3);
+    xmm1 = simde_mm_max_epi16(bit_met_p3_p3, bit_met_p1_p3);
+    xmm2 = simde_mm_max_epi16(bit_met_m1_p3, bit_met_m3_p3);
+    xmm3 = simde_mm_max_epi16(bit_met_m5_p3, bit_met_m7_p3);
+    xmm4 = simde_mm_max_epi16(xmm0, xmm1);
+    xmm5 = simde_mm_max_epi16(xmm2, xmm3);
+    logmax_num_re0 = simde_mm_max_epi16(logmax_num_re0, xmm4);
+    logmax_num_re0 = simde_mm_max_epi16(logmax_num_re0, xmm5);
+    xmm0 = simde_mm_max_epi16(bit_met_p7_p5, bit_met_p5_p5);
+    xmm1 = simde_mm_max_epi16(bit_met_p3_p5, bit_met_p1_p5);
+    xmm2 = simde_mm_max_epi16(bit_met_m1_p5, bit_met_m3_p5);
+    xmm3 = simde_mm_max_epi16(bit_met_m5_p5, bit_met_m7_p5);
+    xmm4 = simde_mm_max_epi16(xmm0, xmm1);
+    xmm5 = simde_mm_max_epi16(xmm2, xmm3);
+    logmax_num_re0 = simde_mm_max_epi16(logmax_num_re0, xmm4);
+    logmax_num_re0 = simde_mm_max_epi16(logmax_num_re0, xmm5);
+    xmm0 = simde_mm_max_epi16(bit_met_p7_p7, bit_met_p5_p7);
+    xmm1 = simde_mm_max_epi16(bit_met_p3_p7, bit_met_p1_p7);
+    xmm2 = simde_mm_max_epi16(bit_met_m1_p7, bit_met_m3_p7);
+    xmm3 = simde_mm_max_epi16(bit_met_m5_p7, bit_met_m7_p7);
+    xmm4 = simde_mm_max_epi16(xmm0, xmm1);
+    xmm5 = simde_mm_max_epi16(xmm2, xmm3);
+    logmax_num_re0 = simde_mm_max_epi16(logmax_num_re0, xmm4);
+    logmax_num_re0 = simde_mm_max_epi16(logmax_num_re0, xmm5);
+
+    y1r = simde_mm_subs_epi16(logmax_num_re0, logmax_den_re0);
 
     // Detection for 3rd bit (LTE mapping)
-    xmm0 = _mm_max_epi16(bit_met_m7_m7, bit_met_m7_m5);
-    xmm1 = _mm_max_epi16(bit_met_m7_m3, bit_met_m7_m1);
-    xmm2 = _mm_max_epi16(bit_met_m7_p1, bit_met_m7_p3);
-    xmm3 = _mm_max_epi16(bit_met_m7_p5, bit_met_m7_p7);
-    xmm4 = _mm_max_epi16(xmm0, xmm1);
-    xmm5 = _mm_max_epi16(xmm2, xmm3);
-    logmax_den_re0 = _mm_max_epi16(xmm4, xmm5);
-    xmm0 = _mm_max_epi16(bit_met_m5_m7, bit_met_m5_m5);
-    xmm1 = _mm_max_epi16(bit_met_m5_m3, bit_met_m5_m1);
-    xmm2 = _mm_max_epi16(bit_met_m5_p1, bit_met_m5_p3);
-    xmm3 = _mm_max_epi16(bit_met_m5_p5, bit_met_m5_p7);
-    xmm4 = _mm_max_epi16(xmm0, xmm1);
-    xmm5 = _mm_max_epi16(xmm2, xmm3);
-    logmax_den_re0 = _mm_max_epi16(logmax_den_re0, xmm4);
-    logmax_den_re0 = _mm_max_epi16(logmax_den_re0, xmm5);
-    xmm0 = _mm_max_epi16(bit_met_p5_m7, bit_met_p5_m5);
-    xmm1 = _mm_max_epi16(bit_met_p5_m3, bit_met_p5_m1);
-    xmm2 = _mm_max_epi16(bit_met_p5_p1, bit_met_p5_p3);
-    xmm3 = _mm_max_epi16(bit_met_p5_p5, bit_met_p5_p7);
-    xmm4 = _mm_max_epi16(xmm0, xmm1);
-    xmm5 = _mm_max_epi16(xmm2, xmm3);
-    logmax_den_re0 = _mm_max_epi16(logmax_den_re0, xmm4);
-    logmax_den_re0 = _mm_max_epi16(logmax_den_re0, xmm5);
-    xmm0 = _mm_max_epi16(bit_met_p7_m7, bit_met_p7_m5);
-    xmm1 = _mm_max_epi16(bit_met_p7_m3, bit_met_p7_m1);
-    xmm2 = _mm_max_epi16(bit_met_p7_p1, bit_met_p7_p3);
-    xmm3 = _mm_max_epi16(bit_met_p7_p5, bit_met_p7_p7);
-    xmm4 = _mm_max_epi16(xmm0, xmm1);
-    xmm5 = _mm_max_epi16(xmm2, xmm3);
-    logmax_den_re0 = _mm_max_epi16(logmax_den_re0, xmm4);
-    logmax_den_re0 = _mm_max_epi16(logmax_den_re0, xmm5);
-
-    xmm0 = _mm_max_epi16(bit_met_m3_m7, bit_met_m3_m5);
-    xmm1 = _mm_max_epi16(bit_met_m3_m3, bit_met_m3_m1);
-    xmm2 = _mm_max_epi16(bit_met_m3_p1, bit_met_m3_p3);
-    xmm3 = _mm_max_epi16(bit_met_m3_p5, bit_met_m3_p7);
-    xmm4 = _mm_max_epi16(xmm0, xmm1);
-    xmm5 = _mm_max_epi16(xmm2, xmm3);
-    logmax_num_re0 = _mm_max_epi16(xmm4, xmm5);
-    xmm0 = _mm_max_epi16(bit_met_m1_m7, bit_met_m1_m5);
-    xmm1 = _mm_max_epi16(bit_met_m1_m3, bit_met_m1_m1);
-    xmm2 = _mm_max_epi16(bit_met_m1_p1, bit_met_m1_p3);
-    xmm3 = _mm_max_epi16(bit_met_m1_p5, bit_met_m1_p7);
-    xmm4 = _mm_max_epi16(xmm0, xmm1);
-    xmm5 = _mm_max_epi16(xmm2, xmm3);
-    logmax_num_re0 = _mm_max_epi16(logmax_num_re0, xmm4);
-    logmax_num_re0 = _mm_max_epi16(logmax_num_re0, xmm5);
-    xmm0 = _mm_max_epi16(bit_met_p1_m7, bit_met_p1_m5);
-    xmm1 = _mm_max_epi16(bit_met_p1_m3, bit_met_p1_m1);
-    xmm2 = _mm_max_epi16(bit_met_p1_p1, bit_met_p1_p3);
-    xmm3 = _mm_max_epi16(bit_met_p1_p5, bit_met_p1_p7);
-    xmm4 = _mm_max_epi16(xmm0, xmm1);
-    xmm5 = _mm_max_epi16(xmm2, xmm3);
-    logmax_num_re0 = _mm_max_epi16(logmax_num_re0, xmm4);
-    logmax_num_re0 = _mm_max_epi16(logmax_num_re0, xmm5);
-    xmm0 = _mm_max_epi16(bit_met_p3_m7, bit_met_p3_m5);
-    xmm1 = _mm_max_epi16(bit_met_p3_m3, bit_met_p3_m1);
-    xmm2 = _mm_max_epi16(bit_met_p3_p1, bit_met_p3_p3);
-    xmm3 = _mm_max_epi16(bit_met_p3_p5, bit_met_p3_p7);
-    xmm4 = _mm_max_epi16(xmm0, xmm1);
-    xmm5 = _mm_max_epi16(xmm2, xmm3);
-    logmax_num_re0 = _mm_max_epi16(logmax_num_re0, xmm4);
-    logmax_num_re0 = _mm_max_epi16(logmax_num_re0, xmm5);
-
-    simde__m128i y2r = _mm_subs_epi16(logmax_num_re0, logmax_den_re0);
+    xmm0 = simde_mm_max_epi16(bit_met_m7_m7, bit_met_m7_m5);
+    xmm1 = simde_mm_max_epi16(bit_met_m7_m3, bit_met_m7_m1);
+    xmm2 = simde_mm_max_epi16(bit_met_m7_p1, bit_met_m7_p3);
+    xmm3 = simde_mm_max_epi16(bit_met_m7_p5, bit_met_m7_p7);
+    xmm4 = simde_mm_max_epi16(xmm0, xmm1);
+    xmm5 = simde_mm_max_epi16(xmm2, xmm3);
+    logmax_den_re0 = simde_mm_max_epi16(xmm4, xmm5);
+    xmm0 = simde_mm_max_epi16(bit_met_m5_m7, bit_met_m5_m5);
+    xmm1 = simde_mm_max_epi16(bit_met_m5_m3, bit_met_m5_m1);
+    xmm2 = simde_mm_max_epi16(bit_met_m5_p1, bit_met_m5_p3);
+    xmm3 = simde_mm_max_epi16(bit_met_m5_p5, bit_met_m5_p7);
+    xmm4 = simde_mm_max_epi16(xmm0, xmm1);
+    xmm5 = simde_mm_max_epi16(xmm2, xmm3);
+    logmax_den_re0 = simde_mm_max_epi16(logmax_den_re0, xmm4);
+    logmax_den_re0 = simde_mm_max_epi16(logmax_den_re0, xmm5);
+    xmm0 = simde_mm_max_epi16(bit_met_p5_m7, bit_met_p5_m5);
+    xmm1 = simde_mm_max_epi16(bit_met_p5_m3, bit_met_p5_m1);
+    xmm2 = simde_mm_max_epi16(bit_met_p5_p1, bit_met_p5_p3);
+    xmm3 = simde_mm_max_epi16(bit_met_p5_p5, bit_met_p5_p7);
+    xmm4 = simde_mm_max_epi16(xmm0, xmm1);
+    xmm5 = simde_mm_max_epi16(xmm2, xmm3);
+    logmax_den_re0 = simde_mm_max_epi16(logmax_den_re0, xmm4);
+    logmax_den_re0 = simde_mm_max_epi16(logmax_den_re0, xmm5);
+    xmm0 = simde_mm_max_epi16(bit_met_p7_m7, bit_met_p7_m5);
+    xmm1 = simde_mm_max_epi16(bit_met_p7_m3, bit_met_p7_m1);
+    xmm2 = simde_mm_max_epi16(bit_met_p7_p1, bit_met_p7_p3);
+    xmm3 = simde_mm_max_epi16(bit_met_p7_p5, bit_met_p7_p7);
+    xmm4 = simde_mm_max_epi16(xmm0, xmm1);
+    xmm5 = simde_mm_max_epi16(xmm2, xmm3);
+    logmax_den_re0 = simde_mm_max_epi16(logmax_den_re0, xmm4);
+    logmax_den_re0 = simde_mm_max_epi16(logmax_den_re0, xmm5);
+
+    xmm0 = simde_mm_max_epi16(bit_met_m3_m7, bit_met_m3_m5);
+    xmm1 = simde_mm_max_epi16(bit_met_m3_m3, bit_met_m3_m1);
+    xmm2 = simde_mm_max_epi16(bit_met_m3_p1, bit_met_m3_p3);
+    xmm3 = simde_mm_max_epi16(bit_met_m3_p5, bit_met_m3_p7);
+    xmm4 = simde_mm_max_epi16(xmm0, xmm1);
+    xmm5 = simde_mm_max_epi16(xmm2, xmm3);
+    logmax_num_re0 = simde_mm_max_epi16(xmm4, xmm5);
+    xmm0 = simde_mm_max_epi16(bit_met_m1_m7, bit_met_m1_m5);
+    xmm1 = simde_mm_max_epi16(bit_met_m1_m3, bit_met_m1_m1);
+    xmm2 = simde_mm_max_epi16(bit_met_m1_p1, bit_met_m1_p3);
+    xmm3 = simde_mm_max_epi16(bit_met_m1_p5, bit_met_m1_p7);
+    xmm4 = simde_mm_max_epi16(xmm0, xmm1);
+    xmm5 = simde_mm_max_epi16(xmm2, xmm3);
+    logmax_num_re0 = simde_mm_max_epi16(logmax_num_re0, xmm4);
+    logmax_num_re0 = simde_mm_max_epi16(logmax_num_re0, xmm5);
+    xmm0 = simde_mm_max_epi16(bit_met_p1_m7, bit_met_p1_m5);
+    xmm1 = simde_mm_max_epi16(bit_met_p1_m3, bit_met_p1_m1);
+    xmm2 = simde_mm_max_epi16(bit_met_p1_p1, bit_met_p1_p3);
+    xmm3 = simde_mm_max_epi16(bit_met_p1_p5, bit_met_p1_p7);
+    xmm4 = simde_mm_max_epi16(xmm0, xmm1);
+    xmm5 = simde_mm_max_epi16(xmm2, xmm3);
+    logmax_num_re0 = simde_mm_max_epi16(logmax_num_re0, xmm4);
+    logmax_num_re0 = simde_mm_max_epi16(logmax_num_re0, xmm5);
+    xmm0 = simde_mm_max_epi16(bit_met_p3_m7, bit_met_p3_m5);
+    xmm1 = simde_mm_max_epi16(bit_met_p3_m3, bit_met_p3_m1);
+    xmm2 = simde_mm_max_epi16(bit_met_p3_p1, bit_met_p3_p3);
+    xmm3 = simde_mm_max_epi16(bit_met_p3_p5, bit_met_p3_p7);
+    xmm4 = simde_mm_max_epi16(xmm0, xmm1);
+    xmm5 = simde_mm_max_epi16(xmm2, xmm3);
+    logmax_num_re0 = simde_mm_max_epi16(logmax_num_re0, xmm4);
+    logmax_num_re0 = simde_mm_max_epi16(logmax_num_re0, xmm5);
+
+    simde__m128i y2r = simde_mm_subs_epi16(logmax_num_re0, logmax_den_re0);
 
     // Detection for 4th bit (LTE mapping)
-    xmm0 = _mm_max_epi16(bit_met_p7_p7, bit_met_p5_p7);
-    xmm1 = _mm_max_epi16(bit_met_p3_p7, bit_met_p1_p7);
-    xmm2 = _mm_max_epi16(bit_met_m1_p7, bit_met_m3_p7);
-    xmm3 = _mm_max_epi16(bit_met_m5_p7, bit_met_m7_p7);
-    xmm4 = _mm_max_epi16(xmm0, xmm1);
-    xmm5 = _mm_max_epi16(xmm2, xmm3);
-    logmax_den_re0 = _mm_max_epi16(xmm4, xmm5);
-    xmm0 = _mm_max_epi16(bit_met_p7_p5, bit_met_p5_p5);
-    xmm1 = _mm_max_epi16(bit_met_p3_p5, bit_met_p1_p5);
-    xmm2 = _mm_max_epi16(bit_met_m1_p5, bit_met_m3_p5);
-    xmm3 = _mm_max_epi16(bit_met_m5_p5, bit_met_m5_p5);
-    xmm4 = _mm_max_epi16(xmm0, xmm1);
-    xmm5 = _mm_max_epi16(xmm2, xmm3);
-    logmax_den_re0 = _mm_max_epi16(logmax_den_re0, xmm4);
-    logmax_den_re0 = _mm_max_epi16(logmax_den_re0, xmm5);
-    xmm0 = _mm_max_epi16(bit_met_p7_m5, bit_met_p5_m5);
-    xmm1 = _mm_max_epi16(bit_met_p3_m5, bit_met_p1_m5);
-    xmm2 = _mm_max_epi16(bit_met_m1_m5, bit_met_m3_m5);
-    xmm3 = _mm_max_epi16(bit_met_m5_m5, bit_met_m7_m5);
-    xmm4 = _mm_max_epi16(xmm0, xmm1);
-    xmm5 = _mm_max_epi16(xmm2, xmm3);
-    logmax_den_re0 = _mm_max_epi16(logmax_den_re0, xmm4);
-    logmax_den_re0 = _mm_max_epi16(logmax_den_re0, xmm5);
-    xmm0 = _mm_max_epi16(bit_met_p7_m7, bit_met_p5_m7);
-    xmm1 = _mm_max_epi16(bit_met_p3_m7, bit_met_p1_m7);
-    xmm2 = _mm_max_epi16(bit_met_m1_m7, bit_met_m3_m7);
-    xmm3 = _mm_max_epi16(bit_met_m5_m7, bit_met_m7_m7);
-    xmm4 = _mm_max_epi16(xmm0, xmm1);
-    xmm5 = _mm_max_epi16(xmm2, xmm3);
-    logmax_den_re0 = _mm_max_epi16(logmax_den_re0, xmm4);
-    logmax_den_re0 = _mm_max_epi16(logmax_den_re0, xmm5);
-
-    xmm0 = _mm_max_epi16(bit_met_p7_m1, bit_met_p5_m1);
-    xmm1 = _mm_max_epi16(bit_met_p3_m1, bit_met_p1_m1);
-    xmm2 = _mm_max_epi16(bit_met_m1_m1, bit_met_m3_m1);
-    xmm3 = _mm_max_epi16(bit_met_m5_m1, bit_met_m7_m1);
-    xmm4 = _mm_max_epi16(xmm0, xmm1);
-    xmm5 = _mm_max_epi16(xmm2, xmm3);
-    logmax_num_re0 = _mm_max_epi16(xmm4, xmm5);
-    xmm0 = _mm_max_epi16(bit_met_p7_m3, bit_met_p5_m3);
-    xmm1 = _mm_max_epi16(bit_met_p3_m3, bit_met_p1_m3);
-    xmm2 = _mm_max_epi16(bit_met_m1_m3, bit_met_m3_m3);
-    xmm3 = _mm_max_epi16(bit_met_m5_m3, bit_met_m7_m3);
-    xmm4 = _mm_max_epi16(xmm0, xmm1);
-    xmm5 = _mm_max_epi16(xmm2, xmm3);
-    logmax_num_re0 = _mm_max_epi16(logmax_num_re0, xmm4);
-    logmax_num_re0 = _mm_max_epi16(logmax_num_re0, xmm5);
-    xmm0 = _mm_max_epi16(bit_met_p7_p1, bit_met_p5_p1);
-    xmm1 = _mm_max_epi16(bit_met_p3_p1, bit_met_p1_p1);
-    xmm2 = _mm_max_epi16(bit_met_m1_p1, bit_met_m3_p1);
-    xmm3 = _mm_max_epi16(bit_met_m5_p1, bit_met_m7_p1);
-    xmm4 = _mm_max_epi16(xmm0, xmm1);
-    xmm5 = _mm_max_epi16(xmm2, xmm3);
-    logmax_num_re0 = _mm_max_epi16(logmax_num_re0, xmm4);
-    logmax_num_re0 = _mm_max_epi16(logmax_num_re0, xmm5);
-    xmm0 = _mm_max_epi16(bit_met_p7_p3, bit_met_p5_p3);
-    xmm1 = _mm_max_epi16(bit_met_p3_p3, bit_met_p1_p3);
-    xmm2 = _mm_max_epi16(bit_met_m1_p3, bit_met_m3_p3);
-    xmm3 = _mm_max_epi16(bit_met_m5_p3, bit_met_m7_p3);
-    xmm4 = _mm_max_epi16(xmm0, xmm1);
-    xmm5 = _mm_max_epi16(xmm2, xmm3);
-    logmax_num_re0 = _mm_max_epi16(logmax_num_re0, xmm4);
-    logmax_num_re0 = _mm_max_epi16(logmax_num_re0, xmm5);
-
-    y0i = _mm_subs_epi16(logmax_num_re0, logmax_den_re0);
+    xmm0 = simde_mm_max_epi16(bit_met_p7_p7, bit_met_p5_p7);
+    xmm1 = simde_mm_max_epi16(bit_met_p3_p7, bit_met_p1_p7);
+    xmm2 = simde_mm_max_epi16(bit_met_m1_p7, bit_met_m3_p7);
+    xmm3 = simde_mm_max_epi16(bit_met_m5_p7, bit_met_m7_p7);
+    xmm4 = simde_mm_max_epi16(xmm0, xmm1);
+    xmm5 = simde_mm_max_epi16(xmm2, xmm3);
+    logmax_den_re0 = simde_mm_max_epi16(xmm4, xmm5);
+    xmm0 = simde_mm_max_epi16(bit_met_p7_p5, bit_met_p5_p5);
+    xmm1 = simde_mm_max_epi16(bit_met_p3_p5, bit_met_p1_p5);
+    xmm2 = simde_mm_max_epi16(bit_met_m1_p5, bit_met_m3_p5);
+    xmm3 = simde_mm_max_epi16(bit_met_m5_p5, bit_met_m7_p5);
+    xmm4 = simde_mm_max_epi16(xmm0, xmm1);
+    xmm5 = simde_mm_max_epi16(xmm2, xmm3);
+    logmax_den_re0 = simde_mm_max_epi16(logmax_den_re0, xmm4);
+    logmax_den_re0 = simde_mm_max_epi16(logmax_den_re0, xmm5);
+    xmm0 = simde_mm_max_epi16(bit_met_p7_m5, bit_met_p5_m5);
+    xmm1 = simde_mm_max_epi16(bit_met_p3_m5, bit_met_p1_m5);
+    xmm2 = simde_mm_max_epi16(bit_met_m1_m5, bit_met_m3_m5);
+    xmm3 = simde_mm_max_epi16(bit_met_m5_m5, bit_met_m7_m5);
+    xmm4 = simde_mm_max_epi16(xmm0, xmm1);
+    xmm5 = simde_mm_max_epi16(xmm2, xmm3);
+    logmax_den_re0 = simde_mm_max_epi16(logmax_den_re0, xmm4);
+    logmax_den_re0 = simde_mm_max_epi16(logmax_den_re0, xmm5);
+    xmm0 = simde_mm_max_epi16(bit_met_p7_m7, bit_met_p5_m7);
+    xmm1 = simde_mm_max_epi16(bit_met_p3_m7, bit_met_p1_m7);
+    xmm2 = simde_mm_max_epi16(bit_met_m1_m7, bit_met_m3_m7);
+    xmm3 = simde_mm_max_epi16(bit_met_m5_m7, bit_met_m7_m7);
+    xmm4 = simde_mm_max_epi16(xmm0, xmm1);
+    xmm5 = simde_mm_max_epi16(xmm2, xmm3);
+    logmax_den_re0 = simde_mm_max_epi16(logmax_den_re0, xmm4);
+    logmax_den_re0 = simde_mm_max_epi16(logmax_den_re0, xmm5);
+
+    xmm0 = simde_mm_max_epi16(bit_met_p7_m1, bit_met_p5_m1);
+    xmm1 = simde_mm_max_epi16(bit_met_p3_m1, bit_met_p1_m1);
+    xmm2 = simde_mm_max_epi16(bit_met_m1_m1, bit_met_m3_m1);
+    xmm3 = simde_mm_max_epi16(bit_met_m5_m1, bit_met_m7_m1);
+    xmm4 = simde_mm_max_epi16(xmm0, xmm1);
+    xmm5 = simde_mm_max_epi16(xmm2, xmm3);
+    logmax_num_re0 = simde_mm_max_epi16(xmm4, xmm5);
+    xmm0 = simde_mm_max_epi16(bit_met_p7_m3, bit_met_p5_m3);
+    xmm1 = simde_mm_max_epi16(bit_met_p3_m3, bit_met_p1_m3);
+    xmm2 = simde_mm_max_epi16(bit_met_m1_m3, bit_met_m3_m3);
+    xmm3 = simde_mm_max_epi16(bit_met_m5_m3, bit_met_m7_m3);
+    xmm4 = simde_mm_max_epi16(xmm0, xmm1);
+    xmm5 = simde_mm_max_epi16(xmm2, xmm3);
+    logmax_num_re0 = simde_mm_max_epi16(logmax_num_re0, xmm4);
+    logmax_num_re0 = simde_mm_max_epi16(logmax_num_re0, xmm5);
+    xmm0 = simde_mm_max_epi16(bit_met_p7_p1, bit_met_p5_p1);
+    xmm1 = simde_mm_max_epi16(bit_met_p3_p1, bit_met_p1_p1);
+    xmm2 = simde_mm_max_epi16(bit_met_m1_p1, bit_met_m3_p1);
+    xmm3 = simde_mm_max_epi16(bit_met_m5_p1, bit_met_m7_p1);
+    xmm4 = simde_mm_max_epi16(xmm0, xmm1);
+    xmm5 = simde_mm_max_epi16(xmm2, xmm3);
+    logmax_num_re0 = simde_mm_max_epi16(logmax_num_re0, xmm4);
+    logmax_num_re0 = simde_mm_max_epi16(logmax_num_re0, xmm5);
+    xmm0 = simde_mm_max_epi16(bit_met_p7_p3, bit_met_p5_p3);
+    xmm1 = simde_mm_max_epi16(bit_met_p3_p3, bit_met_p1_p3);
+    xmm2 = simde_mm_max_epi16(bit_met_m1_p3, bit_met_m3_p3);
+    xmm3 = simde_mm_max_epi16(bit_met_m5_p3, bit_met_m7_p3);
+    xmm4 = simde_mm_max_epi16(xmm0, xmm1);
+    xmm5 = simde_mm_max_epi16(xmm2, xmm3);
+    logmax_num_re0 = simde_mm_max_epi16(logmax_num_re0, xmm4);
+    logmax_num_re0 = simde_mm_max_epi16(logmax_num_re0, xmm5);
+
+    y0i = simde_mm_subs_epi16(logmax_num_re0, logmax_den_re0);
 
 
     // Detection for 5th bit (LTE mapping)
-    xmm0 = _mm_max_epi16(bit_met_m7_m7, bit_met_m7_m5);
-    xmm1 = _mm_max_epi16(bit_met_m7_m3, bit_met_m7_m1);
-    xmm2 = _mm_max_epi16(bit_met_m7_p1, bit_met_m7_p3);
-    xmm3 = _mm_max_epi16(bit_met_m7_p5, bit_met_m7_p7);
-    xmm4 = _mm_max_epi16(xmm0, xmm1);
-    xmm5 = _mm_max_epi16(xmm2, xmm3);
-    logmax_den_re0 = _mm_max_epi16(xmm4, xmm5);
-    xmm0 = _mm_max_epi16(bit_met_m1_m7, bit_met_m1_m5);
-    xmm1 = _mm_max_epi16(bit_met_m1_m3, bit_met_m1_m1);
-    xmm2 = _mm_max_epi16(bit_met_m1_p1, bit_met_m1_p3);
-    xmm3 = _mm_max_epi16(bit_met_m1_p5, bit_met_m1_p7);
-    xmm4 = _mm_max_epi16(xmm0, xmm1);
-    xmm5 = _mm_max_epi16(xmm2, xmm3);
-    logmax_den_re0 = _mm_max_epi16(logmax_den_re0, xmm4);
-    logmax_den_re0 = _mm_max_epi16(logmax_den_re0, xmm5);
-    xmm0 = _mm_max_epi16(bit_met_p1_m7, bit_met_p1_m5);
-    xmm1 = _mm_max_epi16(bit_met_p1_m3, bit_met_p1_m1);
-    xmm2 = _mm_max_epi16(bit_met_p1_p1, bit_met_p1_p3);
-    xmm3 = _mm_max_epi16(bit_met_p1_p5, bit_met_p1_p7);
-    xmm4 = _mm_max_epi16(xmm0, xmm1);
-    xmm5 = _mm_max_epi16(xmm2, xmm3);
-    logmax_den_re0 = _mm_max_epi16(logmax_den_re0, xmm4);
-    logmax_den_re0 = _mm_max_epi16(logmax_den_re0, xmm5);
-    xmm0 = _mm_max_epi16(bit_met_p7_m7, bit_met_p7_m5);
-    xmm1 = _mm_max_epi16(bit_met_p7_m3, bit_met_p7_m1);
-    xmm2 = _mm_max_epi16(bit_met_p7_p1, bit_met_p7_p3);
-    xmm3 = _mm_max_epi16(bit_met_p7_p5, bit_met_p7_p7);
-    xmm4 = _mm_max_epi16(xmm0, xmm1);
-    xmm5 = _mm_max_epi16(xmm2, xmm3);
-    logmax_den_re0 = _mm_max_epi16(logmax_den_re0, xmm4);
-    logmax_den_re0 = _mm_max_epi16(logmax_den_re0, xmm5);
-
-    xmm0 = _mm_max_epi16(bit_met_m5_m7, bit_met_m5_m5);
-    xmm1 = _mm_max_epi16(bit_met_m5_m3, bit_met_m5_m1);
-    xmm2 = _mm_max_epi16(bit_met_m5_p1, bit_met_m5_p3);
-    xmm3 = _mm_max_epi16(bit_met_m5_p5, bit_met_m5_p7);
-    xmm4 = _mm_max_epi16(xmm0, xmm1);
-    xmm5 = _mm_max_epi16(xmm2, xmm3);
-    logmax_num_re0 = _mm_max_epi16(xmm4, xmm5);
-    xmm0 = _mm_max_epi16(bit_met_m3_m7, bit_met_m3_m5);
-    xmm1 = _mm_max_epi16(bit_met_m3_m3, bit_met_m3_m1);
-    xmm2 = _mm_max_epi16(bit_met_m3_p1, bit_met_m3_p3);
-    xmm3 = _mm_max_epi16(bit_met_m3_p5, bit_met_m3_p7);
-    xmm4 = _mm_max_epi16(xmm0, xmm1);
-    xmm5 = _mm_max_epi16(xmm2, xmm3);
-    logmax_num_re0 = _mm_max_epi16(logmax_num_re0, xmm4);
-    logmax_num_re0 = _mm_max_epi16(logmax_num_re0, xmm5);
-    xmm0 = _mm_max_epi16(bit_met_p3_m7, bit_met_p3_m5);
-    xmm1 = _mm_max_epi16(bit_met_p3_m3, bit_met_p3_m1);
-    xmm2 = _mm_max_epi16(bit_met_p3_p1, bit_met_p3_p3);
-    xmm3 = _mm_max_epi16(bit_met_p3_p5, bit_met_p3_p7);
-    xmm4 = _mm_max_epi16(xmm0, xmm1);
-    xmm5 = _mm_max_epi16(xmm2, xmm3);
-    logmax_num_re0 = _mm_max_epi16(logmax_num_re0, xmm4);
-    logmax_num_re0 = _mm_max_epi16(logmax_num_re0, xmm5);
-    xmm0 = _mm_max_epi16(bit_met_p5_m7, bit_met_p5_m5);
-    xmm1 = _mm_max_epi16(bit_met_p5_m3, bit_met_p5_m1);
-    xmm2 = _mm_max_epi16(bit_met_p5_p1, bit_met_p5_p3);
-    xmm3 = _mm_max_epi16(bit_met_p5_p5, bit_met_p5_p7);
-    xmm4 = _mm_max_epi16(xmm0, xmm1);
-    xmm5 = _mm_max_epi16(xmm2, xmm3);
-    logmax_num_re0 = _mm_max_epi16(logmax_num_re0, xmm4);
-    logmax_num_re0 = _mm_max_epi16(logmax_num_re0, xmm5);
-
-    y1i = _mm_subs_epi16(logmax_num_re0, logmax_den_re0);
+    xmm0 = simde_mm_max_epi16(bit_met_m7_m7, bit_met_m7_m5);
+    xmm1 = simde_mm_max_epi16(bit_met_m7_m3, bit_met_m7_m1);
+    xmm2 = simde_mm_max_epi16(bit_met_m7_p1, bit_met_m7_p3);
+    xmm3 = simde_mm_max_epi16(bit_met_m7_p5, bit_met_m7_p7);
+    xmm4 = simde_mm_max_epi16(xmm0, xmm1);
+    xmm5 = simde_mm_max_epi16(xmm2, xmm3);
+    logmax_den_re0 = simde_mm_max_epi16(xmm4, xmm5);
+    xmm0 = simde_mm_max_epi16(bit_met_m1_m7, bit_met_m1_m5);
+    xmm1 = simde_mm_max_epi16(bit_met_m1_m3, bit_met_m1_m1);
+    xmm2 = simde_mm_max_epi16(bit_met_m1_p1, bit_met_m1_p3);
+    xmm3 = simde_mm_max_epi16(bit_met_m1_p5, bit_met_m1_p7);
+    xmm4 = simde_mm_max_epi16(xmm0, xmm1);
+    xmm5 = simde_mm_max_epi16(xmm2, xmm3);
+    logmax_den_re0 = simde_mm_max_epi16(logmax_den_re0, xmm4);
+    logmax_den_re0 = simde_mm_max_epi16(logmax_den_re0, xmm5);
+    xmm0 = simde_mm_max_epi16(bit_met_p1_m7, bit_met_p1_m5);
+    xmm1 = simde_mm_max_epi16(bit_met_p1_m3, bit_met_p1_m1);
+    xmm2 = simde_mm_max_epi16(bit_met_p1_p1, bit_met_p1_p3);
+    xmm3 = simde_mm_max_epi16(bit_met_p1_p5, bit_met_p1_p7);
+    xmm4 = simde_mm_max_epi16(xmm0, xmm1);
+    xmm5 = simde_mm_max_epi16(xmm2, xmm3);
+    logmax_den_re0 = simde_mm_max_epi16(logmax_den_re0, xmm4);
+    logmax_den_re0 = simde_mm_max_epi16(logmax_den_re0, xmm5);
+    xmm0 = simde_mm_max_epi16(bit_met_p7_m7, bit_met_p7_m5);
+    xmm1 = simde_mm_max_epi16(bit_met_p7_m3, bit_met_p7_m1);
+    xmm2 = simde_mm_max_epi16(bit_met_p7_p1, bit_met_p7_p3);
+    xmm3 = simde_mm_max_epi16(bit_met_p7_p5, bit_met_p7_p7);
+    xmm4 = simde_mm_max_epi16(xmm0, xmm1);
+    xmm5 = simde_mm_max_epi16(xmm2, xmm3);
+    logmax_den_re0 = simde_mm_max_epi16(logmax_den_re0, xmm4);
+    logmax_den_re0 = simde_mm_max_epi16(logmax_den_re0, xmm5);
+
+    xmm0 = simde_mm_max_epi16(bit_met_m5_m7, bit_met_m5_m5);
+    xmm1 = simde_mm_max_epi16(bit_met_m5_m3, bit_met_m5_m1);
+    xmm2 = simde_mm_max_epi16(bit_met_m5_p1, bit_met_m5_p3);
+    xmm3 = simde_mm_max_epi16(bit_met_m5_p5, bit_met_m5_p7);
+    xmm4 = simde_mm_max_epi16(xmm0, xmm1);
+    xmm5 = simde_mm_max_epi16(xmm2, xmm3);
+    logmax_num_re0 = simde_mm_max_epi16(xmm4, xmm5);
+    xmm0 = simde_mm_max_epi16(bit_met_m3_m7, bit_met_m3_m5);
+    xmm1 = simde_mm_max_epi16(bit_met_m3_m3, bit_met_m3_m1);
+    xmm2 = simde_mm_max_epi16(bit_met_m3_p1, bit_met_m3_p3);
+    xmm3 = simde_mm_max_epi16(bit_met_m3_p5, bit_met_m3_p7);
+    xmm4 = simde_mm_max_epi16(xmm0, xmm1);
+    xmm5 = simde_mm_max_epi16(xmm2, xmm3);
+    logmax_num_re0 = simde_mm_max_epi16(logmax_num_re0, xmm4);
+    logmax_num_re0 = simde_mm_max_epi16(logmax_num_re0, xmm5);
+    xmm0 = simde_mm_max_epi16(bit_met_p3_m7, bit_met_p3_m5);
+    xmm1 = simde_mm_max_epi16(bit_met_p3_m3, bit_met_p3_m1);
+    xmm2 = simde_mm_max_epi16(bit_met_p3_p1, bit_met_p3_p3);
+    xmm3 = simde_mm_max_epi16(bit_met_p3_p5, bit_met_p3_p7);
+    xmm4 = simde_mm_max_epi16(xmm0, xmm1);
+    xmm5 = simde_mm_max_epi16(xmm2, xmm3);
+    logmax_num_re0 = simde_mm_max_epi16(logmax_num_re0, xmm4);
+    logmax_num_re0 = simde_mm_max_epi16(logmax_num_re0, xmm5);
+    xmm0 = simde_mm_max_epi16(bit_met_p5_m7, bit_met_p5_m5);
+    xmm1 = simde_mm_max_epi16(bit_met_p5_m3, bit_met_p5_m1);
+    xmm2 = simde_mm_max_epi16(bit_met_p5_p1, bit_met_p5_p3);
+    xmm3 = simde_mm_max_epi16(bit_met_p5_p5, bit_met_p5_p7);
+    xmm4 = simde_mm_max_epi16(xmm0, xmm1);
+    xmm5 = simde_mm_max_epi16(xmm2, xmm3);
+    logmax_num_re0 = simde_mm_max_epi16(logmax_num_re0, xmm4);
+    logmax_num_re0 = simde_mm_max_epi16(logmax_num_re0, xmm5);
+
+    y1i = simde_mm_subs_epi16(logmax_num_re0, logmax_den_re0);
 
     // Detection for 6th bit (LTE mapping)
-    xmm0 = _mm_max_epi16(bit_met_p7_p7, bit_met_p5_p7);
-    xmm1 = _mm_max_epi16(bit_met_p3_p7, bit_met_p1_p7);
-    xmm2 = _mm_max_epi16(bit_met_m1_p7, bit_met_m3_p7);
-    xmm3 = _mm_max_epi16(bit_met_m5_p7, bit_met_m7_p7);
-    xmm4 = _mm_max_epi16(xmm0, xmm1);
-    xmm5 = _mm_max_epi16(xmm2, xmm3);
-    logmax_den_re0 = _mm_max_epi16(xmm4, xmm5);
-    xmm0 = _mm_max_epi16(bit_met_p7_p1, bit_met_p5_p1);
-    xmm1 = _mm_max_epi16(bit_met_p3_p1, bit_met_p1_p1);
-    xmm2 = _mm_max_epi16(bit_met_m1_p1, bit_met_m3_p1);
-    xmm3 = _mm_max_epi16(bit_met_m5_p1, bit_met_m5_p1);
-    xmm4 = _mm_max_epi16(xmm0, xmm1);
-    xmm5 = _mm_max_epi16(xmm2, xmm3);
-    logmax_den_re0 = _mm_max_epi16(logmax_den_re0, xmm4);
-    logmax_den_re0 = _mm_max_epi16(logmax_den_re0, xmm5);
-    xmm0 = _mm_max_epi16(bit_met_p7_m1, bit_met_p5_m1);
-    xmm1 = _mm_max_epi16(bit_met_p3_m1, bit_met_p1_m1);
-    xmm2 = _mm_max_epi16(bit_met_m1_m1, bit_met_m3_m1);
-    xmm3 = _mm_max_epi16(bit_met_m5_m1, bit_met_m7_m1);
-    xmm4 = _mm_max_epi16(xmm0, xmm1);
-    xmm5 = _mm_max_epi16(xmm2, xmm3);
-    logmax_den_re0 = _mm_max_epi16(logmax_den_re0, xmm4);
-    logmax_den_re0 = _mm_max_epi16(logmax_den_re0, xmm5);
-    xmm0 = _mm_max_epi16(bit_met_p7_m7, bit_met_p5_m7);
-    xmm1 = _mm_max_epi16(bit_met_p3_m7, bit_met_p1_m7);
-    xmm2 = _mm_max_epi16(bit_met_m1_m7, bit_met_m3_m7);
-    xmm3 = _mm_max_epi16(bit_met_m5_m7, bit_met_m7_m7);
-    xmm4 = _mm_max_epi16(xmm0, xmm1);
-    xmm5 = _mm_max_epi16(xmm2, xmm3);
-    logmax_den_re0 = _mm_max_epi16(logmax_den_re0, xmm4);
-    logmax_den_re0 = _mm_max_epi16(logmax_den_re0, xmm5);
-
-    xmm0 = _mm_max_epi16(bit_met_p7_m5, bit_met_p5_m5);
-    xmm1 = _mm_max_epi16(bit_met_p3_m5, bit_met_p1_m5);
-    xmm2 = _mm_max_epi16(bit_met_m1_m5, bit_met_m3_m5);
-    xmm3 = _mm_max_epi16(bit_met_m5_m5, bit_met_m7_m5);
-    xmm4 = _mm_max_epi16(xmm0, xmm1);
-    xmm5 = _mm_max_epi16(xmm2, xmm3);
-    logmax_num_re0 = _mm_max_epi16(xmm4, xmm5);
-    xmm0 = _mm_max_epi16(bit_met_p7_m3, bit_met_p5_m3);
-    xmm1 = _mm_max_epi16(bit_met_p3_m3, bit_met_p1_m3);
-    xmm2 = _mm_max_epi16(bit_met_m1_m3, bit_met_m3_m3);
-    xmm3 = _mm_max_epi16(bit_met_m5_m3, bit_met_m7_m3);
-    xmm4 = _mm_max_epi16(xmm0, xmm1);
-    xmm5 = _mm_max_epi16(xmm2, xmm3);
-    logmax_num_re0 = _mm_max_epi16(logmax_num_re0, xmm4);
-    logmax_num_re0 = _mm_max_epi16(logmax_num_re0, xmm5);
-    xmm0 = _mm_max_epi16(bit_met_p7_p3, bit_met_p5_p3);
-    xmm1 = _mm_max_epi16(bit_met_p3_p3, bit_met_p1_p3);
-    xmm2 = _mm_max_epi16(bit_met_m1_p3, bit_met_m3_p3);
-    xmm3 = _mm_max_epi16(bit_met_m5_p3, bit_met_m7_p3);
-    xmm4 = _mm_max_epi16(xmm0, xmm1);
-    xmm5 = _mm_max_epi16(xmm2, xmm3);
-    logmax_num_re0 = _mm_max_epi16(logmax_num_re0, xmm4);
-    logmax_num_re0 = _mm_max_epi16(logmax_num_re0, xmm5);
-    xmm0 = _mm_max_epi16(bit_met_p7_p5, bit_met_p5_p5);
-    xmm1 = _mm_max_epi16(bit_met_p3_p5, bit_met_p1_p5);
-    xmm2 = _mm_max_epi16(bit_met_m1_p5, bit_met_m3_p5);
-    xmm3 = _mm_max_epi16(bit_met_m5_p5, bit_met_m7_p5);
-    xmm4 = _mm_max_epi16(xmm0, xmm1);
-    xmm5 = _mm_max_epi16(xmm2, xmm3);
-    logmax_num_re0 = _mm_max_epi16(logmax_num_re0, xmm4);
-    logmax_num_re0 = _mm_max_epi16(logmax_num_re0, xmm5);
-
-    simde__m128i y2i = _mm_subs_epi16(logmax_num_re0, logmax_den_re0);
+    xmm0 = simde_mm_max_epi16(bit_met_p7_p7, bit_met_p5_p7);
+    xmm1 = simde_mm_max_epi16(bit_met_p3_p7, bit_met_p1_p7);
+    xmm2 = simde_mm_max_epi16(bit_met_m1_p7, bit_met_m3_p7);
+    xmm3 = simde_mm_max_epi16(bit_met_m5_p7, bit_met_m7_p7);
+    xmm4 = simde_mm_max_epi16(xmm0, xmm1);
+    xmm5 = simde_mm_max_epi16(xmm2, xmm3);
+    logmax_den_re0 = simde_mm_max_epi16(xmm4, xmm5);
+    xmm0 = simde_mm_max_epi16(bit_met_p7_p1, bit_met_p5_p1);
+    xmm1 = simde_mm_max_epi16(bit_met_p3_p1, bit_met_p1_p1);
+    xmm2 = simde_mm_max_epi16(bit_met_m1_p1, bit_met_m3_p1);
+    xmm3 = simde_mm_max_epi16(bit_met_m5_p1, bit_met_m7_p1);
+    xmm4 = simde_mm_max_epi16(xmm0, xmm1);
+    xmm5 = simde_mm_max_epi16(xmm2, xmm3);
+    logmax_den_re0 = simde_mm_max_epi16(logmax_den_re0, xmm4);
+    logmax_den_re0 = simde_mm_max_epi16(logmax_den_re0, xmm5);
+    xmm0 = simde_mm_max_epi16(bit_met_p7_m1, bit_met_p5_m1);
+    xmm1 = simde_mm_max_epi16(bit_met_p3_m1, bit_met_p1_m1);
+    xmm2 = simde_mm_max_epi16(bit_met_m1_m1, bit_met_m3_m1);
+    xmm3 = simde_mm_max_epi16(bit_met_m5_m1, bit_met_m7_m1);
+    xmm4 = simde_mm_max_epi16(xmm0, xmm1);
+    xmm5 = simde_mm_max_epi16(xmm2, xmm3);
+    logmax_den_re0 = simde_mm_max_epi16(logmax_den_re0, xmm4);
+    logmax_den_re0 = simde_mm_max_epi16(logmax_den_re0, xmm5);
+    xmm0 = simde_mm_max_epi16(bit_met_p7_m7, bit_met_p5_m7);
+    xmm1 = simde_mm_max_epi16(bit_met_p3_m7, bit_met_p1_m7);
+    xmm2 = simde_mm_max_epi16(bit_met_m1_m7, bit_met_m3_m7);
+    xmm3 = simde_mm_max_epi16(bit_met_m5_m7, bit_met_m7_m7);
+    xmm4 = simde_mm_max_epi16(xmm0, xmm1);
+    xmm5 = simde_mm_max_epi16(xmm2, xmm3);
+    logmax_den_re0 = simde_mm_max_epi16(logmax_den_re0, xmm4);
+    logmax_den_re0 = simde_mm_max_epi16(logmax_den_re0, xmm5);
+
+    xmm0 = simde_mm_max_epi16(bit_met_p7_m5, bit_met_p5_m5);
+    xmm1 = simde_mm_max_epi16(bit_met_p3_m5, bit_met_p1_m5);
+    xmm2 = simde_mm_max_epi16(bit_met_m1_m5, bit_met_m3_m5);
+    xmm3 = simde_mm_max_epi16(bit_met_m5_m5, bit_met_m7_m5);
+    xmm4 = simde_mm_max_epi16(xmm0, xmm1);
+    xmm5 = simde_mm_max_epi16(xmm2, xmm3);
+    logmax_num_re0 = simde_mm_max_epi16(xmm4, xmm5);
+    xmm0 = simde_mm_max_epi16(bit_met_p7_m3, bit_met_p5_m3);
+    xmm1 = simde_mm_max_epi16(bit_met_p3_m3, bit_met_p1_m3);
+    xmm2 = simde_mm_max_epi16(bit_met_m1_m3, bit_met_m3_m3);
+    xmm3 = simde_mm_max_epi16(bit_met_m5_m3, bit_met_m7_m3);
+    xmm4 = simde_mm_max_epi16(xmm0, xmm1);
+    xmm5 = simde_mm_max_epi16(xmm2, xmm3);
+    logmax_num_re0 = simde_mm_max_epi16(logmax_num_re0, xmm4);
+    logmax_num_re0 = simde_mm_max_epi16(logmax_num_re0, xmm5);
+    xmm0 = simde_mm_max_epi16(bit_met_p7_p3, bit_met_p5_p3);
+    xmm1 = simde_mm_max_epi16(bit_met_p3_p3, bit_met_p1_p3);
+    xmm2 = simde_mm_max_epi16(bit_met_m1_p3, bit_met_m3_p3);
+    xmm3 = simde_mm_max_epi16(bit_met_m5_p3, bit_met_m7_p3);
+    xmm4 = simde_mm_max_epi16(xmm0, xmm1);
+    xmm5 = simde_mm_max_epi16(xmm2, xmm3);
+    logmax_num_re0 = simde_mm_max_epi16(logmax_num_re0, xmm4);
+    logmax_num_re0 = simde_mm_max_epi16(logmax_num_re0, xmm5);
+    xmm0 = simde_mm_max_epi16(bit_met_p7_p5, bit_met_p5_p5);
+    xmm1 = simde_mm_max_epi16(bit_met_p3_p5, bit_met_p1_p5);
+    xmm2 = simde_mm_max_epi16(bit_met_m1_p5, bit_met_m3_p5);
+    xmm3 = simde_mm_max_epi16(bit_met_m5_p5, bit_met_m7_p5);
+    xmm4 = simde_mm_max_epi16(xmm0, xmm1);
+    xmm5 = simde_mm_max_epi16(xmm2, xmm3);
+    logmax_num_re0 = simde_mm_max_epi16(logmax_num_re0, xmm4);
+    logmax_num_re0 = simde_mm_max_epi16(logmax_num_re0, xmm5);
+
+    simde__m128i y2i = simde_mm_subs_epi16(logmax_num_re0, logmax_den_re0);
 
     // map to output stream, difficult to do in SIMD since we have 6 16bit LLRs
     // RE 1
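Each detection block above is a pure max reduction: for every bit, take the maximum candidate metric over the 32 constellation points where the bit is 0, the maximum over the 32 where it is 1, and output the saturating difference. A minimal scalar sketch of what one block computes per 16-bit lane (the names bit_met, NUM_AMPS and bit0_mask are illustrative, not from the source; the SIMD code does this eight lanes at a time):

#include <limits.h>
#include <stdint.h>

#define NUM_AMPS 8 /* 64-QAM amplitudes {-7,-5,-3,-1,+1,+3,+5,+7} per axis */

/* LLR(b) = max over metrics with b = 0 minus max over metrics with b = 1 */
static int16_t llr_maxlog(const int16_t bit_met[NUM_AMPS][NUM_AMPS],
                          const uint8_t bit0_mask[NUM_AMPS][NUM_AMPS])
{
  int32_t num = INT32_MIN, den = INT32_MIN;
  for (int r = 0; r < NUM_AMPS; r++)
    for (int c = 0; c < NUM_AMPS; c++) {
      if (bit0_mask[r][c]) { if (bit_met[r][c] > num) num = bit_met[r][c]; }
      else                 { if (bit_met[r][c] > den) den = bit_met[r][c]; }
    }
  /* the vector code uses simde_mm_subs_epi16, i.e. a saturating subtract */
  int32_t y = num - den;
  if (y > INT16_MAX) y = INT16_MAX;
  if (y < INT16_MIN) y = INT16_MIN;
  return (int16_t)y;
}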
@@ -8277,17 +7934,7 @@ void qam64_qam64(short *stream0_in,
     stream0_out[j + 45] = ((short *)&y0i)[7];
     stream0_out[j + 46] = ((short *)&y1i)[7];
     stream0_out[j + 47] = ((short *)&y2i)[7];
-
-#elif defined(__arm__) || defined(__aarch64__)
-
-#endif
-
   }
-
-#if defined(__x86_64__) || defined(__i386__)
-  _mm_empty();
-  _m_empty();
-#endif
 }
 
 
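The pattern in the file above repeats throughout the patch: every raw SSE intrinsic and __m128i becomes its simde_-prefixed counterpart, and the #if defined(__x86_64__)/#elif defined(__arm__) scaffolding is dropped, with the trailing _mm_empty()/_m_empty() calls either deleted or replaced by their SIMDE forms. SIMDE resolves the target at compile time, so the same translation unit builds natively on x86_64 and through NEON emulation on aarch64/armv7l. A minimal sketch of the resulting style, assuming SIMDE's headers are on the include path (the kernel itself is illustrative, not from the patch):

#include <stdint.h>
#include <simde/x86/sse2.h>

/* Same source compiles on x86_64 and ARM -- no architecture #ifdefs. */
void max_sub_epi16(const int16_t *a, const int16_t *b, int16_t *out, int n)
{
  for (int i = 0; i + 8 <= n; i += 8) {
    simde__m128i va = simde_mm_loadu_si128((const simde__m128i *)(a + i));
    simde__m128i vb = simde_mm_loadu_si128((const simde__m128i *)(b + i));
    simde__m128i vmax = simde_mm_max_epi16(va, vb);       /* lanewise max */
    simde_mm_storeu_si128((simde__m128i *)(out + i),
                          simde_mm_subs_epi16(vmax, vb)); /* saturating sub */
  }
}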
diff --git a/openair1/PHY/LTE_UE_TRANSPORT/dlsch_llr_computation_avx2.c b/openair1/PHY/LTE_UE_TRANSPORT/dlsch_llr_computation_avx2.c
index 88d7dfef6b8d28e126ec3f22683badcd4349719d..bb800afe9c8df357cf43768eb952863a6d4ba334 100644
--- a/openair1/PHY/LTE_UE_TRANSPORT/dlsch_llr_computation_avx2.c
+++ b/openair1/PHY/LTE_UE_TRANSPORT/dlsch_llr_computation_avx2.c
@@ -52,15 +52,15 @@
                                                                   0xffff,
                                                                   0xffff};
 
- //==============================================================================================
+
  // Auxiliary macros
 
  // calculate interference magnitude
-#define interference_abs_epi16(psi, int_ch_mag, int_mag, c1, c2)              \
-  tmp_result = simde_mm256_cmpgt_epi16(int_ch_mag, psi);                      \
-  tmp_result2 = simde_mm256_xor_si256(tmp_result, (*(__m256i *)&ones256[0])); \
-  tmp_result = simde_mm256_and_si256(tmp_result, c1);                         \
-  tmp_result2 = simde_mm256_and_si256(tmp_result2, c2);                       \
+#define interference_abs_epi16(psi, int_ch_mag, int_mag, c1, c2)                   \
+  tmp_result = simde_mm256_cmpgt_epi16(int_ch_mag, psi);                           \
+  tmp_result2 = simde_mm256_xor_si256(tmp_result, (*(simde__m256i *)&ones256[0])); \
+  tmp_result = simde_mm256_and_si256(tmp_result, c1);                              \
+  tmp_result2 = simde_mm256_and_si256(tmp_result2, c2);                            \
   const simde__m256i int_mag = simde_mm256_or_si256(tmp_result, tmp_result2);
 
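 // Illustrative note (not part of either file revision): per 16-bit lane,
 // the macro above reduces to the branchless select
 //   int_mag = (int_ch_mag > psi) ? c1 : c2;
 // simde_mm256_cmpgt_epi16 yields an all-ones/all-zeros lane mask, the xor
 // with ones256 inverts it, and the and/or pair merges c1 and c2.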
  // calculate interference magnitude
@@ -68,7 +68,7 @@
  // interval x>6
 #define interference_abs_64qam_epi16(psi, int_ch_mag, int_two_ch_mag, int_three_ch_mag, a, c1, c3, c5, c7) \
   tmp_result = simde_mm256_cmpgt_epi16(int_two_ch_mag, psi);                                               \
-  tmp_result3 = simde_mm256_xor_si256(tmp_result, (*(__m256i *)&ones256[0]));                              \
+  tmp_result3 = simde_mm256_xor_si256(tmp_result, (*(simde__m256i *)&ones256[0]));                         \
   tmp_result2 = simde_mm256_cmpgt_epi16(int_ch_mag, psi);                                                  \
   tmp_result = simde_mm256_xor_si256(tmp_result, tmp_result2);                                             \
   tmp_result4 = simde_mm256_cmpgt_epi16(psi, int_three_ch_mag);                                            \
@@ -146,76 +146,71 @@ void qam64_qam16_avx2(short *stream0_in,
     stream0_out: output LLRs for 1st stream
   */
 
-#if defined(__x86_64__) || defined(__i386__)
-
-  __m256i *rho01_256i      = (__m256i *)rho01;
-  __m256i *stream0_256i_in = (__m256i *)stream0_in;
-  __m256i *stream1_256i_in = (__m256i *)stream1_in;
-  __m256i *ch_mag_256i     = (__m256i *)ch_mag;
-  __m256i *ch_mag_256i_i   = (__m256i *)ch_mag_i;
-
-  __m256i ONE_OVER_SQRT_42 = simde_mm256_broadcastw_epi16(_mm_set1_epi16(10112)); // round(1/sqrt(42)*2^16)
-  __m256i THREE_OVER_SQRT_42 = simde_mm256_broadcastw_epi16(_mm_set1_epi16(30337)); // round(3/sqrt(42)*2^16)
-  __m256i FIVE_OVER_SQRT_42 = simde_mm256_broadcastw_epi16(_mm_set1_epi16(25281)); // round(5/sqrt(42)*2^15)
-  __m256i SEVEN_OVER_SQRT_42 = simde_mm256_broadcastw_epi16(_mm_set1_epi16(17697)); // round(5/sqrt(42)*2^15)
-  __m256i FORTYNINE_OVER_FOUR_SQRT_42 = simde_mm256_broadcastw_epi16(_mm_set1_epi16(30969)); // round(49/(4*sqrt(42))*2^14), Q2.14
-  __m256i THIRTYSEVEN_OVER_FOUR_SQRT_42 = simde_mm256_broadcastw_epi16(_mm_set1_epi16(23385)); // round(37/(4*sqrt(42))*2^14), Q2.14
-  __m256i TWENTYFIVE_OVER_FOUR_SQRT_42 = simde_mm256_broadcastw_epi16(_mm_set1_epi16(31601)); // round(25/(4*sqrt(42))*2^15)
-  __m256i TWENTYNINE_OVER_FOUR_SQRT_42 = simde_mm256_broadcastw_epi16(_mm_set1_epi16(18329)); // round(29/(4*sqrt(42))*2^15), Q2.14
-  __m256i SEVENTEEN_OVER_FOUR_SQRT_42 = simde_mm256_broadcastw_epi16(_mm_set1_epi16(21489)); // round(17/(4*sqrt(42))*2^15)
-  __m256i NINE_OVER_FOUR_SQRT_42 = simde_mm256_broadcastw_epi16(_mm_set1_epi16(11376)); // round(9/(4*sqrt(42))*2^15)
-  __m256i THIRTEEN_OVER_FOUR_SQRT_42 = simde_mm256_broadcastw_epi16(_mm_set1_epi16(16433)); // round(13/(4*sqrt(42))*2^15)
-  __m256i FIVE_OVER_FOUR_SQRT_42 = simde_mm256_broadcastw_epi16(_mm_set1_epi16(6320)); // round(5/(4*sqrt(42))*2^15)
-  __m256i ONE_OVER_FOUR_SQRT_42 = simde_mm256_broadcastw_epi16(_mm_set1_epi16(1264)); // round(1/(4*sqrt(42))*2^15)
-  __m256i ONE_OVER_SQRT_10_Q15 = simde_mm256_broadcastw_epi16(_mm_set1_epi16(10362)); // round(1/sqrt(10)*2^15)
-  __m256i THREE_OVER_SQRT_10 = simde_mm256_broadcastw_epi16(_mm_set1_epi16(31086)); // round(3/sqrt(10)*2^15)
-  __m256i SQRT_10_OVER_FOUR = simde_mm256_broadcastw_epi16(_mm_set1_epi16(25905)); // round(sqrt(10)/4*2^15)
-
-
-  __m256i ch_mag_int;
-  __m256i ch_mag_des;
-  __m256i ch_mag_98_over_42_with_sigma2;
-  __m256i ch_mag_74_over_42_with_sigma2;
-  __m256i ch_mag_58_over_42_with_sigma2;
-  __m256i ch_mag_50_over_42_with_sigma2;
-  __m256i ch_mag_34_over_42_with_sigma2;
-  __m256i ch_mag_18_over_42_with_sigma2;
-  __m256i ch_mag_26_over_42_with_sigma2;
-  __m256i ch_mag_10_over_42_with_sigma2;
-  __m256i ch_mag_2_over_42_with_sigma2;
-  __m256i  y0r_one_over_sqrt_21;
-  __m256i  y0r_three_over_sqrt_21;
-  __m256i  y0r_five_over_sqrt_21;
-  __m256i  y0r_seven_over_sqrt_21;
-  __m256i  y0i_one_over_sqrt_21;
-  __m256i  y0i_three_over_sqrt_21;
-  __m256i  y0i_five_over_sqrt_21;
-  __m256i  y0i_seven_over_sqrt_21;
-
-#elif defined(__arm__) || defined(__aarch64__)
-
-#endif
+
+  simde__m256i *rho01_256i      = (simde__m256i *)rho01;
+  simde__m256i *stream0_256i_in = (simde__m256i *)stream0_in;
+  simde__m256i *stream1_256i_in = (simde__m256i *)stream1_in;
+  simde__m256i *ch_mag_256i     = (simde__m256i *)ch_mag;
+  simde__m256i *ch_mag_256i_i   = (simde__m256i *)ch_mag_i;
+
+  simde__m256i ONE_OVER_SQRT_42 = simde_mm256_broadcastw_epi16(simde_mm_set1_epi16(10112)); // round(1/sqrt(42)*2^16)
+  simde__m256i THREE_OVER_SQRT_42 = simde_mm256_broadcastw_epi16(simde_mm_set1_epi16(30337)); // round(3/sqrt(42)*2^16)
+  simde__m256i FIVE_OVER_SQRT_42 = simde_mm256_broadcastw_epi16(simde_mm_set1_epi16(25281)); // round(5/sqrt(42)*2^15)
+  simde__m256i SEVEN_OVER_SQRT_42 = simde_mm256_broadcastw_epi16(simde_mm_set1_epi16(17697)); // round(7/sqrt(42)*2^14), Q2.14
+  simde__m256i FORTYNINE_OVER_FOUR_SQRT_42 = simde_mm256_broadcastw_epi16(simde_mm_set1_epi16(30969)); // round(49/(4*sqrt(42))*2^14), Q2.14
+  simde__m256i THIRTYSEVEN_OVER_FOUR_SQRT_42 = simde_mm256_broadcastw_epi16(simde_mm_set1_epi16(23385)); // round(37/(4*sqrt(42))*2^14), Q2.14
+  simde__m256i TWENTYFIVE_OVER_FOUR_SQRT_42 = simde_mm256_broadcastw_epi16(simde_mm_set1_epi16(31601)); // round(25/(4*sqrt(42))*2^15)
+  simde__m256i TWENTYNINE_OVER_FOUR_SQRT_42 = simde_mm256_broadcastw_epi16(simde_mm_set1_epi16(18329)); // round(29/(4*sqrt(42))*2^14), Q2.14
+  simde__m256i SEVENTEEN_OVER_FOUR_SQRT_42 = simde_mm256_broadcastw_epi16(simde_mm_set1_epi16(21489)); // round(17/(4*sqrt(42))*2^15)
+  simde__m256i NINE_OVER_FOUR_SQRT_42 = simde_mm256_broadcastw_epi16(simde_mm_set1_epi16(11376)); // round(9/(4*sqrt(42))*2^15)
+  simde__m256i THIRTEEN_OVER_FOUR_SQRT_42 = simde_mm256_broadcastw_epi16(simde_mm_set1_epi16(16433)); // round(13/(4*sqrt(42))*2^15)
+  simde__m256i FIVE_OVER_FOUR_SQRT_42 = simde_mm256_broadcastw_epi16(simde_mm_set1_epi16(6320)); // round(5/(4*sqrt(42))*2^15)
+  simde__m256i ONE_OVER_FOUR_SQRT_42 = simde_mm256_broadcastw_epi16(simde_mm_set1_epi16(1264)); // round(1/(4*sqrt(42))*2^15)
+  simde__m256i ONE_OVER_SQRT_10_Q15 = simde_mm256_broadcastw_epi16(simde_mm_set1_epi16(10362)); // round(1/sqrt(10)*2^15)
+  simde__m256i THREE_OVER_SQRT_10 = simde_mm256_broadcastw_epi16(simde_mm_set1_epi16(31086)); // round(3/sqrt(10)*2^15)
+  simde__m256i SQRT_10_OVER_FOUR = simde_mm256_broadcastw_epi16(simde_mm_set1_epi16(25905)); // round(sqrt(10)/4*2^15)
+
+
+  simde__m256i ch_mag_int;
+  simde__m256i ch_mag_des;
+  simde__m256i ch_mag_98_over_42_with_sigma2;
+  simde__m256i ch_mag_74_over_42_with_sigma2;
+  simde__m256i ch_mag_58_over_42_with_sigma2;
+  simde__m256i ch_mag_50_over_42_with_sigma2;
+  simde__m256i ch_mag_34_over_42_with_sigma2;
+  simde__m256i ch_mag_18_over_42_with_sigma2;
+  simde__m256i ch_mag_26_over_42_with_sigma2;
+  simde__m256i ch_mag_10_over_42_with_sigma2;
+  simde__m256i ch_mag_2_over_42_with_sigma2;
+  simde__m256i  y0r_one_over_sqrt_21;
+  simde__m256i  y0r_three_over_sqrt_21;
+  simde__m256i  y0r_five_over_sqrt_21;
+  simde__m256i  y0r_seven_over_sqrt_21;
+  simde__m256i  y0i_one_over_sqrt_21;
+  simde__m256i  y0i_three_over_sqrt_21;
+  simde__m256i  y0i_five_over_sqrt_21;
+  simde__m256i  y0i_seven_over_sqrt_21;
+
   int i,j;
   uint32_t len256 = (length)>>3;
 
   for (i=0; i<len256; i+=2) {
 
-#if defined(__x86_64__) || defined(__i386__)
     // Get rho
-      /*
-    xmm0 = rho01_128i[i];
-    xmm1 = rho01_128i[i+1];
-    xmm0 = _mm_shufflelo_epi16(xmm0,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm0 = _mm_shufflehi_epi16(xmm0,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm0 = _mm_shuffle_epi32(xmm0,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm1 = _mm_shufflelo_epi16(xmm1,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm1 = _mm_shufflehi_epi16(xmm1,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm1 = _mm_shuffle_epi32(xmm1,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    //xmm0 = [Re(0,1) Re(2,3) Im(0,1) Im(2,3)]
-    //xmm1 = [Re(4,5) Re(6,7) Im(4,5) Im(6,7)]
-    xmm2 = _mm_unpacklo_epi64(xmm0,xmm1); // Re(rho)
-    xmm3 = _mm_unpackhi_epi64(xmm0,xmm1); // Im(rho)
-      */
+    /*
+    xmm0 = rho01_128i[i];
+    xmm1 = rho01_128i[i+1];
+    xmm0 = simde_mm_shufflelo_epi16(xmm0,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
+    xmm0 = simde_mm_shufflehi_epi16(xmm0,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
+    xmm0 = simde_mm_shuffle_epi32(xmm0,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
+    xmm1 = simde_mm_shufflelo_epi16(xmm1,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
+    xmm1 = simde_mm_shufflehi_epi16(xmm1,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
+    xmm1 = simde_mm_shuffle_epi32(xmm1,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
+    //xmm0 = [Re(0,1) Re(2,3) Im(0,1) Im(2,3)]
+    //xmm1 = [Re(4,5) Re(6,7) Im(4,5) Im(6,7)]
+    xmm2 = simde_mm_unpacklo_epi64(xmm0,xmm1); // Re(rho)
+    xmm3 = simde_mm_unpackhi_epi64(xmm0,xmm1); // Im(rho)
+    */
     simde__m256i xmm0, xmm2, xmm3, xmm4, xmm5, xmm6, xmm7, xmm8;
     simde_mm256_separate_real_imag_parts(&xmm2, &xmm3, rho01_256i[i], rho01_256i[i+1]);
 
@@ -282,12 +277,12 @@ void qam64_qam16_avx2(short *stream0_in,
     /*
     xmm0 = stream1_128i_in[i];
     xmm1 = stream1_128i_in[i+1];
-    xmm0 = simde_mm256_shufflelo_epi16(xmm0,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm0 = simde_mm256_shufflehi_epi16(xmm0,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm0 = simde_mm256_shuffle_epi32(xmm0,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm1 = simde_mm256_shufflelo_epi16(xmm1,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm1 = simde_mm256_shufflehi_epi16(xmm1,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm1 = simde_mm256_shuffle_epi32(xmm1,0xd8); //_MM_SHUFFLE(0,2,1,3));
+    xmm0 = simde_mm256_shufflelo_epi16(xmm0,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
+    xmm0 = simde_mm256_shufflehi_epi16(xmm0,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
+    xmm0 = simde_mm256_shuffle_epi32(xmm0,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
+    xmm1 = simde_mm256_shufflelo_epi16(xmm1,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
+    xmm1 = simde_mm256_shufflehi_epi16(xmm1,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
+    xmm1 = simde_mm256_shuffle_epi32(xmm1,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
     //xmm0 = [Re(0,1) Re(2,3) Im(0,1) Im(2,3)]
     //xmm1 = [Re(4,5) Re(6,7) Im(4,5) Im(6,7)]
     y1r = simde_mm256_unpacklo_epi64(xmm0,xmm1); //[y1r(1),y1r(2),y1r(3),y1r(4)]
@@ -298,7 +293,7 @@ void qam64_qam16_avx2(short *stream0_in,
     simde_mm256_separate_real_imag_parts(&y1r, &y1i, stream1_256i_in[i], stream1_256i_in[i+1]);
 
     // Psi_r calculation from rho_rpi or rho_rmi
-    xmm0 = simde_mm256_broadcastw_epi16(_mm_set1_epi16(0));// ZERO for abs_pi16
+    xmm0 = simde_mm256_broadcastw_epi16(simde_mm_set1_epi16(0));// ZERO for abs_pi16
     xmm2 = simde_mm256_subs_epi16(rho_rpi_7_7, y1r);
     simde__m256i psi_r_p7_p7 = simde_mm256_abs_epi16(xmm2);
     xmm2 = simde_mm256_subs_epi16(rho_rpi_7_5, y1r);
@@ -582,12 +577,12 @@ void qam64_qam16_avx2(short *stream0_in,
     // Rearrange desired channel magnitudes
     xmm2 = ch_mag_128i[i]; // = [|h|^2(1),|h|^2(1),|h|^2(2),|h|^2(2)]*(2/sqrt(10))
     xmm3 = ch_mag_128i[i+1]; // = [|h|^2(3),|h|^2(3),|h|^2(4),|h|^2(4)]*(2/sqrt(10))
-    xmm2 = simde_mm256_shufflelo_epi16(xmm2,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm2 = simde_mm256_shufflehi_epi16(xmm2,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm2 = simde_mm256_shuffle_epi32(xmm2,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm3 = simde_mm256_shufflelo_epi16(xmm3,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm3 = simde_mm256_shufflehi_epi16(xmm3,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm3 = simde_mm256_shuffle_epi32(xmm3,0xd8); //_MM_SHUFFLE(0,2,1,3));
+    xmm2 = simde_mm256_shufflelo_epi16(xmm2,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
+    xmm2 = simde_mm256_shufflehi_epi16(xmm2,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
+    xmm2 = simde_mm256_shuffle_epi32(xmm2,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
+    xmm3 = simde_mm256_shufflelo_epi16(xmm3,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
+    xmm3 = simde_mm256_shufflehi_epi16(xmm3,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
+    xmm3 = simde_mm256_shuffle_epi32(xmm3,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
     ch_mag_des = simde_mm256_unpacklo_epi64(xmm2,xmm3);
     */
 
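The shuffle sequences kept here as comments document what simde_mm256_separate_real_imag_parts now does in one call: deinterleave complex int16 samples into one vector of real parts and one of imaginary parts. The helper appears to be OAI-local, judging from the surrounding code; a scalar sketch of the same data movement, for illustration only:

#include <stdint.h>

/* iq holds n_cplx interleaved samples [Re(0), Im(0), Re(1), Im(1), ...];
 * re/im receive the separated components, as the AVX2 helper produces
 * per pair of 256-bit registers. */
static void separate_real_imag_scalar(int16_t *re, int16_t *im,
                                      const int16_t *iq, int n_cplx)
{
  for (int k = 0; k < n_cplx; k++) {
    re[k] = iq[2 * k];
    im[k] = iq[2 * k + 1];
  }
}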
@@ -597,12 +592,12 @@ void qam64_qam16_avx2(short *stream0_in,
     /*
     xmm2 = ch_mag_128i_i[i];
     xmm3 = ch_mag_128i_i[i+1];
-    xmm2 = simde_mm256_shufflelo_epi16(xmm2,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm2 = simde_mm256_shufflehi_epi16(xmm2,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm2 = simde_mm256_shuffle_epi32(xmm2,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm3 = simde_mm256_shufflelo_epi16(xmm3,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm3 = simde_mm256_shufflehi_epi16(xmm3,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm3 = simde_mm256_shuffle_epi32(xmm3,0xd8); //_MM_SHUFFLE(0,2,1,3));
+    xmm2 = simde_mm256_shufflelo_epi16(xmm2,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
+    xmm2 = simde_mm256_shufflehi_epi16(xmm2,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
+    xmm2 = simde_mm256_shuffle_epi32(xmm2,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
+    xmm3 = simde_mm256_shufflelo_epi16(xmm3,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
+    xmm3 = simde_mm256_shufflehi_epi16(xmm3,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
+    xmm3 = simde_mm256_shuffle_epi32(xmm3,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
     ch_mag_int  = simde_mm256_unpacklo_epi64(xmm2,xmm3);
     */
 
@@ -1669,15 +1664,10 @@ void qam64_qam16_avx2(short *stream0_in,
     stream0_out[j + 94] = ((short *)&y1i)[15];
     stream0_out[j + 95] = ((short *)&y2i)[15];
 
-#elif defined(__arm__) || defined(__aarch64__)
-
-#endif
   }
 
-#if defined(__x86_64__) || defined(__i386__)
-  _mm_empty();
-  _m_empty();
-#endif
+  simde_mm_empty();
+  simde_m_empty();
 
 }
 
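Unlike the first file, where the trailing _mm_empty()/_m_empty() block is deleted outright, this function keeps the calls in their portable form. A minimal sketch of why that suffices, assuming the usual SIMDE layout in which simde/x86/mmx.h provides simde_mm_empty:

#include <simde/x86/mmx.h>

/* On x86 this clears the MMX/x87 register state after MMX code; on ARM
 * and other targets SIMDE compiles it to a no-op, so the previous
 * #if defined(__x86_64__) guard is no longer needed. */
static void leave_mmx_region(void)
{
  simde_mm_empty();
}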
@@ -1706,78 +1696,73 @@ void qam64_qam64_avx2(int32_t *stream0_in,
     stream0_out: output LLRs for 1st stream
   */
 
-#if defined(__x86_64__) || defined(__i386__)
-
-  __m256i *rho01_256i      = (__m256i *)rho01;
-  __m256i *stream0_256i_in = (__m256i *)stream0_in;
-  __m256i *stream1_256i_in = (__m256i *)stream1_in;
-  __m256i *ch_mag_256i     = (__m256i *)ch_mag;
-  __m256i *ch_mag_256i_i   = (__m256i *)ch_mag_i;
-
-  __m256i ONE_OVER_SQRT_42              = simde_mm256_broadcastw_epi16(_mm_set1_epi16(10112)); // round(1/sqrt(42)*2^16)
-  __m256i THREE_OVER_SQRT_42            = simde_mm256_broadcastw_epi16(_mm_set1_epi16(30337)); // round(3/sqrt(42)*2^16)
-  __m256i FIVE_OVER_SQRT_42             = simde_mm256_broadcastw_epi16(_mm_set1_epi16(25281)); // round(5/sqrt(42)*2^15)
-  __m256i SEVEN_OVER_SQRT_42            = simde_mm256_broadcastw_epi16(_mm_set1_epi16(17697)); // round(7/sqrt(42)*2^14) Q2.14
-  __m256i ONE_OVER_SQRT_2               = simde_mm256_broadcastw_epi16(_mm_set1_epi16(23170)); // round(1/sqrt(2)*2^15)
-  __m256i ONE_OVER_SQRT_2_42            = simde_mm256_broadcastw_epi16(_mm_set1_epi16(3575));  // round(1/sqrt(2*42)*2^15)
-  __m256i THREE_OVER_SQRT_2_42          = simde_mm256_broadcastw_epi16(_mm_set1_epi16(10726)); // round(3/sqrt(2*42)*2^15)
-  __m256i FIVE_OVER_SQRT_2_42           = simde_mm256_broadcastw_epi16(_mm_set1_epi16(17876)); // round(5/sqrt(2*42)*2^15)
-  __m256i SEVEN_OVER_SQRT_2_42          = simde_mm256_broadcastw_epi16(_mm_set1_epi16(25027)); // round(7/sqrt(2*42)*2^15)
-  __m256i FORTYNINE_OVER_FOUR_SQRT_42   = simde_mm256_broadcastw_epi16(_mm_set1_epi16(30969)); // round(49/(4*sqrt(42))*2^14), Q2.14
-  __m256i THIRTYSEVEN_OVER_FOUR_SQRT_42 = simde_mm256_broadcastw_epi16(_mm_set1_epi16(23385)); // round(37/(4*sqrt(42))*2^14), Q2.14
-  __m256i TWENTYFIVE_OVER_FOUR_SQRT_42  = simde_mm256_broadcastw_epi16(_mm_set1_epi16(31601)); // round(25/(4*sqrt(42))*2^15)
-  __m256i TWENTYNINE_OVER_FOUR_SQRT_42  = simde_mm256_broadcastw_epi16(_mm_set1_epi16(18329)); // round(29/(4*sqrt(42))*2^15), Q2.14
-  __m256i SEVENTEEN_OVER_FOUR_SQRT_42   = simde_mm256_broadcastw_epi16(_mm_set1_epi16(21489)); // round(17/(4*sqrt(42))*2^15)
-  __m256i NINE_OVER_FOUR_SQRT_42        = simde_mm256_broadcastw_epi16(_mm_set1_epi16(11376)); // round(9/(4*sqrt(42))*2^15)
-  __m256i THIRTEEN_OVER_FOUR_SQRT_42    = simde_mm256_broadcastw_epi16(_mm_set1_epi16(16433)); // round(13/(4*sqrt(42))*2^15)
-  __m256i FIVE_OVER_FOUR_SQRT_42        = simde_mm256_broadcastw_epi16(_mm_set1_epi16(6320));  // round(5/(4*sqrt(42))*2^15)
-  __m256i ONE_OVER_FOUR_SQRT_42         = simde_mm256_broadcastw_epi16(_mm_set1_epi16(1264));  // round(1/(4*sqrt(42))*2^15)
-  __m256i SQRT_42_OVER_FOUR             = simde_mm256_broadcastw_epi16(_mm_set1_epi16(13272)); // round(sqrt(42)/4*2^13), Q3.12
-
-  __m256i ch_mag_des;
-  __m256i ch_mag_int;
-  __m256i ch_mag_98_over_42_with_sigma2;
-  __m256i ch_mag_74_over_42_with_sigma2;
-  __m256i ch_mag_58_over_42_with_sigma2;
-  __m256i ch_mag_50_over_42_with_sigma2;
-  __m256i ch_mag_34_over_42_with_sigma2;
-  __m256i ch_mag_18_over_42_with_sigma2;
-  __m256i ch_mag_26_over_42_with_sigma2;
-  __m256i ch_mag_10_over_42_with_sigma2;
-  __m256i ch_mag_2_over_42_with_sigma2;
-  __m256i y0r_one_over_sqrt_21;
-  __m256i y0r_three_over_sqrt_21;
-  __m256i y0r_five_over_sqrt_21;
-  __m256i y0r_seven_over_sqrt_21;
-  __m256i y0i_one_over_sqrt_21;
-  __m256i y0i_three_over_sqrt_21;
-  __m256i y0i_five_over_sqrt_21;
-  __m256i y0i_seven_over_sqrt_21;
-  __m256i ch_mag_int_with_sigma2;
-  __m256i two_ch_mag_int_with_sigma2;
-  __m256i three_ch_mag_int_with_sigma2;
-#elif defined(__arm__) || defined(__aarch64__)
-
-#endif
+
+  simde__m256i *rho01_256i      = (simde__m256i *)rho01;
+  simde__m256i *stream0_256i_in = (simde__m256i *)stream0_in;
+  simde__m256i *stream1_256i_in = (simde__m256i *)stream1_in;
+  simde__m256i *ch_mag_256i     = (simde__m256i *)ch_mag;
+  simde__m256i *ch_mag_256i_i   = (simde__m256i *)ch_mag_i;
+
+  simde__m256i ONE_OVER_SQRT_42              = simde_mm256_broadcastw_epi16(simde_mm_set1_epi16(10112)); // round(1/sqrt(42)*2^16)
+  simde__m256i THREE_OVER_SQRT_42            = simde_mm256_broadcastw_epi16(simde_mm_set1_epi16(30337)); // round(3/sqrt(42)*2^16)
+  simde__m256i FIVE_OVER_SQRT_42             = simde_mm256_broadcastw_epi16(simde_mm_set1_epi16(25281)); // round(5/sqrt(42)*2^15)
+  simde__m256i SEVEN_OVER_SQRT_42            = simde_mm256_broadcastw_epi16(simde_mm_set1_epi16(17697)); // round(7/sqrt(42)*2^14) Q2.14
+  simde__m256i ONE_OVER_SQRT_2               = simde_mm256_broadcastw_epi16(simde_mm_set1_epi16(23170)); // round(1/sqrt(2)*2^15)
+  simde__m256i ONE_OVER_SQRT_2_42            = simde_mm256_broadcastw_epi16(simde_mm_set1_epi16(3575));  // round(1/sqrt(2*42)*2^15)
+  simde__m256i THREE_OVER_SQRT_2_42          = simde_mm256_broadcastw_epi16(simde_mm_set1_epi16(10726)); // round(3/sqrt(2*42)*2^15)
+  simde__m256i FIVE_OVER_SQRT_2_42           = simde_mm256_broadcastw_epi16(simde_mm_set1_epi16(17876)); // round(5/sqrt(2*42)*2^15)
+  simde__m256i SEVEN_OVER_SQRT_2_42          = simde_mm256_broadcastw_epi16(simde_mm_set1_epi16(25027)); // round(7/sqrt(2*42)*2^15)
+  simde__m256i FORTYNINE_OVER_FOUR_SQRT_42   = simde_mm256_broadcastw_epi16(simde_mm_set1_epi16(30969)); // round(49/(4*sqrt(42))*2^14), Q2.14
+  simde__m256i THIRTYSEVEN_OVER_FOUR_SQRT_42 = simde_mm256_broadcastw_epi16(simde_mm_set1_epi16(23385)); // round(37/(4*sqrt(42))*2^14), Q2.14
+  simde__m256i TWENTYFIVE_OVER_FOUR_SQRT_42  = simde_mm256_broadcastw_epi16(simde_mm_set1_epi16(31601)); // round(25/(4*sqrt(42))*2^15)
+  simde__m256i TWENTYNINE_OVER_FOUR_SQRT_42  = simde_mm256_broadcastw_epi16(simde_mm_set1_epi16(18329)); // round(29/(4*sqrt(42))*2^14), Q2.14
+  simde__m256i SEVENTEEN_OVER_FOUR_SQRT_42   = simde_mm256_broadcastw_epi16(simde_mm_set1_epi16(21489)); // round(17/(4*sqrt(42))*2^15)
+  simde__m256i NINE_OVER_FOUR_SQRT_42        = simde_mm256_broadcastw_epi16(simde_mm_set1_epi16(11376)); // round(9/(4*sqrt(42))*2^15)
+  simde__m256i THIRTEEN_OVER_FOUR_SQRT_42    = simde_mm256_broadcastw_epi16(simde_mm_set1_epi16(16433)); // round(13/(4*sqrt(42))*2^15)
+  simde__m256i FIVE_OVER_FOUR_SQRT_42        = simde_mm256_broadcastw_epi16(simde_mm_set1_epi16(6320));  // round(5/(4*sqrt(42))*2^15)
+  simde__m256i ONE_OVER_FOUR_SQRT_42         = simde_mm256_broadcastw_epi16(simde_mm_set1_epi16(1264));  // round(1/(4*sqrt(42))*2^15)
+  simde__m256i SQRT_42_OVER_FOUR             = simde_mm256_broadcastw_epi16(simde_mm_set1_epi16(13272)); // round(sqrt(42)/4*2^13), Q3.12
+
+  simde__m256i ch_mag_des;
+  simde__m256i ch_mag_int;
+  simde__m256i ch_mag_98_over_42_with_sigma2;
+  simde__m256i ch_mag_74_over_42_with_sigma2;
+  simde__m256i ch_mag_58_over_42_with_sigma2;
+  simde__m256i ch_mag_50_over_42_with_sigma2;
+  simde__m256i ch_mag_34_over_42_with_sigma2;
+  simde__m256i ch_mag_18_over_42_with_sigma2;
+  simde__m256i ch_mag_26_over_42_with_sigma2;
+  simde__m256i ch_mag_10_over_42_with_sigma2;
+  simde__m256i ch_mag_2_over_42_with_sigma2;
+  simde__m256i y0r_one_over_sqrt_21;
+  simde__m256i y0r_three_over_sqrt_21;
+  simde__m256i y0r_five_over_sqrt_21;
+  simde__m256i y0r_seven_over_sqrt_21;
+  simde__m256i y0i_one_over_sqrt_21;
+  simde__m256i y0i_three_over_sqrt_21;
+  simde__m256i y0i_five_over_sqrt_21;
+  simde__m256i y0i_seven_over_sqrt_21;
+  simde__m256i ch_mag_int_with_sigma2;
+  simde__m256i two_ch_mag_int_with_sigma2;
+  simde__m256i three_ch_mag_int_with_sigma2;
 
   int i,j;
   uint32_t len256 = (length)>>3;
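+  // length counts 32-bit complex samples; one 256-bit vector holds 8 of them,
+  // so len256 = length/8, and the loop below consumes two vectors (16 samples)
+  // per iteration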
 
   for (i=0; i<len256; i+=2) {
 
-#if defined(__x86_64__) || defined(__i386__)
 
     // Get rho
       /*
     xmm0 = rho01_256i[i];
     xmm1 = rho01_256i[i+1];
-    xmm0 = simde_mm256_shufflelo_epi16(xmm0,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm0 = simde_mm256_shufflehi_epi16(xmm0,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm0 = simde_mm256_shuffle_epi32(xmm0,0xd8); //_MM_SHUFFLE(0,2,1,3));
+    xmm0 = simde_mm256_shufflelo_epi16(xmm0,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
+    xmm0 = simde_mm256_shufflehi_epi16(xmm0,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
+    xmm0 = simde_mm256_shuffle_epi32(xmm0,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
 
-    xmm1 = simde_mm256_shufflelo_epi16(xmm1,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm1 = simde_mm256_shufflehi_epi16(xmm1,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm1 = simde_mm256_shuffle_epi32(xmm1,0xd8); //_MM_SHUFFLE(0,2,1,3));
+    xmm1 = simde_mm256_shufflelo_epi16(xmm1,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
+    xmm1 = simde_mm256_shufflehi_epi16(xmm1,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
+    xmm1 = simde_mm256_shuffle_epi32(xmm1,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
 
     //xmm0 = [Re(0,1,2,3)   Im(0,1,2,3)   Re(4,5,6,7)     Im(4,5,6,7)]
    //xmm1 = [Re(8,9,10,11) Im(8,9,10,11) Re(12,13,14,15) Im(12,13,14,15)]
@@ -1856,13 +1841,13 @@ void qam64_qam64_avx2(int32_t *stream0_in,
     /*
     xmm0 = stream1_256i_in[i];
     xmm1 = stream1_256i_in[i+1];
-    xmm0 = simde_mm256_shufflelo_epi16(xmm0,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm0 = simde_mm256_shufflehi_epi16(xmm0,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm0 = simde_mm256_shuffle_epi32(xmm0,0xd8); //_MM_SHUFFLE(0,2,1,3));
+    xmm0 = simde_mm256_shufflelo_epi16(xmm0,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
+    xmm0 = simde_mm256_shufflehi_epi16(xmm0,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
+    xmm0 = simde_mm256_shuffle_epi32(xmm0,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
 
-    xmm1 = simde_mm256_shufflelo_epi16(xmm1,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm1 = simde_mm256_shufflehi_epi16(xmm1,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm1 = simde_mm256_shuffle_epi32(xmm1,0xd8); //_MM_SHUFFLE(0,2,1,3));
+    xmm1 = simde_mm256_shufflelo_epi16(xmm1,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
+    xmm1 = simde_mm256_shufflehi_epi16(xmm1,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
+    xmm1 = simde_mm256_shuffle_epi32(xmm1,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
 
     y1r = simde_mm256_unpacklo_epi64(xmm0, xmm1);
     y1r = simde_mm256_permute4x64_epi64(y1r,0xd8); // Re(y1)
@@ -1874,7 +1859,7 @@ void qam64_qam64_avx2(int32_t *stream0_in,
     simde_mm256_separate_real_imag_parts(&y1r, &y1i, stream1_256i_in[i], stream1_256i_in[i+1]);
 
     // Psi_r calculation from rho_rpi or rho_rmi
-    xmm0 = simde_mm256_broadcastw_epi16(_mm_set1_epi16(0));// ZERO for abs_pi16
+    xmm0 = simde_mm256_broadcastw_epi16(simde_mm_set1_epi16(0)); // ZERO for abs_epi16
     xmm2 = simde_mm256_subs_epi16(rho_rpi_7_7, y1r);
 
     simde__m256i psi_r_p7_p7 = simde_mm256_abs_epi16(xmm2);
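+    // psi_r_p7_p7 = |rho_rpi_7_7 - y1r|: distance between the interfering
+    // stream's matched-filter output and the rho-rotated hypothesis
+    // x1 = (7+7j)/sqrt(42); the interference-aware detector computes one such
+    // psi term per 64QAM hypothesis and folds them into the max-log bit
+    // metrics further below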
@@ -2141,16 +2126,16 @@ void qam64_qam64_avx2(int32_t *stream0_in,
     // Rearrange desired MF output
     xmm0 = stream0_256i_in[i];
     xmm1 = stream0_256i_in[i+1];
-    xmm0 = _mm_shufflelo_epi16(xmm0,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm0 = _mm_shufflehi_epi16(xmm0,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm0 = _mm_shuffle_epi32(xmm0,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm1 = _mm_shufflelo_epi16(xmm1,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm1 = _mm_shufflehi_epi16(xmm1,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm1 = _mm_shuffle_epi32(xmm1,0xd8); //_MM_SHUFFLE(0,2,1,3));
+    xmm0 = simde_mm_shufflelo_epi16(xmm0,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
+    xmm0 = simde_mm_shufflehi_epi16(xmm0,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
+    xmm0 = simde_mm_shuffle_epi32(xmm0,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
+    xmm1 = simde_mm_shufflelo_epi16(xmm1,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
+    xmm1 = simde_mm_shufflehi_epi16(xmm1,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
+    xmm1 = simde_mm_shuffle_epi32(xmm1,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
     //xmm0 = [Re(0,1) Re(2,3) Im(0,1) Im(2,3)]
     //xmm1 = [Re(4,5) Re(6,7) Im(4,5) Im(6,7)]
-    y0r = _mm_unpacklo_epi64(xmm0,xmm1); // = [y0r(1),y0r(2),y0r(3),y0r(4)]
-    y0i = _mm_unpackhi_epi64(xmm0,xmm1);
+    y0r = simde_mm_unpacklo_epi64(xmm0,xmm1); // = [y0r(1),y0r(2),y0r(3),y0r(4)]
+    y0i = simde_mm_unpackhi_epi64(xmm0,xmm1);
     */
     simde__m256i y0r, y0i;
     simde_mm256_separate_real_imag_parts(&y0r, &y0i, stream0_256i_in[i], stream0_256i_in[i+1]);
@@ -2160,13 +2145,13 @@ void qam64_qam64_avx2(int32_t *stream0_in,
     /*
     xmm2 = ch_mag_256i[i];
     xmm3 = ch_mag_256i[i+1];
-    xmm2 = _mm_shufflelo_epi16(xmm2,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm2 = _mm_shufflehi_epi16(xmm2,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm2 = _mm_shuffle_epi32(xmm2,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm3 = _mm_shufflelo_epi16(xmm3,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm3 = _mm_shufflehi_epi16(xmm3,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm3 = _mm_shuffle_epi32(xmm3,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    ch_mag_des = _mm_unpacklo_epi64(xmm2,xmm3);
+    xmm2 = simde_mm_shufflelo_epi16(xmm2,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
+    xmm2 = simde_mm_shufflehi_epi16(xmm2,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
+    xmm2 = simde_mm_shuffle_epi32(xmm2,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
+    xmm3 = simde_mm_shufflelo_epi16(xmm3,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
+    xmm3 = simde_mm_shufflehi_epi16(xmm3,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
+    xmm3 = simde_mm_shuffle_epi32(xmm3,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
+    ch_mag_des = simde_mm_unpacklo_epi64(xmm2,xmm3);
     */
    // xmm2 is a dummy variable that receives the same values as ch_mag_des
     simde_mm256_separate_real_imag_parts(&ch_mag_des, &xmm2, ch_mag_256i[i], ch_mag_256i[i+1]);
@@ -2176,13 +2161,13 @@ void qam64_qam64_avx2(int32_t *stream0_in,
     /*
     xmm2 = ch_mag_256i_i[i];
     xmm3 = ch_mag_256i_i[i+1];
-    xmm2 = _mm_shufflelo_epi16(xmm2,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm2 = _mm_shufflehi_epi16(xmm2,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm2 = _mm_shuffle_epi32(xmm2,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm3 = _mm_shufflelo_epi16(xmm3,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm3 = _mm_shufflehi_epi16(xmm3,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm3 = _mm_shuffle_epi32(xmm3,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    ch_mag_int  = _mm_unpacklo_epi64(xmm2,xmm3);
+    xmm2 = simde_mm_shufflelo_epi16(xmm2,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
+    xmm2 = simde_mm_shufflehi_epi16(xmm2,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
+    xmm2 = simde_mm_shuffle_epi32(xmm2,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
+    xmm3 = simde_mm_shufflelo_epi16(xmm3,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
+    xmm3 = simde_mm_shufflehi_epi16(xmm3,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
+    xmm3 = simde_mm_shuffle_epi32(xmm3,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3));
+    ch_mag_int  = simde_mm_unpacklo_epi64(xmm2,xmm3);
     */
     simde_mm256_separate_real_imag_parts(&ch_mag_int, &xmm2, ch_mag_256i_i[i], ch_mag_256i_i[i+1]);
 
@@ -3509,14 +3494,9 @@ void qam64_qam64_avx2(int32_t *stream0_in,
     stream0_out[j + 94] = ((short *)&y1i)[15];
     stream0_out[j + 95] = ((short *)&y2i)[15];
 
-#elif defined(__arm__) || defined(__aarch64__)
-
-#endif
 
   }
 
-#if defined(__x86_64__) || defined(__i386__)
-  _mm_empty();
-  _m_empty();
-#endif
+  simde_mm_empty();
+  simde_m_empty();
 }
diff --git a/openair1/PHY/LTE_UE_TRANSPORT/pbch_ue.c b/openair1/PHY/LTE_UE_TRANSPORT/pbch_ue.c
index 108680c640abed805ff3617b2358f5b55b8f32de..cf62a3a945136fc17ff97dee85defa7b388c7359 100644
--- a/openair1/PHY/LTE_UE_TRANSPORT/pbch_ue.c
+++ b/openair1/PHY/LTE_UE_TRANSPORT/pbch_ue.c
@@ -147,37 +147,23 @@ int pbch_channel_level(int **dl_ch_estimates_ext,
                        LTE_DL_FRAME_PARMS *frame_parms,
                        uint32_t symbol) {
   int16_t rb, nb_rb=6;
-  uint8_t aatx,aarx;
-#if defined(__x86_64__) || defined(__i386__)
-  __m128i avg128;
-  __m128i *dl_ch128;
-#elif defined(__arm__) || defined(__aarch64__)
-  int32x4_t avg128;
-  int16x8_t *dl_ch128;
-#endif
+  uint8_t aatx, aarx;
+  simde__m128i avg128;
+  simde__m128i *dl_ch128;
   int avg1=0,avg2=0;
   uint32_t nsymb = (frame_parms->Ncp==0) ? 7:6;
   uint32_t symbol_mod = symbol % nsymb;
 
   for (aatx=0; aatx<4; aatx++) //frame_parms->nb_antenna_ports_eNB;aatx++)
     for (aarx=0; aarx<frame_parms->nb_antennas_rx; aarx++) {
-      //clear average level
-#if defined(__x86_64__) || defined(__i386__)
-      avg128 = _mm_setzero_si128();
-      dl_ch128=(__m128i *)&dl_ch_estimates_ext[(aatx<<1)+aarx][symbol_mod*6*12];
-#elif defined(__arm__) || defined(__aarch64__)
-      avg128 = vdupq_n_s32(0);
-      dl_ch128=(int16x8_t *)&dl_ch_estimates_ext[(aatx<<1)+aarx][symbol_mod*6*12];
-#endif
-
-      for (rb=0; rb<nb_rb; rb++) {
-#if defined(__x86_64__) || defined(__i386__)
-        avg128 = _mm_add_epi32(avg128,_mm_madd_epi16(dl_ch128[0],dl_ch128[0]));
-        avg128 = _mm_add_epi32(avg128,_mm_madd_epi16(dl_ch128[1],dl_ch128[1]));
-        avg128 = _mm_add_epi32(avg128,_mm_madd_epi16(dl_ch128[2],dl_ch128[2]));
-#elif defined(__arm__) || defined(__aarch64__)
-        // to be filled in
-#endif
+      // clear average level
+      avg128 = simde_mm_setzero_si128();
+      dl_ch128 = (simde__m128i *)&dl_ch_estimates_ext[(aatx << 1) + aarx][symbol_mod * 6 * 12];
+
+      for (rb = 0; rb < nb_rb; rb++) {
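+        // one resource block spans 12 complex int16 REs = 3 x 128-bit
+        // vectors, hence three accumulations per rb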
+        avg128 = simde_mm_add_epi32(avg128, simde_mm_madd_epi16(dl_ch128[0], dl_ch128[0]));
+        avg128 = simde_mm_add_epi32(avg128, simde_mm_madd_epi16(dl_ch128[1], dl_ch128[1]));
+        avg128 = simde_mm_add_epi32(avg128, simde_mm_madd_epi16(dl_ch128[2], dl_ch128[2]));
         dl_ch128+=3;
         /*
           if (rb==0) {
@@ -199,18 +185,12 @@ int pbch_channel_level(int **dl_ch_estimates_ext,
       //msg("Channel level : %d, %d\n",avg1, avg2);
     }
 
-#if defined(__x86_64__) || defined(__i386__)
-  _mm_empty();
-  _m_empty();
-#endif
+  simde_mm_empty();
+  simde_m_empty();
   return(avg2);
 }
 
-#if defined(__x86_64__) || defined(__i386__)
-  __m128i mmtmpP0,mmtmpP1,mmtmpP2,mmtmpP3;
-#elif defined(__arm__) || defined(__aarch64__)
-  int16x8_t mmtmpP0,mmtmpP1,mmtmpP2,mmtmpP3;
-#endif
+simde__m128i mmtmpP0, mmtmpP1, mmtmpP2, mmtmpP3;
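+// NB: file-scope scratch vectors shared across calls; this mirrors the old
+// x86/NEON globals but means pbch_channel_compensation is not reentrant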
 void pbch_channel_compensation(int **rxdataF_ext,
                                int **dl_ch_estimates_ext,
                                int **rxdataF_comp,
@@ -218,79 +198,71 @@ void pbch_channel_compensation(int **rxdataF_ext,
                                uint8_t symbol,
                                uint8_t output_shift) {
   uint16_t rb,nb_rb=6;
-  uint8_t aatx,aarx,symbol_mod;
-#if defined(__x86_64__) || defined(__i386__)
-  __m128i *dl_ch128,*rxdataF128,*rxdataF_comp128;
-#elif defined(__arm__) || defined(__aarch64__)
-#endif
+  uint8_t aatx, aarx, symbol_mod;
+  simde__m128i *dl_ch128, *rxdataF128, *rxdataF_comp128;
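+  // map the subframe-level symbol index onto a slot-relative index
+  // (7 symbols per slot with normal CP, 6 with extended CP)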
   symbol_mod = (symbol>=(7-frame_parms->Ncp)) ? symbol-(7-frame_parms->Ncp) : symbol;
 
   for (aatx=0; aatx<4; aatx++) //frame_parms->nb_antenna_ports_eNB;aatx++)
-    for (aarx=0; aarx<frame_parms->nb_antennas_rx; aarx++) {
-#if defined(__x86_64__) || defined(__i386__)
-      dl_ch128          = (__m128i *)&dl_ch_estimates_ext[(aatx<<1)+aarx][symbol_mod*6*12];
-      rxdataF128        = (__m128i *)&rxdataF_ext[aarx][symbol_mod*6*12];
-      rxdataF_comp128   = (__m128i *)&rxdataF_comp[(aatx<<1)+aarx][symbol_mod*6*12];
-#elif defined(__arm__) || defined(__aarch64__)
-      // to be filled in
-#endif
+    for (aarx = 0; aarx < frame_parms->nb_antennas_rx; aarx++) {
+      dl_ch128 = (simde__m128i *)&dl_ch_estimates_ext[(aatx << 1) + aarx][symbol_mod * 6 * 12];
+      rxdataF128 = (simde__m128i *)&rxdataF_ext[aarx][symbol_mod * 6 * 12];
+      rxdataF_comp128 = (simde__m128i *)&rxdataF_comp[(aatx << 1) + aarx][symbol_mod * 6 * 12];
 
       for (rb=0; rb<nb_rb; rb++) {
-        //printf("rb %d\n",rb);
-#if defined(__x86_64__) || defined(__i386__)
-        // multiply by conjugated channel
-        mmtmpP0 = _mm_madd_epi16(dl_ch128[0],rxdataF128[0]);
+        // printf("rb %d\n",rb);
+        //  multiply by conjugated channel
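+        //  (complex multiply y = conj(h) * r on packed int16 (Re,Im) pairs:
+        //   madd_epi16(h, r) = hr*rr + hi*ri is the real part; for the
+        //   imaginary part, swap h's Re/Im halves with the two
+        //   SIMDE_MM_SHUFFLE(2,3,0,1) shuffles, flip signs against 'conjugate'
+        //   to obtain (-hi, hr), then madd with r yields hr*ri - hi*rr; the
+        //   srai by output_shift rescales the 32-bit products before they are
+        //   packed back to int16)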
+        mmtmpP0 = simde_mm_madd_epi16(dl_ch128[0], rxdataF128[0]);
         //  print_ints("re",&mmtmpP0);
         // mmtmpP0 contains real part of 4 consecutive outputs (32-bit)
-        mmtmpP1 = _mm_shufflelo_epi16(dl_ch128[0],_MM_SHUFFLE(2,3,0,1));
-        mmtmpP1 = _mm_shufflehi_epi16(mmtmpP1,_MM_SHUFFLE(2,3,0,1));
-        mmtmpP1 = _mm_sign_epi16(mmtmpP1,*(__m128i *)&conjugate[0]);
+        mmtmpP1 = simde_mm_shufflelo_epi16(dl_ch128[0], SIMDE_MM_SHUFFLE(2, 3, 0, 1));
+        mmtmpP1 = simde_mm_shufflehi_epi16(mmtmpP1, SIMDE_MM_SHUFFLE(2, 3, 0, 1));
+        mmtmpP1 = simde_mm_sign_epi16(mmtmpP1, *(simde__m128i *)&conjugate[0]);
         //  print_ints("im",&mmtmpP1);
-        mmtmpP1 = _mm_madd_epi16(mmtmpP1,rxdataF128[0]);
+        mmtmpP1 = simde_mm_madd_epi16(mmtmpP1, rxdataF128[0]);
         // mmtmpP1 contains imag part of 4 consecutive outputs (32-bit)
-        mmtmpP0 = _mm_srai_epi32(mmtmpP0,output_shift);
+        mmtmpP0 = simde_mm_srai_epi32(mmtmpP0, output_shift);
         //  print_ints("re(shift)",&mmtmpP0);
-        mmtmpP1 = _mm_srai_epi32(mmtmpP1,output_shift);
+        mmtmpP1 = simde_mm_srai_epi32(mmtmpP1, output_shift);
         //  print_ints("im(shift)",&mmtmpP1);
-        mmtmpP2 = _mm_unpacklo_epi32(mmtmpP0,mmtmpP1);
-        mmtmpP3 = _mm_unpackhi_epi32(mmtmpP0,mmtmpP1);
+        mmtmpP2 = simde_mm_unpacklo_epi32(mmtmpP0, mmtmpP1);
+        mmtmpP3 = simde_mm_unpackhi_epi32(mmtmpP0, mmtmpP1);
         //      print_ints("c0",&mmtmpP2);
         //  print_ints("c1",&mmtmpP3);
-        rxdataF_comp128[0] = _mm_packs_epi32(mmtmpP2,mmtmpP3);
+        rxdataF_comp128[0] = simde_mm_packs_epi32(mmtmpP2, mmtmpP3);
         //  print_shorts("rx:",rxdataF128);
         //  print_shorts("ch:",dl_ch128);
         //  print_shorts("pack:",rxdataF_comp128);
         // multiply by conjugated channel
-        mmtmpP0 = _mm_madd_epi16(dl_ch128[1],rxdataF128[1]);
+        mmtmpP0 = simde_mm_madd_epi16(dl_ch128[1], rxdataF128[1]);
         // mmtmpP0 contains real part of 4 consecutive outputs (32-bit)
-        mmtmpP1 = _mm_shufflelo_epi16(dl_ch128[1],_MM_SHUFFLE(2,3,0,1));
-        mmtmpP1 = _mm_shufflehi_epi16(mmtmpP1,_MM_SHUFFLE(2,3,0,1));
-        mmtmpP1 = _mm_sign_epi16(mmtmpP1,*(__m128i *)&conjugate[0]);
-        mmtmpP1 = _mm_madd_epi16(mmtmpP1,rxdataF128[1]);
+        mmtmpP1 = simde_mm_shufflelo_epi16(dl_ch128[1], SIMDE_MM_SHUFFLE(2, 3, 0, 1));
+        mmtmpP1 = simde_mm_shufflehi_epi16(mmtmpP1, SIMDE_MM_SHUFFLE(2, 3, 0, 1));
+        mmtmpP1 = simde_mm_sign_epi16(mmtmpP1, *(simde__m128i *)&conjugate[0]);
+        mmtmpP1 = simde_mm_madd_epi16(mmtmpP1, rxdataF128[1]);
         // mmtmpP1 contains imag part of 4 consecutive outputs (32-bit)
-        mmtmpP0 = _mm_srai_epi32(mmtmpP0,output_shift);
-        mmtmpP1 = _mm_srai_epi32(mmtmpP1,output_shift);
-        mmtmpP2 = _mm_unpacklo_epi32(mmtmpP0,mmtmpP1);
-        mmtmpP3 = _mm_unpackhi_epi32(mmtmpP0,mmtmpP1);
-        rxdataF_comp128[1] = _mm_packs_epi32(mmtmpP2,mmtmpP3);
+        mmtmpP0 = simde_mm_srai_epi32(mmtmpP0, output_shift);
+        mmtmpP1 = simde_mm_srai_epi32(mmtmpP1, output_shift);
+        mmtmpP2 = simde_mm_unpacklo_epi32(mmtmpP0, mmtmpP1);
+        mmtmpP3 = simde_mm_unpackhi_epi32(mmtmpP0, mmtmpP1);
+        rxdataF_comp128[1] = simde_mm_packs_epi32(mmtmpP2, mmtmpP3);
         //  print_shorts("rx:",rxdataF128+1);
         //  print_shorts("ch:",dl_ch128+1);
         //  print_shorts("pack:",rxdataF_comp128+1);
 
         if (symbol_mod>1) {
           // multiply by conjugated channel
-          mmtmpP0 = _mm_madd_epi16(dl_ch128[2],rxdataF128[2]);
+          mmtmpP0 = simde_mm_madd_epi16(dl_ch128[2], rxdataF128[2]);
           // mmtmpP0 contains real part of 4 consecutive outputs (32-bit)
-          mmtmpP1 = _mm_shufflelo_epi16(dl_ch128[2],_MM_SHUFFLE(2,3,0,1));
-          mmtmpP1 = _mm_shufflehi_epi16(mmtmpP1,_MM_SHUFFLE(2,3,0,1));
-          mmtmpP1 = _mm_sign_epi16(mmtmpP1,*(__m128i *)&conjugate[0]);
-          mmtmpP1 = _mm_madd_epi16(mmtmpP1,rxdataF128[2]);
+          mmtmpP1 = simde_mm_shufflelo_epi16(dl_ch128[2], SIMDE_MM_SHUFFLE(2, 3, 0, 1));
+          mmtmpP1 = simde_mm_shufflehi_epi16(mmtmpP1, SIMDE_MM_SHUFFLE(2, 3, 0, 1));
+          mmtmpP1 = simde_mm_sign_epi16(mmtmpP1, *(simde__m128i *)&conjugate[0]);
+          mmtmpP1 = simde_mm_madd_epi16(mmtmpP1, rxdataF128[2]);
           // mmtmpP1 contains imag part of 4 consecutive outputs (32-bit)
-          mmtmpP0 = _mm_srai_epi32(mmtmpP0,output_shift);
-          mmtmpP1 = _mm_srai_epi32(mmtmpP1,output_shift);
-          mmtmpP2 = _mm_unpacklo_epi32(mmtmpP0,mmtmpP1);
-          mmtmpP3 = _mm_unpackhi_epi32(mmtmpP0,mmtmpP1);
-          rxdataF_comp128[2] = _mm_packs_epi32(mmtmpP2,mmtmpP3);
+          mmtmpP0 = simde_mm_srai_epi32(mmtmpP0, output_shift);
+          mmtmpP1 = simde_mm_srai_epi32(mmtmpP1, output_shift);
+          mmtmpP2 = simde_mm_unpacklo_epi32(mmtmpP0, mmtmpP1);
+          mmtmpP3 = simde_mm_unpackhi_epi32(mmtmpP0, mmtmpP1);
+          rxdataF_comp128[2] = simde_mm_packs_epi32(mmtmpP2, mmtmpP3);
           //  print_shorts("rx:",rxdataF128+2);
           //  print_shorts("ch:",dl_ch128+2);
           //      print_shorts("pack:",rxdataF_comp128+2);
@@ -302,56 +274,36 @@ void pbch_channel_compensation(int **rxdataF_ext,
           rxdataF128+=2;
           rxdataF_comp128+=2;
         }
-
-#elif defined(__arm__) || defined(__aarch64__)
-        // to be filled in
-#endif
       }
     }
 
-#if defined(__x86_64__) || defined(__i386__)
-  _mm_empty();
-  _m_empty();
-#endif
+  simde_mm_empty();
+  simde_m_empty();
 }
 
 void pbch_detection_mrc(LTE_DL_FRAME_PARMS *frame_parms,
                         int **rxdataF_comp,
                         uint8_t symbol) {
   uint8_t aatx, symbol_mod;
-  int i, nb_rb=6;
-#if defined(__x86_64__) || defined(__i386__)
-  __m128i *rxdataF_comp128_0,*rxdataF_comp128_1;
-#elif defined(__arm__) || defined(__aarch64__)
-  int16x8_t *rxdataF_comp128_0,*rxdataF_comp128_1;
-#endif
+  int i, nb_rb = 6;
+  simde__m128i *rxdataF_comp128_0, *rxdataF_comp128_1;
   symbol_mod = (symbol>=(7-frame_parms->Ncp)) ? symbol-(7-frame_parms->Ncp) : symbol;
 
   if (frame_parms->nb_antennas_rx>1) {
-    for (aatx=0; aatx<4; aatx++) { //frame_parms->nb_antenna_ports_eNB;aatx++) {
-#if defined(__x86_64__) || defined(__i386__)
-      rxdataF_comp128_0   = (__m128i *)&rxdataF_comp[(aatx<<1)][symbol_mod*6*12];
-      rxdataF_comp128_1   = (__m128i *)&rxdataF_comp[(aatx<<1)+1][symbol_mod*6*12];
-#elif defined(__arm__) || defined(__aarch64__)
-      rxdataF_comp128_0   = (int16x8_t *)&rxdataF_comp[(aatx<<1)][symbol_mod*6*12];
-      rxdataF_comp128_1   = (int16x8_t *)&rxdataF_comp[(aatx<<1)+1][symbol_mod*6*12];
-#endif
+    for (aatx = 0; aatx < 4; aatx++) { // frame_parms->nb_antenna_ports_eNB;aatx++) {
+      rxdataF_comp128_0 = (simde__m128i *)&rxdataF_comp[(aatx << 1)][symbol_mod * 6 * 12];
+      rxdataF_comp128_1 = (simde__m128i *)&rxdataF_comp[(aatx << 1) + 1][symbol_mod * 6 * 12];
 
       // MRC on each re of rb, both on MF output and magnitude (for 16QAM/64QAM llr computation)
-      for (i=0; i<nb_rb*3; i++) {
-#if defined(__x86_64__) || defined(__i386__)
-        rxdataF_comp128_0[i] = _mm_adds_epi16(_mm_srai_epi16(rxdataF_comp128_0[i],1),_mm_srai_epi16(rxdataF_comp128_1[i],1));
-#elif defined(__arm__) || defined(__aarch64__)
-        rxdataF_comp128_0[i] = vhaddq_s16(rxdataF_comp128_0[i],rxdataF_comp128_1[i]);
-#endif
+      for (i = 0; i < nb_rb * 3; i++) {
+        rxdataF_comp128_0[i] =
+            simde_mm_adds_epi16(simde_mm_srai_epi16(rxdataF_comp128_0[i], 1), simde_mm_srai_epi16(rxdataF_comp128_1[i], 1));
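+        // shifting each operand right by 1 before the saturating add averages
+        // the two antennas without int16 overflow -- the portable equivalent
+        // of the NEON halving add (vhaddq_s16) used by the removed ARM branch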
       }
     }
   }
 
-#if defined(__x86_64__) || defined(__i386__)
-  _mm_empty();
-  _m_empty();
-#endif
+  simde_mm_empty();
+  simde_m_empty();
 }
 
 void pbch_unscrambling(LTE_DL_FRAME_PARMS *frame_parms,
@@ -385,7 +337,7 @@ void pbch_alamouti(LTE_DL_FRAME_PARMS *frame_parms,
                    int **rxdataF_comp,
                    uint8_t symbol) {
   int16_t *rxF0,*rxF1;
-  //  __m128i *ch_mag0,*ch_mag1,*ch_mag0b,*ch_mag1b;
+  //  simde__m128i *ch_mag0,*ch_mag1,*ch_mag0b,*ch_mag1b;
   uint8_t rb,re,symbol_mod;
   int jj;
   //  printf("Doing alamouti\n");
diff --git a/openair1/PHY/LTE_UE_TRANSPORT/pmch_ue.c b/openair1/PHY/LTE_UE_TRANSPORT/pmch_ue.c
index fe82120e2f6e8dd30031c986e1aed586caaf74f8..a3b33cf54b87adf7128130c4a880130e1c9863c0 100644
--- a/openair1/PHY/LTE_UE_TRANSPORT/pmch_ue.c
+++ b/openair1/PHY/LTE_UE_TRANSPORT/pmch_ue.c
@@ -217,32 +217,23 @@ void mch_channel_level(int **dl_ch_estimates_ext,
                        int *avg,
                        uint8_t symbol,
                        unsigned short nb_rb) {
-  int i,aarx,nre;
-#if defined(__x86_64__) || defined(__i386__)
-  __m128i *dl_ch128,avg128;
-#elif defined(__arm__) || defined(__aarch64__)
-  int32x4_t avg128;
-#endif
+  int i, aarx, nre;
+  simde__m128i *dl_ch128, avg128;
 
-  for (aarx=0; aarx<frame_parms->nb_antennas_rx; aarx++) {
-#if defined(__x86_64__) || defined(__i386__)
+  for (aarx = 0; aarx < frame_parms->nb_antennas_rx; aarx++) {
     //clear average level
-    avg128 = _mm_setzero_si128();
+    avg128 = simde_mm_setzero_si128();
     // 5 is always a symbol with no pilots for both normal and extended prefix
-    dl_ch128=(__m128i *)&dl_ch_estimates_ext[aarx][symbol*frame_parms->N_RB_DL*12];
-#elif defined(__arm__) || defined(__aarch64__)
-#endif
+    dl_ch128 = (simde__m128i *)&dl_ch_estimates_ext[aarx][symbol * frame_parms->N_RB_DL * 12];
 
     if ((symbol == 2) || (symbol == 6) || (symbol == 10))
       nre = (frame_parms->N_RB_DL*6);
     else
       nre = (frame_parms->N_RB_DL*12);
 
-    for (i=0; i<(nre>>2); i++) {
-#if defined(__x86_64__) || defined(__i386__)
-      avg128 = _mm_add_epi32(avg128,_mm_srai_epi32(_mm_madd_epi16(dl_ch128[0],dl_ch128[0]),log2_approx(nre>>2)-1));
-#elif defined(__arm__) || defined(__aarch64__)
-#endif
+    for (i = 0; i < (nre >> 2); i++) {
+      avg128 =
+          simde_mm_add_epi32(avg128, simde_mm_srai_epi32(simde_mm_madd_epi16(dl_ch128[0], dl_ch128[0]), log2_approx(nre >> 2) - 1));
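+      // the madd above squares and pairwise-adds the int16 taps; pre-shifting
+      // each partial sum right by log2(nre/4)-1 keeps the 32-bit accumulator
+      // from overflowing across the nre/4 iterations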
     }
 
     avg[aarx] = (((((int*)&avg128)[0] +
@@ -253,10 +244,8 @@ void mch_channel_level(int **dl_ch_estimates_ext,
     //            printf("Channel level : %d\n",avg[(aatx<<1)+aarx]);
   }
 
-#if defined(__x86_64__) || defined(__i386__)
-  _mm_empty();
-  _m_empty();
-#endif
+  simde_mm_empty();
+  simde_m_empty();
 }
 
 void mch_channel_level_khz_1dot25(int **dl_ch_estimates_ext,
@@ -264,21 +253,14 @@ void mch_channel_level_khz_1dot25(int **dl_ch_estimates_ext,
                                   int *avg,
                                   /*uint8_t symbol,*/
                                   unsigned short nb_rb) {
-  int i,aarx,nre;
-#if defined(__x86_64__) || defined(__i386__)
-  __m128i *dl_ch128,avg128;
-#elif defined(__arm__) || defined(__aarch64__)
-  int32x4_t avg128;
-#endif
+  int i, aarx, nre;
+  simde__m128i *dl_ch128, avg128;
 
-  for (aarx=0; aarx<frame_parms->nb_antennas_rx; aarx++) {
-#if defined(__x86_64__) || defined(__i386__)
+  for (aarx = 0; aarx < frame_parms->nb_antennas_rx; aarx++) {
     //clear average level
-    avg128 = _mm_setzero_si128();
+    avg128 = simde_mm_setzero_si128();
     // 5 is always a symbol with no pilots for both normal and extended prefix
-    dl_ch128=(__m128i *)&dl_ch_estimates_ext[aarx][0/*symbol*frame_parms->N_RB_DL*12*/];
-#elif defined(__arm__) || defined(__aarch64__)
-#endif
+    dl_ch128 = (simde__m128i *)&dl_ch_estimates_ext[aarx][0 /*symbol*frame_parms->N_RB_DL*12*/];
     /*if ((symbol == 2) || (symbol == 6) || (symbol == 10))
       nre = (frame_parms->N_RB_DL*6);
     else
@@ -286,12 +268,10 @@ void mch_channel_level_khz_1dot25(int **dl_ch_estimates_ext,
     nre = frame_parms->N_RB_DL*12*10;
     //nre = frame_parms->N_RB_DL*12;
 
-    for (i=0; i<(nre>>2); i++) {
-#if defined(__x86_64__) || defined(__i386__)
-      //avg128 = _mm_add_epi32(avg128,_mm_madd_epi16(dl_ch128[0],dl_ch128[0]));
-      avg128 = _mm_add_epi32(avg128,_mm_srai_epi32(_mm_madd_epi16(dl_ch128[0],dl_ch128[0]),log2_approx(nre>>2)-1));
-#elif defined(__arm__) || defined(__aarch64__)
-#endif
+    for (i = 0; i < (nre >> 2); i++) {
+      // avg128 = simde_mm_add_epi32(avg128,simde_mm_madd_epi16(dl_ch128[0],dl_ch128[0]));
+      avg128 =
+          simde_mm_add_epi32(avg128, simde_mm_srai_epi32(simde_mm_madd_epi16(dl_ch128[0], dl_ch128[0]), log2_approx(nre >> 2) - 1));
     }
 
    // avg[aarx] = (((int*)&avg128)[0] +
@@ -305,10 +285,8 @@ void mch_channel_level_khz_1dot25(int **dl_ch_estimates_ext,
                 //printf("Channel level : %d\n",avg[(aatx<<1)+aarx]);
   }
 
-#if defined(__x86_64__) || defined(__i386__)
-  _mm_empty();
-  _m_empty();
-#endif
+  simde_mm_empty();
+  simde_m_empty();
 }
 
 
@@ -322,103 +300,88 @@ void mch_channel_compensation(int **rxdataF_ext,
                               unsigned char symbol,
                               unsigned char mod_order,
                               unsigned char output_shift) {
-  int aarx,nre,i;
-#if defined(__x86_64__) || defined(__i386__)
-  __m128i *dl_ch128,*dl_ch_mag128,*dl_ch_mag128b,*rxdataF128,*rxdataF_comp128;
-  __m128i mmtmpD0,mmtmpD1,mmtmpD2,mmtmpD3,QAM_amp128={0},QAM_amp128b={0};
-#elif defined(__arm__) || defined(__aarch64__)
-#endif
+  int aarx, nre, i;
+  simde__m128i *dl_ch128, *dl_ch_mag128, *dl_ch_mag128b, *rxdataF128, *rxdataF_comp128;
+  simde__m128i mmtmpD0, mmtmpD1, mmtmpD2, mmtmpD3, QAM_amp128 = {0}, QAM_amp128b = {0};
 
   if ((symbol == 2) || (symbol == 6) || (symbol == 10))
     nre = frame_parms->N_RB_DL*6;
   else
     nre = frame_parms->N_RB_DL*12;
 
-#if defined(__x86_64__) || defined(__i386__)
-
   if (mod_order == 4) {
-    QAM_amp128 = _mm_set1_epi16(QAM16_n1);  // 2/sqrt(10)
-    QAM_amp128b = _mm_setzero_si128();
+    QAM_amp128 = simde_mm_set1_epi16(QAM16_n1); // 2/sqrt(10)
+    QAM_amp128b = simde_mm_setzero_si128();
   } else if (mod_order == 6) {
-    QAM_amp128  = _mm_set1_epi16(QAM64_n1); //
-    QAM_amp128b = _mm_set1_epi16(QAM64_n2);
+    QAM_amp128 = simde_mm_set1_epi16(QAM64_n1); //
+    QAM_amp128b = simde_mm_set1_epi16(QAM64_n2);
   }
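+  // QAM_amp128/QAM_amp128b scale |h|^2 to the constellation decision
+  // thresholds: 2/sqrt(10) for 16QAM as noted above; for 64QAM the two
+  // constants presumably encode 4/sqrt(42) and 2/sqrt(42) (the empty comment
+  // above predates this change)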
 
-#elif defined(__arm__) || defined(__aarch64__)
-#endif
-
-  for (aarx=0; aarx<frame_parms->nb_antennas_rx; aarx++) {
-#if defined(__x86_64__) || defined(__i386__)
-    dl_ch128          = (__m128i *)&dl_ch_estimates_ext[aarx][symbol*frame_parms->N_RB_DL*12];
-    dl_ch_mag128      = (__m128i *)&dl_ch_mag[aarx][symbol*frame_parms->N_RB_DL*12];
-    dl_ch_mag128b     = (__m128i *)&dl_ch_magb[aarx][symbol*frame_parms->N_RB_DL*12];
-    rxdataF128        = (__m128i *)&rxdataF_ext[aarx][symbol*frame_parms->N_RB_DL*12];
-    rxdataF_comp128   = (__m128i *)&rxdataF_comp[aarx][symbol*frame_parms->N_RB_DL*12];
-#elif defined(__arm__) || defined(__aarch64__)
-#endif
+  for (aarx = 0; aarx < frame_parms->nb_antennas_rx; aarx++) {
+    dl_ch128 = (simde__m128i *)&dl_ch_estimates_ext[aarx][symbol * frame_parms->N_RB_DL * 12];
+    dl_ch_mag128 = (simde__m128i *)&dl_ch_mag[aarx][symbol * frame_parms->N_RB_DL * 12];
+    dl_ch_mag128b = (simde__m128i *)&dl_ch_magb[aarx][symbol * frame_parms->N_RB_DL * 12];
+    rxdataF128 = (simde__m128i *)&rxdataF_ext[aarx][symbol * frame_parms->N_RB_DL * 12];
+    rxdataF_comp128 = (simde__m128i *)&rxdataF_comp[aarx][symbol * frame_parms->N_RB_DL * 12];
 
     for (i=0; i<(nre>>2); i+=2) {
       if (mod_order>2) {
         // get channel amplitude if not QPSK
-#if defined(__x86_64__) || defined(__i386__)
-        mmtmpD0 = _mm_madd_epi16(dl_ch128[0],dl_ch128[0]);
-        mmtmpD0 = _mm_srai_epi32(mmtmpD0,output_shift);
-        mmtmpD1 = _mm_madd_epi16(dl_ch128[1],dl_ch128[1]);
-        mmtmpD1 = _mm_srai_epi32(mmtmpD1,output_shift);
-        mmtmpD0 = _mm_packs_epi32(mmtmpD0,mmtmpD1);
+        mmtmpD0 = simde_mm_madd_epi16(dl_ch128[0], dl_ch128[0]);
+        mmtmpD0 = simde_mm_srai_epi32(mmtmpD0, output_shift);
+        mmtmpD1 = simde_mm_madd_epi16(dl_ch128[1], dl_ch128[1]);
+        mmtmpD1 = simde_mm_srai_epi32(mmtmpD1, output_shift);
+        mmtmpD0 = simde_mm_packs_epi32(mmtmpD0, mmtmpD1);
         // store channel magnitude here in a new field of dlsch
-        dl_ch_mag128[0] = _mm_unpacklo_epi16(mmtmpD0,mmtmpD0);
+        dl_ch_mag128[0] = simde_mm_unpacklo_epi16(mmtmpD0, mmtmpD0);
         dl_ch_mag128b[0] = dl_ch_mag128[0];
-        dl_ch_mag128[0] = _mm_mulhi_epi16(dl_ch_mag128[0],QAM_amp128);
-        dl_ch_mag128[0] = _mm_slli_epi16(dl_ch_mag128[0],1);
-        dl_ch_mag128[1] = _mm_unpackhi_epi16(mmtmpD0,mmtmpD0);
+        dl_ch_mag128[0] = simde_mm_mulhi_epi16(dl_ch_mag128[0], QAM_amp128);
+        dl_ch_mag128[0] = simde_mm_slli_epi16(dl_ch_mag128[0], 1);
+        dl_ch_mag128[1] = simde_mm_unpackhi_epi16(mmtmpD0, mmtmpD0);
         dl_ch_mag128b[1] = dl_ch_mag128[1];
-        dl_ch_mag128[1] = _mm_mulhi_epi16(dl_ch_mag128[1],QAM_amp128);
-        dl_ch_mag128[1] = _mm_slli_epi16(dl_ch_mag128[1],1);
-        dl_ch_mag128b[0] = _mm_mulhi_epi16(dl_ch_mag128b[0],QAM_amp128b);
-        dl_ch_mag128b[0] = _mm_slli_epi16(dl_ch_mag128b[0],1);
-        dl_ch_mag128b[1] = _mm_mulhi_epi16(dl_ch_mag128b[1],QAM_amp128b);
-        dl_ch_mag128b[1] = _mm_slli_epi16(dl_ch_mag128b[1],1);
-#elif defined(__arm__) || defined(__aarch64__)
-#endif
+        dl_ch_mag128[1] = simde_mm_mulhi_epi16(dl_ch_mag128[1], QAM_amp128);
+        dl_ch_mag128[1] = simde_mm_slli_epi16(dl_ch_mag128[1], 1);
+        dl_ch_mag128b[0] = simde_mm_mulhi_epi16(dl_ch_mag128b[0], QAM_amp128b);
+        dl_ch_mag128b[0] = simde_mm_slli_epi16(dl_ch_mag128b[0], 1);
+        dl_ch_mag128b[1] = simde_mm_mulhi_epi16(dl_ch_mag128b[1], QAM_amp128b);
+        dl_ch_mag128b[1] = simde_mm_slli_epi16(dl_ch_mag128b[1], 1);
       }
 
-#if defined(__x86_64__) || defined(__i386__)
       // multiply by conjugated channel
-      mmtmpD0 = _mm_madd_epi16(dl_ch128[0],rxdataF128[0]);
+      mmtmpD0 = simde_mm_madd_epi16(dl_ch128[0], rxdataF128[0]);
       //  print_ints("re",&mmtmpD0);
       // mmtmpD0 contains real part of 4 consecutive outputs (32-bit)
-      mmtmpD1 = _mm_shufflelo_epi16(dl_ch128[0],_MM_SHUFFLE(2,3,0,1));
-      mmtmpD1 = _mm_shufflehi_epi16(mmtmpD1,_MM_SHUFFLE(2,3,0,1));
-      mmtmpD1 = _mm_sign_epi16(mmtmpD1,*(__m128i *)&conjugate[0]);
+      mmtmpD1 = simde_mm_shufflelo_epi16(dl_ch128[0], SIMDE_MM_SHUFFLE(2, 3, 0, 1));
+      mmtmpD1 = simde_mm_shufflehi_epi16(mmtmpD1, SIMDE_MM_SHUFFLE(2, 3, 0, 1));
+      mmtmpD1 = simde_mm_sign_epi16(mmtmpD1, *(simde__m128i *)&conjugate[0]);
       //  print_ints("im",&mmtmpD1);
-      mmtmpD1 = _mm_madd_epi16(mmtmpD1,rxdataF128[0]);
+      mmtmpD1 = simde_mm_madd_epi16(mmtmpD1, rxdataF128[0]);
       // mmtmpD1 contains imag part of 4 consecutive outputs (32-bit)
-      mmtmpD0 = _mm_srai_epi32(mmtmpD0,output_shift);
+      mmtmpD0 = simde_mm_srai_epi32(mmtmpD0, output_shift);
       //  print_ints("re(shift)",&mmtmpD0);
-      mmtmpD1 = _mm_srai_epi32(mmtmpD1,output_shift);
+      mmtmpD1 = simde_mm_srai_epi32(mmtmpD1, output_shift);
       //  print_ints("im(shift)",&mmtmpD1);
-      mmtmpD2 = _mm_unpacklo_epi32(mmtmpD0,mmtmpD1);
-      mmtmpD3 = _mm_unpackhi_epi32(mmtmpD0,mmtmpD1);
+      mmtmpD2 = simde_mm_unpacklo_epi32(mmtmpD0, mmtmpD1);
+      mmtmpD3 = simde_mm_unpackhi_epi32(mmtmpD0, mmtmpD1);
       //        print_ints("c0",&mmtmpD2);
       //  print_ints("c1",&mmtmpD3);
-      rxdataF_comp128[0] = _mm_packs_epi32(mmtmpD2,mmtmpD3);
+      rxdataF_comp128[0] = simde_mm_packs_epi32(mmtmpD2, mmtmpD3);
       //  print_shorts("rx:",rxdataF128);
       //  print_shorts("ch:",dl_ch128);
       //  print_shorts("pack:",rxdataF_comp128);
       // multiply by conjugated channel
-      mmtmpD0 = _mm_madd_epi16(dl_ch128[1],rxdataF128[1]);
+      mmtmpD0 = simde_mm_madd_epi16(dl_ch128[1], rxdataF128[1]);
       // mmtmpD0 contains real part of 4 consecutive outputs (32-bit)
-      mmtmpD1 = _mm_shufflelo_epi16(dl_ch128[1],_MM_SHUFFLE(2,3,0,1));
-      mmtmpD1 = _mm_shufflehi_epi16(mmtmpD1,_MM_SHUFFLE(2,3,0,1));
-      mmtmpD1 = _mm_sign_epi16(mmtmpD1,*(__m128i *)conjugate);
-      mmtmpD1 = _mm_madd_epi16(mmtmpD1,rxdataF128[1]);
+      mmtmpD1 = simde_mm_shufflelo_epi16(dl_ch128[1], SIMDE_MM_SHUFFLE(2, 3, 0, 1));
+      mmtmpD1 = simde_mm_shufflehi_epi16(mmtmpD1, SIMDE_MM_SHUFFLE(2, 3, 0, 1));
+      mmtmpD1 = simde_mm_sign_epi16(mmtmpD1, *(simde__m128i *)conjugate);
+      mmtmpD1 = simde_mm_madd_epi16(mmtmpD1, rxdataF128[1]);
       // mmtmpD1 contains imag part of 4 consecutive outputs (32-bit)
-      mmtmpD0 = _mm_srai_epi32(mmtmpD0,output_shift);
-      mmtmpD1 = _mm_srai_epi32(mmtmpD1,output_shift);
-      mmtmpD2 = _mm_unpacklo_epi32(mmtmpD0,mmtmpD1);
-      mmtmpD3 = _mm_unpackhi_epi32(mmtmpD0,mmtmpD1);
-      rxdataF_comp128[1] = _mm_packs_epi32(mmtmpD2,mmtmpD3);
+      mmtmpD0 = simde_mm_srai_epi32(mmtmpD0, output_shift);
+      mmtmpD1 = simde_mm_srai_epi32(mmtmpD1, output_shift);
+      mmtmpD2 = simde_mm_unpacklo_epi32(mmtmpD0, mmtmpD1);
+      mmtmpD3 = simde_mm_unpackhi_epi32(mmtmpD0, mmtmpD1);
+      rxdataF_comp128[1] = simde_mm_packs_epi32(mmtmpD2, mmtmpD3);
       //  print_shorts("rx:",rxdataF128+1);
       //  print_shorts("ch:",dl_ch128+1);
       //  print_shorts("pack:",rxdataF_comp128+1);
@@ -426,16 +389,12 @@ void mch_channel_compensation(int **rxdataF_ext,
       dl_ch_mag128+=2;
       dl_ch_mag128b+=2;
       rxdataF128+=2;
-      rxdataF_comp128+=2;
-#elif defined(__arm__) || defined(__aarch64__)
-#endif
+      rxdataF_comp128 += 2;
     }
   }
 
-#if defined(__x86_64__) || defined(__i386__)
-  _mm_empty();
-  _m_empty();
-#endif
+  simde_mm_empty();
+  simde_m_empty();
 }
 
 
@@ -449,102 +408,88 @@ void mch_channel_compensation_khz_1dot25(int **rxdataF_ext,
     /*unsigned char symbol,*/
     unsigned char mod_order,
     unsigned char output_shift) {
-  int aarx,nre,i;
-#if defined(__x86_64__) || defined(__i386__)
-  __m128i *dl_ch128,*dl_ch_mag128,*dl_ch_mag128b,*rxdataF128,*rxdataF_comp128;
-  __m128i mmtmpD0,mmtmpD1,mmtmpD2,mmtmpD3,QAM_amp128={0},QAM_amp128b={0};
-#elif defined(__arm__) || defined(__aarch64__)
-#endif
+  int aarx, nre, i;
+  simde__m128i *dl_ch128, *dl_ch_mag128, *dl_ch_mag128b, *rxdataF128, *rxdataF_comp128;
+  simde__m128i mmtmpD0, mmtmpD1, mmtmpD2, mmtmpD3, QAM_amp128 = {0}, QAM_amp128b = {0};
   /*if ((symbol == 2) || (symbol == 6) || (symbol == 10))
     nre = frame_parms->N_RB_DL*6;
   else
     nre = frame_parms->N_RB_DL*12;*/
-  nre = frame_parms->N_RB_DL*12*10;
-#if defined(__x86_64__) || defined(__i386__)
+  nre = frame_parms->N_RB_DL * 12 * 10;
 
   if (mod_order == 4) {
-    QAM_amp128 = _mm_set1_epi16(QAM16_n1);  // 2/sqrt(10)
-    QAM_amp128b = _mm_setzero_si128();
+    QAM_amp128 = simde_mm_set1_epi16(QAM16_n1); // 2/sqrt(10)
+    QAM_amp128b = simde_mm_setzero_si128();
   } else if (mod_order == 6) {
-    QAM_amp128  = _mm_set1_epi16(QAM64_n1); //
-    QAM_amp128b = _mm_set1_epi16(QAM64_n2);
+    QAM_amp128 = simde_mm_set1_epi16(QAM64_n1); //
+    QAM_amp128b = simde_mm_set1_epi16(QAM64_n2);
   }
 
-#elif defined(__arm__) || defined(__aarch64__)
-#endif
-
-  for (aarx=0; aarx<frame_parms->nb_antennas_rx; aarx++) {
-#if defined(__x86_64__) || defined(__i386__)
-    dl_ch128          = (__m128i *)&dl_ch_estimates_ext[aarx][0];
-    dl_ch_mag128      = (__m128i *)&dl_ch_mag[aarx][0];
-    dl_ch_mag128b     = (__m128i *)&dl_ch_magb[aarx][0];
-    rxdataF128        = (__m128i *)&rxdataF_ext[aarx][0];
-    rxdataF_comp128   = (__m128i *)&rxdataF_comp[aarx][0];
-#elif defined(__arm__) || defined(__aarch64__)
-#endif
+  for (aarx = 0; aarx < frame_parms->nb_antennas_rx; aarx++) {
+    dl_ch128 = (simde__m128i *)&dl_ch_estimates_ext[aarx][0];
+    dl_ch_mag128 = (simde__m128i *)&dl_ch_mag[aarx][0];
+    dl_ch_mag128b = (simde__m128i *)&dl_ch_magb[aarx][0];
+    rxdataF128 = (simde__m128i *)&rxdataF_ext[aarx][0];
+    rxdataF_comp128 = (simde__m128i *)&rxdataF_comp[aarx][0];
 
     for (i=0; i<(nre>>2); i+=2) {
       if (mod_order>2) {
         // get channel amplitude if not QPSK
-#if defined(__x86_64__) || defined(__i386__)
-        mmtmpD0 = _mm_madd_epi16(dl_ch128[0],dl_ch128[0]);
-        mmtmpD0 = _mm_srai_epi32(mmtmpD0,output_shift);
-        mmtmpD1 = _mm_madd_epi16(dl_ch128[1],dl_ch128[1]);
-        mmtmpD1 = _mm_srai_epi32(mmtmpD1,output_shift);
-        mmtmpD0 = _mm_packs_epi32(mmtmpD0,mmtmpD1);
+        mmtmpD0 = simde_mm_madd_epi16(dl_ch128[0], dl_ch128[0]);
+        mmtmpD0 = simde_mm_srai_epi32(mmtmpD0, output_shift);
+        mmtmpD1 = simde_mm_madd_epi16(dl_ch128[1], dl_ch128[1]);
+        mmtmpD1 = simde_mm_srai_epi32(mmtmpD1, output_shift);
+        mmtmpD0 = simde_mm_packs_epi32(mmtmpD0, mmtmpD1);
         // store channel magnitude here in a new field of dlsch
-        dl_ch_mag128[0] = _mm_unpacklo_epi16(mmtmpD0,mmtmpD0);
+        dl_ch_mag128[0] = simde_mm_unpacklo_epi16(mmtmpD0, mmtmpD0);
         dl_ch_mag128b[0] = dl_ch_mag128[0];
-        dl_ch_mag128[0] = _mm_mulhi_epi16(dl_ch_mag128[0],QAM_amp128);
-        dl_ch_mag128[0] = _mm_slli_epi16(dl_ch_mag128[0],1);
-        dl_ch_mag128[1] = _mm_unpackhi_epi16(mmtmpD0,mmtmpD0);
+        dl_ch_mag128[0] = simde_mm_mulhi_epi16(dl_ch_mag128[0], QAM_amp128);
+        dl_ch_mag128[0] = simde_mm_slli_epi16(dl_ch_mag128[0], 1);
+        dl_ch_mag128[1] = simde_mm_unpackhi_epi16(mmtmpD0, mmtmpD0);
         dl_ch_mag128b[1] = dl_ch_mag128[1];
-        dl_ch_mag128[1] = _mm_mulhi_epi16(dl_ch_mag128[1],QAM_amp128);
-        dl_ch_mag128[1] = _mm_slli_epi16(dl_ch_mag128[1],1);
-        dl_ch_mag128b[0] = _mm_mulhi_epi16(dl_ch_mag128b[0],QAM_amp128b);
-        dl_ch_mag128b[0] = _mm_slli_epi16(dl_ch_mag128b[0],1);
-        dl_ch_mag128b[1] = _mm_mulhi_epi16(dl_ch_mag128b[1],QAM_amp128b);
-        dl_ch_mag128b[1] = _mm_slli_epi16(dl_ch_mag128b[1],1);
-#elif defined(__arm__) || defined(__aarch64__)
-#endif
+        dl_ch_mag128[1] = simde_mm_mulhi_epi16(dl_ch_mag128[1], QAM_amp128);
+        dl_ch_mag128[1] = simde_mm_slli_epi16(dl_ch_mag128[1], 1);
+        dl_ch_mag128b[0] = simde_mm_mulhi_epi16(dl_ch_mag128b[0], QAM_amp128b);
+        dl_ch_mag128b[0] = simde_mm_slli_epi16(dl_ch_mag128b[0], 1);
+        dl_ch_mag128b[1] = simde_mm_mulhi_epi16(dl_ch_mag128b[1], QAM_amp128b);
+        dl_ch_mag128b[1] = simde_mm_slli_epi16(dl_ch_mag128b[1], 1);
       }
 
-#if defined(__x86_64__) || defined(__i386__)
       // multiply by conjugated channel
-      mmtmpD0 = _mm_madd_epi16(dl_ch128[0],rxdataF128[0]);
+      mmtmpD0 = simde_mm_madd_epi16(dl_ch128[0], rxdataF128[0]);
       //  print_ints("re",&mmtmpD0);
       // mmtmpD0 contains real part of 4 consecutive outputs (32-bit)
-      mmtmpD1 = _mm_shufflelo_epi16(dl_ch128[0],_MM_SHUFFLE(2,3,0,1));
-      mmtmpD1 = _mm_shufflehi_epi16(mmtmpD1,_MM_SHUFFLE(2,3,0,1));
-      mmtmpD1 = _mm_sign_epi16(mmtmpD1,*(__m128i *)&conjugate[0]);
+      mmtmpD1 = simde_mm_shufflelo_epi16(dl_ch128[0], SIMDE_MM_SHUFFLE(2, 3, 0, 1));
+      mmtmpD1 = simde_mm_shufflehi_epi16(mmtmpD1, SIMDE_MM_SHUFFLE(2, 3, 0, 1));
+      mmtmpD1 = simde_mm_sign_epi16(mmtmpD1, *(simde__m128i *)&conjugate[0]);
       //  print_ints("im",&mmtmpD1);
-      mmtmpD1 = _mm_madd_epi16(mmtmpD1,rxdataF128[0]);
+      mmtmpD1 = simde_mm_madd_epi16(mmtmpD1, rxdataF128[0]);
       // mmtmpD1 contains imag part of 4 consecutive outputs (32-bit)
-      mmtmpD0 = _mm_srai_epi32(mmtmpD0,output_shift);
+      mmtmpD0 = simde_mm_srai_epi32(mmtmpD0, output_shift);
       //  print_ints("re(shift)",&mmtmpD0);
-      mmtmpD1 = _mm_srai_epi32(mmtmpD1,output_shift);
+      mmtmpD1 = simde_mm_srai_epi32(mmtmpD1, output_shift);
       //  print_ints("im(shift)",&mmtmpD1);
-      mmtmpD2 = _mm_unpacklo_epi32(mmtmpD0,mmtmpD1);
-      mmtmpD3 = _mm_unpackhi_epi32(mmtmpD0,mmtmpD1);
+      mmtmpD2 = simde_mm_unpacklo_epi32(mmtmpD0, mmtmpD1);
+      mmtmpD3 = simde_mm_unpackhi_epi32(mmtmpD0, mmtmpD1);
       //        print_ints("c0",&mmtmpD2);
       //  print_ints("c1",&mmtmpD3);
-      rxdataF_comp128[0] = _mm_packs_epi32(mmtmpD2,mmtmpD3);
+      rxdataF_comp128[0] = simde_mm_packs_epi32(mmtmpD2, mmtmpD3);
       //  print_shorts("rx:",rxdataF128);
       //  print_shorts("ch:",dl_ch128);
       //  print_shorts("pack:",rxdataF_comp128);
       // multiply by conjugated channel
-      mmtmpD0 = _mm_madd_epi16(dl_ch128[1],rxdataF128[1]);
+      mmtmpD0 = simde_mm_madd_epi16(dl_ch128[1], rxdataF128[1]);
       // mmtmpD0 contains real part of 4 consecutive outputs (32-bit)
-      mmtmpD1 = _mm_shufflelo_epi16(dl_ch128[1],_MM_SHUFFLE(2,3,0,1));
-      mmtmpD1 = _mm_shufflehi_epi16(mmtmpD1,_MM_SHUFFLE(2,3,0,1));
-      mmtmpD1 = _mm_sign_epi16(mmtmpD1,*(__m128i *)conjugate);
-      mmtmpD1 = _mm_madd_epi16(mmtmpD1,rxdataF128[1]);
+      mmtmpD1 = simde_mm_shufflelo_epi16(dl_ch128[1], SIMDE_MM_SHUFFLE(2, 3, 0, 1));
+      mmtmpD1 = simde_mm_shufflehi_epi16(mmtmpD1, SIMDE_MM_SHUFFLE(2, 3, 0, 1));
+      mmtmpD1 = simde_mm_sign_epi16(mmtmpD1, *(simde__m128i *)conjugate);
+      mmtmpD1 = simde_mm_madd_epi16(mmtmpD1, rxdataF128[1]);
       // mmtmpD1 contains imag part of 4 consecutive outputs (32-bit)
-      mmtmpD0 = _mm_srai_epi32(mmtmpD0,output_shift);
-      mmtmpD1 = _mm_srai_epi32(mmtmpD1,output_shift);
-      mmtmpD2 = _mm_unpacklo_epi32(mmtmpD0,mmtmpD1);
-      mmtmpD3 = _mm_unpackhi_epi32(mmtmpD0,mmtmpD1);
-      rxdataF_comp128[1] = _mm_packs_epi32(mmtmpD2,mmtmpD3);
+      mmtmpD0 = simde_mm_srai_epi32(mmtmpD0, output_shift);
+      mmtmpD1 = simde_mm_srai_epi32(mmtmpD1, output_shift);
+      mmtmpD2 = simde_mm_unpacklo_epi32(mmtmpD0, mmtmpD1);
+      mmtmpD3 = simde_mm_unpackhi_epi32(mmtmpD0, mmtmpD1);
+      rxdataF_comp128[1] = simde_mm_packs_epi32(mmtmpD2, mmtmpD3);
       //      print_shorts("rx:",rxdataF128+1);
       //     print_shorts("ch:",dl_ch128+1);
       //      print_shorts("pack:",rxdataF_comp128+1);
@@ -552,16 +497,12 @@ void mch_channel_compensation_khz_1dot25(int **rxdataF_ext,
       dl_ch_mag128+=2;
       dl_ch_mag128b+=2;
       rxdataF128+=2;
-      rxdataF_comp128+=2;
-#elif defined(__arm__) || defined(__aarch64__)
-#endif
+      rxdataF_comp128 += 2;
     }
   }
 
-#if defined(__x86_64__) || defined(__i386__)
-  _mm_empty();
-  _m_empty();
-#endif
+  simde_mm_empty();
+  simde_m_empty();
 }
 
 
@@ -572,47 +513,28 @@ void mch_detection_mrc(LTE_DL_FRAME_PARMS *frame_parms,
                        int **dl_ch_magb,
                        unsigned char symbol) {
   int i;
-#if defined(__x86_64__) || defined(__i386__)
-  __m128i *rxdataF_comp128_0,*rxdataF_comp128_1,*dl_ch_mag128_0,*dl_ch_mag128_1,*dl_ch_mag128_0b,*dl_ch_mag128_1b;
-#elif defined(__arm__) || defined(__aarch64__)
-  int16x8_t *rxdataF_comp128_0,*rxdataF_comp128_1,*dl_ch_mag128_0,*dl_ch_mag128_1,*dl_ch_mag128_0b,*dl_ch_mag128_1b;
-#endif
-
-  if (frame_parms->nb_antennas_rx>1) {
-#if defined(__x86_64__) || defined(__i386__)
-    rxdataF_comp128_0   = (__m128i *)&rxdataF_comp[0][symbol*frame_parms->N_RB_DL*12];
-    rxdataF_comp128_1   = (__m128i *)&rxdataF_comp[1][symbol*frame_parms->N_RB_DL*12];
-    dl_ch_mag128_0      = (__m128i *)&dl_ch_mag[0][symbol*frame_parms->N_RB_DL*12];
-    dl_ch_mag128_1      = (__m128i *)&dl_ch_mag[1][symbol*frame_parms->N_RB_DL*12];
-    dl_ch_mag128_0b     = (__m128i *)&dl_ch_magb[0][symbol*frame_parms->N_RB_DL*12];
-    dl_ch_mag128_1b     = (__m128i *)&dl_ch_magb[1][symbol*frame_parms->N_RB_DL*12];
-#elif defined(__arm__) || defined(__aarch64__)
-    rxdataF_comp128_0   = (int16x8_t *)&rxdataF_comp[0][symbol*frame_parms->N_RB_DL*12];
-    rxdataF_comp128_1   = (int16x8_t *)&rxdataF_comp[1][symbol*frame_parms->N_RB_DL*12];
-    dl_ch_mag128_0      = (int16x8_t *)&dl_ch_mag[0][symbol*frame_parms->N_RB_DL*12];
-    dl_ch_mag128_1      = (int16x8_t *)&dl_ch_mag[1][symbol*frame_parms->N_RB_DL*12];
-    dl_ch_mag128_0b     = (int16x8_t *)&dl_ch_magb[0][symbol*frame_parms->N_RB_DL*12];
-    dl_ch_mag128_1b     = (int16x8_t *)&dl_ch_magb[1][symbol*frame_parms->N_RB_DL*12];
-#endif
+  simde__m128i *rxdataF_comp128_0, *rxdataF_comp128_1, *dl_ch_mag128_0, *dl_ch_mag128_1, *dl_ch_mag128_0b, *dl_ch_mag128_1b;
+
+  if (frame_parms->nb_antennas_rx > 1) {
+    rxdataF_comp128_0 = (simde__m128i *)&rxdataF_comp[0][symbol * frame_parms->N_RB_DL * 12];
+    rxdataF_comp128_1 = (simde__m128i *)&rxdataF_comp[1][symbol * frame_parms->N_RB_DL * 12];
+    dl_ch_mag128_0 = (simde__m128i *)&dl_ch_mag[0][symbol * frame_parms->N_RB_DL * 12];
+    dl_ch_mag128_1 = (simde__m128i *)&dl_ch_mag[1][symbol * frame_parms->N_RB_DL * 12];
+    dl_ch_mag128_0b = (simde__m128i *)&dl_ch_magb[0][symbol * frame_parms->N_RB_DL * 12];
+    dl_ch_mag128_1b = (simde__m128i *)&dl_ch_magb[1][symbol * frame_parms->N_RB_DL * 12];
 
     // MRC on each re of rb, both on MF output and magnitude (for 16QAM/64QAM llr computation)
-    for (i=0; i<frame_parms->N_RB_DL*3; i++) {
-#if defined(__x86_64__) || defined(__i386__)
-      rxdataF_comp128_0[i] = _mm_adds_epi16(_mm_srai_epi16(rxdataF_comp128_0[i],1),_mm_srai_epi16(rxdataF_comp128_1[i],1));
-      dl_ch_mag128_0[i]    = _mm_adds_epi16(_mm_srai_epi16(dl_ch_mag128_0[i],1),_mm_srai_epi16(dl_ch_mag128_1[i],1));
-      dl_ch_mag128_0b[i]   = _mm_adds_epi16(_mm_srai_epi16(dl_ch_mag128_0b[i],1),_mm_srai_epi16(dl_ch_mag128_1b[i],1));
-#elif defined(__arm__) || defined(__aarch64__)
-      rxdataF_comp128_0[i] = vhaddq_s16(rxdataF_comp128_0[i],rxdataF_comp128_1[i]);
-      dl_ch_mag128_0[i]    = vhaddq_s16(dl_ch_mag128_0[i],dl_ch_mag128_1[i]);
-      dl_ch_mag128_0b[i]   = vhaddq_s16(dl_ch_mag128_0b[i],dl_ch_mag128_1b[i]);
-#endif
+    for (i = 0; i < frame_parms->N_RB_DL * 3; i++) {
+      rxdataF_comp128_0[i] =
+          simde_mm_adds_epi16(simde_mm_srai_epi16(rxdataF_comp128_0[i], 1), simde_mm_srai_epi16(rxdataF_comp128_1[i], 1));
+      dl_ch_mag128_0[i] = simde_mm_adds_epi16(simde_mm_srai_epi16(dl_ch_mag128_0[i], 1), simde_mm_srai_epi16(dl_ch_mag128_1[i], 1));
+      dl_ch_mag128_0b[i] =
+          simde_mm_adds_epi16(simde_mm_srai_epi16(dl_ch_mag128_0b[i], 1), simde_mm_srai_epi16(dl_ch_mag128_1b[i], 1));
     }
   }
 
-#if defined(__x86_64__) || defined(__i386__)
-  _mm_empty();
-  _m_empty();
-#endif
+  simde_mm_empty();
+  simde_m_empty();
 }
 
 
@@ -622,47 +544,28 @@ void mch_detection_mrc_khz_1dot25(LTE_DL_FRAME_PARMS *frame_parms,
                                   int **dl_ch_magb/*,
                        unsigned char symbol*/) {
   int i;
-#if defined(__x86_64__) || defined(__i386__)
-  __m128i *rxdataF_comp128_0,*rxdataF_comp128_1,*dl_ch_mag128_0,*dl_ch_mag128_1,*dl_ch_mag128_0b,*dl_ch_mag128_1b;
-#elif defined(__arm__) || defined(__aarch64__)
-  int16x8_t *rxdataF_comp128_0,*rxdataF_comp128_1,*dl_ch_mag128_0,*dl_ch_mag128_1,*dl_ch_mag128_0b,*dl_ch_mag128_1b;
-#endif
-
-  if (frame_parms->nb_antennas_rx>1) {
-#if defined(__x86_64__) || defined(__i386__)
-    rxdataF_comp128_0   = (__m128i *)&rxdataF_comp[0][0];
-    rxdataF_comp128_1   = (__m128i *)&rxdataF_comp[1][0];
-    dl_ch_mag128_0      = (__m128i *)&dl_ch_mag[0][0];
-    dl_ch_mag128_1      = (__m128i *)&dl_ch_mag[1][0];
-    dl_ch_mag128_0b     = (__m128i *)&dl_ch_magb[0][0];
-    dl_ch_mag128_1b     = (__m128i *)&dl_ch_magb[1][0];
-#elif defined(__arm__) || defined(__aarch64__)
-    rxdataF_comp128_0   = (int16x8_t *)&rxdataF_comp[0][0];
-    rxdataF_comp128_1   = (int16x8_t *)&rxdataF_comp[1][0];
-    dl_ch_mag128_0      = (int16x8_t *)&dl_ch_mag[0][0];
-    dl_ch_mag128_1      = (int16x8_t *)&dl_ch_mag[1][0];
-    dl_ch_mag128_0b     = (int16x8_t *)&dl_ch_magb[0][0];
-    dl_ch_mag128_1b     = (int16x8_t *)&dl_ch_magb[1][0];
-#endif
+  simde__m128i *rxdataF_comp128_0, *rxdataF_comp128_1, *dl_ch_mag128_0, *dl_ch_mag128_1, *dl_ch_mag128_0b, *dl_ch_mag128_1b;
+
+  if (frame_parms->nb_antennas_rx > 1) {
+    rxdataF_comp128_0 = (simde__m128i *)&rxdataF_comp[0][0];
+    rxdataF_comp128_1 = (simde__m128i *)&rxdataF_comp[1][0];
+    dl_ch_mag128_0 = (simde__m128i *)&dl_ch_mag[0][0];
+    dl_ch_mag128_1 = (simde__m128i *)&dl_ch_mag[1][0];
+    dl_ch_mag128_0b = (simde__m128i *)&dl_ch_magb[0][0];
+    dl_ch_mag128_1b = (simde__m128i *)&dl_ch_magb[1][0];
 
     // MRC on each re of rb, both on MF output and magnitude (for 16QAM/64QAM llr computation)
-    for (i=0; i<frame_parms->N_RB_DL*30; i++) {
-#if defined(__x86_64__) || defined(__i386__)
-      rxdataF_comp128_0[i] = _mm_adds_epi16(_mm_srai_epi16(rxdataF_comp128_0[i],1),_mm_srai_epi16(rxdataF_comp128_1[i],1));
-      dl_ch_mag128_0[i]    = _mm_adds_epi16(_mm_srai_epi16(dl_ch_mag128_0[i],1),_mm_srai_epi16(dl_ch_mag128_1[i],1));
-      dl_ch_mag128_0b[i]   = _mm_adds_epi16(_mm_srai_epi16(dl_ch_mag128_0b[i],1),_mm_srai_epi16(dl_ch_mag128_1b[i],1));
-#elif defined(__arm__) || defined(__aarch64__)
-      rxdataF_comp128_0[i] = vhaddq_s16(rxdataF_comp128_0[i],rxdataF_comp128_1[i]);
-      dl_ch_mag128_0[i]    = vhaddq_s16(dl_ch_mag128_0[i],dl_ch_mag128_1[i]);
-      dl_ch_mag128_0b[i]   = vhaddq_s16(dl_ch_mag128_0b[i],dl_ch_mag128_1b[i]);
-#endif
+    for (i = 0; i < frame_parms->N_RB_DL * 30; i++) {
+      rxdataF_comp128_0[i] =
+          simde_mm_adds_epi16(simde_mm_srai_epi16(rxdataF_comp128_0[i], 1), simde_mm_srai_epi16(rxdataF_comp128_1[i], 1));
+      dl_ch_mag128_0[i] = simde_mm_adds_epi16(simde_mm_srai_epi16(dl_ch_mag128_0[i], 1), simde_mm_srai_epi16(dl_ch_mag128_1[i], 1));
+      dl_ch_mag128_0b[i] =
+          simde_mm_adds_epi16(simde_mm_srai_epi16(dl_ch_mag128_0b[i], 1), simde_mm_srai_epi16(dl_ch_mag128_1b[i], 1));
     }
   }
 
-#if defined(__x86_64__) || defined(__i386__)
-  _mm_empty();
-  _m_empty();
-#endif
+  simde_mm_empty();
+  simde_m_empty();
 }
 
 
@@ -700,10 +603,8 @@ int mch_qpsk_llr(LTE_DL_FRAME_PARMS *frame_parms,
   }
 
   *llr32p = (short *)llr32;
-#if defined(__x86_64__) || defined(__i386__)
-  _mm_empty();
-  _m_empty();
-#endif
+  simde_mm_empty();
+  simde_m_empty();
   return(0);
 }
 
@@ -733,10 +634,8 @@ int mch_qpsk_llr_khz_1dot25(LTE_DL_FRAME_PARMS *frame_parms,
   }
 
   *llr32p = (short *)llr32;
-#if defined(__x86_64__) || defined(__i386__)
-  _mm_empty();
-  _m_empty();
-#endif
+  simde_mm_empty();
+  simde_m_empty();
   return(0);
 }
 
@@ -751,21 +650,14 @@ void mch_16qam_llr(LTE_DL_FRAME_PARMS *frame_parms,
                    short *dlsch_llr,
                    int **dl_ch_mag,
                    unsigned char symbol,
-                   int16_t **llr32p) {
-#if defined(__x86_64__) || defined(__i386__)
-  __m128i *rxF = (__m128i *)&rxdataF_comp[0][(symbol*frame_parms->N_RB_DL*12)];
-  __m128i *ch_mag;
-  __m128i llr128[2],xmm0;
+                   int16_t **llr32p)
+{
+  simde__m128i *rxF = (simde__m128i *)&rxdataF_comp[0][(symbol * frame_parms->N_RB_DL * 12)];
+  simde__m128i *ch_mag;
+  simde__m128i llr128[2], xmm0;
   uint32_t *llr32;
-#elif defined(__arm__) || defined(__aarch64__)
-  int16x8_t *rxF = (int16x8_t *)&rxdataF_comp[0][(symbol*frame_parms->N_RB_DL*12)];
-  int16x8_t *ch_mag;
-  int16x8_t llr128[2],xmm0;
-  int16_t *llr16;
-#endif
   int i,len;
-  unsigned char len_mod4=0;
-#if defined(__x86_64__) || defined(__i386__)
+  unsigned char len_mod4 = 0;
 
   if (symbol==2) {
     llr32 = (uint32_t *)dlsch_llr;
@@ -773,20 +665,7 @@ void mch_16qam_llr(LTE_DL_FRAME_PARMS *frame_parms,
     llr32 = (uint32_t *)*llr32p;
   }
 
-#elif defined(__arm__) || defined(__aarch64__)
-
-  if (symbol==2) {
-    llr16 = (int16_t *)dlsch_llr;
-  } else {
-    llr16 = (int16_t *)*llr32p;
-  }
-
-#endif
-#if defined(__x86_64__) || defined(__i386__)
-  ch_mag = (__m128i *)&dl_ch_mag[0][(symbol*frame_parms->N_RB_DL*12)];
-#elif defined(__arm__) || defined(__aarch64__)
-  ch_mag = (int16x8_t *)&dl_ch_mag[0][(symbol*frame_parms->N_RB_DL*12)];
-#endif
+  ch_mag = (simde__m128i *)&dl_ch_mag[0][(symbol * frame_parms->N_RB_DL * 12)];
 
   if ((symbol==2) || (symbol==6) || (symbol==10)) {
     len = frame_parms->N_RB_DL*6;
@@ -804,13 +683,12 @@ void mch_16qam_llr(LTE_DL_FRAME_PARMS *frame_parms,
   len>>=2;  // length in quad words (4 REs)
   len+=(len_mod4==0 ? 0 : 1);
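+  // round len up to a whole quad word (4 REs) so that a partial tail is still
+  // covered by the vector loop below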
 
-  for (i=0; i<len; i++) {
-#if defined(__x86_64__) || defined(__i386__)
-    xmm0 = _mm_abs_epi16(rxF[i]);
-    xmm0 = _mm_subs_epi16(ch_mag[i],xmm0);
+  for (i = 0; i < len; i++) {
+    xmm0 = simde_mm_abs_epi16(rxF[i]);
+    xmm0 = simde_mm_subs_epi16(ch_mag[i], xmm0);
     // lambda_1=y_R, lambda_2=|y_R|-|h|^2, lambda_3=y_I, lambda_4=|y_I|-|h|^2
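+    // max-log LLRs for Gray-coded 16QAM: per axis, the first bit metric is
+    // the matched-filter output itself and the second is the boundary
+    // distance ch_mag - |y|; interleaving rxF with xmm0 at 32-bit granularity
+    // below emits the four 16-bit LLRs of each RE in transmission order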
-    llr128[0] = _mm_unpacklo_epi32(rxF[i],xmm0);
-    llr128[1] = _mm_unpackhi_epi32(rxF[i],xmm0);
+    llr128[0] = simde_mm_unpacklo_epi32(rxF[i], xmm0);
+    llr128[1] = simde_mm_unpackhi_epi32(rxF[i], xmm0);
     llr32[0] = ((uint32_t *)&llr128[0])[0];
     llr32[1] = ((uint32_t *)&llr128[0])[1];
     llr32[2] = ((uint32_t *)&llr128[0])[2];
@@ -819,75 +697,32 @@ void mch_16qam_llr(LTE_DL_FRAME_PARMS *frame_parms,
     llr32[5] = ((uint32_t *)&llr128[1])[1];
     llr32[6] = ((uint32_t *)&llr128[1])[2];
     llr32[7] = ((uint32_t *)&llr128[1])[3];
-    llr32+=8;
-#elif defined(__arm__) || defined(__aarch64__)
-    xmm0 = vabsq_s16(rxF[i]);
-    xmm0 = vsubq_s16(ch_mag[i],xmm0);
-    // lambda_1=y_R, lambda_2=|y_R|-|h|^2, lamda_3=y_I, lambda_4=|y_I|-|h|^2
-    llr16[0] = vgetq_lane_s16(rxF[i],0);
-    llr16[1] = vgetq_lane_s16(xmm0,0);
-    llr16[2] = vgetq_lane_s16(rxF[i],1);
-    llr16[3] = vgetq_lane_s16(xmm0,1);
-    llr16[4] = vgetq_lane_s16(rxF[i],2);
-    llr16[5] = vgetq_lane_s16(xmm0,2);
-    llr16[6] = vgetq_lane_s16(rxF[i],2);
-    llr16[7] = vgetq_lane_s16(xmm0,3);
-    llr16[8] = vgetq_lane_s16(rxF[i],4);
-    llr16[9] = vgetq_lane_s16(xmm0,4);
-    llr16[10] = vgetq_lane_s16(rxF[i],5);
-    llr16[11] = vgetq_lane_s16(xmm0,5);
-    llr16[12] = vgetq_lane_s16(rxF[i],6);
-    llr16[13] = vgetq_lane_s16(xmm0,6);
-    llr16[14] = vgetq_lane_s16(rxF[i],7);
-    llr16[15] = vgetq_lane_s16(xmm0,7);
-    llr16+=16;
-#endif
+    llr32 += 8;
   }
 
-#if defined(__x86_64__) || defined(__i386__)
-  _mm_empty();
-  _m_empty();
-#endif
+  simde_mm_empty();
+  simde_m_empty();
 }
 
-
 void mch_16qam_llr_khz_1dot25(LTE_DL_FRAME_PARMS *frame_parms,
                               int **rxdataF_comp,
                               short *dlsch_llr,
                               int **dl_ch_mag,
                               /*unsigned char symbol,*/
-                              int16_t **llr32p) {
-#if defined(__x86_64__) || defined(__i386__)
-  __m128i *rxF = (__m128i *)&rxdataF_comp[0][0];
-  __m128i *ch_mag;
-  __m128i llr128[2],xmm0;
+                              int16_t **llr32p)
+{
+  simde__m128i *rxF = (simde__m128i *)&rxdataF_comp[0][0];
+  simde__m128i *ch_mag;
+  simde__m128i llr128[2], xmm0;
   uint32_t *llr32;
-#elif defined(__arm__) || defined(__aarch64__)
-  int16x8_t *rxF = (int16x8_t *)&rxdataF_comp[0][0];
-  int16x8_t *ch_mag;
-  int16x8_t llr128[2],xmm0;
-  int16_t *llr16;
-#endif
   int i,len;
-  unsigned char len_mod4=0;
-#if defined(__x86_64__) || defined(__i386__)
+  unsigned char len_mod4 = 0;
   //if (symbol==2) {
   llr32 = (uint32_t *)dlsch_llr;
   //} else {
-  //llr32 = (uint32_t*)*llr32p;
-  //}
-#elif defined(__arm__) || defined(__aarch64__)
-  //if (symbol==2) {
-  llr16 = (int16_t *)dlsch_llr;
-  //} else {
-  //  llr16 = (int16_t*)*llr32p;
+  // llr32 = (uint32_t*)*llr32p;
   //}
-#endif
-#if defined(__x86_64__) || defined(__i386__)
-  ch_mag = (__m128i *)&dl_ch_mag[0][0];
-#elif defined(__arm__) || defined(__aarch64__)
-  ch_mag = (int16x8_t *)&dl_ch_mag[0][0];
-#endif
+  ch_mag = (simde__m128i *)&dl_ch_mag[0][0];
   len = frame_parms->N_RB_DL*12*10;
   // update output pointer according to number of REs in this symbol (<<2 because 4 bits per RE)
   //if (symbol==2)
@@ -898,13 +733,12 @@ void mch_16qam_llr_khz_1dot25(LTE_DL_FRAME_PARMS *frame_parms,
   len>>=2;  // length in quad words (4 REs)
   len+=(len_mod4==0 ? 0 : 1);
 
-  for (i=0; i<len; i++) {
-#if defined(__x86_64__) || defined(__i386__)
-    xmm0 = _mm_abs_epi16(rxF[i]);
-    xmm0 = _mm_subs_epi16(ch_mag[i],xmm0);
+  for (i = 0; i < len; i++) {
+    xmm0 = simde_mm_abs_epi16(rxF[i]);
+    xmm0 = simde_mm_subs_epi16(ch_mag[i], xmm0);
     // lambda_1=y_R, lambda_2=|y_R|-|h|^2, lambda_3=y_I, lambda_4=|y_I|-|h|^2
-    llr128[0] = _mm_unpacklo_epi32(rxF[i],xmm0);
-    llr128[1] = _mm_unpackhi_epi32(rxF[i],xmm0);
+    llr128[0] = simde_mm_unpacklo_epi32(rxF[i], xmm0);
+    llr128[1] = simde_mm_unpackhi_epi32(rxF[i], xmm0);
     llr32[0] = ((uint32_t *)&llr128[0])[0];
     llr32[1] = ((uint32_t *)&llr128[0])[1];
     llr32[2] = ((uint32_t *)&llr128[0])[2];
@@ -913,38 +747,13 @@ void mch_16qam_llr_khz_1dot25(LTE_DL_FRAME_PARMS *frame_parms,
     llr32[5] = ((uint32_t *)&llr128[1])[1];
     llr32[6] = ((uint32_t *)&llr128[1])[2];
     llr32[7] = ((uint32_t *)&llr128[1])[3];
-    llr32+=8;
-#elif defined(__arm__) || defined(__aarch64__)
-    xmm0 = vabsq_s16(rxF[i]);
-    xmm0 = vsubq_s16(ch_mag[i],xmm0);
-    // lambda_1=y_R, lambda_2=|y_R|-|h|^2, lamda_3=y_I, lambda_4=|y_I|-|h|^2
-    llr16[0] = vgetq_lane_s16(rxF[i],0);
-    llr16[1] = vgetq_lane_s16(xmm0,0);
-    llr16[2] = vgetq_lane_s16(rxF[i],1);
-    llr16[3] = vgetq_lane_s16(xmm0,1);
-    llr16[4] = vgetq_lane_s16(rxF[i],2);
-    llr16[5] = vgetq_lane_s16(xmm0,2);
-    llr16[6] = vgetq_lane_s16(rxF[i],2);
-    llr16[7] = vgetq_lane_s16(xmm0,3);
-    llr16[8] = vgetq_lane_s16(rxF[i],4);
-    llr16[9] = vgetq_lane_s16(xmm0,4);
-    llr16[10] = vgetq_lane_s16(rxF[i],5);
-    llr16[11] = vgetq_lane_s16(xmm0,5);
-    llr16[12] = vgetq_lane_s16(rxF[i],6);
-    llr16[13] = vgetq_lane_s16(xmm0,6);
-    llr16[14] = vgetq_lane_s16(rxF[i],7);
-    llr16[15] = vgetq_lane_s16(xmm0,7);
-    llr16+=16;
-#endif
+    llr32 += 8;
   }
 
-#if defined(__x86_64__) || defined(__i386__)
-  _mm_empty();
-  _m_empty();
-#endif
+  simde_mm_empty();
+  simde_m_empty();
 }
 
-
 //----------------------------------------------------------------------------------------------
 // 64-QAM
 //----------------------------------------------------------------------------------------------
@@ -955,14 +764,10 @@ void mch_64qam_llr(LTE_DL_FRAME_PARMS *frame_parms,
                    int **dl_ch_mag,
                    int **dl_ch_magb,
                    unsigned char symbol,
-                   short **llr_save) {
-#if defined(__x86_64__) || defined(__i386__)
-  __m128i xmm1,xmm2,*ch_mag,*ch_magb;
-  __m128i *rxF = (__m128i *)&rxdataF_comp[0][(symbol*frame_parms->N_RB_DL*12)];
-#elif defined(__arm__) || defined(__aarch64__)
-  int16x8_t xmm1,xmm2,*ch_mag,*ch_magb;
-  int16x8_t *rxF = (int16x8_t *)&rxdataF_comp[0][(symbol*frame_parms->N_RB_DL*12)];
-#endif
+                   short **llr_save)
+{
+  simde__m128i xmm1, xmm2, *ch_mag, *ch_magb;
+  simde__m128i *rxF = (simde__m128i *)&rxdataF_comp[0][(symbol * frame_parms->N_RB_DL * 12)];
   int i,len,len2;
   //   int j=0;
   unsigned char len_mod4;
@@ -974,13 +779,8 @@ void mch_64qam_llr(LTE_DL_FRAME_PARMS *frame_parms,
   else
     llr = *llr_save;
 
-#if defined(__x86_64__) || defined(__i386__)
-  ch_mag = (__m128i *)&dl_ch_mag[0][(symbol*frame_parms->N_RB_DL*12)];
-  ch_magb = (__m128i *)&dl_ch_magb[0][(symbol*frame_parms->N_RB_DL*12)];
-#elif defined(__arm__) || defined(__aarch64__)
-  ch_mag = (int16x8_t *)&dl_ch_mag[0][(symbol*frame_parms->N_RB_DL*12)];
-  ch_magb = (int16x8_t *)&dl_ch_magb[0][(symbol*frame_parms->N_RB_DL*12)];
-#endif
+  ch_mag = (simde__m128i *)&dl_ch_mag[0][(symbol * frame_parms->N_RB_DL * 12)];
+  ch_magb = (simde__m128i *)&dl_ch_magb[0][(symbol * frame_parms->N_RB_DL * 12)];
 
   if ((symbol==2) || (symbol==6) || (symbol==10)) {
     len = frame_parms->N_RB_DL*6;
@@ -994,94 +794,57 @@ void mch_64qam_llr(LTE_DL_FRAME_PARMS *frame_parms,
   len2=len>>2;  // length in quad words (4 REs)
   len2+=(len_mod4?0:1);
 
-  for (i=0; i<len2; i++) {
-#if defined(__x86_64__) || defined(__i386__)
-    xmm1 = _mm_abs_epi16(rxF[i]);
-    xmm1  = _mm_subs_epi16(ch_mag[i],xmm1);
-    xmm2 = _mm_abs_epi16(xmm1);
-    xmm2 = _mm_subs_epi16(ch_magb[i],xmm2);
-#elif defined(__arm__) || defined(__aarch64__)
-    xmm1 = vabsq_s16(rxF[i]);
-    xmm1 = vsubq_s16(ch_mag[i],xmm1);
-    xmm2 = vabsq_s16(xmm1);
-    xmm2 = vsubq_s16(ch_magb[i],xmm2);
-#endif
+  for (i = 0; i < len2; i++) {
+    xmm1 = simde_mm_abs_epi16(rxF[i]);
+    xmm1 = simde_mm_subs_epi16(ch_mag[i], xmm1);
+    xmm2 = simde_mm_abs_epi16(xmm1);
+    xmm2 = simde_mm_subs_epi16(ch_magb[i], xmm2);
     // loop over all LLRs in quad word (24 coded bits)
     /*
     for (j=0;j<8;j+=2) {
       llr2[0] = ((short *)&rxF[i])[j];
       llr2[1] = ((short *)&rxF[i])[j+1];
-      llr2[2] = _mm_extract_epi16(xmm1,j);
-      llr2[3] = _mm_extract_epi16(xmm1,j+1);//((short *)&xmm1)[j+1];
-      llr2[4] = _mm_extract_epi16(xmm2,j);//((short *)&xmm2)[j];
-      llr2[5] = _mm_extract_epi16(xmm2,j+1);//((short *)&xmm2)[j+1];
+      llr2[2] = simde_mm_extract_epi16(xmm1,j);
+      llr2[3] = simde_mm_extract_epi16(xmm1,j+1);//((short *)&xmm1)[j+1];
+      llr2[4] = simde_mm_extract_epi16(xmm2,j);//((short *)&xmm2)[j];
+      llr2[5] = simde_mm_extract_epi16(xmm2,j+1);//((short *)&xmm2)[j+1];
 
       llr2+=6;
     }
     */
     llr2[0] = ((short *)&rxF[i])[0];
     llr2[1] = ((short *)&rxF[i])[1];
-#if defined(__x86_64__) || defined(__i386__)
-    llr2[2] = _mm_extract_epi16(xmm1,0);
-    llr2[3] = _mm_extract_epi16(xmm1,1);//((short *)&xmm1)[j+1];
-    llr2[4] = _mm_extract_epi16(xmm2,0);//((short *)&xmm2)[j];
-    llr2[5] = _mm_extract_epi16(xmm2,1);//((short *)&xmm2)[j+1];
-#elif defined(__arm__) || defined(__aarch64__)
-    llr2[2] = vgetq_lane_s16(xmm1,0);
-    llr2[3] = vgetq_lane_s16(xmm1,1);//((short *)&xmm1)[j+1];
-    llr2[4] = vgetq_lane_s16(xmm2,0);//((short *)&xmm2)[j];
-    llr2[5] = vgetq_lane_s16(xmm2,1);//((short *)&xmm2)[j+1];
-#endif
+    llr2[2] = simde_mm_extract_epi16(xmm1, 0);
+    llr2[3] = simde_mm_extract_epi16(xmm1, 1); //((short *)&xmm1)[j+1];
+    llr2[4] = simde_mm_extract_epi16(xmm2, 0); //((short *)&xmm2)[j];
+    llr2[5] = simde_mm_extract_epi16(xmm2, 1); //((short *)&xmm2)[j+1];
     llr2+=6;
     llr2[0] = ((short *)&rxF[i])[2];
     llr2[1] = ((short *)&rxF[i])[3];
-#if defined(__x86_64__) || defined(__i386__)
-    llr2[2] = _mm_extract_epi16(xmm1,2);
-    llr2[3] = _mm_extract_epi16(xmm1,3);//((short *)&xmm1)[j+1];
-    llr2[4] = _mm_extract_epi16(xmm2,2);//((short *)&xmm2)[j];
-    llr2[5] = _mm_extract_epi16(xmm2,3);//((short *)&xmm2)[j+1];
-#elif defined(__arm__) || defined(__aarch64__)
-    llr2[2] = vgetq_lane_s16(xmm1,2);
-    llr2[3] = vgetq_lane_s16(xmm1,3);//((short *)&xmm1)[j+1];
-    llr2[4] = vgetq_lane_s16(xmm2,2);//((short *)&xmm2)[j];
-    llr2[5] = vgetq_lane_s16(xmm2,3);//((short *)&xmm2)[j+1];
-#endif
+    llr2[2] = simde_mm_extract_epi16(xmm1, 2);
+    llr2[3] = simde_mm_extract_epi16(xmm1, 3); //((short *)&xmm1)[j+1];
+    llr2[4] = simde_mm_extract_epi16(xmm2, 2); //((short *)&xmm2)[j];
+    llr2[5] = simde_mm_extract_epi16(xmm2, 3); //((short *)&xmm2)[j+1];
     llr2+=6;
     llr2[0] = ((short *)&rxF[i])[4];
     llr2[1] = ((short *)&rxF[i])[5];
-#if defined(__x86_64__) || defined(__i386__)
-    llr2[2] = _mm_extract_epi16(xmm1,4);
-    llr2[3] = _mm_extract_epi16(xmm1,5);//((short *)&xmm1)[j+1];
-    llr2[4] = _mm_extract_epi16(xmm2,4);//((short *)&xmm2)[j];
-    llr2[5] = _mm_extract_epi16(xmm2,5);//((short *)&xmm2)[j+1];
-#elif defined(__arm__) || defined(__aarch64__)
-    llr2[2] = vgetq_lane_s16(xmm1,4);
-    llr2[3] = vgetq_lane_s16(xmm1,5);//((short *)&xmm1)[j+1];
-    llr2[4] = vgetq_lane_s16(xmm2,4);//((short *)&xmm2)[j];
-    llr2[5] = vgetq_lane_s16(xmm2,5);//((short *)&xmm2)[j+1];
-#endif
+    llr2[2] = simde_mm_extract_epi16(xmm1, 4);
+    llr2[3] = simde_mm_extract_epi16(xmm1, 5); //((short *)&xmm1)[j+1];
+    llr2[4] = simde_mm_extract_epi16(xmm2, 4); //((short *)&xmm2)[j];
+    llr2[5] = simde_mm_extract_epi16(xmm2, 5); //((short *)&xmm2)[j+1];
     llr2+=6;
     llr2[0] = ((short *)&rxF[i])[6];
     llr2[1] = ((short *)&rxF[i])[7];
-#if defined(__x86_64__) || defined(__i386__)
-    llr2[2] = _mm_extract_epi16(xmm1,6);
-    llr2[3] = _mm_extract_epi16(xmm1,7);//((short *)&xmm1)[j+1];
-    llr2[4] = _mm_extract_epi16(xmm2,6);//((short *)&xmm2)[j];
-    llr2[5] = _mm_extract_epi16(xmm2,7);//((short *)&xmm2)[j+1];
-#elif defined(__arm__) || defined(__aarch64__)
-    llr2[2] = vgetq_lane_s16(xmm1,6);
-    llr2[3] = vgetq_lane_s16(xmm1,7);//((short *)&xmm1)[j+1];
-    llr2[4] = vgetq_lane_s16(xmm2,6);//((short *)&xmm2)[j];
-    llr2[5] = vgetq_lane_s16(xmm2,7);//((short *)&xmm2)[j+1];
-#endif
+    llr2[2] = simde_mm_extract_epi16(xmm1, 6);
+    llr2[3] = simde_mm_extract_epi16(xmm1, 7); //((short *)&xmm1)[j+1];
+    llr2[4] = simde_mm_extract_epi16(xmm2, 6); //((short *)&xmm2)[j];
+    llr2[5] = simde_mm_extract_epi16(xmm2, 7); //((short *)&xmm2)[j+1];
     llr2+=6;
   }
 
   *llr_save = llr;
-#if defined(__x86_64__) || defined(__i386__)
-  _mm_empty();
-  _m_empty();
-#endif
+  simde_mm_empty();
+  simde_m_empty();
 }
 
 void mch_64qam_llr_khz_1dot25(LTE_DL_FRAME_PARMS *frame_parms,
@@ -1090,14 +853,10 @@ void mch_64qam_llr_khz_1dot25(LTE_DL_FRAME_PARMS *frame_parms,
                               int **dl_ch_mag,
                               int **dl_ch_magb,
                               /*unsigned char symbol,*/
-                              short **llr_save) {
-#if defined(__x86_64__) || defined(__i386__)
-  __m128i xmm1,xmm2,*ch_mag,*ch_magb;
-  __m128i *rxF = (__m128i *)&rxdataF_comp[0][0];
-#elif defined(__arm__) || defined(__aarch64__)
-  int16x8_t xmm1,xmm2,*ch_mag,*ch_magb;
-  int16x8_t *rxF = (int16x8_t *)&rxdataF_comp[0][0];
-#endif
+                              short **llr_save)
+{
+  simde__m128i xmm1, xmm2, *ch_mag, *ch_magb;
+  simde__m128i *rxF = (simde__m128i *)&rxdataF_comp[0][0];
   int i,len,len2;
   //   int j=0;
   unsigned char len_mod4;
@@ -1105,15 +864,10 @@ void mch_64qam_llr_khz_1dot25(LTE_DL_FRAME_PARMS *frame_parms,
   int16_t *llr2;
   //if (symbol==2)
   llr = dlsch_llr;
-  //else
-  //llr = *llr_save;
-#if defined(__x86_64__) || defined(__i386__)
-  ch_mag = (__m128i *)&dl_ch_mag[0][0];
-  ch_magb = (__m128i *)&dl_ch_magb[0][0];
-#elif defined(__arm__) || defined(__aarch64__)
-  ch_mag = (int16x8_t *)&dl_ch_mag[0][0];
-  ch_magb = (int16x8_t *)&dl_ch_magb[0][0];
-#endif
+  // else
+  // llr = *llr_save;
+  ch_mag = (simde__m128i *)&dl_ch_mag[0][0];
+  ch_magb = (simde__m128i *)&dl_ch_magb[0][0];
   len = frame_parms->N_RB_DL*12*10;
   llr2 = llr;
   llr += (len*6);
@@ -1121,98 +875,59 @@ void mch_64qam_llr_khz_1dot25(LTE_DL_FRAME_PARMS *frame_parms,
   len2=len>>2;  // length in quad words (4 REs)
   len2+=(len_mod4?0:1);
 
-  for (i=0; i<len2; i++) {
-#if defined(__x86_64__) || defined(__i386__)
-    xmm1 = _mm_abs_epi16(rxF[i]);
-    xmm1  = _mm_subs_epi16(ch_mag[i],xmm1);
-    xmm2 = _mm_abs_epi16(xmm1);
-    xmm2 = _mm_subs_epi16(ch_magb[i],xmm2);
-#elif defined(__arm__) || defined(__aarch64__)
-    xmm1 = vabsq_s16(rxF[i]);
-    xmm1 = vsubq_s16(ch_mag[i],xmm1);
-    xmm2 = vabsq_s16(xmm1);
-    xmm2 = vsubq_s16(ch_magb[i],xmm2);
-#endif
+  for (i = 0; i < len2; i++) {
+    xmm1 = simde_mm_abs_epi16(rxF[i]);
+    xmm1 = simde_mm_subs_epi16(ch_mag[i], xmm1);
+    xmm2 = simde_mm_abs_epi16(xmm1);
+    xmm2 = simde_mm_subs_epi16(ch_magb[i], xmm2);
     // loop over all LLRs in quad word (24 coded bits)
     /*
     for (j=0;j<8;j+=2) {
       llr2[0] = ((short *)&rxF[i])[j];
       llr2[1] = ((short *)&rxF[i])[j+1];
-      llr2[2] = _mm_extract_epi16(xmm1,j);
-      llr2[3] = _mm_extract_epi16(xmm1,j+1);//((short *)&xmm1)[j+1];
-      llr2[4] = _mm_extract_epi16(xmm2,j);//((short *)&xmm2)[j];
-      llr2[5] = _mm_extract_epi16(xmm2,j+1);//((short *)&xmm2)[j+1];
+      llr2[2] = simde_mm_extract_epi16(xmm1,j);
+      llr2[3] = simde_mm_extract_epi16(xmm1,j+1);//((short *)&xmm1)[j+1];
+      llr2[4] = simde_mm_extract_epi16(xmm2,j);//((short *)&xmm2)[j];
+      llr2[5] = simde_mm_extract_epi16(xmm2,j+1);//((short *)&xmm2)[j+1];
 
       llr2+=6;
     }
     */
     llr2[0] = ((short *)&rxF[i])[0];
     llr2[1] = ((short *)&rxF[i])[1];
-#if defined(__x86_64__) || defined(__i386__)
-    llr2[2] = _mm_extract_epi16(xmm1,0);
-    llr2[3] = _mm_extract_epi16(xmm1,1);//((short *)&xmm1)[j+1];
-    llr2[4] = _mm_extract_epi16(xmm2,0);//((short *)&xmm2)[j];
-    llr2[5] = _mm_extract_epi16(xmm2,1);//((short *)&xmm2)[j+1];
-#elif defined(__arm__) || defined(__aarch64__)
-    llr2[2] = vgetq_lane_s16(xmm1,0);
-    llr2[3] = vgetq_lane_s16(xmm1,1);//((short *)&xmm1)[j+1];
-    llr2[4] = vgetq_lane_s16(xmm2,0);//((short *)&xmm2)[j];
-    llr2[5] = vgetq_lane_s16(xmm2,1);//((short *)&xmm2)[j+1];
-#endif
+    llr2[2] = simde_mm_extract_epi16(xmm1, 0);
+    llr2[3] = simde_mm_extract_epi16(xmm1, 1); //((short *)&xmm1)[j+1];
+    llr2[4] = simde_mm_extract_epi16(xmm2, 0); //((short *)&xmm2)[j];
+    llr2[5] = simde_mm_extract_epi16(xmm2, 1); //((short *)&xmm2)[j+1];
     llr2+=6;
     llr2[0] = ((short *)&rxF[i])[2];
     llr2[1] = ((short *)&rxF[i])[3];
-#if defined(__x86_64__) || defined(__i386__)
-    llr2[2] = _mm_extract_epi16(xmm1,2);
-    llr2[3] = _mm_extract_epi16(xmm1,3);//((short *)&xmm1)[j+1];
-    llr2[4] = _mm_extract_epi16(xmm2,2);//((short *)&xmm2)[j];
-    llr2[5] = _mm_extract_epi16(xmm2,3);//((short *)&xmm2)[j+1];
-#elif defined(__arm__) || defined(__aarch64__)
-    llr2[2] = vgetq_lane_s16(xmm1,2);
-    llr2[3] = vgetq_lane_s16(xmm1,3);//((short *)&xmm1)[j+1];
-    llr2[4] = vgetq_lane_s16(xmm2,2);//((short *)&xmm2)[j];
-    llr2[5] = vgetq_lane_s16(xmm2,3);//((short *)&xmm2)[j+1];
-#endif
+    llr2[2] = simde_mm_extract_epi16(xmm1, 2);
+    llr2[3] = simde_mm_extract_epi16(xmm1, 3); //((short *)&xmm1)[j+1];
+    llr2[4] = simde_mm_extract_epi16(xmm2, 2); //((short *)&xmm2)[j];
+    llr2[5] = simde_mm_extract_epi16(xmm2, 3); //((short *)&xmm2)[j+1];
     llr2+=6;
     llr2[0] = ((short *)&rxF[i])[4];
     llr2[1] = ((short *)&rxF[i])[5];
-#if defined(__x86_64__) || defined(__i386__)
-    llr2[2] = _mm_extract_epi16(xmm1,4);
-    llr2[3] = _mm_extract_epi16(xmm1,5);//((short *)&xmm1)[j+1];
-    llr2[4] = _mm_extract_epi16(xmm2,4);//((short *)&xmm2)[j];
-    llr2[5] = _mm_extract_epi16(xmm2,5);//((short *)&xmm2)[j+1];
-#elif defined(__arm__) || defined(__aarch64__)
-    llr2[2] = vgetq_lane_s16(xmm1,4);
-    llr2[3] = vgetq_lane_s16(xmm1,5);//((short *)&xmm1)[j+1];
-    llr2[4] = vgetq_lane_s16(xmm2,4);//((short *)&xmm2)[j];
-    llr2[5] = vgetq_lane_s16(xmm2,5);//((short *)&xmm2)[j+1];
-#endif
+    llr2[2] = simde_mm_extract_epi16(xmm1, 4);
+    llr2[3] = simde_mm_extract_epi16(xmm1, 5); //((short *)&xmm1)[j+1];
+    llr2[4] = simde_mm_extract_epi16(xmm2, 4); //((short *)&xmm2)[j];
+    llr2[5] = simde_mm_extract_epi16(xmm2, 5); //((short *)&xmm2)[j+1];
     llr2+=6;
     llr2[0] = ((short *)&rxF[i])[6];
     llr2[1] = ((short *)&rxF[i])[7];
-#if defined(__x86_64__) || defined(__i386__)
-    llr2[2] = _mm_extract_epi16(xmm1,6);
-    llr2[3] = _mm_extract_epi16(xmm1,7);//((short *)&xmm1)[j+1];
-    llr2[4] = _mm_extract_epi16(xmm2,6);//((short *)&xmm2)[j];
-    llr2[5] = _mm_extract_epi16(xmm2,7);//((short *)&xmm2)[j+1];
-#elif defined(__arm__) || defined(__aarch64__)
-    llr2[2] = vgetq_lane_s16(xmm1,6);
-    llr2[3] = vgetq_lane_s16(xmm1,7);//((short *)&xmm1)[j+1];
-    llr2[4] = vgetq_lane_s16(xmm2,6);//((short *)&xmm2)[j];
-    llr2[5] = vgetq_lane_s16(xmm2,7);//((short *)&xmm2)[j+1];
-#endif
+    llr2[2] = simde_mm_extract_epi16(xmm1, 6);
+    llr2[3] = simde_mm_extract_epi16(xmm1, 7); //((short *)&xmm1)[j+1];
+    llr2[4] = simde_mm_extract_epi16(xmm2, 6); //((short *)&xmm2)[j];
+    llr2[5] = simde_mm_extract_epi16(xmm2, 7); //((short *)&xmm2)[j+1];
     llr2+=6;
   }
 
   *llr_save = llr;
-#if defined(__x86_64__) || defined(__i386__)
-  _mm_empty();
-  _m_empty();
-#endif
+  simde_mm_empty();
+  simde_m_empty();
 }
 
-
-
 int avg_pmch[4];
 int rx_pmch(PHY_VARS_UE *ue,
             unsigned char eNB_id,
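
The 16-QAM LLR kernel above now has a single SIMDE body: the first two bits of every RE are decided by the received sample itself, the second pair by the saturated threshold term ch_mag - |y|, and the two 32-bit unpacks interleave them so each RE emits its four LLRs contiguously. A minimal standalone sketch of that per-quad step, assuming SIMDE is pulled in directly through <simde/x86/ssse3.h> (the patch itself goes through "PHY/sse_intrin.h") and using an illustrative helper name:

    #include <stdint.h>
    #include <simde/x86/ssse3.h>

    /* Illustrative helper (not part of the patch): 16-QAM LLRs for four
     * complex REs. rxF holds four complex int16 samples, ch_mag the
     * matching per-RE threshold (scaled |h|^2) in the same Q format. */
    static inline void llr16qam_quad(simde__m128i rxF, simde__m128i ch_mag, int16_t *llr)
    {
      simde__m128i t = simde_mm_abs_epi16(rxF); /* |y_R|, |y_I| per lane */
      t = simde_mm_subs_epi16(ch_mag, t);       /* threshold - |y|, saturated */
      /* Interleave 32-bit (y_R,y_I) pairs with their threshold pairs so each
       * RE contributes four consecutive LLRs: y_R, y_I, t_R, t_I. */
      simde_mm_storeu_si128((simde__m128i *)&llr[0], simde_mm_unpacklo_epi32(rxF, t));
      simde_mm_storeu_si128((simde__m128i *)&llr[8], simde_mm_unpackhi_epi32(rxF, t));
    }
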
diff --git a/openair1/PHY/LTE_UE_TRANSPORT/ulsch_modulation.c b/openair1/PHY/LTE_UE_TRANSPORT/ulsch_modulation.c
index 521bbe6babef1b4ccd5e36d4bbc3b6803796fe2a..fb70eb6196b963c609baf6f3af68ad412822ea03 100644
--- a/openair1/PHY/LTE_UE_TRANSPORT/ulsch_modulation.c
+++ b/openair1/PHY/LTE_UE_TRANSPORT/ulsch_modulation.c
@@ -45,12 +45,7 @@
 
 void dft_lte(int32_t *z,struct complex16 *input, int32_t Msc_PUSCH, uint8_t Nsymb)
 {
-
-#if defined(__x86_64__) || defined(__i386__)
-  __m128i dft_in128[4][1200],dft_out128[4][1200];
-#elif defined(__arm__) || defined(__aarch64__)
-  int16x8_t dft_in128[4][1200],dft_out128[4][1200];
-#endif
+  simde__m128i dft_in128[4][1200], dft_out128[4][1200];
   uint32_t *dft_in0=(uint32_t*)dft_in128[0],*dft_out0=(uint32_t*)dft_out128[0];
   uint32_t *dft_in1=(uint32_t*)dft_in128[1],*dft_out1=(uint32_t*)dft_out128[1];
   uint32_t *dft_in2=(uint32_t*)dft_in128[2],*dft_out2=(uint32_t*)dft_out128[2];
@@ -59,12 +54,8 @@ void dft_lte(int32_t *z,struct complex16 *input, int32_t Msc_PUSCH, uint8_t Nsym
   uint32_t *d0,*d1,*d2,*d3,*d4,*d5,*d6,*d7,*d8,*d9,*d10,*d11;
 
   uint32_t *z0,*z1,*z2,*z3,*z4,*z5,*z6,*z7,*z8,*z9,*z10,*z11;
-  uint32_t i,ip;
-#if defined(__x86_64__) || defined(__i386__)
-  __m128i norm128;
-#elif defined(__arm__) || defined(__aarch64__)
-  int16x8_t norm128;
-#endif
+  uint32_t i, ip;
+  simde__m128i norm128;
   //  printf("Doing lte_dft for Msc_PUSCH %d\n",Msc_PUSCH);
 
   d0 = (uint32_t *)input;
@@ -109,30 +100,38 @@ void dft_lte(int32_t *z,struct complex16 *input, int32_t Msc_PUSCH, uint8_t Nsym
     dft(DFT_12,(int16_t *)dft_in2,(int16_t *)dft_out2,0);
 
     /*
-    dft12f(&((__m128i *)dft_in0)[0],&((__m128i *)dft_in0)[1],&((__m128i *)dft_in0)[2],&((__m128i *)dft_in0)[3],&((__m128i *)dft_in0)[4],&((__m128i *)dft_in0)[5],&((__m128i *)dft_in0)[6],&((__m128i *)dft_in0)[7],&((__m128i *)dft_in0)[8],&((__m128i *)dft_in0)[9],&((__m128i *)dft_in0)[10],&((__m128i *)dft_in0)[11],
-    &((__m128i *)dft_out0)[0],&((__m128i *)dft_out0)[1],&((__m128i *)dft_out0)[2],&((__m128i *)dft_out0)[3],&((__m128i *)dft_out0)[4],&((__m128i *)dft_out0)[5],&((__m128i *)dft_out0)[6],&((__m128i *)dft_out0)[7],&((__m128i *)dft_out0)[8],&((__m128i *)dft_out0)[9],&((__m128i *)dft_out0)[10],&((__m128i *)dft_out0)[11]);
-
-    dft12f(&((__m128i *)dft_in1)[0],&((__m128i *)dft_in1)[1],&((__m128i *)dft_in1)[2],&((__m128i *)dft_in1)[3],&((__m128i *)dft_in1)[4],&((__m128i *)dft_in1)[5],&((__m128i *)dft_in1)[6],&((__m128i *)dft_in1)[7],&((__m128i *)dft_in1)[8],&((__m128i *)dft_in1)[9],&((__m128i *)dft_in1)[10],&((__m128i *)dft_in1)[11],
-    &((__m128i *)dft_out1)[0],&((__m128i *)dft_out1)[1],&((__m128i *)dft_out1)[2],&((__m128i *)dft_out1)[3],&((__m128i *)dft_out1)[4],&((__m128i *)dft_out1)[5],&((__m128i *)dft_out1)[6],&((__m128i *)dft_out1)[7],&((__m128i *)dft_out1)[8],&((__m128i *)dft_out1)[9],&((__m128i *)dft_out1)[10],&((__m128i *)dft_out1)[11]);
-
-    dft12f(&((__m128i *)dft_in2)[0],&((__m128i *)dft_in2)[1],&((__m128i *)dft_in2)[2],&((__m128i *)dft_in2)[3],&((__m128i *)dft_in2)[4],&((__m128i *)dft_in2)[5],&((__m128i *)dft_in2)[6],&((__m128i *)dft_in2)[7],&((__m128i *)dft_in2)[8],&((__m128i *)dft_in2)[9],&((__m128i *)dft_in2)[10],&((__m128i *)dft_in2)[11],
-    &((__m128i *)dft_out2)[0],&((__m128i *)dft_out2)[1],&((__m128i *)dft_out2)[2],&((__m128i *)dft_out2)[3],&((__m128i *)dft_out2)[4],&((__m128i *)dft_out2)[5],&((__m128i *)dft_out2)[6],&((__m128i *)dft_out2)[7],&((__m128i *)dft_out2)[8],&((__m128i *)dft_out2)[9],&((__m128i *)dft_out2)[10],&((__m128i *)dft_out2)[11]);
+    dft12f(&((simde__m128i *)dft_in0)[0], &((simde__m128i *)dft_in0)[1], &((simde__m128i *)dft_in0)[2],
+           &((simde__m128i *)dft_in0)[3], &((simde__m128i *)dft_in0)[4], &((simde__m128i *)dft_in0)[5],
+           &((simde__m128i *)dft_in0)[6], &((simde__m128i *)dft_in0)[7], &((simde__m128i *)dft_in0)[8],
+           &((simde__m128i *)dft_in0)[9], &((simde__m128i *)dft_in0)[10], &((simde__m128i *)dft_in0)[11],
+           &((simde__m128i *)dft_out0)[0], &((simde__m128i *)dft_out0)[1], &((simde__m128i *)dft_out0)[2],
+           &((simde__m128i *)dft_out0)[3], &((simde__m128i *)dft_out0)[4], &((simde__m128i *)dft_out0)[5],
+           &((simde__m128i *)dft_out0)[6], &((simde__m128i *)dft_out0)[7], &((simde__m128i *)dft_out0)[8],
+           &((simde__m128i *)dft_out0)[9], &((simde__m128i *)dft_out0)[10], &((simde__m128i *)dft_out0)[11]);
+
+    dft12f(&((simde__m128i *)dft_in1)[0], &((simde__m128i *)dft_in1)[1], &((simde__m128i *)dft_in1)[2],
+           &((simde__m128i *)dft_in1)[3], &((simde__m128i *)dft_in1)[4], &((simde__m128i *)dft_in1)[5],
+           &((simde__m128i *)dft_in1)[6], &((simde__m128i *)dft_in1)[7], &((simde__m128i *)dft_in1)[8],
+           &((simde__m128i *)dft_in1)[9], &((simde__m128i *)dft_in1)[10], &((simde__m128i *)dft_in1)[11],
+           &((simde__m128i *)dft_out1)[0], &((simde__m128i *)dft_out1)[1], &((simde__m128i *)dft_out1)[2],
+           &((simde__m128i *)dft_out1)[3], &((simde__m128i *)dft_out1)[4], &((simde__m128i *)dft_out1)[5],
+           &((simde__m128i *)dft_out1)[6], &((simde__m128i *)dft_out1)[7], &((simde__m128i *)dft_out1)[8],
+           &((simde__m128i *)dft_out1)[9], &((simde__m128i *)dft_out1)[10], &((simde__m128i *)dft_out1)[11]);
+
+    dft12f(&((simde__m128i *)dft_in2)[0], &((simde__m128i *)dft_in2)[1], &((simde__m128i *)dft_in2)[2],
+           &((simde__m128i *)dft_in2)[3], &((simde__m128i *)dft_in2)[4], &((simde__m128i *)dft_in2)[5],
+           &((simde__m128i *)dft_in2)[6], &((simde__m128i *)dft_in2)[7], &((simde__m128i *)dft_in2)[8],
+           &((simde__m128i *)dft_in2)[9], &((simde__m128i *)dft_in2)[10], &((simde__m128i *)dft_in2)[11],
+           &((simde__m128i *)dft_out2)[0], &((simde__m128i *)dft_out2)[1], &((simde__m128i *)dft_out2)[2],
+           &((simde__m128i *)dft_out2)[3], &((simde__m128i *)dft_out2)[4], &((simde__m128i *)dft_out2)[5],
+           &((simde__m128i *)dft_out2)[6], &((simde__m128i *)dft_out2)[7], &((simde__m128i *)dft_out2)[8],
+           &((simde__m128i *)dft_out2)[9], &((simde__m128i *)dft_out2)[10], &((simde__m128i *)dft_out2)[11]);
     */
-#if defined(__x86_64__) || defined(__i386__)
-    norm128 = _mm_set1_epi16(9459);
-#elif defined(__arm__) || defined(__aarch64__)
-    norm128 = vdupq_n_s16(9459);
-#endif
-    for (i=0; i<12; i++) {
-#if defined(__x86_64__) || defined(__i386__)
-      ((__m128i*)dft_out0)[i] = _mm_slli_epi16(_mm_mulhi_epi16(((__m128i*)dft_out0)[i],norm128),1);
-      ((__m128i*)dft_out1)[i] = _mm_slli_epi16(_mm_mulhi_epi16(((__m128i*)dft_out1)[i],norm128),1);
-      ((__m128i*)dft_out2)[i] = _mm_slli_epi16(_mm_mulhi_epi16(((__m128i*)dft_out2)[i],norm128),1);
-#elif defined(__arm__) || defined(__aarch64__)
-      ((int16x8_t*)dft_out0)[i] = vqdmulhq_s16(((int16x8_t*)dft_out0)[i],norm128);
-      ((int16x8_t*)dft_out1)[i] = vqdmulhq_s16(((int16x8_t*)dft_out1)[i],norm128);
-      ((int16x8_t*)dft_out2)[i] = vqdmulhq_s16(((int16x8_t*)dft_out2)[i],norm128);
-#endif
+    norm128 = simde_mm_set1_epi16(9459);
+    for (i = 0; i < 12; i++) {
+      ((simde__m128i *)dft_out0)[i] = simde_mm_slli_epi16(simde_mm_mulhi_epi16(((simde__m128i *)dft_out0)[i], norm128), 1);
+      ((simde__m128i *)dft_out1)[i] = simde_mm_slli_epi16(simde_mm_mulhi_epi16(((simde__m128i *)dft_out1)[i], norm128), 1);
+      ((simde__m128i *)dft_out2)[i] = simde_mm_slli_epi16(simde_mm_mulhi_epi16(((simde__m128i *)dft_out2)[i], norm128), 1);
     }
 
     break;
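
The constant 9459 above is round(32768/sqrt(12)), so slli(mulhi(x, c), 1) = (x*c) >> 15 is the Q15 product x/sqrt(12) that normalizes the 12-point DFT. It also explains why the deleted NEON branch needed no extra shift: vqdmulhq_s16 is a doubling high multiply, (2*x*c) >> 16, the same quantity up to the lowest bit. As a sketch:

    #include <simde/x86/sse2.h>

    /* Q15 scaling used after DFT_12: y ~= x / sqrt(12), since
     * 9459 ~= 32768 / sqrt(12). mulhi returns (x*c) >> 16 and the
     * extra left shift by 1 turns that into the Q15 product (x*c) >> 15. */
    static inline simde__m128i scale_inv_sqrt12(simde__m128i x)
    {
      const simde__m128i c = simde_mm_set1_epi16(9459);
      return simde_mm_slli_epi16(simde_mm_mulhi_epi16(x, c), 1);
    }
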
diff --git a/openair1/PHY/MODULATION/nr_modulation.c b/openair1/PHY/MODULATION/nr_modulation.c
index 7396b2af8f13d982e0700d944e5d80e1278335be..2b617fc5a0cb2dd0607eb17eb9679ae71ff2ab39 100644
--- a/openair1/PHY/MODULATION/nr_modulation.c
+++ b/openair1/PHY/MODULATION/nr_modulation.c
@@ -124,8 +124,8 @@ void nr_modulation(uint32_t *in,
   uint32_t i;
 
 #if defined(__SSE2__)
-  __m128i *nr_mod_table128;
-  __m128i *out128;
+  simde__m128i *nr_mod_table128;
+  simde__m128i *out128;
 #endif
 
   LOG_D(PHY,"nr_modulation: length %d, mod_order %d\n",length,mod_order);
@@ -134,8 +134,8 @@ void nr_modulation(uint32_t *in,
 
 #if defined(__SSE2__)
   case 2:
-    nr_mod_table128 = (__m128i*) nr_qpsk_byte_mod_table;
-    out128 = (__m128i*) out;
+    nr_mod_table128 = (simde__m128i *)nr_qpsk_byte_mod_table;
+    out128 = (simde__m128i *)out;
     for (i=0; i<length/8; i++)
       out128[i] = nr_mod_table128[in_bytes[i]];
     // the bits that are left out
@@ -329,20 +329,12 @@ void nr_ue_layer_mapping(int16_t *mod_symbs,
 
 void nr_dft(int32_t *z, int32_t *d, uint32_t Msc_PUSCH)
 {
-#if defined(__x86_64__) || +defined(__i386__)
-  __m128i dft_in128[1][3240], dft_out128[1][3240];
-#elif defined(__arm__) || defined(__aarch64__)
-  int16x8_t dft_in128[1][3240], dft_out128[1][3240];
-#endif
+  simde__m128i dft_in128[1][3240], dft_out128[1][3240];
   uint32_t *dft_in0 = (uint32_t*)dft_in128[0], *dft_out0 = (uint32_t*)dft_out128[0];
 
   uint32_t i, ip;
 
-#if defined(__x86_64__) || defined(__i386__)
-  __m128i norm128;
-#elif defined(__arm__) || defined(__aarch64__)
-  int16x8_t norm128;
-#endif
+  simde__m128i norm128;
 
   if ((Msc_PUSCH % 1536) > 0) {
     for (i = 0, ip = 0; i < Msc_PUSCH; i++, ip+=4) {
@@ -354,17 +346,9 @@ void nr_dft(int32_t *z, int32_t *d, uint32_t Msc_PUSCH)
     case 12:
       dft(DFT_12,(int16_t *)dft_in0, (int16_t *)dft_out0,0);
 
-#if defined(__x86_64__) || defined(__i386__)
-      norm128 = _mm_set1_epi16(9459);
-#elif defined(__arm__) || defined(__aarch64__)
-      norm128 = vdupq_n_s16(9459);
-#endif
+      norm128 = simde_mm_set1_epi16(9459);
       for (i=0; i<12; i++) {
-#if defined(__x86_64__) || defined(__i386__)
-        ((__m128i*)dft_out0)[i] = _mm_slli_epi16(_mm_mulhi_epi16(((__m128i*)dft_out0)[i], norm128), 1);
-#elif defined(__arm__) || defined(__aarch64__)
-        ((int16x8_t*)dft_out0)[i] = vqdmulhq_s16(((int16x8_t*)dft_out0)[i], norm128);
-#endif
+        ((simde__m128i*)dft_out0)[i] = simde_mm_slli_epi16(simde_mm_mulhi_epi16(((simde__m128i*)dft_out0)[i], norm128), 1);
       }
 
       break;
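
In the mod_order 2 path above, one input byte carries 8 bits, i.e. 4 QPSK symbols, which is exactly one 128-bit vector of four complex int16 samples, so modulation reduces to a byte-indexed table load: out128[i] = nr_mod_table128[in_bytes[i]]. A sketch of how such a table can be populated, assuming the conventional mapping (1 - 2b)/sqrt(2) per bit; the names and the amplitude are illustrative (the real table is built in nr_gen_mod_table.c):

    #include <stdint.h>
    #include <simde/x86/sse2.h>

    #define QPSK_AMP 23170 /* round(32768 / sqrt(2)), illustrative amplitude */

    static simde__m128i qpsk_byte_lut[256];

    static void build_qpsk_byte_lut(void)
    {
      for (int s = 0; s < 256; s++) {
        int16_t e[8]; /* 4 complex symbols: re0,im0,...,re3,im3 */
        for (int j = 0; j < 4; j++) {
          e[2 * j]     = (int16_t)((1 - 2 * ((s >> (2 * j)) & 1)) * QPSK_AMP);
          e[2 * j + 1] = (int16_t)((1 - 2 * ((s >> (2 * j + 1)) & 1)) * QPSK_AMP);
        }
        qpsk_byte_lut[s] = simde_mm_loadu_si128((const simde__m128i *)e);
      }
    }

Modulating a byte stream is then one vector load per byte, as in the hunk above.
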
diff --git a/openair1/PHY/MODULATION/ul_7_5_kHz.c b/openair1/PHY/MODULATION/ul_7_5_kHz.c
index a5e6122071b6d85b9e9b17e87849e4eb1a65f634..db3fd533eb07b77e03248cff769ed37ad25360f9 100644
--- a/openair1/PHY/MODULATION/ul_7_5_kHz.c
+++ b/openair1/PHY/MODULATION/ul_7_5_kHz.c
@@ -32,14 +32,7 @@ void remove_7_5_kHz(RU_t *ru,uint8_t slot)
   int32_t **rxdata_7_5kHz=ru->common.rxdata_7_5kHz;
   uint16_t len;
   uint32_t *kHz7_5ptr;
-#if defined(__x86_64__) || defined(__i386__)
-  __m128i *rxptr128,*rxptr128_7_5kHz,*kHz7_5ptr128,kHz7_5_2,mmtmp_re,mmtmp_im,mmtmp_re2,mmtmp_im2;
-#elif defined(__arm__) || defined(__aarch64__)
-  int16x8_t *rxptr128,*kHz7_5ptr128,*rxptr128_7_5kHz;
-  int32x4_t mmtmp_re,mmtmp_im;
-  int32x4_t mmtmp0,mmtmp1;
-
-#endif
+  simde__m128i *rxptr128, *rxptr128_7_5kHz, *kHz7_5ptr128, kHz7_5_2, mmtmp_re, mmtmp_im, mmtmp_re2, mmtmp_im2;
   uint32_t slot_offset,slot_offset2;
   uint8_t aa;
   uint32_t i;
@@ -83,65 +76,29 @@ void remove_7_5_kHz(RU_t *ru,uint8_t slot)
   len = frame_parms->samples_per_tti/2;
 
   for (aa=0; aa<ru->nb_rx; aa++) {
-
-#if defined(__x86_64__) || defined(__i386__)
-    rxptr128        = (__m128i *)&rxdata[aa][slot_offset];
-    rxptr128_7_5kHz = (__m128i *)&rxdata_7_5kHz[aa][slot_offset2];
-    kHz7_5ptr128    = (__m128i *)kHz7_5ptr;
-#elif defined(__arm__) || defined(__aarch64__)
-    rxptr128        = (int16x8_t *)&rxdata[aa][slot_offset];
-    rxptr128_7_5kHz = (int16x8_t *)&rxdata_7_5kHz[aa][slot_offset2];
-    kHz7_5ptr128    = (int16x8_t *)kHz7_5ptr;
-#endif
+    rxptr128 = (simde__m128i *)&rxdata[aa][slot_offset];
+    rxptr128_7_5kHz = (simde__m128i *)&rxdata_7_5kHz[aa][slot_offset2];
+    kHz7_5ptr128 = (simde__m128i *)kHz7_5ptr;
     // apply 7.5 kHz
 
     //      if (((slot>>1)&1) == 0) { // apply the sinusoid from the table directly
     for (i=0; i<(len>>2); i++) {
-
-#if defined(__x86_64__) || defined(__i386__)
-      kHz7_5_2 = _mm_sign_epi16(*kHz7_5ptr128,*(__m128i*)&conjugate75_2[0]);
-      mmtmp_re = _mm_madd_epi16(*rxptr128,kHz7_5_2);
+      kHz7_5_2 = simde_mm_sign_epi16(*kHz7_5ptr128, *(simde__m128i *)&conjugate75_2[0]);
+      mmtmp_re = simde_mm_madd_epi16(*rxptr128, kHz7_5_2);
       // Real part of complex multiplication (note: 7_5kHz signal is conjugated for this to work)
-      mmtmp_im = _mm_shufflelo_epi16(kHz7_5_2,_MM_SHUFFLE(2,3,0,1));
-      mmtmp_im = _mm_shufflehi_epi16(mmtmp_im,_MM_SHUFFLE(2,3,0,1));
-      mmtmp_im = _mm_sign_epi16(mmtmp_im,*(__m128i*)&conjugate75[0]);
-      mmtmp_im = _mm_madd_epi16(mmtmp_im,rxptr128[0]);
-      mmtmp_re = _mm_srai_epi32(mmtmp_re,15);
-      mmtmp_im = _mm_srai_epi32(mmtmp_im,15);
-      mmtmp_re2 = _mm_unpacklo_epi32(mmtmp_re,mmtmp_im);
-      mmtmp_im2 = _mm_unpackhi_epi32(mmtmp_re,mmtmp_im);
-
-      rxptr128_7_5kHz[0] = _mm_packs_epi32(mmtmp_re2,mmtmp_im2);
+      mmtmp_im = simde_mm_shufflelo_epi16(kHz7_5_2, SIMDE_MM_SHUFFLE(2, 3, 0, 1));
+      mmtmp_im = simde_mm_shufflehi_epi16(mmtmp_im, SIMDE_MM_SHUFFLE(2, 3, 0, 1));
+      mmtmp_im = simde_mm_sign_epi16(mmtmp_im, *(simde__m128i *)&conjugate75[0]);
+      mmtmp_im = simde_mm_madd_epi16(mmtmp_im, rxptr128[0]);
+      mmtmp_re = simde_mm_srai_epi32(mmtmp_re, 15);
+      mmtmp_im = simde_mm_srai_epi32(mmtmp_im, 15);
+      mmtmp_re2 = simde_mm_unpacklo_epi32(mmtmp_re, mmtmp_im);
+      mmtmp_im2 = simde_mm_unpackhi_epi32(mmtmp_re, mmtmp_im);
+
+      rxptr128_7_5kHz[0] = simde_mm_packs_epi32(mmtmp_re2, mmtmp_im2);
       rxptr128++;
       rxptr128_7_5kHz++;
       kHz7_5ptr128++;
-
-#elif defined(__arm__) || defined(__aarch64__)
-
-      kHz7_5ptr128[0] = vmulq_s16(kHz7_5ptr128[0],((int16x8_t*)conjugate75_2)[0]);
-      mmtmp0 = vmull_s16(((int16x4_t*)rxptr128)[0],((int16x4_t*)kHz7_5ptr128)[0]);
-        //mmtmp0 = [Re(ch[0])Re(rx[0]) Im(ch[0])Im(ch[0]) Re(ch[1])Re(rx[1]) Im(ch[1])Im(ch[1])]
-      mmtmp1 = vmull_s16(((int16x4_t*)rxptr128)[1],((int16x4_t*)kHz7_5ptr128)[1]);
-        //mmtmp1 = [Re(ch[2])Re(rx[2]) Im(ch[2])Im(ch[2]) Re(ch[3])Re(rx[3]) Im(ch[3])Im(ch[3])]
-      mmtmp_re = vcombine_s32(vpadd_s32(vget_low_s32(mmtmp0),vget_high_s32(mmtmp0)),
-                              vpadd_s32(vget_low_s32(mmtmp1),vget_high_s32(mmtmp1)));
-        //mmtmp_re = [Re(ch[0])Re(rx[0])+Im(ch[0])Im(ch[0]) Re(ch[1])Re(rx[1])+Im(ch[1])Im(ch[1]) Re(ch[2])Re(rx[2])+Im(ch[2])Im(ch[2]) Re(ch[3])Re(rx[3])+Im(ch[3])Im(ch[3])]
-
-      mmtmp0 = vmull_s16(vrev32_s16(vmul_s16(((int16x4_t*)rxptr128)[0],*(int16x4_t*)conjugate75_2)), ((int16x4_t*)kHz7_5ptr128)[0]);
-        //mmtmp0 = [-Im(ch[0])Re(rx[0]) Re(ch[0])Im(rx[0]) -Im(ch[1])Re(rx[1]) Re(ch[1])Im(rx[1])]
-      mmtmp1 = vmull_s16(vrev32_s16(vmul_s16(((int16x4_t*)rxptr128)[1],*(int16x4_t*)conjugate75_2)), ((int16x4_t*)kHz7_5ptr128)[1]);
-        //mmtmp1 = [-Im(ch[2])Re(rx[2]) Re(ch[2])Im(rx[2]) -Im(ch[3])Re(rx[3]) Re(ch[3])Im(rx[3])]
-      mmtmp_im = vcombine_s32(vpadd_s32(vget_low_s32(mmtmp0),vget_high_s32(mmtmp0)),
-                              vpadd_s32(vget_low_s32(mmtmp1),vget_high_s32(mmtmp1)));
-        //mmtmp_im = [-Im(ch[0])Re(rx[0])+Re(ch[0])Im(rx[0]) -Im(ch[1])Re(rx[1])+Re(ch[1])Im(rx[1]) -Im(ch[2])Re(rx[2])+Re(ch[2])Im(rx[2]) -Im(ch[3])Re(rx[3])+Re(ch[3])Im(rx[3])]
-
-      rxptr128_7_5kHz[0] = vcombine_s16(vmovn_s32(mmtmp_re),vmovn_s32(mmtmp_im));
-      rxptr128_7_5kHz++;
-      rxptr128++;
-      kHz7_5ptr128++;
-
-
-#endif
     }
     // undo 7.5 kHz offset for symbol 3 in case RU is slave (for OTA synchronization)
     if (ru->is_slave == 1 && slot == 2){
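
The unified loop above is a fixed-point multiply by the conjugated 7.5 kHz sinusoid: simde_mm_madd_epi16 on interleaved (re,im) pairs yields the real part directly, while the two SIMDE_MM_SHUFFLE(2,3,0,1) shuffles swap each table entry's re/im halves and a sign flip sets up the second madd for the imaginary part; apply_7_5_kHz in the next file reuses the same kernel without pre-conjugating the table. A standalone sketch computing z = x*conj(y) for four Q15 complex samples (the conjugate75/conjugate75_2 tables encode the corresponding +/-1 patterns; the mask below is written out explicitly):

    #include <simde/x86/ssse3.h>

    /* z = x * conj(y), Q15, interleaved re/im layout (a+jb times c-jd). */
    static inline simde__m128i cmult_conj_q15(simde__m128i x, simde__m128i y)
    {
      const simde__m128i neg = simde_mm_setr_epi16(-1, 1, -1, 1, -1, 1, -1, 1);
      simde__m128i re = simde_mm_madd_epi16(x, y);  /* a*c + b*d = Re(x*conj(y)) */
      simde__m128i ys = simde_mm_shufflelo_epi16(y, SIMDE_MM_SHUFFLE(2, 3, 0, 1));
      ys = simde_mm_shufflehi_epi16(ys, SIMDE_MM_SHUFFLE(2, 3, 0, 1)); /* [d,c] */
      ys = simde_mm_sign_epi16(ys, neg);                               /* [-d,c] */
      simde__m128i im = simde_mm_madd_epi16(x, ys); /* b*c - a*d = Im(x*conj(y)) */
      re = simde_mm_srai_epi32(re, 15);
      im = simde_mm_srai_epi32(im, 15);
      /* re-interleave the four results back to re,im,...,re,im int16 */
      return simde_mm_packs_epi32(simde_mm_unpacklo_epi32(re, im),
                                  simde_mm_unpackhi_epi32(re, im));
    }
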
diff --git a/openair1/PHY/MODULATION/ul_7_5_kHz_ue.c b/openair1/PHY/MODULATION/ul_7_5_kHz_ue.c
index c6ddbe61979643248272bd002088e258e9ff1ad5..81d01e2dc12e691d72b4ac49df8934b0b08e07db 100644
--- a/openair1/PHY/MODULATION/ul_7_5_kHz_ue.c
+++ b/openair1/PHY/MODULATION/ul_7_5_kHz_ue.c
@@ -31,13 +31,7 @@ void apply_7_5_kHz(PHY_VARS_UE *ue,int32_t*txdata,uint8_t slot)
 
   uint16_t len;
   uint32_t *kHz7_5ptr;
-#if defined(__x86_64__) || defined(__i386__)
-  __m128i *txptr128,*kHz7_5ptr128,mmtmp_re,mmtmp_im,mmtmp_re2,mmtmp_im2;
-#elif defined(__arm__) || defined(__aarch64__)
-  int16x8_t *txptr128,*kHz7_5ptr128;
-  int32x4_t mmtmp_re,mmtmp_im;
-  int32x4_t mmtmp0,mmtmp1;
-#endif
+  simde__m128i *txptr128, *kHz7_5ptr128, mmtmp_re, mmtmp_im, mmtmp_re2, mmtmp_im2;
   uint32_t slot_offset;
   //   uint8_t aa;
   uint32_t i;
@@ -77,53 +71,25 @@ void apply_7_5_kHz(PHY_VARS_UE *ue,int32_t*txdata,uint8_t slot)
   slot_offset = (uint32_t)slot * frame_parms->samples_per_tti/2;
   len = frame_parms->samples_per_tti/2;
 
-#if defined(__x86_64__) || defined(__i386__)
-  txptr128 = (__m128i *)&txdata[slot_offset];
-  kHz7_5ptr128 = (__m128i *)kHz7_5ptr;
-#elif defined(__arm__) || defined(__aarch64__)
-  txptr128 = (int16x8_t*)&txdata[slot_offset];
-  kHz7_5ptr128 = (int16x8_t*)kHz7_5ptr;
-#endif
+  txptr128 = (simde__m128i *)&txdata[slot_offset];
+  kHz7_5ptr128 = (simde__m128i *)kHz7_5ptr;
   // apply 7.5 kHz
 
-  for (i=0; i<(len>>2); i++) {
-#if defined(__x86_64__) || defined(__i386__)
-    mmtmp_re = _mm_madd_epi16(*txptr128,*kHz7_5ptr128);
+  for (i = 0; i < (len >> 2); i++) {
+    mmtmp_re = simde_mm_madd_epi16(*txptr128, *kHz7_5ptr128);
     // Real part of complex multiplication (note: 7_5kHz signal is conjugated for this to work)
-    mmtmp_im = _mm_shufflelo_epi16(*kHz7_5ptr128,_MM_SHUFFLE(2,3,0,1));
-    mmtmp_im = _mm_shufflehi_epi16(mmtmp_im,_MM_SHUFFLE(2,3,0,1));
-    mmtmp_im = _mm_sign_epi16(mmtmp_im,*(__m128i*)&conjugate75[0]);
-    mmtmp_im = _mm_madd_epi16(mmtmp_im,txptr128[0]);
-    mmtmp_re = _mm_srai_epi32(mmtmp_re,15);
-    mmtmp_im = _mm_srai_epi32(mmtmp_im,15);
-    mmtmp_re2 = _mm_unpacklo_epi32(mmtmp_re,mmtmp_im);
-    mmtmp_im2 = _mm_unpackhi_epi32(mmtmp_re,mmtmp_im);
-
-    txptr128[0] = _mm_packs_epi32(mmtmp_re2,mmtmp_im2);
-    txptr128++;
-    kHz7_5ptr128++;  
-#elif defined(__arm__) || defined(__aarch64__)
-
-    mmtmp0 = vmull_s16(((int16x4_t*)txptr128)[0],((int16x4_t*)kHz7_5ptr128)[0]);
-        //mmtmp0 = [Re(ch[0])Re(rx[0]) Im(ch[0])Im(ch[0]) Re(ch[1])Re(rx[1]) Im(ch[1])Im(ch[1])] 
-    mmtmp1 = vmull_s16(((int16x4_t*)txptr128)[1],((int16x4_t*)kHz7_5ptr128)[1]);
-        //mmtmp1 = [Re(ch[2])Re(rx[2]) Im(ch[2])Im(ch[2]) Re(ch[3])Re(rx[3]) Im(ch[3])Im(ch[3])] 
-    mmtmp_re = vcombine_s32(vpadd_s32(vget_low_s32(mmtmp0),vget_high_s32(mmtmp0)),
-                            vpadd_s32(vget_low_s32(mmtmp1),vget_high_s32(mmtmp1)));
-        //mmtmp_re = [Re(ch[0])Re(rx[0])+Im(ch[0])Im(ch[0]) Re(ch[1])Re(rx[1])+Im(ch[1])Im(ch[1]) Re(ch[2])Re(rx[2])+Im(ch[2])Im(ch[2]) Re(ch[3])Re(rx[3])+Im(ch[3])Im(ch[3])] 
-
-    mmtmp0 = vmull_s16(vrev32_s16(vmul_s16(((int16x4_t*)txptr128)[0],*(int16x4_t*)conjugate75_2)),((int16x4_t*)kHz7_5ptr128)[0]);
-        //mmtmp0 = [-Im(ch[0])Re(rx[0]) Re(ch[0])Im(rx[0]) -Im(ch[1])Re(rx[1]) Re(ch[1])Im(rx[1])]
-    mmtmp1 = vmull_s16(vrev32_s16(vmul_s16(((int16x4_t*)txptr128)[1],*(int16x4_t*)conjugate75_2)), ((int16x4_t*)kHz7_5ptr128)[1]);
-        //mmtmp0 = [-Im(ch[2])Re(rx[2]) Re(ch[2])Im(rx[2]) -Im(ch[3])Re(rx[3]) Re(ch[3])Im(rx[3])]
-    mmtmp_im = vcombine_s32(vpadd_s32(vget_low_s32(mmtmp0),vget_high_s32(mmtmp0)),
-                            vpadd_s32(vget_low_s32(mmtmp1),vget_high_s32(mmtmp1)));
-        //mmtmp_im = [-Im(ch[0])Re(rx[0])+Re(ch[0])Im(rx[0]) -Im(ch[1])Re(rx[1])+Re(ch[1])Im(rx[1]) -Im(ch[2])Re(rx[2])+Re(ch[2])Im(rx[2]) -Im(ch[3])Re(rx[3])+Re(ch[3])Im(rx[3])]
-
-    txptr128[0] = vcombine_s16(vmovn_s32(mmtmp_re),vmovn_s32(mmtmp_im));
+    mmtmp_im = simde_mm_shufflelo_epi16(*kHz7_5ptr128, SIMDE_MM_SHUFFLE(2, 3, 0, 1));
+    mmtmp_im = simde_mm_shufflehi_epi16(mmtmp_im, SIMDE_MM_SHUFFLE(2, 3, 0, 1));
+    mmtmp_im = simde_mm_sign_epi16(mmtmp_im, *(simde__m128i *)&conjugate75[0]);
+    mmtmp_im = simde_mm_madd_epi16(mmtmp_im, txptr128[0]);
+    mmtmp_re = simde_mm_srai_epi32(mmtmp_re, 15);
+    mmtmp_im = simde_mm_srai_epi32(mmtmp_im, 15);
+    mmtmp_re2 = simde_mm_unpacklo_epi32(mmtmp_re, mmtmp_im);
+    mmtmp_im2 = simde_mm_unpackhi_epi32(mmtmp_re, mmtmp_im);
+
+    txptr128[0] = simde_mm_packs_epi32(mmtmp_re2, mmtmp_im2);
     txptr128++;
     kHz7_5ptr128++;
-#endif
   }
 
   //}
diff --git a/openair1/PHY/NR_REFSIG/dmrs_nr.c b/openair1/PHY/NR_REFSIG/dmrs_nr.c
index 179c5d9cbd320375480eca104814194cef7c9759..275df1a0d48921a2b44f5073602e1ff06ce1c159 100644
--- a/openair1/PHY/NR_REFSIG/dmrs_nr.c
+++ b/openair1/PHY/NR_REFSIG/dmrs_nr.c
@@ -347,8 +347,8 @@ void nr_chest_time_domain_avg(NR_DL_FRAME_PARMS *frame_parms,
                               uint16_t dmrs_bitmap,
                               uint16_t num_rbs)
 {
-  __m128i *ul_ch128_0;
-  __m128i *ul_ch128_1;
+  simde__m128i *ul_ch128_0;
+  simde__m128i *ul_ch128_1;
   int16_t *ul_ch16_0;
   int total_symbols = start_symbol + num_symbols;
   int num_dmrs_symb = get_dmrs_symbols_in_slot(dmrs_bitmap, total_symbols);
@@ -356,31 +356,31 @@ void nr_chest_time_domain_avg(NR_DL_FRAME_PARMS *frame_parms,
   AssertFatal(first_dmrs_symb > -1, "No DMRS symbol present in this slot\n");
   for (int aarx = 0; aarx < frame_parms->nb_antennas_rx; aarx++) {
     for (int symb = first_dmrs_symb+1; symb < total_symbols; symb++) {
-      ul_ch128_0 = (__m128i *)&ch_estimates[aarx][first_dmrs_symb*frame_parms->ofdm_symbol_size];
+      ul_ch128_0 = (simde__m128i *)&ch_estimates[aarx][first_dmrs_symb*frame_parms->ofdm_symbol_size];
       if ((dmrs_bitmap >> symb) & 0x01) {
-        ul_ch128_1 = (__m128i *)&ch_estimates[aarx][symb*frame_parms->ofdm_symbol_size];
+        ul_ch128_1 = (simde__m128i *)&ch_estimates[aarx][symb*frame_parms->ofdm_symbol_size];
         for (int rbIdx = 0; rbIdx < num_rbs; rbIdx++) {
-          ul_ch128_0[0] = _mm_adds_epi16(ul_ch128_0[0], ul_ch128_1[0]);
-          ul_ch128_0[1] = _mm_adds_epi16(ul_ch128_0[1], ul_ch128_1[1]);
-          ul_ch128_0[2] = _mm_adds_epi16(ul_ch128_0[2], ul_ch128_1[2]);
+          ul_ch128_0[0] = simde_mm_adds_epi16(ul_ch128_0[0], ul_ch128_1[0]);
+          ul_ch128_0[1] = simde_mm_adds_epi16(ul_ch128_0[1], ul_ch128_1[1]);
+          ul_ch128_0[2] = simde_mm_adds_epi16(ul_ch128_0[2], ul_ch128_1[2]);
           ul_ch128_0 += 3;
           ul_ch128_1 += 3;
         }
       }
     }
-    ul_ch128_0 = (__m128i *)&ch_estimates[aarx][first_dmrs_symb*frame_parms->ofdm_symbol_size];
+    ul_ch128_0 = (simde__m128i *)&ch_estimates[aarx][first_dmrs_symb*frame_parms->ofdm_symbol_size];
     if (num_dmrs_symb == 2) {
       for (int rbIdx = 0; rbIdx < num_rbs; rbIdx++) {
-        ul_ch128_0[0] = _mm_srai_epi16(ul_ch128_0[0], 1);
-        ul_ch128_0[1] = _mm_srai_epi16(ul_ch128_0[1], 1);
-        ul_ch128_0[2] = _mm_srai_epi16(ul_ch128_0[2], 1);
+        ul_ch128_0[0] = simde_mm_srai_epi16(ul_ch128_0[0], 1);
+        ul_ch128_0[1] = simde_mm_srai_epi16(ul_ch128_0[1], 1);
+        ul_ch128_0[2] = simde_mm_srai_epi16(ul_ch128_0[2], 1);
         ul_ch128_0 += 3;
       }
     } else if (num_dmrs_symb == 4) {
       for (int rbIdx = 0; rbIdx < num_rbs; rbIdx++) {
-        ul_ch128_0[0] = _mm_srai_epi16(ul_ch128_0[0], 2);
-        ul_ch128_0[1] = _mm_srai_epi16(ul_ch128_0[1], 2);
-        ul_ch128_0[2] = _mm_srai_epi16(ul_ch128_0[2], 2);
+        ul_ch128_0[0] = simde_mm_srai_epi16(ul_ch128_0[0], 2);
+        ul_ch128_0[1] = simde_mm_srai_epi16(ul_ch128_0[1], 2);
+        ul_ch128_0[2] = simde_mm_srai_epi16(ul_ch128_0[2], 2);
         ul_ch128_0 += 3;
       }
     } else if (num_dmrs_symb == 3) {
@@ -415,4 +415,3 @@ void nr_chest_time_domain_avg(NR_DL_FRAME_PARMS *frame_parms,
     } else AssertFatal((num_dmrs_symb < 5) && (num_dmrs_symb > 0), "Illegal number of DMRS symbols in the slot\n");
   }
 }
-
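
nr_chest_time_domain_avg accumulates the later DMRS symbols onto the first one with saturating adds and then, when the symbol count is a power of two, divides by it with an arithmetic shift (>> 1 for two symbols, >> 2 for four); the three-symbol case needs a true division and is handled separately below the excerpt. A compressed sketch of the power-of-two path, with illustrative names:

    #include <simde/x86/sse2.h>

    /* Average num_symb channel-estimate buffers (num_symb = 2 or 4) into
     * acc[]; others[] points at the remaining num_symb-1 buffers. */
    static void chest_avg_pow2(simde__m128i *acc, simde__m128i **others,
                               int num_symb, int num_vec)
    {
      for (int s = 0; s < num_symb - 1; s++)
        for (int i = 0; i < num_vec; i++)
          acc[i] = simde_mm_adds_epi16(acc[i], others[s][i]); /* saturating sum */
      const int shift = (num_symb == 2) ? 1 : 2;
      for (int i = 0; i < num_vec; i++)
        acc[i] = simde_mm_srai_epi16(acc[i], shift);          /* /2 or /4 */
    }
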
diff --git a/openair1/PHY/NR_REFSIG/nr_gen_mod_table.c b/openair1/PHY/NR_REFSIG/nr_gen_mod_table.c
index 9ed2bd949f8b68c774d3f30536799b3f8a26e7b5..c9d8786b6e62c514fe99c76b9555b97e6e8f45b6 100644
--- a/openair1/PHY/NR_REFSIG/nr_gen_mod_table.c
+++ b/openair1/PHY/NR_REFSIG/nr_gen_mod_table.c
@@ -24,9 +24,7 @@
 short nr_qpsk_mod_table[8];
 
 int32_t nr_16qam_mod_table[16];
-#if defined(__SSE2__)
-__m128i nr_qpsk_byte_mod_table[2048];
-#endif
+simde__m128i nr_qpsk_byte_mod_table[2048];
 
 int64_t nr_16qam_byte_mod_table[1024];
 
@@ -50,7 +48,6 @@ void nr_generate_modulation_table() {
     //printf("%d j%d\n",nr_qpsk_mod_table[i*2],nr_qpsk_mod_table[i*2+1]);
   }
 
-#if defined(__SSE2__)
   //QPSK m128
   table = (short*) nr_qpsk_byte_mod_table;
   for (i=0; i<256; i++) {
@@ -60,7 +57,6 @@ void nr_generate_modulation_table() {
       //printf("%d j%d\n",nr_qpsk_byte_mod_table[i*8+(j*2)],nr_qpsk_byte_mod_table[i*8+(j*2)+1]);
     }
   }
-#endif
 
   //16QAM
   table = (short*) nr_16qam_byte_mod_table;
diff --git a/openair1/PHY/NR_REFSIG/nr_mod_table.h b/openair1/PHY/NR_REFSIG/nr_mod_table.h
index 2c6964fe96e65d7d933e36ce05bf824408648a29..4ac83ee79c261119202ab4e8016bd9dacb478c36 100644
--- a/openair1/PHY/NR_REFSIG/nr_mod_table.h
+++ b/openair1/PHY/NR_REFSIG/nr_mod_table.h
@@ -33,7 +33,5 @@ extern short nr_qpsk_mod_table[8];
 
 extern int32_t nr_16qam_mod_table[16];
-#if defined(__SSE2__)
-extern __m128i nr_qpsk_byte_mod_table[2048];
-#endif
+extern simde__m128i nr_qpsk_byte_mod_table[2048];
 
 extern int64_t nr_16qam_byte_mod_table[1024];
diff --git a/openair1/PHY/NR_REFSIG/nr_refsig.h b/openair1/PHY/NR_REFSIG/nr_refsig.h
index fd89f122ce909879a399cbc04d7fc4f7b4d2f210..e2db900912d696b0b8c53b29e0dcfd390cfb56dc 100644
--- a/openair1/PHY/NR_REFSIG/nr_refsig.h
+++ b/openair1/PHY/NR_REFSIG/nr_refsig.h
@@ -64,9 +64,9 @@ int nr_pusch_dmrs_rx(PHY_VARS_gNB *gNB,
 void init_scrambling_luts(void);
 void nr_generate_modulation_table(void);
 
-extern __m64 byte2m64_re[256];
-extern __m64 byte2m64_im[256];
-extern __m128i byte2m128i[256];
+extern simde__m64 byte2m64_re[256];
+extern simde__m64 byte2m64_im[256];
+extern simde__m128i byte2m128i[256];
 
 
 
diff --git a/openair1/PHY/NR_REFSIG/scrambling_luts.c b/openair1/PHY/NR_REFSIG/scrambling_luts.c
index 2a37a125390e1ff63ee85bf7adea5201bfe14195..39130b5696eb1c5972f4a297da0370b0fc2440ef 100644
--- a/openair1/PHY/NR_REFSIG/scrambling_luts.c
+++ b/openair1/PHY/NR_REFSIG/scrambling_luts.c
@@ -29,22 +29,22 @@
 #include "PHY/sse_intrin.h"
 #include <common/utils/LOG/log.h>
 
-__m64 byte2m64_re[256];
-__m64 byte2m64_im[256];
+simde__m64 byte2m64_re[256];
+simde__m64 byte2m64_im[256];
 
-__m128i byte2m128i[256];
+simde__m128i byte2m128i[256];
 
 void init_byte2m64(void) {
 
   for (int s=0;s<256;s++) {
-    byte2m64_re[s] = _mm_insert_pi16(byte2m64_re[s],(1-2*(s&1)),0);
-    byte2m64_im[s] = _mm_insert_pi16(byte2m64_im[s],(1-2*((s>>1)&1)),0);
-    byte2m64_re[s] = _mm_insert_pi16(byte2m64_re[s],(1-2*((s>>2)&1)),1);
-    byte2m64_im[s] = _mm_insert_pi16(byte2m64_im[s],(1-2*((s>>3)&1)),1);
-    byte2m64_re[s] = _mm_insert_pi16(byte2m64_re[s],(1-2*((s>>4)&1)),2);
-    byte2m64_im[s] = _mm_insert_pi16(byte2m64_im[s],(1-2*((s>>5)&1)),2);
-    byte2m64_re[s] = _mm_insert_pi16(byte2m64_re[s],(1-2*((s>>6)&1)),3);
-    byte2m64_im[s] = _mm_insert_pi16(byte2m64_im[s],(1-2*((s>>7)&1)),3);
+    byte2m64_re[s] = simde_mm_insert_pi16(byte2m64_re[s],(1-2*(s&1)),0);
+    byte2m64_im[s] = simde_mm_insert_pi16(byte2m64_im[s],(1-2*((s>>1)&1)),0);
+    byte2m64_re[s] = simde_mm_insert_pi16(byte2m64_re[s],(1-2*((s>>2)&1)),1);
+    byte2m64_im[s] = simde_mm_insert_pi16(byte2m64_im[s],(1-2*((s>>3)&1)),1);
+    byte2m64_re[s] = simde_mm_insert_pi16(byte2m64_re[s],(1-2*((s>>4)&1)),2);
+    byte2m64_im[s] = simde_mm_insert_pi16(byte2m64_im[s],(1-2*((s>>5)&1)),2);
+    byte2m64_re[s] = simde_mm_insert_pi16(byte2m64_re[s],(1-2*((s>>6)&1)),3);
+    byte2m64_im[s] = simde_mm_insert_pi16(byte2m64_im[s],(1-2*((s>>7)&1)),3);
      LOG_T(PHY,"init_scrambling_luts: s %x (%d) ((%d,%d),(%d,%d),(%d,%d),(%d,%d))\n",
 	    ((uint16_t*)&s)[0],
 	    (1-2*(s&1)),
@@ -59,14 +59,14 @@ void init_byte2m64(void) {
 void init_byte2m128i(void) {
 
   for (int s=0;s<256;s++) {
-    byte2m128i[s] = _mm_insert_epi16(byte2m128i[s],(1-2*(s&1)),0);
-    byte2m128i[s] = _mm_insert_epi16(byte2m128i[s],(1-2*((s>>1)&1)),1);
-    byte2m128i[s] = _mm_insert_epi16(byte2m128i[s],(1-2*((s>>2)&1)),2);
-    byte2m128i[s] = _mm_insert_epi16(byte2m128i[s],(1-2*((s>>3)&1)),3);
-    byte2m128i[s] = _mm_insert_epi16(byte2m128i[s],(1-2*((s>>4)&1)),4);
-    byte2m128i[s] = _mm_insert_epi16(byte2m128i[s],(1-2*((s>>5)&1)),5);
-    byte2m128i[s] = _mm_insert_epi16(byte2m128i[s],(1-2*((s>>6)&1)),6);
-    byte2m128i[s] = _mm_insert_epi16(byte2m128i[s],(1-2*((s>>7)&1)),7);
+    byte2m128i[s] = simde_mm_insert_epi16(byte2m128i[s],(1-2*(s&1)),0);
+    byte2m128i[s] = simde_mm_insert_epi16(byte2m128i[s],(1-2*((s>>1)&1)),1);
+    byte2m128i[s] = simde_mm_insert_epi16(byte2m128i[s],(1-2*((s>>2)&1)),2);
+    byte2m128i[s] = simde_mm_insert_epi16(byte2m128i[s],(1-2*((s>>3)&1)),3);
+    byte2m128i[s] = simde_mm_insert_epi16(byte2m128i[s],(1-2*((s>>4)&1)),4);
+    byte2m128i[s] = simde_mm_insert_epi16(byte2m128i[s],(1-2*((s>>5)&1)),5);
+    byte2m128i[s] = simde_mm_insert_epi16(byte2m128i[s],(1-2*((s>>6)&1)),6);
+    byte2m128i[s] = simde_mm_insert_epi16(byte2m128i[s],(1-2*((s>>7)&1)),7);
   }
 }
 
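
Each scrambling byte s expands bit by bit into eight int16 factors 1 - 2*bit, i.e. +1 or -1, so unscrambling a run of LLRs becomes one simde_mm_mullo_epi16 per byte (as nr_codeword_unscrambling does below with byte2m128i) rather than a branch per bit. The same mapping in plain scalar C for reference; byte2sign is an illustrative mirror of byte2m128i:

    #include <stdint.h>

    static int16_t byte2sign[256][8];

    static void init_byte2sign(void)
    {
      for (int s = 0; s < 256; s++)
        for (int b = 0; b < 8; b++)
          byte2sign[s][b] = (int16_t)(1 - 2 * ((s >> b) & 1)); /* bit -> +/-1 */
    }
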
diff --git a/openair1/PHY/NR_TRANSPORT/nr_dlsch.c b/openair1/PHY/NR_TRANSPORT/nr_dlsch.c
index 30359a25a956d0f908d1525919e6528a0697de0d..717b7b8155f45af7502056902644a5f71ccdcc18 100644
--- a/openair1/PHY/NR_TRANSPORT/nr_dlsch.c
+++ b/openair1/PHY/NR_TRANSPORT/nr_dlsch.c
@@ -387,31 +387,31 @@ void nr_generate_pdsch(processingData_L1tx_t *msgTx,
           }
           // fix the alignment issues later, use 64-bit SIMD below instead of 128.
           if (0/*(frame_parms->N_RB_DL&1)==0*/) {
-            __m128i *txF=(__m128i*)&txdataF_precoding[nl][((l*frame_parms->ofdm_symbol_size+start_sc)<<1)];
+            simde__m128i *txF=(simde__m128i*)&txdataF_precoding[nl][((l*frame_parms->ofdm_symbol_size+start_sc)<<1)];
 
-            __m128i *txl = (__m128i*)&tx_layers[nl][m<<1];
-            __m128i amp128=_mm_set1_epi16(amp);
+            simde__m128i *txl = (simde__m128i*)&tx_layers[nl][m<<1];
+            simde__m128i amp128=simde_mm_set1_epi16(amp);
             for (int i=0; i<(upper_limit>>2); i++) {
-              txF[i] = _mm_mulhrs_epi16(amp128,txl[i]);
+              txF[i] = simde_mm_mulhrs_epi16(amp128,txl[i]);
             } //RE loop, first part
             m+=upper_limit;
             if (remaining_re > 0) {
-               txF = (__m128i*)&txdataF_precoding[nl][((l*frame_parms->ofdm_symbol_size)<<1)];
-               txl = (__m128i*)&tx_layers[nl][m<<1];
+               txF = (simde__m128i*)&txdataF_precoding[nl][((l*frame_parms->ofdm_symbol_size)<<1)];
+               txl = (simde__m128i*)&tx_layers[nl][m<<1];
                for (int i=0; i<(remaining_re>>2); i++) {
-                 txF[i] = _mm_mulhrs_epi16(amp128,txl[i]);
+                 txF[i] = simde_mm_mulhrs_epi16(amp128,txl[i]);
                }
             }
           }
           else {
-            __m128i *txF = (__m128i *)&txdataF_precoding[nl][((l * frame_parms->ofdm_symbol_size + start_sc) << 1)];
+            simde__m128i *txF = (simde__m128i *)&txdataF_precoding[nl][((l * frame_parms->ofdm_symbol_size + start_sc) << 1)];
 
-            __m128i *txl = (__m128i *)&tx_layers[nl][m << 1];
-            __m128i amp64 = _mm_set1_epi16(amp);
+            simde__m128i *txl = (simde__m128i *)&tx_layers[nl][m << 1];
+            simde__m128i amp64 = simde_mm_set1_epi16(amp);
             int i;
             for (i = 0; i < (upper_limit >> 2); i++) {
-              const __m128i txL = _mm_loadu_si128(txl + i);
-              _mm_storeu_si128(txF + i, _mm_mulhrs_epi16(amp64, txL));
+              const simde__m128i txL = simde_mm_loadu_si128(txl + i);
+              simde_mm_storeu_si128(txF + i, simde_mm_mulhrs_epi16(amp64, txL));
 #ifdef DEBUG_DLSCH_MAPPING
               if ((i&1) > 0)
                   printf("m %u\t l %d \t k %d \t txdataF: %d %d\n",
@@ -434,12 +434,13 @@ void nr_generate_pdsch(processingData_L1tx_t *msgTx,
             }
             m+=upper_limit;
             if (remaining_re > 0) {
-              txF = (__m128i *)&txdataF_precoding[nl][((l * frame_parms->ofdm_symbol_size) << 1)];
-              txl = (__m128i *)&tx_layers[nl][m << 1];
+              txF = (simde__m128i *)&txdataF_precoding[nl][((l * frame_parms->ofdm_symbol_size) << 1)];
+              txl = (simde__m128i *)&tx_layers[nl][m << 1];
               int i;
               for (i = 0; i < (remaining_re >> 2); i++) {
-                const __m128i txL = _mm_loadu_si128(txl + i);
-                _mm_storeu_si128(txF + i, _mm_mulhrs_epi16(amp64, txL));
+                const simde__m128i txL = simde_mm_loadu_si128(txl + i);
+                simde_mm_storeu_si128(txF + i, simde_mm_mulhrs_epi16(amp64, txL));
+
 #ifdef DEBUG_DLSCH_MAPPING
                  if ((i&1) > 0)
                    printf("m %u\t l %d \t k %d \t txdataF: %d %d\n",
diff --git a/openair1/PHY/NR_TRANSPORT/nr_scrambling.c b/openair1/PHY/NR_TRANSPORT/nr_scrambling.c
index 15ddc58ab56ba78cf0064cf258775bb5eb82d475..d15fa88c826cf950faadb145659a2ece080c5ecf 100644
--- a/openair1/PHY/NR_TRANSPORT/nr_scrambling.c
+++ b/openair1/PHY/NR_TRANSPORT/nr_scrambling.c
@@ -37,7 +37,7 @@ void nr_codeword_scrambling(uint8_t *in,
 
   s=lte_gold_generic(&x1, &x2, 1);
   for (int i=0; i<((size>>5)+((size&0x1f) > 0 ? 1 : 0)); i++) {
-    __m256i c = ((__m256i*)in)[i];
+    simde__m256i c = ((simde__m256i*)in)[i];
     uint32_t in32 = simde_mm256_movemask_epi8(simde_mm256_slli_epi16(c,7));
     out[i]=(in32^s);
     DEBUG_SCRAMBLING(LOG_D(PHY, "in[%d] %x => %x\n", i, in32, out[i]));
@@ -51,28 +51,15 @@ void nr_codeword_unscrambling(int16_t* llr, uint32_t size, uint8_t q, uint32_t N
   uint32_t x2 = (n_RNTI << 15) + (q << 14) + Nid;
   uint32_t s = 0;
 
-#if defined(__x86_64__) || defined(__i386__)
   uint8_t *s8=(uint8_t *)&s;
-  __m128i *llr128 = (__m128i*)llr;
+  simde__m128i *llr128 = (simde__m128i*)llr;
   s = lte_gold_generic(&x1, &x2, 1);
 
   for (int i = 0, j = 0; i < ((size >> 5) + ((size & 0x1f) > 0 ? 1 : 0)); i++, j += 4) {
-    llr128[j]   = _mm_mullo_epi16(llr128[j],byte2m128i[s8[0]]);
-    llr128[j+1] = _mm_mullo_epi16(llr128[j+1],byte2m128i[s8[1]]);
-    llr128[j+2] = _mm_mullo_epi16(llr128[j+2],byte2m128i[s8[2]]);
-    llr128[j+3] = _mm_mullo_epi16(llr128[j+3],byte2m128i[s8[3]]);
+    llr128[j]   = simde_mm_mullo_epi16(llr128[j],byte2m128i[s8[0]]);
+    llr128[j+1] = simde_mm_mullo_epi16(llr128[j+1],byte2m128i[s8[1]]);
+    llr128[j+2] = simde_mm_mullo_epi16(llr128[j+2],byte2m128i[s8[2]]);
+    llr128[j+3] = simde_mm_mullo_epi16(llr128[j+3],byte2m128i[s8[3]]);
     s = lte_gold_generic(&x1, &x2, 0);
   }
-#else
-  uint8_t reset = 1;
-
-  for (uint32_t i=0; i<size; i++) {
-    if ((i&0x1f)==0) {
-      s = lte_gold_generic(&x1, &x2, reset);
-      reset = 0;
-    }
-    if (((s>>(i&0x1f))&1)==1)
-      llr[i] = -llr[i];
-  }
-#endif
 }
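
nr_codeword_scrambling above rests on a packing trick: each input byte holds one coded bit in its LSB; shifting every 16-bit lane left by 7 moves the LSB of both bytes in the lane into that byte's MSB, and movemask_epi8 then gathers the 32 MSBs into one word ready to XOR with a Gold-sequence word. Isolated, assuming SIMDE's AVX2 header:

    #include <stdint.h>
    #include <simde/x86/avx2.h>

    /* Pack the LSBs of 32 bytes into one 32-bit word. */
    static inline uint32_t pack32_lsb(const uint8_t *in)
    {
      simde__m256i c = simde_mm256_loadu_si256((const simde__m256i *)in);
      /* lane << 7: bit 0 of each byte lands in that byte's bit 7 (MSB) */
      return (uint32_t)simde_mm256_movemask_epi8(simde_mm256_slli_epi16(c, 7));
    }
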
diff --git a/openair1/PHY/NR_TRANSPORT/nr_ulsch_decoding.c b/openair1/PHY/NR_TRANSPORT/nr_ulsch_decoding.c
index ab63282c9bd67940f16e6660286f951d4cd754d6..f421aec7ea3ab1b309fc849b0047cf02deef382f 100644
--- a/openair1/PHY/NR_TRANSPORT/nr_ulsch_decoding.c
+++ b/openair1/PHY/NR_TRANSPORT/nr_ulsch_decoding.c
@@ -146,8 +146,8 @@ static void nr_processULSegment(void *arg)
   int16_t z[68 * 384 + 16] __attribute__((aligned(16)));
   int8_t l[68 * 384 + 16] __attribute__((aligned(16)));
 
-  __m128i *pv = (__m128i *)&z;
-  __m128i *pl = (__m128i *)&l;
+  simde__m128i *pv = (simde__m128i *)&z;
+  simde__m128i *pl = (simde__m128i *)&l;
 
   Kr = ulsch_harq->K;
   Kr_bytes = Kr >> 3;
@@ -233,7 +233,7 @@ static void nr_processULSegment(void *arg)
   memcpy((&z[0] + Kr), ulsch_harq->d[r] + (Kr - 2 * ulsch_harq->Z), (kc * ulsch_harq->Z - Kr) * sizeof(int16_t));
-  // Saturate coded bits before decoding into 8 bits values
+  // Saturate coded bits to 8-bit values before decoding
   for (i = 0, j = 0; j < ((kc * ulsch_harq->Z) >> 4) + 1; i += 2, j++) {
-    pl[j] = _mm_packs_epi16(pv[i], pv[i + 1]);
+    pl[j] = simde_mm_packs_epi16(pv[i], pv[i + 1]);
   }
   //////////////////////////////////////////////////////////////////////////////////////////
 
@@ -411,10 +411,10 @@ int nr_ulsch_decoding(PHY_VARS_gNB *phy_vars_gNB,
       if ((dtx_det == 0) && (pusch_pdu->pusch_data.rv_index == 0)) {
         // if (dtx_det==0){
           memcpy((&z_ol[0]), ulsch_llr + r_offset, E * sizeof(short));
-          __m128i *pv_ol128 = (__m128i *)&z_ol;
-          __m128i *pl_ol128 = (__m128i *)&l_ol;
+          simde__m128i *pv_ol128 = (simde__m128i *)&z_ol;
+          simde__m128i *pl_ol128 = (simde__m128i *)&l_ol;
           for (int i = 0, j = 0; j < ((kc * harq_process->Z) >> 4) + 1; i += 2, j++) {
-            pl_ol128[j] = _mm_packs_epi16(pv_ol128[i], pv_ol128[i + 1]);
+            pl_ol128[j] = simde_mm_packs_epi16(pv_ol128[i], pv_ol128[i + 1]);
           }
 
           int ret = nrLDPC_decoder_offload(&decParams, harq_pid, ULSCH_id, r, pusch_pdu->pusch_data.rv_index, harq_process->F, E, Qm, (int8_t *)&pl_ol128[0], llrProcBuf, 1);
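
The simde_mm_packs_epi16 calls above narrow two vectors of 16-bit LLRs into one vector of 8-bit LLRs with saturation, so values outside [-128, 127] clip instead of wrapping before entering the LDPC decoder. The same step as a standalone helper (illustrative name; n is assumed a multiple of 16):

    #include <stdint.h>
    #include <simde/x86/sse2.h>

    static void llr16_to_llr8(int8_t *dst, const int16_t *src, int n)
    {
      const simde__m128i *pv = (const simde__m128i *)src;
      simde__m128i *pl = (simde__m128i *)dst;
      for (int i = 0, j = 0; j < n / 16; i += 2, j++)
        pl[j] = simde_mm_packs_epi16(pv[i], pv[i + 1]); /* saturate to int8 */
    }
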
diff --git a/openair1/PHY/NR_TRANSPORT/nr_ulsch_demodulation.c b/openair1/PHY/NR_TRANSPORT/nr_ulsch_demodulation.c
index bf5b6195284de38e387d6bae59e242c8f1a54943..6f491ffafacdb35d9f1a8ae8e37b8c117775f67c 100644
--- a/openair1/PHY/NR_TRANSPORT/nr_ulsch_demodulation.c
+++ b/openair1/PHY/NR_TRANSPORT/nr_ulsch_demodulation.c
@@ -9,6 +9,7 @@
 #include "PHY/defs_nr_common.h"
 #include "common/utils/nr/nr_common.h"
 #include <openair1/PHY/TOOLS/phy_scope_interface.h>
+#include "PHY/sse_intrin.h"
 
 //#define DEBUG_CH_COMP
 //#define DEBUG_RB_EXT
@@ -20,13 +21,8 @@
 void nr_idft(int32_t *z, uint32_t Msc_PUSCH)
 {
 
-#if defined(__x86_64__) || defined(__i386__)
-  __m128i idft_in128[1][3240], idft_out128[1][3240];
-  __m128i norm128;
-#elif defined(__arm__) || defined(__aarch64__)
-  int16x8_t idft_in128[1][3240], idft_out128[1][3240];
-  int16x8_t norm128;
-#endif
+  simde__m128i idft_in128[1][3240], idft_out128[1][3240];
+  simde__m128i norm128;
   int16_t *idft_in0 = (int16_t*)idft_in128[0], *idft_out0 = (int16_t*)idft_out128[0];
 
   int i, ip;
@@ -36,11 +32,7 @@ void nr_idft(int32_t *z, uint32_t Msc_PUSCH)
   if ((Msc_PUSCH % 1536) > 0) {
     // conjugate input
     for (i = 0; i < (Msc_PUSCH>>2); i++) {
-#if defined(__x86_64__)||defined(__i386__)
-      *&(((__m128i*)z)[i]) = _mm_sign_epi16(*&(((__m128i*)z)[i]), *(__m128i*)&conjugate2[0]);
-#elif defined(__arm__) || defined(__aarch64__)
-      *&(((int16x8_t*)z)[i]) = vmulq_s16(*&(((int16x8_t*)z)[i]), *(int16x8_t*)&conjugate2[0]);
-#endif
+      *&(((simde__m128i*)z)[i]) = simde_mm_sign_epi16(*&(((simde__m128i*)z)[i]), *(simde__m128i*)&conjugate2[0]);
     }
     for (i = 0, ip = 0; i < Msc_PUSCH; i++, ip+=4)
       ((uint32_t*)idft_in0)[ip+0] = z[i];
@@ -50,18 +42,10 @@ void nr_idft(int32_t *z, uint32_t Msc_PUSCH)
     case 12:
       dft(DFT_12,(int16_t *)idft_in0, (int16_t *)idft_out0,0);
 
-#if defined(__x86_64__)||defined(__i386__)
-      norm128 = _mm_set1_epi16(9459);
-#elif defined(__arm__) || defined(__aarch64__)
-      norm128 = vdupq_n_s16(9459);
-#endif
+      norm128 = simde_mm_set1_epi16(9459);
 
       for (i = 0; i < 12; i++) {
-#if defined(__x86_64__)||defined(__i386__)
-        ((__m128i*)idft_out0)[i] = _mm_slli_epi16(_mm_mulhi_epi16(((__m128i*)idft_out0)[i], norm128), 1);
-#elif defined(__arm__) || defined(__aarch64__)
-        ((int16x8_t*)idft_out0)[i] = vqdmulhq_s16(((int16x8_t*)idft_out0)[i], norm128);
-#endif
+        ((simde__m128i*)idft_out0)[i] = simde_mm_slli_epi16(simde_mm_mulhi_epi16(((simde__m128i*)idft_out0)[i], norm128), 1);
       }
 
       break;
@@ -288,18 +272,12 @@ void nr_idft(int32_t *z, uint32_t Msc_PUSCH)
 
     // conjugate output
     for (i = 0; i < (Msc_PUSCH>>2); i++) {
-#if defined(__x86_64__) || defined(__i386__)
-      ((__m128i*)z)[i] = _mm_sign_epi16(((__m128i*)z)[i], *(__m128i*)&conjugate2[0]);
-#elif defined(__arm__) || defined(__aarch64__)
-      *&(((int16x8_t*)z)[i]) = vmulq_s16(*&(((int16x8_t*)z)[i]), *(int16x8_t*)&conjugate2[0]);
-#endif
+      ((simde__m128i*)z)[i] = simde_mm_sign_epi16(((simde__m128i*)z)[i], *(simde__m128i*)&conjugate2[0]);
     }
   }
 
-#if defined(__x86_64__) || defined(__i386__)
-  _mm_empty();
-  _m_empty();
-#endif
+  simde_mm_empty();
+  simde_m_empty();
 
 }
 
@@ -339,18 +317,18 @@ void nr_ulsch_extract_rbs(c16_t **rxdataF,
     
     if (is_dmrs_symbol == 0) {
       if (start_re + nb_re_pusch <= frame_parms->ofdm_symbol_size) {
-        memcpy1((void*)rxF_ext, (void*)&rxF[start_re*2], nb_re_pusch*sizeof(int32_t));
+        memcpy((void*)rxF_ext, (void*)&rxF[start_re*2], nb_re_pusch*sizeof(int32_t));
       } else {
         int neg_length = frame_parms->ofdm_symbol_size-start_re;
         int pos_length = nb_re_pusch-neg_length;
-        memcpy1((void*)rxF_ext,(void*)&rxF[start_re*2],neg_length*sizeof(int32_t));
-        memcpy1((void*)&rxF_ext[2*neg_length],(void*)rxF,pos_length*sizeof(int32_t));
+        memcpy((void*)rxF_ext,(void*)&rxF[start_re*2],neg_length*sizeof(int32_t));
+        memcpy((void*)&rxF_ext[2*neg_length],(void*)rxF,pos_length*sizeof(int32_t));
       }
 
       for (aatx = 0; aatx < pusch_pdu->nrOfLayers; aatx++) {
         ul_ch0 = &pusch_vars->ul_ch_estimates[aatx*frame_parms->nb_antennas_rx+aarx][pusch_vars->dmrs_symbol*frame_parms->ofdm_symbol_size]; // update channel estimates if new dmrs symbol are available
         ul_ch0_ext = &pusch_vars->ul_ch_estimates_ext[aatx*frame_parms->nb_antennas_rx+aarx][symbol*nb_re_pusch2];
-        memcpy1((void*)ul_ch0_ext,(void*)ul_ch0,nb_re_pusch*sizeof(int32_t));
+        memcpy((void*)ul_ch0_ext,(void*)ul_ch0,nb_re_pusch*sizeof(int32_t));
       }
 
     } else {
@@ -407,9 +385,6 @@ void nr_ulsch_scale_channel(int **ul_ch_estimates_ext,
                             unsigned short nb_rb,
                             int shift_ch_ext)
 {
-
-#if defined(__x86_64__)||defined(__i386__)
-
   // Determine scaling amplitude based the symbol
   int b = 3;
   short ch_amp = 1024 * 8;
@@ -422,28 +397,27 @@ void nr_ulsch_scale_channel(int **ul_ch_estimates_ext,
   } else {
     b -= shift_ch_ext;
   }
-  __m128i ch_amp128 = _mm_set1_epi16(ch_amp); // Q3.13
+  simde__m128i ch_amp128 = simde_mm_set1_epi16(ch_amp); // Q3.13
   LOG_D(PHY, "Scaling PUSCH Chest in OFDM symbol %d by %d, pilots %d nb_rb %d NCP %d symbol %d\n", symbol, ch_amp, is_dmrs_symbol, nb_rb, frame_parms->Ncp, symbol);
 
   uint32_t nb_rb_0 = len / 12 + ((len % 12) ? 1 : 0);
   int off = ((nb_rb & 1) == 1) ? 4 : 0;
   for (int aatx = 0; aatx < nrOfLayers; aatx++) {
     for (int aarx = 0; aarx < frame_parms->nb_antennas_rx; aarx++) {
-      __m128i *ul_ch128 = (__m128i *)&ul_ch_estimates_ext[aatx * frame_parms->nb_antennas_rx + aarx][symbol * (off + (nb_rb * NR_NB_SC_PER_RB))];
+      simde__m128i *ul_ch128 = (simde__m128i *)&ul_ch_estimates_ext[aatx * frame_parms->nb_antennas_rx + aarx][symbol * (off + (nb_rb * NR_NB_SC_PER_RB))];
       for (int rb = 0; rb < nb_rb_0; rb++) {
-        ul_ch128[0] = _mm_mulhi_epi16(ul_ch128[0], ch_amp128);
-        ul_ch128[0] = _mm_slli_epi16(ul_ch128[0], b);
+        ul_ch128[0] = simde_mm_mulhi_epi16(ul_ch128[0], ch_amp128);
+        ul_ch128[0] = simde_mm_slli_epi16(ul_ch128[0], b);
 
-        ul_ch128[1] = _mm_mulhi_epi16(ul_ch128[1], ch_amp128);
-        ul_ch128[1] = _mm_slli_epi16(ul_ch128[1], b);
+        ul_ch128[1] = simde_mm_mulhi_epi16(ul_ch128[1], ch_amp128);
+        ul_ch128[1] = simde_mm_slli_epi16(ul_ch128[1], b);
 
-        ul_ch128[2] = _mm_mulhi_epi16(ul_ch128[2], ch_amp128);
-        ul_ch128[2] = _mm_slli_epi16(ul_ch128[2], b);
+        ul_ch128[2] = simde_mm_mulhi_epi16(ul_ch128[2], ch_amp128);
+        ul_ch128[2] = simde_mm_slli_epi16(ul_ch128[2], b);
         ul_ch128 += 3;
       }
     }
   }
-#endif
 }
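ch_amp = 1024 * 8 = 8192 is 1.0 in the Q3.13 format noted in the comment, and the mulhi/slli pair implements a fixed-point multiply with overall gain ch_amp * 2^b / 2^16, i.e. 2^(b-3), which is unity at the default b = 3. A scalar model of one lane (assuming only the Q3.13 convention stated above; helper name is illustrative):

#include <stdint.h>

/* gain = ch_amp * 2^b / 2^16; unity when ch_amp = 8192 and b = 3,
 * halved or doubled per unit of shift_ch_ext via the adjusted b. */
static inline int16_t scale_chest_lane(int16_t ch, int16_t ch_amp, int b)
{
  int16_t hi = (int16_t)(((int32_t)ch * ch_amp) >> 16); /* simde_mm_mulhi_epi16 */
  return (int16_t)(hi << b);                            /* simde_mm_slli_epi16 */
}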
 
 //compute average channel_level on each (TX,RX) antenna pair
@@ -456,11 +430,10 @@ void nr_ulsch_channel_level(int **ul_ch_estimates_ext,
                             unsigned short nb_rb)
 {
 
-#if defined(__x86_64__)||defined(__i386__)
 
   short rb;
   unsigned char aatx, aarx;
-  __m128i *ul_ch128, avg128U;
+  simde__m128i *ul_ch128, avg128U;
 
   int16_t x = factor2(len);
   int16_t y = (len)>>x;
@@ -472,14 +445,14 @@ void nr_ulsch_channel_level(int **ul_ch_estimates_ext,
   for (aatx = 0; aatx < nrOfLayers; aatx++) {
     for (aarx = 0; aarx < frame_parms->nb_antennas_rx; aarx++) {
       //clear average level
-      avg128U = _mm_setzero_si128();
+      avg128U = simde_mm_setzero_si128();
 
-      ul_ch128=(__m128i *)&ul_ch_estimates_ext[aatx*frame_parms->nb_antennas_rx+aarx][symbol*(off+(nb_rb*12))];
+      ul_ch128=(simde__m128i *)&ul_ch_estimates_ext[aatx*frame_parms->nb_antennas_rx+aarx][symbol*(off+(nb_rb*12))];
 
       for (rb = 0; rb < nb_rb_0; rb++) {
-        avg128U = _mm_add_epi32(avg128U, _mm_srai_epi32(_mm_madd_epi16(ul_ch128[0], ul_ch128[0]), x));
-        avg128U = _mm_add_epi32(avg128U, _mm_srai_epi32(_mm_madd_epi16(ul_ch128[1], ul_ch128[1]), x));
-        avg128U = _mm_add_epi32(avg128U, _mm_srai_epi32(_mm_madd_epi16(ul_ch128[2], ul_ch128[2]), x));
+        avg128U = simde_mm_add_epi32(avg128U, simde_mm_srai_epi32(simde_mm_madd_epi16(ul_ch128[0], ul_ch128[0]), x));
+        avg128U = simde_mm_add_epi32(avg128U, simde_mm_srai_epi32(simde_mm_madd_epi16(ul_ch128[1], ul_ch128[1]), x));
+        avg128U = simde_mm_add_epi32(avg128U, simde_mm_srai_epi32(simde_mm_madd_epi16(ul_ch128[2], ul_ch128[2]), x));
         ul_ch128+=3;
       }
 
@@ -490,77 +463,23 @@ void nr_ulsch_channel_level(int **ul_ch_estimates_ext,
     }
   }
 
-  _mm_empty();
-  _m_empty();
-
-#elif defined(__arm__) || defined(__aarch64__)
-
-  short rb;
-  unsigned char aatx, aarx, nre = 12, symbol_mod;
-  int32x4_t avg128U;
-  int16x4_t *ul_ch128;
-
-  symbol_mod = (symbol>=(7-frame_parms->Ncp)) ? symbol-(7-frame_parms->Ncp) : symbol;
-  uint32_t nb_rb_0 = len/12 + ((len%12)?1:0);
-  for (aatx=0; aatx<nrOfLayers; aatx++) {
-    for (aarx=0; aarx<frame_parms->nb_antennas_rx; aarx++) {
-      //clear average level
-      avg128U = vdupq_n_s32(0);
-      // 5 is always a symbol with no pilots for both normal and extended prefix
-
-      ul_ch128 = (int16x4_t *)&ul_ch_estimates_ext[aatx*frame_parms->nb_antennas_rx+aarx][symbol*frame_parms->N_RB_UL*12];
-
-      for (rb = 0; rb < nb_rb_0; rb++) {
-        //  printf("rb %d : ",rb);
-        //  print_shorts("ch",&ul_ch128[0]);
-        avg128U = vqaddq_s32(avg128U, vmull_s16(ul_ch128[0], ul_ch128[0]));
-        avg128U = vqaddq_s32(avg128U, vmull_s16(ul_ch128[1], ul_ch128[1]));
-        avg128U = vqaddq_s32(avg128U, vmull_s16(ul_ch128[2], ul_ch128[2]));
-        avg128U = vqaddq_s32(avg128U, vmull_s16(ul_ch128[3], ul_ch128[3]));
-
-        if (((symbol_mod == 0) || (symbol_mod == (frame_parms->Ncp-1)))&&(nrOfLayers!=1)) {
-          ul_ch128+=4;
-        } else {
-          avg128U = vqaddq_s32(avg128U, vmull_s16(ul_ch128[4], ul_ch128[4]));
-          avg128U = vqaddq_s32(avg128U, vmull_s16(ul_ch128[5], ul_ch128[5]));
-          ul_ch128+=6;
-        }
-
-        /*
-          if (rb==0) {
-          print_shorts("ul_ch128",&ul_ch128[0]);
-          print_shorts("ul_ch128",&ul_ch128[1]);
-          print_shorts("ul_ch128",&ul_ch128[2]);
-          }
-        */
-      }
+  simde_mm_empty();
+  simde_m_empty();
 
-      if (symbol==2) //assume start symbol 2
-          nre=6;
-      else
-          nre=12;
-
-      avg[aatx*frame_parms->nb_antennas_rx+aarx] = (((int32_t*)&avg128U)[0] +
-                                                    ((int32_t*)&avg128U)[1] +
-                                                    ((int32_t*)&avg128U)[2] +
-                                                    ((int32_t*)&avg128U)[3]) / (nb_rb*nre);
-    }
-  }
-#endif
 }
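Each simde_mm_madd_epi16 above squares and pairwise-adds the 16-bit I/Q samples, and the arithmetic shift by x (where factor2() splits len = y * 2^x) pre-scales the energies so the 32-bit accumulator stays in range across the whole allocation. A scalar sketch of the per-antenna average being built, up to rounding (helper and divisor are illustrative; the exact divisor sits in the elided avg[] line):

#include <stdint.h>

/* Sum of |h|^2 over len complex samples, pre-scaled by 2^-x as above. */
static uint32_t chest_level(const int16_t *ch /* interleaved re,im */, int len, int x)
{
  int64_t acc = 0;
  for (int k = 0; k < 2 * len; k++)
    acc += ((int32_t)ch[k] * ch[k]) >> x;
  return (uint32_t)(acc / len); /* per-RE average energy */
}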
 
-__m128i a_mult_conjb(__m128i a, __m128i b, unsigned char output_shift)
+static simde__m128i a_mult_conjb(simde__m128i a, simde__m128i b, unsigned char output_shift)
 {
-  __m128i mmtmpD0 = _mm_madd_epi16(b, a);
-  __m128i mmtmpD1 = _mm_shufflelo_epi16(b, _MM_SHUFFLE(2, 3, 0, 1));
-  mmtmpD1 = _mm_shufflehi_epi16(mmtmpD1, _MM_SHUFFLE(2, 3, 0, 1));
-  mmtmpD1 = _mm_sign_epi16(mmtmpD1, *(__m128i *)&conjugate[0]);
-  mmtmpD1 = _mm_madd_epi16(mmtmpD1, a);
-  mmtmpD0 = _mm_srai_epi32(mmtmpD0, output_shift);
-  mmtmpD1 = _mm_srai_epi32(mmtmpD1, output_shift);
-  __m128i mmtmpD2 = _mm_unpacklo_epi32(mmtmpD0, mmtmpD1);
-  __m128i mmtmpD3 = _mm_unpackhi_epi32(mmtmpD0, mmtmpD1);
-  return _mm_packs_epi32(mmtmpD2, mmtmpD3);
+  simde__m128i mmtmpD0 = simde_mm_madd_epi16(b, a);
+  simde__m128i mmtmpD1 = simde_mm_shufflelo_epi16(b, SIMDE_MM_SHUFFLE(2, 3, 0, 1));
+  mmtmpD1 = simde_mm_shufflehi_epi16(mmtmpD1, SIMDE_MM_SHUFFLE(2, 3, 0, 1));
+  mmtmpD1 = simde_mm_sign_epi16(mmtmpD1, *(simde__m128i *)&conjugate[0]);
+  mmtmpD1 = simde_mm_madd_epi16(mmtmpD1, a);
+  mmtmpD0 = simde_mm_srai_epi32(mmtmpD0, output_shift);
+  mmtmpD1 = simde_mm_srai_epi32(mmtmpD1, output_shift);
+  simde__m128i mmtmpD2 = simde_mm_unpacklo_epi32(mmtmpD0, mmtmpD1);
+  simde__m128i mmtmpD3 = simde_mm_unpackhi_epi32(mmtmpD0, mmtmpD1);
+  return simde_mm_packs_epi32(mmtmpD2, mmtmpD3);
 }
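The helper computes a * conj(b) lane-wise: simde_mm_madd_epi16(b, a) produces the real part a_re*b_re + a_im*b_im, the shuffle/sign/madd chain produces the imaginary part a_im*b_re - a_re*b_im, and the unpack/packs pair re-interleaves four results back to Q15 with saturation. The same arithmetic for one complex sample, as a scalar cross-check (function name is illustrative):

#include <stdint.h>

/* a * conj(b) >> output_shift for one Q15 complex pair, saturated to 16 bits. */
static void a_mult_conjb_scalar(const int16_t a[2], const int16_t b[2],
                                int shift, int16_t out[2])
{
  int32_t re = (int32_t)a[0] * b[0] + (int32_t)a[1] * b[1]; /* simde_mm_madd_epi16(b, a) */
  int32_t im = (int32_t)a[1] * b[0] - (int32_t)a[0] * b[1]; /* shuffle + sign + madd   */
  re >>= shift;
  im >>= shift;
  out[0] = (int16_t)(re > 32767 ? 32767 : re < -32768 ? -32768 : re); /* packs_epi32 */
  out[1] = (int16_t)(im > 32767 ? 32767 : im < -32768 ? -32768 : im);
}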
 
 //==============================================================================================
@@ -624,41 +543,40 @@ void nr_ulsch_channel_compensation(int **rxdataF_ext,
 
 #endif
 
-#if defined(__i386) || defined(__x86_64__)
 
   unsigned short rb;
   unsigned char aatx,aarx;
-  __m128i *ul_ch128,*ul_ch128_2,*ul_ch_mag128,*ul_ch_mag128b,*ul_ch_mag128c,*rxdataF128,*rxdataF_comp128,*rho128;
-  __m128i mmtmpD0,mmtmpD1,mmtmpD2,mmtmpD3,QAM_amp128={0},QAM_amp128b={0},QAM_amp128c={0};
-  QAM_amp128b = _mm_setzero_si128();
+  simde__m128i *ul_ch128,*ul_ch128_2,*ul_ch_mag128,*ul_ch_mag128b,*ul_ch_mag128c,*rxdataF128,*rxdataF_comp128,*rho128;
+  simde__m128i mmtmpD0,mmtmpD1,mmtmpD2,mmtmpD3,QAM_amp128={0},QAM_amp128b={0},QAM_amp128c={0};
+  QAM_amp128b = simde_mm_setzero_si128();
 
   uint32_t nb_rb_0 = length/12 + ((length%12)?1:0);
   for (aatx=0; aatx<nrOfLayers; aatx++) {
     if (mod_order == 4) {
-      QAM_amp128 = _mm_set1_epi16(QAM16_n1);  // 2/sqrt(10)
-      QAM_amp128b = _mm_setzero_si128();
-      QAM_amp128c = _mm_setzero_si128();
-    }
+      QAM_amp128 = simde_mm_set1_epi16(QAM16_n1);  // 2/sqrt(10)
+      QAM_amp128b = simde_mm_setzero_si128();
+      QAM_amp128c = simde_mm_setzero_si128();
+    }
     else if (mod_order == 6) {
-      QAM_amp128  = _mm_set1_epi16(QAM64_n1); //
-      QAM_amp128b = _mm_set1_epi16(QAM64_n2);
-      QAM_amp128c = _mm_setzero_si128();
+      QAM_amp128  = simde_mm_set1_epi16(QAM64_n1); //
+      QAM_amp128b = simde_mm_set1_epi16(QAM64_n2);
+      QAM_amp128c = simde_mm_setzero_si128();
     }
     else if (mod_order == 8) {
-      QAM_amp128  = _mm_set1_epi16(QAM256_n1); //
-      QAM_amp128b = _mm_set1_epi16(QAM256_n2);
-      QAM_amp128c = _mm_set1_epi16(QAM256_n3);
+      QAM_amp128  = simde_mm_set1_epi16(QAM256_n1); //
+      QAM_amp128b = simde_mm_set1_epi16(QAM256_n2);
+      QAM_amp128c = simde_mm_set1_epi16(QAM256_n3);
     }
 
     //    printf("comp: rxdataF_comp %p, symbol %d\n",rxdataF_comp[0],symbol);
 
     for (aarx=0; aarx<frame_parms->nb_antennas_rx; aarx++)  {
-      ul_ch128          = (__m128i *)&ul_ch_estimates_ext[aatx*frame_parms->nb_antennas_rx+aarx][symbol*(off+(nb_rb*12))];
-      ul_ch_mag128      = (__m128i *)&ul_ch_mag[aatx*frame_parms->nb_antennas_rx+aarx][symbol*(off+(nb_rb*12))];
-      ul_ch_mag128b     = (__m128i *)&ul_ch_magb[aatx*frame_parms->nb_antennas_rx+aarx][symbol*(off+(nb_rb*12))];
-      ul_ch_mag128c     = (__m128i *)&ul_ch_magc[aatx*frame_parms->nb_antennas_rx+aarx][symbol*(off+(nb_rb*12))];
-      rxdataF128        = (__m128i *)&rxdataF_ext[aarx][symbol*(off+(nb_rb*12))];
-      rxdataF_comp128   = (__m128i *)&rxdataF_comp[aatx*frame_parms->nb_antennas_rx+aarx][symbol*(off+(nb_rb*12))];
+      ul_ch128          = (simde__m128i *)&ul_ch_estimates_ext[aatx*frame_parms->nb_antennas_rx+aarx][symbol*(off+(nb_rb*12))];
+      ul_ch_mag128      = (simde__m128i *)&ul_ch_mag[aatx*frame_parms->nb_antennas_rx+aarx][symbol*(off+(nb_rb*12))];
+      ul_ch_mag128b     = (simde__m128i *)&ul_ch_magb[aatx*frame_parms->nb_antennas_rx+aarx][symbol*(off+(nb_rb*12))];
+      ul_ch_mag128c     = (simde__m128i *)&ul_ch_magc[aatx*frame_parms->nb_antennas_rx+aarx][symbol*(off+(nb_rb*12))];
+      rxdataF128        = (simde__m128i *)&rxdataF_ext[aarx][symbol*(off+(nb_rb*12))];
+      rxdataF_comp128   = (simde__m128i *)&rxdataF_comp[aatx*frame_parms->nb_antennas_rx+aarx][symbol*(off+(nb_rb*12))];
 
 
       for (rb=0; rb<nb_rb_0; rb++) {
@@ -667,44 +585,44 @@ void nr_ulsch_channel_compensation(int **rxdataF_ext,
 
           //print_shorts("ch:",(int16_t*)&ul_ch128[0]);
 
-          mmtmpD0 = _mm_madd_epi16(ul_ch128[0],ul_ch128[0]);
-          mmtmpD0 = _mm_srai_epi32(mmtmpD0,output_shift);
+          mmtmpD0 = simde_mm_madd_epi16(ul_ch128[0],ul_ch128[0]);
+          mmtmpD0 = simde_mm_srai_epi32(mmtmpD0,output_shift);
 
-          mmtmpD1 = _mm_madd_epi16(ul_ch128[1],ul_ch128[1]);
-          mmtmpD1 = _mm_srai_epi32(mmtmpD1,output_shift);
+          mmtmpD1 = simde_mm_madd_epi16(ul_ch128[1],ul_ch128[1]);
+          mmtmpD1 = simde_mm_srai_epi32(mmtmpD1,output_shift);
 
-          mmtmpD0 = _mm_packs_epi32(mmtmpD0,mmtmpD1);
+          mmtmpD0 = simde_mm_packs_epi32(mmtmpD0,mmtmpD1);
 
           // store channel magnitude here in a new field of ulsch
 
-          ul_ch_mag128[0] = _mm_unpacklo_epi16(mmtmpD0,mmtmpD0);
+          ul_ch_mag128[0] = simde_mm_unpacklo_epi16(mmtmpD0,mmtmpD0);
           ul_ch_mag128b[0] = ul_ch_mag128[0];
           ul_ch_mag128c[0] = ul_ch_mag128[0];
-          ul_ch_mag128[0] = _mm_mulhrs_epi16(ul_ch_mag128[0],QAM_amp128);
-          ul_ch_mag128b[0] = _mm_mulhrs_epi16(ul_ch_mag128b[0],QAM_amp128b);
-          ul_ch_mag128c[0] = _mm_mulhrs_epi16(ul_ch_mag128c[0],QAM_amp128c);
+          ul_ch_mag128[0] = simde_mm_mulhrs_epi16(ul_ch_mag128[0],QAM_amp128);
+          ul_ch_mag128b[0] = simde_mm_mulhrs_epi16(ul_ch_mag128b[0],QAM_amp128b);
+          ul_ch_mag128c[0] = simde_mm_mulhrs_epi16(ul_ch_mag128c[0],QAM_amp128c);
           // print_ints("ch: = ",(int32_t*)&mmtmpD0);
           // print_shorts("QAM_amp:",(int16_t*)&QAM_amp128);
           // print_shorts("mag:",(int16_t*)&ul_ch_mag128[0]);
 
-          ul_ch_mag128[1]  = _mm_unpackhi_epi16(mmtmpD0,mmtmpD0);
+          ul_ch_mag128[1]  = simde_mm_unpackhi_epi16(mmtmpD0,mmtmpD0);
           ul_ch_mag128b[1] = ul_ch_mag128[1];
           ul_ch_mag128c[1] = ul_ch_mag128[1];
-          ul_ch_mag128[1]  = _mm_mulhrs_epi16(ul_ch_mag128[1],QAM_amp128);
-          ul_ch_mag128b[1] = _mm_mulhrs_epi16(ul_ch_mag128b[1],QAM_amp128b);
-          ul_ch_mag128c[1] = _mm_mulhrs_epi16(ul_ch_mag128c[1],QAM_amp128c);
+          ul_ch_mag128[1]  = simde_mm_mulhrs_epi16(ul_ch_mag128[1],QAM_amp128);
+          ul_ch_mag128b[1] = simde_mm_mulhrs_epi16(ul_ch_mag128b[1],QAM_amp128b);
+          ul_ch_mag128c[1] = simde_mm_mulhrs_epi16(ul_ch_mag128c[1],QAM_amp128c);
 
-          mmtmpD0 = _mm_madd_epi16(ul_ch128[2],ul_ch128[2]);
-          mmtmpD0 = _mm_srai_epi32(mmtmpD0,output_shift);
-          mmtmpD1 = _mm_packs_epi32(mmtmpD0,mmtmpD0);
+          mmtmpD0 = simde_mm_madd_epi16(ul_ch128[2],ul_ch128[2]);
+          mmtmpD0 = simde_mm_srai_epi32(mmtmpD0,output_shift);
+          mmtmpD1 = simde_mm_packs_epi32(mmtmpD0,mmtmpD0);
 
-          ul_ch_mag128[2]  = _mm_unpacklo_epi16(mmtmpD1,mmtmpD1);
+          ul_ch_mag128[2]  = simde_mm_unpacklo_epi16(mmtmpD1,mmtmpD1);
           ul_ch_mag128b[2] = ul_ch_mag128[2];
           ul_ch_mag128c[2] = ul_ch_mag128[2];
 
-          ul_ch_mag128[2]  = _mm_mulhrs_epi16(ul_ch_mag128[2],QAM_amp128);
-          ul_ch_mag128b[2] = _mm_mulhrs_epi16(ul_ch_mag128b[2],QAM_amp128b);
-          ul_ch_mag128c[2] = _mm_mulhrs_epi16(ul_ch_mag128c[2],QAM_amp128c);
+          ul_ch_mag128[2]  = simde_mm_mulhrs_epi16(ul_ch_mag128[2],QAM_amp128);
+          ul_ch_mag128b[2] = simde_mm_mulhrs_epi16(ul_ch_mag128b[2],QAM_amp128b);
+          ul_ch_mag128c[2] = simde_mm_mulhrs_epi16(ul_ch_mag128c[2],QAM_amp128c);
         }
 
         // Multiply received data by conjugated channel
@@ -737,31 +655,31 @@ void nr_ulsch_channel_compensation(int **rxdataF_ext,
 
           avg_rho_re[aarx][aatx*nrOfLayers+atx] = 0;
           avg_rho_im[aarx][aatx*nrOfLayers+atx] = 0;
-          rho128        = (__m128i *)&rho[aarx][aatx*nrOfLayers+atx][symbol*(off+(nb_rb*12))];
-          ul_ch128      = (__m128i *)&ul_ch_estimates_ext[aatx*frame_parms->nb_antennas_rx+aarx][symbol*(off+(nb_rb*12))];
-          ul_ch128_2    = (__m128i *)&ul_ch_estimates_ext[atx*frame_parms->nb_antennas_rx+aarx][symbol*(off+(nb_rb*12))];
+          rho128        = (simde__m128i *)&rho[aarx][aatx*nrOfLayers+atx][symbol*(off+(nb_rb*12))];
+          ul_ch128      = (simde__m128i *)&ul_ch_estimates_ext[aatx*frame_parms->nb_antennas_rx+aarx][symbol*(off+(nb_rb*12))];
+          ul_ch128_2    = (simde__m128i *)&ul_ch_estimates_ext[atx*frame_parms->nb_antennas_rx+aarx][symbol*(off+(nb_rb*12))];
 
           for (rb=0; rb<nb_rb_0; rb++) {
             // multiply by conjugated channel
-            mmtmpD0 = _mm_madd_epi16(ul_ch128[0],ul_ch128_2[0]);
+            mmtmpD0 = simde_mm_madd_epi16(ul_ch128[0],ul_ch128_2[0]);
             //  print_ints("re",&mmtmpD0);
 
             // mmtmpD0 contains real part of 4 consecutive outputs (32-bit)
-            mmtmpD1 = _mm_shufflelo_epi16(ul_ch128[0],_MM_SHUFFLE(2,3,0,1));
-            mmtmpD1 = _mm_shufflehi_epi16(mmtmpD1,_MM_SHUFFLE(2,3,0,1));
-            mmtmpD1 = _mm_sign_epi16(mmtmpD1,*(__m128i*)&conjugate[0]);
+            mmtmpD1 = simde_mm_shufflelo_epi16(ul_ch128[0], SIMDE_MM_SHUFFLE(2,3,0,1));
+            mmtmpD1 = simde_mm_shufflehi_epi16(mmtmpD1, SIMDE_MM_SHUFFLE(2,3,0,1));
+            mmtmpD1 = simde_mm_sign_epi16(mmtmpD1,*(simde__m128i*)&conjugate[0]);
             //  print_ints("im",&mmtmpD1);
-            mmtmpD1 = _mm_madd_epi16(mmtmpD1,ul_ch128_2[0]);
+            mmtmpD1 = simde_mm_madd_epi16(mmtmpD1,ul_ch128_2[0]);
             // mmtmpD1 contains imag part of 4 consecutive outputs (32-bit)
-            mmtmpD0 = _mm_srai_epi32(mmtmpD0,output_shift);
+            mmtmpD0 = simde_mm_srai_epi32(mmtmpD0,output_shift);
             //  print_ints("re(shift)",&mmtmpD0);
-            mmtmpD1 = _mm_srai_epi32(mmtmpD1,output_shift);
+            mmtmpD1 = simde_mm_srai_epi32(mmtmpD1,output_shift);
             //  print_ints("im(shift)",&mmtmpD1);
-            mmtmpD2 = _mm_unpacklo_epi32(mmtmpD0,mmtmpD1);
-            mmtmpD3 = _mm_unpackhi_epi32(mmtmpD0,mmtmpD1);
+            mmtmpD2 = simde_mm_unpacklo_epi32(mmtmpD0,mmtmpD1);
+            mmtmpD3 = simde_mm_unpackhi_epi32(mmtmpD0,mmtmpD1);
             //        print_ints("c0",&mmtmpD2);
             //  print_ints("c1",&mmtmpD3);
-            rho128[0] = _mm_packs_epi32(mmtmpD2,mmtmpD3);
+            rho128[0] = simde_mm_packs_epi32(mmtmpD2,mmtmpD3);
 
             //print_shorts("rx:",ul_ch128_2);
             //print_shorts("ch:",ul_ch128);
@@ -777,18 +695,18 @@ void nr_ulsch_channel_compensation(int **rxdataF_ext,
               ((int16_t*)&rho128[0])[5] +
               ((int16_t*)&rho128[0])[7])/16;//
             // multiply by conjugated channel
-            mmtmpD0 = _mm_madd_epi16(ul_ch128[1],ul_ch128_2[1]);
+            mmtmpD0 = simde_mm_madd_epi16(ul_ch128[1],ul_ch128_2[1]);
             // mmtmpD0 contains real part of 4 consecutive outputs (32-bit)
-            mmtmpD1 = _mm_shufflelo_epi16(ul_ch128[1],_MM_SHUFFLE(2,3,0,1));
-            mmtmpD1 = _mm_shufflehi_epi16(mmtmpD1,_MM_SHUFFLE(2,3,0,1));
-            mmtmpD1 = _mm_sign_epi16(mmtmpD1,*(__m128i*)conjugate);
-            mmtmpD1 = _mm_madd_epi16(mmtmpD1,ul_ch128_2[1]);
+            mmtmpD1 = simde_mm_shufflelo_epi16(ul_ch128[1], SIMDE_MM_SHUFFLE(2,3,0,1));
+            mmtmpD1 = simde_mm_shufflehi_epi16(mmtmpD1, SIMDE_MM_SHUFFLE(2,3,0,1));
+            mmtmpD1 = simde_mm_sign_epi16(mmtmpD1,*(simde__m128i*)conjugate);
+            mmtmpD1 = simde_mm_madd_epi16(mmtmpD1,ul_ch128_2[1]);
             // mmtmpD1 contains imag part of 4 consecutive outputs (32-bit)
-            mmtmpD0 = _mm_srai_epi32(mmtmpD0,output_shift);
-            mmtmpD1 = _mm_srai_epi32(mmtmpD1,output_shift);
-            mmtmpD2 = _mm_unpacklo_epi32(mmtmpD0,mmtmpD1);
-            mmtmpD3 = _mm_unpackhi_epi32(mmtmpD0,mmtmpD1);
-            rho128[1] =_mm_packs_epi32(mmtmpD2,mmtmpD3);
+            mmtmpD0 = simde_mm_srai_epi32(mmtmpD0,output_shift);
+            mmtmpD1 = simde_mm_srai_epi32(mmtmpD1,output_shift);
+            mmtmpD2 = simde_mm_unpacklo_epi32(mmtmpD0,mmtmpD1);
+            mmtmpD3 = simde_mm_unpackhi_epi32(mmtmpD0,mmtmpD1);
+            rho128[1] =simde_mm_packs_epi32(mmtmpD2,mmtmpD3);
             //print_shorts("rx:",ul_ch128_2+1);
             //print_shorts("ch:",ul_ch128+1);
             //print_shorts("pack:",rho128+1);
@@ -804,19 +722,19 @@ void nr_ulsch_channel_compensation(int **rxdataF_ext,
               ((int16_t*)&rho128[1])[5] +
               ((int16_t*)&rho128[1])[7])/16;
 
-            mmtmpD0 = _mm_madd_epi16(ul_ch128[2],ul_ch128_2[2]);
+            mmtmpD0 = simde_mm_madd_epi16(ul_ch128[2],ul_ch128_2[2]);
             // mmtmpD0 contains real part of 4 consecutive outputs (32-bit)
-            mmtmpD1 = _mm_shufflelo_epi16(ul_ch128[2],_MM_SHUFFLE(2,3,0,1));
-            mmtmpD1 = _mm_shufflehi_epi16(mmtmpD1,_MM_SHUFFLE(2,3,0,1));
-            mmtmpD1 = _mm_sign_epi16(mmtmpD1,*(__m128i*)conjugate);
-            mmtmpD1 = _mm_madd_epi16(mmtmpD1,ul_ch128_2[2]);
+            mmtmpD1 = simde_mm_shufflelo_epi16(ul_ch128[2], SIMDE_MM_SHUFFLE(2,3,0,1));
+            mmtmpD1 = simde_mm_shufflehi_epi16(mmtmpD1, SIMDE_MM_SHUFFLE(2,3,0,1));
+            mmtmpD1 = simde_mm_sign_epi16(mmtmpD1,*(simde__m128i*)conjugate);
+            mmtmpD1 = simde_mm_madd_epi16(mmtmpD1,ul_ch128_2[2]);
             // mmtmpD1 contains imag part of 4 consecutive outputs (32-bit)
-            mmtmpD0 = _mm_srai_epi32(mmtmpD0,output_shift);
-            mmtmpD1 = _mm_srai_epi32(mmtmpD1,output_shift);
-            mmtmpD2 = _mm_unpacklo_epi32(mmtmpD0,mmtmpD1);
-            mmtmpD3 = _mm_unpackhi_epi32(mmtmpD0,mmtmpD1);
+            mmtmpD0 = simde_mm_srai_epi32(mmtmpD0,output_shift);
+            mmtmpD1 = simde_mm_srai_epi32(mmtmpD1,output_shift);
+            mmtmpD2 = simde_mm_unpacklo_epi32(mmtmpD0,mmtmpD1);
+            mmtmpD3 = simde_mm_unpackhi_epi32(mmtmpD0,mmtmpD1);
 
-            rho128[2] = _mm_packs_epi32(mmtmpD2,mmtmpD3);
+            rho128[2] = simde_mm_packs_epi32(mmtmpD2,mmtmpD3);
             //print_shorts("rx:",ul_ch128_2+2);
             //print_shorts("ch:",ul_ch128+2);
             //print_shorts("pack:",rho128+2);
@@ -845,204 +763,8 @@ void nr_ulsch_channel_compensation(int **rxdataF_ext,
     }
   }
 
-  _mm_empty();
-  _m_empty();
-
-#elif defined(__arm__) || defined(__aarch64__)
-
-  unsigned short rb;
-  unsigned char aatx,aarx,symbol_mod,is_dmrs_symbol=0;
-
-  int16x4_t *ul_ch128,*ul_ch128_2,*rxdataF128;
-  int32x4_t mmtmpD0,mmtmpD1,mmtmpD0b,mmtmpD1b;
-  int16x8_t *ul_ch_mag128,*ul_ch_mag128b,mmtmpD2,mmtmpD3,mmtmpD4;
-  int16x8_t QAM_amp128,QAM_amp128b;
-  int16x4x2_t *rxdataF_comp128,*rho128;
-
-  int16_t conj[4]__attribute__((aligned(16))) = {1,-1,1,-1};
-  int32x4_t output_shift128 = vmovq_n_s32(-(int32_t)output_shift);
-
-  symbol_mod = (symbol>=(7-frame_parms->Ncp)) ? symbol-(7-frame_parms->Ncp) : symbol;
-
-  if ((symbol_mod == 0) || (symbol_mod == (4-frame_parms->Ncp))) {
-    if (nrOfLayers==1) { // 10 out of 12 so don't reduce size
-      nb_rb=1+(5*nb_rb/6);
-    }
-    else {
-      is_dmrs_symbol=1;
-    }
-  }
-
-  for (aatx=0; aatx<nrOfLayers; aatx++) {
-    if (mod_order == 4) {
-      QAM_amp128  = vmovq_n_s16(QAM16_n1);  // 2/sqrt(10)
-      QAM_amp128b = vmovq_n_s16(0);
-    } else if (mod_order == 6) {
-      QAM_amp128  = vmovq_n_s16(QAM64_n1); //
-      QAM_amp128b = vmovq_n_s16(QAM64_n2);
-    }
-    //    printf("comp: rxdataF_comp %p, symbol %d\n",rxdataF_comp[0],symbol);
-
-    for (aarx=0; aarx<frame_parms->nb_antennas_rx; aarx++) {
-      ul_ch128          = (int16x4_t*)&ul_ch_estimates_ext[aatx*frame_parms->nb_antennas_rx+aarx][symbol*frame_parms->N_RB_UL*12];
-      ul_ch_mag128      = (int16x8_t*)&ul_ch_mag[aatx*frame_parms->nb_antennas_rx+aarx][symbol*frame_parms->N_RB_UL*12];
-      ul_ch_mag128b     = (int16x8_t*)&ul_ch_magb[aatx*frame_parms->nb_antennas_rx+aarx][symbol*frame_parms->N_RB_UL*12];
-      rxdataF128        = (int16x4_t*)&rxdataF_ext[aarx][symbol*frame_parms->N_RB_UL*12];
-      rxdataF_comp128   = (int16x4x2_t*)&rxdataF_comp[aatx*frame_parms->nb_antennas_rx+aarx][symbol*frame_parms->N_RB_UL*12];
-
-      for (rb=0; rb<nb_rb; rb++) {
-  if (mod_order>2) {
-    // get channel amplitude if not QPSK
-    mmtmpD0 = vmull_s16(ul_ch128[0], ul_ch128[0]);
-    // mmtmpD0 = [ch0*ch0,ch1*ch1,ch2*ch2,ch3*ch3];
-    mmtmpD0 = vqshlq_s32(vqaddq_s32(mmtmpD0,vrev64q_s32(mmtmpD0)),output_shift128);
-    // mmtmpD0 = [ch0*ch0 + ch1*ch1,ch0*ch0 + ch1*ch1,ch2*ch2 + ch3*ch3,ch2*ch2 + ch3*ch3]>>output_shift128 on 32-bits
-    mmtmpD1 = vmull_s16(ul_ch128[1], ul_ch128[1]);
-    mmtmpD1 = vqshlq_s32(vqaddq_s32(mmtmpD1,vrev64q_s32(mmtmpD1)),output_shift128);
-    mmtmpD2 = vcombine_s16(vmovn_s32(mmtmpD0),vmovn_s32(mmtmpD1));
-    // mmtmpD2 = [ch0*ch0 + ch1*ch1,ch0*ch0 + ch1*ch1,ch2*ch2 + ch3*ch3,ch2*ch2 + ch3*ch3,ch4*ch4 + ch5*ch5,ch4*ch4 + ch5*ch5,ch6*ch6 + ch7*ch7,ch6*ch6 + ch7*ch7]>>output_shift128 on 16-bits
-    mmtmpD0 = vmull_s16(ul_ch128[2], ul_ch128[2]);
-    mmtmpD0 = vqshlq_s32(vqaddq_s32(mmtmpD0,vrev64q_s32(mmtmpD0)),output_shift128);
-    mmtmpD1 = vmull_s16(ul_ch128[3], ul_ch128[3]);
-    mmtmpD1 = vqshlq_s32(vqaddq_s32(mmtmpD1,vrev64q_s32(mmtmpD1)),output_shift128);
-    mmtmpD3 = vcombine_s16(vmovn_s32(mmtmpD0),vmovn_s32(mmtmpD1));
-    if (is_dmrs_symbol==0) {
-      mmtmpD0 = vmull_s16(ul_ch128[4], ul_ch128[4]);
-      mmtmpD0 = vqshlq_s32(vqaddq_s32(mmtmpD0,vrev64q_s32(mmtmpD0)),output_shift128);
-      mmtmpD1 = vmull_s16(ul_ch128[5], ul_ch128[5]);
-      mmtmpD1 = vqshlq_s32(vqaddq_s32(mmtmpD1,vrev64q_s32(mmtmpD1)),output_shift128);
-      mmtmpD4 = vcombine_s16(vmovn_s32(mmtmpD0),vmovn_s32(mmtmpD1));
-    }
-
-    ul_ch_mag128b[0] = vqdmulhq_s16(mmtmpD2,QAM_amp128b);
-    ul_ch_mag128b[1] = vqdmulhq_s16(mmtmpD3,QAM_amp128b);
-    ul_ch_mag128[0] = vqdmulhq_s16(mmtmpD2,QAM_amp128);
-    ul_ch_mag128[1] = vqdmulhq_s16(mmtmpD3,QAM_amp128);
-
-    if (is_dmrs_symbol==0) {
-      ul_ch_mag128b[2] = vqdmulhq_s16(mmtmpD4,QAM_amp128b);
-      ul_ch_mag128[2]  = vqdmulhq_s16(mmtmpD4,QAM_amp128);
-    }
-  }
-
-  mmtmpD0 = vmull_s16(ul_ch128[0], rxdataF128[0]);
-  //mmtmpD0 = [Re(ch[0])Re(rx[0]) Im(ch[0])Im(ch[0]) Re(ch[1])Re(rx[1]) Im(ch[1])Im(ch[1])]
-  mmtmpD1 = vmull_s16(ul_ch128[1], rxdataF128[1]);
-  //mmtmpD1 = [Re(ch[2])Re(rx[2]) Im(ch[2])Im(ch[2]) Re(ch[3])Re(rx[3]) Im(ch[3])Im(ch[3])]
-  mmtmpD0 = vcombine_s32(vpadd_s32(vget_low_s32(mmtmpD0),vget_high_s32(mmtmpD0)),
-             vpadd_s32(vget_low_s32(mmtmpD1),vget_high_s32(mmtmpD1)));
-  //mmtmpD0 = [Re(ch[0])Re(rx[0])+Im(ch[0])Im(ch[0]) Re(ch[1])Re(rx[1])+Im(ch[1])Im(ch[1]) Re(ch[2])Re(rx[2])+Im(ch[2])Im(ch[2]) Re(ch[3])Re(rx[3])+Im(ch[3])Im(ch[3])]
-
-  mmtmpD0b = vmull_s16(vrev32_s16(vmul_s16(ul_ch128[0],*(int16x4_t*)conj)), rxdataF128[0]);
-  //mmtmpD0 = [-Im(ch[0])Re(rx[0]) Re(ch[0])Im(rx[0]) -Im(ch[1])Re(rx[1]) Re(ch[1])Im(rx[1])]
-  mmtmpD1b = vmull_s16(vrev32_s16(vmul_s16(ul_ch128[1],*(int16x4_t*)conj)), rxdataF128[1]);
-  //mmtmpD0 = [-Im(ch[2])Re(rx[2]) Re(ch[2])Im(rx[2]) -Im(ch[3])Re(rx[3]) Re(ch[3])Im(rx[3])]
-  mmtmpD1 = vcombine_s32(vpadd_s32(vget_low_s32(mmtmpD0b),vget_high_s32(mmtmpD0b)),
-             vpadd_s32(vget_low_s32(mmtmpD1b),vget_high_s32(mmtmpD1b)));
-  //mmtmpD1 = [-Im(ch[0])Re(rx[0])+Re(ch[0])Im(rx[0]) -Im(ch[1])Re(rx[1])+Re(ch[1])Im(rx[1]) -Im(ch[2])Re(rx[2])+Re(ch[2])Im(rx[2]) -Im(ch[3])Re(rx[3])+Re(ch[3])Im(rx[3])]
-
-  mmtmpD0 = vqshlq_s32(mmtmpD0,output_shift128);
-  mmtmpD1 = vqshlq_s32(mmtmpD1,output_shift128);
-  rxdataF_comp128[0] = vzip_s16(vmovn_s32(mmtmpD0),vmovn_s32(mmtmpD1));
-  mmtmpD0 = vmull_s16(ul_ch128[2], rxdataF128[2]);
-  mmtmpD1 = vmull_s16(ul_ch128[3], rxdataF128[3]);
-  mmtmpD0 = vcombine_s32(vpadd_s32(vget_low_s32(mmtmpD0),vget_high_s32(mmtmpD0)),
-             vpadd_s32(vget_low_s32(mmtmpD1),vget_high_s32(mmtmpD1)));
-  mmtmpD0b = vmull_s16(vrev32_s16(vmul_s16(ul_ch128[2],*(int16x4_t*)conj)), rxdataF128[2]);
-  mmtmpD1b = vmull_s16(vrev32_s16(vmul_s16(ul_ch128[3],*(int16x4_t*)conj)), rxdataF128[3]);
-  mmtmpD1 = vcombine_s32(vpadd_s32(vget_low_s32(mmtmpD0b),vget_high_s32(mmtmpD0b)),
-             vpadd_s32(vget_low_s32(mmtmpD1b),vget_high_s32(mmtmpD1b)));
-  mmtmpD0 = vqshlq_s32(mmtmpD0,output_shift128);
-  mmtmpD1 = vqshlq_s32(mmtmpD1,output_shift128);
-  rxdataF_comp128[1] = vzip_s16(vmovn_s32(mmtmpD0),vmovn_s32(mmtmpD1));
-
-  if (is_dmrs_symbol==0) {
-    mmtmpD0 = vmull_s16(ul_ch128[4], rxdataF128[4]);
-    mmtmpD1 = vmull_s16(ul_ch128[5], rxdataF128[5]);
-    mmtmpD0 = vcombine_s32(vpadd_s32(vget_low_s32(mmtmpD0),vget_high_s32(mmtmpD0)),
-         vpadd_s32(vget_low_s32(mmtmpD1),vget_high_s32(mmtmpD1)));
-
-    mmtmpD0b = vmull_s16(vrev32_s16(vmul_s16(ul_ch128[4],*(int16x4_t*)conj)), rxdataF128[4]);
-    mmtmpD1b = vmull_s16(vrev32_s16(vmul_s16(ul_ch128[5],*(int16x4_t*)conj)), rxdataF128[5]);
-    mmtmpD1 = vcombine_s32(vpadd_s32(vget_low_s32(mmtmpD0b),vget_high_s32(mmtmpD0b)),
-         vpadd_s32(vget_low_s32(mmtmpD1b),vget_high_s32(mmtmpD1b)));
-
-
-    mmtmpD0 = vqshlq_s32(mmtmpD0,output_shift128);
-    mmtmpD1 = vqshlq_s32(mmtmpD1,output_shift128);
-    rxdataF_comp128[2] = vzip_s16(vmovn_s32(mmtmpD0),vmovn_s32(mmtmpD1));
-
-
-    ul_ch128+=6;
-    ul_ch_mag128+=3;
-    ul_ch_mag128b+=3;
-    rxdataF128+=6;
-    rxdataF_comp128+=3;
-
-  } else { // we have a smaller PUSCH in symbols with pilots so skip last group of 4 REs and increment less
-    ul_ch128+=4;
-    ul_ch_mag128+=2;
-    ul_ch_mag128b+=2;
-    rxdataF128+=4;
-    rxdataF_comp128+=2;
-  }
-      }
-    }
-  }
-
-  if (rho) {
-    for (aarx=0; aarx<frame_parms->nb_antennas_rx; aarx++) {
-      rho128        = (int16x4x2_t*)&rho[aarx][symbol*frame_parms->N_RB_UL*12];
-      ul_ch128      = (int16x4_t*)&ul_ch_estimates_ext[aarx][symbol*frame_parms->N_RB_UL*12];
-      ul_ch128_2    = (int16x4_t*)&ul_ch_estimates_ext[2+aarx][symbol*frame_parms->N_RB_UL*12];
-      for (rb=0; rb<nb_rb; rb++) {
-  mmtmpD0 = vmull_s16(ul_ch128[0], ul_ch128_2[0]);
-  mmtmpD1 = vmull_s16(ul_ch128[1], ul_ch128_2[1]);
-  mmtmpD0 = vcombine_s32(vpadd_s32(vget_low_s32(mmtmpD0),vget_high_s32(mmtmpD0)),
-             vpadd_s32(vget_low_s32(mmtmpD1),vget_high_s32(mmtmpD1)));
-  mmtmpD0b = vmull_s16(vrev32_s16(vmul_s16(ul_ch128[0],*(int16x4_t*)conj)), ul_ch128_2[0]);
-  mmtmpD1b = vmull_s16(vrev32_s16(vmul_s16(ul_ch128[1],*(int16x4_t*)conj)), ul_ch128_2[1]);
-  mmtmpD1 = vcombine_s32(vpadd_s32(vget_low_s32(mmtmpD0b),vget_high_s32(mmtmpD0b)),
-             vpadd_s32(vget_low_s32(mmtmpD1b),vget_high_s32(mmtmpD1b)));
-
-  mmtmpD0 = vqshlq_s32(mmtmpD0,output_shift128);
-  mmtmpD1 = vqshlq_s32(mmtmpD1,output_shift128);
-  rho128[0] = vzip_s16(vmovn_s32(mmtmpD0),vmovn_s32(mmtmpD1));
-
-  mmtmpD0 = vmull_s16(ul_ch128[2], ul_ch128_2[2]);
-  mmtmpD1 = vmull_s16(ul_ch128[3], ul_ch128_2[3]);
-  mmtmpD0 = vcombine_s32(vpadd_s32(vget_low_s32(mmtmpD0),vget_high_s32(mmtmpD0)),
-             vpadd_s32(vget_low_s32(mmtmpD1),vget_high_s32(mmtmpD1)));
-  mmtmpD0b = vmull_s16(vrev32_s16(vmul_s16(ul_ch128[2],*(int16x4_t*)conj)), ul_ch128_2[2]);
-  mmtmpD1b = vmull_s16(vrev32_s16(vmul_s16(ul_ch128[3],*(int16x4_t*)conj)), ul_ch128_2[3]);
-  mmtmpD1 = vcombine_s32(vpadd_s32(vget_low_s32(mmtmpD0b),vget_high_s32(mmtmpD0b)),
-             vpadd_s32(vget_low_s32(mmtmpD1b),vget_high_s32(mmtmpD1b)));
-
-  mmtmpD0 = vqshlq_s32(mmtmpD0,output_shift128);
-  mmtmpD1 = vqshlq_s32(mmtmpD1,output_shift128);
-  rho128[1] = vzip_s16(vmovn_s32(mmtmpD0),vmovn_s32(mmtmpD1));
-
-  mmtmpD0 = vmull_s16(ul_ch128[0], ul_ch128_2[0]);
-  mmtmpD1 = vmull_s16(ul_ch128[1], ul_ch128_2[1]);
-  mmtmpD0 = vcombine_s32(vpadd_s32(vget_low_s32(mmtmpD0),vget_high_s32(mmtmpD0)),
-             vpadd_s32(vget_low_s32(mmtmpD1),vget_high_s32(mmtmpD1)));
-  mmtmpD0b = vmull_s16(vrev32_s16(vmul_s16(ul_ch128[4],*(int16x4_t*)conj)), ul_ch128_2[4]);
-  mmtmpD1b = vmull_s16(vrev32_s16(vmul_s16(ul_ch128[5],*(int16x4_t*)conj)), ul_ch128_2[5]);
-  mmtmpD1 = vcombine_s32(vpadd_s32(vget_low_s32(mmtmpD0b),vget_high_s32(mmtmpD0b)),
-             vpadd_s32(vget_low_s32(mmtmpD1b),vget_high_s32(mmtmpD1b)));
-
-  mmtmpD0 = vqshlq_s32(mmtmpD0,output_shift128);
-  mmtmpD1 = vqshlq_s32(mmtmpD1,output_shift128);
-  rho128[2] = vzip_s16(vmovn_s32(mmtmpD0),vmovn_s32(mmtmpD1));
-
-
-  ul_ch128+=6;
-  ul_ch128_2+=6;
-  rho128+=3;
-      }
-    }
-  }
-#endif
+  simde_mm_empty();
+  simde_m_empty();
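Everything the deleted __arm__/__aarch64__ branch did by hand is now generated by SIMDE, which lowers each simde_mm_* call to a matching NEON intrinsic or a scalar fallback at compile time; the two trailing calls survive only as MMX-state hygiene. Conceptually they reduce to something like the following (a sketch, not the literal SIMDE source):

/* Only a build that really touches MMX registers needs the emms barrier;
 * SSE-only x86 and all Arm builds get an empty function. */
#if defined(__MMX__)
#include <mmintrin.h>
static inline void mm_empty_sketch(void) { _mm_empty(); } /* clear the MMX/x87 tag word */
#else
static inline void mm_empty_sketch(void) { }              /* nothing to do elsewhere */
#endif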
 
 
 #ifdef DEBUG_CH_COMP
@@ -1089,81 +811,54 @@ void nr_ulsch_detection_mrc(NR_DL_FRAME_PARMS *frame_parms,
                 uint16_t nb_rb,
                 int length) {
   int n_rx = frame_parms->nb_antennas_rx;
-#if defined(__x86_64__) || defined(__i386__)
-  __m128i *rxdataF_comp128[2],*ul_ch_mag128[2],*ul_ch_mag128b[2],*ul_ch_mag128c[2];
-#elif defined(__arm__) || defined(__aarch64__)
-  int16x8_t *rxdataF_comp128_0,*ul_ch_mag128_0,*ul_ch_mag128_0b;
-  int16x8_t *rxdataF_comp128_1,*ul_ch_mag128_1,*ul_ch_mag128_1b;
-#endif
+  simde__m128i *rxdataF_comp128[2],*ul_ch_mag128[2],*ul_ch_mag128b[2],*ul_ch_mag128c[2];
   int32_t i;
   uint32_t nb_rb_0 = length/12 + ((length%12)?1:0);
 
   int off = ((nb_rb&1) == 1)? 4:0;
 
   if (n_rx > 1) {
-    #if defined(__x86_64__) || defined(__i386__)
 
     int nb_re = nb_rb * 12;
 
     for (int aatx = 0; aatx < nrOfLayers; aatx++) {
 
-      rxdataF_comp128[0]   = (__m128i *)&rxdataF_comp[aatx*frame_parms->nb_antennas_rx][(symbol*(nb_re + off))];
-      ul_ch_mag128[0]      = (__m128i *)&ul_ch_mag[aatx*frame_parms->nb_antennas_rx][(symbol*(nb_re + off))];
-      ul_ch_mag128b[0]     = (__m128i *)&ul_ch_magb[aatx*frame_parms->nb_antennas_rx][(symbol*(nb_re + off))];
-      ul_ch_mag128c[0]     = (__m128i *)&ul_ch_magc[aatx*frame_parms->nb_antennas_rx][(symbol*(nb_re + off))];
+      rxdataF_comp128[0]   = (simde__m128i *)&rxdataF_comp[aatx*frame_parms->nb_antennas_rx][(symbol*(nb_re + off))];
+      ul_ch_mag128[0]      = (simde__m128i *)&ul_ch_mag[aatx*frame_parms->nb_antennas_rx][(symbol*(nb_re + off))];
+      ul_ch_mag128b[0]     = (simde__m128i *)&ul_ch_magb[aatx*frame_parms->nb_antennas_rx][(symbol*(nb_re + off))];
+      ul_ch_mag128c[0]     = (simde__m128i *)&ul_ch_magc[aatx*frame_parms->nb_antennas_rx][(symbol*(nb_re + off))];
 
       for (int aa=1;aa < n_rx;aa++) {
-        rxdataF_comp128[1]   = (__m128i *)&rxdataF_comp[aatx*frame_parms->nb_antennas_rx+aa][(symbol*(nb_re + off))];
-        ul_ch_mag128[1]      = (__m128i *)&ul_ch_mag[aatx*frame_parms->nb_antennas_rx+aa][(symbol*(nb_re + off))];
-        ul_ch_mag128b[1]     = (__m128i *)&ul_ch_magb[aatx*frame_parms->nb_antennas_rx+aa][(symbol*(nb_re + off))];
-        ul_ch_mag128c[1]     = (__m128i *)&ul_ch_magc[aatx*frame_parms->nb_antennas_rx+aa][(symbol*(nb_re + off))];
+        rxdataF_comp128[1]   = (simde__m128i *)&rxdataF_comp[aatx*frame_parms->nb_antennas_rx+aa][(symbol*(nb_re + off))];
+        ul_ch_mag128[1]      = (simde__m128i *)&ul_ch_mag[aatx*frame_parms->nb_antennas_rx+aa][(symbol*(nb_re + off))];
+        ul_ch_mag128b[1]     = (simde__m128i *)&ul_ch_magb[aatx*frame_parms->nb_antennas_rx+aa][(symbol*(nb_re + off))];
+        ul_ch_mag128c[1]     = (simde__m128i *)&ul_ch_magc[aatx*frame_parms->nb_antennas_rx+aa][(symbol*(nb_re + off))];
 
         // MRC on each re of rb, both on MF output and magnitude (for 16QAM/64QAM llr computation)
         for (i=0; i<nb_rb_0*3; i++) {
-            rxdataF_comp128[0][i] = _mm_adds_epi16(rxdataF_comp128[0][i],rxdataF_comp128[1][i]);
-            ul_ch_mag128[0][i]    = _mm_adds_epi16(ul_ch_mag128[0][i],ul_ch_mag128[1][i]);
-            ul_ch_mag128b[0][i]   = _mm_adds_epi16(ul_ch_mag128b[0][i],ul_ch_mag128b[1][i]);
-            ul_ch_mag128c[0][i]   = _mm_adds_epi16(ul_ch_mag128c[0][i],ul_ch_mag128c[1][i]);
+            rxdataF_comp128[0][i] = simde_mm_adds_epi16(rxdataF_comp128[0][i],rxdataF_comp128[1][i]);
+            ul_ch_mag128[0][i]    = simde_mm_adds_epi16(ul_ch_mag128[0][i],ul_ch_mag128[1][i]);
+            ul_ch_mag128b[0][i]   = simde_mm_adds_epi16(ul_ch_mag128b[0][i],ul_ch_mag128b[1][i]);
+            ul_ch_mag128c[0][i]   = simde_mm_adds_epi16(ul_ch_mag128c[0][i],ul_ch_mag128c[1][i]);
            //rxdataF_comp128[0][i] = simde_mm_add_epi16(rxdataF_comp128[0][i],(*(simde__m128i *)&jitterc[0]));
         }
       }
 
       if (rho) {
-        __m128i *rho128[2];
+        simde__m128i *rho128[2];
         for (int aatx2 = 0; aatx2 < nrOfLayers; aatx2++) {
-          rho128[0] = (__m128i *) &rho[0][aatx * nrOfLayers + aatx2][(symbol * (nb_re + off))];
+          rho128[0] = (simde__m128i *) &rho[0][aatx * nrOfLayers + aatx2][(symbol * (nb_re + off))];
           for (int aa = 1; aa < n_rx; aa++) {
-            rho128[1] = (__m128i *) &rho[aa][aatx * nrOfLayers + aatx2][(symbol * (nb_re + off))];
+            rho128[1] = (simde__m128i *) &rho[aa][aatx * nrOfLayers + aatx2][(symbol * (nb_re + off))];
             for (i = 0; i < nb_rb_0 * 3; i++) {
-              rho128[0][i] = _mm_adds_epi16(rho128[0][i], rho128[1][i]);
+              rho128[0][i] = simde_mm_adds_epi16(rho128[0][i], rho128[1][i]);
             }
           }
         }
       }
 
     }
-    #elif defined(__arm__) || defined(__aarch64__)
-    rxdataF_comp128_0   = (int16x8_t *)&rxdataF_comp[0][symbol*frame_parms->N_RB_DL*12];
-    rxdataF_comp128_1   = (int16x8_t *)&rxdataF_comp[1][symbol*frame_parms->N_RB_DL*12];
-    ul_ch_mag128_0      = (int16x8_t *)&ul_ch_mag[0][symbol*frame_parms->N_RB_DL*12];
-    ul_ch_mag128_1      = (int16x8_t *)&ul_ch_mag[1][symbol*frame_parms->N_RB_DL*12];
-    ul_ch_mag128_0b     = (int16x8_t *)&ul_ch_magb[0][symbol*frame_parms->N_RB_DL*12];
-    ul_ch_mag128_1b     = (int16x8_t *)&ul_ch_magb[1][symbol*frame_parms->N_RB_DL*12];
-      
-    // MRC on each re of rb, both on MF output and magnitude (for 16QAM/64QAM llr computation)
-    for (i=0; i<nb_rb*3; i++) {
-      rxdataF_comp128_0[i] = vhaddq_s16(rxdataF_comp128_0[i],rxdataF_comp128_1[i]);
-      ul_ch_mag128_0[i]    = vhaddq_s16(ul_ch_mag128_0[i],ul_ch_mag128_1[i]);
-      ul_ch_mag128_0b[i]   = vhaddq_s16(ul_ch_mag128_0b[i],ul_ch_mag128_1b[i]);
-      rxdataF_comp128_0[i] = vqaddq_s16(rxdataF_comp128_0[i],(*(int16x8_t *)&jitterc[0]));
-    }
-    #endif
   }
-
-#if defined(__x86_64__) || defined(__i386__)
-  _mm_empty();
-  _m_empty();
-#endif
 }
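The MRC combiner is a lane-wise saturating sum of the per-antenna matched-filter outputs (and of the channel magnitudes reused later for 16/64/256-QAM LLRs). simde_mm_adds_epi16 maps to paddsw on x86 and vqaddq_s16 on NEON, which share the saturation rule sketched below (helper names are illustrative):

#include <stdint.h>

/* Saturating 16-bit add: the shared semantics of paddsw and vqaddq_s16. */
static inline int16_t adds16(int16_t a, int16_t b)
{
  int32_t s = (int32_t)a + (int32_t)b;
  return (int16_t)(s > INT16_MAX ? INT16_MAX : s < INT16_MIN ? INT16_MIN : s);
}

/* MRC of one 16-bit lane (I or Q of one RE) across n_rx antennas. */
static inline int16_t mrc_lane(const int16_t *lane_per_antenna, int n_rx)
{
  int16_t acc = lane_per_antenna[0];
  for (int aa = 1; aa < n_rx; aa++)
    acc = adds16(acc, lane_per_antenna[aa]);
  return acc;
}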
 
 /* Zero Forcing Rx function: nr_det_HhH()
@@ -1181,39 +876,39 @@ void nr_ulsch_det_HhH(int32_t *after_mf_00,//a
 {
   int16_t nr_conjug2[8]__attribute__((aligned(16))) = {1,-1,1,-1,1,-1,1,-1} ;
   unsigned short rb;
-  __m128i *after_mf_00_128,*after_mf_01_128, *after_mf_10_128, *after_mf_11_128, ad_re_128, bc_re_128; //ad_im_128, bc_im_128;
-  __m128i *det_fin_128, det_re_128; //det_im_128, tmp_det0, tmp_det1;
+  simde__m128i *after_mf_00_128,*after_mf_01_128, *after_mf_10_128, *after_mf_11_128, ad_re_128, bc_re_128; //ad_im_128, bc_im_128;
+  simde__m128i *det_fin_128, det_re_128; //det_im_128, tmp_det0, tmp_det1;
 
-  after_mf_00_128 = (__m128i *)after_mf_00;
-  after_mf_01_128 = (__m128i *)after_mf_01;
-  after_mf_10_128 = (__m128i *)after_mf_10;
-  after_mf_11_128 = (__m128i *)after_mf_11;
+  after_mf_00_128 = (simde__m128i *)after_mf_00;
+  after_mf_01_128 = (simde__m128i *)after_mf_01;
+  after_mf_10_128 = (simde__m128i *)after_mf_10;
+  after_mf_11_128 = (simde__m128i *)after_mf_11;
 
-  det_fin_128 = (__m128i *)det_fin;
+  det_fin_128 = (simde__m128i *)det_fin;
 
   for (rb=0; rb<3*nb_rb; rb++) {
 
     //complex multiplication (I_a+jQ_a)(I_d+jQ_d) = (I_aI_d - Q_aQ_d) + j(Q_aI_d + I_aQ_d)
     //The imag part is often zero, so we compute only the real part
-    ad_re_128 = _mm_sign_epi16(after_mf_00_128[0],*(__m128i*)&nr_conjug2[0]);
-    ad_re_128 = _mm_madd_epi16(ad_re_128,after_mf_11_128[0]); //Re: I_a0*I_d0 - Q_a1*Q_d1
-    //ad_im_128 = _mm_shufflelo_epi16(after_mf_00_128[0],_MM_SHUFFLE(2,3,0,1));//permutes IQs for the low 64 bits as [I_a0 Q_a1 I_a2 Q_a3]_64bits to [Q_a1 I_a0 Q_a3 I_a2]_64bits
-    //ad_im_128 = _mm_shufflehi_epi16(ad_im_128,_MM_SHUFFLE(2,3,0,1));//permutes IQs for the high 64 bits as [I_a0 Q_a1 I_a2 Q_a3]_64bits to [Q_a1 I_a0 Q_a3 I_a2]_64bits
-    //ad_im_128 = _mm_madd_epi16(ad_im_128,after_mf_11_128[0]);//Im: (Q_aI_d + I_aQ_d)
+    ad_re_128 = simde_mm_sign_epi16(after_mf_00_128[0],*(simde__m128i*)&nr_conjug2[0]);
+    ad_re_128 = simde_mm_madd_epi16(ad_re_128,after_mf_11_128[0]); //Re: I_a0*I_d0 - Q_a1*Q_d1
+    //ad_im_128 = simde_mm_shufflelo_epi16(after_mf_00_128[0], SIMDE_MM_SHUFFLE(2,3,0,1));//permutes IQs for the low 64 bits as [I_a0 Q_a1 I_a2 Q_a3]_64bits to [Q_a1 I_a0 Q_a3 I_a2]_64bits
+    //ad_im_128 = simde_mm_shufflehi_epi16(ad_im_128, SIMDE_MM_SHUFFLE(2,3,0,1));//permutes IQs for the high 64 bits as [I_a0 Q_a1 I_a2 Q_a3]_64bits to [Q_a1 I_a0 Q_a3 I_a2]_64bits
+    //ad_im_128 = simde_mm_madd_epi16(ad_im_128,after_mf_11_128[0]);//Im: (Q_aI_d + I_aQ_d)
 
     //complex multiplication (I_b+jQ_b)(I_c+jQ_c) = (I_bI_c - Q_bQ_c) + j(Q_bI_c + I_bQ_c)
     //The imag part is often zero, so we compute only the real part
-    bc_re_128 = _mm_sign_epi16(after_mf_01_128[0],*(__m128i*)&nr_conjug2[0]);
-    bc_re_128 = _mm_madd_epi16(bc_re_128,after_mf_10_128[0]); //Re: I_b0*I_c0 - Q_b1*Q_c1
-    //bc_im_128 = _mm_shufflelo_epi16(after_mf_01_128[0],_MM_SHUFFLE(2,3,0,1));//permutes IQs for the low 64 bits as [I_b0 Q_b1 I_b2 Q_b3]_64bits to [Q_b1 I_b0 Q_b3 I_b2]_64bits
-    //bc_im_128 = _mm_shufflehi_epi16(bc_im_128,_MM_SHUFFLE(2,3,0,1));//permutes IQs for the high 64 bits as [I_b0 Q_b1 I_b2 Q_b3]_64bits to [Q_b1 I_b0 Q_b3 I_b2]_64bits
-    //bc_im_128 = _mm_madd_epi16(bc_im_128,after_mf_10_128[0]);//Im: (Q_bI_c + I_bQ_c)
+    bc_re_128 = simde_mm_sign_epi16(after_mf_01_128[0],*(simde__m128i*)&nr_conjug2[0]);
+    bc_re_128 = simde_mm_madd_epi16(bc_re_128,after_mf_10_128[0]); //Re: I_b0*I_c0 - Q_b1*Q_c1
+    //bc_im_128 = simde_mm_shufflelo_epi16(after_mf_01_128[0], SIMDE_MM_SHUFFLE(2,3,0,1));//permutes IQs for the low 64 bits as [I_b0 Q_b1 I_b2 Q_b3]_64bits to [Q_b1 I_b0 Q_b3 I_b2]_64bits
+    //bc_im_128 = simde_mm_shufflehi_epi16(bc_im_128, SIMDE_MM_SHUFFLE(2,3,0,1));//permutes IQs for the high 64 bits as [I_b0 Q_b1 I_b2 Q_b3]_64bits to [Q_b1 I_b0 Q_b3 I_b2]_64bits
+    //bc_im_128 = simde_mm_madd_epi16(bc_im_128,after_mf_10_128[0]);//Im: (Q_bI_c + I_bQ_c)
 
-    det_re_128 = _mm_sub_epi32(ad_re_128, bc_re_128);
-    //det_im_128 = _mm_sub_epi32(ad_im_128, bc_im_128);
+    det_re_128 = simde_mm_sub_epi32(ad_re_128, bc_re_128);
+    //det_im_128 = simde_mm_sub_epi32(ad_im_128, bc_im_128);
 
     //det in Q30 format
-    det_fin_128[0] = _mm_abs_epi32(det_re_128);
+    det_fin_128[0] = simde_mm_abs_epi32(det_re_128);
 
 
 #ifdef DEBUG_DLSCH_DEMOD
@@ -1228,45 +923,45 @@ void nr_ulsch_det_HhH(int32_t *after_mf_00,//a
     after_mf_10_128+=1;
     after_mf_11_128+=1;
   }
-  _mm_empty();
-  _m_empty();
+  simde_mm_empty();
+  simde_m_empty();
 }
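For the 2x2 case the determinant of the Hermitian matrix H^H * H is real and non-negative, so only Re(a*d - b*c) is formed: each sign+madd pair is a real-part complex multiply of Q15 pairs, the difference lands in Q30, and the final abs absorbs small negative residues from fixed-point rounding. One lane as scalar arithmetic (illustrative helper):

#include <stdint.h>

/* One determinant lane: a, b, c, d are complex Q15 pairs {re, im}. */
static inline int32_t det_lane(const int16_t a[2], const int16_t d[2],
                               const int16_t b[2], const int16_t c[2])
{
  int32_t ad_re = (int32_t)a[0] * d[0] - (int32_t)a[1] * d[1]; /* sign + madd */
  int32_t bc_re = (int32_t)b[0] * c[0] - (int32_t)b[1] * c[1];
  int32_t det = ad_re - bc_re; /* Q30 */
  return det < 0 ? -det : det; /* simde_mm_abs_epi32 */
}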
 
 /* Zero Forcing Rx function: nr_inv_comp_muli
  * Complex number multiplication: z = x*y
  *                                  = (x_re*y_re - x_im*y_im) + j(x_im*y_re + x_re*y_im)
  */
-__m128i nr_ulsch_inv_comp_muli(__m128i input_x,
-                         __m128i input_y)
+simde__m128i nr_ulsch_inv_comp_muli(simde__m128i input_x,
+                         simde__m128i input_y)
 {
   int16_t nr_conjug2[8]__attribute__((aligned(16))) = {1,-1,1,-1,1,-1,1,-1} ;
 
-  __m128i xy_re_128, xy_im_128;
-  __m128i output_z, tmp_z0, tmp_z1;
+  simde__m128i xy_re_128, xy_im_128;
+  simde__m128i output_z, tmp_z0, tmp_z1;
 
   // complex multiplication (x_re + jx_im)*(y_re + jy_im) = (x_re*y_re - x_im*y_im) + j(x_im*y_re + x_re*y_im)
 
   // the real part
-  xy_re_128 = _mm_sign_epi16(input_x,*(__m128i*)&nr_conjug2[0]);
-  xy_re_128 = _mm_madd_epi16(xy_re_128,input_y); //Re: (x_re*y_re - x_im*y_im)
+  xy_re_128 = simde_mm_sign_epi16(input_x,*(simde__m128i*)&nr_conjug2[0]);
+  xy_re_128 = simde_mm_madd_epi16(xy_re_128,input_y); //Re: (x_re*y_re - x_im*y_im)
 
   // the imag part
-  xy_im_128 = _mm_shufflelo_epi16(input_x,_MM_SHUFFLE(2,3,0,1));//permutes IQs for the low 64 bits as [I_a0 Q_a1 I_a2 Q_a3]_64bits to [Q_a1 I_a0 Q_a3 I_a2]_64bits
-  xy_im_128 = _mm_shufflehi_epi16(xy_im_128,_MM_SHUFFLE(2,3,0,1));//permutes IQs for the high 64 bits as [I_a0 Q_a1 I_a2 Q_a3]_64bits to [Q_a1 I_a0 Q_a3 I_a2]_64bits
-  xy_im_128 = _mm_madd_epi16(xy_im_128,input_y);//Im: (x_im*y_re + x_re*y_im)
+  xy_im_128 = simde_mm_shufflelo_epi16(input_x, SIMDE_MM_SHUFFLE(2,3,0,1));//permutes IQs for the low 64 bits as [I_a0 Q_a1 I_a2 Q_a3]_64bits to [Q_a1 I_a0 Q_a3 I_a2]_64bits
+  xy_im_128 = simde_mm_shufflehi_epi16(xy_im_128, SIMDE_MM_SHUFFLE(2,3,0,1));//permutes IQs for the high 64 bits as [I_a0 Q_a1 I_a2 Q_a3]_64bits to [Q_a1 I_a0 Q_a3 I_a2]_64bits
+  xy_im_128 = simde_mm_madd_epi16(xy_im_128,input_y);//Im: (x_im*y_re + x_re*y_im)
 
   //convert back to Q15 before packing
-  xy_re_128 = _mm_srai_epi32(xy_re_128,4);//(2^15/64*2*16)
-  xy_im_128 = _mm_srai_epi32(xy_im_128,4);
+  xy_re_128 = simde_mm_srai_epi32(xy_re_128,4);//(2^15/64*2*16)
+  xy_im_128 = simde_mm_srai_epi32(xy_im_128,4);
 
-  tmp_z0  = _mm_unpacklo_epi32(xy_re_128,xy_im_128);
+  tmp_z0  = simde_mm_unpacklo_epi32(xy_re_128,xy_im_128);
   //print_ints("unpack lo:",&tmp_z0[0]);
-  tmp_z1  = _mm_unpackhi_epi32(xy_re_128,xy_im_128);
+  tmp_z1  = simde_mm_unpackhi_epi32(xy_re_128,xy_im_128);
   //print_ints("unpack hi:",&tmp_z1[0]);
-  output_z = _mm_packs_epi32(tmp_z0,tmp_z1);
+  output_z = simde_mm_packs_epi32(tmp_z0,tmp_z1);
 
-  _mm_empty();
-  _m_empty();
+  simde_mm_empty();
+  simde_m_empty();
   return(output_z);
 }
 
@@ -1283,26 +978,26 @@ void nr_ulsch_conjch0_mult_ch1(int *ch0,
  //This function is used to compute the multiplications in the H_hermitian * H matrix
   short nr_conjugate[8]__attribute__((aligned(16))) = {-1,1,-1,1,-1,1,-1,1};
   unsigned short rb;
-  __m128i *dl_ch0_128,*dl_ch1_128, *ch0conj_ch1_128, mmtmpD0,mmtmpD1,mmtmpD2,mmtmpD3;
+  simde__m128i *dl_ch0_128,*dl_ch1_128, *ch0conj_ch1_128, mmtmpD0,mmtmpD1,mmtmpD2,mmtmpD3;
 
-  dl_ch0_128 = (__m128i *)ch0;
-  dl_ch1_128 = (__m128i *)ch1;
+  dl_ch0_128 = (simde__m128i *)ch0;
+  dl_ch1_128 = (simde__m128i *)ch1;
 
-  ch0conj_ch1_128 = (__m128i *)ch0conj_ch1;
+  ch0conj_ch1_128 = (simde__m128i *)ch0conj_ch1;
 
   for (rb=0; rb<3*nb_rb; rb++) {
 
-    mmtmpD0 = _mm_madd_epi16(dl_ch0_128[0],dl_ch1_128[0]);
-    mmtmpD1 = _mm_shufflelo_epi16(dl_ch0_128[0],_MM_SHUFFLE(2,3,0,1));
-    mmtmpD1 = _mm_shufflehi_epi16(mmtmpD1,_MM_SHUFFLE(2,3,0,1));
-    mmtmpD1 = _mm_sign_epi16(mmtmpD1,*(__m128i*)&nr_conjugate[0]);
-    mmtmpD1 = _mm_madd_epi16(mmtmpD1,dl_ch1_128[0]);
-    mmtmpD0 = _mm_srai_epi32(mmtmpD0,output_shift0);
-    mmtmpD1 = _mm_srai_epi32(mmtmpD1,output_shift0);
-    mmtmpD2 = _mm_unpacklo_epi32(mmtmpD0,mmtmpD1);
-    mmtmpD3 = _mm_unpackhi_epi32(mmtmpD0,mmtmpD1);
+    mmtmpD0 = simde_mm_madd_epi16(dl_ch0_128[0],dl_ch1_128[0]);
+    mmtmpD1 = simde_mm_shufflelo_epi16(dl_ch0_128[0], SIMDE_MM_SHUFFLE(2,3,0,1));
+    mmtmpD1 = simde_mm_shufflehi_epi16(mmtmpD1, SIMDE_MM_SHUFFLE(2,3,0,1));
+    mmtmpD1 = simde_mm_sign_epi16(mmtmpD1,*(simde__m128i*)&nr_conjugate[0]);
+    mmtmpD1 = simde_mm_madd_epi16(mmtmpD1,dl_ch1_128[0]);
+    mmtmpD0 = simde_mm_srai_epi32(mmtmpD0,output_shift0);
+    mmtmpD1 = simde_mm_srai_epi32(mmtmpD1,output_shift0);
+    mmtmpD2 = simde_mm_unpacklo_epi32(mmtmpD0,mmtmpD1);
+    mmtmpD3 = simde_mm_unpackhi_epi32(mmtmpD0,mmtmpD1);
 
-    ch0conj_ch1_128[0] = _mm_packs_epi32(mmtmpD2,mmtmpD3);
+    ch0conj_ch1_128[0] = simde_mm_packs_epi32(mmtmpD2,mmtmpD3);
 
     /*printf("\n Computing conjugates \n");
     print_shorts("ch0:",(int16_t*)&dl_ch0_128[0]);
@@ -1313,43 +1008,43 @@ void nr_ulsch_conjch0_mult_ch1(int *ch0,
     dl_ch1_128+=1;
     ch0conj_ch1_128+=1;
   }
-  _mm_empty();
-  _m_empty();
+  simde_mm_empty();
+  simde_m_empty();
 }
-__m128i nr_ulsch_comp_muli_sum(__m128i input_x,
-                         __m128i input_y,
-                         __m128i input_w,
-                         __m128i input_z,
-                         __m128i det)
+simde__m128i nr_ulsch_comp_muli_sum(simde__m128i input_x,
+                         simde__m128i input_y,
+                         simde__m128i input_w,
+                         simde__m128i input_z,
+                         simde__m128i det)
 {
   int16_t nr_conjug2[8]__attribute__((aligned(16))) = {1,-1,1,-1,1,-1,1,-1} ;
 
-  __m128i xy_re_128, xy_im_128, wz_re_128, wz_im_128;
-  __m128i output, tmp_z0, tmp_z1;
+  simde__m128i xy_re_128, xy_im_128, wz_re_128, wz_im_128;
+  simde__m128i output, tmp_z0, tmp_z1;
 
   // complex multiplication (x_re + jx_im)*(y_re + jy_im) = (x_re*y_re - x_im*y_im) + j(x_im*y_re + x_re*y_im)
   // the real part
-  xy_re_128 = _mm_sign_epi16(input_x,*(__m128i*)&nr_conjug2[0]);
-  xy_re_128 = _mm_madd_epi16(xy_re_128,input_y); //Re: (x_re*y_re - x_im*y_im)
+  xy_re_128 = simde_mm_sign_epi16(input_x,*(simde__m128i*)&nr_conjug2[0]);
+  xy_re_128 = simde_mm_madd_epi16(xy_re_128,input_y); //Re: (x_re*y_re - x_im*y_im)
 
   // the imag part
-  xy_im_128 = _mm_shufflelo_epi16(input_x,_MM_SHUFFLE(2,3,0,1));//permutes IQs for the low 64 bits as [I_a0 Q_a1 I_a2 Q_a3]_64bits to [Q_a1 I_a0 Q_a3 I_a2]_64bits
-  xy_im_128 = _mm_shufflehi_epi16(xy_im_128,_MM_SHUFFLE(2,3,0,1));//permutes IQs for the high 64 bits as [I_a0 Q_a1 I_a2 Q_a3]_64bits to [Q_a1 I_a0 Q_a3 I_a2]_64bits
-  xy_im_128 = _mm_madd_epi16(xy_im_128,input_y);//Im: (x_im*y_re + x_re*y_im)
+  xy_im_128 = simde_mm_shufflelo_epi16(input_x, SIMDE_MM_SHUFFLE(2,3,0,1));//permutes IQs for the low 64 bits as [I_a0 Q_a1 I_a2 Q_a3]_64bits to [Q_a1 I_a0 Q_a3 I_a2]_64bits
+  xy_im_128 = simde_mm_shufflehi_epi16(xy_im_128, SIMDE_MM_SHUFFLE(2,3,0,1));//permutes IQs for the high 64 bits as [I_a0 Q_a1 I_a2 Q_a3]_64bits to [Q_a1 I_a0 Q_a3 I_a2]_64bits
+  xy_im_128 = simde_mm_madd_epi16(xy_im_128,input_y);//Im: (x_im*y_re + x_re*y_im)
 
   // complex multiplication (w_re + jw_im)*(z_re + jz_im) = (w_re*z_re - w_im*z_im) + j(w_im*z_re + w_re*z_im)
   // the real part
-  wz_re_128 = _mm_sign_epi16(input_w,*(__m128i*)&nr_conjug2[0]);
-  wz_re_128 = _mm_madd_epi16(wz_re_128,input_z); //Re: (w_re*z_re - w_im*z_im)
+  wz_re_128 = simde_mm_sign_epi16(input_w,*(simde__m128i*)&nr_conjug2[0]);
+  wz_re_128 = simde_mm_madd_epi16(wz_re_128,input_z); //Re: (w_re*z_re - w_im*z_im)
 
   // the imag part
-  wz_im_128 = _mm_shufflelo_epi16(input_w,_MM_SHUFFLE(2,3,0,1));//permutes IQs for the low 64 bits as [I_a0 Q_a1 I_a2 Q_a3]_64bits to [Q_a1 I_a0 Q_a3 I_a2]_64bits
-  wz_im_128 = _mm_shufflehi_epi16(wz_im_128,_MM_SHUFFLE(2,3,0,1));//permutes IQs for the high 64 bits as [I_a0 Q_a1 I_a2 Q_a3]_64bits to [Q_a1 I_a0 Q_a3 I_a2]_64bits
-  wz_im_128 = _mm_madd_epi16(wz_im_128,input_z);//Im: (w_im*z_re + w_re*z_im)
+  wz_im_128 = simde_mm_shufflelo_epi16(input_w, SIMDE_MM_SHUFFLE(2,3,0,1));//permutes IQs for the low 64 bits as [I_a0 Q_a1 I_a2 Q_a3]_64bits to [Q_a1 I_a0 Q_a3 I_a2]_64bits
+  wz_im_128 = simde_mm_shufflehi_epi16(wz_im_128, SIMDE_MM_SHUFFLE(2,3,0,1));//permutes IQs for the high 64 bits as [I_a0 Q_a1 I_a2 Q_a3]_64bits to [Q_a1 I_a0 Q_a3 I_a2]_64bits
+  wz_im_128 = simde_mm_madd_epi16(wz_im_128,input_z);//Im: (w_im*z_re + w_re*z_im)
 
 
-  xy_re_128 = _mm_sub_epi32(xy_re_128, wz_re_128);
-  xy_im_128 = _mm_sub_epi32(xy_im_128, wz_im_128);
+  xy_re_128 = simde_mm_sub_epi32(xy_re_128, wz_re_128);
+  xy_im_128 = simde_mm_sub_epi32(xy_im_128, wz_im_128);
   //print_ints("rx_re:",(int32_t*)&xy_re_128[0]);
   //print_ints("rx_Img:",(int32_t*)&xy_im_128[0]);
   //divide by matrix det and convert back to Q15 before packing
@@ -1361,21 +1056,21 @@ __m128i nr_ulsch_comp_muli_sum(__m128i input_x,
 
   int b = log2_approx(sum_det) - 8;
   if (b > 0) {
-    xy_re_128 = _mm_srai_epi32(xy_re_128, b);
-    xy_im_128 = _mm_srai_epi32(xy_im_128, b);
+    xy_re_128 = simde_mm_srai_epi32(xy_re_128, b);
+    xy_im_128 = simde_mm_srai_epi32(xy_im_128, b);
   } else {
-    xy_re_128 = _mm_slli_epi32(xy_re_128, -b);
-    xy_im_128 = _mm_slli_epi32(xy_im_128, -b);
+    xy_re_128 = simde_mm_slli_epi32(xy_re_128, -b);
+    xy_im_128 = simde_mm_slli_epi32(xy_im_128, -b);
   }
 
-  tmp_z0  = _mm_unpacklo_epi32(xy_re_128,xy_im_128);
+  tmp_z0  = simde_mm_unpacklo_epi32(xy_re_128,xy_im_128);
   //print_ints("unpack lo:",&tmp_z0[0]);
-  tmp_z1  = _mm_unpackhi_epi32(xy_re_128,xy_im_128);
+  tmp_z1  = simde_mm_unpackhi_epi32(xy_re_128,xy_im_128);
   //print_ints("unpack hi:",&tmp_z1[0]);
-  output = _mm_packs_epi32(tmp_z0,tmp_z1);
+  output = simde_mm_packs_epi32(tmp_z0,tmp_z1);
 
-  _mm_empty();
-  _m_empty();
+  simde_mm_empty();
+  simde_m_empty();
   return(output);
 }
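The b = log2_approx(sum_det) - 8 step divides the numerator by roughly sum_det/256 using shifts alone, keeping the quotient near Q15 before simde_mm_packs_epi32; for small determinants b goes negative and the branch shifts left instead. A self-contained sketch with a stand-in for OAI's log2_approx (the helper below is illustrative):

#include <stdint.h>

/* E.g. sum_det = 1<<20 gives b = 12: scale by 2^-12 ~ 256/sum_det. */
static inline int32_t det_normalize(int32_t num, uint32_t sum_det)
{
  int l2 = 0;
  while (l2 < 31 && (1u << l2) < sum_det) /* ~ ceil(log2), as log2_approx */
    l2++;
  int b = l2 - 8;
  return b > 0 ? num >> b : num << -b; /* srai / slli, as in the code above */
}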
 /* Zero Forcing Rx function: nr_construct_HhH_elements()
@@ -1407,54 +1102,54 @@ void nr_ulsch_construct_HhH_elements(int *conjch00_ch00,
 {
  //This function is used to construct the elements of the (H_hermitian * H) matrix
   unsigned short rb;
-  __m128i *conjch00_ch00_128, *conjch01_ch01_128, *conjch11_ch11_128, *conjch10_ch10_128;
-  __m128i *conjch20_ch20_128, *conjch21_ch21_128, *conjch30_ch30_128, *conjch31_ch31_128;
-  __m128i *conjch00_ch01_128, *conjch01_ch00_128, *conjch10_ch11_128, *conjch11_ch10_128;
-  __m128i *conjch20_ch21_128, *conjch21_ch20_128, *conjch30_ch31_128, *conjch31_ch30_128;
-  __m128i *after_mf_00_128, *after_mf_01_128, *after_mf_10_128, *after_mf_11_128;
-
-  conjch00_ch00_128 = (__m128i *)conjch00_ch00;
-  conjch01_ch01_128 = (__m128i *)conjch01_ch01;
-  conjch11_ch11_128 = (__m128i *)conjch11_ch11;
-  conjch10_ch10_128 = (__m128i *)conjch10_ch10;
-
-  conjch20_ch20_128 = (__m128i *)conjch20_ch20;
-  conjch21_ch21_128 = (__m128i *)conjch21_ch21;
-  conjch30_ch30_128 = (__m128i *)conjch30_ch30;
-  conjch31_ch31_128 = (__m128i *)conjch31_ch31;
-
-  conjch00_ch01_128 = (__m128i *)conjch00_ch01;
-  conjch01_ch00_128 = (__m128i *)conjch01_ch00;
-  conjch10_ch11_128 = (__m128i *)conjch10_ch11;
-  conjch11_ch10_128 = (__m128i *)conjch11_ch10;
-
-  conjch20_ch21_128 = (__m128i *)conjch20_ch21;
-  conjch21_ch20_128 = (__m128i *)conjch21_ch20;
-  conjch30_ch31_128 = (__m128i *)conjch30_ch31;
-  conjch31_ch30_128 = (__m128i *)conjch31_ch30;
-
-  after_mf_00_128 = (__m128i *)after_mf_00;
-  after_mf_01_128 = (__m128i *)after_mf_01;
-  after_mf_10_128 = (__m128i *)after_mf_10;
-  after_mf_11_128 = (__m128i *)after_mf_11;
+  simde__m128i *conjch00_ch00_128, *conjch01_ch01_128, *conjch11_ch11_128, *conjch10_ch10_128;
+  simde__m128i *conjch20_ch20_128, *conjch21_ch21_128, *conjch30_ch30_128, *conjch31_ch31_128;
+  simde__m128i *conjch00_ch01_128, *conjch01_ch00_128, *conjch10_ch11_128, *conjch11_ch10_128;
+  simde__m128i *conjch20_ch21_128, *conjch21_ch20_128, *conjch30_ch31_128, *conjch31_ch30_128;
+  simde__m128i *after_mf_00_128, *after_mf_01_128, *after_mf_10_128, *after_mf_11_128;
+
+  conjch00_ch00_128 = (simde__m128i *)conjch00_ch00;
+  conjch01_ch01_128 = (simde__m128i *)conjch01_ch01;
+  conjch11_ch11_128 = (simde__m128i *)conjch11_ch11;
+  conjch10_ch10_128 = (simde__m128i *)conjch10_ch10;
+
+  conjch20_ch20_128 = (simde__m128i *)conjch20_ch20;
+  conjch21_ch21_128 = (simde__m128i *)conjch21_ch21;
+  conjch30_ch30_128 = (simde__m128i *)conjch30_ch30;
+  conjch31_ch31_128 = (simde__m128i *)conjch31_ch31;
+
+  conjch00_ch01_128 = (simde__m128i *)conjch00_ch01;
+  conjch01_ch00_128 = (simde__m128i *)conjch01_ch00;
+  conjch10_ch11_128 = (simde__m128i *)conjch10_ch11;
+  conjch11_ch10_128 = (simde__m128i *)conjch11_ch10;
+
+  conjch20_ch21_128 = (simde__m128i *)conjch20_ch21;
+  conjch21_ch20_128 = (simde__m128i *)conjch21_ch20;
+  conjch30_ch31_128 = (simde__m128i *)conjch30_ch31;
+  conjch31_ch30_128 = (simde__m128i *)conjch31_ch30;
+
+  after_mf_00_128 = (simde__m128i *)after_mf_00;
+  after_mf_01_128 = (simde__m128i *)after_mf_01;
+  after_mf_10_128 = (simde__m128i *)after_mf_10;
+  after_mf_11_128 = (simde__m128i *)after_mf_11;
 
   for (rb=0; rb<3*nb_rb; rb++) {
 
-    after_mf_00_128[0] =_mm_adds_epi16(conjch00_ch00_128[0],conjch10_ch10_128[0]);//00_00 + 10_10
-    if (conjch20_ch20 != NULL) after_mf_00_128[0] =_mm_adds_epi16(after_mf_00_128[0],conjch20_ch20_128[0]);
-    if (conjch30_ch30 != NULL) after_mf_00_128[0] =_mm_adds_epi16(after_mf_00_128[0],conjch30_ch30_128[0]);
+    after_mf_00_128[0] =simde_mm_adds_epi16(conjch00_ch00_128[0],conjch10_ch10_128[0]);//00_00 + 10_10
+    if (conjch20_ch20 != NULL) after_mf_00_128[0] =simde_mm_adds_epi16(after_mf_00_128[0],conjch20_ch20_128[0]);
+    if (conjch30_ch30 != NULL) after_mf_00_128[0] =simde_mm_adds_epi16(after_mf_00_128[0],conjch30_ch30_128[0]);
 
-    after_mf_11_128[0] =_mm_adds_epi16(conjch01_ch01_128[0], conjch11_ch11_128[0]); //01_01 + 11_11
-    if (conjch21_ch21 != NULL) after_mf_11_128[0] =_mm_adds_epi16(after_mf_11_128[0],conjch21_ch21_128[0]);
-    if (conjch31_ch31 != NULL) after_mf_11_128[0] =_mm_adds_epi16(after_mf_11_128[0],conjch31_ch31_128[0]);
+    after_mf_11_128[0] =simde_mm_adds_epi16(conjch01_ch01_128[0], conjch11_ch11_128[0]); //01_01 + 11_11
+    if (conjch21_ch21 != NULL) after_mf_11_128[0] =simde_mm_adds_epi16(after_mf_11_128[0],conjch21_ch21_128[0]);
+    if (conjch31_ch31 != NULL) after_mf_11_128[0] =simde_mm_adds_epi16(after_mf_11_128[0],conjch31_ch31_128[0]);
 
-    after_mf_01_128[0] =_mm_adds_epi16(conjch00_ch01_128[0], conjch10_ch11_128[0]);//00_01 + 10_11
-    if (conjch20_ch21 != NULL) after_mf_01_128[0] =_mm_adds_epi16(after_mf_01_128[0],conjch20_ch21_128[0]);
-    if (conjch30_ch31 != NULL) after_mf_01_128[0] =_mm_adds_epi16(after_mf_01_128[0],conjch30_ch31_128[0]);
+    after_mf_01_128[0] =simde_mm_adds_epi16(conjch00_ch01_128[0], conjch10_ch11_128[0]);//00_01 + 10_11
+    if (conjch20_ch21 != NULL) after_mf_01_128[0] =simde_mm_adds_epi16(after_mf_01_128[0],conjch20_ch21_128[0]);
+    if (conjch30_ch31 != NULL) after_mf_01_128[0] =simde_mm_adds_epi16(after_mf_01_128[0],conjch30_ch31_128[0]);
 
-    after_mf_10_128[0] =_mm_adds_epi16(conjch01_ch00_128[0], conjch11_ch10_128[0]);//01_00 + 11_10
-    if (conjch21_ch20 != NULL) after_mf_10_128[0] =_mm_adds_epi16(after_mf_10_128[0],conjch21_ch20_128[0]);
-    if (conjch31_ch30 != NULL) after_mf_10_128[0] =_mm_adds_epi16(after_mf_10_128[0],conjch31_ch30_128[0]);
+    after_mf_10_128[0] =simde_mm_adds_epi16(conjch01_ch00_128[0], conjch11_ch10_128[0]);//01_00 + 11_10
+    if (conjch21_ch20 != NULL) after_mf_10_128[0] =simde_mm_adds_epi16(after_mf_10_128[0],conjch21_ch20_128[0]);
+    if (conjch31_ch30 != NULL) after_mf_10_128[0] =simde_mm_adds_epi16(after_mf_10_128[0],conjch31_ch30_128[0]);
 
 #ifdef DEBUG_DLSCH_DEMOD
     if ((rb<=30))
@@ -1491,8 +1186,8 @@ void nr_ulsch_construct_HhH_elements(int *conjch00_ch00,
     after_mf_10_128 += 1;
     after_mf_11_128 += 1;
   }
-  _mm_empty();
-  _m_empty();
+  simde_mm_empty();
+  simde_m_empty();
 }
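// [Editor's note — illustrative sketch, not part of the patch.] The hunk above
// is a mechanical _mm_* -> simde_mm_* rename; the underlying pattern is a
// saturating 16-bit accumulation of per-antenna conj(h_i)*h_j products, where
// the antenna-2/3 terms are optional (NULL when fewer RX antennas are
// configured). SIMDE maps simde_mm_adds_epi16 to PADDSW on x86 and to NEON
// vqaddq_s16 on Arm. The helper name is hypothetical:
#include <stddef.h>
#include <simde/x86/sse2.h>

static inline simde__m128i acc_hhh_element(const simde__m128i *a0,
                                           const simde__m128i *a1,
                                           const simde__m128i *a2, // may be NULL
                                           const simde__m128i *a3) // may be NULL
{
  simde__m128i acc = simde_mm_adds_epi16(*a0, *a1); // saturating add, no wraparound
  if (a2 != NULL)
    acc = simde_mm_adds_epi16(acc, *a2);
  if (a3 != NULL)
    acc = simde_mm_adds_epi16(acc, *a3);
  return acc;
}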
 
 /*
@@ -1733,9 +1428,9 @@ uint8_t nr_ulsch_mmse_2layers(NR_DL_FRAME_PARMS *frame_parms,
 
   // Add noise_var such that: H^h * H + noise_var * I
   if (noise_var != 0) {
-    __m128i nvar_128i = simde_mm_set1_epi32(noise_var);
-    __m128i *af_mf_00_128i = (__m128i *)af_mf_00;
-    __m128i *af_mf_11_128i = (__m128i *)af_mf_11;
+    simde__m128i nvar_128i = simde_mm_set1_epi32(noise_var);
+    simde__m128i *af_mf_00_128i = (simde__m128i *)af_mf_00;
+    simde__m128i *af_mf_11_128i = (simde__m128i *)af_mf_11;
     for (int k = 0; k < 3 * nb_rb_0; k++) {
       af_mf_00_128i[0] = simde_mm_add_epi32(af_mf_00_128i[0], nvar_128i);
       af_mf_11_128i[0] = simde_mm_add_epi32(af_mf_11_128i[0], nvar_128i);
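// [Editor's note — illustrative sketch, not part of the patch.] The hunk above
// implements the MMSE regularization H^H*H + sigma^2*I: the broadcast noise
// variance is added to the two diagonal-element streams only. A self-contained
// restatement (helper name hypothetical; n_vec corresponds to 3 * nb_rb_0):
#include <stdint.h>
#include <simde/x86/sse2.h>

static inline void add_noise_to_diag(simde__m128i *d00, simde__m128i *d11,
                                     int32_t noise_var, int n_vec)
{
  const simde__m128i nvar = simde_mm_set1_epi32(noise_var); // sigma^2 in all 32-bit lanes
  for (int k = 0; k < n_vec; k++) {
    d00[k] = simde_mm_add_epi32(d00[k], nvar); // (H^H H)_00 + sigma^2
    d11[k] = simde_mm_add_epi32(d11[k], nvar); // (H^H H)_11 + sigma^2
  }
}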
@@ -1766,41 +1461,42 @@ uint8_t nr_ulsch_mmse_2layers(NR_DL_FRAME_PARMS *frame_parms,
      *
      *
      **************************************************************************/
-  __m128i *ul_ch_mag128_0 = NULL, *ul_ch_mag128b_0 = NULL, *ul_ch_mag128c_0 = NULL; // Layer 0
-  __m128i *ul_ch_mag128_1 = NULL, *ul_ch_mag128b_1 = NULL, *ul_ch_mag128c_1 = NULL; // Layer 1
-  __m128i mmtmpD0, mmtmpD1, mmtmpD2, mmtmpD3;
-  __m128i QAM_amp128 = {0}, QAM_amp128b = {0}, QAM_amp128c = {0};
+  simde__m128i *ul_ch_mag128_0 = NULL, *ul_ch_mag128b_0 = NULL, *ul_ch_mag128c_0 = NULL; // Layer 0
+  simde__m128i *ul_ch_mag128_1 = NULL, *ul_ch_mag128b_1 = NULL, *ul_ch_mag128c_1 = NULL; // Layer 1
+  simde__m128i mmtmpD0, mmtmpD1, mmtmpD2, mmtmpD3;
+  simde__m128i QAM_amp128 = {0}, QAM_amp128b = {0}, QAM_amp128c = {0};
+
+  simde__m128i *determ_fin_128 = (simde__m128i *)&determ_fin[0];
 
-  __m128i *determ_fin_128 = (__m128i *)&determ_fin[0];
+  simde__m128i *rxdataF_comp128_0 = (simde__m128i *)&rxdataF_comp[0][symbol * (off + nb_rb * 12)]; // aatx=0 @ aarx =0
+  simde__m128i *rxdataF_comp128_1 = (simde__m128i *)&rxdataF_comp[n_rx][symbol * (off + nb_rb * 12)]; // aatx=1 @ aarx =0
 
-  __m128i *rxdataF_comp128_0 = (__m128i *)&rxdataF_comp[0][symbol * (off + nb_rb * 12)]; // aatx=0 @ aarx =0
-  __m128i *rxdataF_comp128_1 = (__m128i *)&rxdataF_comp[n_rx][symbol * (off + nb_rb * 12)]; // aatx=1 @ aarx =0
+  simde__m128i *after_mf_a_128 = (simde__m128i *)af_mf_00;
+  simde__m128i *after_mf_b_128 = (simde__m128i *)af_mf_01;
+  simde__m128i *after_mf_c_128 = (simde__m128i *)af_mf_10;
+  simde__m128i *after_mf_d_128 = (simde__m128i *)af_mf_11;
 
-  __m128i *after_mf_a_128 = (__m128i *)af_mf_00;
-  __m128i *after_mf_b_128 = (__m128i *)af_mf_01;
-  __m128i *after_mf_c_128 = (__m128i *)af_mf_10;
-  __m128i *after_mf_d_128 = (__m128i *)af_mf_11;
 
   if (mod_order > 2) {
     if (mod_order == 4) {
-      QAM_amp128 = _mm_set1_epi16(QAM16_n1); // 2/sqrt(10)
-      QAM_amp128b = _mm_setzero_si128();
-      QAM_amp128c = _mm_setzero_si128();
+      QAM_amp128 = simde_mm_set1_epi16(QAM16_n1); // 2/sqrt(10)
+      QAM_amp128b = simde_mm_setzero_si128();
+      QAM_amp128c = simde_mm_setzero_si128();
     } else if (mod_order == 6) {
-      QAM_amp128 = _mm_set1_epi16(QAM64_n1); // 4/sqrt{42}
-      QAM_amp128b = _mm_set1_epi16(QAM64_n2); // 2/sqrt{42}
-      QAM_amp128c = _mm_setzero_si128();
+      QAM_amp128 = simde_mm_set1_epi16(QAM64_n1); // 4/sqrt(42)
+      QAM_amp128b = simde_mm_set1_epi16(QAM64_n2); // 2/sqrt(42)
+      QAM_amp128c = simde_mm_setzero_si128();
     } else if (mod_order == 8) {
-      QAM_amp128 = _mm_set1_epi16(QAM256_n1);
-      QAM_amp128b = _mm_set1_epi16(QAM256_n2);
-      QAM_amp128c = _mm_set1_epi16(QAM256_n3);
+      QAM_amp128 = simde_mm_set1_epi16(QAM256_n1);
+      QAM_amp128b = simde_mm_set1_epi16(QAM256_n2);
+      QAM_amp128c = simde_mm_set1_epi16(QAM256_n3);
     }
-    ul_ch_mag128_0 = (__m128i *)&ul_ch_mag[0][symbol * (off + nb_rb * 12)];
-    ul_ch_mag128b_0 = (__m128i *)&ul_ch_magb[0][symbol * (off + nb_rb * 12)];
-    ul_ch_mag128c_0 = (__m128i *)&ul_ch_magc[0][symbol * (off + nb_rb * 12)];
-    ul_ch_mag128_1 = (__m128i *)&ul_ch_mag[frame_parms->nb_antennas_rx][symbol * (off + nb_rb * 12)];
-    ul_ch_mag128b_1 = (__m128i *)&ul_ch_magb[frame_parms->nb_antennas_rx][symbol * (off + nb_rb * 12)];
-    ul_ch_mag128c_1 = (__m128i *)&ul_ch_magc[frame_parms->nb_antennas_rx][symbol * (off + nb_rb * 12)];
+    ul_ch_mag128_0 = (simde__m128i *)&ul_ch_mag[0][symbol * (off + nb_rb * 12)];
+    ul_ch_mag128b_0 = (simde__m128i *)&ul_ch_magb[0][symbol * (off + nb_rb * 12)];
+    ul_ch_mag128c_0 = (simde__m128i *)&ul_ch_magc[0][symbol * (off + nb_rb * 12)];
+    ul_ch_mag128_1 = (simde__m128i *)&ul_ch_mag[frame_parms->nb_antennas_rx][symbol * (off + nb_rb * 12)];
+    ul_ch_mag128b_1 = (simde__m128i *)&ul_ch_magb[frame_parms->nb_antennas_rx][symbol * (off + nb_rb * 12)];
+    ul_ch_mag128c_1 = (simde__m128i *)&ul_ch_magc[frame_parms->nb_antennas_rx][symbol * (off + nb_rb * 12)];
   }
 
   for (int rb = 0; rb < 3 * nb_rb_0; rb++) {
@@ -1815,35 +1511,35 @@ uint8_t nr_ulsch_mmse_2layers(NR_DL_FRAME_PARMS *frame_parms,
 
       int b = log2_approx(sum_det) - 8;
       if (b > 0) {
-        mmtmpD2 = _mm_srai_epi32(determ_fin_128[0], b);
+        mmtmpD2 = simde_mm_srai_epi32(determ_fin_128[0], b);
       } else {
-        mmtmpD2 = _mm_slli_epi32(determ_fin_128[0], -b);
+        mmtmpD2 = simde_mm_slli_epi32(determ_fin_128[0], -b);
       }
-      mmtmpD3 = _mm_unpacklo_epi32(mmtmpD2, mmtmpD2);
-      mmtmpD2 = _mm_unpackhi_epi32(mmtmpD2, mmtmpD2);
-      mmtmpD2 = _mm_packs_epi32(mmtmpD3, mmtmpD2);
+      mmtmpD3 = simde_mm_unpacklo_epi32(mmtmpD2, mmtmpD2);
+      mmtmpD2 = simde_mm_unpackhi_epi32(mmtmpD2, mmtmpD2);
+      mmtmpD2 = simde_mm_packs_epi32(mmtmpD3, mmtmpD2);
 
       // Layer 0
       ul_ch_mag128_0[0] = mmtmpD2;
       ul_ch_mag128b_0[0] = mmtmpD2;
       ul_ch_mag128c_0[0] = mmtmpD2;
-      ul_ch_mag128_0[0] = _mm_mulhi_epi16(ul_ch_mag128_0[0], QAM_amp128);
-      ul_ch_mag128_0[0] = _mm_slli_epi16(ul_ch_mag128_0[0], 1);
-      ul_ch_mag128b_0[0] = _mm_mulhi_epi16(ul_ch_mag128b_0[0], QAM_amp128b);
-      ul_ch_mag128b_0[0] = _mm_slli_epi16(ul_ch_mag128b_0[0], 1);
-      ul_ch_mag128c_0[0] = _mm_mulhi_epi16(ul_ch_mag128c_0[0], QAM_amp128c);
-      ul_ch_mag128c_0[0] = _mm_slli_epi16(ul_ch_mag128c_0[0], 1);
+      ul_ch_mag128_0[0] = simde_mm_mulhi_epi16(ul_ch_mag128_0[0], QAM_amp128);
+      ul_ch_mag128_0[0] = simde_mm_slli_epi16(ul_ch_mag128_0[0], 1);
+      ul_ch_mag128b_0[0] = simde_mm_mulhi_epi16(ul_ch_mag128b_0[0], QAM_amp128b);
+      ul_ch_mag128b_0[0] = simde_mm_slli_epi16(ul_ch_mag128b_0[0], 1);
+      ul_ch_mag128c_0[0] = simde_mm_mulhi_epi16(ul_ch_mag128c_0[0], QAM_amp128c);
+      ul_ch_mag128c_0[0] = simde_mm_slli_epi16(ul_ch_mag128c_0[0], 1);
 
       // Layer 1
       ul_ch_mag128_1[0] = mmtmpD2;
       ul_ch_mag128b_1[0] = mmtmpD2;
       ul_ch_mag128c_1[0] = mmtmpD2;
-      ul_ch_mag128_1[0] = _mm_mulhi_epi16(ul_ch_mag128_1[0], QAM_amp128);
-      ul_ch_mag128_1[0] = _mm_slli_epi16(ul_ch_mag128_1[0], 1);
-      ul_ch_mag128b_1[0] = _mm_mulhi_epi16(ul_ch_mag128b_1[0], QAM_amp128b);
-      ul_ch_mag128b_1[0] = _mm_slli_epi16(ul_ch_mag128b_1[0], 1);
-      ul_ch_mag128c_1[0] = _mm_mulhi_epi16(ul_ch_mag128c_1[0], QAM_amp128c);
-      ul_ch_mag128c_1[0] = _mm_slli_epi16(ul_ch_mag128c_1[0], 1);
+      ul_ch_mag128_1[0] = simde_mm_mulhi_epi16(ul_ch_mag128_1[0], QAM_amp128);
+      ul_ch_mag128_1[0] = simde_mm_slli_epi16(ul_ch_mag128_1[0], 1);
+      ul_ch_mag128b_1[0] = simde_mm_mulhi_epi16(ul_ch_mag128b_1[0], QAM_amp128b);
+      ul_ch_mag128b_1[0] = simde_mm_slli_epi16(ul_ch_mag128b_1[0], 1);
+      ul_ch_mag128c_1[0] = simde_mm_mulhi_epi16(ul_ch_mag128c_1[0], QAM_amp128c);
+      ul_ch_mag128c_1[0] = simde_mm_slli_epi16(ul_ch_mag128c_1[0], 1);
     }
 
     // multiply by channel Inv
@@ -1885,8 +1581,8 @@ uint8_t nr_ulsch_mmse_2layers(NR_DL_FRAME_PARMS *frame_parms,
     after_mf_c_128 += 1;
     after_mf_d_128 += 1;
   }
-  _mm_empty();
-  _m_empty();
+  simde_mm_empty();
+  simde_m_empty();
    return(0);
 }
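// [Editor's note — illustrative sketch, not part of the patch.] The magnitude
// path above rescales the 32-bit determinant into 16-bit range before the
// QAM_amp multiplies: shift right when log2(sum_det) > 8, left otherwise, then
// duplicate each 32-bit lane and saturate-pack to epi16 so both halves of each
// complex sample see the same scale. Isolated, assuming log2_approx(x) behaves
// like floor(log2(x)) as in the surrounding code:
#include <simde/x86/sse2.h>

static inline simde__m128i determ_to_epi16(simde__m128i determ, int log2_sum_det)
{
  const int b = log2_sum_det - 8;
  simde__m128i d = (b > 0) ? simde_mm_srai_epi32(determ, b)   // scale down
                           : simde_mm_slli_epi32(determ, -b); // scale up
  simde__m128i lo = simde_mm_unpacklo_epi32(d, d); // lanes d0,d0,d1,d1
  simde__m128i hi = simde_mm_unpackhi_epi32(d, d); // lanes d2,d2,d3,d3
  return simde_mm_packs_epi32(lo, hi);             // saturating 32->16 pack
}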
 
diff --git a/openair1/PHY/NR_TRANSPORT/nr_ulsch_llr_computation.c b/openair1/PHY/NR_TRANSPORT/nr_ulsch_llr_computation.c
index 0578587e4e00cb361e8bf34208a0a68902cbeaff..f4d526960e07cf8df42d46c597ad8924fbb87e07 100644
--- a/openair1/PHY/NR_TRANSPORT/nr_ulsch_llr_computation.c
+++ b/openair1/PHY/NR_TRANSPORT/nr_ulsch_llr_computation.c
@@ -71,41 +71,23 @@ void nr_ulsch_16qam_llr(int32_t *rxdataF_comp,
                         uint8_t  symbol)
 {
 
-#if defined(__x86_64__) || defined(__i386__)
-  __m256i *rxF = (__m256i*)rxdataF_comp;
-  __m256i *ch_mag;
-  __m256i llr256[2];
-  register __m256i xmm0;
+  simde__m256i *rxF = (simde__m256i*)rxdataF_comp;
+  simde__m256i *ch_mag;
+  simde__m256i llr256[2];
+  register simde__m256i xmm0;
   uint32_t *llr32;
-#elif defined(__arm__) || defined(__aarch64__)
-  int16x8_t *rxF = (int16x8_t*)&rxdataF_comp;
-  int16x8_t *ch_mag;
-  int16x8_t xmm0;
-  int16_t *llr16;
-#endif
-
-
   int i;
 
   int off = ((nb_rb&1) == 1)? 4:0;
 
-#if defined(__x86_64__) || defined(__i386__)
-    llr32 = (uint32_t*)ulsch_llr;
-#elif defined(__arm__) || defined(__aarch64__)
-    llr16 = (int16_t*)ulsch_llr;
-#endif
-
-#if defined(__x86_64__) || defined(__i386__)
-    ch_mag = (__m256i*)&ul_ch_mag[(symbol*(off+(nb_rb*12)))];
-#elif defined(__arm__) || defined(__aarch64__)
-  ch_mag = (int16x8_t*)&ul_ch_mag[(symbol*nb_rb*12)];
-#endif
+  llr32 = (uint32_t*)ulsch_llr;
+
+  ch_mag = (simde__m256i*)&ul_ch_mag[(symbol*(off+(nb_rb*12)))];
   unsigned char len_mod8 = nb_re&7;
   nb_re >>= 3;  // length in 256-bit words (8 REs each)
   nb_re += (len_mod8 == 0 ? 0 : 1);
 
   for (i=0; i<nb_re; i++) {
-#if defined(__x86_64__) || defined(__i386)
     xmm0 = simde_mm256_abs_epi16(rxF[i]); // registers of even index in xmm0-> |y_R|, registers of odd index in xmm0-> |y_I|
     xmm0 = simde_mm256_subs_epi16(ch_mag[i],xmm0); // registers of even index in xmm0-> |h|^2-|y_R|, registers of odd index in xmm0-> |h|^2-|y_I|
  
@@ -145,35 +127,11 @@ void nr_ulsch_16qam_llr(int32_t *rxdataF_comp,
     llr32[15] = simde_mm256_extract_epi32(llr256[1],7); // llr32[15] low 16 bits-> |h|^2-|y_R|, high 16 bits-> |h|^2-|y_I|
 
     llr32+=16;
-#elif defined(__arm__) || defined(__aarch64__)
-    xmm0 = vabsq_s16(rxF[i]);
-    xmm0 = vqsubq_s16((*(__m128i*)&ones[0]),xmm0);
-
-    llr16[0]  = vgetq_lane_s16(rxF[i],0);
-    llr16[1]  = vgetq_lane_s16(rxF[i],1);
-    llr16[2]  = vgetq_lane_s16(xmm0,0);
-    llr16[3]  = vgetq_lane_s16(xmm0,1);
-    llr16[4]  = vgetq_lane_s16(rxF[i],2);
-    llr16[5]  = vgetq_lane_s16(rxF[i],3);
-    llr16[6]  = vgetq_lane_s16(xmm0,2);
-    llr16[7]  = vgetq_lane_s16(xmm0,3);
-    llr16[8]  = vgetq_lane_s16(rxF[i],4);
-    llr16[9]  = vgetq_lane_s16(rxF[i],5);
-    llr16[10] = vgetq_lane_s16(xmm0,4);
-    llr16[11] = vgetq_lane_s16(xmm0,5);
-    llr16[12] = vgetq_lane_s16(rxF[i],6);
-    llr16[13] = vgetq_lane_s16(rxF[i],6);
-    llr16[14] = vgetq_lane_s16(xmm0,7);
-    llr16[15] = vgetq_lane_s16(xmm0,7);
-    llr16+=16;
-#endif
 
   }
 
-#if defined(__x86_64__) || defined(__i386__)
-  _mm_empty();
-  _m_empty();
-#endif
+  simde_mm_empty();
+  simde_m_empty();
 }
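// [Editor's note — illustrative sketch, not part of the patch.] Per resource
// element, the AVX2 loop above computes the standard max-log 16-QAM LLRs: the
// first bit pair is the matched-filter output itself, the second pair is the
// scaled channel magnitude minus the absolute received value. One scalar RE,
// saturation omitted for brevity (helper name hypothetical):
#include <stdint.h>

static inline void llr_qam16_ref(int16_t yr, int16_t yi, int16_t ch_mag,
                                 int16_t llr[4])
{
  llr[0] = yr;                                      // LLR of bit 0
  llr[1] = yi;                                      // LLR of bit 1
  llr[2] = (int16_t)(ch_mag - (yr < 0 ? -yr : yr)); // ch_mag - |y_R|
  llr[3] = (int16_t)(ch_mag - (yi < 0 ? -yi : yi)); // ch_mag - |y_I|
}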
 
 //----------------------------------------------------------------------------------------------
@@ -190,25 +148,14 @@ void nr_ulsch_64qam_llr(int32_t *rxdataF_comp,
 {
   int off = ((nb_rb&1) == 1)? 4:0;
 
-#if defined(__x86_64__) || defined(__i386__)
-  __m256i *rxF = (__m256i*)rxdataF_comp;
-  __m256i *ch_mag,*ch_magb;
-  register __m256i xmm0,xmm1,xmm2;
-#elif defined(__arm__) || defined(__aarch64__)
-  int16x8_t *rxF = (int16x8_t*)&rxdataF_comp;
-  int16x8_t *ch_mag,*ch_magb; // [hna] This should be uncommented once channel estimation is implemented
-  int16x8_t xmm0,xmm1,xmm2;
-#endif
+  simde__m256i *rxF = (simde__m256i*)rxdataF_comp;
+  simde__m256i *ch_mag,*ch_magb;
+  register simde__m256i xmm0,xmm1,xmm2;
 
   int i;
 
-#if defined(__x86_64__) || defined(__i386__)
-  ch_mag = (__m256i*)&ul_ch_mag[(symbol*(off+(nb_rb*12)))];
-  ch_magb = (__m256i*)&ul_ch_magb[(symbol*(off+(nb_rb*12)))];
-#elif defined(__arm__) || defined(__aarch64__)
-  ch_mag = (int16x8_t*)&ul_ch_mag[(symbol*nb_rb*12)];
-  ch_magb = (int16x8_t*)&ul_ch_magb[(symbol*nb_rb*12)];
-#endif
+  ch_mag = (simde__m256i*)&ul_ch_mag[(symbol*(off+(nb_rb*12)))];
+  ch_magb = (simde__m256i*)&ul_ch_magb[(symbol*(off+(nb_rb*12)))];
 
   int len_mod8 = nb_re&7;
   nb_re    = nb_re>>3;  // length in 256-bit words (8 REs each)
@@ -216,36 +163,20 @@ void nr_ulsch_64qam_llr(int32_t *rxdataF_comp,
 
   for (i=0; i<nb_re; i++) {
     xmm0 = rxF[i];
-#if defined(__x86_64__) || defined(__i386__)
     xmm1 = simde_mm256_abs_epi16(xmm0);
     xmm1 = simde_mm256_subs_epi16(ch_mag[i],xmm1);
     xmm2 = simde_mm256_abs_epi16(xmm1);
     xmm2 = simde_mm256_subs_epi16(ch_magb[i],xmm2);
-#elif defined(__arm__) || defined(__aarch64__)
-    xmm1 = vabsq_s16(xmm0);
-    xmm1 = vsubq_s16(ch_mag[i],xmm1);
-    xmm2 = vabsq_s16(xmm1);
-    xmm2 = vsubq_s16(ch_magb[i],xmm2);
-#endif
     
     // ---------------------------------------
     // 1st RE
     // ---------------------------------------
-#if defined(__x86_64__) || defined(__i386__)
     ulsch_llr[0] = simde_mm256_extract_epi16(xmm0,0);
     ulsch_llr[1] = simde_mm256_extract_epi16(xmm0,1);
     ulsch_llr[2] = simde_mm256_extract_epi16(xmm1,0);
     ulsch_llr[3] = simde_mm256_extract_epi16(xmm1,1);
     ulsch_llr[4] = simde_mm256_extract_epi16(xmm2,0);
     ulsch_llr[5] = simde_mm256_extract_epi16(xmm2,1);
-#elif defined(__arm__) || defined(__aarch64__)
-    ulsch_llr[0] = vgetq_lane_s16(xmm0,0);
-    ulsch_llr[1] = vgetq_lane_s16(xmm0,1);
-    ulsch_llr[2] = vgetq_lane_s16(xmm1,0);
-    ulsch_llr[3] = vgetq_lane_s16(xmm1,1);
-    ulsch_llr[4] = vgetq_lane_s16(xmm2,0);
-    ulsch_llr[5] = vgetq_lane_s16(xmm2,1);
-#endif
     // ---------------------------------------
 
     ulsch_llr+=6;
@@ -253,21 +184,12 @@ void nr_ulsch_64qam_llr(int32_t *rxdataF_comp,
     // ---------------------------------------
     // 2nd RE
     // ---------------------------------------
-#if defined(__x86_64__) || defined(__i386__)
     ulsch_llr[0] = simde_mm256_extract_epi16(xmm0,2);
     ulsch_llr[1] = simde_mm256_extract_epi16(xmm0,3);
     ulsch_llr[2] = simde_mm256_extract_epi16(xmm1,2);
     ulsch_llr[3] = simde_mm256_extract_epi16(xmm1,3);
     ulsch_llr[4] = simde_mm256_extract_epi16(xmm2,2);
     ulsch_llr[5] = simde_mm256_extract_epi16(xmm2,3);
-#elif defined(__arm__) || defined(__aarch64__)
-    ulsch_llr[2] = vgetq_lane_s16(xmm0,2);
-    ulsch_llr[3] = vgetq_lane_s16(xmm0,3);
-    ulsch_llr[2] = vgetq_lane_s16(xmm1,2);
-    ulsch_llr[3] = vgetq_lane_s16(xmm1,3);
-    ulsch_llr[4] = vgetq_lane_s16(xmm2,2);
-    ulsch_llr[5] = vgetq_lane_s16(xmm2,3);
-#endif
     // ---------------------------------------
 
     ulsch_llr+=6;
@@ -275,21 +197,12 @@ void nr_ulsch_64qam_llr(int32_t *rxdataF_comp,
     // ---------------------------------------
     // 3rd RE
     // ---------------------------------------
-#if defined(__x86_64__) || defined(__i386__)
     ulsch_llr[0] = simde_mm256_extract_epi16(xmm0,4);
     ulsch_llr[1] = simde_mm256_extract_epi16(xmm0,5);
     ulsch_llr[2] = simde_mm256_extract_epi16(xmm1,4);
     ulsch_llr[3] = simde_mm256_extract_epi16(xmm1,5);
     ulsch_llr[4] = simde_mm256_extract_epi16(xmm2,4);
     ulsch_llr[5] = simde_mm256_extract_epi16(xmm2,5);
-#elif defined(__arm__) || defined(__aarch64__)
-    ulsch_llr[0] = vgetq_lane_s16(xmm0,4);
-    ulsch_llr[1] = vgetq_lane_s16(xmm0,5);
-    ulsch_llr[2] = vgetq_lane_s16(xmm1,4);
-    ulsch_llr[3] = vgetq_lane_s16(xmm1,5);
-    ulsch_llr[4] = vgetq_lane_s16(xmm2,4);
-    ulsch_llr[5] = vgetq_lane_s16(xmm2,5);
-#endif
     // ---------------------------------------
 
     ulsch_llr+=6;
@@ -297,21 +210,12 @@ void nr_ulsch_64qam_llr(int32_t *rxdataF_comp,
     // ---------------------------------------
     // 4th RE
     // ---------------------------------------
-#if defined(__x86_64__) || defined(__i386__)
     ulsch_llr[0] = simde_mm256_extract_epi16(xmm0,6);
     ulsch_llr[1] = simde_mm256_extract_epi16(xmm0,7);
     ulsch_llr[2] = simde_mm256_extract_epi16(xmm1,6);
     ulsch_llr[3] = simde_mm256_extract_epi16(xmm1,7);
     ulsch_llr[4] = simde_mm256_extract_epi16(xmm2,6);
     ulsch_llr[5] = simde_mm256_extract_epi16(xmm2,7);
-#elif defined(__arm__) || defined(__aarch64__)
-    ulsch_llr[0] = vgetq_lane_s16(xmm0,6);
-    ulsch_llr[1] = vgetq_lane_s16(xmm0,7);
-    ulsch_llr[2] = vgetq_lane_s16(xmm1,6);
-    ulsch_llr[3] = vgetq_lane_s16(xmm1,7);
-    ulsch_llr[4] = vgetq_lane_s16(xmm2,6);
-    ulsch_llr[5] = vgetq_lane_s16(xmm2,7);
-#endif
     // ---------------------------------------
 
     ulsch_llr+=6;
@@ -346,10 +250,8 @@ void nr_ulsch_64qam_llr(int32_t *rxdataF_comp,
     ulsch_llr+=24;
   }
 
-#if defined(__x86_64__) || defined(__i386__)
-  _mm_empty();
-  _m_empty();
-#endif
+  simde_mm_empty();
+  simde_m_empty();
 }
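// [Editor's note — illustrative sketch, not part of the patch.] 64-QAM extends
// the same folding chain by one level: xmm1 = ch_mag - |y|, xmm2 = ch_magb -
// |xmm1|, yielding three LLR pairs per RE. Scalar restatement, saturation
// omitted (helper name hypothetical):
#include <stdint.h>

static inline void llr_qam64_ref(int16_t yr, int16_t yi,
                                 int16_t mag_a, int16_t mag_b,
                                 int16_t llr[6])
{
  int16_t t1r = (int16_t)(mag_a - (yr < 0 ? -yr : yr)); // first folding step
  int16_t t1i = (int16_t)(mag_a - (yi < 0 ? -yi : yi));
  llr[0] = yr;   llr[1] = yi;
  llr[2] = t1r;  llr[3] = t1i;
  llr[4] = (int16_t)(mag_b - (t1r < 0 ? -t1r : t1r));   // second folding step
  llr[5] = (int16_t)(mag_b - (t1i < 0 ? -t1i : t1i));
}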
 
 void nr_ulsch_256qam_llr(int32_t *rxdataF_comp,
@@ -525,31 +427,31 @@ void nr_ulsch_compute_llr(int32_t *rxdataF_comp,
  */
 void nr_ulsch_qpsk_qpsk(c16_t *stream0_in, c16_t *stream1_in, c16_t *stream0_out, c16_t *rho01, uint32_t length)
 {
-  __m128i *rho01_128i = (__m128i *)rho01;
-  __m128i *stream0_128i_in = (__m128i *)stream0_in;
-  __m128i *stream1_128i_in = (__m128i *)stream1_in;
-  __m128i *stream0_128i_out = (__m128i *)stream0_out;
-  __m128i ONE_OVER_2_SQRT_2 = _mm_set1_epi16(23170); // round(2 ^ 16 / (2 * sqrt(2)))
+  simde__m128i *rho01_128i = (simde__m128i *)rho01;
+  simde__m128i *stream0_128i_in = (simde__m128i *)stream0_in;
+  simde__m128i *stream1_128i_in = (simde__m128i *)stream1_in;
+  simde__m128i *stream0_128i_out = (simde__m128i *)stream0_out;
+  simde__m128i ONE_OVER_2_SQRT_2 = simde_mm_set1_epi16(23170); // round(2^16 / (2*sqrt(2)))
 
   // In each iteration, we take 8 complex symbols
   for (int i = 0; i < length >> 2; i += 2) {
 
     /// Compute real and imaginary parts of MF output for stream 0 (desired stream)
-    __m128i y0r, y0i;
+    simde__m128i y0r, y0i;
     simde_mm128_separate_real_imag_parts(&y0r, &y0i, stream0_128i_in[i], stream0_128i_in[i + 1]);
-    __m128i y0r_over2 = simde_mm_mulhi_epi16(y0r, ONE_OVER_2_SQRT_2);
-    y0r_over2 = _mm_slli_epi16(y0r_over2, 1); // y0r_over2 = Re(y0) / sqrt(2)
-    __m128i y0i_over2 = simde_mm_mulhi_epi16(y0i, ONE_OVER_2_SQRT_2);
-    y0i_over2 = _mm_slli_epi16(y0i_over2, 1); // y0i_over2 = Im(y0) / sqrt(2)
+    simde__m128i y0r_over2 = simde_mm_mulhi_epi16(y0r, ONE_OVER_2_SQRT_2);
+    y0r_over2 = simde_mm_slli_epi16(y0r_over2, 1); // y0r_over2 = Re(y0) / sqrt(2)
+    simde__m128i y0i_over2 = simde_mm_mulhi_epi16(y0i, ONE_OVER_2_SQRT_2);
+    y0i_over2 = simde_mm_slli_epi16(y0i_over2, 1); // y0i_over2 = Im(y0) / sqrt(2)
 
     /// Compute real and imaginary parts of MF output for stream 1 (interference stream)
-    __m128i y1r_over2, y1i_over2;
+    simde__m128i y1r_over2, y1i_over2;
     simde_mm128_separate_real_imag_parts(&y1r_over2, &y1i_over2, stream1_128i_in[i], stream1_128i_in[i + 1]);
     y1r_over2 = simde_mm_srai_epi16(y1r_over2, 1);  // y1r_over2 = Re(y1) / 2
     y1i_over2 = simde_mm_srai_epi16(y1i_over2, 1);  // y1i_over2 = Im(y1) / 2
 
     /// Get real and imaginary parts of rho
-    __m128i rhor, rhoi;
+    simde__m128i rhor, rhoi;
     simde_mm128_separate_real_imag_parts(&rhor, &rhoi, rho01_128i[i], rho01_128i[i + 1]);
 
     /// Compute |psi_r| and |psi_i|
@@ -560,45 +462,45 @@ void nr_ulsch_qpsk_qpsk(c16_t *stream0_in, c16_t *stream1_in, c16_t *stream0_out
     // Put (rho_r + rho_i)/(2*sqrt(2)) in rho_p
     // rhor * xR + rhoi * xI  --> xR = 1/sqrt(2) and xI = 1/sqrt(2)
     // rhor * xI - rhoi * xR  --> xR = -1/sqrt(2) and xI = 1/sqrt(2)
-    __m128i rho_p = simde_mm_adds_epi16(rhor, rhoi);        // rho_p = Re(rho) + Im(rho)
+    simde__m128i rho_p = simde_mm_adds_epi16(rhor, rhoi);        // rho_p = Re(rho) + Im(rho)
     rho_p = simde_mm_mulhi_epi16(rho_p, ONE_OVER_2_SQRT_2); // rho_p = rho_p / (2*sqrt(2))
 
     // Put (rho_r - rho_i)/(2*sqrt(2)) in rho_m
     // rhor * xR + rhoi * xI  --> xR = 1/sqrt(2) and xI = -1/sqrt(2)
     // rhor * xI - rhoi * xR  --> xR = 1/sqrt(2) and xI = 1/sqrt(2)
-    __m128i rho_m = simde_mm_subs_epi16(rhor, rhoi);        // rho_m = Re(rho) - Im(rho)
+    simde__m128i rho_m = simde_mm_subs_epi16(rhor, rhoi);        // rho_m = Re(rho) - Im(rho)
     rho_m = simde_mm_mulhi_epi16(rho_m, ONE_OVER_2_SQRT_2); // rho_m = rho_m / (2*sqrt(2))
 
     // xR = 1/sqrt(2) and xI = 1/sqrt(2)
-    __m128i abs_psi_rpm = simde_mm_subs_epi16(rho_p, y1r_over2);  // psi_rpm = rho_p - y1r/2
+    simde__m128i abs_psi_rpm = simde_mm_subs_epi16(rho_p, y1r_over2);  // psi_rpm = rho_p - y1r/2
     abs_psi_rpm = simde_mm_abs_epi16(abs_psi_rpm);                   // abs_psi_rpm = |psi_rpm|
 
     // xR = 1/sqrt(2) and xI = 1/sqrt(2)
-    __m128i abs_psi_imm = simde_mm_subs_epi16(rho_m, y1i_over2);  // psi_imm = rho_m - y1i/2
+    simde__m128i abs_psi_imm = simde_mm_subs_epi16(rho_m, y1i_over2);  // psi_imm = rho_m - y1i/2
     abs_psi_imm = simde_mm_abs_epi16(abs_psi_imm);                   // abs_psi_imm = |psi_imm|
 
     // xR = 1/sqrt(2) and xI = -1/sqrt(2)
-    __m128i abs_psi_rmm = simde_mm_subs_epi16(rho_m, y1r_over2);  // psi_rmm = rho_m - y1r/2
+    simde__m128i abs_psi_rmm = simde_mm_subs_epi16(rho_m, y1r_over2);  // psi_rmm = rho_m - y1r/2
     abs_psi_rmm = simde_mm_abs_epi16(abs_psi_rmm);                   // abs_psi_rmm = |psi_rmm|
 
     // xR = -1/sqrt(2) and xI = 1/sqrt(2)
-    __m128i abs_psi_ipm = simde_mm_subs_epi16(rho_p, y1i_over2);  // psi_ipm = rho_p - y1i/2
+    simde__m128i abs_psi_ipm = simde_mm_subs_epi16(rho_p, y1i_over2);  // psi_ipm = rho_p - y1i/2
     abs_psi_ipm = simde_mm_abs_epi16(abs_psi_ipm);                   // abs_psi_ipm = |psi_ipm|
 
     // xR = -1/sqrt(2) and xI = -1/sqrt(2)
-    __m128i abs_psi_rpp = simde_mm_adds_epi16(rho_p, y1r_over2);  // psi_rpp = rho_p + y1r/2
+    simde__m128i abs_psi_rpp = simde_mm_adds_epi16(rho_p, y1r_over2);  // psi_rpp = rho_p + y1r/2
     abs_psi_rpp = simde_mm_abs_epi16(abs_psi_rpp);                   // abs_psi_rpp = |psi_rpp|
 
     // xR = -1/sqrt(2) and xI = -1/sqrt(2)
-    __m128i abs_psi_imp = simde_mm_adds_epi16(rho_m, y1i_over2);  // psi_imp = rho_m + y1i/2
+    simde__m128i abs_psi_imp = simde_mm_adds_epi16(rho_m, y1i_over2);  // psi_imp = rho_m + y1i/2
     abs_psi_imp = simde_mm_abs_epi16(abs_psi_imp);                   // abs_psi_imp = |psi_imp|
 
     // xR = -1/sqrt(2) and xI = 1/sqrt(2)
-    __m128i abs_psi_rmp = simde_mm_adds_epi16(rho_m, y1r_over2);  // psi_rmp = rho_m + y1r/2
+    simde__m128i abs_psi_rmp = simde_mm_adds_epi16(rho_m, y1r_over2);  // psi_rmp = rho_m + y1r/2
     abs_psi_rmp = simde_mm_abs_epi16(abs_psi_rmp);                   // abs_psi_rmp = |psi_rmp|
 
     // xR = 1/sqrt(2) and xI = -1/sqrt(2)
-    __m128i abs_psi_ipp = simde_mm_adds_epi16(rho_p, y1i_over2);  // psi_ipm = rho_p + y1i/2
+    simde__m128i abs_psi_ipp = simde_mm_adds_epi16(rho_p, y1i_over2);  // psi_ipp = rho_p + y1i/2
    abs_psi_ipp = simde_mm_abs_epi16(abs_psi_ipp);                   // abs_psi_ipp = |psi_ipp|
 
     /// Compute bit metrics (lambda)
@@ -607,49 +509,49 @@ void nr_ulsch_qpsk_qpsk(c16_t *stream0_in, c16_t *stream1_in, c16_t *stream0_out
 
     // xR = 1/sqrt(2) and xI = 1/sqrt(2)
     // For numerator: bit_met_num_re_p = abs_psi_rpm + abs_psi_imm + y0r/sqrt(2) + y0i/sqrt(2)
-    __m128i bit_met_num_re_p = simde_mm_adds_epi16(abs_psi_rpm, abs_psi_imm);
+    simde__m128i bit_met_num_re_p = simde_mm_adds_epi16(abs_psi_rpm, abs_psi_imm);
     bit_met_num_re_p = simde_mm_adds_epi16(bit_met_num_re_p, y0r_over2);
     bit_met_num_re_p = simde_mm_adds_epi16(bit_met_num_re_p, y0i_over2);
 
     // xR = 1/sqrt(2) and xI = -1/sqrt(2)
     // For numerator: bit_met_num_re_m = abs_psi_rmm + abs_psi_ipp + y0r/sqrt(2) - y0i/sqrt(2)
-    __m128i bit_met_num_re_m = simde_mm_adds_epi16(abs_psi_rmm, abs_psi_ipp);
+    simde__m128i bit_met_num_re_m = simde_mm_adds_epi16(abs_psi_rmm, abs_psi_ipp);
     bit_met_num_re_m = simde_mm_adds_epi16(bit_met_num_re_m, y0r_over2);
     bit_met_num_re_m = simde_mm_subs_epi16(bit_met_num_re_m, y0i_over2);
 
     // xR = -1/sqrt(2) and xI = 1/sqrt(2)
     // For denominator: bit_met_den_re_p = abs_psi_rmp + abs_psi_ipm - y0r/sqrt(2) + y0i/sqrt(2)
-    __m128i bit_met_den_re_p = simde_mm_adds_epi16(abs_psi_rmp, abs_psi_ipm);
+    simde__m128i bit_met_den_re_p = simde_mm_adds_epi16(abs_psi_rmp, abs_psi_ipm);
     bit_met_den_re_p = simde_mm_subs_epi16(bit_met_den_re_p, y0r_over2);
     bit_met_den_re_p = simde_mm_adds_epi16(bit_met_den_re_p, y0i_over2);
 
     // xR = -1/sqrt(2) and xI = -1/sqrt(2)
     // For denominator: bit_met_den_re_m = abs_psi_rpp + abs_psi_imp - y0r/sqrt(2) - y0i/sqrt(2)
-    __m128i bit_met_den_re_m = simde_mm_adds_epi16(abs_psi_rpp, abs_psi_imp);
+    simde__m128i bit_met_den_re_m = simde_mm_adds_epi16(abs_psi_rpp, abs_psi_imp);
     bit_met_den_re_m = simde_mm_subs_epi16(bit_met_den_re_m, y0r_over2);
     bit_met_den_re_m = simde_mm_subs_epi16(bit_met_den_re_m, y0i_over2);
 
     // xR = 1/sqrt(2) and xI = 1/sqrt(2)
     // For numerator: bit_met_num_im_p = abs_psi_rpm + abs_psi_imm + y0r/sqrt(2) + y0i/sqrt(2)
-    __m128i bit_met_num_im_p = simde_mm_adds_epi16(abs_psi_rpm, abs_psi_imm);
+    simde__m128i bit_met_num_im_p = simde_mm_adds_epi16(abs_psi_rpm, abs_psi_imm);
     bit_met_num_im_p = simde_mm_adds_epi16(bit_met_num_im_p, y0r_over2);
     bit_met_num_im_p = simde_mm_adds_epi16(bit_met_num_im_p, y0i_over2);
 
     // xR = -1/sqrt(2) and xI = 1/sqrt(2)
     // For numerator: bit_met_num_im_m = abs_psi_rmp + abs_psi_ipm - y0r/sqrt(2) + y0i/sqrt(2)
-    __m128i bit_met_num_im_m = simde_mm_adds_epi16(abs_psi_rmp, abs_psi_ipm);
+    simde__m128i bit_met_num_im_m = simde_mm_adds_epi16(abs_psi_rmp, abs_psi_ipm);
     bit_met_num_im_m = simde_mm_subs_epi16(bit_met_num_im_m, y0r_over2);
     bit_met_num_im_m = simde_mm_adds_epi16(bit_met_num_im_m, y0i_over2);
 
     // xR = 1/sqrt(2) and xI = -1/sqrt(2)
     // For denominator: bit_met_den_im_p = abs_psi_rmm + abs_psi_ipp + y0r/sqrt(2) - y0i/sqrt(2)
-    __m128i bit_met_den_im_p = simde_mm_adds_epi16(abs_psi_rmm, abs_psi_ipp);
+    simde__m128i bit_met_den_im_p = simde_mm_adds_epi16(abs_psi_rmm, abs_psi_ipp);
     bit_met_den_im_p = simde_mm_adds_epi16(bit_met_den_im_p, y0r_over2);
     bit_met_den_im_p = simde_mm_subs_epi16(bit_met_den_im_p, y0i_over2);
 
     // xR = -1/sqrt(2) and xI = -1/sqrt(2)
     // For denominator: bit_met_den_im_m = abs_psi_rpp + abs_psi_imp - y0r/sqrt(2)- y0i/sqrt(2)
-    __m128i bit_met_den_im_m = simde_mm_adds_epi16(abs_psi_rpp, abs_psi_imp);
+    simde__m128i bit_met_den_im_m = simde_mm_adds_epi16(abs_psi_rpp, abs_psi_imp);
     bit_met_den_im_m = simde_mm_subs_epi16(bit_met_den_im_m, y0r_over2);
     bit_met_den_im_m = simde_mm_subs_epi16(bit_met_den_im_m, y0i_over2);
 
@@ -657,10 +559,10 @@ void nr_ulsch_qpsk_qpsk(c16_t *stream0_in, c16_t *stream1_in, c16_t *stream0_out
 
     // LLR = lambda(c==1) - lambda(c==0)
 
-    __m128i logmax_num_re0 = simde_mm_max_epi16(bit_met_num_re_p, bit_met_num_re_m); // LLR of the first bit: Bit = 1
-    __m128i logmax_den_re0 = simde_mm_max_epi16(bit_met_den_re_p, bit_met_den_re_m); // LLR of the first bit: Bit = 0
-    __m128i logmax_num_im0 = simde_mm_max_epi16(bit_met_num_im_p, bit_met_num_im_m); // LLR of the second bit: Bit = 1
-    __m128i logmax_den_im0 = simde_mm_max_epi16(bit_met_den_im_p, bit_met_den_im_m); // LLR of the second bit: Bit = 0
+    simde__m128i logmax_num_re0 = simde_mm_max_epi16(bit_met_num_re_p, bit_met_num_re_m); // LLR of the first bit: Bit = 1
+    simde__m128i logmax_den_re0 = simde_mm_max_epi16(bit_met_den_re_p, bit_met_den_re_m); // LLR of the first bit: Bit = 0
+    simde__m128i logmax_num_im0 = simde_mm_max_epi16(bit_met_num_im_p, bit_met_num_im_m); // LLR of the second bit: Bit = 1
+    simde__m128i logmax_den_im0 = simde_mm_max_epi16(bit_met_den_im_p, bit_met_den_im_m); // LLR of the second bit: Bit = 0
 
     y0r = simde_mm_subs_epi16(logmax_num_re0, logmax_den_re0);  // LLR of first bit [L1(1), L1(2), L1(3), L1(4)]
     y0i = simde_mm_subs_epi16(logmax_num_im0, logmax_den_im0);  // LLR of second bit [L2(1), L2(2), L2(3), L2(4)]
@@ -674,8 +576,8 @@ void nr_ulsch_qpsk_qpsk(c16_t *stream0_in, c16_t *stream1_in, c16_t *stream0_out
     }
   }
 
-  _mm_empty();
-  _m_empty();
+  simde_mm_empty();
+  simde_m_empty();
 }
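// [Editor's note — illustrative sketch, not part of the patch.] The detector
// above enumerates the four QPSK hypotheses of the interfering stream, forms
// an |psi| distance metric for each, and produces max-log LLRs as
// max(metrics | bit = 1) - max(metrics | bit = 0). The final reduction,
// isolated (function name hypothetical):
#include <simde/x86/sse2.h>

static inline simde__m128i logmax_llr(simde__m128i met_num_p, simde__m128i met_num_m,
                                      simde__m128i met_den_p, simde__m128i met_den_m)
{
  simde__m128i num = simde_mm_max_epi16(met_num_p, met_num_m); // lambda(c == 1)
  simde__m128i den = simde_mm_max_epi16(met_den_p, met_den_m); // lambda(c == 0)
  return simde_mm_subs_epi16(num, den);                        // saturating LLR
}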
 
 static const int16_t ones[8] __attribute__((aligned(16))) = {0xffff, 0xffff, 0xffff, 0xffff, 0xffff, 0xffff, 0xffff, 0xffff};
@@ -691,7 +593,7 @@ static const int16_t ones[8] __attribute__((aligned(16))) = {0xffff, 0xffff, 0xf
 // Calculate interference magnitude
 #define interference_abs_epi16(psi, int_ch_mag, int_mag, c1, c2)        \
   tmp_result = simde_mm_cmplt_epi16(psi, int_ch_mag);                   \
-  tmp_result2 = simde_mm_xor_si128(tmp_result, (*(__m128i *)&ones[0])); \
+  tmp_result2 = simde_mm_xor_si128(tmp_result, (*(simde__m128i *)&ones[0])); \
   tmp_result = simde_mm_and_si128(tmp_result, c1);                      \
   tmp_result2 = simde_mm_and_si128(tmp_result2, c2);                    \
   int_mag = simde_mm_or_si128(tmp_result, tmp_result2);
@@ -712,14 +614,14 @@ static const int16_t ones[8] __attribute__((aligned(16))) = {0xffff, 0xffff, 0xf
   tmp_result2 = simde_mm_slli_epi16(tmp_result2, 1);             \
   a_sq = simde_mm_adds_epi16(tmp_result, tmp_result2);
 
-__m128i max_epi16(__m128i m0, __m128i m1, __m128i m2, __m128i m3, __m128i m4, __m128i m5, __m128i m6, __m128i m7)
+simde__m128i max_epi16(simde__m128i m0, simde__m128i m1, simde__m128i m2, simde__m128i m3, simde__m128i m4, simde__m128i m5, simde__m128i m6, simde__m128i m7)
 {
-  __m128i a0 = simde_mm_max_epi16(m0, m1);
-  __m128i a1 = simde_mm_max_epi16(m2, m3);
-  __m128i a2 = simde_mm_max_epi16(m4, m5);
-  __m128i a3 = simde_mm_max_epi16(m6, m7);
-  __m128i b0 = simde_mm_max_epi16(a0, a1);
-  __m128i b1 = simde_mm_max_epi16(a2, a3);
+  simde__m128i a0 = simde_mm_max_epi16(m0, m1);
+  simde__m128i a1 = simde_mm_max_epi16(m2, m3);
+  simde__m128i a2 = simde_mm_max_epi16(m4, m5);
+  simde__m128i a3 = simde_mm_max_epi16(m6, m7);
+  simde__m128i b0 = simde_mm_max_epi16(a0, a1);
+  simde__m128i b1 = simde_mm_max_epi16(a2, a3);
   return simde_mm_max_epi16(b0, b1);
 }
 
@@ -746,166 +648,166 @@ void nr_ulsch_qam16_qam16(c16_t *stream0_in,
                           c16_t *rho01,
                           uint32_t length)
 {
-  __m128i *rho01_128i = (__m128i *)rho01;
-  __m128i *stream0_128i_in = (__m128i *)stream0_in;
-  __m128i *stream1_128i_in = (__m128i *)stream1_in;
-  __m128i *stream0_128i_out = (__m128i *)stream0_out;
-  __m128i *ch_mag_128i = (__m128i *)ch_mag;
-  __m128i *ch_mag_128i_i = (__m128i *)ch_mag_i;
-
-  __m128i ONE_OVER_SQRT_10 = simde_mm_set1_epi16(20724); // round(1/sqrt(10)*2^16)
-  __m128i ONE_OVER_SQRT_10_Q15 = simde_mm_set1_epi16(10362); // round(1/sqrt(10)*2^15)
-  __m128i THREE_OVER_SQRT_10 = simde_mm_set1_epi16(31086); // round(3/sqrt(10)*2^15)
-  __m128i SQRT_10_OVER_FOUR = simde_mm_set1_epi16(25905); // round(sqrt(10)/4*2^15)
-  __m128i ONE_OVER_TWO_SQRT_10 = simde_mm_set1_epi16(10362); // round(1/2/sqrt(10)*2^16)
-  __m128i NINE_OVER_TWO_SQRT_10 = simde_mm_set1_epi16(23315); // round(9/2/sqrt(10)*2^14)
-  __m128i ch_mag_des, ch_mag_int;
-  __m128i y0r_over_sqrt10;
-  __m128i y0i_over_sqrt10;
-  __m128i y0r_three_over_sqrt10;
-  __m128i y0i_three_over_sqrt10;
-  __m128i ch_mag_over_10;
-  __m128i ch_mag_over_2;
-  __m128i ch_mag_9_over_10;
-
-  __m128i xmm0 __attribute__((aligned(16)));
-  __m128i xmm1 __attribute__((aligned(16)));
-  __m128i xmm2 __attribute__((aligned(16)));
-  __m128i xmm3 __attribute__((aligned(16)));
-  __m128i xmm4 __attribute__((aligned(16)));
-  __m128i xmm5 __attribute__((aligned(16)));
-  __m128i xmm6 __attribute__((aligned(16)));
-  __m128i xmm7 __attribute__((aligned(16)));
-
-  __m128i rho_rpi __attribute__((aligned(16)));
-  __m128i rho_rmi __attribute__((aligned(16)));
-  __m128i rho_rpi_1_1 __attribute__((aligned(16)));
-  __m128i rho_rpi_1_3 __attribute__((aligned(16)));
-  __m128i rho_rpi_3_1 __attribute__((aligned(16)));
-  __m128i rho_rpi_3_3 __attribute__((aligned(16)));
-  __m128i rho_rmi_1_1 __attribute__((aligned(16)));
-  __m128i rho_rmi_1_3 __attribute__((aligned(16)));
-  __m128i rho_rmi_3_1 __attribute__((aligned(16)));
-  __m128i rho_rmi_3_3 __attribute__((aligned(16)));
-
-  __m128i psi_r_m3_m3 __attribute__((aligned(16)));
-  __m128i psi_r_m3_m1 __attribute__((aligned(16)));
-  __m128i psi_r_m3_p1 __attribute__((aligned(16)));
-  __m128i psi_r_m3_p3 __attribute__((aligned(16)));
-  __m128i psi_r_m1_m3 __attribute__((aligned(16)));
-  __m128i psi_r_m1_m1 __attribute__((aligned(16)));
-  __m128i psi_r_m1_p1 __attribute__((aligned(16)));
-  __m128i psi_r_m1_p3 __attribute__((aligned(16)));
-  __m128i psi_r_p1_m3 __attribute__((aligned(16)));
-  __m128i psi_r_p1_m1 __attribute__((aligned(16)));
-  __m128i psi_r_p1_p1 __attribute__((aligned(16)));
-  __m128i psi_r_p1_p3 __attribute__((aligned(16)));
-  __m128i psi_r_p3_m3 __attribute__((aligned(16)));
-  __m128i psi_r_p3_m1 __attribute__((aligned(16)));
-  __m128i psi_r_p3_p1 __attribute__((aligned(16)));
-  __m128i psi_r_p3_p3 __attribute__((aligned(16)));
-
-  __m128i psi_i_m3_m3 __attribute__((aligned(16)));
-  __m128i psi_i_m3_m1 __attribute__((aligned(16)));
-  __m128i psi_i_m3_p1 __attribute__((aligned(16)));
-  __m128i psi_i_m3_p3 __attribute__((aligned(16)));
-  __m128i psi_i_m1_m3 __attribute__((aligned(16)));
-  __m128i psi_i_m1_m1 __attribute__((aligned(16)));
-  __m128i psi_i_m1_p1 __attribute__((aligned(16)));
-  __m128i psi_i_m1_p3 __attribute__((aligned(16)));
-  __m128i psi_i_p1_m3 __attribute__((aligned(16)));
-  __m128i psi_i_p1_m1 __attribute__((aligned(16)));
-  __m128i psi_i_p1_p1 __attribute__((aligned(16)));
-  __m128i psi_i_p1_p3 __attribute__((aligned(16)));
-  __m128i psi_i_p3_m3 __attribute__((aligned(16)));
-  __m128i psi_i_p3_m1 __attribute__((aligned(16)));
-  __m128i psi_i_p3_p1 __attribute__((aligned(16)));
-  __m128i psi_i_p3_p3 __attribute__((aligned(16)));
-
-  __m128i a_r_m3_m3 __attribute__((aligned(16)));
-  __m128i a_r_m3_m1 __attribute__((aligned(16)));
-  __m128i a_r_m3_p1 __attribute__((aligned(16)));
-  __m128i a_r_m3_p3 __attribute__((aligned(16)));
-  __m128i a_r_m1_m3 __attribute__((aligned(16)));
-  __m128i a_r_m1_m1 __attribute__((aligned(16)));
-  __m128i a_r_m1_p1 __attribute__((aligned(16)));
-  __m128i a_r_m1_p3 __attribute__((aligned(16)));
-  __m128i a_r_p1_m3 __attribute__((aligned(16)));
-  __m128i a_r_p1_m1 __attribute__((aligned(16)));
-  __m128i a_r_p1_p1 __attribute__((aligned(16)));
-  __m128i a_r_p1_p3 __attribute__((aligned(16)));
-  __m128i a_r_p3_m3 __attribute__((aligned(16)));
-  __m128i a_r_p3_m1 __attribute__((aligned(16)));
-  __m128i a_r_p3_p1 __attribute__((aligned(16)));
-  __m128i a_r_p3_p3 __attribute__((aligned(16)));
-
-  __m128i a_i_m3_m3 __attribute__((aligned(16)));
-  __m128i a_i_m3_m1 __attribute__((aligned(16)));
-  __m128i a_i_m3_p1 __attribute__((aligned(16)));
-  __m128i a_i_m3_p3 __attribute__((aligned(16)));
-  __m128i a_i_m1_m3 __attribute__((aligned(16)));
-  __m128i a_i_m1_m1 __attribute__((aligned(16)));
-  __m128i a_i_m1_p1 __attribute__((aligned(16)));
-  __m128i a_i_m1_p3 __attribute__((aligned(16)));
-  __m128i a_i_p1_m3 __attribute__((aligned(16)));
-  __m128i a_i_p1_m1 __attribute__((aligned(16)));
-  __m128i a_i_p1_p1 __attribute__((aligned(16)));
-  __m128i a_i_p1_p3 __attribute__((aligned(16)));
-  __m128i a_i_p3_m3 __attribute__((aligned(16)));
-  __m128i a_i_p3_m1 __attribute__((aligned(16)));
-  __m128i a_i_p3_p1 __attribute__((aligned(16)));
-  __m128i a_i_p3_p3 __attribute__((aligned(16)));
-
-  __m128i psi_a_m3_m3 __attribute__((aligned(16)));
-  __m128i psi_a_m3_m1 __attribute__((aligned(16)));
-  __m128i psi_a_m3_p1 __attribute__((aligned(16)));
-  __m128i psi_a_m3_p3 __attribute__((aligned(16)));
-  __m128i psi_a_m1_m3 __attribute__((aligned(16)));
-  __m128i psi_a_m1_m1 __attribute__((aligned(16)));
-  __m128i psi_a_m1_p1 __attribute__((aligned(16)));
-  __m128i psi_a_m1_p3 __attribute__((aligned(16)));
-  __m128i psi_a_p1_m3 __attribute__((aligned(16)));
-  __m128i psi_a_p1_m1 __attribute__((aligned(16)));
-  __m128i psi_a_p1_p1 __attribute__((aligned(16)));
-  __m128i psi_a_p1_p3 __attribute__((aligned(16)));
-  __m128i psi_a_p3_m3 __attribute__((aligned(16)));
-  __m128i psi_a_p3_m1 __attribute__((aligned(16)));
-  __m128i psi_a_p3_p1 __attribute__((aligned(16)));
-  __m128i psi_a_p3_p3 __attribute__((aligned(16)));
-
-  __m128i a_sq_m3_m3 __attribute__((aligned(16)));
-  __m128i a_sq_m3_m1 __attribute__((aligned(16)));
-  __m128i a_sq_m3_p1 __attribute__((aligned(16)));
-  __m128i a_sq_m3_p3 __attribute__((aligned(16)));
-  __m128i a_sq_m1_m3 __attribute__((aligned(16)));
-  __m128i a_sq_m1_m1 __attribute__((aligned(16)));
-  __m128i a_sq_m1_p1 __attribute__((aligned(16)));
-  __m128i a_sq_m1_p3 __attribute__((aligned(16)));
-  __m128i a_sq_p1_m3 __attribute__((aligned(16)));
-  __m128i a_sq_p1_m1 __attribute__((aligned(16)));
-  __m128i a_sq_p1_p1 __attribute__((aligned(16)));
-  __m128i a_sq_p1_p3 __attribute__((aligned(16)));
-  __m128i a_sq_p3_m3 __attribute__((aligned(16)));
-  __m128i a_sq_p3_m1 __attribute__((aligned(16)));
-  __m128i a_sq_p3_p1 __attribute__((aligned(16)));
-  __m128i a_sq_p3_p3 __attribute__((aligned(16)));
-
-  __m128i y0_p_1_1 __attribute__((aligned(16)));
-  __m128i y0_p_1_3 __attribute__((aligned(16)));
-  __m128i y0_p_3_1 __attribute__((aligned(16)));
-  __m128i y0_p_3_3 __attribute__((aligned(16)));
-  __m128i y0_m_1_1 __attribute__((aligned(16)));
-  __m128i y0_m_1_3 __attribute__((aligned(16)));
-  __m128i y0_m_3_1 __attribute__((aligned(16)));
-  __m128i y0_m_3_3 __attribute__((aligned(16)));
-
-  __m128i y0r __attribute__((aligned(16)));
-  __m128i y0i __attribute__((aligned(16)));
-  __m128i y1r __attribute__((aligned(16)));
-  __m128i y1i __attribute__((aligned(16)));
-
-  __m128i tmp_result __attribute__((aligned(16)));
-  __m128i tmp_result2 __attribute__((aligned(16)));
+  simde__m128i *rho01_128i = (simde__m128i *)rho01;
+  simde__m128i *stream0_128i_in = (simde__m128i *)stream0_in;
+  simde__m128i *stream1_128i_in = (simde__m128i *)stream1_in;
+  simde__m128i *stream0_128i_out = (simde__m128i *)stream0_out;
+  simde__m128i *ch_mag_128i = (simde__m128i *)ch_mag;
+  simde__m128i *ch_mag_128i_i = (simde__m128i *)ch_mag_i;
+
+  simde__m128i ONE_OVER_SQRT_10 = simde_mm_set1_epi16(20724); // round(1/sqrt(10)*2^16)
+  simde__m128i ONE_OVER_SQRT_10_Q15 = simde_mm_set1_epi16(10362); // round(1/sqrt(10)*2^15)
+  simde__m128i THREE_OVER_SQRT_10 = simde_mm_set1_epi16(31086); // round(3/sqrt(10)*2^15)
+  simde__m128i SQRT_10_OVER_FOUR = simde_mm_set1_epi16(25905); // round(sqrt(10)/4*2^15)
+  simde__m128i ONE_OVER_TWO_SQRT_10 = simde_mm_set1_epi16(10362); // round(1/2/sqrt(10)*2^16)
+  simde__m128i NINE_OVER_TWO_SQRT_10 = simde_mm_set1_epi16(23315); // round(9/2/sqrt(10)*2^14)
+  simde__m128i ch_mag_des, ch_mag_int;
+  simde__m128i y0r_over_sqrt10;
+  simde__m128i y0i_over_sqrt10;
+  simde__m128i y0r_three_over_sqrt10;
+  simde__m128i y0i_three_over_sqrt10;
+  simde__m128i ch_mag_over_10;
+  simde__m128i ch_mag_over_2;
+  simde__m128i ch_mag_9_over_10;
+
+  simde__m128i xmm0 __attribute__((aligned(16)));
+  simde__m128i xmm1 __attribute__((aligned(16)));
+  simde__m128i xmm2 __attribute__((aligned(16)));
+  simde__m128i xmm3 __attribute__((aligned(16)));
+  simde__m128i xmm4 __attribute__((aligned(16)));
+  simde__m128i xmm5 __attribute__((aligned(16)));
+  simde__m128i xmm6 __attribute__((aligned(16)));
+  simde__m128i xmm7 __attribute__((aligned(16)));
+
+  simde__m128i rho_rpi __attribute__((aligned(16)));
+  simde__m128i rho_rmi __attribute__((aligned(16)));
+  simde__m128i rho_rpi_1_1 __attribute__((aligned(16)));
+  simde__m128i rho_rpi_1_3 __attribute__((aligned(16)));
+  simde__m128i rho_rpi_3_1 __attribute__((aligned(16)));
+  simde__m128i rho_rpi_3_3 __attribute__((aligned(16)));
+  simde__m128i rho_rmi_1_1 __attribute__((aligned(16)));
+  simde__m128i rho_rmi_1_3 __attribute__((aligned(16)));
+  simde__m128i rho_rmi_3_1 __attribute__((aligned(16)));
+  simde__m128i rho_rmi_3_3 __attribute__((aligned(16)));
+
+  simde__m128i psi_r_m3_m3 __attribute__((aligned(16)));
+  simde__m128i psi_r_m3_m1 __attribute__((aligned(16)));
+  simde__m128i psi_r_m3_p1 __attribute__((aligned(16)));
+  simde__m128i psi_r_m3_p3 __attribute__((aligned(16)));
+  simde__m128i psi_r_m1_m3 __attribute__((aligned(16)));
+  simde__m128i psi_r_m1_m1 __attribute__((aligned(16)));
+  simde__m128i psi_r_m1_p1 __attribute__((aligned(16)));
+  simde__m128i psi_r_m1_p3 __attribute__((aligned(16)));
+  simde__m128i psi_r_p1_m3 __attribute__((aligned(16)));
+  simde__m128i psi_r_p1_m1 __attribute__((aligned(16)));
+  simde__m128i psi_r_p1_p1 __attribute__((aligned(16)));
+  simde__m128i psi_r_p1_p3 __attribute__((aligned(16)));
+  simde__m128i psi_r_p3_m3 __attribute__((aligned(16)));
+  simde__m128i psi_r_p3_m1 __attribute__((aligned(16)));
+  simde__m128i psi_r_p3_p1 __attribute__((aligned(16)));
+  simde__m128i psi_r_p3_p3 __attribute__((aligned(16)));
+
+  simde__m128i psi_i_m3_m3 __attribute__((aligned(16)));
+  simde__m128i psi_i_m3_m1 __attribute__((aligned(16)));
+  simde__m128i psi_i_m3_p1 __attribute__((aligned(16)));
+  simde__m128i psi_i_m3_p3 __attribute__((aligned(16)));
+  simde__m128i psi_i_m1_m3 __attribute__((aligned(16)));
+  simde__m128i psi_i_m1_m1 __attribute__((aligned(16)));
+  simde__m128i psi_i_m1_p1 __attribute__((aligned(16)));
+  simde__m128i psi_i_m1_p3 __attribute__((aligned(16)));
+  simde__m128i psi_i_p1_m3 __attribute__((aligned(16)));
+  simde__m128i psi_i_p1_m1 __attribute__((aligned(16)));
+  simde__m128i psi_i_p1_p1 __attribute__((aligned(16)));
+  simde__m128i psi_i_p1_p3 __attribute__((aligned(16)));
+  simde__m128i psi_i_p3_m3 __attribute__((aligned(16)));
+  simde__m128i psi_i_p3_m1 __attribute__((aligned(16)));
+  simde__m128i psi_i_p3_p1 __attribute__((aligned(16)));
+  simde__m128i psi_i_p3_p3 __attribute__((aligned(16)));
+
+  simde__m128i a_r_m3_m3 __attribute__((aligned(16)));
+  simde__m128i a_r_m3_m1 __attribute__((aligned(16)));
+  simde__m128i a_r_m3_p1 __attribute__((aligned(16)));
+  simde__m128i a_r_m3_p3 __attribute__((aligned(16)));
+  simde__m128i a_r_m1_m3 __attribute__((aligned(16)));
+  simde__m128i a_r_m1_m1 __attribute__((aligned(16)));
+  simde__m128i a_r_m1_p1 __attribute__((aligned(16)));
+  simde__m128i a_r_m1_p3 __attribute__((aligned(16)));
+  simde__m128i a_r_p1_m3 __attribute__((aligned(16)));
+  simde__m128i a_r_p1_m1 __attribute__((aligned(16)));
+  simde__m128i a_r_p1_p1 __attribute__((aligned(16)));
+  simde__m128i a_r_p1_p3 __attribute__((aligned(16)));
+  simde__m128i a_r_p3_m3 __attribute__((aligned(16)));
+  simde__m128i a_r_p3_m1 __attribute__((aligned(16)));
+  simde__m128i a_r_p3_p1 __attribute__((aligned(16)));
+  simde__m128i a_r_p3_p3 __attribute__((aligned(16)));
+
+  simde__m128i a_i_m3_m3 __attribute__((aligned(16)));
+  simde__m128i a_i_m3_m1 __attribute__((aligned(16)));
+  simde__m128i a_i_m3_p1 __attribute__((aligned(16)));
+  simde__m128i a_i_m3_p3 __attribute__((aligned(16)));
+  simde__m128i a_i_m1_m3 __attribute__((aligned(16)));
+  simde__m128i a_i_m1_m1 __attribute__((aligned(16)));
+  simde__m128i a_i_m1_p1 __attribute__((aligned(16)));
+  simde__m128i a_i_m1_p3 __attribute__((aligned(16)));
+  simde__m128i a_i_p1_m3 __attribute__((aligned(16)));
+  simde__m128i a_i_p1_m1 __attribute__((aligned(16)));
+  simde__m128i a_i_p1_p1 __attribute__((aligned(16)));
+  simde__m128i a_i_p1_p3 __attribute__((aligned(16)));
+  simde__m128i a_i_p3_m3 __attribute__((aligned(16)));
+  simde__m128i a_i_p3_m1 __attribute__((aligned(16)));
+  simde__m128i a_i_p3_p1 __attribute__((aligned(16)));
+  simde__m128i a_i_p3_p3 __attribute__((aligned(16)));
+
+  simde__m128i psi_a_m3_m3 __attribute__((aligned(16)));
+  simde__m128i psi_a_m3_m1 __attribute__((aligned(16)));
+  simde__m128i psi_a_m3_p1 __attribute__((aligned(16)));
+  simde__m128i psi_a_m3_p3 __attribute__((aligned(16)));
+  simde__m128i psi_a_m1_m3 __attribute__((aligned(16)));
+  simde__m128i psi_a_m1_m1 __attribute__((aligned(16)));
+  simde__m128i psi_a_m1_p1 __attribute__((aligned(16)));
+  simde__m128i psi_a_m1_p3 __attribute__((aligned(16)));
+  simde__m128i psi_a_p1_m3 __attribute__((aligned(16)));
+  simde__m128i psi_a_p1_m1 __attribute__((aligned(16)));
+  simde__m128i psi_a_p1_p1 __attribute__((aligned(16)));
+  simde__m128i psi_a_p1_p3 __attribute__((aligned(16)));
+  simde__m128i psi_a_p3_m3 __attribute__((aligned(16)));
+  simde__m128i psi_a_p3_m1 __attribute__((aligned(16)));
+  simde__m128i psi_a_p3_p1 __attribute__((aligned(16)));
+  simde__m128i psi_a_p3_p3 __attribute__((aligned(16)));
+
+  simde__m128i a_sq_m3_m3 __attribute__((aligned(16)));
+  simde__m128i a_sq_m3_m1 __attribute__((aligned(16)));
+  simde__m128i a_sq_m3_p1 __attribute__((aligned(16)));
+  simde__m128i a_sq_m3_p3 __attribute__((aligned(16)));
+  simde__m128i a_sq_m1_m3 __attribute__((aligned(16)));
+  simde__m128i a_sq_m1_m1 __attribute__((aligned(16)));
+  simde__m128i a_sq_m1_p1 __attribute__((aligned(16)));
+  simde__m128i a_sq_m1_p3 __attribute__((aligned(16)));
+  simde__m128i a_sq_p1_m3 __attribute__((aligned(16)));
+  simde__m128i a_sq_p1_m1 __attribute__((aligned(16)));
+  simde__m128i a_sq_p1_p1 __attribute__((aligned(16)));
+  simde__m128i a_sq_p1_p3 __attribute__((aligned(16)));
+  simde__m128i a_sq_p3_m3 __attribute__((aligned(16)));
+  simde__m128i a_sq_p3_m1 __attribute__((aligned(16)));
+  simde__m128i a_sq_p3_p1 __attribute__((aligned(16)));
+  simde__m128i a_sq_p3_p3 __attribute__((aligned(16)));
+
+  simde__m128i y0_p_1_1 __attribute__((aligned(16)));
+  simde__m128i y0_p_1_3 __attribute__((aligned(16)));
+  simde__m128i y0_p_3_1 __attribute__((aligned(16)));
+  simde__m128i y0_p_3_3 __attribute__((aligned(16)));
+  simde__m128i y0_m_1_1 __attribute__((aligned(16)));
+  simde__m128i y0_m_1_3 __attribute__((aligned(16)));
+  simde__m128i y0_m_3_1 __attribute__((aligned(16)));
+  simde__m128i y0_m_3_3 __attribute__((aligned(16)));
+
+  simde__m128i y0r __attribute__((aligned(16)));
+  simde__m128i y0i __attribute__((aligned(16)));
+  simde__m128i y1r __attribute__((aligned(16)));
+  simde__m128i y1i __attribute__((aligned(16)));
+
+  simde__m128i tmp_result __attribute__((aligned(16)));
+  simde__m128i tmp_result2 __attribute__((aligned(16)));
 
   // In one iteration, we deal with 8 REs
   for (int i = 0; i < length >> 2; i += 2) {
@@ -1117,67 +1019,67 @@ void nr_ulsch_qam16_qam16(c16_t *stream0_in,
 
     /// Compute bit metrics (lambda)
 
-    __m128i bit_met_p1_p1 = simde_mm_subs_epi16(psi_a_p1_p1, a_sq_p1_p1);
+    simde__m128i bit_met_p1_p1 = simde_mm_subs_epi16(psi_a_p1_p1, a_sq_p1_p1);
     bit_met_p1_p1 = simde_mm_adds_epi16(bit_met_p1_p1, y0_p_1_1);
     bit_met_p1_p1 = simde_mm_subs_epi16(bit_met_p1_p1, ch_mag_over_10);
 
-    __m128i bit_met_p1_p3 = simde_mm_subs_epi16(psi_a_p1_p3, a_sq_p1_p3);
+    simde__m128i bit_met_p1_p3 = simde_mm_subs_epi16(psi_a_p1_p3, a_sq_p1_p3);
     bit_met_p1_p3 = simde_mm_adds_epi16(bit_met_p1_p3, y0_p_1_3);
     bit_met_p1_p3 = simde_mm_subs_epi16(bit_met_p1_p3, ch_mag_over_2);
 
-    __m128i bit_met_p1_m1 = simde_mm_subs_epi16(psi_a_p1_m1, a_sq_p1_m1);
+    simde__m128i bit_met_p1_m1 = simde_mm_subs_epi16(psi_a_p1_m1, a_sq_p1_m1);
     bit_met_p1_m1 = simde_mm_adds_epi16(bit_met_p1_m1, y0_m_1_1);
     bit_met_p1_m1 = simde_mm_subs_epi16(bit_met_p1_m1, ch_mag_over_10);
 
-    __m128i bit_met_p1_m3 = simde_mm_subs_epi16(psi_a_p1_m3, a_sq_p1_m3);
+    simde__m128i bit_met_p1_m3 = simde_mm_subs_epi16(psi_a_p1_m3, a_sq_p1_m3);
     bit_met_p1_m3 = simde_mm_adds_epi16(bit_met_p1_m3, y0_m_1_3);
     bit_met_p1_m3 = simde_mm_subs_epi16(bit_met_p1_m3, ch_mag_over_2);
 
-    __m128i bit_met_p3_p1 = simde_mm_subs_epi16(psi_a_p3_p1, a_sq_p3_p1);
+    simde__m128i bit_met_p3_p1 = simde_mm_subs_epi16(psi_a_p3_p1, a_sq_p3_p1);
     bit_met_p3_p1 = simde_mm_adds_epi16(bit_met_p3_p1, y0_p_3_1);
     bit_met_p3_p1 = simde_mm_subs_epi16(bit_met_p3_p1, ch_mag_over_2);
 
-    __m128i bit_met_p3_p3 = simde_mm_subs_epi16(psi_a_p3_p3, a_sq_p3_p3);
+    simde__m128i bit_met_p3_p3 = simde_mm_subs_epi16(psi_a_p3_p3, a_sq_p3_p3);
     bit_met_p3_p3 = simde_mm_adds_epi16(bit_met_p3_p3, y0_p_3_3);
     bit_met_p3_p3 = simde_mm_subs_epi16(bit_met_p3_p3, ch_mag_9_over_10);
 
-    __m128i bit_met_p3_m1 = simde_mm_subs_epi16(psi_a_p3_m1, a_sq_p3_m1);
+    simde__m128i bit_met_p3_m1 = simde_mm_subs_epi16(psi_a_p3_m1, a_sq_p3_m1);
     bit_met_p3_m1 = simde_mm_adds_epi16(bit_met_p3_m1, y0_m_3_1);
     bit_met_p3_m1 = simde_mm_subs_epi16(bit_met_p3_m1, ch_mag_over_2);
 
-    __m128i bit_met_p3_m3 = simde_mm_subs_epi16(psi_a_p3_m3, a_sq_p3_m3);
+    simde__m128i bit_met_p3_m3 = simde_mm_subs_epi16(psi_a_p3_m3, a_sq_p3_m3);
     bit_met_p3_m3 = simde_mm_adds_epi16(bit_met_p3_m3, y0_m_3_3);
     bit_met_p3_m3 = simde_mm_subs_epi16(bit_met_p3_m3, ch_mag_9_over_10);
 
-    __m128i bit_met_m1_p1 = simde_mm_subs_epi16(psi_a_m1_p1, a_sq_m1_p1);
+    simde__m128i bit_met_m1_p1 = simde_mm_subs_epi16(psi_a_m1_p1, a_sq_m1_p1);
     bit_met_m1_p1 = simde_mm_subs_epi16(bit_met_m1_p1, y0_m_1_1);
     bit_met_m1_p1 = simde_mm_subs_epi16(bit_met_m1_p1, ch_mag_over_10);
 
-    __m128i bit_met_m1_p3 = simde_mm_subs_epi16(psi_a_m1_p3, a_sq_m1_p3);
+    simde__m128i bit_met_m1_p3 = simde_mm_subs_epi16(psi_a_m1_p3, a_sq_m1_p3);
     bit_met_m1_p3 = simde_mm_subs_epi16(bit_met_m1_p3, y0_m_1_3);
     bit_met_m1_p3 = simde_mm_subs_epi16(bit_met_m1_p3, ch_mag_over_2);
 
-    __m128i bit_met_m1_m1 = simde_mm_subs_epi16(psi_a_m1_m1, a_sq_m1_m1);
+    simde__m128i bit_met_m1_m1 = simde_mm_subs_epi16(psi_a_m1_m1, a_sq_m1_m1);
     bit_met_m1_m1 = simde_mm_subs_epi16(bit_met_m1_m1, y0_p_1_1);
     bit_met_m1_m1 = simde_mm_subs_epi16(bit_met_m1_m1, ch_mag_over_10);
 
-    __m128i bit_met_m1_m3 = simde_mm_subs_epi16(psi_a_m1_m3, a_sq_m1_m3);
+    simde__m128i bit_met_m1_m3 = simde_mm_subs_epi16(psi_a_m1_m3, a_sq_m1_m3);
     bit_met_m1_m3 = simde_mm_subs_epi16(bit_met_m1_m3, y0_p_1_3);
     bit_met_m1_m3 = simde_mm_subs_epi16(bit_met_m1_m3, ch_mag_over_2);
 
-    __m128i bit_met_m3_p1 = simde_mm_subs_epi16(psi_a_m3_p1, a_sq_m3_p1);
+    simde__m128i bit_met_m3_p1 = simde_mm_subs_epi16(psi_a_m3_p1, a_sq_m3_p1);
     bit_met_m3_p1 = simde_mm_subs_epi16(bit_met_m3_p1, y0_m_3_1);
     bit_met_m3_p1 = simde_mm_subs_epi16(bit_met_m3_p1, ch_mag_over_2);
 
-    __m128i bit_met_m3_p3 = simde_mm_subs_epi16(psi_a_m3_p3, a_sq_m3_p3);
+    simde__m128i bit_met_m3_p3 = simde_mm_subs_epi16(psi_a_m3_p3, a_sq_m3_p3);
     bit_met_m3_p3 = simde_mm_subs_epi16(bit_met_m3_p3, y0_m_3_3);
     bit_met_m3_p3 = simde_mm_subs_epi16(bit_met_m3_p3, ch_mag_9_over_10);
 
-    __m128i bit_met_m3_m1 = simde_mm_subs_epi16(psi_a_m3_m1, a_sq_m3_m1);
+    simde__m128i bit_met_m3_m1 = simde_mm_subs_epi16(psi_a_m3_m1, a_sq_m3_m1);
     bit_met_m3_m1 = simde_mm_subs_epi16(bit_met_m3_m1, y0_p_3_1);
     bit_met_m3_m1 = simde_mm_subs_epi16(bit_met_m3_m1, ch_mag_over_2);
 
-    __m128i bit_met_m3_m3 = simde_mm_subs_epi16(psi_a_m3_m3, a_sq_m3_m3);
+    simde__m128i bit_met_m3_m3 = simde_mm_subs_epi16(psi_a_m3_m3, a_sq_m3_m3);
     bit_met_m3_m3 = simde_mm_subs_epi16(bit_met_m3_m3, y0_p_3_3);
     bit_met_m3_m3 = simde_mm_subs_epi16(bit_met_m3_m3, ch_mag_9_over_10);
 
@@ -1186,7 +1088,7 @@ void nr_ulsch_qam16_qam16(c16_t *stream0_in,
     // LLR = lambda(c==1) - lambda(c==0)
 
     // LLR of the first bit: Bit = 1
-    __m128i logmax_num_re0 = max_epi16(bit_met_m1_p1,
+    simde__m128i logmax_num_re0 = max_epi16(bit_met_m1_p1,
                                        bit_met_m1_p3,
                                        bit_met_m1_m1,
                                        bit_met_m1_m3,
@@ -1196,7 +1098,7 @@ void nr_ulsch_qam16_qam16(c16_t *stream0_in,
                                        bit_met_m3_m3);
 
     // LLR of the first bit: Bit = 0
-    __m128i logmax_den_re0 = max_epi16(bit_met_p1_p1,
+    simde__m128i logmax_den_re0 = max_epi16(bit_met_p1_p1,
                                        bit_met_p1_p3,
                                        bit_met_p1_m1,
                                        bit_met_p1_m3,
@@ -1206,7 +1108,7 @@ void nr_ulsch_qam16_qam16(c16_t *stream0_in,
                                        bit_met_p3_m3);
 
     // LLR of the second bit: Bit = 1
-    __m128i logmax_num_re1 = max_epi16(bit_met_p1_m1,
+    simde__m128i logmax_num_re1 = max_epi16(bit_met_p1_m1,
                                        bit_met_p3_m1,
                                        bit_met_m1_m1,
                                        bit_met_m3_m1,
@@ -1216,7 +1118,7 @@ void nr_ulsch_qam16_qam16(c16_t *stream0_in,
                                        bit_met_m3_m3);
 
     // LLR of the second bit: Bit = 0
-    __m128i logmax_den_re1 = max_epi16(bit_met_p1_p1,
+    simde__m128i logmax_den_re1 = max_epi16(bit_met_p1_p1,
                                        bit_met_p3_p1,
                                        bit_met_m1_p1,
                                        bit_met_m3_p1,
@@ -1226,7 +1128,7 @@ void nr_ulsch_qam16_qam16(c16_t *stream0_in,
                                        bit_met_m3_p3);
 
     // LLR of the third bit: Bit = 1
-    __m128i logmax_num_im0 = max_epi16(bit_met_m3_p1,
+    simde__m128i logmax_num_im0 = max_epi16(bit_met_m3_p1,
                                        bit_met_m3_p3,
                                        bit_met_m3_m1,
                                        bit_met_m3_m3,
@@ -1236,7 +1138,7 @@ void nr_ulsch_qam16_qam16(c16_t *stream0_in,
                                        bit_met_p3_m3);
 
     // LLR of the third bit: Bit = 0
-    __m128i logmax_den_im0 = max_epi16(bit_met_m1_p1,
+    simde__m128i logmax_den_im0 = max_epi16(bit_met_m1_p1,
                                        bit_met_m1_p3,
                                        bit_met_m1_m1,
                                        bit_met_m1_m3,
@@ -1246,7 +1148,7 @@ void nr_ulsch_qam16_qam16(c16_t *stream0_in,
                                        bit_met_p1_m3);
 
     // LLR of the fourth bit: Bit = 1
-    __m128i logmax_num_im1 = max_epi16(bit_met_p1_m3,
+    simde__m128i logmax_num_im1 = max_epi16(bit_met_p1_m3,
                                        bit_met_p3_m3,
                                        bit_met_m1_m3,
                                        bit_met_m3_m3,
@@ -1256,7 +1158,7 @@ void nr_ulsch_qam16_qam16(c16_t *stream0_in,
                                        bit_met_m3_p3);
 
     // LLR of the fourth bit: Bit = 0
-    __m128i logmax_den_im1 = max_epi16(bit_met_p1_m1,
+    simde__m128i logmax_den_im1 = max_epi16(bit_met_p1_m1,
                                        bit_met_p3_m1,
                                        bit_met_m1_m1,
                                        bit_met_m3_m1,
@@ -1281,8 +1183,8 @@ void nr_ulsch_qam16_qam16(c16_t *stream0_in,
     stream0_128i_out[2 * i + 3] = simde_mm_unpackhi_epi32(xmm1, xmm3); // 8 LLRs, 2 REs
   }
 
-  _mm_empty();
-  _m_empty();
+  simde_mm_empty();
+  simde_m_empty();
 }
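// [Editor's note — illustrative sketch, not part of the patch.] The Q-format
// constants in this function (e.g. ONE_OVER_SQRT_10 = round(1/sqrt(10)*2^16))
// work because simde_mm_mulhi_epi16 keeps the high 16 bits of the signed
// 32-bit product, i.e. (a*c) >> 16, so multiplying by round(c*2^16)
// approximates scaling by the real-valued constant c. Scalar equivalent
// (assumes arithmetic right shift of negative values, as on the compilers
// this project targets):
#include <stdint.h>

static inline int16_t fixmul_q16(int16_t a, int16_t c_q16)
{
  return (int16_t)(((int32_t)a * (int32_t)c_q16) >> 16); // per-lane mulhi_epi16
}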
 
 static const int16_t ones256[16] __attribute__((aligned(32))) = {0xffff,
@@ -1309,7 +1211,7 @@ static const int16_t ones256[16] __attribute__((aligned(32))) = {0xffff,
 // interval x>6
 #define interference_abs_64qam_epi16_256(psi, int_ch_mag, int_two_ch_mag, int_three_ch_mag, a, c1, c3, c5, c7) \
   tmp_result = simde_mm256_cmpgt_epi16(int_two_ch_mag, psi);                                               \
-  tmp_result3 = simde_mm256_xor_si256(tmp_result, (*(__m256i *)&ones256[0]));                              \
+  tmp_result3 = simde_mm256_xor_si256(tmp_result, (*(simde__m256i *)&ones256[0]));                              \
   tmp_result2 = simde_mm256_cmpgt_epi16(int_ch_mag, psi);                                                  \
   tmp_result = simde_mm256_xor_si256(tmp_result, tmp_result2);                                             \
   tmp_result4 = simde_mm256_cmpgt_epi16(psi, int_three_ch_mag);                                            \
@@ -1369,54 +1271,54 @@ void nr_ulsch_qam64_qam64(c16_t *stream0_in,
                           c16_t *rho01,
                           uint32_t length)
 {
-  __m256i *rho01_256i = (__m256i *)rho01;
-  __m256i *stream0_256i_in = (__m256i *)stream0_in;
-  __m256i *stream1_256i_in = (__m256i *)stream1_in;
-  __m256i *ch_mag_256i = (__m256i *)ch_mag;
-  __m256i *ch_mag_256i_i = (__m256i *)ch_mag_i;
-
-  __m256i ONE_OVER_SQRT_42 = simde_mm256_broadcastw_epi16(_mm_set1_epi16(10112)); // round(1/sqrt(42)*2^16)
-  __m256i THREE_OVER_SQRT_42 = simde_mm256_broadcastw_epi16(_mm_set1_epi16(30337)); // round(3/sqrt(42)*2^16)
-  __m256i FIVE_OVER_SQRT_42 = simde_mm256_broadcastw_epi16(_mm_set1_epi16(25281)); // round(5/sqrt(42)*2^15)
-  __m256i SEVEN_OVER_SQRT_42 = simde_mm256_broadcastw_epi16(_mm_set1_epi16(17697)); // round(7/sqrt(42)*2^14) Q2.14
-  __m256i ONE_OVER_SQRT_2 = simde_mm256_broadcastw_epi16(_mm_set1_epi16(23170)); // round(1/sqrt(2)*2^15)
-  __m256i ONE_OVER_SQRT_2_42 = simde_mm256_broadcastw_epi16(_mm_set1_epi16(3575)); // round(1/sqrt(2*42)*2^15)
-  __m256i THREE_OVER_SQRT_2_42 = simde_mm256_broadcastw_epi16(_mm_set1_epi16(10726)); // round(3/sqrt(2*42)*2^15)
-  __m256i FIVE_OVER_SQRT_2_42 = simde_mm256_broadcastw_epi16(_mm_set1_epi16(17876)); // round(5/sqrt(2*42)*2^15)
-  __m256i SEVEN_OVER_SQRT_2_42 = simde_mm256_broadcastw_epi16(_mm_set1_epi16(25027)); // round(7/sqrt(2*42)*2^15)
-  __m256i FORTYNINE_OVER_FOUR_SQRT_42 = simde_mm256_broadcastw_epi16(_mm_set1_epi16(30969)); // round(49/(4*sqrt(42))*2^14), Q2.14
-  __m256i THIRTYSEVEN_OVER_FOUR_SQRT_42 = simde_mm256_broadcastw_epi16(_mm_set1_epi16(23385)); // round(37/(4*sqrt(42))*2^14), Q2.14
-  __m256i TWENTYFIVE_OVER_FOUR_SQRT_42 = simde_mm256_broadcastw_epi16(_mm_set1_epi16(31601)); // round(25/(4*sqrt(42))*2^15)
-  __m256i TWENTYNINE_OVER_FOUR_SQRT_42 = simde_mm256_broadcastw_epi16(_mm_set1_epi16(18329)); // round(29/(4*sqrt(42))*2^15), Q2.14
-  __m256i SEVENTEEN_OVER_FOUR_SQRT_42 = simde_mm256_broadcastw_epi16(_mm_set1_epi16(21489)); // round(17/(4*sqrt(42))*2^15)
-  __m256i NINE_OVER_FOUR_SQRT_42 = simde_mm256_broadcastw_epi16(_mm_set1_epi16(11376)); // round(9/(4*sqrt(42))*2^15)
-  __m256i THIRTEEN_OVER_FOUR_SQRT_42 = simde_mm256_broadcastw_epi16(_mm_set1_epi16(16433)); // round(13/(4*sqrt(42))*2^15)
-  __m256i FIVE_OVER_FOUR_SQRT_42 = simde_mm256_broadcastw_epi16(_mm_set1_epi16(6320)); // round(5/(4*sqrt(42))*2^15)
-  __m256i ONE_OVER_FOUR_SQRT_42 = simde_mm256_broadcastw_epi16(_mm_set1_epi16(1264)); // round(1/(4*sqrt(42))*2^15)
-  __m256i SQRT_42_OVER_FOUR = simde_mm256_broadcastw_epi16(_mm_set1_epi16(13272)); // round(sqrt(42)/4*2^13), Q3.12
-
-  __m256i ch_mag_des;
-  __m256i ch_mag_int;
-  __m256i ch_mag_98_over_42_with_sigma2;
-  __m256i ch_mag_74_over_42_with_sigma2;
-  __m256i ch_mag_58_over_42_with_sigma2;
-  __m256i ch_mag_50_over_42_with_sigma2;
-  __m256i ch_mag_34_over_42_with_sigma2;
-  __m256i ch_mag_18_over_42_with_sigma2;
-  __m256i ch_mag_26_over_42_with_sigma2;
-  __m256i ch_mag_10_over_42_with_sigma2;
-  __m256i ch_mag_2_over_42_with_sigma2;
-  __m256i y0r_one_over_sqrt_21;
-  __m256i y0r_three_over_sqrt_21;
-  __m256i y0r_five_over_sqrt_21;
-  __m256i y0r_seven_over_sqrt_21;
-  __m256i y0i_one_over_sqrt_21;
-  __m256i y0i_three_over_sqrt_21;
-  __m256i y0i_five_over_sqrt_21;
-  __m256i y0i_seven_over_sqrt_21;
-  __m256i ch_mag_int_with_sigma2;
-  __m256i two_ch_mag_int_with_sigma2;
-  __m256i three_ch_mag_int_with_sigma2;
+  simde__m256i *rho01_256i = (simde__m256i *)rho01;
+  simde__m256i *stream0_256i_in = (simde__m256i *)stream0_in;
+  simde__m256i *stream1_256i_in = (simde__m256i *)stream1_in;
+  simde__m256i *ch_mag_256i = (simde__m256i *)ch_mag;
+  simde__m256i *ch_mag_256i_i = (simde__m256i *)ch_mag_i;
+
+  simde__m256i ONE_OVER_SQRT_42 = simde_mm256_broadcastw_epi16(simde_mm_set1_epi16(10112)); // round(1/sqrt(42)*2^16)
+  simde__m256i THREE_OVER_SQRT_42 = simde_mm256_broadcastw_epi16(simde_mm_set1_epi16(30337)); // round(3/sqrt(42)*2^16)
+  simde__m256i FIVE_OVER_SQRT_42 = simde_mm256_broadcastw_epi16(simde_mm_set1_epi16(25281)); // round(5/sqrt(42)*2^15)
+  simde__m256i SEVEN_OVER_SQRT_42 = simde_mm256_broadcastw_epi16(simde_mm_set1_epi16(17697)); // round(7/sqrt(42)*2^14) Q2.14
+  simde__m256i ONE_OVER_SQRT_2 = simde_mm256_broadcastw_epi16(simde_mm_set1_epi16(23170)); // round(1/sqrt(2)*2^15)
+  simde__m256i ONE_OVER_SQRT_2_42 = simde_mm256_broadcastw_epi16(simde_mm_set1_epi16(3575)); // round(1/sqrt(2*42)*2^15)
+  simde__m256i THREE_OVER_SQRT_2_42 = simde_mm256_broadcastw_epi16(simde_mm_set1_epi16(10726)); // round(3/sqrt(2*42)*2^15)
+  simde__m256i FIVE_OVER_SQRT_2_42 = simde_mm256_broadcastw_epi16(simde_mm_set1_epi16(17876)); // round(5/sqrt(2*42)*2^15)
+  simde__m256i SEVEN_OVER_SQRT_2_42 = simde_mm256_broadcastw_epi16(simde_mm_set1_epi16(25027)); // round(7/sqrt(2*42)*2^15)
+  simde__m256i FORTYNINE_OVER_FOUR_SQRT_42 = simde_mm256_broadcastw_epi16(simde_mm_set1_epi16(30969)); // round(49/(4*sqrt(42))*2^14), Q2.14
+  simde__m256i THIRTYSEVEN_OVER_FOUR_SQRT_42 = simde_mm256_broadcastw_epi16(simde_mm_set1_epi16(23385)); // round(37/(4*sqrt(42))*2^14), Q2.14
+  simde__m256i TWENTYFIVE_OVER_FOUR_SQRT_42 = simde_mm256_broadcastw_epi16(simde_mm_set1_epi16(31601)); // round(25/(4*sqrt(42))*2^15)
+  simde__m256i TWENTYNINE_OVER_FOUR_SQRT_42 = simde_mm256_broadcastw_epi16(simde_mm_set1_epi16(18329)); // round(29/(4*sqrt(42))*2^15), Q2.14
+  simde__m256i SEVENTEEN_OVER_FOUR_SQRT_42 = simde_mm256_broadcastw_epi16(simde_mm_set1_epi16(21489)); // round(17/(4*sqrt(42))*2^15)
+  simde__m256i NINE_OVER_FOUR_SQRT_42 = simde_mm256_broadcastw_epi16(simde_mm_set1_epi16(11376)); // round(9/(4*sqrt(42))*2^15)
+  simde__m256i THIRTEEN_OVER_FOUR_SQRT_42 = simde_mm256_broadcastw_epi16(simde_mm_set1_epi16(16433)); // round(13/(4*sqrt(42))*2^15)
+  simde__m256i FIVE_OVER_FOUR_SQRT_42 = simde_mm256_broadcastw_epi16(simde_mm_set1_epi16(6320)); // round(5/(4*sqrt(42))*2^15)
+  simde__m256i ONE_OVER_FOUR_SQRT_42 = simde_mm256_broadcastw_epi16(simde_mm_set1_epi16(1264)); // round(1/(4*sqrt(42))*2^15)
+  simde__m256i SQRT_42_OVER_FOUR = simde_mm256_broadcastw_epi16(simde_mm_set1_epi16(13272)); // round(sqrt(42)/4*2^13), Q3.12
+
+  simde__m256i ch_mag_des;
+  simde__m256i ch_mag_int;
+  simde__m256i ch_mag_98_over_42_with_sigma2;
+  simde__m256i ch_mag_74_over_42_with_sigma2;
+  simde__m256i ch_mag_58_over_42_with_sigma2;
+  simde__m256i ch_mag_50_over_42_with_sigma2;
+  simde__m256i ch_mag_34_over_42_with_sigma2;
+  simde__m256i ch_mag_18_over_42_with_sigma2;
+  simde__m256i ch_mag_26_over_42_with_sigma2;
+  simde__m256i ch_mag_10_over_42_with_sigma2;
+  simde__m256i ch_mag_2_over_42_with_sigma2;
+  simde__m256i y0r_one_over_sqrt_21;
+  simde__m256i y0r_three_over_sqrt_21;
+  simde__m256i y0r_five_over_sqrt_21;
+  simde__m256i y0r_seven_over_sqrt_21;
+  simde__m256i y0i_one_over_sqrt_21;
+  simde__m256i y0i_three_over_sqrt_21;
+  simde__m256i y0i_five_over_sqrt_21;
+  simde__m256i y0i_seven_over_sqrt_21;
+  simde__m256i ch_mag_int_with_sigma2;
+  simde__m256i two_ch_mag_int_with_sigma2;
+  simde__m256i three_ch_mag_int_with_sigma2;
 
   uint32_t len256 = length >> 3;
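The fixed-point constants above can be reproduced with a one-line helper; a hypothetical generator, not part of the tree. It yields 10112 for round(1/sqrt(42)*2^16) and 25281 for round(5/sqrt(42)*2^15), matching the tabled values:

    #include <math.h>
    #include <stdint.h>

    static int16_t qconst(double x, int frac_bits) {
      return (int16_t)lrint(x * (double)(1 << frac_bits));
    }
    /* usage:
         qconst(1.0 / sqrt(42.0), 16)  ->  10112  (ONE_OVER_SQRT_42)
         qconst(5.0 / sqrt(42.0), 15)  ->  25281  (FIVE_OVER_SQRT_42) */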
 
@@ -1490,7 +1392,7 @@ void nr_ulsch_qam64_qam64(c16_t *stream0_in,
     simde_mm256_separate_real_imag_parts(&y1r, &y1i, stream1_256i_in[i], stream1_256i_in[i + 1]);
 
     // Psi_r calculation from rho_rpi or rho_rmi
-    xmm0 = simde_mm256_broadcastw_epi16(_mm_set1_epi16(0)); // ZERO for abs_pi16
+    xmm0 = simde_mm256_broadcastw_epi16(simde_mm_set1_epi16(0)); // ZERO for abs_pi16
     xmm2 = simde_mm256_subs_epi16(rho_rpi_7_7, y1r);
 
     simde__m256i psi_r_p7_p7 = simde_mm256_abs_epi16(xmm2);
@@ -3880,8 +3782,8 @@ void nr_ulsch_qam64_qam64(c16_t *stream0_in,
     }
   }
 
-  _mm_empty();
-  _m_empty();
+  simde_mm_empty();
+  simde_m_empty();
 }
 
 void nr_ulsch_compute_ML_llr(int32_t **rxdataF_comp,
@@ -3925,8 +3827,8 @@ void nr_ulsch_compute_ML_llr(int32_t **rxdataF_comp,
 
 void nr_ulsch_shift_llr(int16_t **llr_layers, uint32_t nb_re, uint32_t rxdataF_ext_offset, uint8_t mod_order, int shift)
 {
-  __m128i *llr_layers0 = (__m128i *)&llr_layers[0][rxdataF_ext_offset * mod_order];
-  __m128i *llr_layers1 = (__m128i *)&llr_layers[1][rxdataF_ext_offset * mod_order];
+  simde__m128i *llr_layers0 = (simde__m128i *)&llr_layers[0][rxdataF_ext_offset * mod_order];
+  simde__m128i *llr_layers1 = (simde__m128i *)&llr_layers[1][rxdataF_ext_offset * mod_order];
 
   uint8_t mem_offset = ((16 - ((long)llr_layers0)) & 0xF) >> 2;
 
@@ -3937,8 +3839,8 @@ void nr_ulsch_shift_llr(int16_t **llr_layers, uint32_t nb_re, uint32_t rxdataF_e
       llr_layers0_c16[i] = c16Shift(llr_layers0_c16[i], shift);
       llr_layers1_c16[i] = c16Shift(llr_layers1_c16[i], shift);
     }
-    llr_layers0 = (__m128i *)&llr_layers[0][rxdataF_ext_offset * mod_order + (mem_offset << 1)];
-    llr_layers1 = (__m128i *)&llr_layers[1][rxdataF_ext_offset * mod_order + (mem_offset << 1)];
+    llr_layers0 = (simde__m128i *)&llr_layers[0][rxdataF_ext_offset * mod_order + (mem_offset << 1)];
+    llr_layers1 = (simde__m128i *)&llr_layers[1][rxdataF_ext_offset * mod_order + (mem_offset << 1)];
   }
 
   for (int i = 0; i < nb_re >> 2; i++) {
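The head-alignment arithmetic in nr_ulsch_shift_llr is worth spelling out: (16 - addr) & 0xF is the byte distance to the next 16-byte boundary, and >> 2 converts it to a count of 4-byte c16_t LLR pairs that must be handled scalar before aligned 128-bit accesses. A sketch of the same computation:

    #include <stdint.h>

    static unsigned head_elems(const void *p) {
      /* bytes to the next 16-byte boundary, in units of 4-byte c16_t LLRs */
      return (unsigned)(((16 - (uintptr_t)p) & 0xF) >> 2);
    }
    /* e.g. a pointer ending in ...8 is 8 bytes short of alignment,
       so head_elems() == 2 complex LLRs are shifted before the vector loop */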
diff --git a/openair1/PHY/NR_TRANSPORT/pucch_rx.c b/openair1/PHY/NR_TRANSPORT/pucch_rx.c
index 58048c522482b7d301882f14a60067fafdfa33d2..5b1436a55568019806f958996899fb8ebdb3a26c 100644
--- a/openair1/PHY/NR_TRANSPORT/pucch_rx.c
+++ b/openair1/PHY/NR_TRANSPORT/pucch_rx.c
@@ -269,11 +269,11 @@ void nr_decode_pucch0(PHY_VARS_gNB *gNB,
       if(re_offset[l] + nb_re_pucch > frame_parms->ofdm_symbol_size) {
         int neg_length = frame_parms->ofdm_symbol_size-re_offset[l];
         int pos_length = nb_re_pucch-neg_length;
-        memcpy1((void*)rp[aa][l],(void*)&tmp_rp[re_offset[l]],neg_length*sizeof(int32_t));
-        memcpy1((void*)&rp[aa][l][neg_length],(void*)tmp_rp,pos_length*sizeof(int32_t));
+        memcpy((void*)rp[aa][l],(void*)&tmp_rp[re_offset[l]],neg_length*sizeof(int32_t));
+        memcpy((void*)&rp[aa][l][neg_length],(void*)tmp_rp,pos_length*sizeof(int32_t));
       }
       else
-        memcpy1((void*)rp[aa][l],(void*)&tmp_rp[re_offset[l]],nb_re_pucch*sizeof(int32_t));
+        memcpy((void*)rp[aa][l],(void*)&tmp_rp[re_offset[l]],nb_re_pucch*sizeof(int32_t));
 
       c16_t *r = (c16_t*)&rp[aa][l];
 
@@ -957,21 +957,21 @@ void nr_decode_pucch1(c16_t **rxdataF,
   }
 }
 
-__m256i pucch2_3bit[8*2];
-__m256i pucch2_4bit[16*2];
-__m256i pucch2_5bit[32*2];
-__m256i pucch2_6bit[64*2];
-__m256i pucch2_7bit[128*2];
-__m256i pucch2_8bit[256*2];
-__m256i pucch2_9bit[512*2];
-__m256i pucch2_10bit[1024*2];
-__m256i pucch2_11bit[2048*2];
-
-static __m256i *const pucch2_lut[9] =
-    {pucch2_3bit, pucch2_4bit, pucch2_5bit, pucch2_6bit, pucch2_7bit, pucch2_8bit, pucch2_9bit, pucch2_10bit, pucch2_11bit};
-
-__m64 pucch2_polar_4bit[16];
-__m128i pucch2_polar_llr_num_lut[256],pucch2_polar_llr_den_lut[256];
+simde__m256i pucch2_3bit[8*2];
+simde__m256i pucch2_4bit[16*2];
+simde__m256i pucch2_5bit[32*2];
+simde__m256i pucch2_6bit[64*2];
+simde__m256i pucch2_7bit[128*2];
+simde__m256i pucch2_8bit[256*2];
+simde__m256i pucch2_9bit[512*2];
+simde__m256i pucch2_10bit[1024*2];
+simde__m256i pucch2_11bit[2048*2];
+
+static simde__m256i *const pucch2_lut[9] =
+    {pucch2_3bit, pucch2_4bit, pucch2_5bit, pucch2_6bit, pucch2_7bit, pucch2_8bit, pucch2_9bit, pucch2_10bit, pucch2_11bit};
+
+simde__m64 pucch2_polar_4bit[16];
+simde__m128i pucch2_polar_llr_num_lut[256],pucch2_polar_llr_den_lut[256];
 
 void init_pucch2_luts() {
 
@@ -984,8 +991,8 @@ void init_pucch2_luts() {
 #ifdef DEBUG_NR_PUCCH_RX
       if (b==3) printf("in %d, out %x\n",i,out);
 #endif
-      __m256i *lut_i=&pucch2_lut[b-3][i<<1];
-      __m256i *lut_ip1=&pucch2_lut[b-3][1+(i<<1)];
+      simde__m256i *lut_i=&pucch2_lut[b-3][i<<1];
+      simde__m256i *lut_ip1=&pucch2_lut[b-3][1+(i<<1)];
       bit = (out&0x1) > 0 ? -1 : 1;
       *lut_i = simde_mm256_insert_epi16(*lut_i,bit,0);
       bit = (out&0x2) > 0 ? -1 : 1;
@@ -1053,51 +1060,51 @@ void init_pucch2_luts() {
     }
   }
   for (uint16_t i=0;i<16;i++) {
-    __m64 *lut_i=&pucch2_polar_4bit[i];
+    simde__m64 *lut_i=&pucch2_polar_4bit[i];
 
     bit = (i&0x1) > 0 ? -1 : 1;
-    *lut_i = _mm_insert_pi16(*lut_i,bit,0);
+    *lut_i = simde_mm_insert_pi16(*lut_i,bit,0);
     bit = (i&0x2) > 0 ? -1 : 1;
-    *lut_i = _mm_insert_pi16(*lut_i,bit,1);
+    *lut_i = simde_mm_insert_pi16(*lut_i,bit,1);
     bit = (i&0x4) > 0 ? -1 : 1;
-    *lut_i = _mm_insert_pi16(*lut_i,bit,2);
+    *lut_i = simde_mm_insert_pi16(*lut_i,bit,2);
     bit = (i&0x8) > 0 ? -1 : 1;
-    *lut_i = _mm_insert_pi16(*lut_i,bit,3);
+    *lut_i = simde_mm_insert_pi16(*lut_i,bit,3);
   }
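Scalar mirror of the pucch2_polar_4bit fill above (a sketch, with a hypothetical lut4 array standing in for the simde__m64 table): bit k of index i maps to +1 in lane k when clear and -1 when set, so a lane-wise multiply with received symbols applies the candidate codeword.

    #include <stdint.h>

    static int16_t lut4[16][4]; /* hypothetical scalar stand-in for pucch2_polar_4bit */

    static void fill_lut4(void) {
      for (int i = 0; i < 16; i++)
        for (int k = 0; k < 4; k++)
          lut4[i][k] = (i & (1 << k)) ? -1 : 1; /* bit set -> -1, clear -> +1 */
    }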
   for (int i=0;i<256;i++) {
-    __m128i *lut_num_i=&pucch2_polar_llr_num_lut[i];
-    __m128i *lut_den_i=&pucch2_polar_llr_den_lut[i];
+    simde__m128i *lut_num_i=&pucch2_polar_llr_num_lut[i];
+    simde__m128i *lut_den_i=&pucch2_polar_llr_den_lut[i];
     bit = (i&0x1) > 0 ? 0 : 1;
-   *lut_num_i = _mm_insert_epi16(*lut_num_i,bit,0);
-   *lut_den_i = _mm_insert_epi16(*lut_den_i,1-bit,0);
+   *lut_num_i = simde_mm_insert_epi16(*lut_num_i,bit,0);
+   *lut_den_i = simde_mm_insert_epi16(*lut_den_i,1-bit,0);
 
     bit = (i&0x10) > 0 ? 0 : 1;
-   *lut_num_i = _mm_insert_epi16(*lut_num_i,bit,1);
-   *lut_den_i = _mm_insert_epi16(*lut_den_i,1-bit,1);
+   *lut_num_i = simde_mm_insert_epi16(*lut_num_i,bit,1);
+   *lut_den_i = simde_mm_insert_epi16(*lut_den_i,1-bit,1);
 
     bit = (i&0x2) > 0 ? 0 : 1;
-   *lut_num_i = _mm_insert_epi16(*lut_num_i,bit,2);
-   *lut_den_i = _mm_insert_epi16(*lut_den_i,1-bit,2);
+   *lut_num_i = simde_mm_insert_epi16(*lut_num_i,bit,2);
+   *lut_den_i = simde_mm_insert_epi16(*lut_den_i,1-bit,2);
 
     bit = (i&0x20) > 0 ? 0 : 1;
-   *lut_num_i = _mm_insert_epi16(*lut_num_i,bit,3);
-   *lut_den_i = _mm_insert_epi16(*lut_den_i,1-bit,3);
+   *lut_num_i = simde_mm_insert_epi16(*lut_num_i,bit,3);
+   *lut_den_i = simde_mm_insert_epi16(*lut_den_i,1-bit,3);
 
     bit = (i&0x4) > 0 ? 0 : 1;
-   *lut_num_i = _mm_insert_epi16(*lut_num_i,bit,4);
-   *lut_den_i = _mm_insert_epi16(*lut_den_i,1-bit,4);
+   *lut_num_i = simde_mm_insert_epi16(*lut_num_i,bit,4);
+   *lut_den_i = simde_mm_insert_epi16(*lut_den_i,1-bit,4);
 
     bit = (i&0x40) > 0 ? 0 : 1;
-   *lut_num_i = _mm_insert_epi16(*lut_num_i,bit,5);
-   *lut_den_i = _mm_insert_epi16(*lut_den_i,1-bit,5);
+   *lut_num_i = simde_mm_insert_epi16(*lut_num_i,bit,5);
+   *lut_den_i = simde_mm_insert_epi16(*lut_den_i,1-bit,5);
 
     bit = (i&0x8) > 0 ? 0 : 1;
-   *lut_num_i = _mm_insert_epi16(*lut_num_i,bit,6);
-   *lut_den_i = _mm_insert_epi16(*lut_den_i,1-bit,6);
+   *lut_num_i = simde_mm_insert_epi16(*lut_num_i,bit,6);
+   *lut_den_i = simde_mm_insert_epi16(*lut_den_i,1-bit,6);
 
     bit = (i&0x80) > 0 ? 0 : 1;
-   *lut_num_i = _mm_insert_epi16(*lut_num_i,bit,7);
-   *lut_den_i = _mm_insert_epi16(*lut_den_i,1-bit,7);
+   *lut_num_i = simde_mm_insert_epi16(*lut_num_i,bit,7);
+   *lut_den_i = simde_mm_insert_epi16(*lut_den_i,1-bit,7);
 
 #ifdef DEBUG_NR_PUCCH_RX
    printf("i %d, lut_num (%d,%d,%d,%d,%d,%d,%d,%d)\n",i,
@@ -1164,13 +1171,13 @@ void nr_decode_pucch2(PHY_VARS_gNB *gNB,
       c16_t *tmp_rp = ((c16_t *)&rxdataF[aa][soffset + (l2 + symb) * frame_parms->ofdm_symbol_size]);
 
       if (re_offset[symb] + nb_re_pucch < frame_parms->ofdm_symbol_size) {
-        memcpy1(rp[aa][symb], &tmp_rp[re_offset[symb]], nb_re_pucch * sizeof(c16_t));
+        memcpy(rp[aa][symb], &tmp_rp[re_offset[symb]], nb_re_pucch * sizeof(c16_t));
       }
       else {
         int neg_length = frame_parms->ofdm_symbol_size-re_offset[symb];
         int pos_length = nb_re_pucch-neg_length;
-        memcpy1(rp[aa][symb], &tmp_rp[re_offset[symb]], neg_length * sizeof(c16_t));
-        memcpy1(&rp[aa][symb][neg_length], tmp_rp, pos_length * sizeof(c16_t));
+        memcpy(rp[aa][symb], &tmp_rp[re_offset[symb]], neg_length * sizeof(c16_t));
+        memcpy(&rp[aa][symb][neg_length], tmp_rp, pos_length * sizeof(c16_t));
       }
     }
   }
@@ -1347,7 +1354,7 @@ void nr_decode_pucch2(PHY_VARS_gNB *gNB,
   printf("x2 %x, s %x\n",x2,s);
 #endif
   for (int symb=0;symb<pucch_pdu->nr_of_symbols;symb++) {
-    __m64 c_re[4], c_im[4];
+    simde__m64 c_re[4], c_im[4];
     int re_off=0;
     for (int prb=0;prb<prb_size_ext;prb+=2,re_off+=16) {
       for (int z = 0; z < 4; z++) {
@@ -1394,15 +1401,16 @@ void nr_decode_pucch2(PHY_VARS_gNB *gNB,
                r_re_ext[aa][symb][re_off + 15],
                r_im_ext[aa][symb][re_off + 15]);
 #endif
-        __m64 *r_re_ext_64 = (__m64 *)&r_re_ext[aa][symb][re_off];
-        __m64 *r_re_ext2_64 = (__m64 *)&r_re_ext2[aa][symb][re_off];
-        __m64 *r_im_ext_64 = (__m64 *)&r_im_ext[aa][symb][re_off];
-        __m64 *r_im_ext2_64 = (__m64 *)&r_im_ext2[aa][symb][re_off];
+
+        simde__m64 *r_re_ext_64 = (simde__m64 *)&r_re_ext[aa][symb][re_off];
+        simde__m64 *r_re_ext2_64 = (simde__m64 *)&r_re_ext2[aa][symb][re_off];
+        simde__m64 *r_im_ext_64 = (simde__m64 *)&r_im_ext[aa][symb][re_off];
+        simde__m64 *r_im_ext2_64 = (simde__m64 *)&r_im_ext2[aa][symb][re_off];
         for (int z = 0; z < 4; z++) {
-          r_re_ext2_64[z] = _mm_mullo_pi16(r_re_ext_64[z], c_im[z]);
-          r_re_ext_64[z] = _mm_mullo_pi16(r_re_ext_64[z], c_re[z]);
-          r_im_ext2_64[z] = _mm_mullo_pi16(r_im_ext_64[z], c_re[z]);
-          r_im_ext_64[z] = _mm_mullo_pi16(r_im_ext_64[z], c_im[z]);
+          r_re_ext2_64[z] = simde_mm_mullo_pi16(r_re_ext_64[z], c_im[z]);
+          r_re_ext_64[z] = simde_mm_mullo_pi16(r_re_ext_64[z], c_re[z]);
+          r_im_ext2_64[z] = simde_mm_mullo_pi16(r_im_ext_64[z], c_re[z]);
+          r_im_ext_64[z] = simde_mm_mullo_pi16(r_im_ext_64[z], c_im[z]);
         }
 
 #ifdef DEBUG_NR_PUCCH_RX
@@ -1441,19 +1449,19 @@ void nr_decode_pucch2(PHY_VARS_gNB *gNB,
   uint8_t corr_dB;
   int decoderState=2;
   if (nb_bit < 12) { // short blocklength case
-    __m256i *rp_re[Prx2][2];
-    __m256i *rp2_re[Prx2][2];
-    __m256i *rp_im[Prx2][2];
-    __m256i *rp2_im[Prx2][2];
+    simde__m256i *rp_re[Prx2][2];
+    simde__m256i *rp2_re[Prx2][2];
+    simde__m256i *rp_im[Prx2][2];
+    simde__m256i *rp2_im[Prx2][2];
     for (int aa=0;aa<Prx;aa++) {
       for (int symb=0;symb<pucch_pdu->nr_of_symbols;symb++) {
-        rp_re[aa][symb] = (__m256i*)r_re_ext[aa][symb];
-        rp_im[aa][symb] = (__m256i*)r_im_ext[aa][symb];
-        rp2_re[aa][symb] = (__m256i*)r_re_ext2[aa][symb];
-        rp2_im[aa][symb] = (__m256i*)r_im_ext2[aa][symb];
+        rp_re[aa][symb] = (simde__m256i*)r_re_ext[aa][symb];
+        rp_im[aa][symb] = (simde__m256i*)r_im_ext[aa][symb];
+        rp2_re[aa][symb] = (simde__m256i*)r_re_ext2[aa][symb];
+        rp2_im[aa][symb] = (simde__m256i*)r_im_ext2[aa][symb];
       }
     }
-    __m256i prod_re[Prx2],prod_im[Prx2];
+    simde__m256i prod_re[Prx2],prod_im[Prx2];
     uint64_t corr=0;
     int cw_ML=0;
     
@@ -1556,21 +1564,21 @@ void nr_decode_pucch2(PHY_VARS_gNB *gNB,
   }
   else { // polar coded case
 
-    __m64 *rp_re[Prx2][2];
-    __m64 *rp2_re[Prx2][2];
-    __m64 *rp_im[Prx2][2];
-    __m64 *rp2_im[Prx2][2];
-    __m128i llrs[pucch_pdu->prb_size*2*pucch_pdu->nr_of_symbols];
+    simde__m64 *rp_re[Prx2][2];
+    simde__m64 *rp2_re[Prx2][2];
+    simde__m64 *rp_im[Prx2][2];
+    simde__m64 *rp2_im[Prx2][2];
+    simde__m128i llrs[pucch_pdu->prb_size*2*pucch_pdu->nr_of_symbols];
 
     for (int aa=0;aa<Prx;aa++) {
       for (int symb=0;symb<pucch_pdu->nr_of_symbols;symb++) {
-        rp_re[aa][symb] = (__m64*)r_re_ext[aa][symb];
-        rp_im[aa][symb] = (__m64*)r_im_ext[aa][symb];
-        rp2_re[aa][symb] = (__m64*)r_re_ext2[aa][symb];
-        rp2_im[aa][symb] = (__m64*)r_im_ext2[aa][symb];
+        rp_re[aa][symb] = (simde__m64*)r_re_ext[aa][symb];
+        rp_im[aa][symb] = (simde__m64*)r_im_ext[aa][symb];
+        rp2_re[aa][symb] = (simde__m64*)r_re_ext2[aa][symb];
+        rp2_im[aa][symb] = (simde__m64*)r_im_ext2[aa][symb];
       }
     }
-    __m64 prod_re[Prx2],prod_im[Prx2];
+    simde__m64 prod_re[Prx2],prod_im[Prx2];
 
 #ifdef DEBUG_NR_PUCCH_RX
     for (int cw=0;cw<16;cw++) {
@@ -1586,22 +1594,22 @@ void nr_decode_pucch2(PHY_VARS_gNB *gNB,
     
     // non-coherent LLR computation on groups of 4 REs (half-PRBs)
     int32_t corr_re,corr_im,corr_tmp;
-    __m128i corr16,llr_num,llr_den;
+    simde__m128i corr16,llr_num,llr_den;
     uint64_t corr = 0;
     for (int symb=0;symb<pucch_pdu->nr_of_symbols;symb++) {
       for (int half_prb=0;half_prb<(2*pucch_pdu->prb_size);half_prb++) {
-        llr_num=_mm_set1_epi16(0);llr_den=_mm_set1_epi16(0);
+        llr_num=simde_mm_set1_epi16(0);llr_den=simde_mm_set1_epi16(0);
         for (int cw=0;cw<256;cw++) {
           corr_tmp=0;
           for (int aa=0;aa<Prx;aa++) {
-            prod_re[aa] = _mm_srai_pi16(_mm_adds_pi16(_mm_mullo_pi16(pucch2_polar_4bit[cw&15],rp_re[aa][symb][half_prb]),
-                                                      _mm_mullo_pi16(pucch2_polar_4bit[cw>>4],rp_im[aa][symb][half_prb])),5);
-            prod_im[aa] = _mm_srai_pi16(_mm_subs_pi16(_mm_mullo_pi16(pucch2_polar_4bit[cw&15],rp2_im[aa][symb][half_prb]),
-                                                      _mm_mullo_pi16(pucch2_polar_4bit[cw>>4],rp2_re[aa][symb][half_prb])),5);
-            prod_re[aa] = _mm_hadds_pi16(prod_re[aa],prod_re[aa]);// 0+1
-            prod_im[aa] = _mm_hadds_pi16(prod_im[aa],prod_im[aa]);
-            prod_re[aa] = _mm_hadds_pi16(prod_re[aa],prod_re[aa]);// 0+1+2+3
-            prod_im[aa] = _mm_hadds_pi16(prod_im[aa],prod_im[aa]);
+            prod_re[aa] = simde_mm_srai_pi16(simde_mm_adds_pi16(simde_mm_mullo_pi16(pucch2_polar_4bit[cw&15],rp_re[aa][symb][half_prb]),
+                                                      simde_mm_mullo_pi16(pucch2_polar_4bit[cw>>4],rp_im[aa][symb][half_prb])),5);
+            prod_im[aa] = simde_mm_srai_pi16(simde_mm_subs_pi16(simde_mm_mullo_pi16(pucch2_polar_4bit[cw&15],rp2_im[aa][symb][half_prb]),
+                                                      simde_mm_mullo_pi16(pucch2_polar_4bit[cw>>4],rp2_re[aa][symb][half_prb])),5);
+            prod_re[aa] = simde_mm_hadds_pi16(prod_re[aa],prod_re[aa]);// 0+1
+            prod_im[aa] = simde_mm_hadds_pi16(prod_im[aa],prod_im[aa]);
+            prod_re[aa] = simde_mm_hadds_pi16(prod_re[aa],prod_re[aa]);// 0+1+2+3
+            prod_im[aa] = simde_mm_hadds_pi16(prod_im[aa],prod_im[aa]);
 
             // this is for UL CQI measurement
             if (cw==0) corr += ((int64_t)corr32_re[symb][half_prb>>2][aa]*corr32_re[symb][half_prb>>2][aa])+
@@ -1630,12 +1638,12 @@ void nr_decode_pucch2(PHY_VARS_gNB *gNB,
                 corr_tmp);
 */
 	}
-	corr16 = _mm_set1_epi16((int16_t)(corr_tmp>>8));
+	corr16 = simde_mm_set1_epi16((int16_t)(corr_tmp>>8));
 
 	LOG_D(PHY,"half_prb %d cw %d corr16 %d\n",half_prb,cw,corr_tmp>>8);
 
-	llr_num = _mm_max_epi16(_mm_mullo_epi16(corr16,pucch2_polar_llr_num_lut[cw]),llr_num);
-	llr_den = _mm_max_epi16(_mm_mullo_epi16(corr16,pucch2_polar_llr_den_lut[cw]),llr_den);
+	llr_num = simde_mm_max_epi16(simde_mm_mullo_epi16(corr16,pucch2_polar_llr_num_lut[cw]),llr_num);
+	llr_den = simde_mm_max_epi16(simde_mm_mullo_epi16(corr16,pucch2_polar_llr_den_lut[cw]),llr_den);
 
 	LOG_D(PHY,"lut_num (%d,%d,%d,%d,%d,%d,%d,%d)\n",
 	      ((int16_t*)&pucch2_polar_llr_num_lut[cw])[0],
@@ -1668,7 +1676,7 @@ void nr_decode_pucch2(PHY_VARS_gNB *gNB,
 
       }
       // compute llrs
-        llrs[half_prb + (symb*2*pucch_pdu->prb_size)] = _mm_subs_epi16(llr_num,llr_den);
+        llrs[half_prb + (symb*2*pucch_pdu->prb_size)] = simde_mm_subs_epi16(llr_num,llr_den);
         LOG_D(PHY,"llrs[%d] : (%d,%d,%d,%d,%d,%d,%d,%d)\n",
               half_prb,
               ((int16_t*)&llrs[half_prb])[0],
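The llr_num/llr_den recursion above is a per-lane running maximum: the 0/1 LUTs zero out codewords under the opposing bit hypothesis, simde_mm_max_epi16 keeps the best masked correlation, and the final subs gives the LLR. Scalar sketch for one bit position (corr_of is a hypothetical stand-in for the per-codeword correlation; note the real LUTs interleave bit positions across lanes):

    #include <stdint.h>

    extern int16_t corr_of(int cw); /* hypothetical per-codeword correlation */

    static int16_t llr_bit(int pos) {
      int16_t num = 0, den = 0; /* running maxima, initialised to 0 as in the SIMD code */
      for (int cw = 0; cw < 256; cw++) {
        int16_t c = corr_of(cw);
        if (((cw >> pos) & 1) == 0) {
          if (c > num) num = c;   /* best correlation with bit = 0 */
        } else {
          if (c > den) den = c;   /* best correlation with bit = 1 */
        }
      }
      return (int16_t)(num - den);
    }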
diff --git a/openair1/PHY/NR_UE_ESTIMATION/nr_ue_measurements.c b/openair1/PHY/NR_UE_ESTIMATION/nr_ue_measurements.c
index be1515072d8c111c772cbc9aba958a944e369aa0..a6aa2ba22f449c41d521f49f6bdfaa1a2ed628bf 100644
--- a/openair1/PHY/NR_UE_ESTIMATION/nr_ue_measurements.c
+++ b/openair1/PHY/NR_UE_ESTIMATION/nr_ue_measurements.c
@@ -168,11 +168,6 @@ void nr_ue_measurements(PHY_VARS_NR_UE *ue,
       ue->measurements.rx_power_avg[gNB_id],
       ue->measurements.n0_power_tot);
   }
-
-#if defined(__x86_64__) || defined(__i386__)
-  _mm_empty();
-  _m_empty();
-#endif
 }
 
 // This function implements:
diff --git a/openair1/PHY/NR_UE_TRANSPORT/csi_rx.c b/openair1/PHY/NR_UE_TRANSPORT/csi_rx.c
index 068e1dacf374f1b9f9e10125cf2a95cc555886cc..9671f301c7e5372a3aded000057869b786361ba5 100644
--- a/openair1/PHY/NR_UE_TRANSPORT/csi_rx.c
+++ b/openair1/PHY/NR_UE_TRANSPORT/csi_rx.c
@@ -58,30 +58,30 @@ void nr_det_A_MF_2x2(int32_t *a_mf_00,
 
   int16_t nr_conjug2[8]__attribute__((aligned(16))) = {1,-1,1,-1,1,-1,1,-1} ;
 
-  __m128i ad_re_128, bc_re_128, det_re_128;
+  simde__m128i ad_re_128, bc_re_128, det_re_128;
 
-  __m128i *a_mf_00_128 = (__m128i *)a_mf_00;
-  __m128i *a_mf_01_128 = (__m128i *)a_mf_01;
-  __m128i *a_mf_10_128 = (__m128i *)a_mf_10;
-  __m128i *a_mf_11_128 = (__m128i *)a_mf_11;
-  __m128i *det_fin_128 = (__m128i *)det_fin;
+  simde__m128i *a_mf_00_128 = (simde__m128i *)a_mf_00;
+  simde__m128i *a_mf_01_128 = (simde__m128i *)a_mf_01;
+  simde__m128i *a_mf_10_128 = (simde__m128i *)a_mf_10;
+  simde__m128i *a_mf_11_128 = (simde__m128i *)a_mf_11;
+  simde__m128i *det_fin_128 = (simde__m128i *)det_fin;
 
   for (int rb = 0; rb<3*nb_rb; rb++) {
 
     //complex multiplication (I_a+jQ_a)(I_d+jQ_d) = (I_aI_d - Q_aQ_d) + j(Q_aI_d + I_aQ_d)
     //The imag part is often zero, we compute only the real part
-    ad_re_128 = _mm_sign_epi16(a_mf_00_128[0],*(__m128i*)&nr_conjug2[0]);
-    ad_re_128 = _mm_madd_epi16(ad_re_128,a_mf_11_128[0]); //Re: I_a0*I_d0 - Q_a1*Q_d1
+    ad_re_128 = simde_mm_sign_epi16(a_mf_00_128[0],*(simde__m128i*)&nr_conjug2[0]);
+    ad_re_128 = simde_mm_madd_epi16(ad_re_128,a_mf_11_128[0]); //Re: I_a0*I_d0 - Q_a1*Q_d1
 
     //complex multiplication (I_b+jQ_b)(I_c+jQ_c) = (I_bI_c - Q_bQ_c) + j(Q_bI_c + I_bQ_c)
     //The imag part is often zero, we compute only the real part
-    bc_re_128 = _mm_sign_epi16(a_mf_01_128[0],*(__m128i*)&nr_conjug2[0]);
-    bc_re_128 = _mm_madd_epi16(bc_re_128,a_mf_10_128[0]); //Re: I_b0*I_c0 - Q_b1*Q_c1
+    bc_re_128 = simde_mm_sign_epi16(a_mf_01_128[0],*(simde__m128i*)&nr_conjug2[0]);
+    bc_re_128 = simde_mm_madd_epi16(bc_re_128,a_mf_10_128[0]); //Re: I_b0*I_c0 - Q_b1*Q_c1
 
-    det_re_128 = _mm_sub_epi32(ad_re_128, bc_re_128);
+    det_re_128 = simde_mm_sub_epi32(ad_re_128, bc_re_128);
 
     //det in Q30 format
-    det_fin_128[0] = _mm_abs_epi32(det_re_128);
+    det_fin_128[0] = simde_mm_abs_epi32(det_re_128);
 
     det_fin_128+=1;
     a_mf_00_128+=1;
@@ -89,22 +89,22 @@ void nr_det_A_MF_2x2(int32_t *a_mf_00,
     a_mf_10_128+=1;
     a_mf_11_128+=1;
   }
-  _mm_empty();
-  _m_empty();
+  simde_mm_empty();
+  simde_m_empty();
 }
 
 void nr_squared_matrix_element(int32_t *a,
                                int32_t *a_sq,
                                const unsigned short nb_rb) {
-  __m128i *a_128 = (__m128i *)a;
-  __m128i *a_sq_128 = (__m128i *)a_sq;
+  simde__m128i *a_128 = (simde__m128i *)a;
+  simde__m128i *a_sq_128 = (simde__m128i *)a_sq;
   for (int rb=0; rb<3*nb_rb; rb++) {
-    a_sq_128[0] = _mm_madd_epi16(a_128[0], a_128[0]);
+    a_sq_128[0] = simde_mm_madd_epi16(a_128[0], a_128[0]);
     a_sq_128+=1;
     a_128+=1;
   }
-  _mm_empty();
-  _m_empty();
+  simde_mm_empty();
+  simde_m_empty();
 }
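simde_mm_madd_epi16(x, x) in both functions above computes, per 32-bit lane, the squared magnitude of one packed complex sample: adjacent (re, im) int16 pairs are squared and summed. Scalar form, as a sketch:

    #include <stdint.h>

    static int32_t sq_mag(int16_t re, int16_t im) {
      return (int32_t)re * re + (int32_t)im * im; /* |re + j*im|^2 */
    }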
 
 void nr_numer_2x2(int32_t *a_00_sq,
@@ -113,23 +113,23 @@ void nr_numer_2x2(int32_t *a_00_sq,
                   int32_t *a_11_sq,
                   int32_t *num_fin,
                   const unsigned short nb_rb) {
-  __m128i *a_00_sq_128 = (__m128i *)a_00_sq;
-  __m128i *a_01_sq_128 = (__m128i *)a_01_sq;
-  __m128i *a_10_sq_128 = (__m128i *)a_10_sq;
-  __m128i *a_11_sq_128 = (__m128i *)a_11_sq;
-  __m128i *num_fin_128 = (__m128i *)num_fin;
+  simde__m128i *a_00_sq_128 = (simde__m128i *)a_00_sq;
+  simde__m128i *a_01_sq_128 = (simde__m128i *)a_01_sq;
+  simde__m128i *a_10_sq_128 = (simde__m128i *)a_10_sq;
+  simde__m128i *a_11_sq_128 = (simde__m128i *)a_11_sq;
+  simde__m128i *num_fin_128 = (simde__m128i *)num_fin;
   for (int rb=0; rb<3*nb_rb; rb++) {
-    __m128i sq_a_plus_sq_d_128 = _mm_add_epi32(a_00_sq_128[0], a_11_sq_128[0]);
-    __m128i sq_b_plus_sq_c_128 = _mm_add_epi32(a_01_sq_128[0], a_10_sq_128[0]);
-    num_fin_128[0] = _mm_add_epi32(sq_a_plus_sq_d_128, sq_b_plus_sq_c_128);
+    simde__m128i sq_a_plus_sq_d_128 = simde_mm_add_epi32(a_00_sq_128[0], a_11_sq_128[0]);
+    simde__m128i sq_b_plus_sq_c_128 = simde_mm_add_epi32(a_01_sq_128[0], a_10_sq_128[0]);
+    num_fin_128[0] = simde_mm_add_epi32(sq_a_plus_sq_d_128, sq_b_plus_sq_c_128);
     num_fin_128+=1;
     a_00_sq_128+=1;
     a_01_sq_128+=1;
     a_10_sq_128+=1;
     a_11_sq_128+=1;
   }
-  _mm_empty();
-  _m_empty();
+  simde_mm_empty();
+  simde_m_empty();
 }
 
 bool is_csi_rs_in_symbol(const fapi_nr_dl_config_csirs_pdu_rel15_t csirs_config_pdu, const int symbol) {
diff --git a/openair1/PHY/NR_UE_TRANSPORT/dci_nr.c b/openair1/PHY/NR_UE_TRANSPORT/dci_nr.c
index 65491228c40102dfbfd19308d2718e943b4163d6..331a08e1a93c1c352cc5b62f6dca529df2877ab7 100644
--- a/openair1/PHY/NR_UE_TRANSPORT/dci_nr.c
+++ b/openair1/PHY/NR_UE_TRANSPORT/dci_nr.c
@@ -279,30 +279,18 @@ void nr_pdcch_channel_level(int32_t rx_size,
                             uint8_t nb_rb) {
   int16_t rb;
   uint8_t aarx;
-#if defined(__x86_64__) || defined(__i386__)
-  __m128i *dl_ch128;
-  __m128i avg128P;
-#elif defined(__arm__) || defined(__aarch64__)
-  int16x8_t *dl_ch128;
-  int32x4_t *avg128P;
-#endif
+  simde__m128i *dl_ch128;
+  simde__m128i avg128P;
 
   for (aarx=0; aarx<frame_parms->nb_antennas_rx; aarx++) {
     //clear average level
-#if defined(__x86_64__) || defined(__i386__)
-    avg128P = _mm_setzero_si128();
-    dl_ch128=(__m128i *)&dl_ch_estimates_ext[aarx][symbol*nb_rb*12];
-#elif defined(__arm__) || defined(__aarch64__)
-    dl_ch128=(int16x8_t *)&dl_ch_estimates_ext[aarx][symbol*nb_rb*12];
-#endif
+    avg128P = simde_mm_setzero_si128();
+    dl_ch128=(simde__m128i *)&dl_ch_estimates_ext[aarx][symbol*nb_rb*12];
 
     for (rb=0; rb<(nb_rb*3)>>2; rb++) {
-#if defined(__x86_64__) || defined(__i386__)
-      avg128P = _mm_add_epi32(avg128P,_mm_madd_epi16(dl_ch128[0],dl_ch128[0]));
-      avg128P = _mm_add_epi32(avg128P,_mm_madd_epi16(dl_ch128[1],dl_ch128[1]));
-      avg128P = _mm_add_epi32(avg128P,_mm_madd_epi16(dl_ch128[2],dl_ch128[2]));
-#elif defined(__arm__) || defined(__aarch64__)
-#endif
+      avg128P = simde_mm_add_epi32(avg128P,simde_mm_madd_epi16(dl_ch128[0],dl_ch128[0]));
+      avg128P = simde_mm_add_epi32(avg128P,simde_mm_madd_epi16(dl_ch128[1],dl_ch128[1]));
+      avg128P = simde_mm_add_epi32(avg128P,simde_mm_madd_epi16(dl_ch128[2],dl_ch128[2]));
       //      for (int i=0;i<24;i+=2) printf("pdcch channel re %d (%d,%d)\n",(rb*12)+(i>>1),((int16_t*)dl_ch128)[i],((int16_t*)dl_ch128)[i+1]);
       dl_ch128+=3;
       /*
@@ -321,17 +309,11 @@ void nr_pdcch_channel_level(int32_t rx_size,
     LOG_DDD("Channel level : %d\n",avg[aarx]);
   }
 
-#if defined(__x86_64__) || defined(__i386__)
-  _mm_empty();
-  _m_empty();
-#endif
+  simde_mm_empty();
+  simde_m_empty();
 }
 
-#if defined(__x86_64) || defined(__i386__)
-  __m128i mmtmpPD0,mmtmpPD1,mmtmpPD2,mmtmpPD3;
-#elif defined(__arm__) || defined(__aarch64__)
-
-#endif
+  simde__m128i mmtmpPD0,mmtmpPD1,mmtmpPD2,mmtmpPD3;
 
 
 
@@ -524,81 +506,69 @@ void nr_pdcch_channel_compensation(int32_t rx_size, int32_t rxdataF_ext[][rx_siz
                                    uint32_t coreset_nbr_rb) {
   uint16_t rb; //,nb_rb=20;
   uint8_t aarx;
-#if defined(__x86_64__) || defined(__i386__)
-  __m128i mmtmpP0,mmtmpP1,mmtmpP2,mmtmpP3;
-#elif defined(__arm__) || defined(__aarch64__)
-  int16x8_t mmtmpP0,mmtmpP1,mmtmpP2,mmtmpP3;
-#endif
-#if defined(__x86_64__) || defined(__i386__)
-  __m128i *dl_ch128,*rxdataF128,*rxdataF_comp128;
-#elif defined(__arm__) || defined(__aarch64__)
-#endif
+  simde__m128i mmtmpP0,mmtmpP1,mmtmpP2,mmtmpP3;
+  simde__m128i *dl_ch128,*rxdataF128,*rxdataF_comp128;
 
   for (aarx=0; aarx<frame_parms->nb_antennas_rx; aarx++) {
-#if defined(__x86_64__) || defined(__i386__)
-    dl_ch128          = (__m128i *)&dl_ch_estimates_ext[aarx][symbol*coreset_nbr_rb*12];
-    rxdataF128        = (__m128i *)&rxdataF_ext[aarx][symbol*coreset_nbr_rb*12];
-    rxdataF_comp128   = (__m128i *)&rxdataF_comp[aarx][symbol*coreset_nbr_rb*12];
+    dl_ch128          = (simde__m128i *)&dl_ch_estimates_ext[aarx][symbol*coreset_nbr_rb*12];
+    rxdataF128        = (simde__m128i *)&rxdataF_ext[aarx][symbol*coreset_nbr_rb*12];
+    rxdataF_comp128   = (simde__m128i *)&rxdataF_comp[aarx][symbol*coreset_nbr_rb*12];
     //printf("ch compensation dl_ch ext addr %p \n", &dl_ch_estimates_ext[(aatx<<1)+aarx][symbol*20*12]);
     //printf("rxdataf ext addr %p symbol %d\n", &rxdataF_ext[aarx][symbol*20*12], symbol);
     //printf("rxdataf_comp addr %p\n",&rxdataF_comp[(aatx<<1)+aarx][symbol*20*12]);
-#elif defined(__arm__) || defined(__aarch64__)
-    // to be filled in
-#endif
 
     for (rb=0; rb<(coreset_nbr_rb*3)>>2; rb++) {
-#if defined(__x86_64__) || defined(__i386__)
       // multiply by conjugated channel
-      mmtmpP0 = _mm_madd_epi16(dl_ch128[0],rxdataF128[0]);
+      mmtmpP0 = simde_mm_madd_epi16(dl_ch128[0],rxdataF128[0]);
       //print_ints("re",&mmtmpP0);
       // mmtmpP0 contains real part of 4 consecutive outputs (32-bit)
-      mmtmpP1 = _mm_shufflelo_epi16(dl_ch128[0],_MM_SHUFFLE(2,3,0,1));
-      mmtmpP1 = _mm_shufflehi_epi16(mmtmpP1,_MM_SHUFFLE(2,3,0,1));
-      mmtmpP1 = _mm_sign_epi16(mmtmpP1,*(__m128i *)&conjugate[0]);
+      mmtmpP1 = simde_mm_shufflelo_epi16(dl_ch128[0], SIMDE_MM_SHUFFLE(2,3,0,1));
+      mmtmpP1 = simde_mm_shufflehi_epi16(mmtmpP1, SIMDE_MM_SHUFFLE(2,3,0,1));
+      mmtmpP1 = simde_mm_sign_epi16(mmtmpP1,*(simde__m128i *)&conjugate[0]);
       //print_ints("im",&mmtmpP1);
-      mmtmpP1 = _mm_madd_epi16(mmtmpP1,rxdataF128[0]);
+      mmtmpP1 = simde_mm_madd_epi16(mmtmpP1,rxdataF128[0]);
       // mmtmpP1 contains imag part of 4 consecutive outputs (32-bit)
-      mmtmpP0 = _mm_srai_epi32(mmtmpP0,output_shift);
+      mmtmpP0 = simde_mm_srai_epi32(mmtmpP0,output_shift);
       //  print_ints("re(shift)",&mmtmpP0);
-      mmtmpP1 = _mm_srai_epi32(mmtmpP1,output_shift);
+      mmtmpP1 = simde_mm_srai_epi32(mmtmpP1,output_shift);
       //  print_ints("im(shift)",&mmtmpP1);
-      mmtmpP2 = _mm_unpacklo_epi32(mmtmpP0,mmtmpP1);
-      mmtmpP3 = _mm_unpackhi_epi32(mmtmpP0,mmtmpP1);
+      mmtmpP2 = simde_mm_unpacklo_epi32(mmtmpP0,mmtmpP1);
+      mmtmpP3 = simde_mm_unpackhi_epi32(mmtmpP0,mmtmpP1);
       //print_ints("c0",&mmtmpP2);
       //print_ints("c1",&mmtmpP3);
-      rxdataF_comp128[0] = _mm_packs_epi32(mmtmpP2,mmtmpP3);
+      rxdataF_comp128[0] = simde_mm_packs_epi32(mmtmpP2,mmtmpP3);
 //      print_shorts("rx:",(int16_t*)rxdataF128);
 //      print_shorts("ch:",(int16_t*)dl_ch128);
 //      print_shorts("pack:",(int16_t*)rxdataF_comp128);
       // multiply by conjugated channel
-      mmtmpP0 = _mm_madd_epi16(dl_ch128[1],rxdataF128[1]);
+      mmtmpP0 = simde_mm_madd_epi16(dl_ch128[1],rxdataF128[1]);
       // mmtmpP0 contains real part of 4 consecutive outputs (32-bit)
-      mmtmpP1 = _mm_shufflelo_epi16(dl_ch128[1],_MM_SHUFFLE(2,3,0,1));
-      mmtmpP1 = _mm_shufflehi_epi16(mmtmpP1,_MM_SHUFFLE(2,3,0,1));
-      mmtmpP1 = _mm_sign_epi16(mmtmpP1,*(__m128i *)&conjugate[0]);
-      mmtmpP1 = _mm_madd_epi16(mmtmpP1,rxdataF128[1]);
+      mmtmpP1 = simde_mm_shufflelo_epi16(dl_ch128[1], SIMDE_MM_SHUFFLE(2,3,0,1));
+      mmtmpP1 = simde_mm_shufflehi_epi16(mmtmpP1, SIMDE_MM_SHUFFLE(2,3,0,1));
+      mmtmpP1 = simde_mm_sign_epi16(mmtmpP1,*(simde__m128i *)&conjugate[0]);
+      mmtmpP1 = simde_mm_madd_epi16(mmtmpP1,rxdataF128[1]);
       // mmtmpP1 contains imag part of 4 consecutive outputs (32-bit)
-      mmtmpP0 = _mm_srai_epi32(mmtmpP0,output_shift);
-      mmtmpP1 = _mm_srai_epi32(mmtmpP1,output_shift);
-      mmtmpP2 = _mm_unpacklo_epi32(mmtmpP0,mmtmpP1);
-      mmtmpP3 = _mm_unpackhi_epi32(mmtmpP0,mmtmpP1);
-      rxdataF_comp128[1] = _mm_packs_epi32(mmtmpP2,mmtmpP3);
+      mmtmpP0 = simde_mm_srai_epi32(mmtmpP0,output_shift);
+      mmtmpP1 = simde_mm_srai_epi32(mmtmpP1,output_shift);
+      mmtmpP2 = simde_mm_unpacklo_epi32(mmtmpP0,mmtmpP1);
+      mmtmpP3 = simde_mm_unpackhi_epi32(mmtmpP0,mmtmpP1);
+      rxdataF_comp128[1] = simde_mm_packs_epi32(mmtmpP2,mmtmpP3);
       //print_shorts("rx:",rxdataF128+1);
       //print_shorts("ch:",dl_ch128+1);
       //print_shorts("pack:",rxdataF_comp128+1);
       // multiply by conjugated channel
-      mmtmpP0 = _mm_madd_epi16(dl_ch128[2],rxdataF128[2]);
+      mmtmpP0 = simde_mm_madd_epi16(dl_ch128[2],rxdataF128[2]);
       // mmtmpP0 contains real part of 4 consecutive outputs (32-bit)
-      mmtmpP1 = _mm_shufflelo_epi16(dl_ch128[2],_MM_SHUFFLE(2,3,0,1));
-      mmtmpP1 = _mm_shufflehi_epi16(mmtmpP1,_MM_SHUFFLE(2,3,0,1));
-      mmtmpP1 = _mm_sign_epi16(mmtmpP1,*(__m128i *)&conjugate[0]);
-      mmtmpP1 = _mm_madd_epi16(mmtmpP1,rxdataF128[2]);
+      mmtmpP1 = simde_mm_shufflelo_epi16(dl_ch128[2], SIMDE_MM_SHUFFLE(2,3,0,1));
+      mmtmpP1 = simde_mm_shufflehi_epi16(mmtmpP1, SIMDE_MM_SHUFFLE(2,3,0,1));
+      mmtmpP1 = simde_mm_sign_epi16(mmtmpP1,*(simde__m128i *)&conjugate[0]);
+      mmtmpP1 = simde_mm_madd_epi16(mmtmpP1,rxdataF128[2]);
       // mmtmpP1 contains imag part of 4 consecutive outputs (32-bit)
-      mmtmpP0 = _mm_srai_epi32(mmtmpP0,output_shift);
-      mmtmpP1 = _mm_srai_epi32(mmtmpP1,output_shift);
-      mmtmpP2 = _mm_unpacklo_epi32(mmtmpP0,mmtmpP1);
-      mmtmpP3 = _mm_unpackhi_epi32(mmtmpP0,mmtmpP1);
-      rxdataF_comp128[2] = _mm_packs_epi32(mmtmpP2,mmtmpP3);
+      mmtmpP0 = simde_mm_srai_epi32(mmtmpP0,output_shift);
+      mmtmpP1 = simde_mm_srai_epi32(mmtmpP1,output_shift);
+      mmtmpP2 = simde_mm_unpacklo_epi32(mmtmpP0,mmtmpP1);
+      mmtmpP3 = simde_mm_unpackhi_epi32(mmtmpP0,mmtmpP1);
+      rxdataF_comp128[2] = simde_mm_packs_epi32(mmtmpP2,mmtmpP3);
       ///////////////////////////////////////////////////////////////////////////////////////////////
       //print_shorts("rx:",rxdataF128+2);
       //print_shorts("ch:",dl_ch128+2);
@@ -613,16 +583,11 @@ void nr_pdcch_channel_compensation(int32_t rx_size, int32_t rxdataF_ext[][rx_siz
       dl_ch128+=3;
       rxdataF128+=3;
       rxdataF_comp128+=3;
-#elif defined(__arm__) || defined(__aarch64__)
-      // to be filled in
-#endif
     }
   }
 
-#if defined(__x86_64__) || defined(__i386__)
-  _mm_empty();
-  _m_empty();
-#endif
+  simde_mm_empty();
+  simde_m_empty();
 }
 
 
@@ -630,36 +595,21 @@ void nr_pdcch_detection_mrc(NR_DL_FRAME_PARMS *frame_parms,
                          int32_t rx_size,
                          int32_t rxdataF_comp[][rx_size],
                          uint8_t symbol) {
-#if defined(__x86_64__) || defined(__i386__)
-  __m128i *rxdataF_comp128_0,*rxdataF_comp128_1;
-#elif defined(__arm__) || defined(__aarch64__)
-  int16x8_t *rxdataF_comp128_0,*rxdataF_comp128_1;
-#endif
+  simde__m128i *rxdataF_comp128_0,*rxdataF_comp128_1;
   int32_t i;
 
   if (frame_parms->nb_antennas_rx>1) {
-#if defined(__x86_64__) || defined(__i386__)
-    rxdataF_comp128_0   = (__m128i *)&rxdataF_comp[0][symbol*frame_parms->N_RB_DL*12];
-    rxdataF_comp128_1   = (__m128i *)&rxdataF_comp[1][symbol*frame_parms->N_RB_DL*12];
-#elif defined(__arm__) || defined(__aarch64__)
-    rxdataF_comp128_0   = (int16x8_t *)&rxdataF_comp[0][symbol*frame_parms->N_RB_DL*12];
-    rxdataF_comp128_1   = (int16x8_t *)&rxdataF_comp[1][symbol*frame_parms->N_RB_DL*12];
-#endif
+    rxdataF_comp128_0   = (simde__m128i *)&rxdataF_comp[0][symbol*frame_parms->N_RB_DL*12];
+    rxdataF_comp128_1   = (simde__m128i *)&rxdataF_comp[1][symbol*frame_parms->N_RB_DL*12];
 
     // MRC on each re of rb
     for (i=0; i<frame_parms->N_RB_DL*3; i++) {
-#if defined(__x86_64__) || defined(__i386__)
-      rxdataF_comp128_0[i] = _mm_adds_epi16(_mm_srai_epi16(rxdataF_comp128_0[i],1),_mm_srai_epi16(rxdataF_comp128_1[i],1));
-#elif defined(__arm__) || defined(__aarch64__)
-      rxdataF_comp128_0[i] = vhaddq_s16(rxdataF_comp128_0[i],rxdataF_comp128_1[i]);
-#endif
+      rxdataF_comp128_0[i] = simde_mm_adds_epi16(simde_mm_srai_epi16(rxdataF_comp128_0[i],1),simde_mm_srai_epi16(rxdataF_comp128_1[i],1));
     }
   }
 
-#if defined(__x86_64__) || defined(__i386__)
-  _mm_empty();
-  _m_empty();
-#endif
+  simde_mm_empty();
+  simde_m_empty();
 }
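The MRC line above halves each antenna stream before the saturating add, trading one bit of precision for overflow safety; scalar form (a sketch):

    #include <stdint.h>

    static int16_t mrc2(int16_t a, int16_t b) {
      /* (a>>1)+(b>>1) always fits in int16, so the saturation in
         simde_mm_adds_epi16 is only a safety net here */
      return (int16_t)((a >> 1) + (b >> 1));
    }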
 
 int32_t nr_rx_pdcch(PHY_VARS_NR_UE *ue,
diff --git a/openair1/PHY/NR_UE_TRANSPORT/nr_dlsch_decoding.c b/openair1/PHY/NR_UE_TRANSPORT/nr_dlsch_decoding.c
index 8c11e103e54cd77678fa43349c46e0ff9ca3cba5..b40f68b780637eca42f8d268e7167055c4350dc1 100644
--- a/openair1/PHY/NR_UE_TRANSPORT/nr_dlsch_decoding.c
+++ b/openair1/PHY/NR_UE_TRANSPORT/nr_dlsch_decoding.c
@@ -265,10 +265,10 @@ static void nr_processDLSegment(void *arg)
     memcpy((z+Kr),harq_process->d[r]+(Kr-2*harq_process->Z),(kc*harq_process->Z-Kr)*sizeof(int16_t));
 
     //Saturate coded bits before decoding into 8 bits values
-    __m128i *pv = (__m128i*)&z;
-    __m128i *pl = (__m128i*)&l;
-    for (int i=0, j=0; j < ((kc*harq_process->Z)>>4)+1;  i+=2, j++) {
-      pl[j] = _mm_packs_epi16(pv[i],pv[i+1]);
+    simde__m128i *pv = (simde__m128i*)&z;
+    simde__m128i *pl = (simde__m128i*)&l;
+    for (int i=0, j=0; j < ((kc*harq_process->Z)>>4)+1;  i+=2, j++) {
+      pl[j] = simde_mm_packs_epi16(pv[i],pv[i+1]);
     }
 
     //VCD_SIGNAL_DUMPER_DUMP_FUNCTION_BY_NAME(VCD_SIGNAL_DUMPER_FUNCTIONS_DLSCH_LDPC, VCD_FUNCTION_IN);
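The packs step above saturates each 16-bit LLR into the int8 range expected by the 8-bit LDPC decoder input; per element it is just (a sketch):

    #include <stdint.h>

    static int8_t sat8(int16_t v) {
      return (int8_t)(v > 127 ? 127 : (v < -128 ? -128 : v));
    }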
diff --git a/openair1/PHY/NR_UE_TRANSPORT/nr_dlsch_demodulation.c b/openair1/PHY/NR_UE_TRANSPORT/nr_dlsch_demodulation.c
index 08e641d127fcad55c53b1b7f3fca181496e5f271..300d6df911fbfc33c9e3b8926044e7c7c0c5e544 100644
--- a/openair1/PHY/NR_UE_TRANSPORT/nr_dlsch_demodulation.c
+++ b/openair1/PHY/NR_UE_TRANSPORT/nr_dlsch_demodulation.c
@@ -819,119 +819,116 @@ void nr_dlsch_channel_compensation(uint32_t rx_size_symbol,
                                    PHY_NR_MEASUREMENTS *measurements)
 {
 
-#if defined(__i386) || defined(__x86_64)
 
   unsigned short rb;
   unsigned char aarx,atx;
-  __m128i *dl_ch128,*dl_ch128_2,*dl_ch_mag128,*dl_ch_mag128b,*dl_ch_mag128r,*rxdataF128,*rxdataF_comp128,*rho128;
-  __m128i mmtmpD0,mmtmpD1,mmtmpD2,mmtmpD3,QAM_amp128={0},QAM_amp128b={0},QAM_amp128r={0};
+  simde__m128i *dl_ch128,*dl_ch128_2,*dl_ch_mag128,*dl_ch_mag128b,*dl_ch_mag128r,*rxdataF128,*rxdataF_comp128,*rho128;
+  simde__m128i mmtmpD0,mmtmpD1,mmtmpD2,mmtmpD3,QAM_amp128={0},QAM_amp128b={0},QAM_amp128r={0};
 
   uint32_t nb_rb_0 = length / 12 + ((length % 12) ? 1 : 0);
   for (int l = 0; l < n_layers; l++) {
     if (mod_order == 4) {
-      QAM_amp128 = _mm_set1_epi16(QAM16_n1);  // 2/sqrt(10)
-      QAM_amp128b = _mm_setzero_si128();
-      QAM_amp128r = _mm_setzero_si128();
+      QAM_amp128 = simde_mm_set1_epi16(QAM16_n1);  // 2/sqrt(10)
+      QAM_amp128b = simde_mm_setzero_si128();
+      QAM_amp128r = simde_mm_setzero_si128();
     } else if (mod_order == 6) {
-      QAM_amp128  = _mm_set1_epi16(QAM64_n1); //
-      QAM_amp128b = _mm_set1_epi16(QAM64_n2);
-      QAM_amp128r = _mm_setzero_si128();
+      QAM_amp128  = simde_mm_set1_epi16(QAM64_n1); //
+      QAM_amp128b = simde_mm_set1_epi16(QAM64_n2);
+      QAM_amp128r = simde_mm_setzero_si128();
     } else if (mod_order == 8) {
-      QAM_amp128 = _mm_set1_epi16(QAM256_n1);
-      QAM_amp128b = _mm_set1_epi16(QAM256_n2);
-      QAM_amp128r = _mm_set1_epi16(QAM256_n3);
+      QAM_amp128 = simde_mm_set1_epi16(QAM256_n1);
+      QAM_amp128b = simde_mm_set1_epi16(QAM256_n2);
+      QAM_amp128r = simde_mm_set1_epi16(QAM256_n3);
     }
 
     //    printf("comp: rxdataF_comp %p, symbol %d\n",rxdataF_comp[0],symbol);
 
     for (aarx=0; aarx<frame_parms->nb_antennas_rx; aarx++) {
 
-      dl_ch128          = (__m128i *)dl_ch_estimates_ext[(l * frame_parms->nb_antennas_rx) + aarx];
-      dl_ch_mag128 = (__m128i *)dl_ch_mag[l][aarx];
-      dl_ch_mag128b = (__m128i *)dl_ch_magb[l][aarx];
-      dl_ch_mag128r = (__m128i *)dl_ch_magr[l][aarx];
-      rxdataF128        = (__m128i *)rxdataF_ext[aarx];
-      rxdataF_comp128 = (__m128i *)(rxdataF_comp[l][aarx] + symbol * nb_rb * 12);
+      dl_ch128          = (simde__m128i *)dl_ch_estimates_ext[(l * frame_parms->nb_antennas_rx) + aarx];
+      dl_ch_mag128 = (simde__m128i *)dl_ch_mag[l][aarx];
+      dl_ch_mag128b = (simde__m128i *)dl_ch_magb[l][aarx];
+      dl_ch_mag128r = (simde__m128i *)dl_ch_magr[l][aarx];
+      rxdataF128        = (simde__m128i *)rxdataF_ext[aarx];
+      rxdataF_comp128 = (simde__m128i *)(rxdataF_comp[l][aarx] + symbol * nb_rb * 12);
 
       for (rb=0; rb<nb_rb_0; rb++) {
         if (mod_order>2) {
           // get channel amplitude if not QPSK
 
-          mmtmpD0 = _mm_madd_epi16(dl_ch128[0],dl_ch128[0]);
-          mmtmpD0 = _mm_srai_epi32(mmtmpD0,output_shift);
-
-          mmtmpD1 = _mm_madd_epi16(dl_ch128[1],dl_ch128[1]);
-          mmtmpD1 = _mm_srai_epi32(mmtmpD1,output_shift);
-
-          mmtmpD0 = _mm_packs_epi32(mmtmpD0,mmtmpD1); //|H[0]|^2 |H[1]|^2 |H[2]|^2 |H[3]|^2 |H[4]|^2 |H[5]|^2 |H[6]|^2 |H[7]|^2
+          mmtmpD0 = simde_mm_madd_epi16(dl_ch128[0],dl_ch128[0]);
+          mmtmpD0 = simde_mm_srai_epi32(mmtmpD0,output_shift);
+          mmtmpD1 = simde_mm_madd_epi16(dl_ch128[1],dl_ch128[1]);
+          mmtmpD1 = simde_mm_srai_epi32(mmtmpD1,output_shift);
+          mmtmpD0 = simde_mm_packs_epi32(mmtmpD0,mmtmpD1); //|H[0]|^2 |H[1]|^2 |H[2]|^2 |H[3]|^2 |H[4]|^2 |H[5]|^2 |H[6]|^2 |H[7]|^2
 
           // store channel magnitude here in a new field of dlsch
 
-          dl_ch_mag128[0] = _mm_unpacklo_epi16(mmtmpD0,mmtmpD0);
+          dl_ch_mag128[0] = simde_mm_unpacklo_epi16(mmtmpD0,mmtmpD0);
           dl_ch_mag128b[0] = dl_ch_mag128[0];
           dl_ch_mag128r[0] = dl_ch_mag128[0];
-          dl_ch_mag128[0] = _mm_mulhi_epi16(dl_ch_mag128[0],QAM_amp128);
-          dl_ch_mag128[0] = _mm_slli_epi16(dl_ch_mag128[0],1);
+          dl_ch_mag128[0] = simde_mm_mulhi_epi16(dl_ch_mag128[0],QAM_amp128);
+          dl_ch_mag128[0] = simde_mm_slli_epi16(dl_ch_mag128[0],1);
 
-          dl_ch_mag128b[0] = _mm_mulhi_epi16(dl_ch_mag128b[0],QAM_amp128b);
-          dl_ch_mag128b[0] = _mm_slli_epi16(dl_ch_mag128b[0],1);
+          dl_ch_mag128b[0] = simde_mm_mulhi_epi16(dl_ch_mag128b[0],QAM_amp128b);
+          dl_ch_mag128b[0] = simde_mm_slli_epi16(dl_ch_mag128b[0],1);
 
-          dl_ch_mag128r[0] = _mm_mulhi_epi16(dl_ch_mag128r[0],QAM_amp128r);
-          dl_ch_mag128r[0] = _mm_slli_epi16(dl_ch_mag128r[0],1);
+          dl_ch_mag128r[0] = simde_mm_mulhi_epi16(dl_ch_mag128r[0],QAM_amp128r);
+          dl_ch_mag128r[0] = simde_mm_slli_epi16(dl_ch_mag128r[0],1);
 
     //print_ints("Re(ch):",(int16_t*)&mmtmpD0);
     //print_shorts("QAM_amp:",(int16_t*)&QAM_amp128);
     //print_shorts("mag:",(int16_t*)&dl_ch_mag128[0]);
-          dl_ch_mag128[1] = _mm_unpackhi_epi16(mmtmpD0,mmtmpD0);
+          dl_ch_mag128[1] = simde_mm_unpackhi_epi16(mmtmpD0,mmtmpD0);
           dl_ch_mag128b[1] = dl_ch_mag128[1];
           dl_ch_mag128r[1] = dl_ch_mag128[1];
-          dl_ch_mag128[1] = _mm_mulhi_epi16(dl_ch_mag128[1],QAM_amp128);
-          dl_ch_mag128[1] = _mm_slli_epi16(dl_ch_mag128[1],1);
+          dl_ch_mag128[1] = simde_mm_mulhi_epi16(dl_ch_mag128[1],QAM_amp128);
+          dl_ch_mag128[1] = simde_mm_slli_epi16(dl_ch_mag128[1],1);
 
-          dl_ch_mag128b[1] = _mm_mulhi_epi16(dl_ch_mag128b[1],QAM_amp128b);
-          dl_ch_mag128b[1] = _mm_slli_epi16(dl_ch_mag128b[1],1);
+          dl_ch_mag128b[1] = simde_mm_mulhi_epi16(dl_ch_mag128b[1],QAM_amp128b);
+          dl_ch_mag128b[1] = simde_mm_slli_epi16(dl_ch_mag128b[1],1);
 
-          dl_ch_mag128r[1] = _mm_mulhi_epi16(dl_ch_mag128r[1],QAM_amp128r);
-          dl_ch_mag128r[1] = _mm_slli_epi16(dl_ch_mag128r[1],1);
+          dl_ch_mag128r[1] = simde_mm_mulhi_epi16(dl_ch_mag128r[1],QAM_amp128r);
+          dl_ch_mag128r[1] = simde_mm_slli_epi16(dl_ch_mag128r[1],1);
 
-          mmtmpD0 = _mm_madd_epi16(dl_ch128[2],dl_ch128[2]);//[H_I(0)^2+H_Q(0)^2 H_I(1)^2+H_Q(1)^2 H_I(2)^2+H_Q(2)^2 H_I(3)^2+H_Q(3)^2]
-          mmtmpD0 = _mm_srai_epi32(mmtmpD0,output_shift);
-          mmtmpD1 = _mm_packs_epi32(mmtmpD0,mmtmpD0);//[|H(0)|^2 |H(1)|^2 |H(2)|^2 |H(3)|^2 |H(0)|^2 |H(1)|^2 |H(2)|^2 |H(3)|^2]
+          mmtmpD0 = simde_mm_madd_epi16(dl_ch128[2],dl_ch128[2]);//[H_I(0)^2+H_Q(0)^2 H_I(1)^2+H_Q(1)^2 H_I(2)^2+H_Q(2)^2 H_I(3)^2+H_Q(3)^2]
+          mmtmpD0 = simde_mm_srai_epi32(mmtmpD0,output_shift);
+          mmtmpD1 = simde_mm_packs_epi32(mmtmpD0,mmtmpD0);//[|H(0)|^2 |H(1)|^2 |H(2)|^2 |H(3)|^2 |H(0)|^2 |H(1)|^2 |H(2)|^2 |H(3)|^2]
 
-          dl_ch_mag128[2] = _mm_unpacklo_epi16(mmtmpD1,mmtmpD1);//[|H(0)|^2 |H(0)|^2 |H(1)|^2 |H(1)|^2 |H(2)|^2 |H(2)|^2 |H(3)|^2 |H(3)|^2]
+          dl_ch_mag128[2] = simde_mm_unpacklo_epi16(mmtmpD1,mmtmpD1);//[|H(0)|^2 |H(0)|^2 |H(1)|^2 |H(1)|^2 |H(2)|^2 |H(2)|^2 |H(3)|^2 |H(3)|^2]
           dl_ch_mag128b[2] = dl_ch_mag128[2];
           dl_ch_mag128r[2] = dl_ch_mag128[2];
 
-          dl_ch_mag128[2] = _mm_mulhi_epi16(dl_ch_mag128[2],QAM_amp128);
-          dl_ch_mag128[2] = _mm_slli_epi16(dl_ch_mag128[2],1);
+          dl_ch_mag128[2] = simde_mm_mulhi_epi16(dl_ch_mag128[2],QAM_amp128);
+          dl_ch_mag128[2] = simde_mm_slli_epi16(dl_ch_mag128[2],1);
 
-          dl_ch_mag128b[2] = _mm_mulhi_epi16(dl_ch_mag128b[2],QAM_amp128b);
-          dl_ch_mag128b[2] = _mm_slli_epi16(dl_ch_mag128b[2],1);
+          dl_ch_mag128b[2] = simde_mm_mulhi_epi16(dl_ch_mag128b[2],QAM_amp128b);
+          dl_ch_mag128b[2] = simde_mm_slli_epi16(dl_ch_mag128b[2],1);
 
-          dl_ch_mag128r[2] = _mm_mulhi_epi16(dl_ch_mag128r[2],QAM_amp128r);
-          dl_ch_mag128r[2] = _mm_slli_epi16(dl_ch_mag128r[2],1);
+          dl_ch_mag128r[2] = simde_mm_mulhi_epi16(dl_ch_mag128r[2],QAM_amp128r);
+          dl_ch_mag128r[2] = simde_mm_slli_epi16(dl_ch_mag128r[2],1);
         }
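The mulhi/slli pairs above implement a Q15 multiply of the channel magnitude by the per-modulation amplitude (e.g. QAM16_n1 = 2/sqrt(10) in Q15): mulhi_epi16 keeps the top 16 bits of the 32-bit product and the left shift by 1 restores Q15 scaling, i.e. (mag*amp)>>15 with the low bit cleared. A sketch:

    #include <stdint.h>

    static int16_t q15_mul(int16_t mag, int16_t amp) {
      /* exact up to the low bit, which the mulhi+slli sequence leaves at 0 */
      return (int16_t)(((int32_t)mag * amp) >> 15);
    }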
 
         // multiply by conjugated channel
-        mmtmpD0 = _mm_madd_epi16(dl_ch128[0],rxdataF128[0]);
+        mmtmpD0 = simde_mm_madd_epi16(dl_ch128[0],rxdataF128[0]);
         //  print_ints("re",&mmtmpD0);
 
         // mmtmpD0 contains real part of 4 consecutive outputs (32-bit)
-        mmtmpD1 = _mm_shufflelo_epi16(dl_ch128[0],_MM_SHUFFLE(2,3,0,1));
-        mmtmpD1 = _mm_shufflehi_epi16(mmtmpD1,_MM_SHUFFLE(2,3,0,1));
-        mmtmpD1 = _mm_sign_epi16(mmtmpD1,*(__m128i*)&conjugate[0]);
+        mmtmpD1 = simde_mm_shufflelo_epi16(dl_ch128[0],SIMDE_MM_SHUFFLE(2,3,0,1));
+        mmtmpD1 = simde_mm_shufflehi_epi16(mmtmpD1,SIMDE_MM_SHUFFLE(2,3,0,1));
+        mmtmpD1 = simde_mm_sign_epi16(mmtmpD1,*(simde__m128i*)&conjugate[0]);
         //  print_ints("im",&mmtmpD1);
-        mmtmpD1 = _mm_madd_epi16(mmtmpD1,rxdataF128[0]);
+        mmtmpD1 = simde_mm_madd_epi16(mmtmpD1,rxdataF128[0]);
         // mmtmpD1 contains imag part of 4 consecutive outputs (32-bit)
-        mmtmpD0 = _mm_srai_epi32(mmtmpD0,output_shift);
+        mmtmpD0 = simde_mm_srai_epi32(mmtmpD0,output_shift);
         //  print_ints("re(shift)",&mmtmpD0);
-        mmtmpD1 = _mm_srai_epi32(mmtmpD1,output_shift);
+        mmtmpD1 = simde_mm_srai_epi32(mmtmpD1,output_shift);
         //  print_ints("im(shift)",&mmtmpD1);
-        mmtmpD2 = _mm_unpacklo_epi32(mmtmpD0,mmtmpD1);
-        mmtmpD3 = _mm_unpackhi_epi32(mmtmpD0,mmtmpD1);
+        mmtmpD2 = simde_mm_unpacklo_epi32(mmtmpD0,mmtmpD1);
+        mmtmpD3 = simde_mm_unpackhi_epi32(mmtmpD0,mmtmpD1);
         //        print_ints("c0",&mmtmpD2);
         //  print_ints("c1",&mmtmpD3);
-        rxdataF_comp128[0] = _mm_packs_epi32(mmtmpD2,mmtmpD3);
+        rxdataF_comp128[0] = simde_mm_packs_epi32(mmtmpD2,mmtmpD3);
 
 #ifdef DEBUG_DLSCH_DEMOD
         printf("%%arx%d atx%d rb_index %d symbol %d shift %d\n",aarx,l,rb,symbol,output_shift);
@@ -944,19 +941,19 @@ void nr_dlsch_channel_compensation(uint32_t rx_size_symbol,
 #endif
 
         // multiply by conjugated channel
-        mmtmpD0 = _mm_madd_epi16(dl_ch128[1],rxdataF128[1]);
+        mmtmpD0 = simde_mm_madd_epi16(dl_ch128[1],rxdataF128[1]);
         // mmtmpD0 contains real part of 4 consecutive outputs (32-bit)
-        mmtmpD1 = _mm_shufflelo_epi16(dl_ch128[1],_MM_SHUFFLE(2,3,0,1));
-        mmtmpD1 = _mm_shufflehi_epi16(mmtmpD1,_MM_SHUFFLE(2,3,0,1));
-        mmtmpD1 = _mm_sign_epi16(mmtmpD1,*(__m128i*)conjugate);
-        mmtmpD1 = _mm_madd_epi16(mmtmpD1,rxdataF128[1]);
+        mmtmpD1 = simde_mm_shufflelo_epi16(dl_ch128[1],SIMDE_MM_SHUFFLE(2,3,0,1));
+        mmtmpD1 = simde_mm_shufflehi_epi16(mmtmpD1,SIMDE_MM_SHUFFLE(2,3,0,1));
+        mmtmpD1 = simde_mm_sign_epi16(mmtmpD1,*(simde__m128i*)conjugate);
+        mmtmpD1 = simde_mm_madd_epi16(mmtmpD1,rxdataF128[1]);
         // mmtmpD1 contains imag part of 4 consecutive outputs (32-bit)
-        mmtmpD0 = _mm_srai_epi32(mmtmpD0,output_shift);
-        mmtmpD1 = _mm_srai_epi32(mmtmpD1,output_shift);
-        mmtmpD2 = _mm_unpacklo_epi32(mmtmpD0,mmtmpD1);
-        mmtmpD3 = _mm_unpackhi_epi32(mmtmpD0,mmtmpD1);
+        mmtmpD0 = simde_mm_srai_epi32(mmtmpD0,output_shift);
+        mmtmpD1 = simde_mm_srai_epi32(mmtmpD1,output_shift);
+        mmtmpD2 = simde_mm_unpacklo_epi32(mmtmpD0,mmtmpD1);
+        mmtmpD3 = simde_mm_unpackhi_epi32(mmtmpD0,mmtmpD1);
 
-        rxdataF_comp128[1] = _mm_packs_epi32(mmtmpD2,mmtmpD3);
+        rxdataF_comp128[1] = simde_mm_packs_epi32(mmtmpD2,mmtmpD3);
 #ifdef DEBUG_DLSCH_DEMOD
         print_shorts("rx:",(int16_t*)&rxdataF128[1]);
         print_shorts("ch:",(int16_t*)&dl_ch128[1]);
@@ -964,19 +961,19 @@ void nr_dlsch_channel_compensation(uint32_t rx_size_symbol,
 #endif
 
         // multiply by conjugated channel
-        mmtmpD0 = _mm_madd_epi16(dl_ch128[2],rxdataF128[2]);
+        mmtmpD0 = simde_mm_madd_epi16(dl_ch128[2],rxdataF128[2]);
         // mmtmpD0 contains real part of 4 consecutive outputs (32-bit)
-        mmtmpD1 = _mm_shufflelo_epi16(dl_ch128[2],_MM_SHUFFLE(2,3,0,1));
-        mmtmpD1 = _mm_shufflehi_epi16(mmtmpD1,_MM_SHUFFLE(2,3,0,1));
-        mmtmpD1 = _mm_sign_epi16(mmtmpD1,*(__m128i*)conjugate);
-        mmtmpD1 = _mm_madd_epi16(mmtmpD1,rxdataF128[2]);
+        mmtmpD1 = simde_mm_shufflelo_epi16(dl_ch128[2],SIMDE_MM_SHUFFLE(2,3,0,1));
+        mmtmpD1 = simde_mm_shufflehi_epi16(mmtmpD1,SIMDE_MM_SHUFFLE(2,3,0,1));
+        mmtmpD1 = simde_mm_sign_epi16(mmtmpD1,*(simde__m128i*)conjugate);
+        mmtmpD1 = simde_mm_madd_epi16(mmtmpD1,rxdataF128[2]);
         // mmtmpD1 contains imag part of 4 consecutive outputs (32-bit)
-        mmtmpD0 = _mm_srai_epi32(mmtmpD0,output_shift);
-        mmtmpD1 = _mm_srai_epi32(mmtmpD1,output_shift);
-        mmtmpD2 = _mm_unpacklo_epi32(mmtmpD0,mmtmpD1);
-        mmtmpD3 = _mm_unpackhi_epi32(mmtmpD0,mmtmpD1);
+        mmtmpD0 = simde_mm_srai_epi32(mmtmpD0,output_shift);
+        mmtmpD1 = simde_mm_srai_epi32(mmtmpD1,output_shift);
+        mmtmpD2 = simde_mm_unpacklo_epi32(mmtmpD0,mmtmpD1);
+        mmtmpD3 = simde_mm_unpackhi_epi32(mmtmpD0,mmtmpD1);
 
-        rxdataF_comp128[2] = _mm_packs_epi32(mmtmpD2,mmtmpD3);
+        rxdataF_comp128[2] = simde_mm_packs_epi32(mmtmpD2,mmtmpD3);
 #ifdef DEBUG_DLSCH_DEMOD
         print_shorts("rx:",(int16_t*)&rxdataF128[2]);
         print_shorts("ch:",(int16_t*)&dl_ch128[2]);
@@ -1008,30 +1005,30 @@ void nr_dlsch_channel_compensation(uint32_t rx_size_symbol,
         for (atx = 0; atx < n_layers; atx++) {
           //avg_rho_re[aarx][l*n_layers+atx] = 0;
           //avg_rho_im[aarx][l*n_layers+atx] = 0;
-          rho128        = (__m128i *)&rho[aarx][l * n_layers + atx][symbol * nb_rb * 12];
-          dl_ch128      = (__m128i *)dl_ch_estimates_ext[l * frame_parms->nb_antennas_rx + aarx];
-          dl_ch128_2    = (__m128i *)dl_ch_estimates_ext[atx * frame_parms->nb_antennas_rx + aarx];
+          rho128        = (simde__m128i *)&rho[aarx][l * n_layers + atx][symbol * nb_rb * 12];
+          dl_ch128      = (simde__m128i *)dl_ch_estimates_ext[l * frame_parms->nb_antennas_rx + aarx];
+          dl_ch128_2    = (simde__m128i *)dl_ch_estimates_ext[atx * frame_parms->nb_antennas_rx + aarx];
 
           for (rb=0; rb<nb_rb_0; rb++) {
             // multiply by conjugated channel
-            mmtmpD0 = _mm_madd_epi16(dl_ch128[0],dl_ch128_2[0]);
+            mmtmpD0 = simde_mm_madd_epi16(dl_ch128[0],dl_ch128_2[0]);
             //  print_ints("re",&mmtmpD0);
             // mmtmpD0 contains real part of 4 consecutive outputs (32-bit)
-            mmtmpD1 = _mm_shufflelo_epi16(dl_ch128[0],_MM_SHUFFLE(2,3,0,1));
-            mmtmpD1 = _mm_shufflehi_epi16(mmtmpD1,_MM_SHUFFLE(2,3,0,1));
-            mmtmpD1 = _mm_sign_epi16(mmtmpD1,*(__m128i*)&conjugate[0]);
+            mmtmpD1 = simde_mm_shufflelo_epi16(dl_ch128[0],SIMDE_MM_SHUFFLE(2,3,0,1));
+            mmtmpD1 = simde_mm_shufflehi_epi16(mmtmpD1,SIMDE_MM_SHUFFLE(2,3,0,1));
+            mmtmpD1 = simde_mm_sign_epi16(mmtmpD1,*(simde__m128i*)&conjugate[0]);
             //  print_ints("im",&mmtmpD1);
-            mmtmpD1 = _mm_madd_epi16(mmtmpD1,dl_ch128_2[0]);
+            mmtmpD1 = simde_mm_madd_epi16(mmtmpD1,dl_ch128_2[0]);
             // mmtmpD1 contains imag part of 4 consecutive outputs (32-bit)
-            mmtmpD0 = _mm_srai_epi32(mmtmpD0,output_shift);
+            mmtmpD0 = simde_mm_srai_epi32(mmtmpD0,output_shift);
             //  print_ints("re(shift)",&mmtmpD0);
-            mmtmpD1 = _mm_srai_epi32(mmtmpD1,output_shift);
+            mmtmpD1 = simde_mm_srai_epi32(mmtmpD1,output_shift);
             //  print_ints("im(shift)",&mmtmpD1);
-            mmtmpD2 = _mm_unpacklo_epi32(mmtmpD0,mmtmpD1);
-            mmtmpD3 = _mm_unpackhi_epi32(mmtmpD0,mmtmpD1);
+            mmtmpD2 = simde_mm_unpacklo_epi32(mmtmpD0,mmtmpD1);
+            mmtmpD3 = simde_mm_unpackhi_epi32(mmtmpD0,mmtmpD1);
             //        print_ints("c0",&mmtmpD2);
             //  print_ints("c1",&mmtmpD3);
-            rho128[0] = _mm_packs_epi32(mmtmpD2,mmtmpD3);
+            rho128[0] = simde_mm_packs_epi32(mmtmpD2,mmtmpD3);
             //print_shorts("rx:",dl_ch128_2);
             //print_shorts("ch:",dl_ch128);
             //print_shorts("pack:",rho128);
@@ -1046,18 +1043,18 @@ void nr_dlsch_channel_compensation(uint32_t rx_size_symbol,
               ((int16_t*)&rho128[0])[7])/16;*/
 
             // multiply by conjugated channel
-            mmtmpD0 = _mm_madd_epi16(dl_ch128[1],dl_ch128_2[1]);
+            mmtmpD0 = simde_mm_madd_epi16(dl_ch128[1],dl_ch128_2[1]);
             // mmtmpD0 contains real part of 4 consecutive outputs (32-bit)
-            mmtmpD1 = _mm_shufflelo_epi16(dl_ch128[1],_MM_SHUFFLE(2,3,0,1));
-            mmtmpD1 = _mm_shufflehi_epi16(mmtmpD1,_MM_SHUFFLE(2,3,0,1));
-            mmtmpD1 = _mm_sign_epi16(mmtmpD1,*(__m128i*)conjugate);
-            mmtmpD1 = _mm_madd_epi16(mmtmpD1,dl_ch128_2[1]);
+            mmtmpD1 = simde_mm_shufflelo_epi16(dl_ch128[1],SIMDE_MM_SHUFFLE(2,3,0,1));
+            mmtmpD1 = simde_mm_shufflehi_epi16(mmtmpD1,SIMDE_MM_SHUFFLE(2,3,0,1));
+            mmtmpD1 = simde_mm_sign_epi16(mmtmpD1,*(simde__m128i*)conjugate);
+            mmtmpD1 = simde_mm_madd_epi16(mmtmpD1,dl_ch128_2[1]);
             // mmtmpD1 contains imag part of 4 consecutive outputs (32-bit)
-            mmtmpD0 = _mm_srai_epi32(mmtmpD0,output_shift);
-            mmtmpD1 = _mm_srai_epi32(mmtmpD1,output_shift);
-            mmtmpD2 = _mm_unpacklo_epi32(mmtmpD0,mmtmpD1);
-            mmtmpD3 = _mm_unpackhi_epi32(mmtmpD0,mmtmpD1);
-            rho128[1] =_mm_packs_epi32(mmtmpD2,mmtmpD3);
+            mmtmpD0 = simde_mm_srai_epi32(mmtmpD0,output_shift);
+            mmtmpD1 = simde_mm_srai_epi32(mmtmpD1,output_shift);
+            mmtmpD2 = simde_mm_unpacklo_epi32(mmtmpD0,mmtmpD1);
+            mmtmpD3 = simde_mm_unpackhi_epi32(mmtmpD0,mmtmpD1);
+            rho128[1] =simde_mm_packs_epi32(mmtmpD2,mmtmpD3);
             //print_shorts("rx:",dl_ch128_2+1);
             //print_shorts("ch:",dl_ch128+1);
             //print_shorts("pack:",rho128+1);
@@ -1072,19 +1069,19 @@ void nr_dlsch_channel_compensation(uint32_t rx_size_symbol,
               ((int16_t*)&rho128[1])[5] +
               ((int16_t*)&rho128[1])[7])/16;*/
 
-            mmtmpD0 = _mm_madd_epi16(dl_ch128[2],dl_ch128_2[2]);
+            mmtmpD0 = simde_mm_madd_epi16(dl_ch128[2],dl_ch128_2[2]);
             // mmtmpD0 contains real part of 4 consecutive outputs (32-bit)
-            mmtmpD1 = _mm_shufflelo_epi16(dl_ch128[2],_MM_SHUFFLE(2,3,0,1));
-            mmtmpD1 = _mm_shufflehi_epi16(mmtmpD1,_MM_SHUFFLE(2,3,0,1));
-            mmtmpD1 = _mm_sign_epi16(mmtmpD1,*(__m128i*)conjugate);
-            mmtmpD1 = _mm_madd_epi16(mmtmpD1,dl_ch128_2[2]);
+            mmtmpD1 = simde_mm_shufflelo_epi16(dl_ch128[2],SIMDE_MM_SHUFFLE(2,3,0,1));
+            mmtmpD1 = simde_mm_shufflehi_epi16(mmtmpD1,SIMDE_MM_SHUFFLE(2,3,0,1));
+            mmtmpD1 = simde_mm_sign_epi16(mmtmpD1,*(simde__m128i*)conjugate);
+            mmtmpD1 = simde_mm_madd_epi16(mmtmpD1,dl_ch128_2[2]);
             // mmtmpD1 contains imag part of 4 consecutive outputs (32-bit)
-            mmtmpD0 = _mm_srai_epi32(mmtmpD0,output_shift);
-            mmtmpD1 = _mm_srai_epi32(mmtmpD1,output_shift);
-            mmtmpD2 = _mm_unpacklo_epi32(mmtmpD0,mmtmpD1);
-            mmtmpD3 = _mm_unpackhi_epi32(mmtmpD0,mmtmpD1);
+            mmtmpD0 = simde_mm_srai_epi32(mmtmpD0,output_shift);
+            mmtmpD1 = simde_mm_srai_epi32(mmtmpD1,output_shift);
+            mmtmpD2 = simde_mm_unpacklo_epi32(mmtmpD0,mmtmpD1);
+            mmtmpD3 = simde_mm_unpackhi_epi32(mmtmpD0,mmtmpD1);
 
-            rho128[2] = _mm_packs_epi32(mmtmpD2,mmtmpD3);
+            rho128[2] = simde_mm_packs_epi32(mmtmpD2,mmtmpD3);
             //print_shorts("rx:",dl_ch128_2+2);
             //print_shorts("ch:",dl_ch128+2);
             //print_shorts("pack:",rho128+2);
@@ -1115,190 +1112,6 @@ void nr_dlsch_channel_compensation(uint32_t rx_size_symbol,
       }
     }
   }
-  _mm_empty();
-  _m_empty();
-
-#elif defined(__arm__) || defined(__aarch64__)
-
-  unsigned short rb;
-
-  int16x4_t *dl_ch128,*dl_ch128_2,*rxdataF128;
-  int32x4_t mmtmpD0,mmtmpD1,mmtmpD0b,mmtmpD1b;
-  int16x8_t *dl_ch_mag128,*dl_ch_mag128b,mmtmpD2,mmtmpD3,mmtmpD4;
-  int16x8_t QAM_amp128,QAM_amp128b;
-  int16x4x2_t *rxdataF_comp128,*rho128;
-
-  int16_t conj[4]__attribute__((aligned(16))) = {1,-1,1,-1};
-  int32x4_t output_shift128 = vmovq_n_s32(-(int32_t)output_shift);
-
-  unsigned char symbol_mod = (symbol>=(7-frame_parms->Ncp)) ? symbol-(7-frame_parms->Ncp) : symbol;
-
-  uint32_t nb_rb_0 = length/12 + ((length%12)?1:0);
-
-  for (int l = 0; l < n_layers; l++) {
-    if (mod_order == 4) {
-      QAM_amp128  = vmovq_n_s16(QAM16_n1);  // 2/sqrt(10)
-      QAM_amp128b = vmovq_n_s16(0);
-    } else if (mod_order == 6) {
-      QAM_amp128  = vmovq_n_s16(QAM64_n1); //
-      QAM_amp128b = vmovq_n_s16(QAM64_n2);
-    }
-    //    printf("comp: rxdataF_comp %p, symbol %d\n",rxdataF_comp[0],symbol);
-
-    for (int aarx = 0; aarx < frame_parms->nb_antennas_rx; aarx++) {
-      dl_ch128          = (int16x4_t*)dl_ch_estimates_ext[(l << 1) + aarx];
-      dl_ch_mag128 = (int16x8_t *)dl_ch_mag[l][aarx];
-      dl_ch_mag128b = (int16x8_t *)dl_ch_magb[l][aarx];
-      rxdataF128 = (int16x4_t *)rxdataF_ext[aarx];
-      rxdataF_comp128 = (int16x4x2_t *)(rxdataF_comp[l][aarx] + symbol * nb_rb * 12);
-
-      for (rb=0; rb<nb_rb_0; rb++) {
-  if (mod_order>2) {
-    // get channel amplitude if not QPSK
-    mmtmpD0 = vmull_s16(dl_ch128[0], dl_ch128[0]);
-    // mmtmpD0 = [ch0*ch0,ch1*ch1,ch2*ch2,ch3*ch3];
-    mmtmpD0 = vqshlq_s32(vqaddq_s32(mmtmpD0,vrev64q_s32(mmtmpD0)),output_shift128);
-    // mmtmpD0 = [ch0*ch0 + ch1*ch1,ch0*ch0 + ch1*ch1,ch2*ch2 + ch3*ch3,ch2*ch2 + ch3*ch3]>>output_shift128 on 32-bits
-    mmtmpD1 = vmull_s16(dl_ch128[1], dl_ch128[1]);
-    mmtmpD1 = vqshlq_s32(vqaddq_s32(mmtmpD1,vrev64q_s32(mmtmpD1)),output_shift128);
-    mmtmpD2 = vcombine_s16(vmovn_s32(mmtmpD0),vmovn_s32(mmtmpD1));
-    // mmtmpD2 = [ch0*ch0 + ch1*ch1,ch0*ch0 + ch1*ch1,ch2*ch2 + ch3*ch3,ch2*ch2 + ch3*ch3,ch4*ch4 + ch5*ch5,ch4*ch4 + ch5*ch5,ch6*ch6 + ch7*ch7,ch6*ch6 + ch7*ch7]>>output_shift128 on 16-bits
-    mmtmpD0 = vmull_s16(dl_ch128[2], dl_ch128[2]);
-    mmtmpD0 = vqshlq_s32(vqaddq_s32(mmtmpD0,vrev64q_s32(mmtmpD0)),output_shift128);
-    mmtmpD1 = vmull_s16(dl_ch128[3], dl_ch128[3]);
-    mmtmpD1 = vqshlq_s32(vqaddq_s32(mmtmpD1,vrev64q_s32(mmtmpD1)),output_shift128);
-    mmtmpD3 = vcombine_s16(vmovn_s32(mmtmpD0),vmovn_s32(mmtmpD1));
-
-    mmtmpD0 = vmull_s16(dl_ch128[4], dl_ch128[4]);
-    mmtmpD0 = vqshlq_s32(vqaddq_s32(mmtmpD0,vrev64q_s32(mmtmpD0)),output_shift128);
-    mmtmpD1 = vmull_s16(dl_ch128[5], dl_ch128[5]);
-    mmtmpD1 = vqshlq_s32(vqaddq_s32(mmtmpD1,vrev64q_s32(mmtmpD1)),output_shift128);
-    mmtmpD4 = vcombine_s16(vmovn_s32(mmtmpD0),vmovn_s32(mmtmpD1));
-
-    dl_ch_mag128b[0] = vqdmulhq_s16(mmtmpD2,QAM_amp128b);
-    dl_ch_mag128b[1] = vqdmulhq_s16(mmtmpD3,QAM_amp128b);
-    dl_ch_mag128[0] = vqdmulhq_s16(mmtmpD2,QAM_amp128);
-    dl_ch_mag128[1] = vqdmulhq_s16(mmtmpD3,QAM_amp128);
-
-    dl_ch_mag128b[2] = vqdmulhq_s16(mmtmpD4,QAM_amp128b);
-    dl_ch_mag128[2]  = vqdmulhq_s16(mmtmpD4,QAM_amp128);
-  }
-
-  mmtmpD0 = vmull_s16(dl_ch128[0], rxdataF128[0]);
-  //mmtmpD0 = [Re(ch[0])Re(rx[0]) Im(ch[0])Im(ch[0]) Re(ch[1])Re(rx[1]) Im(ch[1])Im(ch[1])]
-  mmtmpD1 = vmull_s16(dl_ch128[1], rxdataF128[1]);
-  //mmtmpD1 = [Re(ch[2])Re(rx[2]) Im(ch[2])Im(ch[2]) Re(ch[3])Re(rx[3]) Im(ch[3])Im(ch[3])]
-  mmtmpD0 = vcombine_s32(vpadd_s32(vget_low_s32(mmtmpD0),vget_high_s32(mmtmpD0)),
-             vpadd_s32(vget_low_s32(mmtmpD1),vget_high_s32(mmtmpD1)));
-  //mmtmpD0 = [Re(ch[0])Re(rx[0])+Im(ch[0])Im(ch[0]) Re(ch[1])Re(rx[1])+Im(ch[1])Im(ch[1]) Re(ch[2])Re(rx[2])+Im(ch[2])Im(ch[2]) Re(ch[3])Re(rx[3])+Im(ch[3])Im(ch[3])]
-
-  mmtmpD0b = vmull_s16(vrev32_s16(vmul_s16(dl_ch128[0],*(int16x4_t*)conj)), rxdataF128[0]);
-  //mmtmpD0 = [-Im(ch[0])Re(rx[0]) Re(ch[0])Im(rx[0]) -Im(ch[1])Re(rx[1]) Re(ch[1])Im(rx[1])]
-  mmtmpD1b = vmull_s16(vrev32_s16(vmul_s16(dl_ch128[1],*(int16x4_t*)conj)), rxdataF128[1]);
-  //mmtmpD0 = [-Im(ch[2])Re(rx[2]) Re(ch[2])Im(rx[2]) -Im(ch[3])Re(rx[3]) Re(ch[3])Im(rx[3])]
-  mmtmpD1 = vcombine_s32(vpadd_s32(vget_low_s32(mmtmpD0b),vget_high_s32(mmtmpD0b)),
-             vpadd_s32(vget_low_s32(mmtmpD1b),vget_high_s32(mmtmpD1b)));
-  //mmtmpD1 = [-Im(ch[0])Re(rx[0])+Re(ch[0])Im(rx[0]) -Im(ch[1])Re(rx[1])+Re(ch[1])Im(rx[1]) -Im(ch[2])Re(rx[2])+Re(ch[2])Im(rx[2]) -Im(ch[3])Re(rx[3])+Re(ch[3])Im(rx[3])]
-
-  mmtmpD0 = vqshlq_s32(mmtmpD0,output_shift128);
-  mmtmpD1 = vqshlq_s32(mmtmpD1,output_shift128);
-  rxdataF_comp128[0] = vzip_s16(vmovn_s32(mmtmpD0),vmovn_s32(mmtmpD1));
-  mmtmpD0 = vmull_s16(dl_ch128[2], rxdataF128[2]);
-  mmtmpD1 = vmull_s16(dl_ch128[3], rxdataF128[3]);
-  mmtmpD0 = vcombine_s32(vpadd_s32(vget_low_s32(mmtmpD0),vget_high_s32(mmtmpD0)),
-             vpadd_s32(vget_low_s32(mmtmpD1),vget_high_s32(mmtmpD1)));
-  mmtmpD0b = vmull_s16(vrev32_s16(vmul_s16(dl_ch128[2],*(int16x4_t*)conj)), rxdataF128[2]);
-  mmtmpD1b = vmull_s16(vrev32_s16(vmul_s16(dl_ch128[3],*(int16x4_t*)conj)), rxdataF128[3]);
-  mmtmpD1 = vcombine_s32(vpadd_s32(vget_low_s32(mmtmpD0b),vget_high_s32(mmtmpD0b)),
-             vpadd_s32(vget_low_s32(mmtmpD1b),vget_high_s32(mmtmpD1b)));
-  mmtmpD0 = vqshlq_s32(mmtmpD0,output_shift128);
-  mmtmpD1 = vqshlq_s32(mmtmpD1,output_shift128);
-  rxdataF_comp128[1] = vzip_s16(vmovn_s32(mmtmpD0),vmovn_s32(mmtmpD1));
-
-
-  mmtmpD0 = vmull_s16(dl_ch128[4], rxdataF128[4]);
-  mmtmpD1 = vmull_s16(dl_ch128[5], rxdataF128[5]);
-  mmtmpD0 = vcombine_s32(vpadd_s32(vget_low_s32(mmtmpD0),vget_high_s32(mmtmpD0)),
-                         vpadd_s32(vget_low_s32(mmtmpD1),vget_high_s32(mmtmpD1)));
-
-  mmtmpD0b = vmull_s16(vrev32_s16(vmul_s16(dl_ch128[4],*(int16x4_t*)conj)), rxdataF128[4]);
-  mmtmpD1b = vmull_s16(vrev32_s16(vmul_s16(dl_ch128[5],*(int16x4_t*)conj)), rxdataF128[5]);
-  mmtmpD1 = vcombine_s32(vpadd_s32(vget_low_s32(mmtmpD0b),vget_high_s32(mmtmpD0b)),
-                         vpadd_s32(vget_low_s32(mmtmpD1b),vget_high_s32(mmtmpD1b)));
-
-
-  mmtmpD0 = vqshlq_s32(mmtmpD0,output_shift128);
-  mmtmpD1 = vqshlq_s32(mmtmpD1,output_shift128);
-  rxdataF_comp128[2] = vzip_s16(vmovn_s32(mmtmpD0),vmovn_s32(mmtmpD1));
-
-
-  dl_ch128+=6;
-  dl_ch_mag128+=3;
-  dl_ch_mag128b+=3;
-  rxdataF128+=6;
-  rxdataF_comp128+=3;
-
-      }
-    }
-  }
-
-  if (rho) {
-    for (aarx=0; aarx<frame_parms->nb_antennas_rx; aarx++) {
-      rho128        = (int16x4x2_t*)&rho[aarx][symbol*frame_parms->N_RB_DL*12];
-      dl_ch128      = (int16x4_t*)dl_ch_estimates_ext[aarx];
-      dl_ch128_2    = (int16x4_t*)dl_ch_estimates_ext[2+aarx];
-      for (rb=0; rb<nb_rb_0; rb++) {
-  mmtmpD0 = vmull_s16(dl_ch128[0], dl_ch128_2[0]);
-  mmtmpD1 = vmull_s16(dl_ch128[1], dl_ch128_2[1]);
-  mmtmpD0 = vcombine_s32(vpadd_s32(vget_low_s32(mmtmpD0),vget_high_s32(mmtmpD0)),
-             vpadd_s32(vget_low_s32(mmtmpD1),vget_high_s32(mmtmpD1)));
-  mmtmpD0b = vmull_s16(vrev32_s16(vmul_s16(dl_ch128[0],*(int16x4_t*)conj)), dl_ch128_2[0]);
-  mmtmpD1b = vmull_s16(vrev32_s16(vmul_s16(dl_ch128[1],*(int16x4_t*)conj)), dl_ch128_2[1]);
-  mmtmpD1 = vcombine_s32(vpadd_s32(vget_low_s32(mmtmpD0b),vget_high_s32(mmtmpD0b)),
-             vpadd_s32(vget_low_s32(mmtmpD1b),vget_high_s32(mmtmpD1b)));
-
-  mmtmpD0 = vqshlq_s32(mmtmpD0,output_shift128);
-  mmtmpD1 = vqshlq_s32(mmtmpD1,output_shift128);
-  rho128[0] = vzip_s16(vmovn_s32(mmtmpD0),vmovn_s32(mmtmpD1));
-
-  mmtmpD0 = vmull_s16(dl_ch128[2], dl_ch128_2[2]);
-  mmtmpD1 = vmull_s16(dl_ch128[3], dl_ch128_2[3]);
-  mmtmpD0 = vcombine_s32(vpadd_s32(vget_low_s32(mmtmpD0),vget_high_s32(mmtmpD0)),
-             vpadd_s32(vget_low_s32(mmtmpD1),vget_high_s32(mmtmpD1)));
-  mmtmpD0b = vmull_s16(vrev32_s16(vmul_s16(dl_ch128[2],*(int16x4_t*)conj)), dl_ch128_2[2]);
-  mmtmpD1b = vmull_s16(vrev32_s16(vmul_s16(dl_ch128[3],*(int16x4_t*)conj)), dl_ch128_2[3]);
-  mmtmpD1 = vcombine_s32(vpadd_s32(vget_low_s32(mmtmpD0b),vget_high_s32(mmtmpD0b)),
-             vpadd_s32(vget_low_s32(mmtmpD1b),vget_high_s32(mmtmpD1b)));
-
-  mmtmpD0 = vqshlq_s32(mmtmpD0,output_shift128);
-  mmtmpD1 = vqshlq_s32(mmtmpD1,output_shift128);
-  rho128[1] = vzip_s16(vmovn_s32(mmtmpD0),vmovn_s32(mmtmpD1));
-
-  mmtmpD0 = vmull_s16(dl_ch128[0], dl_ch128_2[0]);
-  mmtmpD1 = vmull_s16(dl_ch128[1], dl_ch128_2[1]);
-  mmtmpD0 = vcombine_s32(vpadd_s32(vget_low_s32(mmtmpD0),vget_high_s32(mmtmpD0)),
-             vpadd_s32(vget_low_s32(mmtmpD1),vget_high_s32(mmtmpD1)));
-  mmtmpD0b = vmull_s16(vrev32_s16(vmul_s16(dl_ch128[4],*(int16x4_t*)conj)), dl_ch128_2[4]);
-  mmtmpD1b = vmull_s16(vrev32_s16(vmul_s16(dl_ch128[5],*(int16x4_t*)conj)), dl_ch128_2[5]);
-  mmtmpD1 = vcombine_s32(vpadd_s32(vget_low_s32(mmtmpD0b),vget_high_s32(mmtmpD0b)),
-             vpadd_s32(vget_low_s32(mmtmpD1b),vget_high_s32(mmtmpD1b)));
-
-  mmtmpD0 = vqshlq_s32(mmtmpD0,output_shift128);
-  mmtmpD1 = vqshlq_s32(mmtmpD1,output_shift128);
-  rho128[2] = vzip_s16(vmovn_s32(mmtmpD0),vmovn_s32(mmtmpD1));
-
-
-  dl_ch128+=6;
-  dl_ch128_2+=6;
-  rho128+=3;
-      }
-
-      if (first_symbol_flag==1) {
-  measurements->rx_correlation[0][aarx] = signal_energy(&rho[aarx][symbol*frame_parms->N_RB_DL*12],rb*12);
-      }
-    }
-  }
-#endif
 }
 
 void nr_dlsch_scale_channel(uint32_t rx_size_symbol,
@@ -1310,13 +1123,13 @@ void nr_dlsch_scale_channel(uint32_t rx_size_symbol,
                             uint8_t pilots,
                             uint32_t len,
                             unsigned short nb_rb)
 {
 
-#if defined(__x86_64__)||defined(__i386__)
 
   short rb, ch_amp;
   unsigned char aatx,aarx;
-  __m128i *dl_ch128, ch_amp128;
+  simde__m128i *dl_ch128, ch_amp128;
 
   uint32_t nb_rb_0 = len/12 + ((len%12)?1:0);
 
@@ -1327,32 +1140,28 @@ void nr_dlsch_scale_channel(uint32_t rx_size_symbol,
   LOG_D(PHY,"Scaling PDSCH Chest in OFDM symbol %d by %d, pilots %d nb_rb %d NCP %d symbol %d\n",symbol,ch_amp,pilots,nb_rb,frame_parms->Ncp,symbol);
   // printf("Scaling PDSCH Chest in OFDM symbol %d by %d\n",symbol_mod,ch_amp);
 
-  ch_amp128 = _mm_set1_epi16(ch_amp); // Q3.13
+  ch_amp128 = simde_mm_set1_epi16(ch_amp); // Q3.13
 
   for (aatx=0; aatx<n_tx; aatx++) {
     for (aarx=0; aarx<n_rx; aarx++) {
 
-      dl_ch128=(__m128i *)dl_ch_estimates_ext[(aatx*n_rx)+aarx];
+      dl_ch128=(simde__m128i *)dl_ch_estimates_ext[(aatx*n_rx)+aarx];
 
       for (rb=0;rb<nb_rb_0;rb++) {
 
-        dl_ch128[0] = _mm_mulhi_epi16(dl_ch128[0],ch_amp128);
-        dl_ch128[0] = _mm_slli_epi16(dl_ch128[0],3);
+        dl_ch128[0] = simde_mm_mulhi_epi16(dl_ch128[0],ch_amp128);
+        dl_ch128[0] = simde_mm_slli_epi16(dl_ch128[0],3);
 
-        dl_ch128[1] = _mm_mulhi_epi16(dl_ch128[1],ch_amp128);
-        dl_ch128[1] = _mm_slli_epi16(dl_ch128[1],3);
+        dl_ch128[1] = simde_mm_mulhi_epi16(dl_ch128[1],ch_amp128);
+        dl_ch128[1] = simde_mm_slli_epi16(dl_ch128[1],3);
 
-        dl_ch128[2] = _mm_mulhi_epi16(dl_ch128[2],ch_amp128);
-        dl_ch128[2] = _mm_slli_epi16(dl_ch128[2],3);
+        dl_ch128[2] = simde_mm_mulhi_epi16(dl_ch128[2],ch_amp128);
+        dl_ch128[2] = simde_mm_slli_epi16(dl_ch128[2],3);
         dl_ch128+=3;
 
       }
     }
   }
-
-#elif defined(__arm__) || defined(__aarch64__)
-
-#endif
 }
 
 
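The mulhi/slli pair above implements a Q3.13 fixed-point multiply: simde_mm_mulhi_epi16 keeps bits [31:16] of each 16x16 product, and the shift left by 3 restores the Q3.13 binary point, so the net effect per estimate is (ch * ch_amp) >> 13 with the low bits truncated. A one-lane scalar sketch:

#include <stdint.h>

// Scalar equivalent of one lane of the scaling loop above.
static int16_t scale_q3_13(int16_t ch, int16_t ch_amp /* Q3.13 */)
{
  int32_t prod = (int32_t)ch * ch_amp;
  return (int16_t)((prod >> 16) << 3); // == (prod >> 13) with low 3 bits cleared
}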
@@ -1367,11 +1176,10 @@ void nr_dlsch_channel_level(uint32_t rx_size_symbol,
 			                      unsigned short nb_rb)
 {
 
-#if defined(__x86_64__)||defined(__i386__)
 
   short rb;
   unsigned char aatx,aarx;
-  __m128i *dl_ch128, avg128D;
+  simde__m128i *dl_ch128, avg128D;
 
   //nb_rb*nre = y * 2^x
   int16_t x = factor2(len);
@@ -1385,14 +1193,14 @@ void nr_dlsch_channel_level(uint32_t rx_size_symbol,
   for (aatx=0; aatx<n_tx; aatx++)
     for (aarx=0; aarx<frame_parms->nb_antennas_rx; aarx++) {
       //clear average level
-      avg128D = _mm_setzero_si128();
+      avg128D = simde_mm_setzero_si128();
 
-      dl_ch128=(__m128i *)dl_ch_estimates_ext[(aatx*frame_parms->nb_antennas_rx)+aarx];
+      dl_ch128=(simde__m128i *)dl_ch_estimates_ext[(aatx*frame_parms->nb_antennas_rx)+aarx];
 
       for (rb=0;rb<nb_rb_0;rb++) {
-        avg128D = _mm_add_epi32(avg128D,_mm_srai_epi32(_mm_madd_epi16(dl_ch128[0],dl_ch128[0]),x));
-        avg128D = _mm_add_epi32(avg128D,_mm_srai_epi32(_mm_madd_epi16(dl_ch128[1],dl_ch128[1]),x));
-        avg128D = _mm_add_epi32(avg128D,_mm_srai_epi32(_mm_madd_epi16(dl_ch128[2],dl_ch128[2]),x));
+        avg128D = simde_mm_add_epi32(avg128D,simde_mm_srai_epi32(simde_mm_madd_epi16(dl_ch128[0],dl_ch128[0]),x));
+        avg128D = simde_mm_add_epi32(avg128D,simde_mm_srai_epi32(simde_mm_madd_epi16(dl_ch128[1],dl_ch128[1]),x));
+        avg128D = simde_mm_add_epi32(avg128D,simde_mm_srai_epi32(simde_mm_madd_epi16(dl_ch128[2],dl_ch128[2]),x));
         dl_ch128+=3;
       }
 
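Each simde_mm_madd_epi16(ch, ch) lane sums re^2 + im^2 for one complex estimate, i.e. |h|^2, and the srai by x pre-scales the terms so the 32-bit accumulator cannot overflow; per the comment above, x = factor2(len) is the power-of-two part of len = y * 2^x. A hedged scalar sketch of the per-antenna average:

#include <stdint.h>

// Scalar sketch of the energy average above (assumes len = y * 2^x).
static int32_t channel_level_ref(const int16_t *ch /* re,im pairs */,
                                 int len /* complex REs */, int x)
{
  int32_t acc = 0;
  for (int k = 0; k < len; k++) {
    int32_t e = (int32_t)ch[2*k]*ch[2*k] + (int32_t)ch[2*k+1]*ch[2*k+1];
    acc += e >> x; // simde_mm_srai_epi32 then simde_mm_add_epi32
  }
  return acc / (len >> x); // divide by y to complete the mean over len
}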
@@ -1403,92 +1211,33 @@ void nr_dlsch_channel_level(uint32_t rx_size_symbol,
                 //  printf("Channel level : %d\n",avg[(aatx<<1)+aarx]);
     }
 
-  _mm_empty();
-  _m_empty();
-
-#elif defined(__arm__) || defined(__aarch64__)
-
-  short rb;
-  unsigned char aatx,aarx,nre=12,symbol_mod;
-  int32x4_t avg128D;
-  int16x4_t *dl_ch128;
-
-  symbol_mod = (symbol>=(7-frame_parms->Ncp)) ? symbol-(7-frame_parms->Ncp) : symbol;
-  uint32_t nb_rb_0 = len/12 + ((len%12)?1:0);
-  for (aatx=0; aatx<frame_parms->nb_antenna_ports_gNB; aatx++)
-    for (aarx=0; aarx<frame_parms->nb_antennas_rx; aarx++) {
-      //clear average level
-      avg128D = vdupq_n_s32(0);
-      // 5 is always a symbol with no pilots for both normal and extended prefix
-
-      dl_ch128=(int16x4_t *)dl_ch_estimates_ext[(aatx<<1)+aarx];
-
-      for (rb=0; rb<nb_rb_0; rb++) {
-        //  printf("rb %d : ",rb);
-        //  print_shorts("ch",&dl_ch128[0]);
-        avg128D = vqaddq_s32(avg128D,vmull_s16(dl_ch128[0],dl_ch128[0]));
-        avg128D = vqaddq_s32(avg128D,vmull_s16(dl_ch128[1],dl_ch128[1]));
-        avg128D = vqaddq_s32(avg128D,vmull_s16(dl_ch128[2],dl_ch128[2]));
-        avg128D = vqaddq_s32(avg128D,vmull_s16(dl_ch128[3],dl_ch128[3]));
-
-        if (((symbol_mod == 0) || (symbol_mod == (frame_parms->Ncp-1)))&&(frame_parms->nb_antenna_ports_gNB!=1)) {
-          dl_ch128+=4;
-        } else {
-          avg128D = vqaddq_s32(avg128D,vmull_s16(dl_ch128[4],dl_ch128[4]));
-          avg128D = vqaddq_s32(avg128D,vmull_s16(dl_ch128[5],dl_ch128[5]));
-          dl_ch128+=6;
-        }
-
-        /*
-          if (rb==0) {
-          print_shorts("dl_ch128",&dl_ch128[0]);
-          print_shorts("dl_ch128",&dl_ch128[1]);
-          print_shorts("dl_ch128",&dl_ch128[2]);
-          }
-        */
-      }
-
-      if (symbol==2) //assume start symbol 2
-          nre=6;
-      else
-          nre=12;
-
-      avg[(aatx<<1)+aarx] = (((int32_t*)&avg128D)[0] +
-                             ((int32_t*)&avg128D)[1] +
-                             ((int32_t*)&avg128D)[2] +
-                             ((int32_t*)&avg128D)[3])/(nb_rb*nre);
-
-      //            printf("Channel level : %d\n",avg[(aatx<<1)+aarx]);
-    }
-
-
-#endif
+  simde_mm_empty();
+  simde_m_empty();
 }
 
 static void nr_dlsch_channel_level_median(uint32_t rx_size_symbol, int32_t dl_ch_estimates_ext[][rx_size_symbol], int32_t *median, int n_tx, int n_rx, int length)
 {
 
-#if defined(__x86_64__)||defined(__i386__)
 
   short ii;
   int aatx,aarx;
   int length2;
   int max = 0, min=0;
   int norm_pack;
-  __m128i *dl_ch128, norm128D;
+  simde__m128i *dl_ch128, norm128D;
 
   for (aatx=0; aatx<n_tx; aatx++) {
     for (aarx=0; aarx<n_rx; aarx++) {
       max = median[aatx*n_rx + aarx];//initialize the med point for max
       min = median[aatx*n_rx + aarx];//initialize the med point for min
-      norm128D = _mm_setzero_si128();
+      norm128D = simde_mm_setzero_si128();
 
-      dl_ch128=(__m128i *)dl_ch_estimates_ext[aatx*n_rx + aarx];
+      dl_ch128=(simde__m128i *)dl_ch_estimates_ext[aatx*n_rx + aarx];
 
       length2 = length>>2;//length = number of REs, hence length2=nb_REs*(32/128) in SIMD loop
 
       for (ii=0;ii<length2;ii++) {
-        norm128D = _mm_srai_epi32( _mm_madd_epi16(dl_ch128[0],dl_ch128[0]), 2);//[|H_0|²/4 |H_1|²/4 |H_2|²/4 |H_3|²/4]
+        norm128D = simde_mm_srai_epi32(simde_mm_madd_epi16(dl_ch128[0],dl_ch128[0]), 2);//[|H_0|²/4 |H_1|²/4 |H_2|²/4 |H_3|²/4]
         //print_ints("norm128D",&norm128D[0]);
 
         norm_pack = ((int32_t*)&norm128D)[0] +
@@ -1508,49 +1257,8 @@ static void nr_dlsch_channel_level_median(uint32_t rx_size_symbol, int32_t dl_ch
       }
   }
 
-  _mm_empty();
-  _m_empty();
-
-#elif defined(__arm__) || defined(__aarch64__)
-
-  short rb;
-  unsigned char aatx,aarx,nre=12,symbol_mod;
-  int32x4_t norm128D;
-  int16x4_t *dl_ch128;
-
-  for (aatx=0; aatx<frame_parms->nb_antenna_ports_gNB; aatx++){
-    for (aarx=0; aarx<frame_parms->nb_antennas_rx; aarx++) {
-      max = 0;
-      min = 0;
-      norm128D = vdupq_n_s32(0);
-
-      dl_ch128=(int16x4_t *)dl_ch_estimates_ext[aatx*n_rx + aarx];
-
-      length_mod8=length&3;
-      length2 = length>>2;
-
-      for (ii=0;ii<length2;ii++) {
-        norm128D = vshrq_n_u32(vmull_s16(dl_ch128[0],dl_ch128[0]), 1);
-        norm_pack = ((int32_t*)&norm128D)[0] +
-                    ((int32_t*)&norm128D)[1] +
-                    ((int32_t*)&norm128D)[2] +
-                    ((int32_t*)&norm128D)[3];
-
-        if (norm_pack > max)
-          max = norm_pack;
-        if (norm_pack < min)
-          min = norm_pack;
-
-          dl_ch128+=1;
-      }
-
-        median[aatx*n_rx + aarx]  = (max+min)>>1;
-
-      //printf("Channel level  median [%d]: %d\n",aatx*n_rx + aarx, median[aatx*n_rx + aarx]);
-      }
-    }
-#endif
-
+  simde_mm_empty();
+  simde_m_empty();
 }
 
 //==============================================================================================
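Despite its name, nr_dlsch_channel_level_median (next hunk) computes a midrange: norm128D holds the four |H_k|^2/4 terms of one quad of REs, their sum is tracked against running max/min seeded with the previous median, and the result is (max+min)/2. In scalar form, as a sketch:

#include <stdint.h>

// Scalar sketch of the midrange tracked by the SIMD loop below.
static int32_t channel_level_midrange_ref(const int16_t *ch, int length, int32_t seed)
{
  int32_t max = seed, min = seed;
  for (int q = 0; q < length/4; q++) { // 4 complex REs per 128-bit vector
    int32_t norm = 0;
    for (int k = 0; k < 4; k++) {
      int32_t re = ch[8*q + 2*k], im = ch[8*q + 2*k + 1];
      norm += (re*re + im*im) >> 2; // |H_k|^2 / 4, as in the madd/srai pattern
    }
    if (norm > max) max = norm;
    if (norm < min) min = norm;
  }
  return (max + min) >> 1;
}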
@@ -1688,41 +1396,39 @@ void nr_dlsch_detection_mrc(uint32_t rx_size_symbol,
                             int32_t dl_ch_magr[][n_rx][rx_size_symbol],
                             unsigned char symbol,
                             unsigned short nb_rb,
                             int length)
 {
-#if defined(__x86_64__)||defined(__i386__)
   unsigned char aatx, aarx;
   int i;
-  __m128i *rxdataF_comp128_0,*rxdataF_comp128_1,*dl_ch_mag128_0,*dl_ch_mag128_1,*dl_ch_mag128_0b,*dl_ch_mag128_1b,*dl_ch_mag128_0r,*dl_ch_mag128_1r;
+  simde__m128i *rxdataF_comp128_0,*rxdataF_comp128_1,*dl_ch_mag128_0,*dl_ch_mag128_1,*dl_ch_mag128_0b,*dl_ch_mag128_1b,*dl_ch_mag128_0r,*dl_ch_mag128_1r;
 
   uint32_t nb_rb_0 = length/12 + ((length%12)?1:0);
 
   if (n_rx>1) {
     for (aatx=0; aatx<n_tx; aatx++) {
-      rxdataF_comp128_0 = (__m128i *)(rxdataF_comp[aatx][0] + symbol * nb_rb * 12);
-      dl_ch_mag128_0 = (__m128i *)dl_ch_mag[aatx][0];
-      dl_ch_mag128_0b = (__m128i *)dl_ch_magb[aatx][0];
-      dl_ch_mag128_0r = (__m128i *)dl_ch_magr[aatx][0];
+      rxdataF_comp128_0 = (simde__m128i *)(rxdataF_comp[aatx][0] + symbol * nb_rb * 12);
+      dl_ch_mag128_0 = (simde__m128i *)dl_ch_mag[aatx][0];
+      dl_ch_mag128_0b = (simde__m128i *)dl_ch_magb[aatx][0];
+      dl_ch_mag128_0r = (simde__m128i *)dl_ch_magr[aatx][0];
       for (aarx=1; aarx<n_rx; aarx++) {
-        rxdataF_comp128_1 = (__m128i *)(rxdataF_comp[aatx][aarx] + symbol * nb_rb * 12);
-        dl_ch_mag128_1 = (__m128i *)dl_ch_mag[aatx][aarx];
-        dl_ch_mag128_1b = (__m128i *)dl_ch_magb[aatx][aarx];
-        dl_ch_mag128_1r = (__m128i *)dl_ch_magr[aatx][aarx];
+        rxdataF_comp128_1 = (simde__m128i *)(rxdataF_comp[aatx][aarx] + symbol * nb_rb * 12);
+        dl_ch_mag128_1 = (simde__m128i *)dl_ch_mag[aatx][aarx];
+        dl_ch_mag128_1b = (simde__m128i *)dl_ch_magb[aatx][aarx];
+        dl_ch_mag128_1r = (simde__m128i *)dl_ch_magr[aatx][aarx];
 
         // MRC on each re of rb, both on MF output and magnitude (for 16QAM/64QAM/256 llr computation)
         for (i=0; i<nb_rb_0*3; i++) {
-          rxdataF_comp128_0[i] = _mm_adds_epi16(rxdataF_comp128_0[i],rxdataF_comp128_1[i]);
-          dl_ch_mag128_0[i]    = _mm_adds_epi16(dl_ch_mag128_0[i],dl_ch_mag128_1[i]);
-          dl_ch_mag128_0b[i]   = _mm_adds_epi16(dl_ch_mag128_0b[i],dl_ch_mag128_1b[i]);
-          dl_ch_mag128_0r[i]   = _mm_adds_epi16(dl_ch_mag128_0r[i],dl_ch_mag128_1r[i]);
+          rxdataF_comp128_0[i] = simde_mm_adds_epi16(rxdataF_comp128_0[i],rxdataF_comp128_1[i]);
+          dl_ch_mag128_0[i]    = simde_mm_adds_epi16(dl_ch_mag128_0[i],dl_ch_mag128_1[i]);
+          dl_ch_mag128_0b[i]   = simde_mm_adds_epi16(dl_ch_mag128_0b[i],dl_ch_mag128_1b[i]);
+          dl_ch_mag128_0r[i]   = simde_mm_adds_epi16(dl_ch_mag128_0r[i],dl_ch_mag128_1r[i]);
         }
       }
     }
 #ifdef DEBUG_DLSCH_DEMOD
     for (i=0; i<nb_rb_0*3; i++) {
     printf("symbol%d RB %d\n",symbol,i/3);
-    rxdataF_comp128_0 = (__m128i *)(rxdataF_comp[0][0] + symbol * nb_rb * 12);
-    rxdataF_comp128_1 = (__m128i *)(rxdataF_comp[0][n_rx] + symbol * nb_rb * 12);
+    rxdataF_comp128_0 = (simde__m128i *)(rxdataF_comp[0][0] + symbol * nb_rb * 12);
+    rxdataF_comp128_1 = (simde__m128i *)(rxdataF_comp[0][n_rx] + symbol * nb_rb * 12);
     print_shorts("tx 1 mrc_re/mrc_Im:",(int16_t*)&rxdataF_comp128_0[i]);
     print_shorts("tx 2 mrc_re/mrc_Im:",(int16_t*)&rxdataF_comp128_1[i]);
     // printf("mrc mag0 = %d = %d \n",((int16_t*)&dl_ch_mag128_0[0])[0],((int16_t*)&dl_ch_mag128_0[0])[1]);
@@ -1730,18 +1436,17 @@ void nr_dlsch_detection_mrc(uint32_t rx_size_symbol,
     }
 #endif
     if (rho) {
-      /*rho128_0 = (__m128i *) &rho[0][symbol*frame_parms->N_RB_DL*12];
-      rho128_1 = (__m128i *) &rho[1][symbol*frame_parms->N_RB_DL*12];
+      /*rho128_0 = (simde__m128i *) &rho[0][symbol*frame_parms->N_RB_DL*12];
+      rho128_1 = (simde__m128i *) &rho[1][symbol*frame_parms->N_RB_DL*12];
       for (i=0; i<nb_rb_0*3; i++) {
         //      print_shorts("mrc rho0:",&rho128_0[i]);
         //      print_shorts("mrc rho1:",&rho128_1[i]);
-        rho128_0[i] = _mm_adds_epi16(_mm_srai_epi16(rho128_0[i],1),_mm_srai_epi16(rho128_1[i],1));
+        rho128_0[i] = simde_mm_adds_epi16(simde_mm_srai_epi16(rho128_0[i],1),simde_mm_srai_epi16(rho128_1[i],1));
       }*/
       }
-    _mm_empty();
-    _m_empty();
+    simde_mm_empty();
+    simde_m_empty();
   }
-#endif
 }
 
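The MRC loop above reduces all receive antennas into antenna 0 by saturating 16-bit addition, applied identically to the matched-filter outputs and to the three magnitude references so the downstream LLR slicers keep a consistent scale. One lane of simde_mm_adds_epi16, for reference:

#include <stdint.h>

// Saturating int16 add: the per-lane operation of the MRC combine above.
static int16_t adds16(int16_t a, int16_t b)
{
  int32_t s = (int32_t)a + (int32_t)b;
  return s > INT16_MAX ? INT16_MAX : s < INT16_MIN ? INT16_MIN : (int16_t)s;
}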
 /* Zero Forcing Rx function: nr_a_sum_b()
@@ -1751,14 +1456,13 @@ void nr_dlsch_detection_mrc(uint32_t rx_size_symbol,
 void nr_a_sum_b(c16_t *input_x, c16_t *input_y, unsigned short nb_rb)
 {
   unsigned short rb;
-  __m128i *x = (__m128i *)input_x;
-  __m128i *y = (__m128i *)input_y;
+  simde__m128i *x = (simde__m128i *)input_x;
+  simde__m128i *y = (simde__m128i *)input_y;
 
   for (rb=0; rb<nb_rb; rb++) {
-    x[0] = _mm_adds_epi16(x[0], y[0]);
-    x[1] = _mm_adds_epi16(x[1], y[1]);
-    x[2] = _mm_adds_epi16(x[2], y[2]);
-
+    x[0] = simde_mm_adds_epi16(x[0], y[0]);
+    x[1] = simde_mm_adds_epi16(x[1], y[1]);
+    x[2] = simde_mm_adds_epi16(x[2], y[2]);
     x += 3;
     y += 3;
   }
@@ -1773,29 +1477,29 @@ void nr_a_mult_b(c16_t *a, c16_t *b, c16_t *c, unsigned short nb_rb, unsigned ch
   //This function is used to compute complex multiplications
   short nr_conjugate[8]__attribute__((aligned(16))) = {1,-1,1,-1,1,-1,1,-1};
   unsigned short rb;
-  __m128i *a_128,*b_128, *c_128, mmtmpD0,mmtmpD1,mmtmpD2,mmtmpD3;
+  simde__m128i *a_128,*b_128, *c_128, mmtmpD0,mmtmpD1,mmtmpD2,mmtmpD3;
 
-  a_128 = (__m128i *)a;
-  b_128 = (__m128i *)b;
+  a_128 = (simde__m128i *)a;
+  b_128 = (simde__m128i *)b;
 
-  c_128 = (__m128i *)c;
+  c_128 = (simde__m128i *)c;
 
   for (rb=0; rb<3*nb_rb; rb++) {
     // the real part
-    mmtmpD0 = _mm_sign_epi16(a_128[0],*(__m128i*)&nr_conjugate[0]);
-    mmtmpD0 = _mm_madd_epi16(mmtmpD0,b_128[0]); //Re: (a_re*b_re - a_im*b_im)
+    mmtmpD0 = simde_mm_sign_epi16(a_128[0],*(simde__m128i*)&nr_conjugate[0]);
+    mmtmpD0 = simde_mm_madd_epi16(mmtmpD0,b_128[0]); //Re: (a_re*b_re - a_im*b_im)
 
     // the imag part
-    mmtmpD1 = _mm_shufflelo_epi16(a_128[0],_MM_SHUFFLE(2,3,0,1));
-    mmtmpD1 = _mm_shufflehi_epi16(mmtmpD1,_MM_SHUFFLE(2,3,0,1));
-    mmtmpD1 = _mm_madd_epi16(mmtmpD1,b_128[0]);//Im: (x_im*y_re + x_re*y_im)
+    mmtmpD1 = simde_mm_shufflelo_epi16(a_128[0],SIMDE_MM_SHUFFLE(2,3,0,1));
+    mmtmpD1 = simde_mm_shufflehi_epi16(mmtmpD1,SIMDE_MM_SHUFFLE(2,3,0,1));
+    mmtmpD1 = simde_mm_madd_epi16(mmtmpD1,b_128[0]);//Im: (x_im*y_re + x_re*y_im)
 
-    mmtmpD0 = _mm_srai_epi32(mmtmpD0,output_shift0);
-    mmtmpD1 = _mm_srai_epi32(mmtmpD1,output_shift0);
-    mmtmpD2 = _mm_unpacklo_epi32(mmtmpD0,mmtmpD1);
-    mmtmpD3 = _mm_unpackhi_epi32(mmtmpD0,mmtmpD1);
+    mmtmpD0 = simde_mm_srai_epi32(mmtmpD0,output_shift0);
+    mmtmpD1 = simde_mm_srai_epi32(mmtmpD1,output_shift0);
+    mmtmpD2 = simde_mm_unpacklo_epi32(mmtmpD0,mmtmpD1);
+    mmtmpD3 = simde_mm_unpackhi_epi32(mmtmpD0,mmtmpD1);
 
-    c_128[0] = _mm_packs_epi32(mmtmpD2,mmtmpD3);
+    c_128[0] = simde_mm_packs_epi32(mmtmpD2,mmtmpD3);
 
     /*printf("\n Computing mult \n");
     print_shorts("a:",(int16_t*)&a_128[0]);
@@ -1818,14 +1522,14 @@ static inline void nr_element_sign(c16_t *a, // a
                                    int32_t sign)
 {
   const int16_t nr_sign[8] __attribute__((aligned(16))) = {-1, -1, -1, -1, -1, -1, -1, -1};
-  __m128i *a_128,*b_128;
+  simde__m128i *a_128,*b_128;
 
-  a_128 = (__m128i *)a;
-  b_128 = (__m128i *)b;
+  a_128 = (simde__m128i *)a;
+  b_128 = (simde__m128i *)b;
 
   for (int rb = 0; rb < 3 * nb_rb; rb++) {
     if (sign < 0)
-      b_128[rb] = _mm_sign_epi16(a_128[rb], ((__m128i *)nr_sign)[0]);
+      b_128[rb] = simde_mm_sign_epi16(a_128[rb], ((simde__m128i *)nr_sign)[0]);
     else
       b_128[rb] = a_128[rb];
 
@@ -2067,26 +1771,26 @@ void nr_conjch0_mult_ch1(int *ch0,
   //This function is used to compute multiplications in H_hermitian * H matrix
   short nr_conjugate[8]__attribute__((aligned(16))) = {-1,1,-1,1,-1,1,-1,1};
   unsigned short rb;
-  __m128i *dl_ch0_128,*dl_ch1_128, *ch0conj_ch1_128, mmtmpD0,mmtmpD1,mmtmpD2,mmtmpD3;
+  simde__m128i *dl_ch0_128,*dl_ch1_128, *ch0conj_ch1_128, mmtmpD0,mmtmpD1,mmtmpD2,mmtmpD3;
 
-  dl_ch0_128 = (__m128i *)ch0;
-  dl_ch1_128 = (__m128i *)ch1;
+  dl_ch0_128 = (simde__m128i *)ch0;
+  dl_ch1_128 = (simde__m128i *)ch1;
 
-  ch0conj_ch1_128 = (__m128i *)ch0conj_ch1;
+  ch0conj_ch1_128 = (simde__m128i *)ch0conj_ch1;
 
   for (rb=0; rb<3*nb_rb; rb++) {
 
-    mmtmpD0 = _mm_madd_epi16(dl_ch0_128[0],dl_ch1_128[0]);
-    mmtmpD1 = _mm_shufflelo_epi16(dl_ch0_128[0],_MM_SHUFFLE(2,3,0,1));
-    mmtmpD1 = _mm_shufflehi_epi16(mmtmpD1,_MM_SHUFFLE(2,3,0,1));
-    mmtmpD1 = _mm_sign_epi16(mmtmpD1,*(__m128i*)&nr_conjugate[0]);
-    mmtmpD1 = _mm_madd_epi16(mmtmpD1,dl_ch1_128[0]);
-    mmtmpD0 = _mm_srai_epi32(mmtmpD0,output_shift0);
-    mmtmpD1 = _mm_srai_epi32(mmtmpD1,output_shift0);
-    mmtmpD2 = _mm_unpacklo_epi32(mmtmpD0,mmtmpD1);
-    mmtmpD3 = _mm_unpackhi_epi32(mmtmpD0,mmtmpD1);
+    mmtmpD0 = simde_mm_madd_epi16(dl_ch0_128[0],dl_ch1_128[0]);
+    mmtmpD1 = simde_mm_shufflelo_epi16(dl_ch0_128[0],SIMDE_MM_SHUFFLE(2,3,0,1));
+    mmtmpD1 = simde_mm_shufflehi_epi16(mmtmpD1,SIMDE_MM_SHUFFLE(2,3,0,1));
+    mmtmpD1 = simde_mm_sign_epi16(mmtmpD1,*(simde__m128i*)&nr_conjugate[0]);
+    mmtmpD1 = simde_mm_madd_epi16(mmtmpD1,dl_ch1_128[0]);
+    mmtmpD0 = simde_mm_srai_epi32(mmtmpD0,output_shift0);
+    mmtmpD1 = simde_mm_srai_epi32(mmtmpD1,output_shift0);
+    mmtmpD2 = simde_mm_unpacklo_epi32(mmtmpD0,mmtmpD1);
+    mmtmpD3 = simde_mm_unpackhi_epi32(mmtmpD0,mmtmpD1);
 
-    ch0conj_ch1_128[0] = _mm_packs_epi32(mmtmpD2,mmtmpD3);
+    ch0conj_ch1_128[0] = simde_mm_packs_epi32(mmtmpD2,mmtmpD3);
 
     /*printf("\n Computing conjugates \n");
     print_shorts("ch0:",(int16_t*)&dl_ch0_128[0]);
@@ -2097,8 +1801,8 @@ void nr_conjch0_mult_ch1(int *ch0,
     dl_ch1_128+=1;
     ch0conj_ch1_128+=1;
   }
-  _mm_empty();
-  _m_empty();
+  simde_mm_empty();
+  simde_m_empty();
 }
 
 /* Zero Forcing Rx function: up to 4 layers
@@ -2196,48 +1900,48 @@ uint8_t nr_zero_forcing_rx(uint32_t rx_size_symbol,
     nr_element_sign(rxdataF_zforcing[rtx], (c16_t *)(rxdataF_comp[rtx][0] + symbol * nb_rb * 12), nb_rb_0, +1);
 
   //Update LLR thresholds with the Matrix determinant
-  __m128i *dl_ch_mag128_0=NULL,*dl_ch_mag128b_0=NULL,*dl_ch_mag128r_0=NULL,*determ_fin_128;
-  __m128i mmtmpD2,mmtmpD3;
-  __m128i QAM_amp128={0},QAM_amp128b={0},QAM_amp128r={0};
+  simde__m128i *dl_ch_mag128_0=NULL,*dl_ch_mag128b_0=NULL,*dl_ch_mag128r_0=NULL,*determ_fin_128;
+  simde__m128i mmtmpD2,mmtmpD3;
+  simde__m128i QAM_amp128={0},QAM_amp128b={0},QAM_amp128r={0};
   short nr_realpart[8]__attribute__((aligned(16))) = {1,0,1,0,1,0,1,0};
-  determ_fin_128      = (__m128i *)&determ_fin[0];
+  determ_fin_128      = (simde__m128i *)&determ_fin[0];
 
   if (mod_order>2) {
     if (mod_order == 4) {
-      QAM_amp128 = _mm_set1_epi16(QAM16_n1);  //2/sqrt(10)
-      QAM_amp128b = _mm_setzero_si128();
-      QAM_amp128r = _mm_setzero_si128();
+      QAM_amp128 = simde_mm_set1_epi16(QAM16_n1);  //2/sqrt(10)
+      QAM_amp128b = simde_mm_setzero_si128();
+      QAM_amp128r = simde_mm_setzero_si128();
     } else if (mod_order == 6) {
-      QAM_amp128  = _mm_set1_epi16(QAM64_n1); //4/sqrt{42}
-      QAM_amp128b = _mm_set1_epi16(QAM64_n2); //2/sqrt{42}
-      QAM_amp128r = _mm_setzero_si128();
+      QAM_amp128  = simde_mm_set1_epi16(QAM64_n1); //4/sqrt{42}
+      QAM_amp128b = simde_mm_set1_epi16(QAM64_n2); //2/sqrt{42}
+      QAM_amp128r = simde_mm_setzero_si128();
     } else if (mod_order == 8) {
-      QAM_amp128 = _mm_set1_epi16(QAM256_n1); //8/sqrt{170}
-      QAM_amp128b = _mm_set1_epi16(QAM256_n2);//4/sqrt{170}
-      QAM_amp128r = _mm_set1_epi16(QAM256_n3);//2/sqrt{170}
+      QAM_amp128 = simde_mm_set1_epi16(QAM256_n1); //8/sqrt{170}
+      QAM_amp128b = simde_mm_set1_epi16(QAM256_n2);//4/sqrt{170}
+      QAM_amp128r = simde_mm_set1_epi16(QAM256_n3);//2/sqrt{170}
     }
-    dl_ch_mag128_0 = (__m128i *)dl_ch_mag[0][0];
-    dl_ch_mag128b_0 = (__m128i *)dl_ch_magb[0][0];
-    dl_ch_mag128r_0 = (__m128i *)dl_ch_magr[0][0];
+    dl_ch_mag128_0 = (simde__m128i *)dl_ch_mag[0][0];
+    dl_ch_mag128b_0 = (simde__m128i *)dl_ch_magb[0][0];
+    dl_ch_mag128r_0 = (simde__m128i *)dl_ch_magr[0][0];
 
     for (int rb=0; rb<3*nb_rb_0; rb++) {
       //for symmetric H_h_H matrix, the determinant is only real values
-        mmtmpD2 = _mm_sign_epi16(determ_fin_128[0],*(__m128i*)&nr_realpart[0]);//set imag part to 0
-        mmtmpD3 = _mm_shufflelo_epi16(mmtmpD2,_MM_SHUFFLE(2,3,0,1));
-        mmtmpD3 = _mm_shufflehi_epi16(mmtmpD3,_MM_SHUFFLE(2,3,0,1));
-        mmtmpD2 = _mm_add_epi16(mmtmpD2,mmtmpD3);
+        mmtmpD2 = simde_mm_sign_epi16(determ_fin_128[0],*(simde__m128i*)&nr_realpart[0]);//set imag part to 0
+        mmtmpD3 = simde_mm_shufflelo_epi16(mmtmpD2,SIMDE_MM_SHUFFLE(2,3,0,1));
+        mmtmpD3 = simde_mm_shufflehi_epi16(mmtmpD3,SIMDE_MM_SHUFFLE(2,3,0,1));
+        mmtmpD2 = simde_mm_add_epi16(mmtmpD2,mmtmpD3);
 
         dl_ch_mag128_0[0] = mmtmpD2;
         dl_ch_mag128b_0[0] = mmtmpD2;
         dl_ch_mag128r_0[0] = mmtmpD2;
 
-        dl_ch_mag128_0[0] = _mm_mulhi_epi16(dl_ch_mag128_0[0],QAM_amp128);
-        dl_ch_mag128_0[0] = _mm_slli_epi16(dl_ch_mag128_0[0],1);
+        dl_ch_mag128_0[0] = simde_mm_mulhi_epi16(dl_ch_mag128_0[0],QAM_amp128);
+        dl_ch_mag128_0[0] = simde_mm_slli_epi16(dl_ch_mag128_0[0],1);
 
-        dl_ch_mag128b_0[0] = _mm_mulhi_epi16(dl_ch_mag128b_0[0],QAM_amp128b);
-        dl_ch_mag128b_0[0] = _mm_slli_epi16(dl_ch_mag128b_0[0],1);
-        dl_ch_mag128r_0[0] = _mm_mulhi_epi16(dl_ch_mag128r_0[0],QAM_amp128r);
-        dl_ch_mag128r_0[0] = _mm_slli_epi16(dl_ch_mag128r_0[0],1);
+        dl_ch_mag128b_0[0] = simde_mm_mulhi_epi16(dl_ch_mag128b_0[0],QAM_amp128b);
+        dl_ch_mag128b_0[0] = simde_mm_slli_epi16(dl_ch_mag128b_0[0],1);
+        dl_ch_mag128r_0[0] = simde_mm_mulhi_epi16(dl_ch_mag128r_0[0],QAM_amp128r);
+        dl_ch_mag128r_0[0] = simde_mm_slli_epi16(dl_ch_mag128r_0[0],1);
 
 
       determ_fin_128 += 1;
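For the real-valued determinant of the symmetric H^H*H matrix, the sign/shuffle/add triple above duplicates each 32-bit slot's real part into both 16-bit halves, and the mulhi + slli-by-1 pair then applies the QAM amplitude: (d * amp) >> 15 with the LSB cleared. A one-lane sketch, assuming the QAM*_n* constants are Q1.15 as their comments suggest:

#include <stdint.h>

// Scalar sketch of one determinant-scaled LLR threshold above.
static int16_t qam_threshold(int16_t determ_re, int16_t qam_amp /* Q1.15 */)
{
  return (int16_t)((((int32_t)determ_re * qam_amp) >> 16) << 1);
}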
diff --git a/openair1/PHY/NR_UE_TRANSPORT/nr_dlsch_llr_computation.c b/openair1/PHY/NR_UE_TRANSPORT/nr_dlsch_llr_computation.c
index 3edea7a3c4dc959eb1f6db1c05096adcc7a1acb1..0f13fea3caff811177a467612bbcc12821f079a4 100644
--- a/openair1/PHY/NR_UE_TRANSPORT/nr_dlsch_llr_computation.c
+++ b/openair1/PHY/NR_UE_TRANSPORT/nr_dlsch_llr_computation.c
@@ -39,10 +39,6 @@
 //#define DEBUG_LLR_SIC
 
 int16_t nr_zeros[8] __attribute__((aligned(16))) = {0, 0, 0, 0, 0, 0, 0, 0};
-#if defined(__x86_64__) || defined(__i386__)
-__m128i rho_rpi __attribute__ ((aligned(16)));
-__m128i rho_rmi __attribute__((aligned(16)));
-#endif
 
 //==============================================================================================
 // SINGLE-STREAM
@@ -103,34 +99,18 @@ void nr_dlsch_16qam_llr(NR_DL_FRAME_PARMS *frame_parms,
                      uint16_t nb_rb)
 {
 
-#if defined(__x86_64__) || defined(__i386__)
-  __m128i *rxF = (__m128i*)&rxdataF_comp[(symbol*nb_rb*12)];
-  __m128i *ch_mag;
-  __m128i llr128[2];
+  simde__m128i *rxF = (simde__m128i*)&rxdataF_comp[(symbol*nb_rb*12)];
+  simde__m128i *ch_mag;
+  simde__m128i llr128[2];
   uint32_t *llr32;
-#elif defined(__arm__) || defined(__aarch64__)
-  int16x8_t *rxF = (int16x8_t*)&rxdataF_comp[(symbol*nb_rb*12)];
-  int16x8_t *ch_mag;
-  int16x8_t xmm0;
-  int16_t *llr16;
-#endif
 
 
   int i;
   unsigned char len_mod4=0;
 
 
-#if defined(__x86_64__) || defined(__i386__)
     llr32 = (uint32_t*)dlsch_llr;
-#elif defined(__arm__) || defined(__aarch64__)
-    llr16 = (int16_t*)dlsch_llr;
-#endif
-
-#if defined(__x86_64__) || defined(__i386__)
-    ch_mag = (__m128i *)dl_ch_mag;
-#elif defined(__arm__) || defined(__aarch64__)
-    ch_mag = (int16x8_t *)dl_ch_mag;
-#endif
+    ch_mag = (simde__m128i *)dl_ch_mag;
 
 
  // printf("len=%d\n", len);
@@ -142,53 +122,26 @@ void nr_dlsch_16qam_llr(NR_DL_FRAME_PARMS *frame_parms,
  // printf("len+=%d\n", len);
   for (i=0; i<len; i++) {
 
-#if defined(__x86_64__) || defined(__i386)
-    __m128i xmm0;
-    xmm0 = _mm_abs_epi16(rxF[i]);
-    xmm0 = _mm_subs_epi16(ch_mag[i],xmm0);
+    simde__m128i xmm0 = simde_mm_abs_epi16(rxF[i]);
+    xmm0 = simde_mm_subs_epi16(ch_mag[i],xmm0);
 
     // lambda_1=y_R, lambda_2=|y_R|-|h|^2, lamda_3=y_I, lambda_4=|y_I|-|h|^2
-    llr128[0] = _mm_unpacklo_epi32(rxF[i],xmm0);
-    llr128[1] = _mm_unpackhi_epi32(rxF[i],xmm0);
-    llr32[0] = _mm_extract_epi32(llr128[0],0); //((uint32_t *)&llr128[0])[0];
-    llr32[1] = _mm_extract_epi32(llr128[0],1); //((uint32_t *)&llr128[0])[1];
-    llr32[2] = _mm_extract_epi32(llr128[0],2); //((uint32_t *)&llr128[0])[2];
-    llr32[3] = _mm_extract_epi32(llr128[0],3); //((uint32_t *)&llr128[0])[3];
-    llr32[4] = _mm_extract_epi32(llr128[1],0); //((uint32_t *)&llr128[1])[0];
-    llr32[5] = _mm_extract_epi32(llr128[1],1); //((uint32_t *)&llr128[1])[1];
-    llr32[6] = _mm_extract_epi32(llr128[1],2); //((uint32_t *)&llr128[1])[2];
-    llr32[7] = _mm_extract_epi32(llr128[1],3); //((uint32_t *)&llr128[1])[3];
+    llr128[0] = simde_mm_unpacklo_epi32(rxF[i],xmm0);
+    llr128[1] = simde_mm_unpackhi_epi32(rxF[i],xmm0);
+    llr32[0] = simde_mm_extract_epi32(llr128[0],0); //((uint32_t *)&llr128[0])[0];
+    llr32[1] = simde_mm_extract_epi32(llr128[0],1); //((uint32_t *)&llr128[0])[1];
+    llr32[2] = simde_mm_extract_epi32(llr128[0],2); //((uint32_t *)&llr128[0])[2];
+    llr32[3] = simde_mm_extract_epi32(llr128[0],3); //((uint32_t *)&llr128[0])[3];
+    llr32[4] = simde_mm_extract_epi32(llr128[1],0); //((uint32_t *)&llr128[1])[0];
+    llr32[5] = simde_mm_extract_epi32(llr128[1],1); //((uint32_t *)&llr128[1])[1];
+    llr32[6] = simde_mm_extract_epi32(llr128[1],2); //((uint32_t *)&llr128[1])[2];
+    llr32[7] = simde_mm_extract_epi32(llr128[1],3); //((uint32_t *)&llr128[1])[3];
     llr32+=8;
-#elif defined(__arm__) || defined(__aarch64__)
-    xmm0 = vabsq_s16(rxF[i]);
-    xmm0 = vqsubq_s16(ch_mag[i],xmm0);
-    // lambda_1=y_R, lambda_2=|y_R|-|h|^2, lamda_3=y_I, lambda_4=|y_I|-|h|^2
-
-    llr16[0] = vgetq_lane_s16(rxF[i],0);
-    llr16[1] = vgetq_lane_s16(rxF[i],1);
-    llr16[2] = vgetq_lane_s16(xmm0,0);
-    llr16[3] = vgetq_lane_s16(xmm0,1);
-    llr16[4] = vgetq_lane_s16(rxF[i],2);
-    llr16[5] = vgetq_lane_s16(rxF[i],3);
-    llr16[6] = vgetq_lane_s16(xmm0,2);
-    llr16[7] = vgetq_lane_s16(xmm0,3);
-    llr16[8] = vgetq_lane_s16(rxF[i],4);
-    llr16[9] = vgetq_lane_s16(rxF[i],5);
-    llr16[10] = vgetq_lane_s16(xmm0,4);
-    llr16[11] = vgetq_lane_s16(xmm0,5);
-    llr16[12] = vgetq_lane_s16(rxF[i],6);
-    llr16[13] = vgetq_lane_s16(rxF[i],6);
-    llr16[14] = vgetq_lane_s16(xmm0,7);
-    llr16[15] = vgetq_lane_s16(xmm0,7);
-    llr16+=16;
-#endif
 
   }
 
-#if defined(__x86_64__) || defined(__i386__)
-  _mm_empty();
-  _m_empty();
-#endif
+  simde_mm_empty();
+  simde_m_empty();
 }
 
 //----------------------------------------------------------------------------------------------
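Per complex RE, the 16QAM loop above emits four LLRs in the order (y_R, y_I, |h|^2-|y_R|, |h|^2-|y_I|): the abs/subs pair folds the constellation once around the ch_mag threshold, and unpacklo/unpackhi re-interleave the folded metric with the raw matched-filter output. Scalar sketch of one RE (the saturation of simde_mm_subs_epi16 is omitted):

#include <stdint.h>

// Scalar form of the 16QAM LLR quad produced per RE above.
static void llr16qam_ref(int16_t y_re, int16_t y_im, int16_t ch_mag,
                         int16_t llr[4])
{
  int16_t a_re = y_re < 0 ? (int16_t)-y_re : y_re; // simde_mm_abs_epi16
  int16_t a_im = y_im < 0 ? (int16_t)-y_im : y_im;
  llr[0] = y_re;                     // first bit pair: raw MF output
  llr[1] = y_im;
  llr[2] = (int16_t)(ch_mag - a_re); // second bit pair: one fold around |h|^2
  llr[3] = (int16_t)(ch_mag - a_im);
}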
@@ -205,26 +158,16 @@ void nr_dlsch_64qam_llr(NR_DL_FRAME_PARMS *frame_parms,
 			uint8_t first_symbol_flag,
 			uint16_t nb_rb)
 {
-#if defined(__x86_64__) || defined(__i386__)
-  __m128i *rxF = (__m128i*)&rxdataF_comp[(symbol*nb_rb*12)];
-  __m128i *ch_mag,*ch_magb;
-#elif defined(__arm__) || defined(__aarch64__)
-  int16x8_t *rxF = (int16x8_t*)&rxdataF_comp[(symbol*nb_rb*12)];
-  int16x8_t *ch_mag,*ch_magb,xmm1,xmm2;
-#endif
+  simde__m128i *rxF = (simde__m128i*)&rxdataF_comp[(symbol*nb_rb*12)];
+  simde__m128i *ch_mag,*ch_magb;
   int i,len2;
   unsigned char len_mod4;
   int16_t *llr2;
 
   llr2 = dlsch_llr;
 
-#if defined(__x86_64__) || defined(__i386__)
-  ch_mag = (__m128i *)dl_ch_mag;
-  ch_magb = (__m128i *)dl_ch_magb;
-#elif defined(__arm__) || defined(__aarch64__)
-  ch_mag = (int16x8_t *)dl_ch_mag;
-  ch_magb = (int16x8_t *)dl_ch_magb;
-#endif
+  ch_mag = (simde__m128i *)dl_ch_mag;
+  ch_magb = (simde__m128i *)dl_ch_magb;
 
 //  printf("nr_dlsch_64qam_llr: symbol %d,nb_rb %d, len %d,pbch_pss_sss_adjust %d\n",symbol,nb_rb,len,pbch_pss_sss_adjust);
 
@@ -240,20 +183,12 @@ void nr_dlsch_64qam_llr(NR_DL_FRAME_PARMS *frame_parms,
   len2+=((len_mod4==0)?0:1);
 
   for (i=0; i<len2; i++) {
+    simde__m128i xmm1, xmm2;
+    xmm1 = simde_mm_abs_epi16(rxF[i]);
+    xmm1 = simde_mm_subs_epi16(ch_mag[i],xmm1);
+    xmm2 = simde_mm_abs_epi16(xmm1);
+    xmm2 = simde_mm_subs_epi16(ch_magb[i],xmm2);
 
-#if defined(__x86_64__) || defined(__i386__)
-    __m128i xmm1, xmm2;
-
-    xmm1 = _mm_abs_epi16(rxF[i]);
-    xmm1 = _mm_subs_epi16(ch_mag[i],xmm1);
-    xmm2 = _mm_abs_epi16(xmm1);
-    xmm2 = _mm_subs_epi16(ch_magb[i],xmm2);
-#elif defined(__arm__) || defined(__aarch64__)
-    xmm1 = vabsq_s16(rxF[i]);
-    xmm1 = vsubq_s16(ch_mag[i],xmm1);
-    xmm2 = vabsq_s16(xmm1);
-    xmm2 = vsubq_s16(ch_magb[i],xmm2);
-#endif
     // loop over all LLRs in quad word (24 coded bits)
     /*
       for (j=0;j<8;j+=2) {
@@ -269,69 +204,39 @@ void nr_dlsch_64qam_llr(NR_DL_FRAME_PARMS *frame_parms,
     */
     llr2[0] = ((short *)&rxF[i])[0];
     llr2[1] = ((short *)&rxF[i])[1];
-#if defined(__x86_64__) || defined(__i386__)
-    llr2[2] = _mm_extract_epi16(xmm1,0);
-    llr2[3] = _mm_extract_epi16(xmm1,1);//((short *)&xmm1)[j+1];
-    llr2[4] = _mm_extract_epi16(xmm2,0);//((short *)&xmm2)[j];
-    llr2[5] = _mm_extract_epi16(xmm2,1);//((short *)&xmm2)[j+1];
-#elif defined(__arm__) || defined(__aarch64__)
-    llr2[2] = vgetq_lane_s16(xmm1,0);
-    llr2[3] = vgetq_lane_s16(xmm1,1);//((short *)&xmm1)[j+1];
-    llr2[4] = vgetq_lane_s16(xmm2,0);//((short *)&xmm2)[j];
-    llr2[5] = vgetq_lane_s16(xmm2,1);//((short *)&xmm2)[j+1];
-#endif
+    llr2[2] = simde_mm_extract_epi16(xmm1,0);
+    llr2[3] = simde_mm_extract_epi16(xmm1,1);//((short *)&xmm1)[j+1];
+    llr2[4] = simde_mm_extract_epi16(xmm2,0);//((short *)&xmm2)[j];
+    llr2[5] = simde_mm_extract_epi16(xmm2,1);//((short *)&xmm2)[j+1];
 
     llr2+=6;
     llr2[0] = ((short *)&rxF[i])[2];
     llr2[1] = ((short *)&rxF[i])[3];
-#if defined(__x86_64__) || defined(__i386__)
-    llr2[2] = _mm_extract_epi16(xmm1,2);
-    llr2[3] = _mm_extract_epi16(xmm1,3);//((short *)&xmm1)[j+1];
-    llr2[4] = _mm_extract_epi16(xmm2,2);//((short *)&xmm2)[j];
-    llr2[5] = _mm_extract_epi16(xmm2,3);//((short *)&xmm2)[j+1];
-#elif defined(__arm__) || defined(__aarch64__)
-    llr2[2] = vgetq_lane_s16(xmm1,2);
-    llr2[3] = vgetq_lane_s16(xmm1,3);//((short *)&xmm1)[j+1];
-    llr2[4] = vgetq_lane_s16(xmm2,2);//((short *)&xmm2)[j];
-    llr2[5] = vgetq_lane_s16(xmm2,3);//((short *)&xmm2)[j+1];
-#endif
+    llr2[2] = simde_mm_extract_epi16(xmm1,2);
+    llr2[3] = simde_mm_extract_epi16(xmm1,3);//((short *)&xmm1)[j+1];
+    llr2[4] = simde_mm_extract_epi16(xmm2,2);//((short *)&xmm2)[j];
+    llr2[5] = simde_mm_extract_epi16(xmm2,3);//((short *)&xmm2)[j+1];
 
     llr2+=6;
     llr2[0] = ((short *)&rxF[i])[4];
     llr2[1] = ((short *)&rxF[i])[5];
-#if defined(__x86_64__) || defined(__i386__)
-    llr2[2] = _mm_extract_epi16(xmm1,4);
-    llr2[3] = _mm_extract_epi16(xmm1,5);//((short *)&xmm1)[j+1];
-    llr2[4] = _mm_extract_epi16(xmm2,4);//((short *)&xmm2)[j];
-    llr2[5] = _mm_extract_epi16(xmm2,5);//((short *)&xmm2)[j+1];
-#elif defined(__arm__) || defined(__aarch64__)
-    llr2[2] = vgetq_lane_s16(xmm1,4);
-    llr2[3] = vgetq_lane_s16(xmm1,5);//((short *)&xmm1)[j+1];
-    llr2[4] = vgetq_lane_s16(xmm2,4);//((short *)&xmm2)[j];
-    llr2[5] = vgetq_lane_s16(xmm2,5);//((short *)&xmm2)[j+1];
-#endif
+    llr2[2] = simde_mm_extract_epi16(xmm1,4);
+    llr2[3] = simde_mm_extract_epi16(xmm1,5);//((short *)&xmm1)[j+1];
+    llr2[4] = simde_mm_extract_epi16(xmm2,4);//((short *)&xmm2)[j];
+    llr2[5] = simde_mm_extract_epi16(xmm2,5);//((short *)&xmm2)[j+1];
     llr2+=6;
     llr2[0] = ((short *)&rxF[i])[6];
     llr2[1] = ((short *)&rxF[i])[7];
-#if defined(__x86_64__) || defined(__i386__)
-    llr2[2] = _mm_extract_epi16(xmm1,6);
-    llr2[3] = _mm_extract_epi16(xmm1,7);//((short *)&xmm1)[j+1];
-    llr2[4] = _mm_extract_epi16(xmm2,6);//((short *)&xmm2)[j];
-    llr2[5] = _mm_extract_epi16(xmm2,7);//((short *)&xmm2)[j+1];
-#elif defined(__arm__) || defined(__aarch64__)
-    llr2[2] = vgetq_lane_s16(xmm1,6);
-    llr2[3] = vgetq_lane_s16(xmm1,7);//((short *)&xmm1)[j+1];
-    llr2[4] = vgetq_lane_s16(xmm2,6);//((short *)&xmm2)[j];
-    llr2[5] = vgetq_lane_s16(xmm2,7);//((short *)&xmm2)[j+1];
-#endif
+    llr2[2] = simde_mm_extract_epi16(xmm1,6);
+    llr2[3] = simde_mm_extract_epi16(xmm1,7);//((short *)&xmm1)[j+1];
+    llr2[4] = simde_mm_extract_epi16(xmm2,6);//((short *)&xmm2)[j];
+    llr2[5] = simde_mm_extract_epi16(xmm2,7);//((short *)&xmm2)[j+1];
     llr2+=6;
 
   }
 
-#if defined(__x86_64__) || defined(__i386__)
-  _mm_empty();
-  _m_empty();
-#endif
+  simde_mm_empty();
+  simde_m_empty();
 }
 
 //----------------------------------------------------------------------------------------------
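64QAM repeats the fold once more: xmm1 is the 16QAM metric and xmm2 = ch_magb - |xmm1| supplies the third bit pair, extracted lane by lane and interleaved with the raw outputs in groups of six LLRs per RE. One real dimension, in scalar form (sketch, saturation omitted):

#include <stdint.h>

// Scalar sketch of the 64QAM folding cascade, one real dimension.
static void llr64qam_ref(int16_t y, int16_t ch_mag, int16_t ch_magb,
                         int16_t llr[3])
{
  int16_t t1 = (int16_t)(ch_mag  - (y  < 0 ? -y  : y));  // xmm1 lane
  int16_t t2 = (int16_t)(ch_magb - (t1 < 0 ? -t1 : t1)); // xmm2 lane
  llr[0] = y;
  llr[1] = t1;
  llr[2] = t2;
}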
@@ -349,8 +254,8 @@ void nr_dlsch_256qam_llr(NR_DL_FRAME_PARMS *frame_parms,
                      uint8_t first_symbol_flag,
                      uint16_t nb_rb)
 {
-  __m128i *rxF = (__m128i*)&rxdataF_comp[(symbol*nb_rb*12)];
-  __m128i *ch_mag,*ch_magb,*ch_magr;
+  simde__m128i *rxF = (simde__m128i*)&rxdataF_comp[(symbol*nb_rb*12)];
+  simde__m128i *ch_mag,*ch_magb,*ch_magr;
 
   int i,len2;
   unsigned char len_mod4;
@@ -358,66 +263,66 @@ void nr_dlsch_256qam_llr(NR_DL_FRAME_PARMS *frame_parms,
 
   llr2 = dlsch_llr;
 
-  ch_mag = (__m128i *)dl_ch_mag;
-  ch_magb = (__m128i *)dl_ch_magb;
-  ch_magr = (__m128i *)dl_ch_magr;
+  ch_mag = (simde__m128i *)dl_ch_mag;
+  ch_magb = (simde__m128i *)dl_ch_magb;
+  ch_magr = (simde__m128i *)dl_ch_magr;
 
   len_mod4 =len&3;
   len2=len>>2;  // length in quad words (4 REs)
   len2+=((len_mod4==0)?0:1);
 
   for (i=0; i<len2; i++) {
-    __m128i xmm1 = _mm_abs_epi16(rxF[i]);
-    xmm1 = _mm_subs_epi16(ch_mag[i],xmm1);
-    __m128i xmm2 = _mm_abs_epi16(xmm1);
-    xmm2 = _mm_subs_epi16(ch_magb[i],xmm2);
-    __m128i xmm3 = _mm_abs_epi16(xmm2);
-    xmm3 = _mm_subs_epi16(ch_magr[i], xmm3);
+    simde__m128i xmm1 = simde_mm_abs_epi16(rxF[i]);
+    xmm1 = simde_mm_subs_epi16(ch_mag[i],xmm1);
+    simde__m128i xmm2 = simde_mm_abs_epi16(xmm1);
+    xmm2 = simde_mm_subs_epi16(ch_magb[i],xmm2);
+    simde__m128i xmm3 = simde_mm_abs_epi16(xmm2);
+    xmm3 = simde_mm_subs_epi16(ch_magr[i], xmm3);
 
     llr2[0] = ((short *)&rxF[i])[0];
     llr2[1] = ((short *)&rxF[i])[1];
-    llr2[2] = _mm_extract_epi16(xmm1,0);
-    llr2[3] = _mm_extract_epi16(xmm1,1);//((short *)&xmm1)[j+1];
-    llr2[4] = _mm_extract_epi16(xmm2,0);//((short *)&xmm2)[j];
-    llr2[5] = _mm_extract_epi16(xmm2,1);//((short *)&xmm2)[j+1];
-    llr2[6] = _mm_extract_epi16(xmm3,0);
-    llr2[7] = _mm_extract_epi16(xmm3,1);
+    llr2[2] = simde_mm_extract_epi16(xmm1,0);
+    llr2[3] = simde_mm_extract_epi16(xmm1,1);//((short *)&xmm1)[j+1];
+    llr2[4] = simde_mm_extract_epi16(xmm2,0);//((short *)&xmm2)[j];
+    llr2[5] = simde_mm_extract_epi16(xmm2,1);//((short *)&xmm2)[j+1];
+    llr2[6] = simde_mm_extract_epi16(xmm3,0);
+    llr2[7] = simde_mm_extract_epi16(xmm3,1);
 
     llr2+=8;
     llr2[0] = ((short *)&rxF[i])[2];
     llr2[1] = ((short *)&rxF[i])[3];
-    llr2[2] = _mm_extract_epi16(xmm1,2);
-    llr2[3] = _mm_extract_epi16(xmm1,3);//((short *)&xmm1)[j+1];
-    llr2[4] = _mm_extract_epi16(xmm2,2);//((short *)&xmm2)[j];
-    llr2[5] = _mm_extract_epi16(xmm2,3);//((short *)&xmm2)[j+1];
-    llr2[6] = _mm_extract_epi16(xmm3,2);
-    llr2[7] = _mm_extract_epi16(xmm3,3);
+    llr2[2] = simde_mm_extract_epi16(xmm1,2);
+    llr2[3] = simde_mm_extract_epi16(xmm1,3);//((short *)&xmm1)[j+1];
+    llr2[4] = simde_mm_extract_epi16(xmm2,2);//((short *)&xmm2)[j];
+    llr2[5] = simde_mm_extract_epi16(xmm2,3);//((short *)&xmm2)[j+1];
+    llr2[6] = simde_mm_extract_epi16(xmm3,2);
+    llr2[7] = simde_mm_extract_epi16(xmm3,3);
 
     llr2+=8;
     llr2[0] = ((short *)&rxF[i])[4];
     llr2[1] = ((short *)&rxF[i])[5];
-    llr2[2] = _mm_extract_epi16(xmm1,4);
-    llr2[3] = _mm_extract_epi16(xmm1,5);//((short *)&xmm1)[j+1];
-    llr2[4] = _mm_extract_epi16(xmm2,4);//((short *)&xmm2)[j];
-    llr2[5] = _mm_extract_epi16(xmm2,5);//((short *)&xmm2)[j+1];
-    llr2[6] = _mm_extract_epi16(xmm3,4);
-    llr2[7] = _mm_extract_epi16(xmm3,5);
+    llr2[2] = simde_mm_extract_epi16(xmm1,4);
+    llr2[3] = simde_mm_extract_epi16(xmm1,5);//((short *)&xmm1)[j+1];
+    llr2[4] = simde_mm_extract_epi16(xmm2,4);//((short *)&xmm2)[j];
+    llr2[5] = simde_mm_extract_epi16(xmm2,5);//((short *)&xmm2)[j+1];
+    llr2[6] = simde_mm_extract_epi16(xmm3,4);
+    llr2[7] = simde_mm_extract_epi16(xmm3,5);
 
     llr2+=8;
     llr2[0] = ((short *)&rxF[i])[6];
     llr2[1] = ((short *)&rxF[i])[7];
-    llr2[2] = _mm_extract_epi16(xmm1,6);
-    llr2[3] = _mm_extract_epi16(xmm1,7);//((short *)&xmm1)[j+1];
-    llr2[4] = _mm_extract_epi16(xmm2,6);//((short *)&xmm2)[j];
-    llr2[5] = _mm_extract_epi16(xmm2,7);//((short *)&xmm2)[j+1];
-    llr2[6] = _mm_extract_epi16(xmm3,6);
-    llr2[7] = _mm_extract_epi16(xmm3,7);
+    llr2[2] = simde_mm_extract_epi16(xmm1,6);
+    llr2[3] = simde_mm_extract_epi16(xmm1,7);//((short *)&xmm1)[j+1];
+    llr2[4] = simde_mm_extract_epi16(xmm2,6);//((short *)&xmm2)[j];
+    llr2[5] = simde_mm_extract_epi16(xmm2,7);//((short *)&xmm2)[j+1];
+    llr2[6] = simde_mm_extract_epi16(xmm3,6);
+    llr2[7] = simde_mm_extract_epi16(xmm3,7);
     llr2+=8;
 
   }
 
-  _mm_empty();
-  _m_empty();
+  simde_mm_empty();
+  simde_m_empty();
 }
 
 //==============================================================================================
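256QAM extends the same recursion one level deeper, xmm3 = ch_magr - |xmm2|, so each RE yields eight LLRs. Continuing the scalar sketch above by one fold:

#include <stdint.h>

// Scalar sketch of the 256QAM cascade, one real dimension (four LLRs).
static void llr256qam_ref(int16_t y, int16_t ch_mag, int16_t ch_magb,
                          int16_t ch_magr, int16_t llr[4])
{
  int16_t t1 = (int16_t)(ch_mag  - (y  < 0 ? -y  : y));
  int16_t t2 = (int16_t)(ch_magb - (t1 < 0 ? -t1 : t1));
  int16_t t3 = (int16_t)(ch_magr - (t2 < 0 ? -t2 : t2)); // xmm3 lane
  llr[0] = y; llr[1] = t1; llr[2] = t2; llr[3] = t3;
}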
@@ -428,24 +333,21 @@ void nr_dlsch_256qam_llr(NR_DL_FRAME_PARMS *frame_parms,
 // QPSK
 //----------------------------------------------------------------------------------------------
 
-#if defined(__x86_64__) || defined(__i386)
-__m128i  y0r_over2 __attribute__ ((aligned(16)));
-__m128i  y0i_over2 __attribute__ ((aligned(16)));
-__m128i  y1r_over2 __attribute__ ((aligned(16)));
-__m128i  y1i_over2 __attribute__ ((aligned(16)));
+simde__m128i  y0r_over2 __attribute__ ((aligned(16)));
+simde__m128i  y0i_over2 __attribute__ ((aligned(16)));
+simde__m128i  y1r_over2 __attribute__ ((aligned(16)));
+simde__m128i  y1i_over2 __attribute__ ((aligned(16)));
 
-__m128i  A __attribute__ ((aligned(16)));
-__m128i  B __attribute__ ((aligned(16)));
-__m128i  C __attribute__ ((aligned(16)));
-__m128i  D __attribute__ ((aligned(16)));
-__m128i  E __attribute__ ((aligned(16)));
-__m128i  F __attribute__ ((aligned(16)));
-__m128i  G __attribute__ ((aligned(16)));
-__m128i  H __attribute__ ((aligned(16)));
+simde__m128i  A __attribute__ ((aligned(16)));
+simde__m128i  B __attribute__ ((aligned(16)));
+simde__m128i  C __attribute__ ((aligned(16)));
+simde__m128i  D __attribute__ ((aligned(16)));
+simde__m128i  E __attribute__ ((aligned(16)));
+simde__m128i  F __attribute__ ((aligned(16)));
+simde__m128i  G __attribute__ ((aligned(16)));
+simde__m128i  H __attribute__ ((aligned(16)));
 
-#endif
 
-//__m128i ONE_OVER_SQRT_8 __attribute__((aligned(16)));
 
 void nr_qpsk_qpsk(short *stream0_in,
                short *stream1_in,
@@ -466,174 +368,148 @@ void nr_qpsk_qpsk(short *stream0_in,
     length = number of resource elements
   */
 
-#if defined(__x86_64__) || defined(__i386__)
-  __m128i *rho01_128i = (__m128i *)rho01;
-  __m128i *stream0_128i_in = (__m128i *)stream0_in;
-  __m128i *stream1_128i_in = (__m128i *)stream1_in;
-  __m128i *stream0_128i_out = (__m128i *)stream0_out;
-  __m128i ONE_OVER_SQRT_8 = _mm_set1_epi16(23170); //round(2^16/sqrt(8))
-#elif defined(__arm__) || defined(__aarch64__)
-  int16x8_t *rho01_128i = (int16x8_t *)rho01;
-  int16x8_t *stream0_128i_in = (int16x8_t *)stream0_in;
-  int16x8_t *stream1_128i_in = (int16x8_t *)stream1_in;
-  int16x8_t *stream0_128i_out = (int16x8_t *)stream0_out;
-  int16x8_t ONE_OVER_SQRT_8 = vdupq_n_s16(23170); //round(2^16/sqrt(8))
-#endif
+  simde__m128i *rho01_128i = (simde__m128i *)rho01;
+  simde__m128i *stream0_128i_in = (simde__m128i *)stream0_in;
+  simde__m128i *stream1_128i_in = (simde__m128i *)stream1_in;
+  simde__m128i *stream0_128i_out = (simde__m128i *)stream0_out;
+  simde__m128i ONE_OVER_SQRT_8 = simde_mm_set1_epi16(23170); //round(2^16/sqrt(8))
 
   int i;
 
 
   for (i=0; i<length>>2; i+=2) {
     // in each iteration, we take 8 complex samples
-#if defined(__x86_64__) || defined(__i386__)
-    __m128i xmm0 = rho01_128i[i]; // 4 symbols
-    __m128i xmm1 = rho01_128i[i + 1];
+    simde__m128i xmm0 = rho01_128i[i]; // 4 symbols
+    simde__m128i xmm1 = rho01_128i[i+1];
 
     // put (rho_r + rho_i)/2sqrt2 in rho_rpi
     // put (rho_r - rho_i)/2sqrt2 in rho_rmi
 
-    xmm0 = _mm_shufflelo_epi16(xmm0,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm0 = _mm_shufflehi_epi16(xmm0,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm0 = _mm_shuffle_epi32(xmm0,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm1 = _mm_shufflelo_epi16(xmm1,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm1 = _mm_shufflehi_epi16(xmm1,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm1 = _mm_shuffle_epi32(xmm1,0xd8); //_MM_SHUFFLE(0,2,1,3));
+    xmm0 = simde_mm_shufflelo_epi16(xmm0,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3)
+    xmm0 = simde_mm_shufflehi_epi16(xmm0,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3)
+    xmm0 = simde_mm_shuffle_epi32(xmm0,0xd8);   // SIMDE_MM_SHUFFLE(0,2,1,3)
+    xmm1 = simde_mm_shufflelo_epi16(xmm1,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3)
+    xmm1 = simde_mm_shufflehi_epi16(xmm1,0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3)
+    xmm1 = simde_mm_shuffle_epi32(xmm1,0xd8);   // SIMDE_MM_SHUFFLE(0,2,1,3)
     //xmm0 = [Re(0,1) Re(2,3) Im(0,1) Im(2,3)]
     //xmm1 = [Re(4,5) Re(6,7) Im(4,5) Im(6,7)]
-    __m128i xmm2 = _mm_unpacklo_epi64(xmm0, xmm1); // Re(rho)
-    __m128i xmm3 = _mm_unpackhi_epi64(xmm0, xmm1); // Im(rho)
-    __m128i rho_rpi = _mm_adds_epi16(xmm2, xmm3); // rho = Re(rho) + Im(rho)
-    __m128i rho_rmi = _mm_subs_epi16(xmm2, xmm3); // rho* = Re(rho) - Im(rho)
+    simde__m128i xmm2 = simde_mm_unpacklo_epi64(xmm0, xmm1); // Re(rho)
+    simde__m128i xmm3 = simde_mm_unpackhi_epi64(xmm0, xmm1); // Im(rho)
+    simde__m128i rho_rpi = simde_mm_adds_epi16(xmm2, xmm3); // rho = Re(rho) + Im(rho)
+    simde__m128i rho_rmi = simde_mm_subs_epi16(xmm2, xmm3); // rho* = Re(rho) - Im(rho)
 
     // divide by sqrt(8), no shift needed ONE_OVER_SQRT_8 = Q1.16
-    rho_rpi = _mm_mulhi_epi16(rho_rpi,ONE_OVER_SQRT_8);
-    rho_rmi = _mm_mulhi_epi16(rho_rmi,ONE_OVER_SQRT_8);
-#elif defined(__arm__) || defined(__aarch64__)
-
-
-#endif
+    rho_rpi = simde_mm_mulhi_epi16(rho_rpi, ONE_OVER_SQRT_8);
+    rho_rmi = simde_mm_mulhi_epi16(rho_rmi, ONE_OVER_SQRT_8);
     // Compute LLR for first bit of stream 0
 
     // Compute real and imaginary parts of MF output for stream 0
-#if defined(__x86_64__) || defined(__i386__)
     xmm0 = stream0_128i_in[i];
     xmm1 = stream0_128i_in[i+1];
 
-    xmm0 = _mm_shufflelo_epi16(xmm0,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm0 = _mm_shufflehi_epi16(xmm0,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm0 = _mm_shuffle_epi32(xmm0,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm1 = _mm_shufflelo_epi16(xmm1,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm1 = _mm_shufflehi_epi16(xmm1,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm1 = _mm_shuffle_epi32(xmm1,0xd8); //_MM_SHUFFLE(0,2,1,3));
+    xmm0 = simde_mm_shufflelo_epi16(xmm0, 0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3)
+    xmm0 = simde_mm_shufflehi_epi16(xmm0, 0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3)
+    xmm0 = simde_mm_shuffle_epi32(xmm0, 0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3)
+    xmm1 = simde_mm_shufflelo_epi16(xmm1, 0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3)
+    xmm1 = simde_mm_shufflehi_epi16(xmm1, 0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3)
+    xmm1 = simde_mm_shuffle_epi32(xmm1, 0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3)
     //xmm0 = [Re(0,1) Re(2,3) Im(0,1) Im(2,3)]
     //xmm1 = [Re(4,5) Re(6,7) Im(4,5) Im(6,7)]
-    __m128i y0r = _mm_unpacklo_epi64(xmm0, xmm1); // = [y0r(1),y0r(2),y0r(3),y0r(4)]
-    __m128i y0i = _mm_unpackhi_epi64(xmm0, xmm1);
-
-    __m128i y0r_over2 = _mm_srai_epi16(y0r, 1); // divide by 2
-    __m128i y0i_over2 = _mm_srai_epi16(y0i, 1); // divide by 2
-#elif defined(__arm__) || defined(__aarch64__)
+    simde__m128i y0r = simde_mm_unpacklo_epi64(xmm0, xmm1); // = [y0r(1),y0r(2),y0r(3),y0r(4)]
+    simde__m128i y0i = simde_mm_unpackhi_epi64(xmm0, xmm1);
 
-
-#endif
+    simde__m128i y0r_over2 = simde_mm_srai_epi16(y0r, 1); // divide by 2
+    simde__m128i y0i_over2 = simde_mm_srai_epi16(y0i, 1); // divide by 2
     // Compute real and imaginary parts of MF output for stream 1
-#if defined(__x86_64__) || defined(__i386__)
     xmm0 = stream1_128i_in[i];
     xmm1 = stream1_128i_in[i+1];
 
-    xmm0 = _mm_shufflelo_epi16(xmm0,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm0 = _mm_shufflehi_epi16(xmm0,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm0 = _mm_shuffle_epi32(xmm0,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm1 = _mm_shufflelo_epi16(xmm1,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm1 = _mm_shufflehi_epi16(xmm1,0xd8); //_MM_SHUFFLE(0,2,1,3));
-    xmm1 = _mm_shuffle_epi32(xmm1,0xd8); //_MM_SHUFFLE(0,2,1,3));
+    xmm0 = simde_mm_shufflelo_epi16(xmm0, 0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3)
+    xmm0 = simde_mm_shufflehi_epi16(xmm0, 0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3)
+    xmm0 = simde_mm_shuffle_epi32(xmm0, 0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3)
+    xmm1 = simde_mm_shufflelo_epi16(xmm1, 0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3)
+    xmm1 = simde_mm_shufflehi_epi16(xmm1, 0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3)
+    xmm1 = simde_mm_shuffle_epi32(xmm1, 0xd8); // SIMDE_MM_SHUFFLE(0,2,1,3)
     //xmm0 = [Re(0,1) Re(2,3) Im(0,1) Im(2,3)]
     //xmm1 = [Re(4,5) Re(6,7) Im(4,5) Im(6,7)]
-    __m128i y1r = _mm_unpacklo_epi64(xmm0, xmm1); //[y1r(1),y1r(2),y1r(3),y1r(4)]
-    __m128i y1i = _mm_unpackhi_epi64(xmm0, xmm1); //[y1i(1),y1i(2),y1i(3),y1i(4)]
+    simde__m128i y1r = simde_mm_unpacklo_epi64(xmm0, xmm1); //[y1r(1),y1r(2),y1r(3),y1r(4)]
+    simde__m128i y1i = simde_mm_unpackhi_epi64(xmm0, xmm1); //[y1i(1),y1i(2),y1i(3),y1i(4)]
 
-    __m128i y1r_over2 = _mm_srai_epi16(y1r, 1); // divide by 2
-    __m128i y1i_over2 = _mm_srai_epi16(y1i, 1); // divide by 2
+    simde__m128i y1r_over2 = simde_mm_srai_epi16(y1r, 1); // divide by 2
+    simde__m128i y1i_over2 = simde_mm_srai_epi16(y1i, 1); // divide by 2
 
     // Compute the terms for the LLR of first bit
 
-    xmm0 = _mm_setzero_si128(); // ZERO
+    xmm0 = simde_mm_setzero_si128(); // ZERO
 
     // 1 term for numerator of LLR
-    xmm3 = _mm_subs_epi16(y1r_over2,rho_rpi);
-    A = _mm_abs_epi16(xmm3); // A = |y1r/2 - rho/sqrt(8)|
-    xmm2 = _mm_adds_epi16(A,y0i_over2); // = |y1r/2 - rho/sqrt(8)| + y0i/2
-    xmm3 = _mm_subs_epi16(y1i_over2,rho_rmi);
-    B = _mm_abs_epi16(xmm3); // B = |y1i/2 - rho*/sqrt(8)|
-    __m128i logmax_num_re0 = _mm_adds_epi16(B, xmm2); // = |y1r/2 - rho/sqrt(8)|+|y1i/2 - rho*/sqrt(8)| + y0i/2
+    xmm3 = simde_mm_subs_epi16(y1r_over2, rho_rpi);
+    A = simde_mm_abs_epi16(xmm3); // A = |y1r/2 - rho/sqrt(8)|
+    xmm2 = simde_mm_adds_epi16(A, y0i_over2); // = |y1r/2 - rho/sqrt(8)| + y0i/2
+    xmm3 = simde_mm_subs_epi16(y1i_over2, rho_rmi);
+    B = simde_mm_abs_epi16(xmm3); // B = |y1i/2 - rho*/sqrt(8)|
+    simde__m128i logmax_num_re0 = simde_mm_adds_epi16(B, xmm2); // = |y1r/2 - rho/sqrt(8)| + |y1i/2 - rho*/sqrt(8)| + y0i/2
 
     // 2 term for numerator of LLR
-    xmm3 = _mm_subs_epi16(y1r_over2,rho_rmi);
-    C = _mm_abs_epi16(xmm3); // C = |y1r/2 - rho*/4|
-    xmm2 = _mm_subs_epi16(C,y0i_over2); // = |y1r/2 - rho*/4| - y0i/2
-    xmm3 = _mm_adds_epi16(y1i_over2,rho_rpi);
-    D = _mm_abs_epi16(xmm3); // D = |y1i/2 + rho/4|
-    xmm2 = _mm_adds_epi16(xmm2,D); // |y1r/2 - rho*/4| + |y1i/2 + rho/4| - y0i/2
-    logmax_num_re0 = _mm_max_epi16(logmax_num_re0,xmm2); // max, numerator done
+    xmm3 = simde_mm_subs_epi16(y1r_over2, rho_rmi);
+    C = simde_mm_abs_epi16(xmm3); // C = |y1r/2 - rho*/4|
+    xmm2 = simde_mm_subs_epi16(C, y0i_over2); // = |y1r/2 - rho*/4| - y0i/2
+    xmm3 = simde_mm_adds_epi16(y1i_over2, rho_rpi);
+    D = simde_mm_abs_epi16(xmm3); // D = |y1i/2 + rho/4|
+    xmm2 = simde_mm_adds_epi16(xmm2, D); // |y1r/2 - rho*/4| + |y1i/2 + rho/4| - y0i/2
+    logmax_num_re0 = simde_mm_max_epi16(logmax_num_re0, xmm2); // max, numerator done
 
     // 1 term for denominator of LLR
-    xmm3 = _mm_adds_epi16(y1r_over2,rho_rmi);
-    E = _mm_abs_epi16(xmm3); // E = |y1r/2 + rho*/4|
-    xmm2 = _mm_adds_epi16(E,y0i_over2); // = |y1r/2 + rho*/4| + y0i/2
-    xmm3 = _mm_subs_epi16(y1i_over2,rho_rpi);
-    F = _mm_abs_epi16(xmm3); // F = |y1i/2 - rho/4|
-    __m128i logmax_den_re0 = _mm_adds_epi16(F, xmm2); // = |y1r/2 + rho*/4| + |y1i/2 - rho/4| + y0i/2
+    xmm3 = simde_mm_adds_epi16(y1r_over2, rho_rmi);
+    E = simde_mm_abs_epi16(xmm3); // E = |y1r/2 + rho*/4|
+    xmm2 = simde_mm_adds_epi16(E, y0i_over2); // = |y1r/2 + rho*/4| + y0i/2
+    xmm3 = simde_mm_subs_epi16(y1i_over2, rho_rpi);
+    F = simde_mm_abs_epi16(xmm3); // F = |y1i/2 - rho/4|
+    simde__m128i logmax_den_re0 = simde_mm_adds_epi16(F, xmm2); // = |y1r/2 + rho*/4| + |y1i/2 - rho/4| + y0i/2
 
     // 2 term for denominator of LLR
-    xmm3 = _mm_adds_epi16(y1r_over2,rho_rpi);
-    G = _mm_abs_epi16(xmm3); // G = |y1r/2 + rho/4|
-    xmm2 = _mm_subs_epi16(G,y0i_over2); // = |y1r/2 + rho/4| - y0i/2
-    xmm3 = _mm_adds_epi16(y1i_over2,rho_rmi);
-    H = _mm_abs_epi16(xmm3); // H = |y1i/2 + rho*/4|
-    xmm2 = _mm_adds_epi16(xmm2,H); // = |y1r/2 + rho/4| + |y1i/2 + rho*/4| - y0i/2
-    logmax_den_re0 = _mm_max_epi16(logmax_den_re0,xmm2); // max, denominator done
+    xmm3 = simde_mm_adds_epi16(y1r_over2, rho_rpi);
+    G = simde_mm_abs_epi16(xmm3); // G = |y1r/2 + rho/4|
+    xmm2 = simde_mm_subs_epi16(G, y0i_over2); // = |y1r/2 + rho/4| - y0i/2
+    xmm3 = simde_mm_adds_epi16(y1i_over2, rho_rmi);
+    H = simde_mm_abs_epi16(xmm3); // H = |y1i/2 + rho*/4|
+    xmm2 = simde_mm_adds_epi16(xmm2, H); // = |y1r/2 + rho/4| + |y1i/2 + rho*/4| - y0i/2
+    logmax_den_re0 = simde_mm_max_epi16(logmax_den_re0, xmm2); // max, denominator done
 
     // Compute the terms for the LLR of second bit

     // 1 term for numerator of LLR
-    xmm2 = _mm_adds_epi16(A,y0r_over2);
-    __m128i logmax_num_im0 = _mm_adds_epi16(B, xmm2); // = |y1r/2 - rho/4| + |y1i/2 - rho*/4| + y0r/2
+    xmm2 = simde_mm_adds_epi16(A, y0r_over2);
+    simde__m128i logmax_num_im0 = simde_mm_adds_epi16(B, xmm2); // = |y1r/2 - rho/4| + |y1i/2 - rho*/4| + y0r/2

     // 2 term for numerator of LLR
-    xmm2 = _mm_subs_epi16(E,y0r_over2);
-    xmm2 = _mm_adds_epi16(xmm2,F); // = |y1r/2 + rho*/4| + |y1i/2 - rho/4| - y0r/2
+    xmm2 = simde_mm_subs_epi16(E, y0r_over2);
+    xmm2 = simde_mm_adds_epi16(xmm2, F); // = |y1r/2 + rho*/4| + |y1i/2 - rho/4| - y0r/2
 
-    logmax_num_im0 = _mm_max_epi16(logmax_num_im0,xmm2); // max, nominator done
+    logmax_num_im0 = simde_mm_max_epi16(logmax_num_im0, xmm2); // max, numerator done
 
     // 1 term for denominator of LLR
-    xmm2 = _mm_adds_epi16(C,y0r_over2);
-    __m128i logmax_den_im0 = _mm_adds_epi16(D, xmm2); // = |y1r/2 - rho*/4| + |y1i/2 + rho/4| - y0r/2
+    xmm2 = simde_mm_adds_epi16(C, y0r_over2);
+    simde__m128i logmax_den_im0 = simde_mm_adds_epi16(D, xmm2); // = |y1r/2 - rho*/4| + |y1i/2 + rho/4| + y0r/2
 
-    xmm2 = _mm_subs_epi16(G,y0r_over2);
-    xmm2 = _mm_adds_epi16(xmm2,H); // = |y1r/2 + rho/4| + |y1i/2 + rho*/4| - y0r/2
+    xmm2 = simde_mm_subs_epi16(G, y0r_over2);
+    xmm2 = simde_mm_adds_epi16(xmm2, H); // = |y1r/2 + rho/4| + |y1i/2 + rho*/4| - y0r/2
 
-    logmax_den_im0 = _mm_max_epi16(logmax_den_im0,xmm2); // max, denominator done
+    logmax_den_im0 = simde_mm_max_epi16(logmax_den_im0, xmm2); // max, denominator done
 
     // LLR of first bit [L1(1), L1(2), L1(3), L1(4)]
-    y0r = _mm_adds_epi16(y0r,logmax_num_re0);
-    y0r = _mm_subs_epi16(y0r,logmax_den_re0);
+    y0r = simde_mm_adds_epi16(y0r, logmax_num_re0);
+    y0r = simde_mm_subs_epi16(y0r, logmax_den_re0);
 
     // LLR of second bit [L2(1), L2(2), L2(3), L2(4)]
-    y0i = _mm_adds_epi16(y0i,logmax_num_im0);
-    y0i = _mm_subs_epi16(y0i,logmax_den_im0);
+    y0i = simde_mm_adds_epi16(y0i, logmax_num_im0);
+    y0i = simde_mm_subs_epi16(y0i, logmax_den_im0);
 
-    _mm_storeu_si128(&stream0_128i_out[i],_mm_unpacklo_epi16(y0r,y0i)); // = [L1(1), L2(1), L1(2), L2(2)]
+    simde_mm_storeu_si128(&stream0_128i_out[i], simde_mm_unpacklo_epi16(y0r, y0i)); // = [L1(1), L2(1), L1(2), L2(2)]
 
     if (i<((length>>1) - 1)) // false if only 2 REs remain
-      _mm_storeu_si128(&stream0_128i_out[i+1],_mm_unpackhi_epi16(y0r,y0i));
-
-#elif defined(__x86_64__)
+      simde_mm_storeu_si128(&stream0_128i_out[i+1], simde_mm_unpackhi_epi16(y0r, y0i));
 
-#endif
   }
 
-#if defined(__x86_64__) || defined(__i386__)
-  _mm_empty();
-  _m_empty();
-#endif
 }
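
A scalar rendering of what each 16-bit lane of the hunk above computes may help review. This is a sketch under my reading of the adds/subs/abs sequence: the vector code saturates at every step (omitted here), abs() is the <stdlib.h> one, and the helper name llr_bit0() is invented for illustration, not part of the patch. rho_rpi and rho_rmi are (Re(rho) +/- Im(rho)) scaled by 23170/2^16, i.e. divided by sqrt(8).

    /* Max-log LLR of the first bit of stream 0 for one resource element,
       given MF outputs y0, y1 and the prescaled cross-correlation terms. */
    static int16_t llr_bit0(int16_t y0r, int16_t y0i, int16_t y1r, int16_t y1i,
                            int16_t rho_rpi, int16_t rho_rmi)
    {
      int a = abs(y1r / 2 - rho_rpi) + abs(y1i / 2 - rho_rmi) + y0i / 2; // 1st numerator term
      int b = abs(y1r / 2 - rho_rmi) + abs(y1i / 2 + rho_rpi) - y0i / 2; // 2nd numerator term
      int c = abs(y1r / 2 + rho_rmi) + abs(y1i / 2 - rho_rpi) + y0i / 2; // 1st denominator term
      int d = abs(y1r / 2 + rho_rpi) + abs(y1i / 2 + rho_rmi) - y0i / 2; // 2nd denominator term
      return (int16_t)(y0r + (a > b ? a : b) - (c > d ? c : d)); // y0r + max(num) - max(den)
    }
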
diff --git a/openair1/PHY/NR_UE_TRANSPORT/nr_pbch.c b/openair1/PHY/NR_UE_TRANSPORT/nr_pbch.c
index 2629c00968eb538bdf3b9de3ca1590580288027b..7e6155676f5d94b95470331c439cd645dddc2600 100644
--- a/openair1/PHY/NR_UE_TRANSPORT/nr_pbch.c
+++ b/openair1/PHY/NR_UE_TRANSPORT/nr_pbch.c
@@ -196,34 +196,19 @@ int nr_pbch_channel_level(struct complex16 dl_ch_estimates_ext[][PBCH_MAX_RE_PER
                           NR_DL_FRAME_PARMS *frame_parms,
 			  int nb_re) {
   int16_t nb_rb=nb_re/12;
-#if defined(__x86_64__) || defined(__i386__)
-  __m128i avg128;
-  __m128i *dl_ch128;
-#elif defined(__arm__) || defined(__aarch64__)
-  int32x4_t avg128;
-  int16x8_t *dl_ch128;
-#endif
+  simde__m128i avg128;
+  simde__m128i *dl_ch128;
   int avg1=0,avg2=0;
 
   for (int aarx=0; aarx<frame_parms->nb_antennas_rx; aarx++) {
     //clear average level
-#if defined(__x86_64__) || defined(__i386__)
-    avg128 = _mm_setzero_si128();
-    dl_ch128=(__m128i *)dl_ch_estimates_ext[aarx];
-#elif defined(__arm__) || defined(__aarch64__)
-    avg128 = vdupq_n_s32(0);
-    dl_ch128=(int16x8_t *)dl_ch_estimates_ext[aarx];
-#endif
+    avg128 = simde_mm_setzero_si128();
+    dl_ch128=(simde__m128i *)dl_ch_estimates_ext[aarx];
 
     for (int rb=0; rb<nb_rb; rb++) {
-#if defined(__x86_64__) || defined(__i386__)
-      avg128 = _mm_add_epi32(avg128,_mm_madd_epi16(dl_ch128[0],dl_ch128[0]));
-      avg128 = _mm_add_epi32(avg128,_mm_madd_epi16(dl_ch128[1],dl_ch128[1]));
-      avg128 = _mm_add_epi32(avg128,_mm_madd_epi16(dl_ch128[2],dl_ch128[2]));
-#elif defined(__arm__) || defined(__aarch64__)
-      abort();
-      // to be filled in
-#endif
+      avg128 = simde_mm_add_epi32(avg128, simde_mm_madd_epi16(dl_ch128[0],dl_ch128[0]));
+      avg128 = simde_mm_add_epi32(avg128, simde_mm_madd_epi16(dl_ch128[1],dl_ch128[1]));
+      avg128 = simde_mm_add_epi32(avg128, simde_mm_madd_epi16(dl_ch128[2],dl_ch128[2]));
       dl_ch128+=3;
       /*
       if (rb==0) {
@@ -252,9 +237,9 @@ static void nr_pbch_channel_compensation(struct complex16 rxdataF_ext[][PBCH_MAX
 					 NR_DL_FRAME_PARMS *frame_parms,
 					 uint8_t output_shift) {
   for (int aarx=0; aarx<frame_parms->nb_antennas_rx; aarx++) {
-    vect128 *dl_ch128          = (vect128 *)dl_ch_estimates_ext[aarx];
-    vect128 *rxdataF128        = (vect128 *)rxdataF_ext[aarx];
-    vect128 *rxdataF_comp128   = (vect128 *)rxdataF_comp[aarx];
+    simde__m128i *dl_ch128          = (simde__m128i *)dl_ch_estimates_ext[aarx];
+    simde__m128i *rxdataF128        = (simde__m128i *)rxdataF_ext[aarx];
+    simde__m128i *rxdataF_comp128   = (simde__m128i *)rxdataF_comp[aarx];
 
     for (int re=0; re<nb_re; re+=12) {
       *rxdataF_comp128++ = mulByConjugate128(rxdataF128++, dl_ch128++, output_shift);
@@ -268,37 +253,23 @@ void nr_pbch_detection_mrc(NR_DL_FRAME_PARMS *frame_parms,
                            int **rxdataF_comp,
                            uint8_t symbol) {
   uint8_t symbol_mod;
-  int i, nb_rb=6;
-#if defined(__x86_64__) || defined(__i386__)
-  __m128i *rxdataF_comp128_0,*rxdataF_comp128_1;
-#elif defined(__arm__) || defined(__aarch64__)
-  int16x8_t *rxdataF_comp128_0,*rxdataF_comp128_1;
-#endif
+  int i, nb_rb = 6;
+  simde__m128i *rxdataF_comp128_0, *rxdataF_comp128_1;
   symbol_mod = (symbol>=(7-frame_parms->Ncp)) ? symbol-(7-frame_parms->Ncp) : symbol;
 
-  if (frame_parms->nb_antennas_rx>1) {
-#if defined(__x86_64__) || defined(__i386__)
-    rxdataF_comp128_0   = (__m128i *)&rxdataF_comp[0][symbol_mod*6*12];
-    rxdataF_comp128_1   = (__m128i *)&rxdataF_comp[1][symbol_mod*6*12];
-#elif defined(__arm__) || defined(__aarch64__)
-    rxdataF_comp128_0   = (int16x8_t *)&rxdataF_comp[0][symbol_mod*6*12];
-    rxdataF_comp128_1   = (int16x8_t *)&rxdataF_comp[1][symbol_mod*6*12];
-#endif
+  if (frame_parms->nb_antennas_rx > 1) {
+    rxdataF_comp128_0 = (simde__m128i *)&rxdataF_comp[0][symbol_mod * 6 * 12];
+    rxdataF_comp128_1 = (simde__m128i *)&rxdataF_comp[1][symbol_mod * 6 * 12];
 
     // MRC on each re of rb, both on MF output and magnitude (for 16QAM/64QAM llr computation)
-    for (i=0; i<nb_rb*3; i++) {
-#if defined(__x86_64__) || defined(__i386__)
-      rxdataF_comp128_0[i] = _mm_adds_epi16(_mm_srai_epi16(rxdataF_comp128_0[i],1),_mm_srai_epi16(rxdataF_comp128_1[i],1));
-#elif defined(__arm__) || defined(__aarch64__)
-      rxdataF_comp128_0[i] = vhaddq_s16(rxdataF_comp128_0[i],rxdataF_comp128_1[i]);
-#endif
+    for (i = 0; i < nb_rb * 3; i++) {
+      rxdataF_comp128_0[i] =
+          simde_mm_adds_epi16(simde_mm_srai_epi16(rxdataF_comp128_0[i], 1), simde_mm_srai_epi16(rxdataF_comp128_1[i], 1));
     }
   }
 
-#if defined(__x86_64__) || defined(__i386__)
-  _mm_empty();
-  _m_empty();
-#endif
+  simde_mm_empty();
+  simde_m_empty();
 }
 
 static void nr_pbch_unscrambling(int16_t *demod_pbch_e,
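
The avg128 accumulation above relies on simde_mm_madd_epi16(x, x) producing re^2 + im^2 per complex sample in each 32-bit lane, i.e. |h|^2 three resource blocks at a time. A scalar cross-check (the helper name and the final division are illustrative assumptions; the real code sums the four lanes afterwards and, roughly, keeps the maximum over antennas):

    static int32_t channel_energy(const struct complex16 *ch, int nb_re)
    {
      int64_t acc = 0;
      for (int i = 0; i < nb_re; i++)
        acc += (int32_t)ch[i].r * ch[i].r + (int32_t)ch[i].i * ch[i].i; // madd_epi16(h, h), one lane
      return (int32_t)(acc / nb_re);
    }
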
diff --git a/openair1/PHY/TOOLS/cadd_vv.c b/openair1/PHY/TOOLS/cadd_vv.c
index 88a2bd98140a71b6c73a3273d01e6c980ab53dd1..f1c61cc60adc2959babb523dc980ecb13137b165 100644
--- a/openair1/PHY/TOOLS/cadd_vv.c
+++ b/openair1/PHY/TOOLS/cadd_vv.c
@@ -30,16 +30,16 @@ int32_t sub_cpx_vector16(int16_t *x,
 {
   unsigned int i;                 // loop counter
 
-  __m128i *x_128;
-  __m128i *y_128;
-  __m128i *z_128;
+  simde__m128i *x_128;
+  simde__m128i *y_128;
+  simde__m128i *z_128;
 
-  x_128 = (__m128i *)&x[0];
-  y_128 = (__m128i *)&y[0];
-  z_128 = (__m128i *)&z[0];
+  x_128 = (simde__m128i *)&x[0];
+  y_128 = (simde__m128i *)&y[0];
+  z_128 = (simde__m128i *)&z[0];
 
  for(i=0; i<(N>>3); i++) {
-    z_128[0] = _mm_subs_epi16(x_128[0],y_128[0]);
+    z_128[0] = simde_mm_subs_epi16(x_128[0],y_128[0]);
 
     x_128++;
     y_128++;
@@ -47,8 +47,8 @@ int32_t sub_cpx_vector16(int16_t *x,
 
   }
 
-  _mm_empty();
-  _m_empty();
+  simde_mm_empty();
+  simde_m_empty();
   return(0);
 }
 
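
With the guards gone, sub_cpx_vector16 is just a saturating element-wise difference: the N>>3 loop bound covers N int16 values, eight per 128-bit vector. A minimal scalar sketch, with sat16() as an illustrative stand-in for the simde_mm_subs_epi16 saturation:

    static inline int16_t sat16(int32_t v)
    {
      return (int16_t)(v > 32767 ? 32767 : (v < -32768 ? -32768 : v));
    }

    for (unsigned int i = 0; i < N; i++)
      z[i] = sat16((int32_t)x[i] - y[i]);
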
diff --git a/openair1/PHY/TOOLS/cdot_prod.c b/openair1/PHY/TOOLS/cdot_prod.c
index 6caf99543f4665b73f4f8c491d424efd5b40ef3a..7370bb21b1f13b877014230caa2fe6c7cc317a75 100644
--- a/openair1/PHY/TOOLS/cdot_prod.c
+++ b/openair1/PHY/TOOLS/cdot_prod.c
@@ -36,23 +36,23 @@ c32_t dot_product(const c16_t *x,//! input vector
   const int16_t reflip[32] __attribute__((aligned(32))) = {1, -1, 1, -1, 1, -1, 1, -1, 1, -1, 1, -1, 1, -1, 1, -1};
   const int8_t imshuffle[64] __attribute__((aligned(32))) = {2, 3, 0, 1, 6, 7, 4, 5, 10, 11, 8, 9, 14, 15, 12, 13, 18, 19, 16, 17, 22, 23, 20, 21, 26, 27, 24, 25, 30, 31, 28, 29};
   const c16_t *end = x + N;
-  __m256i cumul_re = {0}, cumul_im = {0};
+  simde__m256i cumul_re = {0}, cumul_im = {0};
   while (x < end) {
-    const __m256i in1 = simde_mm256_loadu_si256((__m256i *)x);
-    const __m256i in2 = simde_mm256_loadu_si256((__m256i *)y);
-    const __m256i tmpRe = simde_mm256_madd_epi16(in1, in2);
+    const simde__m256i in1 = simde_mm256_loadu_si256((simde__m256i *)x);
+    const simde__m256i in2 = simde_mm256_loadu_si256((simde__m256i *)y);
+    const simde__m256i tmpRe = simde_mm256_madd_epi16(in1, in2);
     cumul_re = simde_mm256_add_epi32(cumul_re, simde_mm256_srai_epi32(tmpRe, output_shift));
-    const __m256i tmp1 = simde_mm256_shuffle_epi8(in2, *(__m256i *)imshuffle);
-    const __m256i tmp2 = simde_mm256_sign_epi16(tmp1, *(__m256i *)reflip);
-    const __m256i tmpIm = simde_mm256_madd_epi16(in1, tmp2);
+    const simde__m256i tmp1 = simde_mm256_shuffle_epi8(in2, *(simde__m256i *)imshuffle);
+    const simde__m256i tmp2 = simde_mm256_sign_epi16(tmp1, *(simde__m256i *)reflip);
+    const simde__m256i tmpIm = simde_mm256_madd_epi16(in1, tmp2);
     cumul_im = simde_mm256_add_epi32(cumul_im, simde_mm256_srai_epi32(tmpIm, output_shift));
     x += 8;
     y += 8;
   }
 
   // this gives Re Re Im Im Re Re Im Im
-  const __m256i cumulTmp = simde_mm256_hadd_epi32(cumul_re, cumul_im);
-  const __m256i cumul = simde_mm256_hadd_epi32(cumulTmp, cumulTmp);
+  const simde__m256i cumulTmp = simde_mm256_hadd_epi32(cumul_re, cumul_im);
+  const simde__m256i cumul = simde_mm256_hadd_epi32(cumulTmp, cumulTmp);
 
   c32_t ret;
   ret.r = simde_mm256_extract_epi32(cumul, 0) + simde_mm256_extract_epi32(cumul, 4);
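
As far as the imshuffle/reflip masks read, dot_product computes the sum over i of conj(x[i]) * y[i], with a per-term arithmetic shift. A scalar reference, useful for unit-testing the SIMDE path (the function name is an assumption, not part of the patch):

    c32_t dot_product_ref(const c16_t *x, const c16_t *y, uint32_t N, int shift)
    {
      c32_t acc = {0};
      for (uint32_t i = 0; i < N; i++) {
        acc.r += ((int32_t)x[i].r * y[i].r + (int32_t)x[i].i * y[i].i) >> shift;
        acc.i += ((int32_t)x[i].r * y[i].i - (int32_t)x[i].i * y[i].r) >> shift;
      }
      return acc;
    }
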
diff --git a/openair1/PHY/TOOLS/cmult_sv.c b/openair1/PHY/TOOLS/cmult_sv.c
index 8679f2cb971a8249386df3bd3073dc0498880089..6cbf6ccd975419207b58ac97dc671d05d558ed83 100644
--- a/openair1/PHY/TOOLS/cmult_sv.c
+++ b/openair1/PHY/TOOLS/cmult_sv.c
@@ -21,6 +21,8 @@
 
 #include "PHY/sse_intrin.h"
 #include "tools_defs.h"
+#include <simde/simde-common.h>
+#include <simde/x86/sse.h>
 
 void multadd_complex_vector_real_scalar(int16_t *x,
                                         int16_t alpha,
@@ -46,10 +48,9 @@ void multadd_complex_vector_real_scalar(int16_t *x,
     for (n=0; n<N>>2; n++) {
       y_128[n] = adds_int16(y_128[n],mulhi_int16(x_128[n],alpha_128));
     }
- 
-  _mm_empty();
-  _m_empty();
 
+  simde_mm_empty();
+  simde_m_empty();
 }
 
 void multadd_real_vector_complex_scalar(const int16_t *x, const int16_t *alpha, int16_t *y, uint32_t N)
@@ -64,22 +65,12 @@ void multadd_real_vector_complex_scalar(const int16_t *x, const int16_t *alpha,
   const simd_q15_t alpha_r_128 = set1_int16(alpha[0]);
   const simd_q15_t alpha_i_128 = set1_int16(alpha[1]);
   for (i=0; i<N>>3; i++) {
-    const simd_q15_t yr = mulhi_s1_int16(alpha_r_128, x_128[i]);
-    const simd_q15_t yi = mulhi_s1_int16(alpha_i_128, x_128[i]);
-#if defined(__x86_64__) || defined(__i386__)
-    const simd_q15_t tmp = _mm_loadu_si128(y_128);
-    _mm_storeu_si128(y_128++, _mm_adds_epi16(tmp, _mm_unpacklo_epi16(yr, yi)));
-    const simd_q15_t tmp2 = _mm_loadu_si128(y_128);
-    _mm_storeu_si128(y_128++, _mm_adds_epi16(tmp2, _mm_unpackhi_epi16(yr, yi)));
-#elif defined(__arm__) || defined (__aarch64__)
-    int16x8x2_t yint;
-    yint = vzipq_s16(yr,yi);
-    *y_128 = adds_int16(*y_128, yint.val[0]);
-    j++;
-    *y_128 = adds_int16(*y_128, yint.val[1]);
-
-    j++;
-#endif
+    const simd_q15_t yr = mulhi_s1_int16(alpha_r_128, x_128[i]);
+    const simd_q15_t yi = mulhi_s1_int16(alpha_i_128, x_128[i]);
+    const simd_q15_t tmp = simde_mm_loadu_si128(y_128);
+    simde_mm_storeu_si128(y_128++, simde_mm_adds_epi16(tmp, simde_mm_unpacklo_epi16(yr, yi)));
+    const simd_q15_t tmp2 = simde_mm_loadu_si128(y_128);
+    simde_mm_storeu_si128(y_128++, simde_mm_adds_epi16(tmp2, simde_mm_unpackhi_epi16(yr, yi)));
   }
 }
 
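
Assuming mulhi_s1_int16(a, b) is the usual (a*b)>>15 Q15 helper from tools_defs.h (an assumption; it is not shown in this patch), the unified loop scales each real sample by the complex alpha and accumulates into the interleaved complex output, roughly:

    for (uint32_t i = 0; i < N; i++) {
      y[2 * i]     += (int16_t)(((int32_t)alpha[0] * x[i]) >> 15); // real part; adds_epi16 saturates
      y[2 * i + 1] += (int16_t)(((int32_t)alpha[1] * x[i]) >> 15); // imaginary part
    }
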
@@ -91,54 +82,52 @@ void rotate_cpx_vector(const c16_t *const x, const c16_t *const alpha, c16_t *y,
   // output_shift reduces the result of the multiplication by this number of bits
   if ( __builtin_cpu_supports("avx2")) {
     // output is 32 bytes aligned, but not the input
-    
-    const c16_t for_re={alpha->r, -alpha->i};
-    const __m256i alpha_for_real = simde_mm256_set1_epi32(*(uint32_t *)&for_re);
-    const c16_t for_im={alpha->i, alpha->r};
-    const __m256i alpha_for_im = simde_mm256_set1_epi32(*(uint32_t *)&for_im);
-    const __m256i perm_mask = simde_mm256_set_epi8(31,
-                                                   30,
-                                                   23,
-                                                   22,
-                                                   29,
-                                                   28,
-                                                   21,
-                                                   20,
-                                                   27,
-                                                   26,
-                                                   19,
-                                                   18,
-                                                   25,
-                                                   24,
-                                                   17,
-                                                   16,
-                                                   15,
-                                                   14,
-                                                   7,
-                                                   6,
-                                                   13,
-                                                   12,
-                                                   5,
-                                                   4,
-                                                   11,
-                                                   10,
-                                                   3,
-                                                   2,
-                                                   9,
-                                                   8,
-                                                   1,
-                                                   0);
-    __m256i* xd= (__m256i*)x;
-    const __m256i *end=xd+N/8;
-    for( __m256i* yd = (__m256i *)y; xd<end ; yd++, xd++) {
-      const __m256i y256= _mm256_lddqu_si256(xd);
-      const __m256i xre = simde_mm256_srai_epi32(simde_mm256_madd_epi16(y256,alpha_for_real),
-						 output_shift);
-      const __m256i xim = simde_mm256_srai_epi32(simde_mm256_madd_epi16(y256,alpha_for_im),
-						 output_shift);
+
+    const c16_t for_re = {alpha->r, -alpha->i};
+    const simde__m256i alpha_for_real = simde_mm256_set1_epi32(*(uint32_t *)&for_re);
+    const c16_t for_im = {alpha->i, alpha->r};
+    const simde__m256i alpha_for_im = simde_mm256_set1_epi32(*(uint32_t *)&for_im);
+    const simde__m256i perm_mask = simde_mm256_set_epi8(31,
+                                                        30,
+                                                        23,
+                                                        22,
+                                                        29,
+                                                        28,
+                                                        21,
+                                                        20,
+                                                        27,
+                                                        26,
+                                                        19,
+                                                        18,
+                                                        25,
+                                                        24,
+                                                        17,
+                                                        16,
+                                                        15,
+                                                        14,
+                                                        7,
+                                                        6,
+                                                        13,
+                                                        12,
+                                                        5,
+                                                        4,
+                                                        11,
+                                                        10,
+                                                        3,
+                                                        2,
+                                                        9,
+                                                        8,
+                                                        1,
+                                                        0);
+    simde__m256i *xd = (simde__m256i *)x;
+    const simde__m256i *end = xd + N / 8;
+    for (simde__m256i *yd = (simde__m256i *)y; xd < end; yd++, xd++) {
+      const simde__m256i y256 = simde_mm256_lddqu_si256(xd);
+      const simde__m256i xre = simde_mm256_srai_epi32(simde_mm256_madd_epi16(y256, alpha_for_real), output_shift);
+      const simde__m256i xim = simde_mm256_srai_epi32(simde_mm256_madd_epi16(y256, alpha_for_im), output_shift);
       // a bit faster than unpacklo+unpackhi+packs
-      const __m256i tmp=simde_mm256_packs_epi32(xre,xim);
-      _mm256_storeu_si256(yd,simde_mm256_shuffle_epi8(tmp,perm_mask));
+      const simde__m256i tmp = simde_mm256_packs_epi32(xre, xim);
+      simde_mm256_storeu_si256(yd, simde_mm256_shuffle_epi8(tmp, perm_mask));
     }
     c16_t* alpha16=(c16_t*) alpha, *yLast;
     yLast=((c16_t*)y)+(N/8)*8;
@@ -168,8 +157,7 @@ void rotate_cpx_vector(const c16_t *const x, const c16_t *const alpha, c16_t *y,
     simd_q15_t *y_128,alpha_128;
     int32_t *xd=(int32_t *)x; 
 
-#if defined(__x86_64__) || defined(__i386__)
-    __m128i shift = _mm_cvtsi32_si128(output_shift);
+    simde__m128i shift = simde_mm_cvtsi32_si128(output_shift);
     register simd_q15_t m0,m1,m2,m3;
 
     ((int16_t *)&alpha_128)[0] = alpha->r;
@@ -180,56 +168,19 @@ void rotate_cpx_vector(const c16_t *const x, const c16_t *const alpha, c16_t *y,
     ((int16_t *)&alpha_128)[5] = -alpha->i;
     ((int16_t *)&alpha_128)[6] = alpha->i;
     ((int16_t *)&alpha_128)[7] = alpha->r;
-#elif defined(__arm__) || defined(__aarch64__)
-    int32x4_t shift;
-    int32x4_t ab_re0,ab_re1,ab_im0,ab_im1,re32,im32;
-    int16_t reflip[8]  __attribute__((aligned(16))) = {1,-1,1,-1,1,-1,1,-1};
-    int32x4x2_t xtmp;
-
-    ((int16_t *)&alpha_128)[0] = alpha->r;
-    ((int16_t *)&alpha_128)[1] = alpha->i;
-    ((int16_t *)&alpha_128)[2] = alpha->r;
-    ((int16_t *)&alpha_128)[3] = alpha->i;
-    ((int16_t *)&alpha_128)[4] = alpha->r;
-    ((int16_t *)&alpha_128)[5] = alpha->i;
-    ((int16_t *)&alpha_128)[6] = alpha->r;
-    ((int16_t *)&alpha_128)[7] = alpha->i;
-    int16x8_t bflip = vrev32q_s16(alpha_128);
-    int16x8_t bconj = vmulq_s16(alpha_128,*(int16x8_t *)reflip);
-    shift = vdupq_n_s32(-output_shift);
-#endif
     y_128 = (simd_q15_t *) y;
 
 
     for(i=0; i<N>>2; i++) {
-#if defined(__x86_64__) || defined(__i386__)
-      m0 = _mm_setr_epi32(xd[0],xd[0],xd[1],xd[1]);
-      m1 = _mm_setr_epi32(xd[2],xd[2],xd[3],xd[3]);
-      m2 = _mm_madd_epi16(m0,alpha_128); //complex multiply. result is 32bit [Re Im Re Im]
-      m3 = _mm_madd_epi16(m1,alpha_128); //complex multiply. result is 32bit [Re Im Re Im]
-      m2 = _mm_sra_epi32(m2,shift);        // shift right by shift in order to  compensate for the input amplitude
-      m3 = _mm_sra_epi32(m3,shift);        // shift right by shift in order to  compensate for the input amplitude
-
-      y_128[0] = _mm_packs_epi32(m2,m3);        // pack in 16bit integers with saturation [re im re im re im re im]
+      m0 = simde_mm_setr_epi32(xd[0],xd[0],xd[1],xd[1]);
+      m1 = simde_mm_setr_epi32(xd[2],xd[2],xd[3],xd[3]);
+      m2 = simde_mm_madd_epi16(m0,alpha_128); //complex multiply. result is 32bit [Re Im Re Im]
+      m3 = simde_mm_madd_epi16(m1,alpha_128); //complex multiply. result is 32bit [Re Im Re Im]
+      m2 = simde_mm_sra_epi32(m2,shift);        // shift right by shift in order to  compensate for the input amplitude
+      m3 = simde_mm_sra_epi32(m3,shift);        // shift right by shift in order to  compensate for the input amplitude
+
+      y_128[0] = simde_mm_packs_epi32(m2,m3);        // pack in 16bit integers with saturation [re im re im re im re im]
       //print_ints("y_128[0]=", &y_128[0]);
-#elif defined(__arm__) || defined(__aarch64__)
-
-      ab_re0 = vmull_s16(((int16x4_t*)xd)[0],((int16x4_t*)&bconj)[0]);
-      ab_re1 = vmull_s16(((int16x4_t*)xd)[1],((int16x4_t*)&bconj)[1]);
-      ab_im0 = vmull_s16(((int16x4_t*)xd)[0],((int16x4_t*)&bflip)[0]);
-      ab_im1 = vmull_s16(((int16x4_t*)xd)[1],((int16x4_t*)&bflip)[1]);
-      re32 = vshlq_s32(vcombine_s32(vpadd_s32(((int32x2_t*)&ab_re0)[0],((int32x2_t*)&ab_re0)[1]),
-                                    vpadd_s32(((int32x2_t*)&ab_re1)[0],((int32x2_t*)&ab_re1)[1])),
-                       shift);
-      im32 = vshlq_s32(vcombine_s32(vpadd_s32(((int32x2_t*)&ab_im0)[0],((int32x2_t*)&ab_im0)[1]),
-                                    vpadd_s32(((int32x2_t*)&ab_im1)[0],((int32x2_t*)&ab_im1)[1])),
-                       shift);
-
-      xtmp = vzipq_s32(re32,im32);
-  
-      y_128[0] = vcombine_s16(vmovn_s32(xtmp.val[0]),vmovn_s32(xtmp.val[1]));
-
-#endif
       xd+=4;
       y_128+=1;
     }
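
The AVX2 path and the 128-bit tail above compute the same per-element rotation: alpha_for_real/alpha_for_im pack {Re(alpha), -Im(alpha)} and {Im(alpha), Re(alpha)} so that madd yields the real and imaginary parts of x * alpha directly. Scalar sketch, with an explicit clamp standing in for the packs_epi32 saturation and c16_t taken as the {int16_t r, i} pair used throughout OAI:

    static c16_t rotate_one(c16_t x, c16_t alpha, int shift)
    {
      int32_t re = ((int32_t)x.r * alpha.r - (int32_t)x.i * alpha.i) >> shift;
      int32_t im = ((int32_t)x.r * alpha.i + (int32_t)x.i * alpha.r) >> shift;
      c16_t out = {(int16_t)(re > 32767 ? 32767 : (re < -32768 ? -32768 : re)),
                   (int16_t)(im > 32767 ? 32767 : (im < -32768 ? -32768 : im))};
      return out;
    }
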
diff --git a/openair1/PHY/TOOLS/cmult_vv.c b/openair1/PHY/TOOLS/cmult_vv.c
index 9d1adff5726246c9bb1391fcfaf6cd8644d02bb1..6bee29551f101c327fea487dcbf22faa27483089 100644
--- a/openair1/PHY/TOOLS/cmult_vv.c
+++ b/openair1/PHY/TOOLS/cmult_vv.c
@@ -23,21 +23,13 @@
 #include "tools_defs.h"
 #include <stdio.h>
 
-#if defined(__x86_64__) || defined(__i386__)
-static const int16_t conjug[8] __attribute__((aligned(16))) = {-1, 1, -1, 1, -1, 1, -1, 1};
-static const int16_t conjug2[8] __attribute__((aligned(16))) = {1, -1, 1, -1, 1, -1, 1, -1};
+static const int16_t conjug[8] __attribute__((aligned(16))) = {-1, 1, -1, 1, -1, 1, -1, 1};
+static const int16_t conjug2[8] __attribute__((aligned(16))) = {1, -1, 1, -1, 1, -1, 1, -1};
 
-#define simd_q15_t __m128i
-#define simdshort_q15_t __m64
-#define set1_int16(a) _mm_set1_epi16(a)
-#define setr_int16(a0, a1, a2, a3, a4, a5, a6, a7) _mm_setr_epi16(a0, a1, a2, a3, a4, a5, a6, a7 )
-#elif defined(__arm__) || defined(__aarch64__)
-static const int16_t conjug[4] __attribute__((aligned(16))) = {-1, 1, -1, 1};
-#define simd_q15_t int16x8_t
-#define simdshort_q15_t int16x4_t
-#define _mm_empty()
-#define _m_empty()
-#endif
+#define simd_q15_t simde__m128i
+#define simdshort_q15_t simde__m64
+#define set1_int16(a) simde_mm_set1_epi16(a)
+#define setr_int16(a0, a1, a2, a3, a4, a5, a6, a7) simde_mm_setr_epi16(a0, a1, a2, a3, a4, a5, a6, a7 )
 
 int mult_cpx_conj_vector(int16_t *x1,
                          int16_t *x2,
@@ -66,17 +58,9 @@ int mult_cpx_conj_vector(int16_t *x1,
   simd_q15_t *x1_128;
   simd_q15_t *x2_128;
   simd_q15_t *y_128;
-#if defined(__x86_64__) || defined(__i386__)
   simd_q15_t tmp_re,tmp_im;
   simd_q15_t tmpy0,tmpy1;
 
-#elif defined(__arm__) || defined(__aarch64__)
-  int32x4_t tmp_re,tmp_im;
-  int32x4_t tmp_re1,tmp_im1;
-  int16x4x2_t tmpy;
-  int32x4_t shift = vdupq_n_s32(-output_shift);
-#endif
-
   x1_128 = (simd_q15_t *)&x1[0];
   x2_128 = (simd_q15_t *)&x2[0];
   y_128  = (simd_q15_t *)&y[0];
@@ -84,55 +68,27 @@ int mult_cpx_conj_vector(int16_t *x1,
 
   // we compute 4 cpx multiply for each loop
   for(i=0; i<(N>>2); i++) {
-#if defined(__x86_64__) || defined(__i386__)
-    tmp_re = _mm_madd_epi16(*x1_128,*x2_128);
-    tmp_im = _mm_shufflelo_epi16(*x1_128,_MM_SHUFFLE(2,3,0,1));
-    tmp_im = _mm_shufflehi_epi16(tmp_im,_MM_SHUFFLE(2,3,0,1));
-    tmp_im = _mm_sign_epi16(tmp_im,*(__m128i*)&conjug[0]);
-    tmp_im = _mm_madd_epi16(tmp_im,*x2_128);
-    tmp_re = _mm_srai_epi32(tmp_re,output_shift);
-    tmp_im = _mm_srai_epi32(tmp_im,output_shift);
-    tmpy0  = _mm_unpacklo_epi32(tmp_re,tmp_im);
-    tmpy1  = _mm_unpackhi_epi32(tmp_re,tmp_im);
-    if (madd==0)
-      *y_128 = _mm_packs_epi32(tmpy0,tmpy1);
-    else
-      *y_128 += _mm_packs_epi32(tmpy0,tmpy1);
-
-#elif defined(__arm__) || defined(__aarch64__)
-
-    tmp_re  = vmull_s16(((simdshort_q15_t *)x1_128)[0], ((simdshort_q15_t*)x2_128)[0]);
-    //tmp_re = [Re(x1[0])Re(x2[0]) Im(x1[0])Im(x2[0]) Re(x1[1])Re(x2[1]) Im(x1[1])Im(x2[1])]
-    tmp_re1 = vmull_s16(((simdshort_q15_t *)x1_128)[1], ((simdshort_q15_t*)x2_128)[1]);
-    //tmp_re1 = [Re(x1[1])Re(x2[1]) Im(x1[1])Im(x2[1]) Re(x1[1])Re(x2[2]) Im(x1[1])Im(x2[2])]
-    tmp_re  = vcombine_s32(vpadd_s32(vget_low_s32(tmp_re),vget_high_s32(tmp_re)),
-                           vpadd_s32(vget_low_s32(tmp_re1),vget_high_s32(tmp_re1)));
-    //tmp_re = [Re(ch[0])Re(rx[0])+Im(ch[0])Im(ch[0]) Re(ch[1])Re(rx[1])+Im(ch[1])Im(ch[1]) Re(ch[2])Re(rx[2])+Im(ch[2]) Im(ch[2]) Re(ch[3])Re(rx[3])+Im(ch[3])Im(ch[3])]
-
-    tmp_im  = vmull_s16(vrev32_s16(vmul_s16(((simdshort_q15_t*)x2_128)[0],*(simdshort_q15_t*)conjug)), ((simdshort_q15_t*)x1_128)[0]);
-    //tmp_im = [-Im(ch[0])Re(rx[0]) Re(ch[0])Im(rx[0]) -Im(ch[1])Re(rx[1]) Re(ch[1])Im(rx[1])]
-    tmp_im1 = vmull_s16(vrev32_s16(vmul_s16(((simdshort_q15_t*)x2_128)[1],*(simdshort_q15_t*)conjug)), ((simdshort_q15_t*)x1_128)[1]);
-    //tmp_im1 = [-Im(ch[2])Re(rx[2]) Re(ch[2])Im(rx[2]) -Im(ch[3])Re(rx[3]) Re(ch[3])Im(rx[3])]
-    tmp_im  = vcombine_s32(vpadd_s32(vget_low_s32(tmp_im),vget_high_s32(tmp_im)),
-                           vpadd_s32(vget_low_s32(tmp_im1),vget_high_s32(tmp_im1)));
-    //tmp_im = [-Im(ch[0])Re(rx[0])+Re(ch[0])Im(rx[0]) -Im(ch[1])Re(rx[1])+Re(ch[1])Im(rx[1]) -Im(ch[2])Re(rx[2])+Re(ch[2])Im(rx[2]) -Im(ch[3])Re(rx[3])+Re(ch[3])Im(rx[3])]
-
-    tmp_re = vqshlq_s32(tmp_re,shift);
-    tmp_im = vqshlq_s32(tmp_im,shift);
-    tmpy   = vzip_s16(vmovn_s32(tmp_re),vmovn_s32(tmp_im));
+    tmp_re = simde_mm_madd_epi16(*x1_128,*x2_128);
+    tmp_im = simde_mm_shufflelo_epi16(*x1_128, SIMDE_MM_SHUFFLE(2,3,0,1));
+    tmp_im = simde_mm_shufflehi_epi16(tmp_im, SIMDE_MM_SHUFFLE(2,3,0,1));
+    tmp_im = simde_mm_sign_epi16(tmp_im,*(simde__m128i*)&conjug[0]);
+    tmp_im = simde_mm_madd_epi16(tmp_im,*x2_128);
+    tmp_re = simde_mm_srai_epi32(tmp_re,output_shift);
+    tmp_im = simde_mm_srai_epi32(tmp_im,output_shift);
+    tmpy0  = simde_mm_unpacklo_epi32(tmp_re,tmp_im);
+    tmpy1  = simde_mm_unpackhi_epi32(tmp_re,tmp_im);
     if (madd==0)
-      *y_128 = vcombine_s16(tmpy.val[0],tmpy.val[1]);
+      *y_128 = simde_mm_packs_epi32(tmpy0,tmpy1);
     else
-      *y_128 += vcombine_s16(tmpy.val[0],tmpy.val[1]);
-#endif
+      *y_128 += simde_mm_packs_epi32(tmpy0,tmpy1);
     x1_128++;
     x2_128++;
     y_128++;
   }
 
 
-  _mm_empty();
-  _m_empty();
+  simde_mm_empty();
+  simde_m_empty();
 
   return(0);
 }
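
Decoded, the conjug mask flips Im(x1) before the second madd, so mult_cpx_conj_vector forms conj(x1[i]) * x2[i] per element. Scalar equivalent of the loop body (interleaved re/im layout; the packs_epi32 saturation is omitted):

    for (uint32_t i = 0; i < N; i++) {
      int32_t re = ((int32_t)x1[2*i] * x2[2*i]   + (int32_t)x1[2*i+1] * x2[2*i+1]) >> output_shift;
      int32_t im = ((int32_t)x1[2*i] * x2[2*i+1] - (int32_t)x1[2*i+1] * x2[2*i])   >> output_shift;
      if (madd == 0) { y[2*i] = (int16_t)re;  y[2*i+1] = (int16_t)im; }
      else           { y[2*i] += (int16_t)re; y[2*i+1] += (int16_t)im; }
    }
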
@@ -170,32 +126,32 @@ int mult_cpx_vector(int16_t *x1, //Q15
   //right shift by 13 while p_a * x0 and 15 while
   // we compute 4 cpx multiply for each loop
   for(i=0; i<(N>>2); i++) {
-    tmp_re = _mm_sign_epi16(*x1_128,*(__m128i*)&conjug2[0]);// Q15
+    tmp_re = simde_mm_sign_epi16(*x1_128,*(simde__m128i*)&conjug2[0]);// Q15
     //print_shorts("tmp_re1:",&tmp_re[i]);
-    tmp_re = _mm_madd_epi16(tmp_re,*x2_128); //Q28
+    tmp_re = simde_mm_madd_epi16(tmp_re,*x2_128); //Q28
     //print_ints("tmp_re2:",&tmp_re[i]);
-    tmp_im = _mm_shufflelo_epi16(*x1_128,_MM_SHUFFLE(2,3,0,1)); //Q15
+    tmp_im = simde_mm_shufflelo_epi16(*x1_128, SIMDE_MM_SHUFFLE(2,3,0,1)); //Q15
     //print_shorts("tmp_im1:",&tmp_im[i]);
-    tmp_im = _mm_shufflehi_epi16(tmp_im,_MM_SHUFFLE(2,3,0,1)); //Q15
+    tmp_im = simde_mm_shufflehi_epi16(tmp_im, SIMDE_MM_SHUFFLE(2,3,0,1)); //Q15
     //print_shorts("tmp_im2:",&tmp_im[i]);
-    tmp_im = _mm_madd_epi16(tmp_im, *x2_128); //Q28
+    tmp_im = simde_mm_madd_epi16(tmp_im, *x2_128); //Q28
     //print_ints("tmp_im3:",&tmp_im[i]);
-    tmp_re = _mm_srai_epi32(tmp_re,output_shift);//Q(28-shift)
+    tmp_re = simde_mm_srai_epi32(tmp_re,output_shift);//Q(28-shift)
     //print_ints("tmp_re shifted:",&tmp_re[i]);
-    tmp_im = _mm_srai_epi32(tmp_im,output_shift); //Q(28-shift)
+    tmp_im = simde_mm_srai_epi32(tmp_im,output_shift); //Q(28-shift)
     //print_ints("tmp_im shifted:",&tmp_im[i]);
-    tmpy0  = _mm_unpacklo_epi32(tmp_re,tmp_im); //Q(28-shift)
+    tmpy0  = simde_mm_unpacklo_epi32(tmp_re,tmp_im); //Q(28-shift)
     //print_ints("unpack lo :",&tmpy0[i]);
-    tmpy1  = _mm_unpackhi_epi32(tmp_re,tmp_im); //Q(28-shift)
+    tmpy1  = simde_mm_unpackhi_epi32(tmp_re,tmp_im); //Q(28-shift)
     //print_ints("mrc rho0:",&tmpy1[i]);
-    *y_128 = _mm_packs_epi32(tmpy0,tmpy1); //must be Q15
+    *y_128 = simde_mm_packs_epi32(tmpy0,tmpy1); //must be Q15
     //print_shorts("*y_128:",&y_128[i]);
     x1_128++;
     x2_128++;
     y_128++;
   }
-  _mm_empty();
-  _m_empty();
+  simde_mm_empty();
+  simde_m_empty();
   return(0);
 }
 
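
mult_cpx_vector differs only in the mask: conjug2 = {1,-1,...} negates Im(x1) in the real-part madd instead, which gives the plain product x1[i] * x2[i]. Scalar sketch (sat16() as in the cadd_vv.c sketch above):

    for (uint32_t i = 0; i < N; i++) {
      y[2*i]   = sat16(((int32_t)x1[2*i] * x2[2*i]   - (int32_t)x1[2*i+1] * x2[2*i+1]) >> output_shift);
      y[2*i+1] = sat16(((int32_t)x1[2*i] * x2[2*i+1] + (int32_t)x1[2*i+1] * x2[2*i])   >> output_shift);
    }
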
@@ -224,45 +180,34 @@ int multadd_cpx_vector(int16_t *x1,
   simd_q15_t *x1_128;
   simd_q15_t *x2_128;
   simd_q15_t *y_128;
-#if defined(__x86_64__) || defined(__i386__)
   simd_q15_t tmp_re,tmp_im;
   simd_q15_t tmpy0,tmpy1;
-#elif defined(__arm__) || defined(__aarch64__)
-  int32x4_t tmp_re,tmp_im;
-  int32x4_t tmp_re1,tmp_im1;
-  int16x4x2_t tmpy;
-  int32x4_t shift = vdupq_n_s32(-output_shift);
-#endif
   x1_128 = (simd_q15_t *)&x1[0];
   x2_128 = (simd_q15_t *)&x2[0];
   y_128  = (simd_q15_t *)&y[0];
   // we compute 4 cpx multiply for each loop
   for(i=0; i<(N>>2); i++) {
-#if defined(__x86_64__) || defined(__i386__)
-    tmp_re = _mm_sign_epi16(*x1_128,*(__m128i*)&conjug2[0]);
-    tmp_re = _mm_madd_epi16(tmp_re,*x2_128);
-    tmp_im = _mm_shufflelo_epi16(*x1_128,_MM_SHUFFLE(2,3,0,1));
-    tmp_im = _mm_shufflehi_epi16(tmp_im,_MM_SHUFFLE(2,3,0,1));
-    tmp_im = _mm_madd_epi16(tmp_im,*x2_128);
-    tmp_re = _mm_srai_epi32(tmp_re,output_shift);
-    tmp_im = _mm_srai_epi32(tmp_im,output_shift);
-    tmpy0  = _mm_unpacklo_epi32(tmp_re,tmp_im);
+    tmp_re = simde_mm_sign_epi16(*x1_128,*(simde__m128i*)&conjug2[0]);
+    tmp_re = simde_mm_madd_epi16(tmp_re,*x2_128);
+    tmp_im = simde_mm_shufflelo_epi16(*x1_128, SIMDE_MM_SHUFFLE(2,3,0,1));
+    tmp_im = simde_mm_shufflehi_epi16(tmp_im, SIMDE_MM_SHUFFLE(2,3,0,1));
+    tmp_im = simde_mm_madd_epi16(tmp_im,*x2_128);
+    tmp_re = simde_mm_srai_epi32(tmp_re,output_shift);
+    tmp_im = simde_mm_srai_epi32(tmp_im,output_shift);
+    tmpy0  = simde_mm_unpacklo_epi32(tmp_re,tmp_im);
     //print_ints("unpack lo:",&tmpy0[i]);
-    tmpy1  = _mm_unpackhi_epi32(tmp_re,tmp_im);
+    tmpy1  = simde_mm_unpackhi_epi32(tmp_re,tmp_im);
     //print_ints("unpack hi:",&tmpy1[i]);
     if (zero_flag == 1)
-      *y_128 = _mm_packs_epi32(tmpy0,tmpy1);
+      *y_128 = simde_mm_packs_epi32(tmpy0,tmpy1);
     else
-      *y_128 = _mm_adds_epi16(*y_128,_mm_packs_epi32(tmpy0,tmpy1));
+      *y_128 = simde_mm_adds_epi16(*y_128,simde_mm_packs_epi32(tmpy0,tmpy1));
     //print_shorts("*y_128:",&y_128[i]);
-#elif defined(__arm__) || defined(__aarch64__)
-    msg("mult_cpx_vector not implemented for __arm__ nor __aarch64__");
-#endif
     x1_128++;
     x2_128++;
     y_128++;
   }
-  _mm_empty();
-  _m_empty();
+  simde_mm_empty();
+  simde_m_empty();
   return(0);
 }
diff --git a/openair1/PHY/TOOLS/oai_dfts.c b/openair1/PHY/TOOLS/oai_dfts.c
index 47318550bec59205d1b956c64d39dcc3f9892523..9bd652abe77f9155213d66285eb1ecb2dcc88b18 100644
--- a/openair1/PHY/TOOLS/oai_dfts.c
+++ b/openair1/PHY/TOOLS/oai_dfts.c
@@ -18,7 +18,8 @@
  * For more information about the OpenAirInterface (OAI) Software Alliance:
  *      contact@openairinterface.org
  */
- 
+
+#if defined(__x86_64__) || defined(__i386__)
 #include <stdio.h>
 #include <stdlib.h>
 #include <string.h>
@@ -63,59 +64,75 @@ const static int16_t conjugatedft[32] __attribute__((aligned(32))) = {-1,1,-1,1,
 
 const static int16_t reflip[32]  __attribute__((aligned(32))) = {1,-1,1,-1,1,-1,1,-1,1,-1,1,-1,1,-1,1,-1};
 
-
-
-
-
-
-#if defined(__x86_64__) || defined(__i386__)
-static inline void cmac(__m128i a,__m128i b, __m128i *re32, __m128i *im32) __attribute__((always_inline));
-static inline void cmac(__m128i a,__m128i b, __m128i *re32, __m128i *im32)
-{
-
-  __m128i cmac_tmp,cmac_tmp_re32,cmac_tmp_im32;
-
-  cmac_tmp    = _mm_sign_epi16(b,*(__m128i*)reflip);
-  cmac_tmp_re32  = _mm_madd_epi16(a,cmac_tmp);
-
- 
-  //  cmac_tmp    = _mm_shufflelo_epi16(b,_MM_SHUFFLE(2,3,0,1));
-  //  cmac_tmp    = _mm_shufflehi_epi16(cmac_tmp,_MM_SHUFFLE(2,3,0,1));
-  cmac_tmp = _mm_shuffle_epi8(b,_mm_set_epi8(13,12,15,14,9,8,11,10,5,4,7,6,1,0,3,2));
-  cmac_tmp_im32  = _mm_madd_epi16(cmac_tmp,a);
-
-  *re32 = _mm_add_epi32(*re32,cmac_tmp_re32);
-  *im32 = _mm_add_epi32(*im32,cmac_tmp_im32);
-}
-
-static inline void cmacc(__m128i a,__m128i b, __m128i *re32, __m128i *im32) __attribute__((always_inline));
-static inline void cmacc(__m128i a,__m128i b, __m128i *re32, __m128i *im32)
-{
-
-  __m128i cmac_tmp,cmac_tmp_re32,cmac_tmp_im32;
-
-
-  cmac_tmp_re32  = _mm_madd_epi16(a,b);
-
-
-  cmac_tmp    = _mm_sign_epi16(b,*(__m128i*)reflip);
-  //  cmac_tmp    = _mm_shufflelo_epi16(b,_MM_SHUFFLE(2,3,0,1));
-  //  cmac_tmp    = _mm_shufflehi_epi16(cmac_tmp,_MM_SHUFFLE(2,3,0,1));
-  cmac_tmp = _mm_shuffle_epi8(cmac_tmp,_mm_set_epi8(13,12,15,14,9,8,11,10,5,4,7,6,1,0,3,2));
-  cmac_tmp_im32  = _mm_madd_epi16(cmac_tmp,a);
-
-  *re32 = _mm_add_epi32(*re32,cmac_tmp_re32);
-  *im32 = _mm_add_epi32(*im32,cmac_tmp_im32);
-}
-
-static inline void cmac_256(__m256i a,__m256i b, __m256i *re32, __m256i *im32) __attribute__((always_inline));
-static inline void cmac_256(__m256i a,__m256i b, __m256i *re32, __m256i *im32)
-{
-
-  __m256i cmac_tmp,cmac_tmp_re32,cmac_tmp_im32;
-  __m256i imshuffle = simde_mm256_set_epi8(29,28,31,30,25,24,27,26,21,20,23,22,17,16,19,18,13,12,15,14,9,8,11,10,5,4,7,6,1,0,3,2);
-
-  cmac_tmp       = simde_mm256_sign_epi16(b,*(__m256i*)reflip);
+static inline void cmac(simde__m128i a, simde__m128i b, simde__m128i *re32, simde__m128i *im32) __attribute__((always_inline));
+static inline void cmac(simde__m128i a, simde__m128i b, simde__m128i *re32, simde__m128i *im32)
+{
+  simde__m128i cmac_tmp, cmac_tmp_re32, cmac_tmp_im32;
+
+  cmac_tmp = simde_mm_sign_epi16(b, *(simde__m128i *)reflip);
+  cmac_tmp_re32 = simde_mm_madd_epi16(a, cmac_tmp);
+
+  //  cmac_tmp    = simde_mm_shufflelo_epi16(b, SIMDE_MM_SHUFFLE(2,3,0,1));
+  //  cmac_tmp    = simde_mm_shufflehi_epi16(cmac_tmp, SIMDE_MM_SHUFFLE(2,3,0,1));
+  cmac_tmp = simde_mm_shuffle_epi8(b, simde_mm_set_epi8(13, 12, 15, 14, 9, 8, 11, 10, 5, 4, 7, 6, 1, 0, 3, 2));
+  cmac_tmp_im32 = simde_mm_madd_epi16(cmac_tmp, a);
+
+  *re32 = simde_mm_add_epi32(*re32, cmac_tmp_re32);
+  *im32 = simde_mm_add_epi32(*im32, cmac_tmp_im32);
+}
+
+static inline void cmacc(simde__m128i a, simde__m128i b, simde__m128i *re32, simde__m128i *im32) __attribute__((always_inline));
+static inline void cmacc(simde__m128i a, simde__m128i b, simde__m128i *re32, simde__m128i *im32)
+{
+  simde__m128i cmac_tmp, cmac_tmp_re32, cmac_tmp_im32;
+
+  cmac_tmp_re32 = simde_mm_madd_epi16(a, b);
+
+  cmac_tmp = simde_mm_sign_epi16(b, *(simde__m128i *)reflip);
+  //  cmac_tmp    = simde_mm_shufflelo_epi16(b, SIMDE_MM_SHUFFLE(2,3,0,1));
+  //  cmac_tmp    = simde_mm_shufflehi_epi16(cmac_tmp, SIMDE_MM_SHUFFLE(2,3,0,1));
+  cmac_tmp = simde_mm_shuffle_epi8(cmac_tmp, simde_mm_set_epi8(13, 12, 15, 14, 9, 8, 11, 10, 5, 4, 7, 6, 1, 0, 3, 2));
+  cmac_tmp_im32 = simde_mm_madd_epi16(cmac_tmp, a);
+
+  *re32 = simde_mm_add_epi32(*re32, cmac_tmp_re32);
+  *im32 = simde_mm_add_epi32(*im32, cmac_tmp_im32);
+}
+
+static inline void cmac_256(simde__m256i a, simde__m256i b, simde__m256i *re32, simde__m256i *im32) __attribute__((always_inline));
+static inline void cmac_256(simde__m256i a, simde__m256i b, simde__m256i *re32, simde__m256i *im32)
+{
+  simde__m256i cmac_tmp, cmac_tmp_re32, cmac_tmp_im32;
+  simde__m256i imshuffle = simde_mm256_set_epi8(29,
+                                                28,
+                                                31,
+                                                30,
+                                                25,
+                                                24,
+                                                27,
+                                                26,
+                                                21,
+                                                20,
+                                                23,
+                                                22,
+                                                17,
+                                                16,
+                                                19,
+                                                18,
+                                                13,
+                                                12,
+                                                15,
+                                                14,
+                                                9,
+                                                8,
+                                                11,
+                                                10,
+                                                5,
+                                                4,
+                                                7,
+                                                6,
+                                                1,
+                                                0,
+                                                3,
+                                                2);
+
+  cmac_tmp = simde_mm256_sign_epi16(b, *(simde__m256i *)reflip);
   cmac_tmp_re32  = simde_mm256_madd_epi16(a,cmac_tmp);
 
   cmac_tmp       = simde_mm256_shuffle_epi8(b,imshuffle);
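
Per complex 16-bit lane, cmac() accumulates the full product a*b into the 32-bit re/im accumulators, while cmacc() accumulates a*conj(b): sign_epi16 with reflip feeds the real-part madd, and the byte shuffle swaps re/im for the imaginary-part madd. Scalar view of one lane (sketch):

    *re32 += (int32_t)a_re * b_re - (int32_t)a_im * b_im; // cmac real
    *im32 += (int32_t)a_re * b_im + (int32_t)a_im * b_re; // cmac imag
    // cmacc: re += a_re*b_re + a_im*b_im;  im += a_im*b_re - a_re*b_im;
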
@@ -124,104 +141,174 @@ static inline void cmac_256(__m256i a,__m256i b, __m256i *re32, __m256i *im32)
   *re32 = simde_mm256_add_epi32(*re32,cmac_tmp_re32);
   *im32 = simde_mm256_add_epi32(*im32,cmac_tmp_im32);
 }
-#if 0
-static inline void cmacc_256(__m256i a,__m256i b, __m256i *re32, __m256i *im32) __attribute__((always_inline));
-static inline void cmacc_256(__m256i a,__m256i b, __m256i *re32, __m256i *im32)
-{
 
-  __m256i cmac_tmp,cmac_tmp_re32,cmac_tmp_im32;
-  __m256i imshuffle = simde_mm256_set_epi8(29,28,31,30,25,24,27,26,21,20,23,22,17,16,19,18,13,12,15,14,9,8,11,10,5,4,7,6,1,0,3,2);
+static inline void cmacc_256(simde__m256i a, simde__m256i b, simde__m256i *re32, simde__m256i *im32) __attribute__((always_inline));
+static inline void cmacc_256(simde__m256i a, simde__m256i b, simde__m256i *re32, simde__m256i *im32)
+{
+  simde__m256i cmac_tmp, cmac_tmp_re32, cmac_tmp_im32;
+  simde__m256i imshuffle = simde_mm256_set_epi8(29,
+                                                28,
+                                                31,
+                                                30,
+                                                25,
+                                                24,
+                                                27,
+                                                26,
+                                                21,
+                                                20,
+                                                23,
+                                                22,
+                                                17,
+                                                16,
+                                                19,
+                                                18,
+                                                13,
+                                                12,
+                                                15,
+                                                14,
+                                                9,
+                                                8,
+                                                11,
+                                                10,
+                                                5,
+                                                4,
+                                                7,
+                                                6,
+                                                1,
+                                                0,
+                                                3,
+                                                2);
 
   cmac_tmp_re32   = simde_mm256_madd_epi16(a,b);
 
-
-  cmac_tmp        = simde_mm256_sign_epi16(b,*(__m256i*)reflip);
-  cmac_tmp        = simde_mm256_shuffle_epi8(b,imshuffle);
+  cmac_tmp = simde_mm256_sign_epi16(b, *(simde__m256i *)reflip);
+  cmac_tmp = simde_mm256_shuffle_epi8(cmac_tmp, imshuffle); // shuffle the sign-flipped copy, as cmacc() does
   cmac_tmp_im32   = simde_mm256_madd_epi16(cmac_tmp,a);
 
   *re32 = simde_mm256_add_epi32(*re32,cmac_tmp_re32);
   *im32 = simde_mm256_add_epi32(*im32,cmac_tmp_im32);
 }
-#endif
-static inline void cmult(__m128i a,__m128i b, __m128i *re32, __m128i *im32) __attribute__((always_inline));
-
-static inline void cmult(__m128i a,__m128i b, __m128i *re32, __m128i *im32)
-{
-
-  register __m128i mmtmpb;
-
-  mmtmpb    = _mm_sign_epi16(b,*(__m128i*)reflip);
-  *re32     = _mm_madd_epi16(a,mmtmpb);
-  //  mmtmpb    = _mm_shufflelo_epi16(b,_MM_SHUFFLE(2,3,0,1));
-  //  mmtmpb    = _mm_shufflehi_epi16(mmtmpb,_MM_SHUFFLE(2,3,0,1));
-  mmtmpb        = _mm_shuffle_epi8(b,_mm_set_epi8(13,12,15,14,9,8,11,10,5,4,7,6,1,0,3,2));
-  *im32  = _mm_madd_epi16(a,mmtmpb);
-
-}
-
-static inline void cmult_256(__m256i a,__m256i b, __m256i *re32, __m256i *im32) __attribute__((always_inline));
 
-static inline void cmult_256(__m256i a,__m256i b, __m256i *re32, __m256i *im32)
-{
-
-  register __m256i mmtmpb;
-  __m256i const perm_mask = simde_mm256_set_epi8(29,28,31,30,25,24,27,26,21,20,23,22,17,16,19,18,13,12,15,14,9,8,11,10,5,4,7,6,1,0,3,2);
-
-  mmtmpb    = simde_mm256_sign_epi16(b,*(__m256i*)reflip);
+static inline void cmult(simde__m128i a, simde__m128i b, simde__m128i *re32, simde__m128i *im32) __attribute__((always_inline));
+static inline void cmult(simde__m128i a, simde__m128i b, simde__m128i *re32, simde__m128i *im32)
+{
+  register simde__m128i mmtmpb;
+
+  mmtmpb = simde_mm_sign_epi16(b, *(simde__m128i *)reflip);
+  *re32 = simde_mm_madd_epi16(a, mmtmpb);
+  //  mmtmpb    = simde_mm_shufflelo_epi16(b, SIMDE_MM_SHUFFLE(2,3,0,1));
+  //  mmtmpb    = simde_mm_shufflehi_epi16(mmtmpb, SIMDE_MM_SHUFFLE(2,3,0,1));
+  mmtmpb = simde_mm_shuffle_epi8(b, simde_mm_set_epi8(13, 12, 15, 14, 9, 8, 11, 10, 5, 4, 7, 6, 1, 0, 3, 2));
+  *im32 = simde_mm_madd_epi16(a, mmtmpb);
+}
+
+
+static inline void cmult_256(simde__m256i a, simde__m256i b, simde__m256i *re32, simde__m256i *im32) __attribute__((always_inline));
+static inline void cmult_256(simde__m256i a, simde__m256i b, simde__m256i *re32, simde__m256i *im32)
+{
+  register simde__m256i mmtmpb;
+  simde__m256i const perm_mask = simde_mm256_set_epi8(29,
+                                                      28,
+                                                      31,
+                                                      30,
+                                                      25,
+                                                      24,
+                                                      27,
+                                                      26,
+                                                      21,
+                                                      20,
+                                                      23,
+                                                      22,
+                                                      17,
+                                                      16,
+                                                      19,
+                                                      18,
+                                                      13,
+                                                      12,
+                                                      15,
+                                                      14,
+                                                      9,
+                                                      8,
+                                                      11,
+                                                      10,
+                                                      5,
+                                                      4,
+                                                      7,
+                                                      6,
+                                                      1,
+                                                      0,
+                                                      3,
+                                                      2);
+
+  mmtmpb = simde_mm256_sign_epi16(b, *(simde__m256i *)reflip);
   *re32     = simde_mm256_madd_epi16(a,mmtmpb);
   mmtmpb    = simde_mm256_shuffle_epi8(b,perm_mask);
-  *im32     = simde_mm256_madd_epi16(a,mmtmpb);
-
-}
-
-static inline void cmultc(__m128i a,__m128i b, __m128i *re32, __m128i *im32) __attribute__((always_inline));
-
-static inline void cmultc(__m128i a,__m128i b, __m128i *re32, __m128i *im32)
-{
-
-  register __m128i mmtmpb;
-
-  *re32     = _mm_madd_epi16(a,b);
-  mmtmpb    = _mm_sign_epi16(b,*(__m128i*)reflip);
-  mmtmpb    = _mm_shuffle_epi8(mmtmpb,_mm_set_epi8(13,12,15,14,9,8,11,10,5,4,7,6,1,0,3,2));
-  *im32  = _mm_madd_epi16(a,mmtmpb);
-
-}
-
-static inline void cmultc_256(__m256i a,__m256i b, __m256i *re32, __m256i *im32) __attribute__((always_inline));
-
-static inline void cmultc_256(__m256i a,__m256i b, __m256i *re32, __m256i *im32)
-{
-
-  register __m256i mmtmpb;
-  __m256i const perm_mask = simde_mm256_set_epi8(29,28,31,30,25,24,27,26,21,20,23,22,17,16,19,18,13,12,15,14,9,8,11,10,5,4,7,6,1,0,3,2);
+  *im32 = simde_mm256_madd_epi16(a, mmtmpb);
+}
+
+static inline void cmultc(simde__m128i a, simde__m128i b, simde__m128i *re32, simde__m128i *im32) __attribute__((always_inline));
+static inline void cmultc(simde__m128i a, simde__m128i b, simde__m128i *re32, simde__m128i *im32)
+{
+  register simde__m128i mmtmpb;
+
+  *re32 = simde_mm_madd_epi16(a, b);
+  mmtmpb = simde_mm_sign_epi16(b, *(simde__m128i *)reflip);
+  mmtmpb = simde_mm_shuffle_epi8(mmtmpb, simde_mm_set_epi8(13, 12, 15, 14, 9, 8, 11, 10, 5, 4, 7, 6, 1, 0, 3, 2));
+  *im32 = simde_mm_madd_epi16(a, mmtmpb);
+}
+
+static inline void cmultc_256(simde__m256i a, simde__m256i b, simde__m256i *re32, simde__m256i *im32) __attribute__((always_inline));
+static inline void cmultc_256(simde__m256i a, simde__m256i b, simde__m256i *re32, simde__m256i *im32)
+{
+  register simde__m256i mmtmpb;
+  simde__m256i const perm_mask = simde_mm256_set_epi8(29,
+                                                      28,
+                                                      31,
+                                                      30,
+                                                      25,
+                                                      24,
+                                                      27,
+                                                      26,
+                                                      21,
+                                                      20,
+                                                      23,
+                                                      22,
+                                                      17,
+                                                      16,
+                                                      19,
+                                                      18,
+                                                      13,
+                                                      12,
+                                                      15,
+                                                      14,
+                                                      9,
+                                                      8,
+                                                      11,
+                                                      10,
+                                                      5,
+                                                      4,
+                                                      7,
+                                                      6,
+                                                      1,
+                                                      0,
+                                                      3,
+                                                      2);
 
   *re32     = simde_mm256_madd_epi16(a,b);
-  mmtmpb    = simde_mm256_sign_epi16(b,*(__m256i*)reflip);
+  mmtmpb = simde_mm256_sign_epi16(b, *(simde__m256i *)reflip);
   mmtmpb    = simde_mm256_shuffle_epi8(mmtmpb,perm_mask);
-  *im32     = simde_mm256_madd_epi16(a,mmtmpb);
-
+  *im32 = simde_mm256_madd_epi16(a, mmtmpb);
 }
 
-static inline __m128i cpack(__m128i xre,__m128i xim) __attribute__((always_inline));
 
-static inline __m128i cpack(__m128i xre,__m128i xim)
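+// cpack: interleave 32-bit re/im lanes, shift right by 15 and pack with
+// saturation back to Q15 complex samples.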
+static inline simde__m128i cpack(simde__m128i xre, simde__m128i xim) __attribute__((always_inline));
+
+static inline simde__m128i cpack(simde__m128i xre, simde__m128i xim)
 {
+  register simde__m128i cpack_tmp1, cpack_tmp2;
 
-  register __m128i cpack_tmp1,cpack_tmp2;
-
-  cpack_tmp1 = _mm_unpacklo_epi32(xre,xim);
-  cpack_tmp2 = _mm_unpackhi_epi32(xre,xim);
-  return(_mm_packs_epi32(_mm_srai_epi32(cpack_tmp1,15),_mm_srai_epi32(cpack_tmp2,15)));
-
+  cpack_tmp1 = simde_mm_unpacklo_epi32(xre, xim);
+  cpack_tmp2 = simde_mm_unpackhi_epi32(xre, xim);
+  return (simde_mm_packs_epi32(simde_mm_srai_epi32(cpack_tmp1, 15), simde_mm_srai_epi32(cpack_tmp2, 15)));
 }
 
-static inline __m256i cpack_256(__m256i xre,__m256i xim) __attribute__((always_inline));
 
-static inline __m256i cpack_256(__m256i xre,__m256i xim)
+static inline simde__m256i cpack_256(simde__m256i xre, simde__m256i xim) __attribute__((always_inline));
+
+static inline simde__m256i cpack_256(simde__m256i xre, simde__m256i xim)
 {
-
-  register __m256i cpack_tmp1,cpack_tmp2;
+  register simde__m256i cpack_tmp1, cpack_tmp2;
 
   cpack_tmp1 = simde_mm256_unpacklo_epi32(xre,xim);
   cpack_tmp2 = simde_mm256_unpackhi_epi32(xre,xim);
@@ -229,74 +316,56 @@ static inline __m256i cpack_256(__m256i xre,__m256i xim)
 
 }
 
-static inline void packed_cmult(__m128i a,__m128i b, __m128i *c) __attribute__((always_inline));
-
-static inline void packed_cmult(__m128i a,__m128i b, __m128i *c)
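+// packed_cmult: full Q15 complex multiply, i.e. cmult followed by cpack
+// (the _256 variants below follow the same pattern).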
+static inline void packed_cmult(simde__m128i a, simde__m128i b, simde__m128i *c) __attribute__((always_inline));
+
+static inline void packed_cmult(simde__m128i a, simde__m128i b, simde__m128i *c)
 {
-
-  __m128i cre,cim;
+  simde__m128i cre, cim;
   cmult(a,b,&cre,&cim);
   *c = cpack(cre,cim);
 
 }
 
-static inline void packed_cmult_256(__m256i a,__m256i b, __m256i *c) __attribute__((always_inline));
-
-static inline void packed_cmult_256(__m256i a,__m256i b, __m256i *c)
+static inline void packed_cmult_256(simde__m256i a, simde__m256i b, simde__m256i *c) __attribute__((always_inline));
+
+static inline void packed_cmult_256(simde__m256i a, simde__m256i b, simde__m256i *c)
 {
-
-  __m256i cre,cim;
+  simde__m256i cre, cim;
   cmult_256(a,b,&cre,&cim);
   *c = cpack_256(cre,cim);
 
 }
 
-static inline void packed_cmultc(__m128i a,__m128i b, __m128i *c) __attribute__((always_inline));
-
-static inline void packed_cmultc(__m128i a,__m128i b, __m128i *c)
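+// packed_cmultc: conjugate variant, cmultc followed by cpack.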
+static inline void packed_cmultc(simde__m128i a, simde__m128i b, simde__m128i *c) __attribute__((always_inline));
+
+static inline void packed_cmultc(simde__m128i a, simde__m128i b, simde__m128i *c)
 {
-
-  __m128i cre,cim;
+  simde__m128i cre, cim;
 
   cmultc(a,b,&cre,&cim);
   *c = cpack(cre,cim);
 
 }
-#if 0
-static inline void packed_cmultc_256(__m256i a,__m256i b, __m256i *c) __attribute__((always_inline));
 
-static inline void packed_cmultc_256(__m256i a,__m256i b, __m256i *c)
+static inline void packed_cmultc_256(simde__m256i a, simde__m256i b, simde__m256i *c) __attribute__((always_inline));
+
+static inline void packed_cmultc_256(simde__m256i a, simde__m256i b, simde__m256i *c)
 {
-
-  __m256i cre,cim;
+  simde__m256i cre, cim;
 
   cmultc_256(a,b,&cre,&cim);
   *c = cpack_256(cre,cim);
 
 }
-#endif
-static inline __m128i packed_cmult2(__m128i a,__m128i b,__m128i b2) __attribute__((always_inline));
-
-static inline __m128i packed_cmult2(__m128i a,__m128i b,__m128i b2)
-{
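+// packed_cmult2: complex multiply against a twiddle pre-split into two registers
+// (b feeds the real parts, b2 the imaginary parts), avoiding the per-call shuffle.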
 
+static inline simde__m128i packed_cmult2(simde__m128i a, simde__m128i b, simde__m128i b2) __attribute__((always_inline));
 
-  register __m128i cre,cim;
+static inline simde__m128i packed_cmult2(simde__m128i a, simde__m128i b, simde__m128i b2)
+{
+  register simde__m128i cre, cim;
 
-  cre       = _mm_madd_epi16(a,b);
-  cim       = _mm_madd_epi16(a,b2);
+  cre = simde_mm_madd_epi16(a, b);
+  cim = simde_mm_madd_epi16(a, b2);
 
   return(cpack(cre,cim));
 
 }
 
-static inline __m256i packed_cmult2_256(__m256i a,__m256i b,__m256i b2) __attribute__((always_inline));
-
-static inline __m256i packed_cmult2_256(__m256i a,__m256i b,__m256i b2)
+static inline simde__m256i packed_cmult2_256(simde__m256i a, simde__m256i b, simde__m256i b2) __attribute__((always_inline));
+
+static inline simde__m256i packed_cmult2_256(simde__m256i a, simde__m256i b, simde__m256i b2)
 {
-
-
-  register __m256i cre,cim;
+  register simde__m256i cre, cim;
 
   cre       = simde_mm256_madd_epi16(a,b);
   cim       = simde_mm256_madd_epi16(a,b2);
@@ -305,162 +374,6 @@ static inline __m256i packed_cmult2_256(__m256i a,__m256i b,__m256i b2)
 
 }
 
-#elif defined(__arm__) || defined(__aarch64__)
-static inline void cmac(int16x8_t a,int16x8_t b, int32x4_t *re32, int32x4_t *im32) __attribute__((always_inline));
-static inline void cmac(int16x8_t a,int16x8_t b, int32x4_t *re32, int32x4_t *im32)
-{
-
-  
-  int32x4_t ab_re0,ab_re1,ab_im0,ab_im1;
-  int16x8_t bflip = vrev32q_s16(b);
-  int16x8_t bconj = vmulq_s16(b,*(int16x8_t *)reflip);
-
-  ab_re0 = vmull_s16(((int16x4_t*)&a)[0],((int16x4_t*)&bconj)[0]);
-  ab_re1 = vmull_s16(((int16x4_t*)&a)[1],((int16x4_t*)&bconj)[1]);
-  ab_im0 = vmull_s16(((int16x4_t*)&a)[0],((int16x4_t*)&bflip)[0]);
-  ab_im1 = vmull_s16(((int16x4_t*)&a)[1],((int16x4_t*)&bflip)[1]);
-  *re32 = vqaddq_s32(*re32,vcombine_s32(vpadd_s32(((int32x2_t*)&ab_re0)[0],((int32x2_t*)&ab_re0)[1]),
-					vpadd_s32(((int32x2_t*)&ab_re1)[0],((int32x2_t*)&ab_re1)[1])));
-  *im32 = vqaddq_s32(*im32,vcombine_s32(vpadd_s32(((int32x2_t*)&ab_im0)[0],((int32x2_t*)&ab_im0)[1]),
-					vpadd_s32(((int32x2_t*)&ab_im1)[0],((int32x2_t*)&ab_im1)[1])));
-}
-
-static inline void cmacc(int16x8_t a,int16x8_t b, int32x4_t *re32, int32x4_t *im32) __attribute__((always_inline));
-static inline void cmacc(int16x8_t a,int16x8_t b, int32x4_t *re32, int32x4_t *im32)
-{
-  int32x4_t ab_re0,ab_re1,ab_im0,ab_im1;
-  int16x8_t bconj = vmulq_s16(b,*(int16x8_t *)reflip);
-  int16x8_t bflip = vrev32q_s16(bconj);
-
-  ab_re0 = vmull_s16(((int16x4_t*)&a)[0],((int16x4_t*)&b)[0]);
-  ab_re1 = vmull_s16(((int16x4_t*)&a)[1],((int16x4_t*)&b)[1]);
-  ab_im0 = vmull_s16(((int16x4_t*)&a)[0],((int16x4_t*)&bflip)[0]);
-  ab_im1 = vmull_s16(((int16x4_t*)&a)[1],((int16x4_t*)&bflip)[1]);
-  *re32 = vqaddq_s32(*re32,vcombine_s32(vpadd_s32(((int32x2_t*)&ab_re0)[0],((int32x2_t*)&ab_re0)[1]),
-					vpadd_s32(((int32x2_t*)&ab_re1)[0],((int32x2_t*)&ab_re1)[1])));
-  *im32 = vqaddq_s32(*im32,vcombine_s32(vpadd_s32(((int32x2_t*)&ab_im0)[0],((int32x2_t*)&ab_im0)[1]),
-					vpadd_s32(((int32x2_t*)&ab_im1)[0],((int32x2_t*)&ab_im1)[1])));
-
-}
-
-static inline void cmult(int16x8_t a,int16x8_t b, int32x4_t *re32, int32x4_t *im32) __attribute__((always_inline));
-static inline void cmult(int16x8_t a,int16x8_t b, int32x4_t *re32, int32x4_t *im32)
-{
-  int32x4_t ab_re0,ab_re1,ab_im0,ab_im1;
-  int16x8_t bflip = vrev32q_s16(b);
-  int16x8_t bconj = vmulq_s16(b,*(int16x8_t *)reflip);
-  int16x4_t al,ah,bcl,bch,bfl,bfh;
-  int32x2_t abr0l,abr0h,abr1l,abr1h,abi0l,abi0h,abi1l,abi1h;
-
-  al  = vget_low_s16(a);      ah = vget_high_s16(a);
-  bcl = vget_low_s16(bconj);  bch = vget_high_s16(bconj);
-  bfl = vget_low_s16(bflip);  bfh = vget_high_s16(bflip);
-
-  ab_re0 = vmull_s16(al,bcl);
-  ab_re1 = vmull_s16(ah,bch);
-  ab_im0 = vmull_s16(al,bfl);
-  ab_im1 = vmull_s16(ah,bfh);
-  abr0l = vget_low_s32(ab_re0); abr0h = vget_high_s32(ab_re0);
-  abr1l = vget_low_s32(ab_re1); abr1h = vget_high_s32(ab_re1);
-  abi0l = vget_low_s32(ab_im0); abi0h = vget_high_s32(ab_im0);
-  abi1l = vget_low_s32(ab_im1); abi1h = vget_high_s32(ab_im1);
-
-  *re32 = vcombine_s32(vpadd_s32(abr0l,abr0h),
-                       vpadd_s32(abr1l,abr1h));
-  *im32 = vcombine_s32(vpadd_s32(abi0l,abi0h),
-                       vpadd_s32(abi1l,abi1h));
-}
-
-static inline void cmultc(int16x8_t a,int16x8_t b, int32x4_t *re32, int32x4_t *im32) __attribute__((always_inline));
-
-static inline void cmultc(int16x8_t a,int16x8_t b, int32x4_t *re32, int32x4_t *im32)
-{
-  int32x4_t ab_re0,ab_re1,ab_im0,ab_im1;
-  int16x8_t bconj = vmulq_s16(b,*(int16x8_t *)reflip);
-  int16x8_t bflip = vrev32q_s16(bconj);
-  int16x4_t al,ah,bl,bh,bfl,bfh; 
-  int32x2_t abr0l,abr0h,abr1l,abr1h,abi0l,abi0h,abi1l,abi1h;
-  al  = vget_low_s16(a);     ah = vget_high_s16(a);
-  bl  = vget_low_s16(b);     bh = vget_high_s16(b);
-  bfl = vget_low_s16(bflip); bfh = vget_high_s16(bflip);
-
-  ab_re0 = vmull_s16(al,bl);
-  ab_re1 = vmull_s16(ah,bh);
-  ab_im0 = vmull_s16(al,bfl);
-  ab_im1 = vmull_s16(ah,bfh);
-
-  abr0l = vget_low_s32(ab_re0); abr0h = vget_high_s32(ab_re0);
-  abr1l = vget_low_s32(ab_re1); abr1h = vget_high_s32(ab_re1);
-  abi0l = vget_low_s32(ab_im0); abi0h = vget_high_s32(ab_im0);
-  abi1l = vget_low_s32(ab_im1); abi1h = vget_high_s32(ab_im1);
-
-  *re32 = vcombine_s32(vpadd_s32(abr0l,abr0h),
-		       vpadd_s32(abr1l,abr1h));
-  *im32 = vcombine_s32(vpadd_s32(abi0l,abi0h),
-		       vpadd_s32(abi1l,abi1h));
-
-}
-
-
-static inline int16x8_t cpack(int32x4_t xre,int32x4_t xim) __attribute__((always_inline));
-
-static inline int16x8_t cpack(int32x4_t xre,int32x4_t xim)
-{
-  int32x4x2_t xtmp;
-
-  xtmp = vzipq_s32(xre,xim);
-  return(vcombine_s16(vqshrn_n_s32(xtmp.val[0],15),vqshrn_n_s32(xtmp.val[1],15)));
-
-}
-
-
-static inline void packed_cmult(int16x8_t a,int16x8_t b, int16x8_t *c) __attribute__((always_inline));
-
-static inline void packed_cmult(int16x8_t a,int16x8_t b, int16x8_t *c)
-{
-
-  int32x4_t cre,cim;
-  cmult(a,b,&cre,&cim);
-  *c = cpack(cre,cim);
-
-}
-
-
-static inline void packed_cmultc(int16x8_t a,int16x8_t b, int16x8_t *c) __attribute__((always_inline));
-
-static inline void packed_cmultc(int16x8_t a,int16x8_t b, int16x8_t *c)
-{
-
-  int32x4_t cre,cim;
-
-  cmultc(a,b,&cre,&cim);
-  *c = cpack(cre,cim);
-
-}
-
-static inline int16x8_t packed_cmult2(int16x8_t a,int16x8_t b,  int16x8_t b2) __attribute__((always_inline));
-
-static inline int16x8_t packed_cmult2(int16x8_t a,int16x8_t b,  int16x8_t b2)
-{
-
-  
-
-  int32x4_t ab_re0,ab_re1,ab_im0,ab_im1,cre,cim;
-  
-  ab_re0 = vmull_s16(((int16x4_t*)&a)[0],((int16x4_t*)&b)[0]);
-  ab_re1 = vmull_s16(((int16x4_t*)&a)[1],((int16x4_t*)&b)[1]);
-  ab_im0 = vmull_s16(((int16x4_t*)&a)[0],((int16x4_t*)&b2)[0]);
-  ab_im1 = vmull_s16(((int16x4_t*)&a)[1],((int16x4_t*)&b2)[1]);
-  cre = vcombine_s32(vpadd_s32(((int32x2_t*)&ab_re0)[0],((int32x2_t*)&ab_re0)[1]),
-		     vpadd_s32(((int32x2_t*)&ab_re1)[0],((int32x2_t*)&ab_re1)[1]));
-  cim = vcombine_s32(vpadd_s32(((int32x2_t*)&ab_im0)[0],((int32x2_t*)&ab_im0)[1]),
-		     vpadd_s32(((int32x2_t*)&ab_im1)[0],((int32x2_t*)&ab_im1)[1]));
-  return(cpack(cre,cim));
-
-}
-
-#endif // defined(__x86_64__) || defined(__i386__)
-
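+// Q15 twiddle constants, replicated across the vector: W0 = 1, W13s/W23s are
+// exp(-2*pi*i*k/3) for k = 1,2 and W15s..W45s exp(-2*pi*i*k/5) for k = 1..4.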
 const static int16_t W0s[16]__attribute__((aligned(32))) = {32767,0,32767,0,32767,0,32767,0,32767,0,32767,0,32767,0,32767,0};
 
 const static int16_t W13s[16]__attribute__((aligned(32))) = {-16384,-28378,-16384,-28378,-16384,-28378,-16384,-28378,-16384,-28378,-16384,-28378,-16384,-28378,-16384,-28378};
@@ -471,32 +384,21 @@ const static int16_t W25s[16]__attribute__((aligned(32))) = {-26509,-19260,-2650
 const static int16_t W35s[16]__attribute__((aligned(32))) = {-26510,19260,-26510,19260,-26510,19260,-26510,19260,-26510,19260,-26510,19260,-26510,19260,-26510,19260};
 const static int16_t W45s[16]__attribute__((aligned(32))) = {10126,31163,10126,31163,10126,31163,10126,31163,10126,31163,10126,31163,10126,31163,10126,31163};
 
-#if defined(__x86_64__) || defined(__i386__)
-const __m128i *W0 = (__m128i *)W0s;
-const __m128i *W13 = (__m128i *)W13s;
-const __m128i *W23 = (__m128i *)W23s;
-const __m128i *W15 = (__m128i *)W15s;
-const __m128i *W25 = (__m128i *)W25s;
-const __m128i *W35 = (__m128i *)W35s;
-const __m128i *W45 = (__m128i *)W45s;
-
-const __m256i *W0_256 =  (__m256i *)W0s;
-const __m256i *W13_256 = (__m256i *)W13s;
-const __m256i *W23_256 = (__m256i *)W23s;
-const __m256i *W15_256 = (__m256i *)W15s;
-const __m256i *W25_256 = (__m256i *)W25s;
-const __m256i *W35_256 = (__m256i *)W35s;
-const __m256i *W45_256 = (__m256i *)W45s;
-
-#elif defined(__arm__) || defined(__aarch64__)
-int16x8_t *W0  = (int16x8_t *)W0s;
-int16x8_t *W13 = (int16x8_t *)W13s;
-int16x8_t *W23 = (int16x8_t *)W23s;
-int16x8_t *W15 = (int16x8_t *)W15s;
-int16x8_t *W25 = (int16x8_t *)W25s;
-int16x8_t *W35 = (int16x8_t *)W35s;
-int16x8_t *W45 = (int16x8_t *)W45s;
-#endif // defined(__x86_64__) || defined(__i386__)
+const simde__m128i *W0 = (simde__m128i *)W0s;
+const simde__m128i *W13 = (simde__m128i *)W13s;
+const simde__m128i *W23 = (simde__m128i *)W23s;
+const simde__m128i *W15 = (simde__m128i *)W15s;
+const simde__m128i *W25 = (simde__m128i *)W25s;
+const simde__m128i *W35 = (simde__m128i *)W35s;
+const simde__m128i *W45 = (simde__m128i *)W45s;
+
+const simde__m256i *W0_256 = (simde__m256i *)W0s;
+const simde__m256i *W13_256 = (simde__m256i *)W13s;
+const simde__m256i *W23_256 = (simde__m256i *)W23s;
+const simde__m256i *W15_256 = (simde__m256i *)W15s;
+const simde__m256i *W25_256 = (simde__m256i *)W25s;
+const simde__m256i *W35_256 = (simde__m256i *)W35s;
+const simde__m256i *W45_256 = (simde__m256i *)W45s;
 
 const static int16_t dft_norm_table[16] = {9459,  //12
 					   6689,//24
@@ -516,41 +418,33 @@ const static int16_t dft_norm_table[16] = {9459,  //12
 					   14654
 }; //sqrt(5) //300
 
-
-#if defined(__x86_64__) || defined(__i386__)
-static inline void bfly2(__m128i *x0, __m128i *x1,__m128i *y0, __m128i *y1,__m128i *tw)__attribute__((always_inline));
-
-static inline void bfly2(__m128i *x0, __m128i *x1,__m128i *y0, __m128i *y1,__m128i *tw)
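+// bfly2: radix-2 butterfly; x0 and x1 are twiddled at 32-bit precision
+// (W0 = unity), then y0/y1 = sum/difference repacked to Q15.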
+static inline void bfly2(simde__m128i *x0, simde__m128i *x1, simde__m128i *y0, simde__m128i *y1, simde__m128i *tw) __attribute__((always_inline));
+
+static inline void bfly2(simde__m128i *x0, simde__m128i *x1, simde__m128i *y0, simde__m128i *y1, simde__m128i *tw)
 {
-
-  __m128i x0r_2,x0i_2,x1r_2,x1i_2,dy0r,dy1r,dy0i,dy1i;
-  __m128i bfly2_tmp1,bfly2_tmp2;
+  simde__m128i x0r_2, x0i_2, x1r_2, x1i_2, dy0r, dy1r, dy0i, dy1i;
+  simde__m128i bfly2_tmp1, bfly2_tmp2;
 
   cmult(*(x0),*(W0),&x0r_2,&x0i_2);
   cmult(*(x1),*(tw),&x1r_2,&x1i_2);
 
-  dy0r = _mm_srai_epi32(_mm_add_epi32(x0r_2,x1r_2),15);
-  dy1r = _mm_srai_epi32(_mm_sub_epi32(x0r_2,x1r_2),15);
-  dy0i = _mm_srai_epi32(_mm_add_epi32(x0i_2,x1i_2),15);
+  dy0r = simde_mm_srai_epi32(simde_mm_add_epi32(x0r_2, x1r_2), 15);
+  dy1r = simde_mm_srai_epi32(simde_mm_sub_epi32(x0r_2, x1r_2), 15);
+  dy0i = simde_mm_srai_epi32(simde_mm_add_epi32(x0i_2, x1i_2), 15);
   //  printf("y0i %d\n",((int16_t *)y0i)[0]);
-  dy1i = _mm_srai_epi32(_mm_sub_epi32(x0i_2,x1i_2),15);
+  dy1i = simde_mm_srai_epi32(simde_mm_sub_epi32(x0i_2, x1i_2), 15);
 
-  bfly2_tmp1 = _mm_unpacklo_epi32(dy0r,dy0i);
-  bfly2_tmp2 = _mm_unpackhi_epi32(dy0r,dy0i);
-  *y0 = _mm_packs_epi32(bfly2_tmp1,bfly2_tmp2);
+  bfly2_tmp1 = simde_mm_unpacklo_epi32(dy0r, dy0i);
+  bfly2_tmp2 = simde_mm_unpackhi_epi32(dy0r, dy0i);
+  *y0 = simde_mm_packs_epi32(bfly2_tmp1, bfly2_tmp2);
 
-  bfly2_tmp1 = _mm_unpacklo_epi32(dy1r,dy1i);
-  bfly2_tmp2 = _mm_unpackhi_epi32(dy1r,dy1i);
-  *y1 = _mm_packs_epi32(bfly2_tmp1,bfly2_tmp2);
+  bfly2_tmp1 = simde_mm_unpacklo_epi32(dy1r, dy1i);
+  bfly2_tmp2 = simde_mm_unpackhi_epi32(dy1r, dy1i);
+  *y1 = simde_mm_packs_epi32(bfly2_tmp1, bfly2_tmp2);
 }
 
-static inline void bfly2_256(__m256i *x0, __m256i *x1,__m256i *y0, __m256i *y1,__m256i *tw)__attribute__((always_inline));
-
-static inline void bfly2_256(__m256i *x0, __m256i *x1,__m256i *y0, __m256i *y1,__m256i *tw)
+static inline void bfly2_256(simde__m256i *x0, simde__m256i *x1, simde__m256i *y0, simde__m256i *y1, simde__m256i *tw) __attribute__((always_inline));
+
+static inline void bfly2_256(simde__m256i *x0, simde__m256i *x1, simde__m256i *y0, simde__m256i *y1, simde__m256i *tw)
 {
-
-  __m256i x0r_2,x0i_2,x1r_2,x1i_2,dy0r,dy1r,dy0i,dy1i;
-  __m256i bfly2_tmp1,bfly2_tmp2;
+  simde__m256i x0r_2, x0i_2, x1r_2, x1i_2, dy0r, dy1r, dy0i, dy1i;
+  simde__m256i bfly2_tmp1, bfly2_tmp2;
 
   cmult_256(*(x0),*(W0_256),&x0r_2,&x0i_2);
   cmult_256(*(x1),*(tw),&x1r_2,&x1i_2);
@@ -570,63 +464,21 @@ static inline void bfly2_256(__m256i *x0, __m256i *x1,__m256i *y0, __m256i *y1,_
   *y1 = simde_mm256_packs_epi32(bfly2_tmp1,bfly2_tmp2);
 }
 
-#elif defined(__arm__) || defined(__aarch64__)
-
-static inline void bfly2(int16x8_t *x0, int16x8_t *x1,int16x8_t *y0, int16x8_t *y1,int16x8_t *tw)__attribute__((always_inline));
-
-static inline void bfly2(int16x8_t *x0, int16x8_t *x1,int16x8_t *y0, int16x8_t *y1,int16x8_t *tw)
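+// bfly2_tw1: radix-2 butterfly with unit twiddle -- a plain saturating add/subtract.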
+static inline void bfly2_tw1(simde__m128i *x0, simde__m128i *x1, simde__m128i *y0, simde__m128i *y1) __attribute__((always_inline));
+
+static inline void bfly2_tw1(simde__m128i *x0, simde__m128i *x1, simde__m128i *y0, simde__m128i *y1)
 {
-
-  int32x4_t x0r_2,x0i_2,x1r_2,x1i_2,dy0r,dy1r,dy0i,dy1i;
-
-  cmult(*(x0),*(W0),&x0r_2,&x0i_2);
-  cmult(*(x1),*(tw),&x1r_2,&x1i_2);
-
-  dy0r = vqaddq_s32(x0r_2,x1r_2);
-  dy1r = vqsubq_s32(x0r_2,x1r_2);
-  dy0i = vqaddq_s32(x0i_2,x1i_2);
-  dy1i = vqsubq_s32(x0i_2,x1i_2);
-
-  *y0 = cpack(dy0r,dy0i);
-  *y1 = cpack(dy1r,dy1i);
+  *y0 = simde_mm_adds_epi16(*x0, *x1);
+  *y1 = simde_mm_subs_epi16(*x0, *x1);
 }
 
-
-#endif // defined(__x86_64__) || defined(__i386__)
-
-#if defined(__x86_64__) || defined(__i386__)
-static inline void bfly2_tw1(__m128i *x0, __m128i *x1, __m128i *y0, __m128i *y1)__attribute__((always_inline));
-
-static inline void bfly2_tw1(__m128i *x0, __m128i *x1, __m128i *y0, __m128i *y1)
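+// bfly2_16: 16-bit radix-2 butterfly; x1 is twiddled via packed_cmult2
+// (pre-split twiddles tw/twb), then combined with x0 by saturating add/sub.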
+static inline void bfly2_16(simde__m128i *x0,
+                            simde__m128i *x1,
+                            simde__m128i *y0,
+                            simde__m128i *y1,
+                            simde__m128i *tw,
+                            simde__m128i *twb) __attribute__((always_inline));
+
+static inline void bfly2_16(simde__m128i *x0, simde__m128i *x1, simde__m128i *y0, simde__m128i *y1, simde__m128i *tw, simde__m128i *twb)
 {
-
-  *y0  = _mm_adds_epi16(*x0,*x1);
-  *y1  = _mm_subs_epi16(*x0,*x1);
-
-}
-
-#elif defined(__arm__) || defined(__aarch64__)
-
-static inline void bfly2_tw1(int16x8_t *x0, int16x8_t *x1, int16x8_t *y0, int16x8_t *y1)__attribute__((always_inline));
-
-static inline void bfly2_tw1(int16x8_t *x0, int16x8_t *x1, int16x8_t *y0, int16x8_t *y1)
-{
-
-  *y0  = vqaddq_s16(*x0,*x1);
-  *y1  = vqsubq_s16(*x0,*x1);
-
-}
-#endif // defined(__x86_64__) || defined(__i386__)
- 
-#if defined(__x86_64__) || defined(__i386__)
-#if 0
-static inline void bfly2_16(__m128i *x0, __m128i *x1, __m128i *y0, __m128i *y1, __m128i *tw, __m128i *twb)__attribute__((always_inline));
-
-static inline void bfly2_16(__m128i *x0, __m128i *x1, __m128i *y0, __m128i *y1, __m128i *tw, __m128i *twb)
-{
-
-  //  register __m128i x1t;
-  __m128i x1t;
+  //  register simde__m128i x1t;
+  simde__m128i x1t;
 
   x1t = packed_cmult2(*(x1),*(tw),*(twb));
   /*
@@ -635,19 +487,21 @@ static inline void bfly2_16(__m128i *x0, __m128i *x1, __m128i *y0, __m128i *y1,
   print_shorts("tw",(int16_t*)tw);
   print_shorts("twb",(int16_t*)twb);
   print_shorts("x1t",(int16_t*)&x1t);*/
-  *y0  = _mm_adds_epi16(*x0,x1t);
-  *y1  = _mm_subs_epi16(*x0,x1t);
+  *y0 = simde_mm_adds_epi16(*x0, x1t);
+  *y1 = simde_mm_subs_epi16(*x0, x1t);
   /*  print_shorts("y0",(int16_t*)y0);
       print_shorts("y1",(int16_t*)y1);*/
 }
-#endif
-static inline void bfly2_16_256(__m256i *x0, __m256i *x1, __m256i *y0, __m256i *y1, __m256i *tw, __m256i *twb)__attribute__((always_inline));
 
-static inline void bfly2_16_256(__m256i *x0, __m256i *x1, __m256i *y0, __m256i *y1, __m256i *tw, __m256i *twb)
+static inline void bfly2_16_256(simde__m256i *x0,
+                                simde__m256i *x1,
+                                simde__m256i *y0,
+                                simde__m256i *y1,
+                                simde__m256i *tw,
+                                simde__m256i *twb) __attribute__((always_inline));
+
+static inline void bfly2_16_256(simde__m256i *x0, simde__m256i *x1, simde__m256i *y0, simde__m256i *y1, simde__m256i *tw, simde__m256i *twb)
 {
-
-  //  register __m256i x1t;
-  __m256i x1t;
+  //  register simde__m256i x1t;
+  simde__m256i x1t;
 
   x1t = packed_cmult2_256(*(x1),*(tw),*(twb));
   /*
@@ -663,55 +517,33 @@ static inline void bfly2_16_256(__m256i *x0, __m256i *x1, __m256i *y0, __m256i *
     print_shorts256("y1",(int16_t*)y1);*/
 }
 
-#elif defined(__arm__) || defined(__aarch64__)
-#if 0
-static inline void bfly2_16(int16x8_t *x0, int16x8_t *x1, int16x8_t *y0, int16x8_t *y1, int16x8_t *tw, int16x8_t *twb)__attribute__((always_inline));
-
-static inline void bfly2_16(int16x8_t *x0, int16x8_t *x1, int16x8_t *y0, int16x8_t *y1, int16x8_t *tw, int16x8_t *twb)
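+// ibfly2: inverse radix-2 butterfly -- as bfly2 but with conjugated twiddles (cmultc).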
+static inline void ibfly2(simde__m128i *x0, simde__m128i *x1, simde__m128i *y0, simde__m128i *y1, simde__m128i *tw) __attribute__((always_inline));
+
+static inline void ibfly2(simde__m128i *x0, simde__m128i *x1, simde__m128i *y0, simde__m128i *y1, simde__m128i *tw)
 {
-
-  *y0  = vqaddq_s16(*x0,*x1);
-  *y1  = vqsubq_s16(*x0,*x1);
-
-}
-#endif
-#endif // defined(__x86_64__) || defined(__i386__)
-
-#if defined(__x86_64__) || defined(__i386__)
-#if 0
-static inline void ibfly2(__m128i *x0, __m128i *x1,__m128i *y0, __m128i *y1,__m128i *tw)__attribute__((always_inline));
-
-static inline void ibfly2(__m128i *x0, __m128i *x1,__m128i *y0, __m128i *y1,__m128i *tw)
-{
-
-  __m128i x0r_2,x0i_2,x1r_2,x1i_2,dy0r,dy1r,dy0i,dy1i;
-  __m128i bfly2_tmp1,bfly2_tmp2;
+  simde__m128i x0r_2, x0i_2, x1r_2, x1i_2, dy0r, dy1r, dy0i, dy1i;
+  simde__m128i bfly2_tmp1, bfly2_tmp2;
 
   cmultc(*(x0),*(W0),&x0r_2,&x0i_2);
   cmultc(*(x1),*(tw),&x1r_2,&x1i_2);
 
-  dy0r = _mm_srai_epi32(_mm_add_epi32(x0r_2,x1r_2),15);
-  dy1r = _mm_srai_epi32(_mm_sub_epi32(x0r_2,x1r_2),15);
-  dy0i = _mm_srai_epi32(_mm_add_epi32(x0i_2,x1i_2),15);
+  dy0r = simde_mm_srai_epi32(simde_mm_add_epi32(x0r_2, x1r_2), 15);
+  dy1r = simde_mm_srai_epi32(simde_mm_sub_epi32(x0r_2, x1r_2), 15);
+  dy0i = simde_mm_srai_epi32(simde_mm_add_epi32(x0i_2, x1i_2), 15);
   //  printf("y0i %d\n",((int16_t *)y0i)[0]);
-  dy1i = _mm_srai_epi32(_mm_sub_epi32(x0i_2,x1i_2),15);
+  dy1i = simde_mm_srai_epi32(simde_mm_sub_epi32(x0i_2, x1i_2), 15);
 
-  bfly2_tmp1 = _mm_unpacklo_epi32(dy0r,dy0i);
-  bfly2_tmp2 = _mm_unpackhi_epi32(dy0r,dy0i);
-  *y0 = _mm_packs_epi32(bfly2_tmp1,bfly2_tmp2);
+  bfly2_tmp1 = simde_mm_unpacklo_epi32(dy0r, dy0i);
+  bfly2_tmp2 = simde_mm_unpackhi_epi32(dy0r, dy0i);
+  *y0 = simde_mm_packs_epi32(bfly2_tmp1, bfly2_tmp2);
 
-  bfly2_tmp1 = _mm_unpacklo_epi32(dy1r,dy1i);
-  bfly2_tmp2 = _mm_unpackhi_epi32(dy1r,dy1i);
-  *y1 = _mm_packs_epi32(bfly2_tmp1,bfly2_tmp2);
+  bfly2_tmp1 = simde_mm_unpacklo_epi32(dy1r, dy1i);
+  bfly2_tmp2 = simde_mm_unpackhi_epi32(dy1r, dy1i);
+  *y1 = simde_mm_packs_epi32(bfly2_tmp1, bfly2_tmp2);
 }
-#endif
-static inline void ibfly2_256(__m256i *x0, __m256i *x1,__m256i *y0, __m256i *y1,__m256i *tw)__attribute__((always_inline));
 
-static inline void ibfly2_256(__m256i *x0, __m256i *x1,__m256i *y0, __m256i *y1,__m256i *tw)
+static inline void ibfly2_256(simde__m256i *x0, simde__m256i *x1, simde__m256i *y0, simde__m256i *y1, simde__m256i *tw) __attribute__((always_inline));
+
+static inline void ibfly2_256(simde__m256i *x0, simde__m256i *x1, simde__m256i *y0, simde__m256i *y1, simde__m256i *tw)
 {
-
-  __m256i x0r_2,x0i_2,x1r_2,x1i_2,dy0r,dy1r,dy0i,dy1i;
-  __m256i bfly2_tmp1,bfly2_tmp2;
+  simde__m256i x0r_2, x0i_2, x1r_2, x1i_2, dy0r, dy1r, dy0i, dy1i;
+  simde__m256i bfly2_tmp1, bfly2_tmp2;
 
   cmultc_256(*(x0),*(W0_256),&x0r_2,&x0i_2);
   cmultc_256(*(x1),*(tw),&x1r_2,&x1i_2);
@@ -731,67 +563,43 @@ static inline void ibfly2_256(__m256i *x0, __m256i *x1,__m256i *y0, __m256i *y1,
   *y1 = simde_mm256_packs_epi32(bfly2_tmp1,bfly2_tmp2);
 }
 
-#elif defined(__arm__) || defined(__aarch64__)
-#if 0
-static inline void ibfly2(int16x8_t *x0, int16x8_t *x1,int16x8_t *y0, int16x8_t *y1,int16x8_t *tw)
-{
-
-  int32x4_t x0r_2,x0i_2,x1r_2,x1i_2,dy0r,dy1r,dy0i,dy1i;
-
-  cmultc(*(x0),*(W0),&x0r_2,&x0i_2);
-  cmultc(*(x1),*(tw),&x1r_2,&x1i_2);
-
-  dy0r = vqaddq_s32(x0r_2,x1r_2);
-  dy1r = vqsubq_s32(x0r_2,x1r_2);
-  dy0i = vqaddq_s32(x0i_2,x1i_2);
-  dy1i = vqsubq_s32(x0i_2,x1i_2);
-
-  *y0 = cpack(dy0r,dy0i);
-  *y1 = cpack(dy1r,dy1i);
-
-}
-#endif
-#endif // defined(__x86_64__) || defined(__i386__)
-
 
 // This is the radix-3 butterfly (fft)
 
-#if defined(__x86_64__) || defined(__i386__)
-
-static inline void bfly3(__m128i *x0,__m128i *x1,__m128i *x2,
-                         __m128i *y0,__m128i *y1,__m128i *y2,
-                         __m128i *tw1,__m128i *tw2) __attribute__((always_inline));
-
-static inline void bfly3(__m128i *x0,__m128i *x1,__m128i *x2,
-                         __m128i *y0,__m128i *y1,__m128i *y2,
-                         __m128i *tw1,__m128i *tw2)
+static inline void bfly3(simde__m128i *x0,
+                         simde__m128i *x1,
+                         simde__m128i *x2,
+                         simde__m128i *y0,
+                         simde__m128i *y1,
+                         simde__m128i *y2,
+                         simde__m128i *tw1,
+                         simde__m128i *tw2) __attribute__((always_inline));
+
+static inline void bfly3(simde__m128i *x0, simde__m128i *x1, simde__m128i *x2, simde__m128i *y0, simde__m128i *y1, simde__m128i *y2, simde__m128i *tw1, simde__m128i *tw2)
 {
-
-  __m128i tmpre,tmpim,x1_2,x2_2;
+  simde__m128i tmpre, tmpim, x1_2, x2_2;
 
   packed_cmult(*(x1),*(tw1),&x1_2);
   packed_cmult(*(x2),*(tw2),&x2_2);
-  *(y0)  = _mm_adds_epi16(*(x0),_mm_adds_epi16(x1_2,x2_2));
+  *(y0) = simde_mm_adds_epi16(*(x0), simde_mm_adds_epi16(x1_2, x2_2));
   cmult(x1_2,*(W13),&tmpre,&tmpim);
   cmac(x2_2,*(W23),&tmpre,&tmpim);
   *(y1) = cpack(tmpre,tmpim);
-  *(y1) = _mm_adds_epi16(*(x0),*(y1));
+  *(y1) = simde_mm_adds_epi16(*(x0), *(y1));
   cmult(x1_2,*(W23),&tmpre,&tmpim);
   cmac(x2_2,*(W13),&tmpre,&tmpim);
   *(y2) = cpack(tmpre,tmpim);
-  *(y2) = _mm_adds_epi16(*(x0),*(y2));
+  *(y2) = simde_mm_adds_epi16(*(x0), *(y2));
 }
 
-static inline void bfly3_256(__m256i *x0,__m256i *x1,__m256i *x2,
-			     __m256i *y0,__m256i *y1,__m256i *y2,
-			     __m256i *tw1,__m256i *tw2) __attribute__((always_inline));
-
-static inline void bfly3_256(__m256i *x0,__m256i *x1,__m256i *x2,
-			     __m256i *y0,__m256i *y1,__m256i *y2,
-			     __m256i *tw1,__m256i *tw2)
-{ 
-
-  __m256i tmpre,tmpim,x1_2,x2_2;
+static inline void bfly3_256(simde__m256i *x0,
+                             simde__m256i *x1,
+                             simde__m256i *x2,
+                             simde__m256i *y0,
+                             simde__m256i *y1,
+                             simde__m256i *y2,
+                             simde__m256i *tw1,
+                             simde__m256i *tw2) __attribute__((always_inline));
+
+static inline void bfly3_256(simde__m256i *x0, simde__m256i *x1, simde__m256i *x2, simde__m256i *y0, simde__m256i *y1, simde__m256i *y2, simde__m256i *tw1, simde__m256i *tw2)
+{
+  simde__m256i tmpre, tmpim, x1_2, x2_2;
 
   packed_cmult_256(*(x1),*(tw1),&x1_2);
   packed_cmult_256(*(x2),*(tw2),&x2_2);
@@ -806,69 +614,40 @@ static inline void bfly3_256(__m256i *x0,__m256i *x1,__m256i *x2,
   *(y2) = simde_mm256_adds_epi16(*(x0),*(y2));
 }
 
-#elif defined(__arm__) || defined(__aarch64__)
-static inline void bfly3(int16x8_t *x0,int16x8_t *x1,int16x8_t *x2,
-                         int16x8_t *y0,int16x8_t *y1,int16x8_t *y2,
-                         int16x8_t *tw1,int16x8_t *tw2) __attribute__((always_inline));
-
-static inline void bfly3(int16x8_t *x0,int16x8_t *x1,int16x8_t *x2,
-                         int16x8_t *y0,int16x8_t *y1,int16x8_t *y2,
-                         int16x8_t *tw1,int16x8_t *tw2)
-{
-
-  int32x4_t tmpre,tmpim;
-  int16x8_t x1_2,x2_2;
-
-  packed_cmult(*(x1),*(tw1),&x1_2);
-  packed_cmult(*(x2),*(tw2),&x2_2);
-  *(y0)  = vqaddq_s16(*(x0),vqaddq_s16(x1_2,x2_2));
-  cmult(x1_2,*(W13),&tmpre,&tmpim);
-  cmac(x2_2,*(W23),&tmpre,&tmpim);
-  *(y1) = cpack(tmpre,tmpim);
-  *(y1) = vqaddq_s16(*(x0),*(y1));
-  cmult(x1_2,*(W23),&tmpre,&tmpim);
-  cmac(x2_2,*(W13),&tmpre,&tmpim);
-  *(y2) = cpack(tmpre,tmpim);
-  *(y2) = vqaddq_s16(*(x0),*(y2));
-}
-
-#endif // defined(__x86_64__) || defined(__i386__)
-
-#if defined(__x86_64__) || defined(__i386__)
-static inline void ibfly3(__m128i *x0,__m128i *x1,__m128i *x2,
-			  __m128i *y0,__m128i *y1,__m128i *y2,
-			  __m128i *tw1,__m128i *tw2) __attribute__((always_inline));
-
-static inline void ibfly3(__m128i *x0,__m128i *x1,__m128i *x2,
-			  __m128i *y0,__m128i *y1,__m128i *y2,
-			  __m128i *tw1,__m128i *tw2)
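+// ibfly3: inverse radix-3 butterfly, the conjugate-twiddle (cmultc/cmacc) variant of bfly3.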
+static inline void ibfly3(simde__m128i *x0,
+                          simde__m128i *x1,
+                          simde__m128i *x2,
+                          simde__m128i *y0,
+                          simde__m128i *y1,
+                          simde__m128i *y2,
+                          simde__m128i *tw1,
+                          simde__m128i *tw2) __attribute__((always_inline));
+
+static inline void ibfly3(simde__m128i *x0, simde__m128i *x1, simde__m128i *x2, simde__m128i *y0, simde__m128i *y1, simde__m128i *y2, simde__m128i *tw1, simde__m128i *tw2)
 {
-
-  __m128i tmpre,tmpim,x1_2,x2_2;
+  simde__m128i tmpre, tmpim, x1_2, x2_2;
 
   packed_cmultc(*(x1),*(tw1),&x1_2);
   packed_cmultc(*(x2),*(tw2),&x2_2);
-  *(y0)  = _mm_adds_epi16(*(x0),_mm_adds_epi16(x1_2,x2_2));
+  *(y0) = simde_mm_adds_epi16(*(x0), simde_mm_adds_epi16(x1_2, x2_2));
   cmultc(x1_2,*(W13),&tmpre,&tmpim);
   cmacc(x2_2,*(W23),&tmpre,&tmpim);
   *(y1) = cpack(tmpre,tmpim);
-  *(y1) = _mm_adds_epi16(*(x0),*(y1));
+  *(y1) = simde_mm_adds_epi16(*(x0), *(y1));
   cmultc(x1_2,*(W23),&tmpre,&tmpim);
   cmacc(x2_2,*(W13),&tmpre,&tmpim);
   *(y2) = cpack(tmpre,tmpim);
-  *(y2) = _mm_adds_epi16(*(x0),*(y2));
+  *(y2) = simde_mm_adds_epi16(*(x0), *(y2));
 }
-#if 0
-static inline void ibfly3_256(__m256i *x0,__m256i *x1,__m256i *x2,
-			      __m256i *y0,__m256i *y1,__m256i *y2,
-			      __m256i *tw1,__m256i *tw2) __attribute__((always_inline));
-
-static inline void ibfly3_256(__m256i *x0,__m256i *x1,__m256i *x2,
-			      __m256i *y0,__m256i *y1,__m256i *y2,
-			      __m256i *tw1,__m256i *tw2)
-{ 
 
-  __m256i tmpre,tmpim,x1_2,x2_2;
+static inline void ibfly3_256(simde__m256i *x0,
+                              simde__m256i *x1,
+                              simde__m256i *x2,
+                              simde__m256i *y0,
+                              simde__m256i *y1,
+                              simde__m256i *y2,
+                              simde__m256i *tw1,
+                              simde__m256i *tw2) __attribute__((always_inline));
+
+static inline void ibfly3_256(simde__m256i *x0, simde__m256i *x1, simde__m256i *x2, simde__m256i *y0, simde__m256i *y1, simde__m256i *y2, simde__m256i *tw1, simde__m256i *tw2)
+{
+  simde__m256i tmpre, tmpim, x1_2, x2_2;
 
   packed_cmultc_256(*(x1),*(tw1),&x1_2);
   packed_cmultc_256(*(x2),*(tw2),&x2_2);
@@ -882,63 +661,35 @@ static inline void ibfly3_256(__m256i *x0,__m256i *x1,__m256i *x2,
   *(y2) = cpack_256(tmpre,tmpim);
   *(y2) = simde_mm256_adds_epi16(*(x0),*(y2));
 }
-#endif
-#elif defined(__arm__) || defined(__aarch64__)
-static inline void ibfly3(int16x8_t *x0,int16x8_t *x1,int16x8_t *x2,
-			  int16x8_t *y0,int16x8_t *y1,int16x8_t *y2,
-			  int16x8_t *tw1,int16x8_t *tw2) __attribute__((always_inline));
 
-static inline void ibfly3(int16x8_t *x0,int16x8_t *x1,int16x8_t *x2,
-			  int16x8_t *y0,int16x8_t *y1,int16x8_t *y2,
-			  int16x8_t *tw1,int16x8_t *tw2)
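+// bfly3_tw1: radix-3 butterfly with unit twiddles on the inputs.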
+static inline void bfly3_tw1(simde__m128i *x0,
+                             simde__m128i *x1,
+                             simde__m128i *x2,
+                             simde__m128i *y0,
+                             simde__m128i *y1,
+                             simde__m128i *y2) __attribute__((always_inline));
+
+static inline void bfly3_tw1(simde__m128i *x0, simde__m128i *x1, simde__m128i *x2, simde__m128i *y0, simde__m128i *y1, simde__m128i *y2)
 {
+  simde__m128i tmpre, tmpim;
 
-  int32x4_t tmpre,tmpim;
-  int16x8_t x1_2,x2_2;
-
-  packed_cmultc(*(x1),*(tw1),&x1_2);
-  packed_cmultc(*(x2),*(tw2),&x2_2);
-  *(y0)  = vqaddq_s16(*(x0),vqaddq_s16(x1_2,x2_2));
-  cmultc(x1_2,*(W13),&tmpre,&tmpim);
-  cmacc(x2_2,*(W23),&tmpre,&tmpim);
-  *(y1) = cpack(tmpre,tmpim);
-  *(y1) = vqaddq_s16(*(x0),*(y1));
-  cmultc(x1_2,*(W23),&tmpre,&tmpim);
-  cmacc(x2_2,*(W13),&tmpre,&tmpim);
-  *(y2) = cpack(tmpre,tmpim);
-  *(y2) = vqaddq_s16(*(x0),*(y2));
-}
-#endif // defined(__x86_64__) || defined(__i386__)
-
-#if defined(__x86_64__) || defined(__i386__)
-static inline void bfly3_tw1(__m128i *x0,__m128i *x1,__m128i *x2,
-                             __m128i *y0,__m128i *y1,__m128i *y2) __attribute__((always_inline));
-
-static inline void bfly3_tw1(__m128i *x0,__m128i *x1,__m128i *x2,
-                             __m128i *y0,__m128i *y1,__m128i *y2)
-{
-
-  __m128i tmpre,tmpim;
-
-  *(y0) = _mm_adds_epi16(*(x0),_mm_adds_epi16(*(x1),*(x2)));
+  *(y0) = simde_mm_adds_epi16(*(x0), simde_mm_adds_epi16(*(x1), *(x2)));
   cmult(*(x1),*(W13),&tmpre,&tmpim);
   cmac(*(x2),*(W23),&tmpre,&tmpim);
   *(y1) = cpack(tmpre,tmpim);
-  *(y1) = _mm_adds_epi16(*(x0),*(y1));
+  *(y1) = simde_mm_adds_epi16(*(x0), *(y1));
   cmult(*(x1),*(W23),&tmpre,&tmpim);
   cmac(*(x2),*(W13),&tmpre,&tmpim);
   *(y2) = cpack(tmpre,tmpim);
-  *(y2) = _mm_adds_epi16(*(x0),*(y2));
+  *(y2) = simde_mm_adds_epi16(*(x0), *(y2));
 }
 
-static inline void bfly3_tw1_256(__m256i *x0,__m256i *x1,__m256i *x2,
-				 __m256i *y0,__m256i *y1,__m256i *y2) __attribute__((always_inline));
-
-static inline void bfly3_tw1_256(__m256i *x0,__m256i *x1,__m256i *x2,
-				 __m256i *y0,__m256i *y1,__m256i *y2)
+static inline void bfly3_tw1_256(simde__m256i *x0,
+                                 simde__m256i *x1,
+                                 simde__m256i *x2,
+                                 simde__m256i *y0,
+                                 simde__m256i *y1,
+                                 simde__m256i *y2) __attribute__((always_inline));
+
+static inline void bfly3_tw1_256(simde__m256i *x0, simde__m256i *x1, simde__m256i *x2, simde__m256i *y0, simde__m256i *y1, simde__m256i *y2)
 {
-
-  __m256i tmpre,tmpim;
+  simde__m256i tmpre, tmpim;
 
   *(y0) = simde_mm256_adds_epi16(*(x0),simde_mm256_adds_epi16(*(x1),*(x2)));
   cmult_256(*(x1),*(W13_256),&tmpre,&tmpim);
@@ -951,200 +702,139 @@ static inline void bfly3_tw1_256(__m256i *x0,__m256i *x1,__m256i *x2,
   *(y2) = simde_mm256_adds_epi16(*(x0),*(y2));
 }
 
-#elif defined(__arm__) || defined(__aarch64__)
-static inline void bfly3_tw1(int16x8_t *x0,int16x8_t *x1,int16x8_t *x2,
-                             int16x8_t *y0,int16x8_t *y1,int16x8_t *y2) __attribute__((always_inline));
-
-static inline void bfly3_tw1(int16x8_t *x0,int16x8_t *x1,int16x8_t *x2,
-                             int16x8_t *y0,int16x8_t *y1,int16x8_t *y2)
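+// bfly4: radix-4 butterfly; x1..x3 are twiddled at 32-bit precision and x0 is
+// folded in with a 16-bit add after repacking.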
+static inline void bfly4(simde__m128i *x0,
+                         simde__m128i *x1,
+                         simde__m128i *x2,
+                         simde__m128i *x3,
+                         simde__m128i *y0,
+                         simde__m128i *y1,
+                         simde__m128i *y2,
+                         simde__m128i *y3,
+                         simde__m128i *tw1,
+                         simde__m128i *tw2,
+                         simde__m128i *tw3) __attribute__((always_inline));
+
+static inline void bfly4(simde__m128i *x0, simde__m128i *x1, simde__m128i *x2, simde__m128i *x3, simde__m128i *y0, simde__m128i *y1, simde__m128i *y2, simde__m128i *y3, simde__m128i *tw1, simde__m128i *tw2, simde__m128i *tw3)
 {
-
-  int32x4_t tmpre,tmpim;
-
-  *(y0) = vqaddq_s16(*(x0),vqaddq_s16(*(x1),*(x2)));
-  cmult(*(x1),*(W13),&tmpre,&tmpim);
-  cmac(*(x2),*(W23),&tmpre,&tmpim);
-  *(y1) = cpack(tmpre,tmpim);
-  *(y1) = vqaddq_s16(*(x0),*(y1));
-  cmult(*(x1),*(W23),&tmpre,&tmpim);
-  cmac(*(x2),*(W13),&tmpre,&tmpim);
-  *(y2) = cpack(tmpre,tmpim);
-  *(y2) = vqaddq_s16(*(x0),*(y2));
-
-}
-
-#endif // defined(__x86_64__) || defined(__i386__)
-
-#if defined(__x86_64__) || defined(__i386__)
-static inline void bfly4(__m128i *x0,__m128i *x1,__m128i *x2,__m128i *x3,
-                         __m128i *y0,__m128i *y1,__m128i *y2,__m128i *y3,
-                         __m128i *tw1,__m128i *tw2,__m128i *tw3)__attribute__((always_inline));
-
-static inline void bfly4(__m128i *x0,__m128i *x1,__m128i *x2,__m128i *x3,
-                         __m128i *y0,__m128i *y1,__m128i *y2,__m128i *y3,
-                         __m128i *tw1,__m128i *tw2,__m128i *tw3)
-{
-
-  __m128i x1r_2,x1i_2,x2r_2,x2i_2,x3r_2,x3i_2,dy0r,dy0i,dy1r,dy1i,dy2r,dy2i,dy3r,dy3i;
+  simde__m128i x1r_2, x1i_2, x2r_2, x2i_2, x3r_2, x3i_2, dy0r, dy0i, dy1r, dy1i, dy2r, dy2i, dy3r, dy3i;
 
   //  cmult(*(x0),*(W0),&x0r_2,&x0i_2);
   cmult(*(x1),*(tw1),&x1r_2,&x1i_2);
   cmult(*(x2),*(tw2),&x2r_2,&x2i_2);
   cmult(*(x3),*(tw3),&x3r_2,&x3i_2);
-  //  dy0r = _mm_add_epi32(x0r_2,_mm_add_epi32(x1r_2,_mm_add_epi32(x2r_2,x3r_2)));
-  //  dy0i = _mm_add_epi32(x0i_2,_mm_add_epi32(x1i_2,_mm_add_epi32(x2i_2,x3i_2)));
+  //  dy0r = simde_mm_add_epi32(x0r_2,simde_mm_add_epi32(x1r_2,simde_mm_add_epi32(x2r_2,x3r_2)));
+  //  dy0i = simde_mm_add_epi32(x0i_2,simde_mm_add_epi32(x1i_2,simde_mm_add_epi32(x2i_2,x3i_2)));
   //  *(y0)  = cpack(dy0r,dy0i);
-  dy0r = _mm_add_epi32(x1r_2,_mm_add_epi32(x2r_2,x3r_2));
-  dy0i = _mm_add_epi32(x1i_2,_mm_add_epi32(x2i_2,x3i_2));
-  *(y0)  = _mm_add_epi16(*(x0),cpack(dy0r,dy0i));
-  //  dy1r = _mm_add_epi32(x0r_2,_mm_sub_epi32(x1i_2,_mm_add_epi32(x2r_2,x3i_2)));
-  //  dy1i = _mm_sub_epi32(x0i_2,_mm_add_epi32(x1r_2,_mm_sub_epi32(x2i_2,x3r_2)));
+  dy0r = simde_mm_add_epi32(x1r_2, simde_mm_add_epi32(x2r_2, x3r_2));
+  dy0i = simde_mm_add_epi32(x1i_2, simde_mm_add_epi32(x2i_2, x3i_2));
+  *(y0) = simde_mm_add_epi16(*(x0), cpack(dy0r, dy0i));
+  //  dy1r = simde_mm_add_epi32(x0r_2,simde_mm_sub_epi32(x1i_2,simde_mm_add_epi32(x2r_2,x3i_2)));
+  //  dy1i = simde_mm_sub_epi32(x0i_2,simde_mm_add_epi32(x1r_2,simde_mm_sub_epi32(x2i_2,x3r_2)));
   //  *(y1)  = cpack(dy1r,dy1i);
-  dy1r = _mm_sub_epi32(x1i_2,_mm_add_epi32(x2r_2,x3i_2));
-  dy1i = _mm_sub_epi32(_mm_sub_epi32(x3r_2,x2i_2),x1r_2);
-  *(y1)  = _mm_add_epi16(*(x0),cpack(dy1r,dy1i));
-  //  dy2r = _mm_sub_epi32(x0r_2,_mm_sub_epi32(x1r_2,_mm_sub_epi32(x2r_2,x3r_2)));
-  //  dy2i = _mm_sub_epi32(x0i_2,_mm_sub_epi32(x1i_2,_mm_sub_epi32(x2i_2,x3i_2)));
+  dy1r = simde_mm_sub_epi32(x1i_2, simde_mm_add_epi32(x2r_2, x3i_2));
+  dy1i = simde_mm_sub_epi32(simde_mm_sub_epi32(x3r_2, x2i_2), x1r_2);
+  *(y1) = simde_mm_add_epi16(*(x0), cpack(dy1r, dy1i));
+  //  dy2r = simde_mm_sub_epi32(x0r_2,simde_mm_sub_epi32(x1r_2,simde_mm_sub_epi32(x2r_2,x3r_2)));
+  //  dy2i = simde_mm_sub_epi32(x0i_2,simde_mm_sub_epi32(x1i_2,simde_mm_sub_epi32(x2i_2,x3i_2)));
   //  *(y2)  = cpack(dy2r,dy2i);
-  dy2r = _mm_sub_epi32(_mm_sub_epi32(x2r_2,x3r_2),x1r_2);
-  dy2i = _mm_sub_epi32(_mm_sub_epi32(x2i_2,x3i_2),x1i_2);
-  *(y2)  = _mm_add_epi16(*(x0),cpack(dy2r,dy2i));
-  //  dy3r = _mm_sub_epi32(x0r_2,_mm_add_epi32(x1i_2,_mm_sub_epi32(x2r_2,x3i_2)));
-  //  dy3i = _mm_add_epi32(x0i_2,_mm_sub_epi32(x1r_2,_mm_add_epi32(x2i_2,x3r_2)));
+  dy2r = simde_mm_sub_epi32(simde_mm_sub_epi32(x2r_2, x3r_2), x1r_2);
+  dy2i = simde_mm_sub_epi32(simde_mm_sub_epi32(x2i_2, x3i_2), x1i_2);
+  *(y2) = simde_mm_add_epi16(*(x0), cpack(dy2r, dy2i));
+  //  dy3r = simde_mm_sub_epi32(x0r_2,simde_mm_add_epi32(x1i_2,simde_mm_sub_epi32(x2r_2,x3i_2)));
+  //  dy3i = simde_mm_add_epi32(x0i_2,simde_mm_sub_epi32(x1r_2,simde_mm_add_epi32(x2i_2,x3r_2)));
   //  *(y3) = cpack(dy3r,dy3i);
-  dy3r = _mm_sub_epi32(_mm_sub_epi32(x3i_2,x2r_2),x1i_2);
-  dy3i = _mm_sub_epi32(x1r_2,_mm_add_epi32(x2i_2,x3r_2));
-  *(y3) = _mm_add_epi16(*(x0),cpack(dy3r,dy3i));
+  dy3r = simde_mm_sub_epi32(simde_mm_sub_epi32(x3i_2, x2r_2), x1i_2);
+  dy3i = simde_mm_sub_epi32(x1r_2, simde_mm_add_epi32(x2i_2, x3r_2));
+  *(y3) = simde_mm_add_epi16(*(x0), cpack(dy3r, dy3i));
 }
 
-static inline void bfly4_256(__m256i *x0,__m256i *x1,__m256i *x2,__m256i *x3,
-			     __m256i *y0,__m256i *y1,__m256i *y2,__m256i *y3,
-			     __m256i *tw1,__m256i *tw2,__m256i *tw3)__attribute__((always_inline));
-
-static inline void bfly4_256(__m256i *x0,__m256i *x1,__m256i *x2,__m256i *x3,
-			     __m256i *y0,__m256i *y1,__m256i *y2,__m256i *y3,
-			     __m256i *tw1,__m256i *tw2,__m256i *tw3)
+static inline void bfly4_256(simde__m256i *x0,
+                             simde__m256i *x1,
+                             simde__m256i *x2,
+                             simde__m256i *x3,
+                             simde__m256i *y0,
+                             simde__m256i *y1,
+                             simde__m256i *y2,
+                             simde__m256i *y3,
+                             simde__m256i *tw1,
+                             simde__m256i *tw2,
+                             simde__m256i *tw3) __attribute__((always_inline));
+
+static inline void bfly4_256(simde__m256i *x0, simde__m256i *x1, simde__m256i *x2, simde__m256i *x3, simde__m256i *y0, simde__m256i *y1, simde__m256i *y2, simde__m256i *y3, simde__m256i *tw1, simde__m256i *tw2, simde__m256i *tw3)
 {
-
-  __m256i x1r_2,x1i_2,x2r_2,x2i_2,x3r_2,x3i_2,dy0r,dy0i,dy1r,dy1i,dy2r,dy2i,dy3r,dy3i;
+  simde__m256i x1r_2, x1i_2, x2r_2, x2i_2, x3r_2, x3i_2, dy0r, dy0i, dy1r, dy1i, dy2r, dy2i, dy3r, dy3i;
 
   //  cmult(*(x0),*(W0),&x0r_2,&x0i_2);
   cmult_256(*(x1),*(tw1),&x1r_2,&x1i_2);
   cmult_256(*(x2),*(tw2),&x2r_2,&x2i_2);
   cmult_256(*(x3),*(tw3),&x3r_2,&x3i_2);
-  //  dy0r = _mm_add_epi32(x0r_2,_mm_add_epi32(x1r_2,_mm_add_epi32(x2r_2,x3r_2)));
-  //  dy0i = _mm_add_epi32(x0i_2,_mm_add_epi32(x1i_2,_mm_add_epi32(x2i_2,x3i_2)));
+  //  dy0r = simde_mm_add_epi32(x0r_2,simde_mm_add_epi32(x1r_2,simde_mm_add_epi32(x2r_2,x3r_2)));
+  //  dy0i = simde_mm_add_epi32(x0i_2,simde_mm_add_epi32(x1i_2,simde_mm_add_epi32(x2i_2,x3i_2)));
   //  *(y0)  = cpack(dy0r,dy0i);
   dy0r = simde_mm256_add_epi32(x1r_2,simde_mm256_add_epi32(x2r_2,x3r_2));
   dy0i = simde_mm256_add_epi32(x1i_2,simde_mm256_add_epi32(x2i_2,x3i_2));
   *(y0)  = simde_mm256_add_epi16(*(x0),cpack_256(dy0r,dy0i));
-  //  dy1r = _mm_add_epi32(x0r_2,_mm_sub_epi32(x1i_2,_mm_add_epi32(x2r_2,x3i_2)));
-  //  dy1i = _mm_sub_epi32(x0i_2,_mm_add_epi32(x1r_2,_mm_sub_epi32(x2i_2,x3r_2)));
+  //  dy1r = simde_mm_add_epi32(x0r_2,simde_mm_sub_epi32(x1i_2,simde_mm_add_epi32(x2r_2,x3i_2)));
+  //  dy1i = simde_mm_sub_epi32(x0i_2,simde_mm_add_epi32(x1r_2,simde_mm_sub_epi32(x2i_2,x3r_2)));
   //  *(y1)  = cpack(dy1r,dy1i);
   dy1r = simde_mm256_sub_epi32(x1i_2,simde_mm256_add_epi32(x2r_2,x3i_2));
   dy1i = simde_mm256_sub_epi32(simde_mm256_sub_epi32(x3r_2,x2i_2),x1r_2);
   *(y1)  = simde_mm256_add_epi16(*(x0),cpack_256(dy1r,dy1i));
-  //  dy2r = _mm_sub_epi32(x0r_2,_mm_sub_epi32(x1r_2,_mm_sub_epi32(x2r_2,x3r_2)));
-  //  dy2i = _mm_sub_epi32(x0i_2,_mm_sub_epi32(x1i_2,_mm_sub_epi32(x2i_2,x3i_2)));
+  //  dy2r = simde_mm_sub_epi32(x0r_2,simde_mm_sub_epi32(x1r_2,simde_mm_sub_epi32(x2r_2,x3r_2)));
+  //  dy2i = simde_mm_sub_epi32(x0i_2,simde_mm_sub_epi32(x1i_2,simde_mm_sub_epi32(x2i_2,x3i_2)));
   //  *(y2)  = cpack(dy2r,dy2i);
   dy2r = simde_mm256_sub_epi32(simde_mm256_sub_epi32(x2r_2,x3r_2),x1r_2);
   dy2i = simde_mm256_sub_epi32(simde_mm256_sub_epi32(x2i_2,x3i_2),x1i_2);
   *(y2)  = simde_mm256_add_epi16(*(x0),cpack_256(dy2r,dy2i));
-  //  dy3r = _mm_sub_epi32(x0r_2,_mm_add_epi32(x1i_2,_mm_sub_epi32(x2r_2,x3i_2)));
-  //  dy3i = _mm_add_epi32(x0i_2,_mm_sub_epi32(x1r_2,_mm_add_epi32(x2i_2,x3r_2)));
+  //  dy3r = simde_mm_sub_epi32(x0r_2,simde_mm_add_epi32(x1i_2,simde_mm_sub_epi32(x2r_2,x3i_2)));
+  //  dy3i = simde_mm_add_epi32(x0i_2,simde_mm_sub_epi32(x1r_2,simde_mm_add_epi32(x2i_2,x3r_2)));
   //  *(y3) = cpack(dy3r,dy3i);
   dy3r = simde_mm256_sub_epi32(simde_mm256_sub_epi32(x3i_2,x2r_2),x1i_2);
   dy3i = simde_mm256_sub_epi32(x1r_2,simde_mm256_add_epi32(x2i_2,x3r_2));
   *(y3) = simde_mm256_add_epi16(*(x0),cpack_256(dy3r,dy3i));
 }
 
-#elif defined(__arm__) || defined(__aarch64__)
-static inline void bfly4(int16x8_t *x0,int16x8_t *x1,int16x8_t *x2,int16x8_t *x3,
-                         int16x8_t *y0,int16x8_t *y1,int16x8_t *y2,int16x8_t *y3,
-                         int16x8_t *tw1,int16x8_t *tw2,int16x8_t *tw3)__attribute__((always_inline));
-
-static inline void bfly4(int16x8_t *x0,int16x8_t *x1,int16x8_t *x2,int16x8_t *x3,
-                         int16x8_t *y0,int16x8_t *y1,int16x8_t *y2,int16x8_t *y3,
-                         int16x8_t *tw1,int16x8_t *tw2,int16x8_t *tw3)
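+// ibfly4: inverse radix-4 butterfly (conjugated twiddles); note that y1 and y3
+// swap roles relative to bfly4.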
+static inline void ibfly4(simde__m128i *x0,
+                          simde__m128i *x1,
+                          simde__m128i *x2,
+                          simde__m128i *x3,
+                          simde__m128i *y0,
+                          simde__m128i *y1,
+                          simde__m128i *y2,
+                          simde__m128i *y3,
+                          simde__m128i *tw1,
+                          simde__m128i *tw2,
+                          simde__m128i *tw3) __attribute__((always_inline));
+
+static inline void ibfly4(simde__m128i *x0, simde__m128i *x1, simde__m128i *x2, simde__m128i *x3, simde__m128i *y0, simde__m128i *y1, simde__m128i *y2, simde__m128i *y3, simde__m128i *tw1, simde__m128i *tw2, simde__m128i *tw3)
 {
-
-  int32x4_t x1r_2,x1i_2,x2r_2,x2i_2,x3r_2,x3i_2,dy0r,dy0i,dy1r,dy1i,dy2r,dy2i,dy3r,dy3i;
-
-  //  cmult(*(x0),*(W0),&x0r_2,&x0i_2);
-  cmult(*(x1),*(tw1),&x1r_2,&x1i_2);
-  cmult(*(x2),*(tw2),&x2r_2,&x2i_2);
-  cmult(*(x3),*(tw3),&x3r_2,&x3i_2);
-  //  dy0r = _mm_add_epi32(x0r_2,_mm_add_epi32(x1r_2,_mm_add_epi32(x2r_2,x3r_2)));
-  //  dy0i = _mm_add_epi32(x0i_2,_mm_add_epi32(x1i_2,_mm_add_epi32(x2i_2,x3i_2)));
-  //  *(y0)  = cpack(dy0r,dy0i);
-  dy0r = vqaddq_s32(x1r_2,vqaddq_s32(x2r_2,x3r_2));
-  dy0i = vqaddq_s32(x1i_2,vqaddq_s32(x2i_2,x3i_2));
-  *(y0)  = vqaddq_s16(*(x0),cpack(dy0r,dy0i));
-  //  dy1r = _mm_add_epi32(x0r_2,_mm_sub_epi32(x1i_2,_mm_add_epi32(x2r_2,x3i_2)));
-  //  dy1i = _mm_sub_epi32(x0i_2,_mm_add_epi32(x1r_2,_mm_sub_epi32(x2i_2,x3r_2)));
-  //  *(y1)  = cpack(dy1r,dy1i);
-  dy1r = vqsubq_s32(x1i_2,vqaddq_s32(x2r_2,x3i_2));
-  dy1i = vqsubq_s32(vqsubq_s32(x3r_2,x2i_2),x1r_2);
-  *(y1)  = vqaddq_s16(*(x0),cpack(dy1r,dy1i));
-  //  dy2r = _mm_sub_epi32(x0r_2,_mm_sub_epi32(x1r_2,_mm_sub_epi32(x2r_2,x3r_2)));
-  //  dy2i = _mm_sub_epi32(x0i_2,_mm_sub_epi32(x1i_2,_mm_sub_epi32(x2i_2,x3i_2)));
-  //  *(y2)  = cpack(dy2r,dy2i);
-  dy2r = vqsubq_s32(vqsubq_s32(x2r_2,x3r_2),x1r_2);
-  dy2i = vqsubq_s32(vqsubq_s32(x2i_2,x3i_2),x1i_2);
-  *(y2)  = vqaddq_s16(*(x0),cpack(dy2r,dy2i));
-  //  dy3r = _mm_sub_epi32(x0r_2,_mm_add_epi32(x1i_2,_mm_sub_epi32(x2r_2,x3i_2)));
-  //  dy3i = _mm_add_epi32(x0i_2,_mm_sub_epi32(x1r_2,_mm_add_epi32(x2i_2,x3r_2)));
-  //  *(y3) = cpack(dy3r,dy3i);
-  dy3r = vqsubq_s32(vqsubq_s32(x3i_2,x2r_2),x1i_2);
-  dy3i = vqsubq_s32(x1r_2,vqaddq_s32(x2i_2,x3r_2));
-  *(y3) = vqaddq_s16(*(x0),cpack(dy3r,dy3i));
-}
-
-#endif // defined(__x86_64__) || defined(__i386__)
-
-#if defined(__x86_64__) || defined(__i386__)
-#if 0
-static inline void ibfly4(__m128i *x0,__m128i *x1,__m128i *x2,__m128i *x3,
-                          __m128i *y0,__m128i *y1,__m128i *y2,__m128i *y3,
-                          __m128i *tw1,__m128i *tw2,__m128i *tw3)__attribute__((always_inline));
-
-static inline void ibfly4(__m128i *x0,__m128i *x1,__m128i *x2,__m128i *x3,
-                          __m128i *y0,__m128i *y1,__m128i *y2,__m128i *y3,
-                          __m128i *tw1,__m128i *tw2,__m128i *tw3)
-{
-
-  __m128i x1r_2,x1i_2,x2r_2,x2i_2,x3r_2,x3i_2,dy0r,dy0i,dy1r,dy1i,dy2r,dy2i,dy3r,dy3i;
-
+  simde__m128i x1r_2, x1i_2, x2r_2, x2i_2, x3r_2, x3i_2, dy0r, dy0i, dy1r, dy1i, dy2r, dy2i, dy3r, dy3i;
 
   cmultc(*(x1),*(tw1),&x1r_2,&x1i_2);
   cmultc(*(x2),*(tw2),&x2r_2,&x2i_2);
   cmultc(*(x3),*(tw3),&x3r_2,&x3i_2);
 
-  dy0r = _mm_add_epi32(x1r_2,_mm_add_epi32(x2r_2,x3r_2));
-  dy0i = _mm_add_epi32(x1i_2,_mm_add_epi32(x2i_2,x3i_2));
-  *(y0)  = _mm_add_epi16(*(x0),cpack(dy0r,dy0i));
-  dy3r = _mm_sub_epi32(x1i_2,_mm_add_epi32(x2r_2,x3i_2));
-  dy3i = _mm_sub_epi32(_mm_sub_epi32(x3r_2,x2i_2),x1r_2);
-  *(y3)  = _mm_add_epi16(*(x0),cpack(dy3r,dy3i));
-  dy2r = _mm_sub_epi32(_mm_sub_epi32(x2r_2,x3r_2),x1r_2);
-  dy2i = _mm_sub_epi32(_mm_sub_epi32(x2i_2,x3i_2),x1i_2);
-  *(y2)  = _mm_add_epi16(*(x0),cpack(dy2r,dy2i));
-  dy1r = _mm_sub_epi32(_mm_sub_epi32(x3i_2,x2r_2),x1i_2);
-  dy1i = _mm_sub_epi32(x1r_2,_mm_add_epi32(x2i_2,x3r_2));
-  *(y1) = _mm_add_epi16(*(x0),cpack(dy1r,dy1i));
-}
-#endif
-static inline void ibfly4_256(__m256i *x0,__m256i *x1,__m256i *x2,__m256i *x3,
-			      __m256i *y0,__m256i *y1,__m256i *y2,__m256i *y3,
-			      __m256i *tw1,__m256i *tw2,__m256i *tw3)__attribute__((always_inline));
-
-static inline void ibfly4_256(__m256i *x0,__m256i *x1,__m256i *x2,__m256i *x3,
-			      __m256i *y0,__m256i *y1,__m256i *y2,__m256i *y3,
-			      __m256i *tw1,__m256i *tw2,__m256i *tw3)
-{
-
-  __m256i x1r_2,x1i_2,x2r_2,x2i_2,x3r_2,x3i_2,dy0r,dy0i,dy1r,dy1i,dy2r,dy2i,dy3r,dy3i;
-
+  dy0r = simde_mm_add_epi32(x1r_2, simde_mm_add_epi32(x2r_2, x3r_2));
+  dy0i = simde_mm_add_epi32(x1i_2, simde_mm_add_epi32(x2i_2, x3i_2));
+  *(y0) = simde_mm_add_epi16(*(x0), cpack(dy0r, dy0i));
+  dy3r = simde_mm_sub_epi32(x1i_2, simde_mm_add_epi32(x2r_2, x3i_2));
+  dy3i = simde_mm_sub_epi32(simde_mm_sub_epi32(x3r_2, x2i_2), x1r_2);
+  *(y3) = simde_mm_add_epi16(*(x0), cpack(dy3r, dy3i));
+  dy2r = simde_mm_sub_epi32(simde_mm_sub_epi32(x2r_2, x3r_2), x1r_2);
+  dy2i = simde_mm_sub_epi32(simde_mm_sub_epi32(x2i_2, x3i_2), x1i_2);
+  *(y2) = simde_mm_add_epi16(*(x0), cpack(dy2r, dy2i));
+  dy1r = simde_mm_sub_epi32(simde_mm_sub_epi32(x3i_2, x2r_2), x1i_2);
+  dy1i = simde_mm_sub_epi32(x1r_2, simde_mm_add_epi32(x2i_2, x3r_2));
+  *(y1) = simde_mm_add_epi16(*(x0), cpack(dy1r, dy1i));
+}
+
+static inline void ibfly4_256(simde__m256i *x0,
+                              simde__m256i *x1,
+                              simde__m256i *x2,
+                              simde__m256i *x3,
+                              simde__m256i *y0,
+                              simde__m256i *y1,
+                              simde__m256i *y2,
+                              simde__m256i *y3,
+                              simde__m256i *tw1,
+                              simde__m256i *tw2,
+                              simde__m256i *tw3) __attribute__((always_inline));
+
+static inline void ibfly4_256(simde__m256i *x0, simde__m256i *x1, simde__m256i *x2, simde__m256i *x3, simde__m256i *y0, simde__m256i *y1, simde__m256i *y2, simde__m256i *y3, simde__m256i *tw1, simde__m256i *tw2, simde__m256i *tw3)
+{
+  simde__m256i x1r_2, x1i_2, x2r_2, x2i_2, x3r_2, x3i_2, dy0r, dy0i, dy1r, dy1i, dy2r, dy2i, dy3r, dy3i;
 
   cmultc_256(*(x1),*(tw1),&x1r_2,&x1i_2);
   cmultc_256(*(x2),*(tw2),&x2r_2,&x2i_2);
@@ -1164,90 +854,93 @@ static inline void ibfly4_256(__m256i *x0,__m256i *x1,__m256i *x2,__m256i *x3,
   *(y1) = simde_mm256_add_epi16(*(x0),cpack_256(dy1r,dy1i));
 }
 
-#elif defined(__arm__) || defined(__aarch64__)
-#if 0
-static inline void ibfly4(int16x8_t *x0,int16x8_t *x1,int16x8_t *x2,int16x8_t *x3,
-                          int16x8_t *y0,int16x8_t *y1,int16x8_t *y2,int16x8_t *y3,
-                          int16x8_t *tw1,int16x8_t *tw2,int16x8_t *tw3)__attribute__((always_inline));
-
-static inline void ibfly4(int16x8_t *x0,int16x8_t *x1,int16x8_t *x2,int16x8_t *x3,
-                          int16x8_t *y0,int16x8_t *y1,int16x8_t *y2,int16x8_t *y3,
-                          int16x8_t *tw1,int16x8_t *tw2,int16x8_t *tw3)
-{
-
-  int32x4_t x1r_2,x1i_2,x2r_2,x2i_2,x3r_2,x3i_2,dy0r,dy0i,dy1r,dy1i,dy2r,dy2i,dy3r,dy3i;
-
-
-  cmultc(*(x1),*(tw1),&x1r_2,&x1i_2);
-  cmultc(*(x2),*(tw2),&x2r_2,&x2i_2);
-  cmultc(*(x3),*(tw3),&x3r_2,&x3i_2);
-
-  dy0r  = vqaddq_s32(x1r_2,vqaddq_s32(x2r_2,x3r_2));
-  dy0i  = vqaddq_s32(x1i_2,vqaddq_s32(x2i_2,x3i_2));
-  *(y0) = vqaddq_s16(*(x0),cpack(dy0r,dy0i));
-  dy3r  = vqsubq_s32(x1i_2,vqaddq_s32(x2r_2,x3i_2));
-  dy3i  = vqsubq_s32(vqsubq_s32(x3r_2,x2i_2),x1r_2);
-  *(y3) = vqaddq_s16(*(x0),cpack(dy3r,dy3i));
-  dy2r  = vqsubq_s32(vqsubq_s32(x2r_2,x3r_2),x1r_2);
-  dy2i  = vqsubq_s32(vqsubq_s32(x2i_2,x3i_2),x1i_2);
-  *(y2) = vqaddq_s16(*(x0),cpack(dy2r,dy2i));
-  dy1r  = vqsubq_s32(vqsubq_s32(x3i_2,x2r_2),x1i_2);
-  dy1i  = vqsubq_s32(x1r_2,vqaddq_s32(x2i_2,x3r_2));
-  *(y1) = vqaddq_s16(*(x0),cpack(dy1r,dy1i));
-}
-#endif
-#endif // defined(__x86_64__) || defined(__i386__)
-
-#if defined(__x86_64__) || defined(__i386__)
-static inline void bfly4_tw1(__m128i *x0,__m128i *x1,__m128i *x2,__m128i *x3,
-                             __m128i *y0,__m128i *y1,__m128i *y2,__m128i *y3)__attribute__((always_inline));
-
-static inline void bfly4_tw1(__m128i *x0,__m128i *x1,__m128i *x2,__m128i *x3,
-                             __m128i *y0,__m128i *y1,__m128i *y2,__m128i *y3)
-{
-  register __m128i x1_flip,x3_flip,x02t,x13t;
-  register __m128i complex_shuffle = _mm_set_epi8(13,12,15,14,9,8,11,10,5,4,7,6,1,0,3,2);
-
-  x02t    = _mm_adds_epi16(*(x0),*(x2));
-  x13t    = _mm_adds_epi16(*(x1),*(x3));
-  *(y0)   = _mm_adds_epi16(x02t,x13t);
-  *(y2)   = _mm_subs_epi16(x02t,x13t);
-  x1_flip = _mm_sign_epi16(*(x1),*(__m128i*)conjugatedft);
-  x1_flip = _mm_shuffle_epi8(x1_flip,complex_shuffle);
-  x3_flip = _mm_sign_epi16(*(x3),*(__m128i*)conjugatedft);
-  x3_flip = _mm_shuffle_epi8(x3_flip,complex_shuffle);
-  x02t    = _mm_subs_epi16(*(x0),*(x2));
-  x13t    = _mm_subs_epi16(x1_flip,x3_flip);
-  *(y1)   = _mm_adds_epi16(x02t,x13t);  // x0 + x1f - x2 - x3f
-  *(y3)   = _mm_subs_epi16(x02t,x13t);  // x0 - x1f - x2 + x3f
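+// bfly4_tw1: radix-4 butterfly with unit twiddles; x1/x3 are rotated by +/-i via a
+// sign flip with conjugatedft and a re/im pair swap instead of a full complex multiply.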
+static inline void bfly4_tw1(simde__m128i *x0,
+                             simde__m128i *x1,
+                             simde__m128i *x2,
+                             simde__m128i *x3,
+                             simde__m128i *y0,
+                             simde__m128i *y1,
+                             simde__m128i *y2,
+                             simde__m128i *y3) __attribute__((always_inline));
+
+static inline void bfly4_tw1(simde__m128i *x0, simde__m128i *x1, simde__m128i *x2, simde__m128i *x3, simde__m128i *y0, simde__m128i *y1, simde__m128i *y2, simde__m128i *y3)
+{
+  register simde__m128i x1_flip, x3_flip, x02t, x13t;
+  register simde__m128i complex_shuffle = simde_mm_set_epi8(13, 12, 15, 14, 9, 8, 11, 10, 5, 4, 7, 6, 1, 0, 3, 2);
+
+  x02t = simde_mm_adds_epi16(*(x0), *(x2));
+  x13t = simde_mm_adds_epi16(*(x1), *(x3));
+  *(y0) = simde_mm_adds_epi16(x02t, x13t);
+  *(y2) = simde_mm_subs_epi16(x02t, x13t);
+  x1_flip = simde_mm_sign_epi16(*(x1), *(simde__m128i *)conjugatedft);
+  x1_flip = simde_mm_shuffle_epi8(x1_flip, complex_shuffle);
+  x3_flip = simde_mm_sign_epi16(*(x3), *(simde__m128i *)conjugatedft);
+  x3_flip = simde_mm_shuffle_epi8(x3_flip, complex_shuffle);
+  x02t = simde_mm_subs_epi16(*(x0), *(x2));
+  x13t = simde_mm_subs_epi16(x1_flip, x3_flip);
+  *(y1) = simde_mm_adds_epi16(x02t, x13t); // x0 + x1f - x2 - x3f
+  *(y3) = simde_mm_subs_epi16(x02t, x13t); // x0 - x1f - x2 + x3f
 
   /*
-  *(y0) = _mm_adds_epi16(*(x0),_mm_adds_epi16(*(x1),_mm_adds_epi16(*(x2),*(x3))));
-  x1_flip = _mm_sign_epi16(*(x1),*(__m128i*)conjugatedft);
-  x1_flip = _mm_shuffle_epi8(x1_flip,_mm_set_epi8(13,12,15,14,9,8,11,10,5,4,7,6,1,0,3,2));
-  x3_flip = _mm_sign_epi16(*(x3),*(__m128i*)conjugatedft);
-  x3_flip = _mm_shuffle_epi8(x3_flip,_mm_set_epi8(13,12,15,14,9,8,11,10,5,4,7,6,1,0,3,2));
-  *(y1)   = _mm_adds_epi16(*(x0),_mm_subs_epi16(x1_flip,_mm_adds_epi16(*(x2),x3_flip)));
-  *(y2)   = _mm_subs_epi16(*(x0),_mm_subs_epi16(*(x1),_mm_subs_epi16(*(x2),*(x3))));
-  *(y3)   = _mm_subs_epi16(*(x0),_mm_adds_epi16(x1_flip,_mm_subs_epi16(*(x2),x3_flip)));
+  *(y0) = simde_mm_adds_epi16(*(x0),simde_mm_adds_epi16(*(x1),simde_mm_adds_epi16(*(x2),*(x3))));
+  x1_flip = simde_mm_sign_epi16(*(x1),*(simde__m128i*)conjugatedft);
+  x1_flip = simde_mm_shuffle_epi8(x1_flip,simde_mm_set_epi8(13,12,15,14,9,8,11,10,5,4,7,6,1,0,3,2));
+  x3_flip = simde_mm_sign_epi16(*(x3),*(simde__m128i*)conjugatedft);
+  x3_flip = simde_mm_shuffle_epi8(x3_flip,simde_mm_set_epi8(13,12,15,14,9,8,11,10,5,4,7,6,1,0,3,2));
+  *(y1)   = simde_mm_adds_epi16(*(x0),simde_mm_subs_epi16(x1_flip,simde_mm_adds_epi16(*(x2),x3_flip)));
+  *(y2)   = simde_mm_subs_epi16(*(x0),simde_mm_subs_epi16(*(x1),simde_mm_subs_epi16(*(x2),*(x3))));
+  *(y3)   = simde_mm_subs_epi16(*(x0),simde_mm_adds_epi16(x1_flip,simde_mm_subs_epi16(*(x2),x3_flip)));
   */
 }
-static inline void bfly4_tw1_256(__m256i *x0,__m256i *x1,__m256i *x2,__m256i *x3,
-				 __m256i *y0,__m256i *y1,__m256i *y2,__m256i *y3)__attribute__((always_inline));
 
-static inline void bfly4_tw1_256(__m256i *x0,__m256i *x1,__m256i *x2,__m256i *x3,
-				 __m256i *y0,__m256i *y1,__m256i *y2,__m256i *y3)
-{
-  register __m256i x1_flip,x3_flip,x02t,x13t;
-  register __m256i complex_shuffle = simde_mm256_set_epi8(29,28,31,30,25,24,27,26,21,20,23,22,17,16,19,18,13,12,15,14,9,8,11,10,5,4,7,6,1,0,3,2);
+static inline void bfly4_tw1_256(simde__m256i *x0,
+                                 simde__m256i *x1,
+                                 simde__m256i *x2,
+                                 simde__m256i *x3,
+                                 simde__m256i *y0,
+                                 simde__m256i *y1,
+                                 simde__m256i *y2,
+                                 simde__m256i *y3) __attribute__((always_inline));
+
+static inline void bfly4_tw1_256(simde__m256i *x0, simde__m256i *x1, simde__m256i *x2, simde__m256i *x3, simde__m256i *y0, simde__m256i *y1, simde__m256i *y2, simde__m256i *y3)
+{
+  register simde__m256i x1_flip, x3_flip, x02t, x13t;
+  register simde__m256i complex_shuffle = simde_mm256_set_epi8(29,
+                                                               28,
+                                                               31,
+                                                               30,
+                                                               25,
+                                                               24,
+                                                               27,
+                                                               26,
+                                                               21,
+                                                               20,
+                                                               23,
+                                                               22,
+                                                               17,
+                                                               16,
+                                                               19,
+                                                               18,
+                                                               13,
+                                                               12,
+                                                               15,
+                                                               14,
+                                                               9,
+                                                               8,
+                                                               11,
+                                                               10,
+                                                               5,
+                                                               4,
+                                                               7,
+                                                               6,
+                                                               1,
+                                                               0,
+                                                               3,
+                                                               2);
 
   x02t    = simde_mm256_adds_epi16(*(x0),*(x2));
   x13t    = simde_mm256_adds_epi16(*(x1),*(x3));
   *(y0)   = simde_mm256_adds_epi16(x02t,x13t);
   *(y2)   = simde_mm256_subs_epi16(x02t,x13t);
-  x1_flip = simde_mm256_sign_epi16(*(x1),*(__m256i*)conjugatedft);
+  x1_flip = simde_mm256_sign_epi16(*(x1), *(simde__m256i *)conjugatedft);
   x1_flip = simde_mm256_shuffle_epi8(x1_flip,complex_shuffle);
-  x3_flip = simde_mm256_sign_epi16(*(x3),*(__m256i*)conjugatedft);
+  x3_flip = simde_mm256_sign_epi16(*(x3), *(simde__m256i *)conjugatedft);
   x3_flip = simde_mm256_shuffle_epi8(x3_flip,complex_shuffle);
   x02t    = simde_mm256_subs_epi16(*(x0),*(x2));
   x13t    = simde_mm256_subs_epi16(x1_flip,x3_flip);
@@ -1255,88 +948,49 @@ static inline void bfly4_tw1_256(__m256i *x0,__m256i *x1,__m256i *x2,__m256i *x3
   *(y3)   = simde_mm256_subs_epi16(x02t,x13t);  // x0 - x1f - x2 + x3f
 }
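+/* Editor's note -- a sketch, not part of the upstream patch.  bfly4_tw1_256 is a
+   radix-4 DIT butterfly on packed Q15 complex samples.  With x1f = -j*x1 and
+   x3f = -j*x3 (assuming conjugatedft holds the alternating +/-1 words that let
+   sign_epi16 plus the re/im byte swap act as multiplication by -j), the outputs are
+     y0 = x0 + x1  + x2 + x3
+     y1 = x0 + x1f - x2 - x3f   (= x0 - j*x1 - x2 + j*x3)
+     y2 = x0 - x1  + x2 - x3
+     y3 = x0 - x1f - x2 + x3f   (= x0 + j*x1 - x2 - j*x3)
+   A scalar reference for one butterfly (hypothetical helper, saturation ignored): */
+#if 0
+static void bfly4_tw1_ref(const int16_t x[4][2], int16_t y[4][2])
+{
+  // x[n][0] = real, x[n][1] = imag; -j*(a + j*b) = b - j*a
+  y[0][0] = x[0][0] + x[1][0] + x[2][0] + x[3][0];
+  y[0][1] = x[0][1] + x[1][1] + x[2][1] + x[3][1];
+  y[1][0] = x[0][0] + x[1][1] - x[2][0] - x[3][1]; // x0 - j*x1 - x2 + j*x3
+  y[1][1] = x[0][1] - x[1][0] - x[2][1] + x[3][0];
+  y[2][0] = x[0][0] - x[1][0] + x[2][0] - x[3][0];
+  y[2][1] = x[0][1] - x[1][1] + x[2][1] - x[3][1];
+  y[3][0] = x[0][0] - x[1][1] - x[2][0] + x[3][1]; // x0 + j*x1 - x2 - j*x3
+  y[3][1] = x[0][1] + x[1][0] - x[2][1] - x[3][0];
+}
+#endif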
 
-#elif defined(__arm__) || defined(__aarch64__)
-static inline void bfly4_tw1(int16x8_t *x0,int16x8_t *x1,int16x8_t *x2,int16x8_t *x3,
-                             int16x8_t *y0,int16x8_t *y1,int16x8_t *y2,int16x8_t *y3)__attribute__((always_inline));
-
-static inline void bfly4_tw1(int16x8_t *x0,int16x8_t *x1,int16x8_t *x2,int16x8_t *x3,
-                             int16x8_t *y0,int16x8_t *y1,int16x8_t *y2,int16x8_t *y3)
-{
-
-  register int16x8_t x1_flip,x3_flip;
-
-  *(y0) = vqaddq_s16(*(x0),vqaddq_s16(*(x1),vqaddq_s16(*(x2),*(x3))));
-  x1_flip = vrev32q_s16(vmulq_s16(*(x1),*(int16x8_t*)conjugatedft));
-  x3_flip = vrev32q_s16(vmulq_s16(*(x3),*(int16x8_t*)conjugatedft));
-  *(y1)   = vqaddq_s16(*(x0),vqsubq_s16(x1_flip,vqaddq_s16(*(x2),x3_flip)));
-  *(y2)   = vqsubq_s16(*(x0),vqsubq_s16(*(x1),vqsubq_s16(*(x2),*(x3))));
-  *(y3)   = vqsubq_s16(*(x0),vqaddq_s16(x1_flip,vqsubq_s16(*(x2),x3_flip)));
-}
-#endif // defined(__x86_64__) || defined(__i386__)
-
-#if defined(__x86_64__) || defined(__i386__)
-#if 0
-static inline void ibfly4_tw1(__m128i *x0,__m128i *x1,__m128i *x2,__m128i *x3,
-                              __m128i *y0,__m128i *y1,__m128i *y2,__m128i *y3)__attribute__((always_inline));
-
-static inline void ibfly4_tw1(__m128i *x0,__m128i *x1,__m128i *x2,__m128i *x3,
-                              __m128i *y0,__m128i *y1,__m128i *y2,__m128i *y3)
-{
-
-  register __m128i x1_flip,x3_flip;
-
-  *(y0) = _mm_adds_epi16(*(x0),_mm_adds_epi16(*(x1),_mm_adds_epi16(*(x2),*(x3))));
-
-  x1_flip = _mm_sign_epi16(*(x1),*(__m128i*)conjugatedft);
-  //  x1_flip = _mm_shufflelo_epi16(x1_flip,_MM_SHUFFLE(2,3,0,1));
-  //  x1_flip = _mm_shufflehi_epi16(x1_flip,_MM_SHUFFLE(2,3,0,1));
-  x1_flip = _mm_shuffle_epi8(x1_flip,_mm_set_epi8(13,12,15,14,9,8,11,10,5,4,7,6,1,0,3,2));
-  x3_flip = _mm_sign_epi16(*(x3),*(__m128i*)conjugatedft);
-  //  x3_flip = _mm_shufflelo_epi16(x3_flip,_MM_SHUFFLE(2,3,0,1));
-  //  x3_flip = _mm_shufflehi_epi16(x3_flip,_MM_SHUFFLE(2,3,0,1));
-  x3_flip = _mm_shuffle_epi8(x3_flip,_mm_set_epi8(13,12,15,14,9,8,11,10,5,4,7,6,1,0,3,2));
-  *(y1)   = _mm_subs_epi16(*(x0),_mm_adds_epi16(x1_flip,_mm_subs_epi16(*(x2),x3_flip)));
-  *(y2)   = _mm_subs_epi16(*(x0),_mm_subs_epi16(*(x1),_mm_subs_epi16(*(x2),*(x3))));
-  *(y3)   = _mm_adds_epi16(*(x0),_mm_subs_epi16(x1_flip,_mm_adds_epi16(*(x2),x3_flip)));
-}
-#endif
-
-#elif defined(__arm__) || defined(__aarch64__)
-#if 0
-static inline void ibfly4_tw1(int16x8_t *x0,int16x8_t *x1,int16x8_t *x2,int16x8_t *x3,
-			      int16x8_t *y0,int16x8_t *y1,int16x8_t *y2,int16x8_t *y3)__attribute__((always_inline));
-
-static inline void ibfly4_tw1(int16x8_t *x0,int16x8_t *x1,int16x8_t *x2,int16x8_t *x3,
-			      int16x8_t *y0,int16x8_t *y1,int16x8_t *y2,int16x8_t *y3)
-{
-
-  register int16x8_t x1_flip,x3_flip;
-
-  *(y0) = vqaddq_s16(*(x0),vqaddq_s16(*(x1),vqaddq_s16(*(x2),*(x3))));
-  x1_flip = vrev32q_s16(vmulq_s16(*(x1),*(int16x8_t*)conjugatedft));
-  x3_flip = vrev32q_s16(vmulq_s16(*(x3),*(int16x8_t*)conjugatedft));
-  *(y1)   = vqsubq_s16(*(x0),vqaddq_s16(x1_flip,vqsubq_s16(*(x2),x3_flip)));
-  *(y2)   = vqsubq_s16(*(x0),vqsubq_s16(*(x1),vqsubq_s16(*(x2),*(x3))));
-  *(y3)   = vqaddq_s16(*(x0),vqsubq_s16(x1_flip,vqaddq_s16(*(x2),x3_flip)));
-}
-#endif
-#endif // defined(__x86_64__) || defined(__i386__)
-
-#if defined(__x86_64__) || defined(__i386__)
-#if 0
-static inline void bfly4_16(__m128i *x0,__m128i *x1,__m128i *x2,__m128i *x3,
-                            __m128i *y0,__m128i *y1,__m128i *y2,__m128i *y3,
-                            __m128i *tw1,__m128i *tw2,__m128i *tw3,
-                            __m128i *tw1b,__m128i *tw2b,__m128i *tw3b)__attribute__((always_inline));
-
-static inline void bfly4_16(__m128i *x0,__m128i *x1,__m128i *x2,__m128i *x3,
-                            __m128i *y0,__m128i *y1,__m128i *y2,__m128i *y3,
-                            __m128i *tw1,__m128i *tw2,__m128i *tw3,
-                            __m128i *tw1b,__m128i *tw2b,__m128i *tw3b)
-{
-
-  register __m128i x1t,x2t,x3t,x02t,x13t;
-  register __m128i x1_flip,x3_flip;
+static inline void __attribute__((always_inline))
+ibfly4_tw1(simde__m128i *x0, simde__m128i *x1, simde__m128i *x2, simde__m128i *x3,
+           simde__m128i *y0, simde__m128i *y1, simde__m128i *y2, simde__m128i *y3)
+{
+  register simde__m128i x1_flip, x3_flip;
+
+  *(y0) = simde_mm_adds_epi16(*(x0), simde_mm_adds_epi16(*(x1), simde_mm_adds_epi16(*(x2), *(x3))));
+
+  x1_flip = simde_mm_sign_epi16(*(x1), *(simde__m128i *)conjugatedft);
+  //  x1_flip = simde_mm_shufflelo_epi16(x1_flip, SIMDE_MM_SHUFFLE(2,3,0,1));
+  //  x1_flip = simde_mm_shufflehi_epi16(x1_flip, SIMDE_MM_SHUFFLE(2,3,0,1));
+  x1_flip = simde_mm_shuffle_epi8(x1_flip, simde_mm_set_epi8(13, 12, 15, 14, 9, 8, 11, 10, 5, 4, 7, 6, 1, 0, 3, 2));
+  x3_flip = simde_mm_sign_epi16(*(x3), *(simde__m128i *)conjugatedft);
+  //  x3_flip = simde_mm_shufflelo_epi16(x3_flip, SIMDE_MM_SHUFFLE(2,3,0,1));
+  //  x3_flip = simde_mm_shufflehi_epi16(x3_flip, SIMDE_MM_SHUFFLE(2,3,0,1));
+  x3_flip = simde_mm_shuffle_epi8(x3_flip, simde_mm_set_epi8(13, 12, 15, 14, 9, 8, 11, 10, 5, 4, 7, 6, 1, 0, 3, 2));
+  *(y1) = simde_mm_subs_epi16(*(x0), simde_mm_adds_epi16(x1_flip, simde_mm_subs_epi16(*(x2), x3_flip)));
+  *(y2) = simde_mm_subs_epi16(*(x0), simde_mm_subs_epi16(*(x1), simde_mm_subs_epi16(*(x2), *(x3))));
+  *(y3) = simde_mm_adds_epi16(*(x0), simde_mm_subs_epi16(x1_flip, simde_mm_adds_epi16(*(x2), x3_flip)));
+}
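+/* Editor's note: the inverse butterfly is bfly4_tw1 with the sign of j flipped,
+   i.e. the y1 and y3 expressions above are the forward-butterfly ones exchanged. */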
+
+static inline void __attribute__((always_inline))
+bfly4_16(simde__m128i *x0, simde__m128i *x1, simde__m128i *x2, simde__m128i *x3,
+         simde__m128i *y0, simde__m128i *y1, simde__m128i *y2, simde__m128i *y3,
+         simde__m128i *tw1, simde__m128i *tw2, simde__m128i *tw3,
+         simde__m128i *tw1b, simde__m128i *tw2b, simde__m128i *tw3b)
+{
+  register simde__m128i x1t, x2t, x3t, x02t, x13t;
+  register simde__m128i x1_flip, x3_flip;
 
   x1t = packed_cmult2(*(x1),*(tw1),*(tw1b));
   x2t = packed_cmult2(*(x2),*(tw2),*(tw2b));
@@ -1344,48 +998,82 @@ static inline void bfly4_16(__m128i *x0,__m128i *x1,__m128i *x2,__m128i *x3,
 
 
   //  bfly4_tw1(x0,&x1t,&x2t,&x3t,y0,y1,y2,y3);
-  x02t  = _mm_adds_epi16(*(x0),x2t);
-  x13t  = _mm_adds_epi16(x1t,x3t);
+  x02t = simde_mm_adds_epi16(*(x0), x2t);
+  x13t = simde_mm_adds_epi16(x1t, x3t);
   /*
-  *(y0) = _mm_adds_epi16(*(x0),_mm_adds_epi16(x1t,_mm_adds_epi16(x2t,x3t)));
-  *(y2)   = _mm_subs_epi16(*(x0),_mm_subs_epi16(x1t,_mm_subs_epi16(x2t,x3t)));
-  */
-  *(y0)   = _mm_adds_epi16(x02t,x13t);
-  *(y2)   = _mm_subs_epi16(x02t,x13t);
-
-  x1_flip = _mm_sign_epi16(x1t,*(__m128i*)conjugatedft);
-  //  x1_flip = _mm_shufflelo_epi16(x1_flip,_MM_SHUFFLE(2,3,0,1));
-  //  x1_flip = _mm_shufflehi_epi16(x1_flip,_MM_SHUFFLE(2,3,0,1));
-  x1_flip = _mm_shuffle_epi8(x1_flip,_mm_set_epi8(13,12,15,14,9,8,11,10,5,4,7,6,1,0,3,2));
-  x3_flip = _mm_sign_epi16(x3t,*(__m128i*)conjugatedft);
-  //  x3_flip = _mm_shufflelo_epi16(x3_flip,_MM_SHUFFLE(2,3,0,1));
-  //  x3_flip = _mm_shufflehi_epi16(x3_flip,_MM_SHUFFLE(2,3,0,1));
-  x3_flip = _mm_shuffle_epi8(x3_flip,_mm_set_epi8(13,12,15,14,9,8,11,10,5,4,7,6,1,0,3,2));
-  x02t  = _mm_subs_epi16(*(x0),x2t);
-  x13t  = _mm_subs_epi16(x1_flip,x3_flip);
+   *(y0) = simde_mm_adds_epi16(*(x0),simde_mm_adds_epi16(x1t,simde_mm_adds_epi16(x2t,x3t)));
+   *(y2)   = simde_mm_subs_epi16(*(x0),simde_mm_subs_epi16(x1t,simde_mm_subs_epi16(x2t,x3t)));
+   */
+  *(y0) = simde_mm_adds_epi16(x02t, x13t);
+  *(y2) = simde_mm_subs_epi16(x02t, x13t);
+
+  x1_flip = simde_mm_sign_epi16(x1t, *(simde__m128i *)conjugatedft);
+  //  x1_flip = simde_mm_shufflelo_epi16(x1_flip, SIMDE_MM_SHUFFLE(2,3,0,1));
+  //  x1_flip = simde_mm_shufflehi_epi16(x1_flip, SIMDE_MM_SHUFFLE(2,3,0,1));
+  x1_flip = simde_mm_shuffle_epi8(x1_flip, simde_mm_set_epi8(13, 12, 15, 14, 9, 8, 11, 10, 5, 4, 7, 6, 1, 0, 3, 2));
+  x3_flip = simde_mm_sign_epi16(x3t, *(simde__m128i *)conjugatedft);
+  //  x3_flip = simde_mm_shufflelo_epi16(x3_flip, SIMDE_MM_SHUFFLE(2,3,0,1));
+  //  x3_flip = simde_mm_shufflehi_epi16(x3_flip, SIMDE_MM_SHUFFLE(2,3,0,1));
+  x3_flip = simde_mm_shuffle_epi8(x3_flip, simde_mm_set_epi8(13, 12, 15, 14, 9, 8, 11, 10, 5, 4, 7, 6, 1, 0, 3, 2));
+  x02t = simde_mm_subs_epi16(*(x0), x2t);
+  x13t = simde_mm_subs_epi16(x1_flip, x3_flip);
   /*
-  *(y1)   = _mm_adds_epi16(*(x0),_mm_subs_epi16(x1_flip,_mm_adds_epi16(x2t,x3_flip)));  // x0 + x1f - x2 - x3f
-  *(y3)   = _mm_subs_epi16(*(x0),_mm_adds_epi16(x1_flip,_mm_subs_epi16(x2t,x3_flip)));  // x0 - x1f - x2 + x3f
-  */
-  *(y1)   = _mm_adds_epi16(x02t,x13t);  // x0 + x1f - x2 - x3f
-  *(y3)   = _mm_subs_epi16(x02t,x13t);  // x0 - x1f - x2 + x3f
-
-}
-#endif
-static inline void bfly4_16_256(__m256i *x0,__m256i *x1,__m256i *x2,__m256i *x3,
-				__m256i *y0,__m256i *y1,__m256i *y2,__m256i *y3,
-				__m256i *tw1,__m256i *tw2,__m256i *tw3,
-				__m256i *tw1b,__m256i *tw2b,__m256i *tw3b)__attribute__((always_inline));
-
-static inline void bfly4_16_256(__m256i *x0,__m256i *x1,__m256i *x2,__m256i *x3,
-				__m256i *y0,__m256i *y1,__m256i *y2,__m256i *y3,
-				__m256i *tw1,__m256i *tw2,__m256i *tw3,
-				__m256i *tw1b,__m256i *tw2b,__m256i *tw3b)
-{
-
-  register __m256i x1t,x2t,x3t,x02t,x13t;
-  register __m256i x1_flip,x3_flip;
-  register __m256i complex_shuffle = simde_mm256_set_epi8(29,28,31,30,25,24,27,26,21,20,23,22,17,16,19,18,13,12,15,14,9,8,11,10,5,4,7,6,1,0,3,2);
+   *(y1)   = simde_mm_adds_epi16(*(x0),simde_mm_subs_epi16(x1_flip,simde_mm_adds_epi16(x2t,x3_flip)));  // x0 + x1f - x2 - x3f
+   *(y3)   = simde_mm_subs_epi16(*(x0),simde_mm_adds_epi16(x1_flip,simde_mm_subs_epi16(x2t,x3_flip)));  // x0 - x1f - x2 + x3f
+   */
+  *(y1) = simde_mm_adds_epi16(x02t, x13t); // x0 + x1f - x2 - x3f
+  *(y3) = simde_mm_subs_epi16(x02t, x13t); // x0 - x1f - x2 + x3f
+}
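+/* Editor's note: bfly4_16 is the same radix-4 butterfly preceded by a twiddle
+   rotation of x1..x3.  packed_cmult2 appears to take two prepared layouts of
+   each twiddle (tw* and tw*b, cf. the tw16a/tw16b tables below) so that both the
+   real and the imaginary part of the complex product come out of madd-style
+   instructions without per-sample shuffling -- an assumption, not verified
+   against packed_cmult2 itself, which is defined elsewhere in this file. */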
+
+static inline void __attribute__((always_inline))
+bfly4_16_256(simde__m256i *x0, simde__m256i *x1, simde__m256i *x2, simde__m256i *x3,
+             simde__m256i *y0, simde__m256i *y1, simde__m256i *y2, simde__m256i *y3,
+             simde__m256i *tw1, simde__m256i *tw2, simde__m256i *tw3,
+             simde__m256i *tw1b, simde__m256i *tw2b, simde__m256i *tw3b)
+{
+  register simde__m256i x1t, x2t, x3t, x02t, x13t;
+  register simde__m256i x1_flip, x3_flip;
+  register simde__m256i complex_shuffle =
+      simde_mm256_set_epi8(29, 28, 31, 30, 25, 24, 27, 26, 21, 20, 23, 22, 17, 16, 19, 18,
+                           13, 12, 15, 14, 9, 8, 11, 10, 5, 4, 7, 6, 1, 0, 3, 2);
 
   // each input xi is assumed to be two consecutive vectors xi0 xi1 on which to perform the 8 butterflies
   // [xi00 xi01 xi02 xi03 xi10 xi20 xi30 xi40]
@@ -1400,67 +1088,33 @@ static inline void bfly4_16_256(__m256i *x0,__m256i *x1,__m256i *x2,__m256i *x3,
   *(y0)   = simde_mm256_adds_epi16(x02t,x13t);
   *(y2)   = simde_mm256_subs_epi16(x02t,x13t);
 
-  x1_flip = simde_mm256_sign_epi16(x1t,*(__m256i*)conjugatedft);
+  x1_flip = simde_mm256_sign_epi16(x1t, *(simde__m256i *)conjugatedft);
   x1_flip = simde_mm256_shuffle_epi8(x1_flip,complex_shuffle);
-  x3_flip = simde_mm256_sign_epi16(x3t,*(__m256i*)conjugatedft);
+  x3_flip = simde_mm256_sign_epi16(x3t, *(simde__m256i *)conjugatedft);
   x3_flip = simde_mm256_shuffle_epi8(x3_flip,complex_shuffle);
   x02t  = simde_mm256_subs_epi16(*(x0),x2t);
   x13t  = simde_mm256_subs_epi16(x1_flip,x3_flip);
   *(y1)   = simde_mm256_adds_epi16(x02t,x13t);  // x0 + x1f - x2 - x3f
-  *(y3)   = simde_mm256_subs_epi16(x02t,x13t);  // x0 - x1f - x2 + x3f
-
-}
-
-#elif defined(__arm__) || defined(__aarch64__)
-
-static inline void bfly4_16(int16x8_t *x0,int16x8_t *x1,int16x8_t *x2,int16x8_t *x3,
-                            int16x8_t *y0,int16x8_t *y1,int16x8_t *y2,int16x8_t *y3,
-                            int16x8_t *tw1,int16x8_t *tw2,int16x8_t *tw3,
-                            int16x8_t *tw1b,int16x8_t *tw2b,int16x8_t *tw3b)__attribute__((always_inline));
-
-static inline void bfly4_16(int16x8_t *x0,int16x8_t *x1,int16x8_t *x2,int16x8_t *x3,
-                            int16x8_t *y0,int16x8_t *y1,int16x8_t *y2,int16x8_t *y3,
-                            int16x8_t *tw1,int16x8_t *tw2,int16x8_t *tw3,
-                            int16x8_t *tw1b,int16x8_t *tw2b,int16x8_t *tw3b)
-{
-
-  register int16x8_t x1t,x2t,x3t,x02t,x13t;
-  register int16x8_t x1_flip,x3_flip;
-
-  x1t = packed_cmult2(*(x1),*(tw1),*(tw1b));
-  x2t = packed_cmult2(*(x2),*(tw2),*(tw2b));
-  x3t = packed_cmult2(*(x3),*(tw3),*(tw3b));
-
-
-
-  x02t  = vqaddq_s16(*(x0),x2t);
-  x13t  = vqaddq_s16(x1t,x3t);
-  *(y0)   = vqaddq_s16(x02t,x13t);
-  *(y2)   = vqsubq_s16(x02t,x13t);
-  x1_flip = vrev32q_s16(vmulq_s16(x1t,*(int16x8_t*)conjugatedft));
-  x3_flip = vrev32q_s16(vmulq_s16(x3t,*(int16x8_t*)conjugatedft));
-  x02t  = vqsubq_s16(*(x0),x2t);
-  x13t  = vqsubq_s16(x1_flip,x3_flip);
-  *(y1)   = vqaddq_s16(x02t,x13t);  // x0 + x1f - x2 - x3f
-  *(y3)   = vqsubq_s16(x02t,x13t);  // x0 - x1f - x2 + x3f
-}
-#endif // defined(__x86_64__) || defined(__i386__)
-
-#if defined(__x86_64__) || defined(__i386__)
-#if 0
-static inline void ibfly4_16(__m128i *x0,__m128i *x1,__m128i *x2,__m128i *x3,
-                             __m128i *y0,__m128i *y1,__m128i *y2,__m128i *y3,
-                             __m128i *tw1,__m128i *tw2,__m128i *tw3,
-                             __m128i *tw1b,__m128i *tw2b,__m128i *tw3b)__attribute__((always_inline));
-
-static inline void ibfly4_16(__m128i *x0,__m128i *x1,__m128i *x2,__m128i *x3,
-                             __m128i *y0,__m128i *y1,__m128i *y2,__m128i *y3,
-                             __m128i *tw1,__m128i *tw2,__m128i *tw3,
-                             __m128i *tw1b,__m128i *tw2b,__m128i *tw3b)
-{
-
-  register __m128i x1t,x2t,x3t,x02t,x13t;
-  register __m128i x1_flip,x3_flip;
+  *(y3) = simde_mm256_subs_epi16(x02t, x13t); // x0 - x1f - x2 + x3f
+}
+
+static inline void __attribute__((always_inline))
+ibfly4_16(simde__m128i *x0, simde__m128i *x1, simde__m128i *x2, simde__m128i *x3,
+          simde__m128i *y0, simde__m128i *y1, simde__m128i *y2, simde__m128i *y3,
+          simde__m128i *tw1, simde__m128i *tw2, simde__m128i *tw3,
+          simde__m128i *tw1b, simde__m128i *tw2b, simde__m128i *tw3b)
+{
+  register simde__m128i x1t, x2t, x3t, x02t, x13t;
+  register simde__m128i x1_flip, x3_flip;
 
   x1t = packed_cmult2(*(x1),*(tw1),*(tw1b));
   x2t = packed_cmult2(*(x2),*(tw2),*(tw2b));
@@ -1468,48 +1122,82 @@ static inline void ibfly4_16(__m128i *x0,__m128i *x1,__m128i *x2,__m128i *x3,
 
 
   //  bfly4_tw1(x0,&x1t,&x2t,&x3t,y0,y1,y2,y3);
-  x02t  = _mm_adds_epi16(*(x0),x2t);
-  x13t  = _mm_adds_epi16(x1t,x3t);
+  x02t = simde_mm_adds_epi16(*(x0), x2t);
+  x13t = simde_mm_adds_epi16(x1t, x3t);
   /*
-  *(y0) = _mm_adds_epi16(*(x0),_mm_adds_epi16(x1t,_mm_adds_epi16(x2t,x3t)));
-  *(y2)   = _mm_subs_epi16(*(x0),_mm_subs_epi16(x1t,_mm_subs_epi16(x2t,x3t)));
-  */
-  *(y0)   = _mm_adds_epi16(x02t,x13t);
-  *(y2)   = _mm_subs_epi16(x02t,x13t);
-
-  x1_flip = _mm_sign_epi16(x1t,*(__m128i*)conjugatedft);
-  //  x1_flip = _mm_shufflelo_epi16(x1_flip,_MM_SHUFFLE(2,3,0,1));
-  //  x1_flip = _mm_shufflehi_epi16(x1_flip,_MM_SHUFFLE(2,3,0,1));
-  x1_flip = _mm_shuffle_epi8(x1_flip,_mm_set_epi8(13,12,15,14,9,8,11,10,5,4,7,6,1,0,3,2));
-  x3_flip = _mm_sign_epi16(x3t,*(__m128i*)conjugatedft);
-  //  x3_flip = _mm_shufflelo_epi16(x3_flip,_MM_SHUFFLE(2,3,0,1));
-  //  x3_flip = _mm_shufflehi_epi16(x3_flip,_MM_SHUFFLE(2,3,0,1));
-  x3_flip = _mm_shuffle_epi8(x3_flip,_mm_set_epi8(13,12,15,14,9,8,11,10,5,4,7,6,1,0,3,2));
-  x02t  = _mm_subs_epi16(*(x0),x2t);
-  x13t  = _mm_subs_epi16(x1_flip,x3_flip);
+   *(y0) = simde_mm_adds_epi16(*(x0),simde_mm_adds_epi16(x1t,simde_mm_adds_epi16(x2t,x3t)));
+   *(y2)   = simde_mm_subs_epi16(*(x0),simde_mm_subs_epi16(x1t,simde_mm_subs_epi16(x2t,x3t)));
+   */
+  *(y0) = simde_mm_adds_epi16(x02t, x13t);
+  *(y2) = simde_mm_subs_epi16(x02t, x13t);
+
+  x1_flip = simde_mm_sign_epi16(x1t, *(simde__m128i *)conjugatedft);
+  //  x1_flip = simde_mm_shufflelo_epi16(x1_flip, SIMDE_MM_SHUFFLE(2,3,0,1));
+  //  x1_flip = simde_mm_shufflehi_epi16(x1_flip, SIMDE_MM_SHUFFLE(2,3,0,1));
+  x1_flip = simde_mm_shuffle_epi8(x1_flip, simde_mm_set_epi8(13, 12, 15, 14, 9, 8, 11, 10, 5, 4, 7, 6, 1, 0, 3, 2));
+  x3_flip = simde_mm_sign_epi16(x3t, *(simde__m128i *)conjugatedft);
+  //  x3_flip = simde_mm_shufflelo_epi16(x3_flip, SIMDE_MM_SHUFFLE(2,3,0,1));
+  //  x3_flip = simde_mm_shufflehi_epi16(x3_flip, SIMDE_MM_SHUFFLE(2,3,0,1));
+  x3_flip = simde_mm_shuffle_epi8(x3_flip, simde_mm_set_epi8(13, 12, 15, 14, 9, 8, 11, 10, 5, 4, 7, 6, 1, 0, 3, 2));
+  x02t = simde_mm_subs_epi16(*(x0), x2t);
+  x13t = simde_mm_subs_epi16(x1_flip, x3_flip);
   /*
-  *(y1)   = _mm_adds_epi16(*(x0),_mm_subs_epi16(x1_flip,_mm_adds_epi16(x2t,x3_flip)));  // x0 + x1f - x2 - x3f
-  *(y3)   = _mm_subs_epi16(*(x0),_mm_adds_epi16(x1_flip,_mm_subs_epi16(x2t,x3_flip)));  // x0 - x1f - x2 + x3f
-  */
-  *(y3)   = _mm_adds_epi16(x02t,x13t);  // x0 + x1f - x2 - x3f
-  *(y1)   = _mm_subs_epi16(x02t,x13t);  // x0 - x1f - x2 + x3f
-
-}
-#endif
-static inline void ibfly4_16_256(__m256i *x0,__m256i *x1,__m256i *x2,__m256i *x3,
-				 __m256i *y0,__m256i *y1,__m256i *y2,__m256i *y3,
-				 __m256i *tw1,__m256i *tw2,__m256i *tw3,
-				 __m256i *tw1b,__m256i *tw2b,__m256i *tw3b)__attribute__((always_inline));
-
-static inline void ibfly4_16_256(__m256i *x0,__m256i *x1,__m256i *x2,__m256i *x3,
-				 __m256i *y0,__m256i *y1,__m256i *y2,__m256i *y3,
-				 __m256i *tw1,__m256i *tw2,__m256i *tw3,
-				 __m256i *tw1b,__m256i *tw2b,__m256i *tw3b)
-{
-
-  register __m256i x1t,x2t,x3t,x02t,x13t;
-  register __m256i x1_flip,x3_flip;
-  register __m256i complex_shuffle = simde_mm256_set_epi8(29,28,31,30,25,24,27,26,21,20,23,22,17,16,19,18,13,12,15,14,9,8,11,10,5,4,7,6,1,0,3,2);
+   *(y1)   = simde_mm_adds_epi16(*(x0),simde_mm_subs_epi16(x1_flip,simde_mm_adds_epi16(x2t,x3_flip)));  // x0 + x1f - x2 - x3f
+   *(y3)   = simde_mm_subs_epi16(*(x0),simde_mm_adds_epi16(x1_flip,simde_mm_subs_epi16(x2t,x3_flip)));  // x0 - x1f - x2 + x3f
+   */
+  *(y3) = simde_mm_adds_epi16(x02t, x13t); // x0 + x1f - x2 - x3f
+  *(y1) = simde_mm_subs_epi16(x02t, x13t); // x0 - x1f - x2 + x3f
+}
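+/* Editor's note: the final adds/subs results deliberately land in y3/y1 here --
+   swapped relative to bfly4_16 -- which once more flips the sign of j for the
+   inverse transform. */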
+
+static inline void __attribute__((always_inline))
+ibfly4_16_256(simde__m256i *x0, simde__m256i *x1, simde__m256i *x2, simde__m256i *x3,
+              simde__m256i *y0, simde__m256i *y1, simde__m256i *y2, simde__m256i *y3,
+              simde__m256i *tw1, simde__m256i *tw2, simde__m256i *tw3,
+              simde__m256i *tw1b, simde__m256i *tw2b, simde__m256i *tw3b)
+{
+  register simde__m256i x1t, x2t, x3t, x02t, x13t;
+  register simde__m256i x1_flip, x3_flip;
+  register simde__m256i complex_shuffle =
+      simde_mm256_set_epi8(29, 28, 31, 30, 25, 24, 27, 26, 21, 20, 23, 22, 17, 16, 19, 18,
+                           13, 12, 15, 14, 9, 8, 11, 10, 5, 4, 7, 6, 1, 0, 3, 2);
 
   // each input xi is assumed to be two consecutive vectors xi0 xi1 on which to perform the 8 butterflies
   // [xi00 xi01 xi02 xi03 xi10 xi20 xi30 xi40]
@@ -1524,114 +1212,83 @@ static inline void ibfly4_16_256(__m256i *x0,__m256i *x1,__m256i *x2,__m256i *x3
   *(y0)   = simde_mm256_adds_epi16(x02t,x13t);
   *(y2)   = simde_mm256_subs_epi16(x02t,x13t);
 
-  x1_flip = simde_mm256_sign_epi16(x1t,*(__m256i*)conjugatedft);
+  x1_flip = simde_mm256_sign_epi16(x1t, *(simde__m256i *)conjugatedft);
   x1_flip = simde_mm256_shuffle_epi8(x1_flip,complex_shuffle);
-  x3_flip = simde_mm256_sign_epi16(x3t,*(__m256i*)conjugatedft);
+  x3_flip = simde_mm256_sign_epi16(x3t, *(simde__m256i *)conjugatedft);
   x3_flip = simde_mm256_shuffle_epi8(x3_flip,complex_shuffle);
   x02t  = simde_mm256_subs_epi16(*(x0),x2t);
   x13t  = simde_mm256_subs_epi16(x1_flip,x3_flip);
   *(y3)   = simde_mm256_adds_epi16(x02t,x13t);  // x0 + x1f - x2 - x3f
-  *(y1)   = simde_mm256_subs_epi16(x02t,x13t);  // x0 - x1f - x2 + x3f
-
-}
-
-#elif defined(__arm__) || defined(__aarch64__)
-#if 0
-static inline void ibfly4_16(int16x8_t *x0,int16x8_t *x1,int16x8_t *x2,int16x8_t *x3,
-			     int16x8_t *y0,int16x8_t *y1,int16x8_t *y2,int16x8_t *y3,
-			     int16x8_t *tw1,int16x8_t *tw2,int16x8_t *tw3,
-			     int16x8_t *tw1b,int16x8_t *tw2b,int16x8_t *tw3b)__attribute__((always_inline));
-
-static inline void ibfly4_16(int16x8_t *x0,int16x8_t *x1,int16x8_t *x2,int16x8_t *x3,
-			     int16x8_t *y0,int16x8_t *y1,int16x8_t *y2,int16x8_t *y3,
-			     int16x8_t *tw1,int16x8_t *tw2,int16x8_t *tw3,
-			     int16x8_t *tw1b,int16x8_t *tw2b,int16x8_t *tw3b)
-{
-
-  register int16x8_t x1t,x2t,x3t,x02t,x13t;
-  register int16x8_t x1_flip,x3_flip;
-
-  x1t = packed_cmult2(*(x1),*(tw1),*(tw1b));
-  x2t = packed_cmult2(*(x2),*(tw2),*(tw2b));
-  x3t = packed_cmult2(*(x3),*(tw3),*(tw3b));
-
-  x02t    = vqaddq_s16(*(x0),x2t);
-  x13t    = vqaddq_s16(x1t,x3t);
-  *(y0)   = vqaddq_s16(x02t,x13t);
-  *(y2)   = vqsubq_s16(x02t,x13t);
-  x1_flip = vrev32q_s16(vmulq_s16(x1t,*(int16x8_t*)conjugatedft));
-  x3_flip = vrev32q_s16(vmulq_s16(x3t,*(int16x8_t*)conjugatedft));
-  x02t    = vqsubq_s16(*(x0),x2t);
-  x13t    = vqsubq_s16(x1_flip,x3_flip);
-  *(y3)   = vqaddq_s16(x02t,x13t);  // x0 - x1f - x2 + x3f
-  *(y1)   = vqsubq_s16(x02t,x13t);  // x0 + x1f - x2 - x3f
-}
-#endif
-#endif // defined(__x86_64__) || defined(__i386__)
-
-#if defined(__x86_64__) || defined(__i386__)
-static inline void bfly5(__m128i *x0, __m128i *x1, __m128i *x2, __m128i *x3,__m128i *x4,
-                         __m128i *y0, __m128i *y1, __m128i *y2, __m128i *y3,__m128i *y4,
-                         __m128i *tw1,__m128i *tw2,__m128i *tw3,__m128i *tw4)__attribute__((always_inline));
-
-static inline void bfly5(__m128i *x0, __m128i *x1, __m128i *x2, __m128i *x3,__m128i *x4,
-                         __m128i *y0, __m128i *y1, __m128i *y2, __m128i *y3,__m128i *y4,
-                         __m128i *tw1,__m128i *tw2,__m128i *tw3,__m128i *tw4)
-{
-
-
-
-  __m128i x1_2,x2_2,x3_2,x4_2,tmpre,tmpim;
+  *(y1) = simde_mm256_subs_epi16(x02t, x13t); // x0 - x1f - x2 + x3f
+}
+
+static inline void __attribute__((always_inline))
+bfly5(simde__m128i *x0, simde__m128i *x1, simde__m128i *x2, simde__m128i *x3, simde__m128i *x4,
+      simde__m128i *y0, simde__m128i *y1, simde__m128i *y2, simde__m128i *y3, simde__m128i *y4,
+      simde__m128i *tw1, simde__m128i *tw2, simde__m128i *tw3, simde__m128i *tw4)
+{
+  simde__m128i x1_2, x2_2, x3_2, x4_2, tmpre, tmpim;
 
   packed_cmult(*(x1),*(tw1),&x1_2);
   packed_cmult(*(x2),*(tw2),&x2_2);
   packed_cmult(*(x3),*(tw3),&x3_2);
   packed_cmult(*(x4),*(tw4),&x4_2);
 
-  *(y0)  = _mm_adds_epi16(*(x0),_mm_adds_epi16(x1_2,_mm_adds_epi16(x2_2,_mm_adds_epi16(x3_2,x4_2))));
+  *(y0) = simde_mm_adds_epi16(*(x0), simde_mm_adds_epi16(x1_2, simde_mm_adds_epi16(x2_2, simde_mm_adds_epi16(x3_2, x4_2))));
   cmult(x1_2,*(W15),&tmpre,&tmpim);
   cmac(x2_2,*(W25),&tmpre,&tmpim);
   cmac(x3_2,*(W35),&tmpre,&tmpim);
   cmac(x4_2,*(W45),&tmpre,&tmpim);
   *(y1) = cpack(tmpre,tmpim);
-  *(y1) = _mm_adds_epi16(*(x0),*(y1));
+  *(y1) = simde_mm_adds_epi16(*(x0), *(y1));
 
   cmult(x1_2,*(W25),&tmpre,&tmpim);
   cmac(x2_2,*(W45),&tmpre,&tmpim);
   cmac(x3_2,*(W15),&tmpre,&tmpim);
   cmac(x4_2,*(W35),&tmpre,&tmpim);
   *(y2) = cpack(tmpre,tmpim);
-  *(y2) = _mm_adds_epi16(*(x0),*(y2));
+  *(y2) = simde_mm_adds_epi16(*(x0), *(y2));
 
   cmult(x1_2,*(W35),&tmpre,&tmpim);
   cmac(x2_2,*(W15),&tmpre,&tmpim);
   cmac(x3_2,*(W45),&tmpre,&tmpim);
   cmac(x4_2,*(W25),&tmpre,&tmpim);
   *(y3) = cpack(tmpre,tmpim);
-  *(y3) = _mm_adds_epi16(*(x0),*(y3));
+  *(y3) = simde_mm_adds_epi16(*(x0), *(y3));
 
   cmult(x1_2,*(W45),&tmpre,&tmpim);
   cmac(x2_2,*(W35),&tmpre,&tmpim);
   cmac(x3_2,*(W25),&tmpre,&tmpim);
   cmac(x4_2,*(W15),&tmpre,&tmpim);
   *(y4) = cpack(tmpre,tmpim);
-  *(y4) = _mm_adds_epi16(*(x0),*(y4));
-
-
-}
-#if 0
-static inline void bfly5_256(__m256i *x0, __m256i *x1, __m256i *x2, __m256i *x3,__m256i *x4,
-			     __m256i *y0, __m256i *y1, __m256i *y2, __m256i *y3,__m256i *y4,
-			     __m256i *tw1,__m256i *tw2,__m256i *tw3,__m256i *tw4)__attribute__((always_inline));
-
-static inline void bfly5_256(__m256i *x0, __m256i *x1, __m256i *x2, __m256i *x3,__m256i *x4,
-			     __m256i *y0, __m256i *y1, __m256i *y2, __m256i *y3,__m256i *y4,
-			     __m256i *tw1,__m256i *tw2,__m256i *tw3,__m256i *tw4)
-{
-
-
-
-  __m256i x1_2,x2_2,x3_2,x4_2,tmpre,tmpim;
+  *(y4) = simde_mm_adds_epi16(*(x0), *(y4));
+}
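+/* Editor's note (sketch): bfly5 is a direct 5-point DFT of the twiddled inputs,
+     y_k = x0 + sum_{n=1..4} xn' * W5^(n*k mod 5),   W5 = exp(-2*pi*j/5),
+   where W15..W45 are assumed to hold Q15 copies of W5^1..W5^4; cmult/cmac
+   accumulate the products at 32-bit precision and cpack rounds back to Q15. */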
+
+static inline void __attribute__((always_inline))
+bfly5_256(simde__m256i *x0, simde__m256i *x1, simde__m256i *x2, simde__m256i *x3, simde__m256i *x4,
+          simde__m256i *y0, simde__m256i *y1, simde__m256i *y2, simde__m256i *y3, simde__m256i *y4,
+          simde__m256i *tw1, simde__m256i *tw2, simde__m256i *tw3, simde__m256i *tw4)
+{
+  simde__m256i x1_2, x2_2, x3_2, x4_2, tmpre, tmpim;
 
   packed_cmult_256(*(x1),*(tw1),&x1_2);
   packed_cmult_256(*(x2),*(tw2),&x2_2);
@@ -1665,245 +1322,129 @@ static inline void bfly5_256(__m256i *x0, __m256i *x1, __m256i *x2, __m256i *x3,
   cmac_256(x3_2,*(W25_256),&tmpre,&tmpim);
   cmac_256(x4_2,*(W15_256),&tmpre,&tmpim);
   *(y4) = cpack_256(tmpre,tmpim);
-  *(y4) = simde_mm256_adds_epi16(*(x0),*(y4));
-
-
-}
-#endif
-#elif defined(__arm__) || defined(__aarch64__)
-static inline void bfly5(int16x8_t *x0, int16x8_t *x1, int16x8_t *x2, int16x8_t *x3,int16x8_t *x4,
-                         int16x8_t *y0, int16x8_t *y1, int16x8_t *y2, int16x8_t *y3,int16x8_t *y4,
-                         int16x8_t *tw1,int16x8_t *tw2,int16x8_t *tw3,int16x8_t *tw4)__attribute__((always_inline));
-
-static inline void bfly5(int16x8_t *x0, int16x8_t *x1, int16x8_t *x2, int16x8_t *x3,int16x8_t *x4,
-                         int16x8_t *y0, int16x8_t *y1, int16x8_t *y2, int16x8_t *y3,int16x8_t *y4,
-                         int16x8_t *tw1,int16x8_t *tw2,int16x8_t *tw3,int16x8_t *tw4)
-{
-
-
-
-  int16x8_t x1_2,x2_2,x3_2,x4_2;
-  int32x4_t tmpre,tmpim;
-
-  packed_cmult(*(x1),*(tw1),&x1_2);
-  packed_cmult(*(x2),*(tw2),&x2_2);
-  packed_cmult(*(x3),*(tw3),&x3_2);
-  packed_cmult(*(x4),*(tw4),&x4_2);
-
-  *(y0)  = vqaddq_s16(*(x0),vqaddq_s16(x1_2,vqaddq_s16(x2_2,vqaddq_s16(x3_2,x4_2))));
-  cmult(x1_2,*(W15),&tmpre,&tmpim);
-  cmac(x2_2,*(W25),&tmpre,&tmpim);
-  cmac(x3_2,*(W35),&tmpre,&tmpim);
-  cmac(x4_2,*(W45),&tmpre,&tmpim);
-  *(y1) = cpack(tmpre,tmpim);
-  *(y1) = vqaddq_s16(*(x0),*(y1));
-
-  cmult(x1_2,*(W25),&tmpre,&tmpim);
-  cmac(x2_2,*(W45),&tmpre,&tmpim);
-  cmac(x3_2,*(W15),&tmpre,&tmpim);
-  cmac(x4_2,*(W35),&tmpre,&tmpim);
-  *(y2) = cpack(tmpre,tmpim);
-  *(y2) = vqaddq_s16(*(x0),*(y2));
-
-  cmult(x1_2,*(W35),&tmpre,&tmpim);
-  cmac(x2_2,*(W15),&tmpre,&tmpim);
-  cmac(x3_2,*(W45),&tmpre,&tmpim);
-  cmac(x4_2,*(W25),&tmpre,&tmpim);
-  *(y3) = cpack(tmpre,tmpim);
-  *(y3) = vqaddq_s16(*(x0),*(y3));
-
-  cmult(x1_2,*(W45),&tmpre,&tmpim);
-  cmac(x2_2,*(W35),&tmpre,&tmpim);
-  cmac(x3_2,*(W25),&tmpre,&tmpim);
-  cmac(x4_2,*(W15),&tmpre,&tmpim);
-  *(y4) = cpack(tmpre,tmpim);
-  *(y4) = vqaddq_s16(*(x0),*(y4));
-
-
+  *(y4) = simde_mm256_adds_epi16(*(x0), *(y4));
 }
 
-
-#endif // defined(__x86_64__) || defined(__i386__)
-
-#if defined(__x86_64__) || defined(__i386__)
-static inline void bfly5_tw1(__m128i *x0, __m128i *x1, __m128i *x2, __m128i *x3,__m128i *x4,
-                             __m128i *y0, __m128i *y1, __m128i *y2, __m128i *y3,__m128i *y4) __attribute__((always_inline));
-
-static inline void bfly5_tw1(__m128i *x0, __m128i *x1, __m128i *x2, __m128i *x3,__m128i *x4,
-                             __m128i *y0, __m128i *y1, __m128i *y2, __m128i *y3,__m128i *y4)
+static inline void __attribute__((always_inline))
+bfly5_tw1(simde__m128i *x0, simde__m128i *x1, simde__m128i *x2, simde__m128i *x3, simde__m128i *x4,
+          simde__m128i *y0, simde__m128i *y1, simde__m128i *y2, simde__m128i *y3, simde__m128i *y4)
 {
+  simde__m128i tmpre, tmpim;
 
-  __m128i tmpre,tmpim;
-
-  *(y0) = _mm_adds_epi16(*(x0),_mm_adds_epi16(*(x1),_mm_adds_epi16(*(x2),_mm_adds_epi16(*(x3),*(x4)))));
+  *(y0) = simde_mm_adds_epi16(*(x0), simde_mm_adds_epi16(*(x1), simde_mm_adds_epi16(*(x2), simde_mm_adds_epi16(*(x3), *(x4)))));
   cmult(*(x1),*(W15),&tmpre,&tmpim);
   cmac(*(x2),*(W25),&tmpre,&tmpim);
   cmac(*(x3),*(W35),&tmpre,&tmpim);
   cmac(*(x4),*(W45),&tmpre,&tmpim);
   *(y1) = cpack(tmpre,tmpim);
-  *(y1) = _mm_adds_epi16(*(x0),*(y1));
+  *(y1) = simde_mm_adds_epi16(*(x0), *(y1));
   cmult(*(x1),*(W25),&tmpre,&tmpim);
   cmac(*(x2),*(W45),&tmpre,&tmpim);
   cmac(*(x3),*(W15),&tmpre,&tmpim);
   cmac(*(x4),*(W35),&tmpre,&tmpim);
   *(y2) = cpack(tmpre,tmpim);
-  *(y2) = _mm_adds_epi16(*(x0),*(y2));
+  *(y2) = simde_mm_adds_epi16(*(x0), *(y2));
   cmult(*(x1),*(W35),&tmpre,&tmpim);
   cmac(*(x2),*(W15),&tmpre,&tmpim);
   cmac(*(x3),*(W45),&tmpre,&tmpim);
   cmac(*(x4),*(W25),&tmpre,&tmpim);
   *(y3) = cpack(tmpre,tmpim);
-  *(y3) = _mm_adds_epi16(*(x0),*(y3));
+  *(y3) = simde_mm_adds_epi16(*(x0), *(y3));
   cmult(*(x1),*(W45),&tmpre,&tmpim);
   cmac(*(x2),*(W35),&tmpre,&tmpim);
   cmac(*(x3),*(W25),&tmpre,&tmpim);
   cmac(*(x4),*(W15),&tmpre,&tmpim);
   *(y4) = cpack(tmpre,tmpim);
-  *(y4) = _mm_adds_epi16(*(x0),*(y4));
+  *(y4) = simde_mm_adds_epi16(*(x0), *(y4));
 }
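+/* Editor's note: bfly5_tw1 is bfly5 with the input twiddles fixed at 1, so the
+   packed_cmult pre-rotation drops out and x1..x4 feed the W*5 sums directly. */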
-#if 0
-static inline void bfly5_tw1_256(__m256i *x0, __m256i *x1, __m256i *x2, __m256i *x3,__m256i *x4,
-				 __m256i *y0, __m256i *y1, __m256i *y2, __m256i *y3,__m256i *y4) __attribute__((always_inline));
 
-static inline void bfly5_tw1_256(__m256i *x0, __m256i *x1, __m256i *x2, __m256i *x3,__m256i *x4,
-				 __m256i *y0, __m256i *y1, __m256i *y2, __m256i *y3,__m256i *y4)
+static inline void __attribute__((always_inline))
+bfly5_tw1_256(simde__m256i *x0, simde__m256i *x1, simde__m256i *x2, simde__m256i *x3, simde__m256i *x4,
+              simde__m256i *y0, simde__m256i *y1, simde__m256i *y2, simde__m256i *y3, simde__m256i *y4)
 {
-
-  __m256i tmpre,tmpim;
+  simde__m256i tmpre, tmpim;
 
   *(y0) = simde_mm256_adds_epi16(*(x0),simde_mm256_adds_epi16(*(x1),simde_mm256_adds_epi16(*(x2),simde_mm256_adds_epi16(*(x3),*(x4)))));
   cmult_256(*(x1),*(W15_256),&tmpre,&tmpim);
   cmac_256(*(x2),*(W25_256),&tmpre,&tmpim);
   cmac_256(*(x3),*(W35_256),&tmpre,&tmpim);
-  cmac_256(*(x4),*(W45_256),&tmpre,&tmpim);
-  *(y1) = cpack_256(tmpre,tmpim);
-  *(y1) = simde_mm256_adds_epi16(*(x0),*(y1));
-  cmult_256(*(x1),*(W25_256),&tmpre,&tmpim);
-  cmac_256(*(x2),*(W45_256),&tmpre,&tmpim);
-  cmac_256(*(x3),*(W15_256),&tmpre,&tmpim);
-  cmac_256(*(x4),*(W35_256),&tmpre,&tmpim);
-  *(y2) = cpack_256(tmpre,tmpim);
-  *(y2) = simde_mm256_adds_epi16(*(x0),*(y2));
-  cmult_256(*(x1),*(W35_256),&tmpre,&tmpim);
-  cmac_256(*(x2),*(W15_256),&tmpre,&tmpim);
-  cmac_256(*(x3),*(W45_256),&tmpre,&tmpim);
-  cmac_256(*(x4),*(W25_256),&tmpre,&tmpim);
-  *(y3) = cpack_256(tmpre,tmpim);
-  *(y3) = simde_mm256_adds_epi16(*(x0),*(y3));
-  cmult_256(*(x1),*(W45_256),&tmpre,&tmpim);
-  cmac_256(*(x2),*(W35_256),&tmpre,&tmpim);
-  cmac_256(*(x3),*(W25_256),&tmpre,&tmpim);
-  cmac_256(*(x4),*(W15_256),&tmpre,&tmpim);
-  *(y4) = cpack_256(tmpre,tmpim);
-  *(y4) = simde_mm256_adds_epi16(*(x0),*(y4));
-}
-#endif
-#elif defined(__arm__) || defined(__aarch64__)
-static inline void bfly5_tw1(int16x8_t *x0, int16x8_t *x1, int16x8_t *x2, int16x8_t *x3,int16x8_t *x4,
-                             int16x8_t *y0, int16x8_t *y1, int16x8_t *y2, int16x8_t *y3,int16x8_t *y4) __attribute__((always_inline));
-
-static inline void bfly5_tw1(int16x8_t *x0, int16x8_t *x1, int16x8_t *x2, int16x8_t *x3,int16x8_t *x4,
-                             int16x8_t *y0, int16x8_t *y1, int16x8_t *y2, int16x8_t *y3,int16x8_t *y4)
-{
-
-  int32x4_t tmpre,tmpim;
-
-  *(y0) = vqaddq_s16(*(x0),vqaddq_s16(*(x1),vqaddq_s16(*(x2),vqaddq_s16(*(x3),*(x4)))));
-  cmult(*(x1),*(W15),&tmpre,&tmpim);
-  cmac(*(x2),*(W25),&tmpre,&tmpim);
-  cmac(*(x3),*(W35),&tmpre,&tmpim);
-  cmac(*(x4),*(W45),&tmpre,&tmpim);
-  *(y1) = cpack(tmpre,tmpim);
-  *(y1) = vqaddq_s16(*(x0),*(y1));
-  cmult(*(x1),*(W25),&tmpre,&tmpim);
-  cmac(*(x2),*(W45),&tmpre,&tmpim);
-  cmac(*(x3),*(W15),&tmpre,&tmpim);
-  cmac(*(x4),*(W35),&tmpre,&tmpim);
-  *(y2) = cpack(tmpre,tmpim);
-  *(y2) = vqaddq_s16(*(x0),*(y2));
-  cmult(*(x1),*(W35),&tmpre,&tmpim);
-  cmac(*(x2),*(W15),&tmpre,&tmpim);
-  cmac(*(x3),*(W45),&tmpre,&tmpim);
-  cmac(*(x4),*(W25),&tmpre,&tmpim);
-  *(y3) = cpack(tmpre,tmpim);
-  *(y3) = vqaddq_s16(*(x0),*(y3));
-  cmult(*(x1),*(W45),&tmpre,&tmpim);
-  cmac(*(x2),*(W35),&tmpre,&tmpim);
-  cmac(*(x3),*(W25),&tmpre,&tmpim);
-  cmac(*(x4),*(W15),&tmpre,&tmpim);
-  *(y4) = cpack(tmpre,tmpim);
-  *(y4) = vqaddq_s16(*(x0),*(y4));
+  cmac_256(*(x4),*(W45_256),&tmpre,&tmpim);
+  *(y1) = cpack_256(tmpre,tmpim);
+  *(y1) = simde_mm256_adds_epi16(*(x0),*(y1));
+  cmult_256(*(x1),*(W25_256),&tmpre,&tmpim);
+  cmac_256(*(x2),*(W45_256),&tmpre,&tmpim);
+  cmac_256(*(x3),*(W15_256),&tmpre,&tmpim);
+  cmac_256(*(x4),*(W35_256),&tmpre,&tmpim);
+  *(y2) = cpack_256(tmpre,tmpim);
+  *(y2) = simde_mm256_adds_epi16(*(x0),*(y2));
+  cmult_256(*(x1),*(W35_256),&tmpre,&tmpim);
+  cmac_256(*(x2),*(W15_256),&tmpre,&tmpim);
+  cmac_256(*(x3),*(W45_256),&tmpre,&tmpim);
+  cmac_256(*(x4),*(W25_256),&tmpre,&tmpim);
+  *(y3) = cpack_256(tmpre,tmpim);
+  *(y3) = simde_mm256_adds_epi16(*(x0),*(y3));
+  cmult_256(*(x1),*(W45_256),&tmpre,&tmpim);
+  cmac_256(*(x2),*(W35_256),&tmpre,&tmpim);
+  cmac_256(*(x3),*(W25_256),&tmpre,&tmpim);
+  cmac_256(*(x4),*(W15_256),&tmpre,&tmpim);
+  *(y4) = cpack_256(tmpre,tmpim);
+  *(y4) = simde_mm256_adds_epi16(*(x0),*(y4));
 }
 
-#endif // defined(__x86_64__) || defined(__i386__)
-
 // performs 4x4 transpose of input x (complex interleaved) using 128bit SIMD intrinsics
 // i.e. x = [x0r x0i x1r x1i ... x15r x15i], y = [x0r x0i x4r x4i x8r x8i x12r x12i x1r x1i x5r x5i x9r x9i x13r x13i x2r x2i ... x15r x15i]
-
-#if defined(__x86_64__) || defined(__i386__)
-#if 0
-static inline void transpose16(__m128i *x,__m128i *y) __attribute__((always_inline));
-static inline void transpose16(__m128i *x,__m128i *y)
-{
-  register __m128i ytmp0,ytmp1,ytmp2,ytmp3;
-
-  ytmp0 = _mm_unpacklo_epi32(x[0],x[1]);
-  ytmp1 = _mm_unpackhi_epi32(x[0],x[1]);
-  ytmp2 = _mm_unpacklo_epi32(x[2],x[3]);
-  ytmp3 = _mm_unpackhi_epi32(x[2],x[3]);
-  y[0]    = _mm_unpacklo_epi64(ytmp0,ytmp2);
-  y[1]    = _mm_unpackhi_epi64(ytmp0,ytmp2);
-  y[2]    = _mm_unpacklo_epi64(ytmp1,ytmp3);
-  y[3]    = _mm_unpackhi_epi64(ytmp1,ytmp3);
-}
-#endif
-#elif defined(__arm__) || defined(__aarch64__)
-#if 0
-static inline void transpose16(int16x8_t *x,int16x8_t *y) __attribute__((always_inline));
-static inline void transpose16(int16x8_t *x,int16x8_t *y)
+static inline void __attribute__((always_inline)) transpose16(simde__m128i *x, simde__m128i *y)
 {
-  register uint32x4x2_t ytmp0,ytmp1;
+  register simde__m128i ytmp0, ytmp1, ytmp2, ytmp3;
 
-  ytmp0 = vtrnq_u32((uint32x4_t)(x[0]),(uint32x4_t)(x[1]));
-  ytmp1 = vtrnq_u32((uint32x4_t)(x[2]),(uint32x4_t)(x[3]));
-
-  y[0]  = vcombine_s16(vget_low_s16((int16x8_t)ytmp0.val[0]),vget_low_s16((int16x8_t)ytmp1.val[0]));
-  y[1]  = vcombine_s16(vget_high_s16((int16x8_t)ytmp0.val[0]),vget_high_s16((int16x8_t)ytmp1.val[0]));
-  y[2]  = vcombine_s16(vget_low_s16((int16x8_t)ytmp0.val[1]),vget_low_s16((int16x8_t)ytmp1.val[1]));
-  y[3]  = vcombine_s16(vget_high_s16((int16x8_t)ytmp0.val[1]),vget_high_s16((int16x8_t)ytmp1.val[1]));
+  ytmp0 = simde_mm_unpacklo_epi32(x[0], x[1]);
+  ytmp1 = simde_mm_unpackhi_epi32(x[0], x[1]);
+  ytmp2 = simde_mm_unpacklo_epi32(x[2], x[3]);
+  ytmp3 = simde_mm_unpackhi_epi32(x[2], x[3]);
+  y[0] = simde_mm_unpacklo_epi64(ytmp0, ytmp2);
+  y[1] = simde_mm_unpackhi_epi64(ytmp0, ytmp2);
+  y[2] = simde_mm_unpacklo_epi64(ytmp1, ytmp3);
+  y[3] = simde_mm_unpackhi_epi64(ytmp1, ytmp3);
 }
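+/* Editor's note: transpose16 treats x[0..3] as a 4x4 matrix of 32-bit complex
+   entries and transposes it with the classic unpack ladder -- unpack*_epi32
+   interleaves rows pairwise, unpack*_epi64 then gathers whole columns, giving
+   y[c] = [x0c x1c x2c x3c] for c = 0..3. */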
-#endif
-#endif // defined(__x86_64__) || defined(__i386__)
 
 // same as above but output is offset by off
-#if defined(__x86_64__) || defined(__i386__)
-#if 0
-static inline void transpose16_ooff(__m128i *x,__m128i *y,int off) __attribute__((always_inline));
-
-static inline void transpose16_ooff(__m128i *x,__m128i *y,int off)
+static inline void __attribute__((always_inline)) transpose16_ooff(simde__m128i *x, simde__m128i *y, int off)
 {
-  register __m128i ytmp0,ytmp1,ytmp2,ytmp3;
-  __m128i *y2=y;
+  register simde__m128i ytmp0, ytmp1, ytmp2, ytmp3;
+  simde__m128i *y2 = y;
 
-  ytmp0 = _mm_unpacklo_epi32(x[0],x[1]); // x00 x10 x01 x11
-  ytmp1 = _mm_unpackhi_epi32(x[0],x[1]); // x02 x12 x03 x13
-  ytmp2 = _mm_unpacklo_epi32(x[2],x[3]); // x20 x30 x21 x31
-  ytmp3 = _mm_unpackhi_epi32(x[2],x[3]); // x22 x32 x23 x33
-  *y2     = _mm_unpacklo_epi64(ytmp0,ytmp2); // x00 x10 x20 x30 
+  ytmp0 = simde_mm_unpacklo_epi32(x[0], x[1]); // x00 x10 x01 x11
+  ytmp1 = simde_mm_unpackhi_epi32(x[0], x[1]); // x02 x12 x03 x13
+  ytmp2 = simde_mm_unpacklo_epi32(x[2], x[3]); // x20 x30 x21 x31
+  ytmp3 = simde_mm_unpackhi_epi32(x[2], x[3]); // x22 x32 x23 x33
+  *y2 = simde_mm_unpacklo_epi64(ytmp0, ytmp2); // x00 x10 x20 x30
   y2+=off;
-  *y2     = _mm_unpackhi_epi64(ytmp0,ytmp2); // x01 x11 x21 x31
+  *y2 = simde_mm_unpackhi_epi64(ytmp0, ytmp2); // x01 x11 x21 x31
   y2+=off;
-  *y2     = _mm_unpacklo_epi64(ytmp1,ytmp3); // x02 x12 x22 x32
+  *y2 = simde_mm_unpacklo_epi64(ytmp1, ytmp3); // x02 x12 x22 x32
   y2+=off;
-  *y2     = _mm_unpackhi_epi64(ytmp1,ytmp3); // x03 x13 x23 x33
+  *y2 = simde_mm_unpackhi_epi64(ytmp1, ytmp3); // x03 x13 x23 x33
 }
-#endif
-static inline void transpose16_ooff_simd256(__m256i *x,__m256i *y,int off) __attribute__((always_inline));
-static inline void transpose16_ooff_simd256(__m256i *x,__m256i *y,int off)
+
+static inline void __attribute__((always_inline)) transpose16_ooff_simd256(simde__m256i *x, simde__m256i *y, int off)
 {
-  register __m256i ytmp0,ytmp1,ytmp2,ytmp3,ytmp4,ytmp5,ytmp6,ytmp7;
-  __m256i *y2=y;
-  __m256i const perm_mask = simde_mm256_set_epi32(7, 3, 5, 1, 6, 2, 4, 0);
+  register simde__m256i ytmp0, ytmp1, ytmp2, ytmp3, ytmp4, ytmp5, ytmp6, ytmp7;
+  simde__m256i *y2 = y;
+  simde__m256i const perm_mask = simde_mm256_set_epi32(7, 3, 5, 1, 6, 2, 4, 0);
 
   ytmp0 = simde_mm256_permutevar8x32_epi32(x[0],perm_mask);  // x00 x10 x01 x11 x02 x12 x03 x13
   ytmp1 = simde_mm256_permutevar8x32_epi32(x[1],perm_mask);  // x20 x30 x21 x31 x22 x32 x23 x33
@@ -1923,47 +1464,21 @@ static inline void transpose16_ooff_simd256(__m256i *x,__m256i *y,int off)
   *y2    = simde_mm256_insertf128_si256(ytmp7,simde_mm256_extracti128_si256(ytmp5,1),0);  //x01 x11 x21 x31 x41 x51 x61 x71
 }
 
-#elif defined(__arm__) || defined(__aarch64__)
-#if 0
-static inline void transpose16_ooff(int16x8_t *x,int16x8_t *y,int off) __attribute__((always_inline));
-
-static inline void transpose16_ooff(int16x8_t *x,int16x8_t *y,int off)
-{
-  int16x8_t *y2=y;
-  register uint32x4x2_t ytmp0,ytmp1;
-
-  ytmp0 = vtrnq_u32((uint32x4_t)(x[0]),(uint32x4_t)(x[1]));
-  ytmp1 = vtrnq_u32((uint32x4_t)(x[2]),(uint32x4_t)(x[3]));
-
-  *y2   = (int16x8_t)vcombine_s16(vget_low_s16((int16x8_t)ytmp0.val[0]),vget_low_s16((int16x8_t)ytmp1.val[0])); y2+=off;
-  *y2   = (int16x8_t)vcombine_s16(vget_low_s16((int16x8_t)ytmp0.val[1]),vget_low_s16((int16x8_t)ytmp1.val[1])); y2+=off;
-  *y2   = (int16x8_t)vcombine_s16(vget_high_s16((int16x8_t)ytmp0.val[0]),vget_high_s16((int16x8_t)ytmp1.val[0])); y2+=off;
-  *y2   = (int16x8_t)vcombine_s16(vget_high_s16((int16x8_t)ytmp0.val[1]),vget_high_s16((int16x8_t)ytmp1.val[1]));
-
-
-}
-#endif
-#endif // defined(__x86_64__) || defined(__i386__)
-
-#if defined(__x86_64__) || defined(__i386__)
-#if 0
-static inline void transpose4_ooff(__m64 *x,__m64 *y,int off)__attribute__((always_inline));
-static inline void transpose4_ooff(__m64 *x,__m64 *y,int off)
+static inline void __attribute__((always_inline)) transpose4_ooff(simde__m64 *x, simde__m64 *y, int off)
 {
-  y[0]   = _mm_unpacklo_pi32(x[0],x[1]);
-  y[off] = _mm_unpackhi_pi32(x[0],x[1]);
+  y[0] = simde_mm_unpacklo_pi32(x[0], x[1]);
+  y[off] = simde_mm_unpackhi_pi32(x[0], x[1]);
 
   // x[0] = [x0 x1]
   // x[1] = [x2 x3]
   // y[0] = [x0 x2]
   // y[off] = [x1 x3]
 }
-#endif
-static inline void transpose4_ooff_simd256(__m256i *x,__m256i *y,int off)__attribute__((always_inline));
-static inline void transpose4_ooff_simd256(__m256i *x,__m256i *y,int off)
+
+static inline void __attribute__((always_inline)) transpose4_ooff_simd256(simde__m256i *x, simde__m256i *y, int off)
 {
-  __m256i const perm_mask = simde_mm256_set_epi32(7, 5, 3, 1, 6, 4, 2, 0);
-  __m256i perm_tmp0,perm_tmp1;
+  simde__m256i const perm_mask = simde_mm256_set_epi32(7, 5, 3, 1, 6, 4, 2, 0);
+  simde__m256i perm_tmp0, perm_tmp1;
 
   // x[0] = [x0 x1 x2 x3 x4 x5 x6 x7]
   // x[1] = [x8 x9 x10 x11 x12 x13 x14 x15]
@@ -1975,19 +1490,6 @@ static inline void transpose4_ooff_simd256(__m256i *x,__m256i *y,int off)
   y[off] = simde_mm256_insertf128_si256(perm_tmp1,simde_mm256_extracti128_si256(perm_tmp0,1),0);
 }
 
-#elif defined(__arm__) || defined(__aarch64__)
-#if 0
-static inline void transpose4_ooff(int16x4_t *x,int16x4_t *y,int off)__attribute__((always_inline));
-static inline void transpose4_ooff(int16x4_t *x,int16x4_t *y,int off)
-{
-  uint32x2x2_t ytmp = vtrn_u32((uint32x2_t)x[0],(uint32x2_t)x[1]);
-
-  y[0]   = (int16x4_t)ytmp.val[0];
-  y[off] = (int16x4_t)ytmp.val[1];
-}
-#endif
-#endif // defined(__x86_64__) || defined(__i386__)
-
 // 16-point optimized DFT kernel
 
 const static int16_t tw16[24] __attribute__((aligned(32))) = { 32767,0,30272,-12540,23169 ,-23170,12539 ,-30273,
@@ -2030,17 +1532,10 @@ const static int16_t tw16b[24] __attribute__((aligned(32))) = { 0,32767,-12540,3
                                                    0,32767,-30273,12539,-23170,-23170,12539 ,-30273
                                                  };
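+/* Editor's note (illustration only): the tw16* tables are Q15 twiddle factors,
+   approximately round(32767 * exp(-2*pi*j*k*n/16)) for stage index k = 1..3 and
+   butterfly index n = 0..3, with the *b tables holding the re/im-swapped copies
+   consumed by packed_cmult2.  A generator along these lines reproduces the
+   constants to within +/-1 LSB (the committed values are hand-rounded):
+     for (int k = 1; k < 4; k++)
+       for (int n = 0; n < 4; n++) {
+         double a = -2.0 * M_PI * k * n / 16.0;
+         tw[8 * (k - 1) + 2 * n]     = (int16_t)lround(32767.0 * cos(a));
+         tw[8 * (k - 1) + 2 * n + 1] = (int16_t)lround(32767.0 * sin(a));
+       }
+   (hypothetical snippet, needs <math.h>; not part of the patch). */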
 
-
-static inline void dft16(int16_t *x,int16_t *y) __attribute__((always_inline));
-
-static inline void dft16(int16_t *x,int16_t *y)
+static inline void __attribute__((always_inline)) dft16(int16_t *x, int16_t *y)
 {
-
-#if defined(__x86_64__) || defined(__i386__)
-
-  __m128i *tw16a_128=(__m128i *)tw16a,*tw16b_128=(__m128i *)tw16b,*x128=(__m128i *)x,*y128=(__m128i *)y;
-
-
+  simde__m128i *tw16a_128 = (simde__m128i *)tw16a, *tw16b_128 = (simde__m128i *)tw16b, *x128 = (simde__m128i *)x,
+               *y128 = (simde__m128i *)y;
 
   /*  This is the original version before unrolling
 
@@ -2054,135 +1549,94 @@ static inline void dft16(int16_t *x,int16_t *y)
      tw16_128,tw16_128+1,tw16_128+2);
   */
 
-  register __m128i x1_flip,x3_flip,x02t,x13t;
-  register __m128i ytmp0,ytmp1,ytmp2,ytmp3,xtmp0,xtmp1,xtmp2,xtmp3;
-  register __m128i complex_shuffle = _mm_set_epi8(13,12,15,14,9,8,11,10,5,4,7,6,1,0,3,2);
+  register simde__m128i x1_flip, x3_flip, x02t, x13t;
+  register simde__m128i ytmp0, ytmp1, ytmp2, ytmp3, xtmp0, xtmp1, xtmp2, xtmp3;
+  register simde__m128i complex_shuffle = simde_mm_set_epi8(13, 12, 15, 14, 9, 8, 11, 10, 5, 4, 7, 6, 1, 0, 3, 2);
 
   // First stage : 4 Radix-4 butterflies without input twiddles
 
-  x02t    = _mm_adds_epi16(x128[0],x128[2]);
-  x13t    = _mm_adds_epi16(x128[1],x128[3]);
-  xtmp0   = _mm_adds_epi16(x02t,x13t);
-  xtmp2   = _mm_subs_epi16(x02t,x13t);
-  x1_flip = _mm_sign_epi16(x128[1],*(__m128i*)conjugatedft);
-  x1_flip = _mm_shuffle_epi8(x1_flip,complex_shuffle);
-  x3_flip = _mm_sign_epi16(x128[3],*(__m128i*)conjugatedft);
-  x3_flip = _mm_shuffle_epi8(x3_flip,complex_shuffle);
-  x02t    = _mm_subs_epi16(x128[0],x128[2]);
-  x13t    = _mm_subs_epi16(x1_flip,x3_flip);
-  xtmp1   = _mm_adds_epi16(x02t,x13t);  // x0 + x1f - x2 - x3f
-  xtmp3   = _mm_subs_epi16(x02t,x13t);  // x0 - x1f - x2 + x3f
-
-  ytmp0   = _mm_unpacklo_epi32(xtmp0,xtmp1);
-  ytmp1   = _mm_unpackhi_epi32(xtmp0,xtmp1);
-  ytmp2   = _mm_unpacklo_epi32(xtmp2,xtmp3);
-  ytmp3   = _mm_unpackhi_epi32(xtmp2,xtmp3);
-  xtmp0   = _mm_unpacklo_epi64(ytmp0,ytmp2);
-  xtmp1   = _mm_unpackhi_epi64(ytmp0,ytmp2);
-  xtmp2   = _mm_unpacklo_epi64(ytmp1,ytmp3);
-  xtmp3   = _mm_unpackhi_epi64(ytmp1,ytmp3);
+  x02t = simde_mm_adds_epi16(x128[0], x128[2]);
+  x13t = simde_mm_adds_epi16(x128[1], x128[3]);
+  xtmp0 = simde_mm_adds_epi16(x02t, x13t);
+  xtmp2 = simde_mm_subs_epi16(x02t, x13t);
+  x1_flip = simde_mm_sign_epi16(x128[1], *(simde__m128i *)conjugatedft);
+  x1_flip = simde_mm_shuffle_epi8(x1_flip, complex_shuffle);
+  x3_flip = simde_mm_sign_epi16(x128[3], *(simde__m128i *)conjugatedft);
+  x3_flip = simde_mm_shuffle_epi8(x3_flip, complex_shuffle);
+  x02t = simde_mm_subs_epi16(x128[0], x128[2]);
+  x13t = simde_mm_subs_epi16(x1_flip, x3_flip);
+  xtmp1 = simde_mm_adds_epi16(x02t, x13t); // x0 + x1f - x2 - x3f
+  xtmp3 = simde_mm_subs_epi16(x02t, x13t); // x0 - x1f - x2 + x3f
+
+  ytmp0 = simde_mm_unpacklo_epi32(xtmp0, xtmp1);
+  ytmp1 = simde_mm_unpackhi_epi32(xtmp0, xtmp1);
+  ytmp2 = simde_mm_unpacklo_epi32(xtmp2, xtmp3);
+  ytmp3 = simde_mm_unpackhi_epi32(xtmp2, xtmp3);
+  xtmp0 = simde_mm_unpacklo_epi64(ytmp0, ytmp2);
+  xtmp1 = simde_mm_unpackhi_epi64(ytmp0, ytmp2);
+  xtmp2 = simde_mm_unpacklo_epi64(ytmp1, ytmp3);
+  xtmp3 = simde_mm_unpackhi_epi64(ytmp1, ytmp3);
 
   // Second stage : 4 Radix-4 butterflies with input twiddles
   xtmp1 = packed_cmult2(xtmp1,tw16a_128[0],tw16b_128[0]);
   xtmp2 = packed_cmult2(xtmp2,tw16a_128[1],tw16b_128[1]);
   xtmp3 = packed_cmult2(xtmp3,tw16a_128[2],tw16b_128[2]);
 
-  x02t    = _mm_adds_epi16(xtmp0,xtmp2);
-  x13t    = _mm_adds_epi16(xtmp1,xtmp3);
-  y128[0] = _mm_adds_epi16(x02t,x13t);
-  y128[2] = _mm_subs_epi16(x02t,x13t);
-  x1_flip = _mm_sign_epi16(xtmp1,*(__m128i*)conjugatedft);
-  x1_flip = _mm_shuffle_epi8(x1_flip,_mm_set_epi8(13,12,15,14,9,8,11,10,5,4,7,6,1,0,3,2));
-  x3_flip = _mm_sign_epi16(xtmp3,*(__m128i*)conjugatedft);
-  x3_flip = _mm_shuffle_epi8(x3_flip,_mm_set_epi8(13,12,15,14,9,8,11,10,5,4,7,6,1,0,3,2));
-  x02t    = _mm_subs_epi16(xtmp0,xtmp2);
-  x13t    = _mm_subs_epi16(x1_flip,x3_flip);
-  y128[1] = _mm_adds_epi16(x02t,x13t);  // x0 + x1f - x2 - x3f
-  y128[3] = _mm_subs_epi16(x02t,x13t);  // x0 - x1f - x2 + x3f
-
-#elif defined(__arm__) || defined(__aarch64__)
-
-  int16x8_t *tw16a_128=(int16x8_t *)tw16a,*tw16b_128=(int16x8_t *)tw16b,*x128=(int16x8_t *)x,*y128=(int16x8_t *)y;
-
-  /*  This is the original version before unrolling
-
-  bfly4_tw1(x128,x128+1,x128+2,x128+3,
-      y128,y128+1,y128+2,y128+3);
-
-  transpose16(y128,ytmp);
-
-  bfly4_16(ytmp,ytmp+1,ytmp+2,ytmp+3,
-     y128,y128+1,y128+2,y128+3,
-     tw16_128,tw16_128+1,tw16_128+2);
-  */
-
-  register int16x8_t x1_flip,x3_flip,x02t,x13t;
-  register int16x8_t xtmp0,xtmp1,xtmp2,xtmp3;
-  register uint32x4x2_t ytmp0,ytmp1;
-  register int16x8_t ytmp0b,ytmp1b,ytmp2b,ytmp3b;
-
-  // First stage : 4 Radix-4 butterflies without input twiddles
-  
-  x02t    = vqaddq_s16(x128[0],x128[2]);
-  x13t    = vqaddq_s16(x128[1],x128[3]);
-  xtmp0   = vqaddq_s16(x02t,x13t);
-  xtmp2   = vqsubq_s16(x02t,x13t);
-  x1_flip = vrev32q_s16(vmulq_s16(x128[1],*(int16x8_t*)conjugatedft));
-  x3_flip = vrev32q_s16(vmulq_s16(x128[3],*(int16x8_t*)conjugatedft));
-  x02t    = vqsubq_s16(x128[0],x128[2]);
-  x13t    = vqsubq_s16(x1_flip,x3_flip);
-  xtmp1   = vqaddq_s16(x02t,x13t);  // x0 + x1f - x2 - x3f
-  xtmp3   = vqsubq_s16(x02t,x13t);  // x0 - x1f - x2 + x3f
-
-  ytmp0  = vtrnq_u32((uint32x4_t)(xtmp0),(uint32x4_t)(xtmp1));
-// y0[0] = [x00 x10 x02 x12], y0[1] = [x01 x11 x03 x13]
-  ytmp1  = vtrnq_u32((uint32x4_t)(xtmp2),(uint32x4_t)(xtmp3));
-// y1[0] = [x20 x30 x22 x32], y1[1] = [x21 x31 x23 x33]
-
-
-  ytmp0b = vcombine_s16(vget_low_s16((int16x8_t)ytmp0.val[0]),vget_low_s16((int16x8_t)ytmp1.val[0]));
-// y0 = [x00 x10 x20 x30] 
-  ytmp1b = vcombine_s16(vget_low_s16((int16x8_t)ytmp0.val[1]),vget_low_s16((int16x8_t)ytmp1.val[1]));
-// t1 = [x01 x11 x21 x31] 
-  ytmp2b = vcombine_s16(vget_high_s16((int16x8_t)ytmp0.val[0]),vget_high_s16((int16x8_t)ytmp1.val[0]));
-// t2 = [x02 x12 x22 x32]
-  ytmp3b = vcombine_s16(vget_high_s16((int16x8_t)ytmp0.val[1]),vget_high_s16((int16x8_t)ytmp1.val[1]));
-// t3 = [x03 x13 x23 x33]
-
-
-  // Second stage : 4 Radix-4 butterflies with input twiddles
-  xtmp1 = packed_cmult2(ytmp1b,tw16a_128[0],tw16b_128[0]);
-  xtmp2 = packed_cmult2(ytmp2b,tw16a_128[1],tw16b_128[1]);
-  xtmp3 = packed_cmult2(ytmp3b,tw16a_128[2],tw16b_128[2]);
-
-  x02t    = vqaddq_s16(ytmp0b,xtmp2);
-  x13t    = vqaddq_s16(xtmp1,xtmp3);
-  y128[0] = vqaddq_s16(x02t,x13t);
-  y128[2] = vqsubq_s16(x02t,x13t);
-  x1_flip = vrev32q_s16(vmulq_s16(xtmp1,*(int16x8_t*)conjugatedft));
-  x3_flip = vrev32q_s16(vmulq_s16(xtmp3,*(int16x8_t*)conjugatedft));
-  x02t    = vqsubq_s16(ytmp0b,xtmp2);
-  x13t    = vqsubq_s16(x1_flip,x3_flip);
-  y128[1] = vqaddq_s16(x02t,x13t);  // x0 + x1f - x2 - x3f
-  y128[3] = vqsubq_s16(x02t,x13t);  // x0 - x1f - x2 + x3f
-
-
-#endif // defined(__x86_64__) || defined(__i386__)
+  x02t = simde_mm_adds_epi16(xtmp0, xtmp2);
+  x13t = simde_mm_adds_epi16(xtmp1, xtmp3);
+  y128[0] = simde_mm_adds_epi16(x02t, x13t);
+  y128[2] = simde_mm_subs_epi16(x02t, x13t);
+  x1_flip = simde_mm_sign_epi16(xtmp1, *(simde__m128i *)conjugatedft);
+  x1_flip = simde_mm_shuffle_epi8(x1_flip, simde_mm_set_epi8(13, 12, 15, 14, 9, 8, 11, 10, 5, 4, 7, 6, 1, 0, 3, 2));
+  x3_flip = simde_mm_sign_epi16(xtmp3, *(simde__m128i *)conjugatedft);
+  x3_flip = simde_mm_shuffle_epi8(x3_flip, simde_mm_set_epi8(13, 12, 15, 14, 9, 8, 11, 10, 5, 4, 7, 6, 1, 0, 3, 2));
+  x02t = simde_mm_subs_epi16(xtmp0, xtmp2);
+  x13t = simde_mm_subs_epi16(x1_flip, x3_flip);
+  y128[1] = simde_mm_adds_epi16(x02t, x13t); // x0 + x1f - x2 - x3f
+  y128[3] = simde_mm_subs_epi16(x02t, x13t); // x0 - x1f - x2 + x3f
 }
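+/* Editor's note: with the arch-specific paths gone, dft16 is the single SIMDE
+   version of the classic 16 = 4 x 4 split sketched in the comment above: a
+   twiddle-free radix-4 stage, an in-register 4x4 transpose (the unpack ladder),
+   then a radix-4 stage on twiddled inputs. */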
 #endif
 
-#if defined(__x86_64__) || defined(__i386__)
-
 // Does two 16-point DFTs (x[0..15] is the 128 LSBs of the input vector, x[16..31] the 128 MSBs)
-static inline void dft16_simd256(int16_t *x,int16_t *y) __attribute__((always_inline));
-static inline void dft16_simd256(int16_t *x,int16_t *y)
-{
-
-  __m256i *tw16a_256=(__m256i *)tw16arep,*tw16b_256=(__m256i *)tw16brep,*x256=(__m256i *)x,*y256=(__m256i *)y;
-
-  __m256i x1_flip,x3_flip,x02t,x13t;
-  __m256i ytmp0,ytmp1,ytmp2,ytmp3,xtmp0,xtmp1,xtmp2,xtmp3;
-  register __m256i complex_shuffle = simde_mm256_set_epi8(29,28,31,30,25,24,27,26,21,20,23,22,17,16,19,18,13,12,15,14,9,8,11,10,5,4,7,6,1,0,3,2);
+static inline void __attribute__((always_inline)) dft16_simd256(int16_t *x,int16_t *y)
+{
+  simde__m256i *tw16a_256 = (simde__m256i *)tw16arep, *tw16b_256 = (simde__m256i *)tw16brep, *x256 = (simde__m256i *)x,
+               *y256 = (simde__m256i *)y;
+
+  simde__m256i x1_flip, x3_flip, x02t, x13t;
+  simde__m256i ytmp0, ytmp1, ytmp2, ytmp3, xtmp0, xtmp1, xtmp2, xtmp3;
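+  // complex_shuffle swaps the re/im int16 halves of every 32-bit complex
+  // sample in both 128-bit lanes (the 256-bit analog of the shuffle in dft16).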
+  register simde__m256i complex_shuffle =
+      simde_mm256_set_epi8(29, 28, 31, 30, 25, 24, 27, 26, 21, 20, 23, 22, 17, 16, 19, 18,
+                           13, 12, 15, 14, 9, 8, 11, 10, 5, 4, 7, 6, 1, 0, 3, 2);
 
   // First stage : 4 Radix-4 butterflies without input twiddles
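+  // Radix-4 kernel being vectorized here (forward-DFT sign convention, the
+  // +/-j rotations realized by the sign+shuffle trick):
+  //   X0 = x0 + x1 + x2 + x3        X2 = x0 - x1 + x2 - x3
+  //   X1 = x0 - jx1 - x2 + jx3      X3 = x0 + jx1 - x2 - jx3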
 
@@ -2190,9 +1644,9 @@ static inline void dft16_simd256(int16_t *x,int16_t *y)
   x13t    = simde_mm256_adds_epi16(x256[1],x256[3]);
   xtmp0   = simde_mm256_adds_epi16(x02t,x13t);
   xtmp2   = simde_mm256_subs_epi16(x02t,x13t);
-  x1_flip = simde_mm256_sign_epi16(x256[1],*(__m256i*)conjugatedft);
+  x1_flip = simde_mm256_sign_epi16(x256[1], *(simde__m256i *)conjugatedft);
   x1_flip = simde_mm256_shuffle_epi8(x1_flip,complex_shuffle);
-  x3_flip = simde_mm256_sign_epi16(x256[3],*(__m256i*)conjugatedft);
+  x3_flip = simde_mm256_sign_epi16(x256[3], *(simde__m256i *)conjugatedft);
   x3_flip = simde_mm256_shuffle_epi8(x3_flip,complex_shuffle);
   x02t    = simde_mm256_subs_epi16(x256[0],x256[2]);
   x13t    = simde_mm256_subs_epi16(x1_flip,x3_flip);
@@ -2227,9 +1681,9 @@ static inline void dft16_simd256(int16_t *x,int16_t *y)
   x13t    = simde_mm256_adds_epi16(xtmp1,xtmp3);
   ytmp0   = simde_mm256_adds_epi16(x02t,x13t);
   ytmp2   = simde_mm256_subs_epi16(x02t,x13t);
-  x1_flip = simde_mm256_sign_epi16(xtmp1,*(__m256i*)conjugatedft);
+  x1_flip = simde_mm256_sign_epi16(xtmp1, *(simde__m256i *)conjugatedft);
   x1_flip = simde_mm256_shuffle_epi8(x1_flip,complex_shuffle);
-  x3_flip = simde_mm256_sign_epi16(xtmp3,*(__m256i*)conjugatedft);
+  x3_flip = simde_mm256_sign_epi16(xtmp3, *(simde__m256i *)conjugatedft);
   x3_flip = simde_mm256_shuffle_epi8(x3_flip,complex_shuffle);
   x02t    = simde_mm256_subs_epi16(xtmp0,xtmp2);
   x13t    = simde_mm256_subs_epi16(x1_flip,x3_flip);
@@ -2253,15 +1707,10 @@ static inline void dft16_simd256(int16_t *x,int16_t *y)
   // [y24 y25 y26 y27 y28 y29 y30 y31]
 }
 
-#endif // defined(__x86_64__) || defined(__i386__)
-
-static inline void idft16(int16_t *x,int16_t *y) __attribute__((always_inline));
-
-static inline void idft16(int16_t *x,int16_t *y)
+static inline void __attribute__((always_inline)) idft16(int16_t *x,int16_t *y)
 {
-
-#if defined(__x86_64__) || defined(__i386__)
-  __m128i *tw16a_128=(__m128i *)tw16,*tw16b_128=(__m128i *)tw16c,*x128=(__m128i *)x,*y128=(__m128i *)y;
+  simde__m128i *tw16a_128 = (simde__m128i *)tw16, *tw16b_128 = (simde__m128i *)tw16c, *x128 = (simde__m128i *)x,
+               *y128 = (simde__m128i *)y;
 
   /*
   bfly4_tw1(x128,x128+1,x128+2,x128+3,
@@ -2274,133 +1723,95 @@ static inline void idft16(int16_t *x,int16_t *y)
      tw16_128,tw16_128+1,tw16_128+2);
   */
 
-  register __m128i x1_flip,x3_flip,x02t,x13t;
-  register __m128i ytmp0,ytmp1,ytmp2,ytmp3,xtmp0,xtmp1,xtmp2,xtmp3;
+  register simde__m128i x1_flip, x3_flip, x02t, x13t;
+  register simde__m128i ytmp0, ytmp1, ytmp2, ytmp3, xtmp0, xtmp1, xtmp2, xtmp3;
 
   // First stage : 4 Radix-4 butterflies without input twiddles
 
-  x02t    = _mm_adds_epi16(x128[0],x128[2]);
-  x13t    = _mm_adds_epi16(x128[1],x128[3]);
-  xtmp0   = _mm_adds_epi16(x02t,x13t);
-  xtmp2   = _mm_subs_epi16(x02t,x13t);
-  x1_flip = _mm_sign_epi16(x128[1],*(__m128i*)conjugatedft);
-  x1_flip = _mm_shuffle_epi8(x1_flip,_mm_set_epi8(13,12,15,14,9,8,11,10,5,4,7,6,1,0,3,2));
-  x3_flip = _mm_sign_epi16(x128[3],*(__m128i*)conjugatedft);
-  x3_flip = _mm_shuffle_epi8(x3_flip,_mm_set_epi8(13,12,15,14,9,8,11,10,5,4,7,6,1,0,3,2));
-  x02t    = _mm_subs_epi16(x128[0],x128[2]);
-  x13t    = _mm_subs_epi16(x1_flip,x3_flip);
-  xtmp3   = _mm_adds_epi16(x02t,x13t);  // x0 + x1f - x2 - x3f
-  xtmp1   = _mm_subs_epi16(x02t,x13t);  // x0 - x1f - x2 + x3f
-
-  ytmp0   = _mm_unpacklo_epi32(xtmp0,xtmp1);
-  ytmp1   = _mm_unpackhi_epi32(xtmp0,xtmp1);
-  ytmp2   = _mm_unpacklo_epi32(xtmp2,xtmp3);
-  ytmp3   = _mm_unpackhi_epi32(xtmp2,xtmp3);
-  xtmp0   = _mm_unpacklo_epi64(ytmp0,ytmp2);
-  xtmp1   = _mm_unpackhi_epi64(ytmp0,ytmp2);
-  xtmp2   = _mm_unpacklo_epi64(ytmp1,ytmp3);
-  xtmp3   = _mm_unpackhi_epi64(ytmp1,ytmp3);
+  x02t = simde_mm_adds_epi16(x128[0], x128[2]);
+  x13t = simde_mm_adds_epi16(x128[1], x128[3]);
+  xtmp0 = simde_mm_adds_epi16(x02t, x13t);
+  xtmp2 = simde_mm_subs_epi16(x02t, x13t);
+  x1_flip = simde_mm_sign_epi16(x128[1], *(simde__m128i *)conjugatedft);
+  x1_flip = simde_mm_shuffle_epi8(x1_flip, simde_mm_set_epi8(13, 12, 15, 14, 9, 8, 11, 10, 5, 4, 7, 6, 1, 0, 3, 2));
+  x3_flip = simde_mm_sign_epi16(x128[3], *(simde__m128i *)conjugatedft);
+  x3_flip = simde_mm_shuffle_epi8(x3_flip, simde_mm_set_epi8(13, 12, 15, 14, 9, 8, 11, 10, 5, 4, 7, 6, 1, 0, 3, 2));
+  x02t = simde_mm_subs_epi16(x128[0], x128[2]);
+  x13t = simde_mm_subs_epi16(x1_flip, x3_flip);
+  xtmp3 = simde_mm_adds_epi16(x02t, x13t); // x0 + x1f - x2 - x3f
+  xtmp1 = simde_mm_subs_epi16(x02t, x13t); // x0 - x1f - x2 + x3f
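+  // Note the swap relative to dft16: here the j-rotated terms land in xtmp3
+  // and xtmp1 (and below in y128[3]/y128[1]), which flips the sign of j and
+  // turns the forward butterfly into its inverse.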
+
+  ytmp0 = simde_mm_unpacklo_epi32(xtmp0, xtmp1);
+  ytmp1 = simde_mm_unpackhi_epi32(xtmp0, xtmp1);
+  ytmp2 = simde_mm_unpacklo_epi32(xtmp2, xtmp3);
+  ytmp3 = simde_mm_unpackhi_epi32(xtmp2, xtmp3);
+  xtmp0 = simde_mm_unpacklo_epi64(ytmp0, ytmp2);
+  xtmp1 = simde_mm_unpackhi_epi64(ytmp0, ytmp2);
+  xtmp2 = simde_mm_unpacklo_epi64(ytmp1, ytmp3);
+  xtmp3 = simde_mm_unpackhi_epi64(ytmp1, ytmp3);
 
   // Second stage : 4 Radix-4 butterflies with input twiddles
   xtmp1 = packed_cmult2(xtmp1,tw16a_128[0],tw16b_128[0]);
   xtmp2 = packed_cmult2(xtmp2,tw16a_128[1],tw16b_128[1]);
   xtmp3 = packed_cmult2(xtmp3,tw16a_128[2],tw16b_128[2]);
 
-  x02t    = _mm_adds_epi16(xtmp0,xtmp2);
-  x13t    = _mm_adds_epi16(xtmp1,xtmp3);
-  y128[0] = _mm_adds_epi16(x02t,x13t);
-  y128[2] = _mm_subs_epi16(x02t,x13t);
-  x1_flip = _mm_sign_epi16(xtmp1,*(__m128i*)conjugatedft);
-  x1_flip = _mm_shuffle_epi8(x1_flip,_mm_set_epi8(13,12,15,14,9,8,11,10,5,4,7,6,1,0,3,2));
-  x3_flip = _mm_sign_epi16(xtmp3,*(__m128i*)conjugatedft);
-  x3_flip = _mm_shuffle_epi8(x3_flip,_mm_set_epi8(13,12,15,14,9,8,11,10,5,4,7,6,1,0,3,2));
-  x02t    = _mm_subs_epi16(xtmp0,xtmp2);
-  x13t    = _mm_subs_epi16(x1_flip,x3_flip);
-  y128[3] = _mm_adds_epi16(x02t,x13t);  // x0 + x1f - x2 - x3f
-  y128[1] = _mm_subs_epi16(x02t,x13t);  // x0 - x1f - x2 + x3f
-
-#elif defined(__arm__) || defined(__aarch64__)
-  int16x8_t *tw16a_128=(int16x8_t *)tw16,*tw16b_128=(int16x8_t *)tw16c,*x128=(int16x8_t *)x,*y128=(int16x8_t *)y;
-
-  /*  This is the original version before unrolling
-
-  bfly4_tw1(x128,x128+1,x128+2,x128+3,
-      y128,y128+1,y128+2,y128+3);
-
-  transpose16(y128,ytmp);
-
-  bfly4_16(ytmp,ytmp+1,ytmp+2,ytmp+3,
-     y128,y128+1,y128+2,y128+3,
-     tw16_128,tw16_128+1,tw16_128+2);
-  */
-
-  register int16x8_t x1_flip,x3_flip,x02t,x13t;
-  register int16x8_t xtmp0,xtmp1,xtmp2,xtmp3;
-  register uint32x4x2_t ytmp0,ytmp1;
-  register int16x8_t ytmp0b,ytmp1b,ytmp2b,ytmp3b;
-
-  // First stage : 4 Radix-4 butterflies without input twiddles
-
-  x02t    = vqaddq_s16(x128[0],x128[2]);
-  x13t    = vqaddq_s16(x128[1],x128[3]);
-  xtmp0   = vqaddq_s16(x02t,x13t);
-  xtmp2   = vqsubq_s16(x02t,x13t);
-  x1_flip = vrev32q_s16(vmulq_s16(x128[1],*(int16x8_t*)conjugatedft));
-  x3_flip = vrev32q_s16(vmulq_s16(x128[3],*(int16x8_t*)conjugatedft));
-  x02t    = vqsubq_s16(x128[0],x128[2]);
-  x13t    = vqsubq_s16(x1_flip,x3_flip);
-  xtmp3   = vqaddq_s16(x02t,x13t);  // x0 + x1f - x2 - x3f
-  xtmp1   = vqsubq_s16(x02t,x13t);  // x0 - x1f - x2 + x3f
-
-  ytmp0  = vtrnq_u32((uint32x4_t)(xtmp0),(uint32x4_t)(xtmp1));
-// y0[0] = [x00 x10 x02 x12], y0[1] = [x01 x11 x03 x13]
-  ytmp1  = vtrnq_u32((uint32x4_t)(xtmp2),(uint32x4_t)(xtmp3));
-// y1[0] = [x20 x30 x22 x32], y1[1] = [x21 x31 x23 x33]
-
-
-  ytmp0b = vcombine_s16(vget_low_s16((int16x8_t)ytmp0.val[0]),vget_low_s16((int16x8_t)ytmp1.val[0]));
-// y0 = [x00 x10 x20 x30] 
-  ytmp1b = vcombine_s16(vget_low_s16((int16x8_t)ytmp0.val[1]),vget_low_s16((int16x8_t)ytmp1.val[1]));
-// t1 = [x01 x11 x21 x31] 
-  ytmp2b = vcombine_s16(vget_high_s16((int16x8_t)ytmp0.val[0]),vget_high_s16((int16x8_t)ytmp1.val[0]));
-// t2 = [x02 x12 x22 x32]
-  ytmp3b = vcombine_s16(vget_high_s16((int16x8_t)ytmp0.val[1]),vget_high_s16((int16x8_t)ytmp1.val[1]));
-// t3 = [x03 x13 x23 x33]
-
-  // Second stage : 4 Radix-4 butterflies with input twiddles
-  xtmp1 = packed_cmult2(ytmp1b,tw16a_128[0],tw16b_128[0]);
-  xtmp2 = packed_cmult2(ytmp2b,tw16a_128[1],tw16b_128[1]);
-  xtmp3 = packed_cmult2(ytmp3b,tw16a_128[2],tw16b_128[2]);
-
-  x02t    = vqaddq_s16(ytmp0b,xtmp2);
-  x13t    = vqaddq_s16(xtmp1,xtmp3);
-  y128[0] = vqaddq_s16(x02t,x13t);
-  y128[2] = vqsubq_s16(x02t,x13t);
-  x1_flip = vrev32q_s16(vmulq_s16(xtmp1,*(int16x8_t*)conjugatedft));
-  x3_flip = vrev32q_s16(vmulq_s16(xtmp3,*(int16x8_t*)conjugatedft));
-  x02t    = vqsubq_s16(ytmp0b,xtmp2);
-  x13t    = vqsubq_s16(x1_flip,x3_flip);
-  y128[3] = vqaddq_s16(x02t,x13t);  // x0 + x1f - x2 - x3f
-  y128[1] = vqsubq_s16(x02t,x13t);  // x0 - x1f - x2 + x3f
-
-#endif // defined(__x86_64__) || defined(__i386__)
+  x02t = simde_mm_adds_epi16(xtmp0, xtmp2);
+  x13t = simde_mm_adds_epi16(xtmp1, xtmp3);
+  y128[0] = simde_mm_adds_epi16(x02t, x13t);
+  y128[2] = simde_mm_subs_epi16(x02t, x13t);
+  x1_flip = simde_mm_sign_epi16(xtmp1, *(simde__m128i *)conjugatedft);
+  x1_flip = simde_mm_shuffle_epi8(x1_flip, simde_mm_set_epi8(13, 12, 15, 14, 9, 8, 11, 10, 5, 4, 7, 6, 1, 0, 3, 2));
+  x3_flip = simde_mm_sign_epi16(xtmp3, *(simde__m128i *)conjugatedft);
+  x3_flip = simde_mm_shuffle_epi8(x3_flip, simde_mm_set_epi8(13, 12, 15, 14, 9, 8, 11, 10, 5, 4, 7, 6, 1, 0, 3, 2));
+  x02t = simde_mm_subs_epi16(xtmp0, xtmp2);
+  x13t = simde_mm_subs_epi16(x1_flip, x3_flip);
+  y128[3] = simde_mm_adds_epi16(x02t, x13t); // x0 + x1f - x2 - x3f
+  y128[1] = simde_mm_subs_epi16(x02t, x13t); // x0 - x1f - x2 + x3f
 }
 
 void idft16f(int16_t *x,int16_t *y) {
   idft16(x,y);
 }
 
-#if defined(__x86_64__) || defined(__i386__)
-
 // Does two 16-point IDFTS (x[0 .. 15] is 128 LSBs of input vector, x[16..31] is in 128 MSBs) 
-static inline void idft16_simd256(int16_t *x,int16_t *y) __attribute__((always_inline));
-static inline void idft16_simd256(int16_t *x,int16_t *y)
-{
-
-  __m256i *tw16a_256=(__m256i *)tw16rep,*tw16b_256=(__m256i *)tw16crep,*x256=(__m256i *)x,*y256=(__m256i *)y;
-  register __m256i x1_flip,x3_flip,x02t,x13t;
-  register __m256i ytmp0,ytmp1,ytmp2,ytmp3,xtmp0,xtmp1,xtmp2,xtmp3;
-  register __m256i complex_shuffle = simde_mm256_set_epi8(29,28,31,30,25,24,27,26,21,20,23,22,17,16,19,18,13,12,15,14,9,8,11,10,5,4,7,6,1,0,3,2);
+static inline void __attribute__((always_inline)) idft16_simd256(int16_t *x,int16_t *y)
+{
+  simde__m256i *tw16a_256 = (simde__m256i *)tw16rep, *tw16b_256 = (simde__m256i *)tw16crep, *x256 = (simde__m256i *)x,
+               *y256 = (simde__m256i *)y;
+  register simde__m256i x1_flip, x3_flip, x02t, x13t;
+  register simde__m256i ytmp0, ytmp1, ytmp2, ytmp3, xtmp0, xtmp1, xtmp2, xtmp3;
+  register simde__m256i complex_shuffle =
+      simde_mm256_set_epi8(29, 28, 31, 30, 25, 24, 27, 26, 21, 20, 23, 22, 17, 16, 19, 18,
+                           13, 12, 15, 14, 9, 8, 11, 10, 5, 4, 7, 6, 1, 0, 3, 2);
 
   // First stage : 4 Radix-4 butterflies without input twiddles
 
@@ -2408,9 +1819,9 @@ static inline void idft16_simd256(int16_t *x,int16_t *y)
   x13t    = simde_mm256_adds_epi16(x256[1],x256[3]);
   xtmp0   = simde_mm256_adds_epi16(x02t,x13t);
   xtmp2   = simde_mm256_subs_epi16(x02t,x13t);
-  x1_flip = simde_mm256_sign_epi16(x256[1],*(__m256i*)conjugatedft);
+  x1_flip = simde_mm256_sign_epi16(x256[1], *(simde__m256i *)conjugatedft);
   x1_flip = simde_mm256_shuffle_epi8(x1_flip,complex_shuffle);
-  x3_flip = simde_mm256_sign_epi16(x256[3],*(__m256i*)conjugatedft);
+  x3_flip = simde_mm256_sign_epi16(x256[3], *(simde__m256i *)conjugatedft);
   x3_flip = simde_mm256_shuffle_epi8(x3_flip,complex_shuffle);
   x02t    = simde_mm256_subs_epi16(x256[0],x256[2]);
   x13t    = simde_mm256_subs_epi16(x1_flip,x3_flip);
@@ -2435,9 +1846,9 @@ static inline void idft16_simd256(int16_t *x,int16_t *y)
   x13t    = simde_mm256_adds_epi16(xtmp1,xtmp3);
   ytmp0   = simde_mm256_adds_epi16(x02t,x13t);
   ytmp2   = simde_mm256_subs_epi16(x02t,x13t);
-  x1_flip = simde_mm256_sign_epi16(xtmp1,*(__m256i*)conjugatedft);
+  x1_flip = simde_mm256_sign_epi16(xtmp1, *(simde__m256i *)conjugatedft);
   x1_flip = simde_mm256_shuffle_epi8(x1_flip,complex_shuffle);
-  x3_flip = simde_mm256_sign_epi16(xtmp3,*(__m256i*)conjugatedft);
+  x3_flip = simde_mm256_sign_epi16(xtmp3, *(simde__m256i *)conjugatedft);
   x3_flip = simde_mm256_shuffle_epi8(x3_flip,complex_shuffle);
   x02t    = simde_mm256_subs_epi16(xtmp0,xtmp2);
   x13t    = simde_mm256_subs_epi16(x1_flip,x3_flip);
@@ -2455,8 +1866,6 @@ static inline void idft16_simd256(int16_t *x,int16_t *y)
   y256[3] = simde_mm256_insertf128_si256(ytmp3,simde_mm256_extracti128_si256(ytmp2,1),0);
 
 }
-#endif // defined(__x86_64__) || defined(__i386__)
-
 // 64-point optimized DFT
 
 const static int16_t tw64[96] __attribute__((aligned(32))) = { 
@@ -2515,27 +1924,15 @@ const static int16_t tw64c[96] __attribute__((aligned(32))) = {
 23170,-23170,15447,-28898,6393,-32138,-3211,-32610,
 -12539,-30273,-20787,-25330,-27244,-18205,-31356,-9512
                                                  };
-#if defined(__x86_64__) || defined(__i386__)
-#define simd_q15_t __m128i
-#define simdshort_q15_t __m64
-#define shiftright_int16(a,shift) _mm_srai_epi16(a,shift)
-#define mulhi_int16(a,b) _mm_mulhrs_epi16 (a,b)
-#define simd256_q15_t __m256i
+#define simd_q15_t simde__m128i
+#define simdshort_q15_t simde__m64
+#define shiftright_int16(a,shift) simde_mm_srai_epi16(a,shift)
+#define mulhi_int16(a,b) simde_mm_mulhrs_epi16(a,b)
+#define simd256_q15_t simde__m256i
 #define shiftright_int16_simd256(a,shift) simde_mm256_srai_epi16(a,shift)
 #define set1_int16_simd256(a) simde_mm256_set1_epi16(a);
 #define mulhi_int16_simd256(a,b) simde_mm256_mulhrs_epi16(a,b); //simde_mm256_slli_epi16(simde_mm256_mulhi_epi16(a,b),1);
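+// mulhi_int16(a,b) is a rounded Q15 multiply: simde_mm_mulhrs_epi16 computes
+// (a*b + 0x4000) >> 15 per lane, so twiddles scaled to 32767 act as ~1.0.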
 
-#elif defined(__arm__) || defined(__aarch64__)
-#define simd_q15_t int16x8_t
-#define simdshort_q15_t int16x4_t
-#define shiftright_int16(a,shift) vshrq_n_s16(a,shift)
-#define set1_int16(a) vdupq_n_s16(a)
-#define mulhi_int16(a,b) vqdmulhq_s16(a,b);
-#define _mm_empty() 
-#define _m_empty()
-
-#endif // defined(__x86_64__) || defined(__i386__)
-
 void dft64(int16_t *x,int16_t *y,unsigned char scale)
 {
 
@@ -2676,10 +2073,8 @@ void dft64(int16_t *x,int16_t *y,unsigned char scale)
     y256[7]  = shiftright_int16_simd256(y256[7],3);
   }
 
-  _mm_empty();
-  _m_empty();
-
-
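+  // simde_mm_empty()/simde_m_empty() issue emms on native-MMX x86 builds and
+  // compile to no-ops elsewhere; they are kept only for x87-state hygiene.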
+  simde_mm_empty();
+  simde_m_empty();
 }
 
 void idft64(int16_t *x,int16_t *y,unsigned char scale)
@@ -2777,18 +2172,17 @@ void idft64(int16_t *x,int16_t *y,unsigned char scale)
     y256[7]  = shiftright_int16_simd256(y256[7],3);
   }
 
-  _mm_empty();
-  _m_empty();
-
+  simde_mm_empty();
+  simde_m_empty();
 }
 
-int16_t tw128[128] __attribute__((aligned(32))) = {  32767,0,32727,-1608,32609,-3212,32412,-4808,32137,-6393,31785,-7962,31356,-9512,30851,-11039,30272,-12540,29621,-14010,28897,-15447,28105,-16846,27244,-18205,26318,-19520,25329,-20788,24278,-22005,23169,-23170,22004,-24279,20787,-25330,19519,-26319,18204,-27245,16845,-28106,15446,-28898,14009,-29622,12539,-30273,11038,-30852,9511,-31357,7961,-31786,6392,-32138,4807,-32413,3211,-32610,1607,-32728,0,-32767,-1608,-32728,-3212,-32610,-4808,-32413,-6393,-32138,-7962,-31786,-9512,-31357,-11039,-30852,-12540,-30273,-14010,-29622,-15447,-28898,-16846,-28106,-18205,-27245,-19520,-26319,-20788,-25330,-22005,-24279,-23170,-23170,-24279,-22005,-25330,-20788,-26319,-19520,-27245,-18205,-28106,-16846,-28898,-15447,-29622,-14010,-30273,-12540,-30852,-11039,-31357,-9512,-31786,-7962,-32138,-6393,-32413,-4808,-32610,-3212,-32728,-1608};
+static const int16_t tw128[128] __attribute__((aligned(32))) = {  32767,0,32727,-1608,32609,-3212,32412,-4808,32137,-6393,31785,-7962,31356,-9512,30851,-11039,30272,-12540,29621,-14010,28897,-15447,28105,-16846,27244,-18205,26318,-19520,25329,-20788,24278,-22005,23169,-23170,22004,-24279,20787,-25330,19519,-26319,18204,-27245,16845,-28106,15446,-28898,14009,-29622,12539,-30273,11038,-30852,9511,-31357,7961,-31786,6392,-32138,4807,-32413,3211,-32610,1607,-32728,0,-32767,-1608,-32728,-3212,-32610,-4808,-32413,-6393,-32138,-7962,-31786,-9512,-31357,-11039,-30852,-12540,-30273,-14010,-29622,-15447,-28898,-16846,-28106,-18205,-27245,-19520,-26319,-20788,-25330,-22005,-24279,-23170,-23170,-24279,-22005,-25330,-20788,-26319,-19520,-27245,-18205,-28106,-16846,-28898,-15447,-29622,-14010,-30273,-12540,-30852,-11039,-31357,-9512,-31786,-7962,-32138,-6393,-32413,-4808,-32610,-3212,-32728,-1608};
 
-int16_t tw128a[128] __attribute__((aligned(32))) = { 32767,0,32727,1608,32609,3212,32412,4808,32137,6393,31785,7962,31356,9512,30851,11039,30272,12540,29621,14010,28897,15447,28105,16846,27244,18205,26318,19520,25329,20788,24278,22005,23169,23170,22004,24279,20787,25330,19519,26319,18204,27245,16845,28106,15446,28898,14009,29622,12539,30273,11038,30852,9511,31357,7961,31786,6392,32138,4807,32413,3211,32610,1607,32728,0,32767,-1608,32728,-3212,32610,-4808,32413,-6393,32138,-7962,31786,-9512,31357,-11039,30852,-12540,30273,-14010,29622,-15447,28898,-16846,28106,-18205,27245,-19520,26319,-20788,25330,-22005,24279,-23170,23170,-24279,22005,-25330,20788,-26319,19520,-27245,18205,-28106,16846,-28898,15447,-29622,14010,-30273,12540,-30852,11039,-31357,9512,-31786,7962,-32138,6393,-32413,4808,-32610,3212,-32728,1608};
+static const int16_t tw128a[128] __attribute__((aligned(32))) = { 32767,0,32727,1608,32609,3212,32412,4808,32137,6393,31785,7962,31356,9512,30851,11039,30272,12540,29621,14010,28897,15447,28105,16846,27244,18205,26318,19520,25329,20788,24278,22005,23169,23170,22004,24279,20787,25330,19519,26319,18204,27245,16845,28106,15446,28898,14009,29622,12539,30273,11038,30852,9511,31357,7961,31786,6392,32138,4807,32413,3211,32610,1607,32728,0,32767,-1608,32728,-3212,32610,-4808,32413,-6393,32138,-7962,31786,-9512,31357,-11039,30852,-12540,30273,-14010,29622,-15447,28898,-16846,28106,-18205,27245,-19520,26319,-20788,25330,-22005,24279,-23170,23170,-24279,22005,-25330,20788,-26319,19520,-27245,18205,-28106,16846,-28898,15447,-29622,14010,-30273,12540,-30852,11039,-31357,9512,-31786,7962,-32138,6393,-32413,4808,-32610,3212,-32728,1608};
 
-int16_t tw128b[128] __attribute__((aligned(32))) = {0,32767,-1608,32727,-3212,32609,-4808,32412,-6393,32137,-7962,31785,-9512,31356,-11039,30851,-12540,30272,-14010,29621,-15447,28897,-16846,28105,-18205,27244,-19520,26318,-20788,25329,-22005,24278,-23170,23169,-24279,22004,-25330,20787,-26319,19519,-27245,18204,-28106,16845,-28898,15446,-29622,14009,-30273,12539,-30852,11038,-31357,9511,-31786,7961,-32138,6392,-32413,4807,-32610,3211,-32728,1607,-32767,0,-32728,-1608,-32610,-3212,-32413,-4808,-32138,-6393,-31786,-7962,-31357,-9512,-30852,-11039,-30273,-12540,-29622,-14010,-28898,-15447,-28106,-16846,-27245,-18205,-26319,-19520,-25330,-20788,-24279,-22005,-23170,-23170,-22005,-24279,-20788,-25330,-19520,-26319,-18205,-27245,-16846,-28106,-15447,-28898,-14010,-29622,-12540,-30273,-11039,-30852,-9512,-31357,-7962,-31786,-6393,-32138,-4808,-32413,-3212,-32610,-1608,-32728};
+static const int16_t tw128b[128] __attribute__((aligned(32))) = {0,32767,-1608,32727,-3212,32609,-4808,32412,-6393,32137,-7962,31785,-9512,31356,-11039,30851,-12540,30272,-14010,29621,-15447,28897,-16846,28105,-18205,27244,-19520,26318,-20788,25329,-22005,24278,-23170,23169,-24279,22004,-25330,20787,-26319,19519,-27245,18204,-28106,16845,-28898,15446,-29622,14009,-30273,12539,-30852,11038,-31357,9511,-31786,7961,-32138,6392,-32413,4807,-32610,3211,-32728,1607,-32767,0,-32728,-1608,-32610,-3212,-32413,-4808,-32138,-6393,-31786,-7962,-31357,-9512,-30852,-11039,-30273,-12540,-29622,-14010,-28898,-15447,-28106,-16846,-27245,-18205,-26319,-19520,-25330,-20788,-24279,-22005,-23170,-23170,-22005,-24279,-20788,-25330,-19520,-26319,-18205,-27245,-16846,-28106,-15447,-28898,-14010,-29622,-12540,-30273,-11039,-30852,-9512,-31357,-7962,-31786,-6393,-32138,-4808,-32413,-3212,-32610,-1608,-32728};
 
-int16_t tw128c[128] __attribute__((aligned(32))) = {0,32767,1608,32727,3212,32609,4808,32412,6393,32137,7962,31785,9512,31356,11039,30851,12540,30272,14010,29621,15447,28897,16846,28105,18205,27244,19520,26318,20788,25329,22005,24278,23170,23169,24279,22004,25330,20787,26319,19519,27245,18204,28106,16845,28898,15446,29622,14009,30273,12539,30852,11038,31357,9511,31786,7961,32138,6392,32413,4807,32610,3211,32728,1607,32767,0,32728,-1608,32610,-3212,32413,-4808,32138,-6393,31786,-7962,31357,-9512,30852,-11039,30273,-12540,29622,-14010,28898,-15447,28106,-16846,27245,-18205,26319,-19520,25330,-20788,24279,-22005,23170,-23170,22005,-24279,20788,-25330,19520,-26319,18205,-27245,16846,-28106,15447,-28898,14010,-29622,12540,-30273,11039,-30852,9512,-31357,7962,-31786,6393,-32138,4808,-32413,3212,-32610,1608,-32728};
+static const int16_t tw128c[128] __attribute__((aligned(32))) = {0,32767,1608,32727,3212,32609,4808,32412,6393,32137,7962,31785,9512,31356,11039,30851,12540,30272,14010,29621,15447,28897,16846,28105,18205,27244,19520,26318,20788,25329,22005,24278,23170,23169,24279,22004,25330,20787,26319,19519,27245,18204,28106,16845,28898,15446,29622,14009,30273,12539,30852,11038,31357,9511,31786,7961,32138,6392,32413,4807,32610,3211,32728,1607,32767,0,32728,-1608,32610,-3212,32413,-4808,32138,-6393,31786,-7962,31357,-9512,30852,-11039,30273,-12540,29622,-14010,28898,-15447,28106,-16846,27245,-18205,26319,-19520,25330,-20788,24279,-22005,23170,-23170,22005,-24279,20788,-25330,19520,-26319,18205,-27245,16846,-28106,15447,-28898,14010,-29622,12540,-30273,11039,-30852,9512,-31357,7962,-31786,6393,-32138,4808,-32413,3212,-32610,1608,-32728};
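+// tw128 holds W_128^k (k = 0..63) in Q15 and tw128a its conjugate W_128^-k;
+// tw128b/tw128c appear to be the same ramps with re/im swapped (i.e. rotated
+// by j), the split layout consumed by the packed complex-multiply helpers.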
 
 void dft128(int16_t *x,int16_t *y,unsigned char scale)
 {
@@ -2917,17 +2311,17 @@ void idft128(int16_t *x,int16_t *y,unsigned char scale)
 
 }
 
-int16_t tw256[384] __attribute__((aligned(32))) = {  32767,0,32757,-805,32727,-1608,32678,-2411,32609,-3212,32520,-4012,32412,-4808,32284,-5602,32137,-6393,31970,-7180,31785,-7962,31580,-8740,31356,-9512,31113,-10279,30851,-11039,30571,-11793,30272,-12540,29955,-13279,29621,-14010,29268,-14733,28897,-15447,28510,-16151,28105,-16846,27683,-17531,27244,-18205,26789,-18868,26318,-19520,25831,-20160,25329,-20788,24811,-21403,24278,-22005,23731,-22595,23169,-23170,22594,-23732,22004,-24279,21402,-24812,20787,-25330,20159,-25832,19519,-26319,18867,-26790,18204,-27245,17530,-27684,16845,-28106,16150,-28511,15446,-28898,14732,-29269,14009,-29622,13278,-29956,12539,-30273,11792,-30572,11038,-30852,10278,-31114,9511,-31357,8739,-31581,7961,-31786,7179,-31971,6392,-32138,5601,-32285,4807,-32413,4011,-32521,3211,-32610,2410,-32679,1607,-32728,804,-32758,
+static const int16_t tw256[384] __attribute__((aligned(32))) = {  32767,0,32757,-805,32727,-1608,32678,-2411,32609,-3212,32520,-4012,32412,-4808,32284,-5602,32137,-6393,31970,-7180,31785,-7962,31580,-8740,31356,-9512,31113,-10279,30851,-11039,30571,-11793,30272,-12540,29955,-13279,29621,-14010,29268,-14733,28897,-15447,28510,-16151,28105,-16846,27683,-17531,27244,-18205,26789,-18868,26318,-19520,25831,-20160,25329,-20788,24811,-21403,24278,-22005,23731,-22595,23169,-23170,22594,-23732,22004,-24279,21402,-24812,20787,-25330,20159,-25832,19519,-26319,18867,-26790,18204,-27245,17530,-27684,16845,-28106,16150,-28511,15446,-28898,14732,-29269,14009,-29622,13278,-29956,12539,-30273,11792,-30572,11038,-30852,10278,-31114,9511,-31357,8739,-31581,7961,-31786,7179,-31971,6392,-32138,5601,-32285,4807,-32413,4011,-32521,3211,-32610,2410,-32679,1607,-32728,804,-32758,
                                                      32767,0,32727,-1608,32609,-3212,32412,-4808,32137,-6393,31785,-7962,31356,-9512,30851,-11039,30272,-12540,29621,-14010,28897,-15447,28105,-16846,27244,-18205,26318,-19520,25329,-20788,24278,-22005,23169,-23170,22004,-24279,20787,-25330,19519,-26319,18204,-27245,16845,-28106,15446,-28898,14009,-29622,12539,-30273,11038,-30852,9511,-31357,7961,-31786,6392,-32138,4807,-32413,3211,-32610,1607,-32728,0,-32767,-1608,-32728,-3212,-32610,-4808,-32413,-6393,-32138,-7962,-31786,-9512,-31357,-11039,-30852,-12540,-30273,-14010,-29622,-15447,-28898,-16846,-28106,-18205,-27245,-19520,-26319,-20788,-25330,-22005,-24279,-23170,-23170,-24279,-22005,-25330,-20788,-26319,-19520,-27245,-18205,-28106,-16846,-28898,-15447,-29622,-14010,-30273,-12540,-30852,-11039,-31357,-9512,-31786,-7962,-32138,-6393,-32413,-4808,-32610,-3212,-32728,-1608,
                                                      32767,0,32678,-2411,32412,-4808,31970,-7180,31356,-9512,30571,-11793,29621,-14010,28510,-16151,27244,-18205,25831,-20160,24278,-22005,22594,-23732,20787,-25330,18867,-26790,16845,-28106,14732,-29269,12539,-30273,10278,-31114,7961,-31786,5601,-32285,3211,-32610,804,-32758,-1608,-32728,-4012,-32521,-6393,-32138,-8740,-31581,-11039,-30852,-13279,-29956,-15447,-28898,-17531,-27684,-19520,-26319,-21403,-24812,-23170,-23170,-24812,-21403,-26319,-19520,-27684,-17531,-28898,-15447,-29956,-13279,-30852,-11039,-31581,-8740,-32138,-6393,-32521,-4012,-32728,-1608,-32758,804,-32610,3211,-32285,5601,-31786,7961,-31114,10278,-30273,12539,-29269,14732,-28106,16845,-26790,18867,-25330,20787,-23732,22594,-22005,24278,-20160,25831,-18205,27244,-16151,28510,-14010,29621,-11793,30571,-9512,31356,-7180,31970,-4808,32412,-2411,32678
                                                   };
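+// tw256 packs the three radix-4 twiddle ramps back to back: 64 Q15 complex
+// values each of W_256^k, W_256^2k and W_256^3k for k = 0..63.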
 
-int16_t tw256a[384] __attribute__((aligned(32))) = { 32767,0,32757,804,32727,1607,32678,2410,32609,3211,32520,4011,32412,4807,32284,5601,32137,6392,31970,7179,31785,7961,31580,8739,31356,9511,31113,10278,30851,11038,30571,11792,30272,12539,29955,13278,29621,14009,29268,14732,28897,15446,28510,16150,28105,16845,27683,17530,27244,18204,26789,18867,26318,19519,25831,20159,25329,20787,24811,21402,24278,22004,23731,22594,23169,23169,22594,23731,22004,24278,21402,24811,20787,25329,20159,25831,19519,26318,18867,26789,18204,27244,17530,27683,16845,28105,16150,28510,15446,28897,14732,29268,14009,29621,13278,29955,12539,30272,11792,30571,11038,30851,10278,31113,9511,31356,8739,31580,7961,31785,7179,31970,6392,32137,5601,32284,4807,32412,4011,32520,3211,32609,2410,32678,1607,32727,804,32757,
+static const int16_t tw256a[384] __attribute__((aligned(32))) = { 32767,0,32757,804,32727,1607,32678,2410,32609,3211,32520,4011,32412,4807,32284,5601,32137,6392,31970,7179,31785,7961,31580,8739,31356,9511,31113,10278,30851,11038,30571,11792,30272,12539,29955,13278,29621,14009,29268,14732,28897,15446,28510,16150,28105,16845,27683,17530,27244,18204,26789,18867,26318,19519,25831,20159,25329,20787,24811,21402,24278,22004,23731,22594,23169,23169,22594,23731,22004,24278,21402,24811,20787,25329,20159,25831,19519,26318,18867,26789,18204,27244,17530,27683,16845,28105,16150,28510,15446,28897,14732,29268,14009,29621,13278,29955,12539,30272,11792,30571,11038,30851,10278,31113,9511,31356,8739,31580,7961,31785,7179,31970,6392,32137,5601,32284,4807,32412,4011,32520,3211,32609,2410,32678,1607,32727,804,32757,
                                                      32767,0,32727,1607,32609,3211,32412,4807,32137,6392,31785,7961,31356,9511,30851,11038,30272,12539,29621,14009,28897,15446,28105,16845,27244,18204,26318,19519,25329,20787,24278,22004,23169,23169,22004,24278,20787,25329,19519,26318,18204,27244,16845,28105,15446,28897,14009,29621,12539,30272,11038,30851,9511,31356,7961,31785,6392,32137,4807,32412,3211,32609,1607,32727,0,32767,-1608,32727,-3212,32609,-4808,32412,-6393,32137,-7962,31785,-9512,31356,-11039,30851,-12540,30272,-14010,29621,-15447,28897,-16846,28105,-18205,27244,-19520,26318,-20788,25329,-22005,24278,-23170,23169,-24279,22004,-25330,20787,-26319,19519,-27245,18204,-28106,16845,-28898,15446,-29622,14009,-30273,12539,-30852,11038,-31357,9511,-31786,7961,-32138,6392,-32413,4807,-32610,3211,-32728,1607,
                                                      32767,0,32678,2410,32412,4807,31970,7179,31356,9511,30571,11792,29621,14009,28510,16150,27244,18204,25831,20159,24278,22004,22594,23731,20787,25329,18867,26789,16845,28105,14732,29268,12539,30272,10278,31113,7961,31785,5601,32284,3211,32609,804,32757,-1608,32727,-4012,32520,-6393,32137,-8740,31580,-11039,30851,-13279,29955,-15447,28897,-17531,27683,-19520,26318,-21403,24811,-23170,23169,-24812,21402,-26319,19519,-27684,17530,-28898,15446,-29956,13278,-30852,11038,-31581,8739,-32138,6392,-32521,4011,-32728,1607,-32758,-805,-32610,-3212,-32285,-5602,-31786,-7962,-31114,-10279,-30273,-12540,-29269,-14733,-28106,-16846,-26790,-18868,-25330,-20788,-23732,-22595,-22005,-24279,-20160,-25832,-18205,-27245,-16151,-28511,-14010,-29622,-11793,-30572,-9512,-31357,-7180,-31971,-4808,-32413,-2411,-32679
                                                    };
 
-int16_t tw256b[384] __attribute__((aligned(32))) = {0,32767,-805,32757,-1608,32727,-2411,32678,-3212,32609,-4012,32520,-4808,32412,-5602,32284,-6393,32137,-7180,31970,-7962,31785,-8740,31580,-9512,31356,-10279,31113,-11039,30851,-11793,30571,-12540,30272,-13279,29955,-14010,29621,-14733,29268,-15447,28897,-16151,28510,-16846,28105,-17531,27683,-18205,27244,-18868,26789,-19520,26318,-20160,25831,-20788,25329,-21403,24811,-22005,24278,-22595,23731,-23170,23169,-23732,22594,-24279,22004,-24812,21402,-25330,20787,-25832,20159,-26319,19519,-26790,18867,-27245,18204,-27684,17530,-28106,16845,-28511,16150,-28898,15446,-29269,14732,-29622,14009,-29956,13278,-30273,12539,-30572,11792,-30852,11038,-31114,10278,-31357,9511,-31581,8739,-31786,7961,-31971,7179,-32138,6392,-32285,5601,-32413,4807,-32521,4011,-32610,3211,-32679,2410,-32728,1607,-32758,804,
+static const int16_t tw256b[384] __attribute__((aligned(32))) = {0,32767,-805,32757,-1608,32727,-2411,32678,-3212,32609,-4012,32520,-4808,32412,-5602,32284,-6393,32137,-7180,31970,-7962,31785,-8740,31580,-9512,31356,-10279,31113,-11039,30851,-11793,30571,-12540,30272,-13279,29955,-14010,29621,-14733,29268,-15447,28897,-16151,28510,-16846,28105,-17531,27683,-18205,27244,-18868,26789,-19520,26318,-20160,25831,-20788,25329,-21403,24811,-22005,24278,-22595,23731,-23170,23169,-23732,22594,-24279,22004,-24812,21402,-25330,20787,-25832,20159,-26319,19519,-26790,18867,-27245,18204,-27684,17530,-28106,16845,-28511,16150,-28898,15446,-29269,14732,-29622,14009,-29956,13278,-30273,12539,-30572,11792,-30852,11038,-31114,10278,-31357,9511,-31581,8739,-31786,7961,-31971,7179,-32138,6392,-32285,5601,-32413,4807,-32521,4011,-32610,3211,-32679,2410,-32728,1607,-32758,804,
                                                     0,32767,-1608,32727,-3212,32609,-4808,32412,-6393,32137,-7962,31785,-9512,31356,-11039,30851,-12540,30272,-14010,29621,-15447,28897,-16846,28105,-18205,27244,-19520,26318,-20788,25329,-22005,24278,-23170,23169,-24279,22004,-25330,20787,-26319,19519,-27245,18204,-28106,16845,-28898,15446,-29622,14009,-30273,12539,-30852,11038,-31357,9511,-31786,7961,-32138,6392,-32413,4807,-32610,3211,-32728,1607,-32767,0,-32728,-1608,-32610,-3212,-32413,-4808,-32138,-6393,-31786,-7962,-31357,-9512,-30852,-11039,-30273,-12540,-29622,-14010,-28898,-15447,-28106,-16846,-27245,-18205,-26319,-19520,-25330,-20788,-24279,-22005,-23170,-23170,-22005,-24279,-20788,-25330,-19520,-26319,-18205,-27245,-16846,-28106,-15447,-28898,-14010,-29622,-12540,-30273,-11039,-30852,-9512,-31357,-7962,-31786,-6393,-32138,-4808,-32413,-3212,-32610,-1608,-32728,
                                                     0,32767,-2411,32678,-4808,32412,-7180,31970,-9512,31356,-11793,30571,-14010,29621,-16151,28510,-18205,27244,-20160,25831,-22005,24278,-23732,22594,-25330,20787,-26790,18867,-28106,16845,-29269,14732,-30273,12539,-31114,10278,-31786,7961,-32285,5601,-32610,3211,-32758,804,-32728,-1608,-32521,-4012,-32138,-6393,-31581,-8740,-30852,-11039,-29956,-13279,-28898,-15447,-27684,-17531,-26319,-19520,-24812,-21403,-23170,-23170,-21403,-24812,-19520,-26319,-17531,-27684,-15447,-28898,-13279,-29956,-11039,-30852,-8740,-31581,-6393,-32138,-4012,-32521,-1608,-32728,804,-32758,3211,-32610,5601,-32285,7961,-31786,10278,-31114,12539,-30273,14732,-29269,16845,-28106,18867,-26790,20787,-25330,22594,-23732,24278,-22005,25831,-20160,27244,-18205,28510,-16151,29621,-14010,30571,-11793,31356,-9512,31970,-7180,32412,-4808,32678,-2411
                                                    };
@@ -3018,9 +2412,8 @@ void dft256(int16_t *x,int16_t *y,unsigned char scale)
 
   }
 
-  _mm_empty();
-  _m_empty();
-
+  simde_mm_empty();
+  simde_m_empty();
 }
 
 void idft256(int16_t *x,int16_t *y,unsigned char scale)
@@ -3103,26 +2496,25 @@ void idft256(int16_t *x,int16_t *y,unsigned char scale)
 
   }
 
-  _mm_empty();
-  _m_empty();
-
+  simde_mm_empty();
+  simde_m_empty();
 }
 
-int16_t tw512[512] __attribute__((aligned(32))) = {
+static const int16_t tw512[512] __attribute__((aligned(32))) = {
   32767,0,32764,-403,32757,-805,32744,-1207,32727,-1608,32705,-2010,32678,-2411,32646,-2812,32609,-3212,32567,-3612,32520,-4012,32468,-4410,32412,-4808,32350,-5206,32284,-5602,32213,-5998,32137,-6393,32056,-6787,31970,-7180,31880,-7572,31785,-7962,31684,-8352,31580,-8740,31470,-9127,31356,-9512,31236,-9896,31113,-10279,30984,-10660,30851,-11039,30713,-11417,30571,-11793,30424,-12167,30272,-12540,30116,-12910,29955,-13279,29790,-13646,29621,-14010,29446,-14373,29268,-14733,29085,-15091,28897,-15447,28706,-15800,28510,-16151,28309,-16500,28105,-16846,27896,-17190,27683,-17531,27466,-17869,27244,-18205,27019,-18538,26789,-18868,26556,-19195,26318,-19520,26077,-19841,25831,-20160,25582,-20475,25329,-20788,25072,-21097,24811,-21403,24546,-21706,24278,-22005,24006,-22302,23731,-22595,23452,-22884,23169,-23170,22883,-23453,22594,-23732,22301,-24007,22004,-24279,21705,-24547,21402,-24812,21096,-25073,20787,-25330,20474,-25583,20159,-25832,19840,-26078,19519,-26319,19194,-26557,18867,-26790,18537,-27020,18204,-27245,17868,-27467,17530,-27684,17189,-27897,16845,-28106,16499,-28310,16150,-28511,15799,-28707,15446,-28898,15090,-29086,14732,-29269,14372,-29447,14009,-29622,13645,-29791,13278,-29956,12909,-30117,12539,-30273,12166,-30425,11792,-30572,11416,-30714,11038,-30852,10659,-30985,10278,-31114,9895,-31237,9511,-31357,9126,-31471,8739,-31581,8351,-31685,7961,-31786,7571,-31881,7179,-31971,6786,-32057,6392,-32138,5997,-32214,5601,-32285,5205,-32351,4807,-32413,4409,-32469,4011,-32521,3611,-32568,3211,-32610,2811,-32647,2410,-32679,2009,-32706,1607,-32728,1206,-32745,804,-32758,402,-32765,0,-32767,-403,-32765,-805,-32758,-1207,-32745,-1608,-32728,-2010,-32706,-2411,-32679,-2812,-32647,-3212,-32610,-3612,-32568,-4012,-32521,-4410,-32469,-4808,-32413,-5206,-32351,-5602,-32285,-5998,-32214,-6393,-32138,-6787,-32057,-7180,-31971,-7572,-31881,-7962,-31786,-8352,-31685,-8740,-31581,-9127,-31471,-9512,-31357,-9896,-31237,-10279,-31114,-10660,-30985,-11039,-30852,-11417,-30714,-11793,-30572,-12167,-30425,-12540,-30273,-12910,-30117,-13279,-29956,-13646,-29791,-14010,-29622,-14373,-29447,-14733,-29269,-15091,-29086,-15447,-28898,-15800,-28707,-16151,-28511,-16500,-28310,-16846,-28106,-17190,-27897,-17531,-27684,-17869,-27467,-18205,-27245,-18538,-27020,-18868,-26790,-19195,-26557,-19520,-26319,-19841,-26078,-20160,-25832,-20475,-25583,-20788,-25330,-21097,-25073,-21403,-24812,-21706,-24547,-22005,-24279,-22302,-24007,-22595,-23732,-22884,-23453,-23170,-23170,-23453,-22884,-23732,-22595,-24007,-22302,-24279,-22005,-24547,-21706,-24812,-21403,-25073,-21097,-25330,-20788,-25583,-20475,-25832,-20160,-26078,-19841,-26319,-19520,-26557,-19195,-26790,-18868,-27020,-18538,-27245,-18205,-27467,-17869,-27684,-17531,-27897,-17190,-28106,-16846,-28310,-16500,-28511,-16151,-28707,-15800,-28898,-15447,-29086,-15091,-29269,-14733,-29447,-14373,-29622,-14010,-29791,-13646,-29956,-13279,-30117,-12910,-30273,-12540,-30425,-12167,-30572,-11793,-30714,-11417,-30852,-11039,-30985,-10660,-31114,-10279,-31237,-9896,-31357,-9512,-31471,-9127,-31581,-8740,-31685,-8352,-31786,-7962,-31881,-7572,-31971,-7180,-32057,-6787,-32138,-6393,-32214,-5998,-32285,-5602,-32351,-5206,-32413,-4808,-32469,-4410,-32521,-4012,-32568,-3612,-32610,-3212,-32647,-2812,-32679,-2411,-32706,-2010,-32728,-1608,-32745,-1207,-32758,-805,-32765,-403
 };
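+// tw512 is a single radix-2 twiddle ramp: W_512^k for k = 0..255 in Q15.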
 
-int16_t tw512a[512] __attribute__((aligned(32))) = {
+static const int16_t tw512a[512] __attribute__((aligned(32))) = {
   32767,0,32764,403,32757,805,32744,1207,32727,1608,32705,2010,32678,2411,32646,2812,32609,3212,32567,3612,32520,4012,32468,4410,32412,4808,32350,5206,32284,5602,32213,5998,32137,6393,32056,6787,31970,7180,31880,7572,31785,7962,31684,8352,31580,8740,31470,9127,31356,9512,31236,9896,31113,10279,30984,10660,30851,11039,30713,11417,30571,11793,30424,12167,30272,12540,30116,12910,29955,13279,29790,13646,29621,14010,29446,14373,29268,14733,29085,15091,28897,15447,28706,15800,28510,16151,28309,16500,28105,16846,27896,17190,27683,17531,27466,17869,27244,18205,27019,18538,26789,18868,26556,19195,26318,19520,26077,19841,25831,20160,25582,20475,25329,20788,25072,21097,24811,21403,24546,21706,24278,22005,24006,22302,23731,22595,23452,22884,23169,23170,22883,23453,22594,23732,22301,24007,22004,24279,21705,24547,21402,24812,21096,25073,20787,25330,20474,25583,20159,25832,19840,26078,19519,26319,19194,26557,18867,26790,18537,27020,18204,27245,17868,27467,17530,27684,17189,27897,16845,28106,16499,28310,16150,28511,15799,28707,15446,28898,15090,29086,14732,29269,14372,29447,14009,29622,13645,29791,13278,29956,12909,30117,12539,30273,12166,30425,11792,30572,11416,30714,11038,30852,10659,30985,10278,31114,9895,31237,9511,31357,9126,31471,8739,31581,8351,31685,7961,31786,7571,31881,7179,31971,6786,32057,6392,32138,5997,32214,5601,32285,5205,32351,4807,32413,4409,32469,4011,32521,3611,32568,3211,32610,2811,32647,2410,32679,2009,32706,1607,32728,1206,32745,804,32758,402,32765,0,32767,-403,32765,-805,32758,-1207,32745,-1608,32728,-2010,32706,-2411,32679,-2812,32647,-3212,32610,-3612,32568,-4012,32521,-4410,32469,-4808,32413,-5206,32351,-5602,32285,-5998,32214,-6393,32138,-6787,32057,-7180,31971,-7572,31881,-7962,31786,-8352,31685,-8740,31581,-9127,31471,-9512,31357,-9896,31237,-10279,31114,-10660,30985,-11039,30852,-11417,30714,-11793,30572,-12167,30425,-12540,30273,-12910,30117,-13279,29956,-13646,29791,-14010,29622,-14373,29447,-14733,29269,-15091,29086,-15447,28898,-15800,28707,-16151,28511,-16500,28310,-16846,28106,-17190,27897,-17531,27684,-17869,27467,-18205,27245,-18538,27020,-18868,26790,-19195,26557,-19520,26319,-19841,26078,-20160,25832,-20475,25583,-20788,25330,-21097,25073,-21403,24812,-21706,24547,-22005,24279,-22302,24007,-22595,23732,-22884,23453,-23170,23170,-23453,22884,-23732,22595,-24007,22302,-24279,22005,-24547,21706,-24812,21403,-25073,21097,-25330,20788,-25583,20475,-25832,20160,-26078,19841,-26319,19520,-26557,19195,-26790,18868,-27020,18538,-27245,18205,-27467,17869,-27684,17531,-27897,17190,-28106,16846,-28310,16500,-28511,16151,-28707,15800,-28898,15447,-29086,15091,-29269,14733,-29447,14373,-29622,14010,-29791,13646,-29956,13279,-30117,12910,-30273,12540,-30425,12167,-30572,11793,-30714,11417,-30852,11039,-30985,10660,-31114,10279,-31237,9896,-31357,9512,-31471,9127,-31581,8740,-31685,8352,-31786,7962,-31881,7572,-31971,7180,-32057,6787,-32138,6393,-32214,5998,-32285,5602,-32351,5206,-32413,4808,-32469,4410,-32521,4012,-32568,3612,-32610,3212,-32647,2812,-32679,2411,-32706,2010,-32728,1608,-32745,1207,-32758,805,-32765,403
 };
 
 
 
-int16_t tw512b[512] __attribute__((aligned(32))) = {
+static const int16_t tw512b[512] __attribute__((aligned(32))) = {
   0,32767,-403,32764,-805,32757,-1207,32744,-1608,32727,-2010,32705,-2411,32678,-2812,32646,-3212,32609,-3612,32567,-4012,32520,-4410,32468,-4808,32412,-5206,32350,-5602,32284,-5998,32213,-6393,32137,-6787,32056,-7180,31970,-7572,31880,-7962,31785,-8352,31684,-8740,31580,-9127,31470,-9512,31356,-9896,31236,-10279,31113,-10660,30984,-11039,30851,-11417,30713,-11793,30571,-12167,30424,-12540,30272,-12910,30116,-13279,29955,-13646,29790,-14010,29621,-14373,29446,-14733,29268,-15091,29085,-15447,28897,-15800,28706,-16151,28510,-16500,28309,-16846,28105,-17190,27896,-17531,27683,-17869,27466,-18205,27244,-18538,27019,-18868,26789,-19195,26556,-19520,26318,-19841,26077,-20160,25831,-20475,25582,-20788,25329,-21097,25072,-21403,24811,-21706,24546,-22005,24278,-22302,24006,-22595,23731,-22884,23452,-23170,23169,-23453,22883,-23732,22594,-24007,22301,-24279,22004,-24547,21705,-24812,21402,-25073,21096,-25330,20787,-25583,20474,-25832,20159,-26078,19840,-26319,19519,-26557,19194,-26790,18867,-27020,18537,-27245,18204,-27467,17868,-27684,17530,-27897,17189,-28106,16845,-28310,16499,-28511,16150,-28707,15799,-28898,15446,-29086,15090,-29269,14732,-29447,14372,-29622,14009,-29791,13645,-29956,13278,-30117,12909,-30273,12539,-30425,12166,-30572,11792,-30714,11416,-30852,11038,-30985,10659,-31114,10278,-31237,9895,-31357,9511,-31471,9126,-31581,8739,-31685,8351,-31786,7961,-31881,7571,-31971,7179,-32057,6786,-32138,6392,-32214,5997,-32285,5601,-32351,5205,-32413,4807,-32469,4409,-32521,4011,-32568,3611,-32610,3211,-32647,2811,-32679,2410,-32706,2009,-32728,1607,-32745,1206,-32758,804,-32765,402,-32767,0,-32765,-403,-32758,-805,-32745,-1207,-32728,-1608,-32706,-2010,-32679,-2411,-32647,-2812,-32610,-3212,-32568,-3612,-32521,-4012,-32469,-4410,-32413,-4808,-32351,-5206,-32285,-5602,-32214,-5998,-32138,-6393,-32057,-6787,-31971,-7180,-31881,-7572,-31786,-7962,-31685,-8352,-31581,-8740,-31471,-9127,-31357,-9512,-31237,-9896,-31114,-10279,-30985,-10660,-30852,-11039,-30714,-11417,-30572,-11793,-30425,-12167,-30273,-12540,-30117,-12910,-29956,-13279,-29791,-13646,-29622,-14010,-29447,-14373,-29269,-14733,-29086,-15091,-28898,-15447,-28707,-15800,-28511,-16151,-28310,-16500,-28106,-16846,-27897,-17190,-27684,-17531,-27467,-17869,-27245,-18205,-27020,-18538,-26790,-18868,-26557,-19195,-26319,-19520,-26078,-19841,-25832,-20160,-25583,-20475,-25330,-20788,-25073,-21097,-24812,-21403,-24547,-21706,-24279,-22005,-24007,-22302,-23732,-22595,-23453,-22884,-23170,-23170,-22884,-23453,-22595,-23732,-22302,-24007,-22005,-24279,-21706,-24547,-21403,-24812,-21097,-25073,-20788,-25330,-20475,-25583,-20160,-25832,-19841,-26078,-19520,-26319,-19195,-26557,-18868,-26790,-18538,-27020,-18205,-27245,-17869,-27467,-17531,-27684,-17190,-27897,-16846,-28106,-16500,-28310,-16151,-28511,-15800,-28707,-15447,-28898,-15091,-29086,-14733,-29269,-14373,-29447,-14010,-29622,-13646,-29791,-13279,-29956,-12910,-30117,-12540,-30273,-12167,-30425,-11793,-30572,-11417,-30714,-11039,-30852,-10660,-30985,-10279,-31114,-9896,-31237,-9512,-31357,-9127,-31471,-8740,-31581,-8352,-31685,-7962,-31786,-7572,-31881,-7180,-31971,-6787,-32057,-6393,-32138,-5998,-32214,-5602,-32285,-5206,-32351,-4808,-32413,-4410,-32469,-4012,-32521,-3612,-32568,-3212,-32610,-2812,-32647,-2411,-32679,-2010,-32706,-1608,-32728,-1207,-32745,-805,-32758,-403,-32765
 };
 
-int16_t tw512c[512] __attribute__((aligned(32))) = {
+static const int16_t tw512c[512] __attribute__((aligned(32))) = {
   0,32767,403,32764,805,32757,1207,32744,1608,32727,2010,32705,2411,32678,2812,32646,3212,32609,3612,32567,4012,32520,4410,32468,4808,32412,5206,32350,5602,32284,5998,32213,6393,32137,6787,32056,7180,31970,7572,31880,7962,31785,8352,31684,8740,31580,9127,31470,9512,31356,9896,31236,10279,31113,10660,30984,11039,30851,11417,30713,11793,30571,12167,30424,12540,30272,12910,30116,13279,29955,13646,29790,14010,29621,14373,29446,14733,29268,15091,29085,15447,28897,15800,28706,16151,28510,16500,28309,16846,28105,17190,27896,17531,27683,17869,27466,18205,27244,18538,27019,18868,26789,19195,26556,19520,26318,19841,26077,20160,25831,20475,25582,20788,25329,21097,25072,21403,24811,21706,24546,22005,24278,22302,24006,22595,23731,22884,23452,23170,23169,23453,22883,23732,22594,24007,22301,24279,22004,24547,21705,24812,21402,25073,21096,25330,20787,25583,20474,25832,20159,26078,19840,26319,19519,26557,19194,26790,18867,27020,18537,27245,18204,27467,17868,27684,17530,27897,17189,28106,16845,28310,16499,28511,16150,28707,15799,28898,15446,29086,15090,29269,14732,29447,14372,29622,14009,29791,13645,29956,13278,30117,12909,30273,12539,30425,12166,30572,11792,30714,11416,30852,11038,30985,10659,31114,10278,31237,9895,31357,9511,31471,9126,31581,8739,31685,8351,31786,7961,31881,7571,31971,7179,32057,6786,32138,6392,32214,5997,32285,5601,32351,5205,32413,4807,32469,4409,32521,4011,32568,3611,32610,3211,32647,2811,32679,2410,32706,2009,32728,1607,32745,1206,32758,804,32765,402,32767,0,32765,-403,32758,-805,32745,-1207,32728,-1608,32706,-2010,32679,-2411,32647,-2812,32610,-3212,32568,-3612,32521,-4012,32469,-4410,32413,-4808,32351,-5206,32285,-5602,32214,-5998,32138,-6393,32057,-6787,31971,-7180,31881,-7572,31786,-7962,31685,-8352,31581,-8740,31471,-9127,31357,-9512,31237,-9896,31114,-10279,30985,-10660,30852,-11039,30714,-11417,30572,-11793,30425,-12167,30273,-12540,30117,-12910,29956,-13279,29791,-13646,29622,-14010,29447,-14373,29269,-14733,29086,-15091,28898,-15447,28707,-15800,28511,-16151,28310,-16500,28106,-16846,27897,-17190,27684,-17531,27467,-17869,27245,-18205,27020,-18538,26790,-18868,26557,-19195,26319,-19520,26078,-19841,25832,-20160,25583,-20475,25330,-20788,25073,-21097,24812,-21403,24547,-21706,24279,-22005,24007,-22302,23732,-22595,23453,-22884,23170,-23170,22884,-23453,22595,-23732,22302,-24007,22005,-24279,21706,-24547,21403,-24812,21097,-25073,20788,-25330,20475,-25583,20160,-25832,19841,-26078,19520,-26319,19195,-26557,18868,-26790,18538,-27020,18205,-27245,17869,-27467,17531,-27684,17190,-27897,16846,-28106,16500,-28310,16151,-28511,15800,-28707,15447,-28898,15091,-29086,14733,-29269,14373,-29447,14010,-29622,13646,-29791,13279,-29956,12910,-30117,12540,-30273,12167,-30425,11793,-30572,11417,-30714,11039,-30852,10660,-30985,10279,-31114,9896,-31237,9512,-31357,9127,-31471,8740,-31581,8352,-31685,7962,-31786,7572,-31881,7180,-31971,6787,-32057,6393,-32138,5998,-32214,5602,-32285,5206,-32351,4808,-32413,4410,-32469,4012,-32521,3612,-32568,3212,-32610,2812,-32647,2411,-32679,2010,-32706,1608,-32728,1207,-32745,805,-32758,403,-32765
 };
 
@@ -3343,9 +2735,8 @@ void dft1024(int16_t *x,int16_t *y,unsigned char scale)
 
   }
 
-  _mm_empty();
-  _m_empty();
-
+  simde_mm_empty();
+  simde_m_empty();
 }
 
 void idft1024(int16_t *x,int16_t *y,unsigned char scale)
@@ -3399,9 +2790,8 @@ void idft1024(int16_t *x,int16_t *y,unsigned char scale)
 
   }
 
-  _mm_empty();
-  _m_empty();
-
+  simde_mm_empty();
+  simde_m_empty();
 }
 
 int16_t tw2048[2048] __attribute__((aligned(32)));
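+// Unlike the constant tables above, tw2048 (and tw4096/tw8192/... below) is
+// left non-const because it is presumably filled in at runtime by the DFT
+// init code rather than spelled out here.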
@@ -3492,9 +2882,8 @@ void dft2048(int16_t *x,int16_t *y,unsigned char scale)
     }
   }
 
-  _mm_empty();
-  _m_empty();
-
+  simde_mm_empty();
+  simde_m_empty();
 }
 
 void idft2048(int16_t *x,int16_t *y,unsigned char scale)
@@ -3582,9 +2971,8 @@ void idft2048(int16_t *x,int16_t *y,unsigned char scale)
     }
   }
 
-  _mm_empty();
-  _m_empty();
-
+  simde_mm_empty();
+  simde_m_empty();
 }
 
 int16_t tw4096[3*2*1024];
@@ -3640,9 +3028,8 @@ void dft4096(int16_t *x,int16_t *y,unsigned char scale)
 
   }
 
-  _mm_empty();
-  _m_empty();
-
+  simde_mm_empty();
+  simde_m_empty();
 }
 
 void idft4096(int16_t *x,int16_t *y,unsigned char scale)
@@ -3696,9 +3083,8 @@ void idft4096(int16_t *x,int16_t *y,unsigned char scale)
 
   }
 
-  _mm_empty();
-  _m_empty();
-
+  simde_mm_empty();
+  simde_m_empty();
 }
 
 int16_t tw8192[2*4096] __attribute__((aligned(32)));
@@ -3789,9 +3175,8 @@ void dft8192(int16_t *x,int16_t *y,unsigned char scale)
     }
   }
 
-  _mm_empty();
-  _m_empty();
-
+  simde_mm_empty();
+  simde_m_empty();
 }
 
 void idft8192(int16_t *x,int16_t *y,unsigned char scale)
@@ -3879,9 +3264,8 @@ void idft8192(int16_t *x,int16_t *y,unsigned char scale)
     }
   }
 
-  _mm_empty();
-  _m_empty();
-
+  simde_mm_empty();
+  simde_m_empty();
 }
 
 int16_t tw16384[3*2*4096] __attribute__((aligned(32)));
@@ -3937,9 +3321,8 @@ void dft16384(int16_t *x,int16_t *y,unsigned char scale)
 
   }
 
-  _mm_empty();
-  _m_empty();
-
+  simde_mm_empty();
+  simde_m_empty();
 }
 
 void idft16384(int16_t *x,int16_t *y,unsigned char scale)
@@ -3993,9 +3376,8 @@ void idft16384(int16_t *x,int16_t *y,unsigned char scale)
 
   }
 
-  _mm_empty();
-  _m_empty();
-
+  simde_mm_empty();
+  simde_m_empty();
 }
 
 int16_t tw32768[2*16384] __attribute__((aligned(32)));
@@ -4086,9 +3468,8 @@ void dft32768(int16_t *x,int16_t *y,unsigned char scale)
     }
   }
 
-  _mm_empty();
-  _m_empty();
-
+  simde_mm_empty();
+  simde_m_empty();
 }
 
 void idft32768(int16_t *x,int16_t *y,unsigned char scale)
@@ -4176,9 +3557,8 @@ void idft32768(int16_t *x,int16_t *y,unsigned char scale)
     }
   }
 
-  _mm_empty();
-  _m_empty();
-
+  simde_mm_empty();
+  simde_m_empty();
 }
 
 int16_t twa768[512],twb768[512];
@@ -4231,9 +3611,8 @@ void idft768(int16_t *input, int16_t *output, unsigned char scale)
     }
   }
 
-  _mm_empty();
-  _m_empty();
-
+  simde_mm_empty();
+  simde_m_empty();
 }
 
 void dft768(int16_t *input, int16_t *output, unsigned char scale)
@@ -4295,9 +3674,8 @@ void dft768(int16_t *input, int16_t *output, unsigned char scale)
     }
   }
 
-  _mm_empty();
-  _m_empty();
-
+  simde_mm_empty();
+  simde_m_empty();
 }
 int16_t twa1536[1024],twb1536[1024];
 
@@ -4349,9 +3727,8 @@ void idft1536(int16_t *input, int16_t *output, unsigned char scale)
     }
   }
 
-  _mm_empty();
-  _m_empty();
-
+  simde_mm_empty();
+  simde_m_empty();
 }
 
 void dft1536(int16_t *input, int16_t *output, unsigned char scale)
@@ -4413,9 +3790,8 @@ void dft1536(int16_t *input, int16_t *output, unsigned char scale)
     }
   }
 
-  _mm_empty();
-  _m_empty();
-
+  simde_mm_empty();
+  simde_m_empty();
 }
 
 int16_t twa3072[2048] __attribute__((aligned(32)));
@@ -4467,8 +3843,8 @@ void dft3072(int16_t *input, int16_t *output,unsigned char scale)
     }
   }
 
-  _mm_empty();
-  _m_empty();
+  simde_mm_empty();
+  simde_m_empty();
 }
 
 void idft3072(int16_t *input, int16_t *output,unsigned char scale)
@@ -4517,8 +3893,8 @@ void idft3072(int16_t *input, int16_t *output,unsigned char scale)
     }
   }
 
-  _mm_empty();
-  _m_empty();
+  simde_mm_empty();
+  simde_m_empty();
 }
 
 
@@ -4579,9 +3955,8 @@ void idft6144(int16_t *input, int16_t *output,unsigned char scale)
     }
   }
 
-  _mm_empty();
-  _m_empty();
-
+  simde_mm_empty();
+  simde_m_empty();
 }
 
 
@@ -4643,9 +4018,8 @@ void dft6144(int16_t *input, int16_t *output,unsigned char scale)
       y128p+=16;
     }
   }
-  _mm_empty();
-  _m_empty();
-
+  simde_mm_empty();
+  simde_m_empty();
 }
 
 int16_t twa9216[6144] __attribute__((aligned(32)));
@@ -4721,9 +4095,8 @@ void dft12288(int16_t *input, int16_t *output,unsigned char scale)
       y128p+=16;
     }
   }
-  _mm_empty();
-  _m_empty();
-
+  simde_mm_empty();
+  simde_m_empty();
 }
 
 void idft12288(int16_t *input, int16_t *output,unsigned char scale)
@@ -4780,8 +4153,8 @@ void idft12288(int16_t *input, int16_t *output,unsigned char scale)
       y128p+=16;
     }
   }
-  _mm_empty();
-  _m_empty();
+  simde_mm_empty();
+  simde_m_empty();
 #ifndef MR_MAIN
   if (LOG_DUMPFLAG(DEBUG_DFT)) {
      LOG_M("idft12288out.m","out",output,6144,1,1);
@@ -4836,8 +4209,8 @@ void dft18432(int16_t *input, int16_t *output,unsigned char scale) {
       y128p+=16;
     }
   }
-  _mm_empty();
-  _m_empty();
+  simde_mm_empty();
+  simde_m_empty();
 }
 
 void idft18432(int16_t *input, int16_t *output,unsigned char scale) {
@@ -4884,8 +4257,8 @@ void idft18432(int16_t *input, int16_t *output,unsigned char scale) {
       y128p+=16;
     }
   }
-  _mm_empty();
-  _m_empty();
+  simde_mm_empty();
+  simde_m_empty();
 }
 
 
@@ -4950,8 +4323,8 @@ void dft24576(int16_t *input, int16_t *output,unsigned char scale)
       y128p+=16;
     }
   }
-  _mm_empty();
-  _m_empty();
+  simde_mm_empty();
+  simde_m_empty();
 #ifndef MR_MAIN
   if (LOG_DUMPFLAG(DEBUG_DFT)) {
      LOG_M("out.m","out",output,24576,1,1);
@@ -5010,8 +4383,8 @@ void idft24576(int16_t *input, int16_t *output,unsigned char scale)
       y128p+=16;
     }
   }
-  _mm_empty();
-  _m_empty();
+  simde_mm_empty();
+  simde_m_empty();
 #ifndef MR_MAIN
   if (LOG_DUMPFLAG(DEBUG_DFT)) {
     LOG_M("idft24576out.m","out",output,24576,1,1);
@@ -5074,8 +4447,8 @@ void dft36864(int16_t *input, int16_t *output,uint8_t scale) {
       y128p+=16;
     }
   }
-  _mm_empty();
-  _m_empty();
+  simde_mm_empty();
+  simde_m_empty();
 #ifndef MR_MAIN
   if (LOG_DUMPFLAG(DEBUG_DFT)) {
      LOG_M("out.m","out",output,36864,1,1);
@@ -5127,8 +4500,8 @@ void idft36864(int16_t *input, int16_t *output,uint8_t scale) {
       y128p+=16;
     }
   }
-  _mm_empty();
-  _m_empty();
+  simde_mm_empty();
+  simde_m_empty();
 }
 
 int16_t twa49152[32768] __attribute__((aligned(32)));
@@ -5179,9 +4552,8 @@ void dft49152(int16_t *input, int16_t *output,uint8_t scale) {
       y128p+=16;
     }
   }
-  _mm_empty();
-  _m_empty();
-
+  simde_mm_empty();
+  simde_m_empty();
 }
 
 void idft49152(int16_t *input, int16_t *output,uint8_t scale) {
@@ -5228,8 +4600,8 @@ void idft49152(int16_t *input, int16_t *output,uint8_t scale) {
       y128p+=16;
     }
   }
-  _mm_empty();
-  _m_empty();
+  simde_mm_empty();
+  simde_m_empty();
 }
 
 int16_t tw65536[3*2*16384] __attribute__((aligned(32)));
@@ -5285,8 +4657,8 @@ void idft65536(int16_t *x,int16_t *y,unsigned char scale)
 
   }
 
-  _mm_empty();
-  _m_empty();
+  simde_mm_empty();
+  simde_m_empty();
 }
 
 int16_t twa73728[49152] __attribute__((aligned(32)));
@@ -5350,9 +4722,8 @@ void dft98304(int16_t *input, int16_t *output,uint8_t scale) {
       y128p+=16;
     }
   }
-  _mm_empty();
-  _m_empty();
-
+  simde_mm_empty();
+  simde_m_empty();
 }
 
 void idft98304(int16_t *input, int16_t *output,uint8_t scale) {
@@ -5399,8 +4770,8 @@ void idft98304(int16_t *input, int16_t *output,uint8_t scale) {
       y128p+=16;
     }
   }
-  _mm_empty();
-  _m_empty();
+  simde_mm_empty();
+  simde_m_empty();
 }
 
  
@@ -5413,39 +4784,11 @@ static int16_t const W3_12s[8] __attribute__((aligned(32))) = {0, -32767, 0, -32
 static int16_t const W4_12s[8] __attribute__((aligned(32))) = {-16383, -28377, -16383, -28377, -16383, -28377, -16383, -28377};
 static int16_t const W6_12s[8] __attribute__((aligned(32))) = {-32767, 0, -32767, 0, -32767, 0, -32767, 0};
 
-simd_q15_t *W1_12=(simd_q15_t *)W1_12s;
-simd_q15_t *W2_12=(simd_q15_t *)W2_12s;
-simd_q15_t *W3_12=(simd_q15_t *)W3_12s;
-simd_q15_t *W4_12=(simd_q15_t *)W4_12s;
-simd_q15_t *W6_12=(simd_q15_t *)W6_12s;
-
-
-static simd_q15_t norm128;
-
+simd_q15_t *const W1_12=(simd_q15_t *)W1_12s;
+simd_q15_t *const W2_12=(simd_q15_t *)W2_12s;
+simd_q15_t *const W3_12=(simd_q15_t *)W3_12s;
+simd_q15_t *const W4_12=(simd_q15_t *)W4_12s;
+simd_q15_t *const W6_12=(simd_q15_t *)W6_12s;
+
 static inline void dft12f(simd_q15_t *x0,
                           simd_q15_t *x1,
                           simd_q15_t *x2,
                           simd_q15_t *x3,
                           simd_q15_t *x4,
                           simd_q15_t *x5,
                           simd_q15_t *x6,
                           simd_q15_t *x7,
                           simd_q15_t *x8,
                           simd_q15_t *x9,
                           simd_q15_t *x10,
                           simd_q15_t *x11,
                           simd_q15_t *y0,
                           simd_q15_t *y1,
                           simd_q15_t *y2,
                           simd_q15_t *y3,
                           simd_q15_t *y4,
                           simd_q15_t *y5,
                           simd_q15_t *y6,
                           simd_q15_t *y7,
                           simd_q15_t *y8,
                           simd_q15_t *y9,
                           simd_q15_t *y10,
                           simd_q15_t *y11) __attribute__((always_inline));
 
 static inline void dft12f(simd_q15_t *x0,
                           simd_q15_t *x1,
@@ -5582,9 +4925,8 @@ void dft12(int16_t *x,int16_t *y ,unsigned char scale_flag)
          &y128[10],
          &y128[11]);
 
-  _mm_empty();
-  _m_empty();
-
+  simde_mm_empty();
+  simde_m_empty();
 }
 
 static const int16_t W1_12s_256[16] __attribute__((aligned(32))) =
@@ -5612,38 +4954,11 @@ static const int16_t W4_12s_256[16] __attribute__((aligned(32))) = {-16383,
 static const int16_t W6_12s_256[16]
     __attribute__((aligned(32))) = {-32767, 0, -32767, 0, -32767, 0, -32767, 0, -32767, 0, -32767, 0, -32767, 0, -32767, 0};
 
-simd256_q15_t *W1_12_256=(simd256_q15_t *)W1_12s_256;
-simd256_q15_t *W2_12_256=(simd256_q15_t *)W2_12s_256;
-simd256_q15_t *W3_12_256=(simd256_q15_t *)W3_12s_256;
-simd256_q15_t *W4_12_256=(simd256_q15_t *)W4_12s_256;
-simd256_q15_t *W6_12_256=(simd256_q15_t *)W6_12s_256;
-
-
-
+simd256_q15_t *const W1_12_256=(simd256_q15_t *)W1_12s_256;
+simd256_q15_t *const W2_12_256=(simd256_q15_t *)W2_12s_256;
+simd256_q15_t *const W3_12_256=(simd256_q15_t *)W3_12s_256;
+simd256_q15_t *const W4_12_256=(simd256_q15_t *)W4_12s_256;
+simd256_q15_t *const W6_12_256=(simd256_q15_t *)W6_12s_256;
+
 static inline void dft12f_simd256(simd256_q15_t *x0,
 				  simd256_q15_t *x1,
 				  simd256_q15_t *x2,
 				  simd256_q15_t *x3,
 				  simd256_q15_t *x4,
 				  simd256_q15_t *x5,
 				  simd256_q15_t *x6,
 				  simd256_q15_t *x7,
 				  simd256_q15_t *x8,
 				  simd256_q15_t *x9,
 				  simd256_q15_t *x10,
 				  simd256_q15_t *x11,
 				  simd256_q15_t *y0,
 				  simd256_q15_t *y1,
 				  simd256_q15_t *y2,
 				  simd256_q15_t *y3,
 				  simd256_q15_t *y4,
 				  simd256_q15_t *y5,
 				  simd256_q15_t *y6,
 				  simd256_q15_t *y7,
 				  simd256_q15_t *y8,
 				  simd256_q15_t *y9,
 				  simd256_q15_t *y10,
 				  simd256_q15_t *y11) __attribute__((always_inline));
 
 static inline void dft12f_simd256(simd256_q15_t *x0,
 				  simd256_q15_t *x1,
@@ -5759,33 +5074,32 @@ void dft12_simd256(int16_t *x,int16_t *y)
 
   simd256_q15_t *x256 = (simd256_q15_t *)x,*y256 = (simd256_q15_t *)y;
   dft12f_simd256(&x256[0],
-		 &x256[1],
-		 &x256[2],
-		 &x256[3],
-		 &x256[4],
-		 &x256[5],
-		 &x256[6],
-		 &x256[7],
-		 &x256[8],
-		 &x256[9],
-		 &x256[10],
-		 &x256[11],
-		 &y256[0],
-		 &y256[1],
-		 &y256[2],
-		 &y256[3],
-		 &y256[4],
-		 &y256[5],
-		 &y256[6],
-		 &y256[7],
-		 &y256[8],
-		 &y256[9],
-		 &y256[10],
-		 &y256[11]);
-  
-  _mm_empty();
-  _m_empty();
-
+                 &x256[1],
+                 &x256[2],
+                 &x256[3],
+                 &x256[4],
+                 &x256[5],
+                 &x256[6],
+                 &x256[7],
+                 &x256[8],
+                 &x256[9],
+                 &x256[10],
+                 &x256[11],
+                 &y256[0],
+                 &y256[1],
+                 &y256[2],
+                 &y256[3],
+                 &y256[4],
+                 &y256[5],
+                 &y256[6],
+                 &y256[7],
+                 &y256[8],
+                 &y256[9],
+                 &y256[10],
+                 &y256[11]);
+
+  simde_mm_empty();
+  simde_m_empty();
 }
 
 static int16_t tw24[88]__attribute__((aligned(32)));
@@ -5871,16 +5185,15 @@ void dft24(int16_t *x,int16_t *y,unsigned char scale_flag)
   }
 
   if (scale_flag==1) {
-    norm128 = set1_int16(dft_norm_table[1]);
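+    // Q15 normalization, dft_norm_table[1] ~= 32768/sqrt(24)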
+    simd_q15_t norm128 = set1_int16(dft_norm_table[1]);
 
     for (i=0; i<24; i++) {
       y128[i] = mulhi_int16(y128[i],norm128);
     }
   }
 
-  _mm_empty();
-  _m_empty();
-
+  simde_mm_empty();
+  simde_m_empty();
 }
 
 static int16_t twa36[88]__attribute__((aligned(32)));
@@ -5994,16 +5307,15 @@ void dft36(int16_t *x,int16_t *y,unsigned char scale_flag)
   }
 
   if (scale_flag==1) {
-    norm128 = set1_int16(dft_norm_table[2]);
+    simd_q15_t norm128 = set1_int16(dft_norm_table[2]);
 
     for (i=0; i<36; i++) {
       y128[i] = mulhi_int16(y128[i],norm128);
     }
   }
 
-  _mm_empty();
-  _m_empty();
-
+  simde_mm_empty();
+  simde_m_empty();
 }
 
 static int16_t twa48[88]__attribute__((aligned(32)));
@@ -6155,16 +5467,15 @@ void dft48(int16_t *x, int16_t *y,unsigned char scale_flag)
   }
 
   if (scale_flag == 1) {
-    norm128 = set1_int16(dft_norm_table[3]);
+    simd_q15_t norm128 = set1_int16(dft_norm_table[3]);
 
     for (i=0; i<48; i++) {
       y128[i] = mulhi_int16(y128[i],norm128);
     }
   }
 
-  _mm_empty();
-  _m_empty();
-
+  simde_mm_empty();
+  simde_m_empty();
 }
 
 static int16_t twa60[88]__attribute__((aligned(32)));
@@ -6339,7 +5650,7 @@ void dft60(int16_t *x,int16_t *y,unsigned char scale)
   }
 
   if (scale == 1) {
-    norm128 = set1_int16(dft_norm_table[4]);
+    simd_q15_t norm128 = set1_int16(dft_norm_table[4]);
 
     for (i=0; i<60; i++) {
       y128[i] = mulhi_int16(y128[i],norm128);
@@ -6347,9 +5658,8 @@ void dft60(int16_t *x,int16_t *y,unsigned char scale)
     }
   }
 
-  _mm_empty();
-  _m_empty();
-
+  simde_mm_empty();
+  simde_m_empty();
 }
 
 static int16_t tw72[280]__attribute__((aligned(32)));
@@ -6384,16 +5694,15 @@ void dft72(int16_t *x,int16_t *y,unsigned char scale_flag)
   }
 
   if (scale_flag==1) {
-    norm128 = set1_int16(dft_norm_table[5]);
+    simd_q15_t norm128 = set1_int16(dft_norm_table[5]);
 
     for (i=0; i<72; i++) {
       y128[i] = mulhi_int16(y128[i],norm128);
     }
   }
 
-  _mm_empty();
-  _m_empty();
-
+  simde_mm_empty();
+  simde_m_empty();
 }
 
 static int16_t tw96[376]__attribute__((aligned(32)));
@@ -6430,16 +5739,15 @@ void dft96(int16_t *x,int16_t *y,unsigned char scale_flag)
   }
 
   if (scale_flag==1) {
-    norm128 = set1_int16(dft_norm_table[6]);
+    simd_q15_t norm128 = set1_int16(dft_norm_table[6]);
 
     for (i=0; i<96; i++) {
       y128[i] = mulhi_int16(y128[i],norm128);
     }
   }
 
-  _mm_empty();
-  _m_empty();
-
+  simde_mm_empty();
+  simde_m_empty();
 }
 
 static int16_t twa108[280]__attribute__((aligned(32)));
@@ -6481,16 +5789,15 @@ void dft108(int16_t *x,int16_t *y,unsigned char scale_flag)
   }
 
   if (scale_flag==1) {
-    norm128 = set1_int16(dft_norm_table[7]);
+    simd_q15_t norm128 = set1_int16(dft_norm_table[7]);
 
     for (i=0; i<108; i++) {
       y128[i] = mulhi_int16(y128[i],norm128);
     }
   }
 
-  _mm_empty();
-  _m_empty();
-
+  simde_mm_empty();
+  simde_m_empty();
 }
 
 static int16_t tw120[472]__attribute__((aligned(32)));
@@ -6523,16 +5830,15 @@ void dft120(int16_t *x,int16_t *y, unsigned char scale_flag)
   }
 
   if (scale_flag==1) {
-    norm128 = set1_int16(dft_norm_table[8]);
+    simd_q15_t norm128 = set1_int16(dft_norm_table[8]);
 
     for (i=0; i<120; i++) {
       y128[i] = mulhi_int16(y128[i],norm128);
     }
   }
 
-  _mm_empty();
-  _m_empty();
-
+  simde_mm_empty();
+  simde_m_empty();
 }
 
 static int16_t twa144[376]__attribute__((aligned(32)));
@@ -6574,16 +5880,15 @@ void dft144(int16_t *x,int16_t *y,unsigned char scale_flag)
   }
 
   if (scale_flag==1) {
-    norm128 = set1_int16(dft_norm_table[9]);
+    simd_q15_t norm128 = set1_int16(dft_norm_table[9]);
 
     for (i=0; i<144; i++) {
       y128[i] = mulhi_int16(y128[i],norm128);
     }
   }
 
-  _mm_empty();
-  _m_empty();
-
+  simde_mm_empty();
+  simde_m_empty();
 }
 
 static int16_t twa180[472]__attribute__((aligned(32)));
@@ -6626,16 +5931,15 @@ void dft180(int16_t *x,int16_t *y,unsigned char scale_flag)
   }
 
   if (scale_flag==1) {
-    norm128 = set1_int16(dft_norm_table[10]);
+    simd_q15_t norm128 = set1_int16(dft_norm_table[10]);
 
     for (i=0; i<180; i++) {
       y128[i] = mulhi_int16(y128[i],norm128);
     }
   }
 
-  _mm_empty();
-  _m_empty();
-
+  simde_mm_empty();
+  simde_m_empty();
 }
 
 static int16_t twa192[376]__attribute__((aligned(32)));
@@ -6685,16 +5989,15 @@ void dft192(int16_t *x,int16_t *y,unsigned char scale_flag)
   }
 
   if (scale_flag==1) {
-    norm128 = set1_int16(dft_norm_table[11]);
+    simd_q15_t norm128 = set1_int16(dft_norm_table[11]);
 
     for (i=0; i<192; i++) {
       y128[i] = mulhi_int16(y128[i],norm128);
     }
   }
 
-  _mm_empty();
-  _m_empty();
-
+  simde_mm_empty();
+  simde_m_empty();
 }
 
 static int16_t twa216[568]__attribute__((aligned(32)));
@@ -6737,16 +6040,15 @@ void dft216(int16_t *x,int16_t *y,unsigned char scale_flag)
   }
 
   if (scale_flag==1) {
-    norm128 = set1_int16(dft_norm_table[12]);
+    simd_q15_t norm128 = set1_int16(dft_norm_table[12]);
 
     for (i=0; i<216; i++) {
       y128[i] = mulhi_int16(y128[i],norm128);
     }
   }
 
-  _mm_empty();
-  _m_empty();
-
+  simde_mm_empty();
+  simde_m_empty();
 }
 
 static int16_t twa240[472]__attribute__((aligned(32)));
@@ -6796,16 +6098,15 @@ void dft240(int16_t *x,int16_t *y,unsigned char scale_flag)
   }
 
   if (scale_flag==1) {
-    norm128 = set1_int16(dft_norm_table[13]);
+    simd_q15_t norm128 = set1_int16(dft_norm_table[13]);
 
     for (i=0; i<240; i++) {
       y128[i] = mulhi_int16(y128[i],norm128);
     }
   }
 
-  _mm_empty();
-  _m_empty();
-
+  simde_mm_empty();
+  simde_m_empty();
 }
 
 static int16_t twa288[760]__attribute__((aligned(32)));
@@ -6848,16 +6149,15 @@ void dft288(int16_t *x,int16_t *y,unsigned char scale_flag)
   }
 
   if (scale_flag==1) {
-    norm128 = set1_int16(dft_norm_table[14]);
+    simd_q15_t norm128 = set1_int16(dft_norm_table[14]);
 
     for (i=0; i<288; i++) {
       y128[i] = mulhi_int16(y128[i],norm128);
     }
   }
 
-  _mm_empty();
-  _m_empty();
-
+  simde_mm_empty();
+  simde_m_empty();
 }
 
 static int16_t twa300[472]__attribute__((aligned(32)));
@@ -6914,16 +6214,15 @@ void dft300(int16_t *x,int16_t *y,unsigned char scale_flag)
   }
 
   if (scale_flag==1) {
-    norm128 = set1_int16(dft_norm_table[15]);
+    simd_q15_t norm128 = set1_int16(dft_norm_table[15]);
 
     for (i=0; i<300; i++) {
       y128[i] = mulhi_int16(y128[i],norm128);
     }
   }
 
-  _mm_empty();
-  _m_empty();
-
+  simde_mm_empty();
+  simde_m_empty();
 }
 
 static int16_t twa324[107*2*4];
@@ -6965,16 +6264,15 @@ void dft324(int16_t *x,int16_t *y,unsigned char scale_flag)  // 108 x 3
   }
 
   if (scale_flag==1) {
-    norm128 = set1_int16(dft_norm_table[14]);
+    simd_q15_t norm128 = set1_int16(dft_norm_table[14]);
 
     for (i=0; i<324; i++) {
       y128[i] = mulhi_int16(y128[i],norm128);
     }
   }
 
-  _mm_empty();
-  _m_empty();
-
+  simde_mm_empty();
+  simde_m_empty();
 };
 
 static int16_t twa360[119*2*4];
@@ -7016,16 +6314,15 @@ void dft360(int16_t *x,int16_t *y,unsigned char scale_flag)  // 120 x 3
   }
 
   if (scale_flag==1) {
-    norm128 = set1_int16(dft_norm_table[14]);
+    simd_q15_t norm128 = set1_int16(dft_norm_table[14]);
 
     for (i=0; i<360; i++) {
       y128[i] = mulhi_int16(y128[i],norm128);
     }
   }
 
-  _mm_empty();
-  _m_empty();
-
+  simde_mm_empty();
+  simde_m_empty();
 };
 
 static int16_t twa384[95*2*4];
@@ -7074,16 +6371,15 @@ void dft384(int16_t *x,int16_t *y,unsigned char scale_flag)  // 96 x 4
   }
 
   if (scale_flag==1) {
-    norm128 = set1_int16(16384);//dft_norm_table[13]);
+    simd_q15_t norm128 = set1_int16(16384);//dft_norm_table[13]);
 
     for (i=0; i<384; i++) {
       y128[i] = mulhi_int16(y128[i],norm128);
     }
   }
 
-  _mm_empty();
-  _m_empty();
-
+  simde_mm_empty();
+  simde_m_empty();
 };
 
 static int16_t twa432[107*2*4];
@@ -7131,16 +6427,15 @@ void dft432(int16_t *x,int16_t *y,unsigned char scale_flag)  // 108 x 4
   }
 
   if (scale_flag==1) {
-    norm128 = set1_int16(16384);//dft_norm_table[13]);
+    simd_q15_t norm128 = set1_int16(16384);//dft_norm_table[13]);
 
     for (i=0; i<432; i++) {
       y128[i] = mulhi_int16(y128[i],norm128);
     }
   }
 
-  _mm_empty();
-  _m_empty();
-
+  simde_mm_empty();
+  simde_m_empty();
 };
 static int16_t twa480[119*2*4];
 static int16_t twb480[119*2*4];
@@ -7188,16 +6483,15 @@ void dft480(int16_t *x,int16_t *y,unsigned char scale_flag)  // 120 x 4
   }
 
   if (scale_flag==1) {
-    norm128 = set1_int16(16384);//dft_norm_table[13]);
+    simd_q15_t norm128 = set1_int16(16384);//dft_norm_table[13]);
 
     for (i=0; i<480; i++) {
       y128[i] = mulhi_int16(y128[i],norm128);
     }
   }
 
-  _mm_empty();
-  _m_empty();
-
+  simde_mm_empty();
+  simde_m_empty();
 };
 
 
@@ -7240,16 +6534,15 @@ void dft540(int16_t *x,int16_t *y,unsigned char scale_flag)  // 180 x 3
   }
 
   if (scale_flag==1) {
-    norm128 = set1_int16(dft_norm_table[14]);
+    simd_q15_t norm128 = set1_int16(dft_norm_table[14]);
 
     for (i=0; i<540; i++) {
       y128[i] = mulhi_int16(y128[i],norm128);
     }
   }
 
-  _mm_empty();
-  _m_empty();
-
+  simde_mm_empty();
+  simde_m_empty();
 };
 
 static int16_t twa576[191*2*4];
@@ -7292,15 +6585,15 @@ void dft576(int16_t *x,int16_t *y,unsigned char scale_flag)  // 192 x 3
   }
 
   if (scale_flag==1) {
-    norm128 = set1_int16(dft_norm_table[14]);
+    simd_q15_t norm128 = set1_int16(dft_norm_table[14]);
 
     for (i=0; i<576; i++) {
       y128[i] = mulhi_int16(y128[i],norm128);
     }
   }
 
-  _mm_empty();
-  _m_empty();
+  simde_mm_empty();
+  simde_m_empty();
 };
 
 
@@ -7336,15 +6629,15 @@ void dft600(int16_t *x,int16_t *y,unsigned char scale_flag)  // 300 x 2
   }
 
   if (scale_flag==1) {
-    norm128 = set1_int16(ONE_OVER_SQRT2_Q15);
+    simd_q15_t norm128 = set1_int16(ONE_OVER_SQRT2_Q15);
 
     for (i=0; i<600; i++) {
       y128[i] = mulhi_int16(y128[i],norm128);
     }
   }
 
-  _mm_empty();
-  _m_empty();
+  simde_mm_empty();
+  simde_m_empty();
 };
 
 
@@ -7387,16 +6680,15 @@ void dft648(int16_t *x,int16_t *y,unsigned char scale_flag)  // 216 x 3
   }
 
   if (scale_flag==1) {
-    norm128 = set1_int16(dft_norm_table[14]);
+    simd_q15_t norm128 = set1_int16(dft_norm_table[14]);
 
     for (i=0; i<648; i++) {
       y128[i] = mulhi_int16(y128[i],norm128);
     }
   }
 
-  _mm_empty();
-  _m_empty();
-
+  simde_mm_empty();
+  simde_m_empty();
 };
 
 
@@ -7447,16 +6739,15 @@ void dft720(int16_t *x,int16_t *y,unsigned char scale_flag)  // 180 x 4
   }
 
   if (scale_flag==1) {
-    norm128 = set1_int16(16384);//dft_norm_table[13]);
+    simd_q15_t norm128 = set1_int16(16384);//dft_norm_table[13]);
 
     for (i=0; i<720; i++) {
       y128[i] = mulhi_int16(y128[i],norm128);
     }
   }
 
-  _mm_empty();
-  _m_empty();
-
+  simde_mm_empty();
+  simde_m_empty();
 };
 
 static int16_t twa768p[191*2*4];
@@ -7505,16 +6796,15 @@ void dft768p(int16_t *x,int16_t *y,unsigned char scale_flag) { // 192x 4;
   }
 
   if (scale_flag==1) {
-    norm128 = set1_int16(16384);//dft_norm_table[13]);
+    simd_q15_t norm128 = set1_int16(16384);//dft_norm_table[13]);
 
     for (i=0; i<768; i++) {
       y128[i] = mulhi_int16(y128[i],norm128);
     }
   }
 
-  _mm_empty();
-  _m_empty();
-
+  simde_mm_empty();
+  simde_m_empty();
 }
 
 static int16_t twa384i[256];
@@ -7567,9 +6857,8 @@ void idft384(int16_t *input, int16_t *output, unsigned char scale)
     }
   }
 
-  _mm_empty();
-  _m_empty();
-
+  simde_mm_empty();
+  simde_m_empty();
 }
 
 
@@ -7612,16 +6901,15 @@ void dft864(int16_t *x,int16_t *y,unsigned char scale_flag)  // 288 x 3
   }
 
   if (scale_flag==1) {
-    norm128 = set1_int16(dft_norm_table[14]);
+    simd_q15_t norm128 = set1_int16(dft_norm_table[14]);
 
     for (i=0; i<864; i++) {
       y128[i] = mulhi_int16(y128[i],norm128);
     }
   }
 
-  _mm_empty();
-  _m_empty();
-
+  simde_mm_empty();
+  simde_m_empty();
 };
 
 static int16_t twa900[299*2*4];
@@ -7663,16 +6951,15 @@ void dft900(int16_t *x,int16_t *y,unsigned char scale_flag)  // 300 x 3
   }
 
   if (scale_flag==1) {
-    norm128 = set1_int16(dft_norm_table[14]);
+    simd_q15_t norm128 = set1_int16(dft_norm_table[14]);
 
     for (i=0; i<900; i++) {
       y128[i] = mulhi_int16(y128[i],norm128);
     }
   }
 
-  _mm_empty();
-  _m_empty();
-
+  simde_mm_empty();
+  simde_m_empty();
 };
 
 
@@ -7723,16 +7010,15 @@ void dft960(int16_t *x,int16_t *y,unsigned char scale_flag)  // 240 x 4
   }
 
   if (scale_flag==1) {
-    norm128 = set1_int16(16384);//dft_norm_table[13]);
+    simd_q15_t norm128 = set1_int16(16384);//dft_norm_table[13]);
 
     for (i=0; i<960; i++) {
       y128[i] = mulhi_int16(y128[i],norm128);
     }
   }
 
-  _mm_empty();
-  _m_empty();
-
+  simde_mm_empty();
+  simde_m_empty();
 };
 
 
@@ -7775,16 +7061,15 @@ void dft972(int16_t *x,int16_t *y,unsigned char scale_flag)  // 324 x 3
   }
 
   if (scale_flag==1) {
-    norm128 = set1_int16(dft_norm_table[14]);
+    simd_q15_t norm128 = set1_int16(dft_norm_table[14]);
 
     for (i=0; i<972; i++) {
       y128[i] = mulhi_int16(y128[i],norm128);
     }
   }
 
-  _mm_empty();
-  _m_empty();
-
+  simde_mm_empty();
+  simde_m_empty();
 };
 
 static int16_t twa1080[359*2*4];
@@ -7826,16 +7111,15 @@ void dft1080(int16_t *x,int16_t *y,unsigned char scale_flag)  // 360 x 3
   }
 
   if (scale_flag==1) {
-    norm128 = set1_int16(dft_norm_table[14]);
+    simd_q15_t norm128 = set1_int16(dft_norm_table[14]);
 
     for (i=0; i<1080; i++) {
       y128[i] = mulhi_int16(y128[i],norm128);
     }
   }
 
-  _mm_empty();
-  _m_empty();
-
+  simde_mm_empty();
+  simde_m_empty();
 };
 
 static int16_t twa1152[287*2*4];
@@ -7885,15 +7169,15 @@ void dft1152(int16_t *x,int16_t *y,unsigned char scale_flag)  // 288 x 4
   }
 
   if (scale_flag==1) {
-    norm128 = set1_int16(16384);//dft_norm_table[13]);
+    simd_q15_t norm128 = set1_int16(16384);//dft_norm_table[13]);
 
     for (i=0; i<1152; i++) {
       y128[i] = mulhi_int16(y128[i],norm128);
     }
   }
 
-  _mm_empty();
-  _m_empty();
+  simde_mm_empty();
+  simde_m_empty();
 };
 
 int16_t twa1200[4784];
@@ -7943,15 +7227,14 @@ void dft1200(int16_t *x,int16_t *y,unsigned char scale_flag)
   }
 
   if (scale_flag==1) {
-    norm128 = set1_int16(16384);//dft_norm_table[13]);
+    simd_q15_t norm128 = set1_int16(16384);//dft_norm_table[13]);
     for (i=0; i<1200; i++) {
       y128[i] = mulhi_int16(y128[i],norm128);
     }
   }
 
-  _mm_empty();
-  _m_empty();
-
+  simde_mm_empty();
+  simde_m_empty();
 }
 
 
@@ -7995,16 +7278,15 @@ void dft1296(int16_t *x,int16_t *y,unsigned char scale_flag) //432 * 3
   }
 
   if (scale_flag==1) {
-    norm128 = set1_int16(dft_norm_table[14]);
+    simd_q15_t norm128 = set1_int16(dft_norm_table[14]);
 
     for (i=0; i<1296; i++) {
       y128[i] = mulhi_int16(y128[i],norm128);
     }
   }
 
-  _mm_empty();
-  _m_empty();
-
+  simde_mm_empty();
+  simde_m_empty();
 };
 
 
@@ -8047,16 +7329,15 @@ void dft1440(int16_t *x,int16_t *y,unsigned char scale_flag)  // 480 x 3
   }
 
   if (scale_flag==1) {
-    norm128 = set1_int16(dft_norm_table[14]);
+    simd_q15_t norm128 = set1_int16(dft_norm_table[14]);
 
     for (i=0; i<1440; i++) {
       y128[i] = mulhi_int16(y128[i],norm128);
     }
   }
 
-  _mm_empty();
-  _m_empty();
-
+  simde_mm_empty();
+  simde_m_empty();
 };
 
 static int16_t twa1500[2392]__attribute__((aligned(32)));
@@ -8113,16 +7394,15 @@ void dft1500(int16_t *x,int16_t *y,unsigned char scale_flag)
   }
 
   if (scale_flag==1) {
-    norm128 = set1_int16(dft_norm_table[15]);
+    simd_q15_t norm128 = set1_int16(dft_norm_table[15]);
 
     for (i=0; i<1500; i++) {
       y128[i] = mulhi_int16(y128[i],norm128);
     }
   }
 
-  _mm_empty();
-  _m_empty();
-
+  simde_mm_empty();
+  simde_m_empty();
 }
 
 static int16_t twa1620[539*2*4];
@@ -8164,16 +7444,15 @@ void dft1620(int16_t *x,int16_t *y,unsigned char scale_flag)  // 540 x 3
   }
 
   if (scale_flag==1) {
-    norm128 = set1_int16(dft_norm_table[14]);
+    simd_q15_t norm128 = set1_int16(dft_norm_table[14]);
 
     for (i=0; i<1620; i++) {
       y128[i] = mulhi_int16(y128[i],norm128);
     }
   }
 
-  _mm_empty();
-  _m_empty();
-
+  simde_mm_empty();
+  simde_m_empty();
 };
 
 static int16_t twa1728[575*2*4];
@@ -8215,16 +7494,15 @@ void dft1728(int16_t *x,int16_t *y,unsigned char scale_flag)  // 576 x 3
   }
 
   if (scale_flag==1) {
-    norm128 = set1_int16(dft_norm_table[14]);
+    simd_q15_t norm128 = set1_int16(dft_norm_table[14]);
 
     for (i=0; i<1728; i++) {
       y128[i] = mulhi_int16(y128[i],norm128);
     }
   }
 
-  _mm_empty();
-  _m_empty();
-
+  simde_mm_empty();
+  simde_m_empty();
 };
 
 static int16_t twa1800[599*2*4];
@@ -8266,16 +7544,15 @@ void dft1800(int16_t *x,int16_t *y,unsigned char scale_flag)  // 600 x 3
   }
 
   if (scale_flag==1) {
-    norm128 = set1_int16(dft_norm_table[14]);
+    simd_q15_t norm128 = set1_int16(dft_norm_table[14]);
 
     for (i=0; i<1800; i++) {
       y128[i] = mulhi_int16(y128[i],norm128);
     }
   }
 
-  _mm_empty();
-  _m_empty();
-
+  simde_mm_empty();
+  simde_m_empty();
 };
 
 static int16_t twa1920[479*2*4];
@@ -8324,15 +7601,14 @@ void dft1920(int16_t *x,int16_t *y,unsigned char scale_flag)  // 480 x 4
   }
 
   if (scale_flag==1) {
-    norm128 = set1_int16(dft_norm_table[13]);
+    simd_q15_t norm128 = set1_int16(dft_norm_table[13]);
     for (i=0; i<1920; i++) {
       y128[i] = mulhi_int16(y128[i],norm128);
     }
   }
 
-  _mm_empty();
-  _m_empty();
-
+  simde_mm_empty();
+  simde_m_empty();
 };
 
 static int16_t twa1944[647*2*4];
@@ -8374,16 +7650,15 @@ void dft1944(int16_t *x,int16_t *y,unsigned char scale_flag)  // 648 x 3
   }
 
   if (scale_flag==1) {
-    norm128 = set1_int16(dft_norm_table[14]);
+    simd_q15_t norm128 = set1_int16(dft_norm_table[14]);
 
     for (i=0; i<1944; i++) {
       y128[i] = mulhi_int16(y128[i],norm128);
     }
   }
 
-  _mm_empty();
-  _m_empty();
-
+  simde_mm_empty();
+  simde_m_empty();
 };
 
 static int16_t twa2160[719*2*4];
@@ -8425,16 +7700,15 @@ void dft2160(int16_t *x,int16_t *y,unsigned char scale_flag)  // 720 x 3
   }
 
   if (scale_flag==1) {
-    norm128 = set1_int16(dft_norm_table[14]);
+    simd_q15_t norm128 = set1_int16(dft_norm_table[14]);
 
     for (i=0; i<2160; i++) {
       y128[i] = mulhi_int16(y128[i],norm128);
     }
   }
 
-  _mm_empty();
-  _m_empty();
-
+  simde_mm_empty();
+  simde_m_empty();
 };
 
 static int16_t twa2304[767*2*4];
@@ -8476,16 +7750,15 @@ void dft2304(int16_t *x,int16_t *y,unsigned char scale_flag)  // 768 x 3
   }
 
   if (scale_flag==1) {
-    norm128 = set1_int16(dft_norm_table[14]);
+    simd_q15_t norm128 = set1_int16(dft_norm_table[14]);
 
     for (i=0; i<2304; i++) {
       y128[i] = mulhi_int16(y128[i],norm128);
     }
   }
 
-  _mm_empty();
-  _m_empty();
-
+  simde_mm_empty();
+  simde_m_empty();
 };
 
 static int16_t twa2400[599*2*4];
@@ -8535,15 +7808,14 @@ void dft2400(int16_t *x,int16_t *y,unsigned char scale_flag)  // 600 x 4
   }
 
   if (scale_flag==1) {
-    norm128 = set1_int16(dft_norm_table[13]);
+    simd_q15_t norm128 = set1_int16(dft_norm_table[13]);
     for (i=0; i<2400; i++) {
       y128[i] = mulhi_int16(y128[i],norm128);
     }
   }
 
-  _mm_empty();
-  _m_empty();
-
+  simde_mm_empty();
+  simde_m_empty();
 };
 
 static int16_t twa2592[863*2*4];
@@ -8585,16 +7857,15 @@ void dft2592(int16_t *x,int16_t *y,unsigned char scale_flag)  // 864 x 3
   }
 
   if (scale_flag==1) {
-    norm128 = set1_int16(dft_norm_table[14]);
+    simd_q15_t norm128 = set1_int16(dft_norm_table[14]);
 
     for (i=0; i<2592; i++) {
       y128[i] = mulhi_int16(y128[i],norm128);
     }
   }
 
-  _mm_empty();
-  _m_empty();
-
+  simde_mm_empty();
+  simde_m_empty();
 };
 
 static int16_t twa2700[899*2*4];
@@ -8636,16 +7907,15 @@ void dft2700(int16_t *x,int16_t *y,unsigned char scale_flag)  // 900 x 3
   }
 
   if (scale_flag==1) {
-    norm128 = set1_int16(dft_norm_table[14]);
+    simd_q15_t norm128 = set1_int16(dft_norm_table[14]);
 
     for (i=0; i<2700; i++) {
       y128[i] = mulhi_int16(y128[i],norm128);
     }
   }
 
-  _mm_empty();
-  _m_empty();
-
+  simde_mm_empty();
+  simde_m_empty();
 };
 
 static int16_t twa2880[959*2*4];
@@ -8687,16 +7957,15 @@ void dft2880(int16_t *x,int16_t *y,unsigned char scale_flag)  // 960 x 3
   }
 
   if (scale_flag==1) {
-    norm128 = set1_int16(dft_norm_table[14]);
+    simd_q15_t norm128 = set1_int16(dft_norm_table[14]);
 
     for (i=0; i<2880; i++) {
       y128[i] = mulhi_int16(y128[i],norm128);
     }
   }
 
-  _mm_empty();
-  _m_empty();
-
+  simde_mm_empty();
+  simde_m_empty();
 };
 
 static int16_t twa2916[971*2*4];
@@ -8738,16 +8007,15 @@ void dft2916(int16_t *x,int16_t *y,unsigned char scale_flag)  // 972 x 3
   }
 
   if (scale_flag==1) {
-    norm128 = set1_int16(dft_norm_table[14]);
+    simd_q15_t norm128 = set1_int16(dft_norm_table[14]);
 
     for (i=0; i<2916; i++) {
       y128[i] = mulhi_int16(y128[i],norm128);
     }
   }
 
-  _mm_empty();
-  _m_empty();
-
+  simde_mm_empty();
+  simde_m_empty();
 };
 
 static int16_t twa3000[599*8]__attribute__((aligned(32)));
@@ -8804,16 +8072,15 @@ void dft3000(int16_t *x,int16_t *y,unsigned char scale_flag) // 600 * 5
   }
 
   if (scale_flag==1) {
-    norm128 = set1_int16(dft_norm_table[15]);
+    simd_q15_t norm128 = set1_int16(dft_norm_table[15]);
 
     for (i=0; i<3000; i++) {
       y128[i] = mulhi_int16(y128[i],norm128);
     }
   }
 
-  _mm_empty();
-  _m_empty();
-
+  simde_mm_empty();
+  simde_m_empty();
 }
 
 static int16_t twa3240[1079*2*4];
@@ -8855,16 +8122,15 @@ void dft3240(int16_t *x,int16_t *y,unsigned char scale_flag)  // 1080 x 3
   }
 
   if (scale_flag==1) {
-    norm128 = set1_int16(dft_norm_table[14]);
+    simd_q15_t norm128 = set1_int16(dft_norm_table[14]);
 
     for (i=0; i<3240; i++) {
       y128[i] = mulhi_int16(y128[i],norm128);
     }
   }
 
-  _mm_empty();
-  _m_empty();
-
+  simde_mm_empty();
+  simde_m_empty();
 };
 
 void init_rad4(int N,int16_t *tw) {
@@ -9318,13 +8584,8 @@ int main(int argc, char**argv)
     ((int16_t *)&tw3)[7] = 0;
  */
     for (i=0;i<300;i++) {
-#if defined(__x86_64__) || defined(__i386__)
       x[i] = simde_mm256_set1_epi32(taus());
       x[i] = simde_mm256_srai_epi16(x[i],4);
-#elif defined(__arm__) || defined(__aarch64__)
-      x[i] = (int16x8_t)vdupq_n_s32(taus());
-      x[i] = vshrq_n_s16(x[i],4);
-#endif // defined(__x86_64__) || defined(__i386__)
     }
       /*
     bfly2_tw1(x,x+1,y,y+1);
@@ -10059,3 +9320,4 @@ int main(int argc, char**argv)
 
 
 #endif
+#endif
diff --git a/openair1/PHY/TOOLS/oai_dfts_neon.c b/openair1/PHY/TOOLS/oai_dfts_neon.c
new file mode 100644
index 0000000000000000000000000000000000000000..fa183893d497937bf03726273b6d4467c2148320
--- /dev/null
+++ b/openair1/PHY/TOOLS/oai_dfts_neon.c
@@ -0,0 +1,8128 @@
+/*
+ * Licensed to the OpenAirInterface (OAI) Software Alliance under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The OpenAirInterface Software Alliance licenses this file to You under
+ * the OAI Public License, Version 1.1  (the "License"); you may not use this file
+ * except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.openairinterface.org/?page_id=698
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *-------------------------------------------------------------------------------
+ * For more information about the OpenAirInterface (OAI) Software Alliance:
+ *      contact@openairinterface.org
+ */
+#if defined(__arm__) || defined(__aarch64__) 
+#include <stdio.h>
+#include <stdlib.h>
+#include <string.h>
+
+#include <stdint.h>
+#include <math.h>
+#include <pthread.h>
+#include <execinfo.h>
+
+#ifndef M_PI
+#define M_PI 3.14159265358979323846
+#endif
+#define OAIDFTS_MAIN
+//#ifndef MR_MAIN
+//#include "PHY/defs_common.h"
+//#include "PHY/impl_defs_top.h"
+//#else
+#include "time_meas.h"
+#include "LOG/log.h"
+#define debug_msg
+#define ONE_OVER_SQRT2_Q15 23170
+
+//int oai_exit=0;
+//#endif
+
+#define ONE_OVER_SQRT3_Q15 18919
+
+#include "../sse_intrin.h"
+
+#include "assertions.h"
+
+#include "tools_defs.h"
+
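+// NEON (int16x8_t/int16x4_t) implementations of the fixed-point DFT/IDFT
+// kernels; this file is compiled only for __arm__/__aarch64__ targets (see
+// the guard at the top of the file).
+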
+#define print_shorts(s,x) printf("%s %d,%d,%d,%d,%d,%d,%d,%d\n",s,(x)[0],(x)[1],(x)[2],(x)[3],(x)[4],(x)[5],(x)[6],(x)[7])
+#define print_shorts256(s,x) printf("%s %d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d\n",s,(x)[0],(x)[1],(x)[2],(x)[3],(x)[4],(x)[5],(x)[6],(x)[7],(x)[8],(x)[9],(x)[10],(x)[11],(x)[12],(x)[13],(x)[14],(x)[15])
+
+#define print_ints(s,x) printf("%s %d %d %d %d\n",s,(x)[0],(x)[1],(x)[2],(x)[3])
+
+
+const static int16_t conjugatedft[32] __attribute__((aligned(32))) = {-1,1,-1,1,-1,1,-1,1,-1,1,-1,1,-1,1,-1,1,-1,1};
+
+
+const static int16_t reflip[32]  __attribute__((aligned(32))) = {1,-1,1,-1,1,-1,1,-1,1,-1,1,-1,1,-1,1,-1};
+
+
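+// Complex multiply-accumulate of four packed Q15 complex samples per vector:
+// bconj = b*reflip negates the imaginary lanes of b, so pairwise-adding the
+// a*bconj products yields Re(a*b); bflip = vrev32q_s16(b) swaps each re/im
+// pair, so pairwise-adding the a*bflip products yields Im(a*b). Both results
+// accumulate into *re32/*im32 with 32-bit saturation.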
+static inline void cmac(int16x8_t a,int16x8_t b, int32x4_t *re32, int32x4_t *im32) __attribute__((always_inline));
+static inline void cmac(int16x8_t a,int16x8_t b, int32x4_t *re32, int32x4_t *im32)
+{
+
+  
+  int32x4_t ab_re0,ab_re1,ab_im0,ab_im1;
+  int16x8_t bflip = vrev32q_s16(b);
+  int16x8_t bconj = vmulq_s16(b,*(int16x8_t *)reflip);
+
+  ab_re0 = vmull_s16(((int16x4_t*)&a)[0],((int16x4_t*)&bconj)[0]);
+  ab_re1 = vmull_s16(((int16x4_t*)&a)[1],((int16x4_t*)&bconj)[1]);
+  ab_im0 = vmull_s16(((int16x4_t*)&a)[0],((int16x4_t*)&bflip)[0]);
+  ab_im1 = vmull_s16(((int16x4_t*)&a)[1],((int16x4_t*)&bflip)[1]);
+  *re32 = vqaddq_s32(*re32,vcombine_s32(vpadd_s32(((int32x2_t*)&ab_re0)[0],((int32x2_t*)&ab_re0)[1]),
+					vpadd_s32(((int32x2_t*)&ab_re1)[0],((int32x2_t*)&ab_re1)[1])));
+  *im32 = vqaddq_s32(*im32,vcombine_s32(vpadd_s32(((int32x2_t*)&ab_im0)[0],((int32x2_t*)&ab_im0)[1]),
+					vpadd_s32(((int32x2_t*)&ab_im1)[0],((int32x2_t*)&ab_im1)[1])));
+}
+
+static inline void cmacc(int16x8_t a,int16x8_t b, int32x4_t *re32, int32x4_t *im32) __attribute__((always_inline));
+static inline void cmacc(int16x8_t a,int16x8_t b, int32x4_t *re32, int32x4_t *im32)
+{
+  int32x4_t ab_re0,ab_re1,ab_im0,ab_im1;
+  int16x8_t bconj = vmulq_s16(b,*(int16x8_t *)reflip);
+  int16x8_t bflip = vrev32q_s16(bconj);
+
+  ab_re0 = vmull_s16(((int16x4_t*)&a)[0],((int16x4_t*)&b)[0]);
+  ab_re1 = vmull_s16(((int16x4_t*)&a)[1],((int16x4_t*)&b)[1]);
+  ab_im0 = vmull_s16(((int16x4_t*)&a)[0],((int16x4_t*)&bflip)[0]);
+  ab_im1 = vmull_s16(((int16x4_t*)&a)[1],((int16x4_t*)&bflip)[1]);
+  *re32 = vqaddq_s32(*re32,vcombine_s32(vpadd_s32(((int32x2_t*)&ab_re0)[0],((int32x2_t*)&ab_re0)[1]),
+					vpadd_s32(((int32x2_t*)&ab_re1)[0],((int32x2_t*)&ab_re1)[1])));
+  *im32 = vqaddq_s32(*im32,vcombine_s32(vpadd_s32(((int32x2_t*)&ab_im0)[0],((int32x2_t*)&ab_im0)[1]),
+					vpadd_s32(((int32x2_t*)&ab_im1)[0],((int32x2_t*)&ab_im1)[1])));
+
+}
+
+static inline void cmult(int16x8_t a,int16x8_t b, int32x4_t *re32, int32x4_t *im32) __attribute__((always_inline));
+static inline void cmult(int16x8_t a,int16x8_t b, int32x4_t *re32, int32x4_t *im32)
+{
+  int32x4_t ab_re0,ab_re1,ab_im0,ab_im1;
+  int16x8_t bflip = vrev32q_s16(b);
+  int16x8_t bconj = vmulq_s16(b,*(int16x8_t *)reflip);
+  int16x4_t al,ah,bcl,bch,bfl,bfh;
+  int32x2_t abr0l,abr0h,abr1l,abr1h,abi0l,abi0h,abi1l,abi1h;
+
+  al  = vget_low_s16(a);      ah = vget_high_s16(a);
+  bcl = vget_low_s16(bconj);  bch = vget_high_s16(bconj);
+  bfl = vget_low_s16(bflip);  bfh = vget_high_s16(bflip);
+
+  ab_re0 = vmull_s16(al,bcl);
+  ab_re1 = vmull_s16(ah,bch);
+  ab_im0 = vmull_s16(al,bfl);
+  ab_im1 = vmull_s16(ah,bfh);
+  abr0l = vget_low_s32(ab_re0); abr0h = vget_high_s32(ab_re0);
+  abr1l = vget_low_s32(ab_re1); abr1h = vget_high_s32(ab_re1);
+  abi0l = vget_low_s32(ab_im0); abi0h = vget_high_s32(ab_im0);
+  abi1l = vget_low_s32(ab_im1); abi1h = vget_high_s32(ab_im1);
+
+  *re32 = vcombine_s32(vpadd_s32(abr0l,abr0h),
+                       vpadd_s32(abr1l,abr1h));
+  *im32 = vcombine_s32(vpadd_s32(abi0l,abi0h),
+                       vpadd_s32(abi1l,abi1h));
+}
+
+static inline void cmultc(int16x8_t a,int16x8_t b, int32x4_t *re32, int32x4_t *im32) __attribute__((always_inline));
+
+static inline void cmultc(int16x8_t a,int16x8_t b, int32x4_t *re32, int32x4_t *im32)
+{
+  int32x4_t ab_re0,ab_re1,ab_im0,ab_im1;
+  int16x8_t bconj = vmulq_s16(b,*(int16x8_t *)reflip);
+  int16x8_t bflip = vrev32q_s16(bconj);
+  int16x4_t al,ah,bl,bh,bfl,bfh; 
+  int32x2_t abr0l,abr0h,abr1l,abr1h,abi0l,abi0h,abi1l,abi1h;
+  al  = vget_low_s16(a);     ah = vget_high_s16(a);
+  bl  = vget_low_s16(b);     bh = vget_high_s16(b);
+  bfl = vget_low_s16(bflip); bfh = vget_high_s16(bflip);
+
+  ab_re0 = vmull_s16(al,bl);
+  ab_re1 = vmull_s16(ah,bh);
+  ab_im0 = vmull_s16(al,bfl);
+  ab_im1 = vmull_s16(ah,bfh);
+
+  abr0l = vget_low_s32(ab_re0); abr0h = vget_high_s32(ab_re0);
+  abr1l = vget_low_s32(ab_re1); abr1h = vget_high_s32(ab_re1);
+  abi0l = vget_low_s32(ab_im0); abi0h = vget_high_s32(ab_im0);
+  abi1l = vget_low_s32(ab_im1); abi1h = vget_high_s32(ab_im1);
+
+  *re32 = vcombine_s32(vpadd_s32(abr0l,abr0h),
+		       vpadd_s32(abr1l,abr1h));
+  *im32 = vcombine_s32(vpadd_s32(abi0l,abi0h),
+		       vpadd_s32(abi1l,abi1h));
+
+}
+
+
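+// Repack 32-bit re/im accumulators into interleaved Q15 complex samples:
+// vzipq_s32 re-interleaves the real and imaginary words, and
+// vqshrn_n_s32(.,15) converts the Q30 products back to Q15 with saturating
+// narrowing to int16.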
+static inline int16x8_t cpack(int32x4_t xre,int32x4_t xim) __attribute__((always_inline));
+
+static inline int16x8_t cpack(int32x4_t xre,int32x4_t xim)
+{
+  int32x4x2_t xtmp;
+
+  xtmp = vzipq_s32(xre,xim);
+  return(vcombine_s16(vqshrn_n_s32(xtmp.val[0],15),vqshrn_n_s32(xtmp.val[1],15)));
+
+}
+
+
+static inline void packed_cmult(int16x8_t a,int16x8_t b, int16x8_t *c) __attribute__((always_inline));
+
+static inline void packed_cmult(int16x8_t a,int16x8_t b, int16x8_t *c)
+{
+
+  int32x4_t cre,cim;
+  cmult(a,b,&cre,&cim);
+  *c = cpack(cre,cim);
+
+}
+
+
+static inline void packed_cmultc(int16x8_t a,int16x8_t b, int16x8_t *c) __attribute__((always_inline));
+
+static inline void packed_cmultc(int16x8_t a,int16x8_t b, int16x8_t *c)
+{
+
+  int32x4_t cre,cim;
+
+  cmultc(a,b,&cre,&cim);
+  *c = cpack(cre,cim);
+
+}
+
+static inline int16x8_t packed_cmult2(int16x8_t a,int16x8_t b,  int16x8_t b2) __attribute__((always_inline));
+
+static inline int16x8_t packed_cmult2(int16x8_t a,int16x8_t b,  int16x8_t b2)
+{
+
+  
+
+  int32x4_t ab_re0,ab_re1,ab_im0,ab_im1,cre,cim;
+  
+  ab_re0 = vmull_s16(((int16x4_t*)&a)[0],((int16x4_t*)&b)[0]);
+  ab_re1 = vmull_s16(((int16x4_t*)&a)[1],((int16x4_t*)&b)[1]);
+  ab_im0 = vmull_s16(((int16x4_t*)&a)[0],((int16x4_t*)&b2)[0]);
+  ab_im1 = vmull_s16(((int16x4_t*)&a)[1],((int16x4_t*)&b2)[1]);
+  cre = vcombine_s32(vpadd_s32(((int32x2_t*)&ab_re0)[0],((int32x2_t*)&ab_re0)[1]),
+		     vpadd_s32(((int32x2_t*)&ab_re1)[0],((int32x2_t*)&ab_re1)[1]));
+  cim = vcombine_s32(vpadd_s32(((int32x2_t*)&ab_im0)[0],((int32x2_t*)&ab_im0)[1]),
+		     vpadd_s32(((int32x2_t*)&ab_im1)[0],((int32x2_t*)&ab_im1)[1]));
+  return(cpack(cre,cim));
+
+}
+
+const static int16_t W0s[16]__attribute__((aligned(32))) = {32767,0,32767,0,32767,0,32767,0,32767,0,32767,0,32767,0,32767,0};
+
+const static int16_t W13s[16]__attribute__((aligned(32))) = {-16384,-28378,-16384,-28378,-16384,-28378,-16384,-28378,-16384,-28378,-16384,-28378,-16384,-28378,-16384,-28378};
+const static int16_t W23s[16]__attribute__((aligned(32))) = {-16384,28378,-16384,28378,-16384,28378,-16384,28378,-16384,28378,-16384,28378,-16384,28378,-16384,28378};
+
+const static int16_t W15s[16]__attribute__((aligned(32))) = {10126,-31163,10126,-31163,10126,-31163,10126,-31163,10126,-31163,10126,-31163,10126,-31163,10126,-31163};
+const static int16_t W25s[16]__attribute__((aligned(32))) = {-26509,-19260,-26509,-19260,-26509,-19260,-26509,-19260,-26509,-19260,-26509,-19260,-26509,-19260,-26509,-19260};
+const static int16_t W35s[16]__attribute__((aligned(32))) = {-26510,19260,-26510,19260,-26510,19260,-26510,19260,-26510,19260,-26510,19260,-26510,19260,-26510,19260};
+const static int16_t W45s[16]__attribute__((aligned(32))) = {10126,31163,10126,31163,10126,31163,10126,31163,10126,31163,10126,31163,10126,31163,10126,31163};
+
+int16x8_t *const W0  = (int16x8_t *)W0s;
+int16x8_t *const W13 = (int16x8_t *)W13s;
+int16x8_t *const W23 = (int16x8_t *)W23s;
+int16x8_t *const W15 = (int16x8_t *)W15s;
+int16x8_t *const W25 = (int16x8_t *)W25s;
+int16x8_t *const W35 = (int16x8_t *)W35s;
+int16x8_t *const W45 = (int16x8_t *)W45s;
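+// Q15 scale factors, roughly 32768/sqrt(N) for the DFT sizes N noted per
+// entry (some entries carry only a residual sqrt(2)/sqrt(3)/2 factor of a
+// composite size).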
+const static int16_t dft_norm_table[16] = {9459,  //12
+					   6689,//24
+					   5461,//36
+					   4729,//48
+					   4230,//60
+					   23170,//72
+					   3344,//96
+					   3153,//108
+					   2991,//120
+					   18918,//sqrt(3),//144
+					   18918,//sqrt(3),//180
+					   16384,//2, //192
+					   18918,//sqrt(3), // 216
+					   16384,//2, //240
+					   18918,//sqrt(3), // 288
+					   14654
+}; //sqrt(5) //300
+
+
+static inline void bfly2(int16x8_t *x0, int16x8_t *x1,int16x8_t *y0, int16x8_t *y1,int16x8_t *tw)__attribute__((always_inline));
+
+static inline void bfly2(int16x8_t *x0, int16x8_t *x1,int16x8_t *y0, int16x8_t *y1,int16x8_t *tw)
+{
+
+  int32x4_t x0r_2,x0i_2,x1r_2,x1i_2,dy0r,dy1r,dy0i,dy1i;
+
+  cmult(*(x0),*(W0),&x0r_2,&x0i_2);
+  cmult(*(x1),*(tw),&x1r_2,&x1i_2);
+
+  dy0r = vqaddq_s32(x0r_2,x1r_2);
+  dy1r = vqsubq_s32(x0r_2,x1r_2);
+  dy0i = vqaddq_s32(x0i_2,x1i_2);
+  dy1i = vqsubq_s32(x0i_2,x1i_2);
+
+  *y0 = cpack(dy0r,dy0i);
+  *y1 = cpack(dy1r,dy1i);
+}
+
+
+static inline void bfly2_tw1(int16x8_t *x0, int16x8_t *x1, int16x8_t *y0, int16x8_t *y1)__attribute__((always_inline));
+
+static inline void bfly2_tw1(int16x8_t *x0, int16x8_t *x1, int16x8_t *y0, int16x8_t *y1)
+{
+
+  *y0  = vqaddq_s16(*x0,*x1);
+  *y1  = vqsubq_s16(*x0,*x1);
+
+}
+static inline void bfly2_16(int16x8_t *x0, int16x8_t *x1, int16x8_t *y0, int16x8_t *y1, int16x8_t *tw, int16x8_t *twb)__attribute__((always_inline));
+
+static inline void bfly2_16(int16x8_t *x0, int16x8_t *x1, int16x8_t *y0, int16x8_t *y1, int16x8_t *tw, int16x8_t *twb)
+{
+
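+  // note: the twiddle arguments tw/twb are unused in this variant, which
+  // reduces to the tw1 (twiddle-free) butterfly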
+  *y0  = vqaddq_s16(*x0,*x1);
+  *y1  = vqsubq_s16(*x0,*x1);
+
+}
+static inline void ibfly2(int16x8_t *x0, int16x8_t *x1,int16x8_t *y0, int16x8_t *y1,int16x8_t *tw)
+{
+
+  int32x4_t x0r_2,x0i_2,x1r_2,x1i_2,dy0r,dy1r,dy0i,dy1i;
+
+  cmultc(*(x0),*(W0),&x0r_2,&x0i_2);
+  cmultc(*(x1),*(tw),&x1r_2,&x1i_2);
+
+  dy0r = vqaddq_s32(x0r_2,x1r_2);
+  dy1r = vqsubq_s32(x0r_2,x1r_2);
+  dy0i = vqaddq_s32(x0i_2,x1i_2);
+  dy1i = vqsubq_s32(x0i_2,x1i_2);
+
+  *y0 = cpack(dy0r,dy0i);
+  *y1 = cpack(dy1r,dy1i);
+
+}
+
+
+// This is the radix-3 butterfly (fft)
+
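+// Per packed complex lane, with w = exp(-2*pi*j/3) (W13 = w, W23 = w^2):
+//   y0 = x0 +      x1*tw1 +      x2*tw2
+//   y1 = x0 +   w*(x1*tw1) + w^2*(x2*tw2)
+//   y2 = x0 + w^2*(x1*tw1) +   w*(x2*tw2)
+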
+static inline void bfly3(int16x8_t *x0,int16x8_t *x1,int16x8_t *x2,
+                         int16x8_t *y0,int16x8_t *y1,int16x8_t *y2,
+                         int16x8_t *tw1,int16x8_t *tw2) __attribute__((always_inline));
+
+static inline void bfly3(int16x8_t *x0,int16x8_t *x1,int16x8_t *x2,
+                         int16x8_t *y0,int16x8_t *y1,int16x8_t *y2,
+                         int16x8_t *tw1,int16x8_t *tw2)
+{
+
+  int32x4_t tmpre,tmpim;
+  int16x8_t x1_2,x2_2;
+
+  packed_cmult(*(x1),*(tw1),&x1_2);
+  packed_cmult(*(x2),*(tw2),&x2_2);
+  *(y0)  = vqaddq_s16(*(x0),vqaddq_s16(x1_2,x2_2));
+  cmult(x1_2,*(W13),&tmpre,&tmpim);
+  cmac(x2_2,*(W23),&tmpre,&tmpim);
+  *(y1) = cpack(tmpre,tmpim);
+  *(y1) = vqaddq_s16(*(x0),*(y1));
+  cmult(x1_2,*(W23),&tmpre,&tmpim);
+  cmac(x2_2,*(W13),&tmpre,&tmpim);
+  *(y2) = cpack(tmpre,tmpim);
+  *(y2) = vqaddq_s16(*(x0),*(y2));
+}
+
+static inline void ibfly3(int16x8_t *x0,int16x8_t *x1,int16x8_t *x2,
+			  int16x8_t *y0,int16x8_t *y1,int16x8_t *y2,
+			  int16x8_t *tw1,int16x8_t *tw2) __attribute__((always_inline));
+
+static inline void ibfly3(int16x8_t *x0,int16x8_t *x1,int16x8_t *x2,
+			  int16x8_t *y0,int16x8_t *y1,int16x8_t *y2,
+			  int16x8_t *tw1,int16x8_t *tw2)
+{
+
+  int32x4_t tmpre,tmpim;
+  int16x8_t x1_2,x2_2;
+
+  packed_cmultc(*(x1),*(tw1),&x1_2);
+  packed_cmultc(*(x2),*(tw2),&x2_2);
+  *(y0)  = vqaddq_s16(*(x0),vqaddq_s16(x1_2,x2_2));
+  cmultc(x1_2,*(W13),&tmpre,&tmpim);
+  cmacc(x2_2,*(W23),&tmpre,&tmpim);
+  *(y1) = cpack(tmpre,tmpim);
+  *(y1) = vqaddq_s16(*(x0),*(y1));
+  cmultc(x1_2,*(W23),&tmpre,&tmpim);
+  cmacc(x2_2,*(W13),&tmpre,&tmpim);
+  *(y2) = cpack(tmpre,tmpim);
+  *(y2) = vqaddq_s16(*(x0),*(y2));
+}
+static inline void bfly3_tw1(int16x8_t *x0,int16x8_t *x1,int16x8_t *x2,
+                             int16x8_t *y0,int16x8_t *y1,int16x8_t *y2) __attribute__((always_inline));
+
+static inline void bfly3_tw1(int16x8_t *x0,int16x8_t *x1,int16x8_t *x2,
+                             int16x8_t *y0,int16x8_t *y1,int16x8_t *y2)
+{
+
+  int32x4_t tmpre,tmpim;
+
+  *(y0) = vqaddq_s16(*(x0),vqaddq_s16(*(x1),*(x2)));
+  cmult(*(x1),*(W13),&tmpre,&tmpim);
+  cmac(*(x2),*(W23),&tmpre,&tmpim);
+  *(y1) = cpack(tmpre,tmpim);
+  *(y1) = vqaddq_s16(*(x0),*(y1));
+  cmult(*(x1),*(W23),&tmpre,&tmpim);
+  cmac(*(x2),*(W13),&tmpre,&tmpim);
+  *(y2) = cpack(tmpre,tmpim);
+  *(y2) = vqaddq_s16(*(x0),*(y2));
+
+}
+
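+// Radix-4 DFT butterfly: y_k = x0 + (-j)^k*x1t + (-1)^k*x2t + (j)^k*x3t for
+// k = 0..3, where x1t..x3t are the twiddled inputs; the +/-1 and +/-j
+// rotations are folded into the add/sub and re/im-swap patterns below rather
+// than using extra twiddle products.
+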
+static inline void bfly4(int16x8_t *x0,int16x8_t *x1,int16x8_t *x2,int16x8_t *x3,
+                         int16x8_t *y0,int16x8_t *y1,int16x8_t *y2,int16x8_t *y3,
+                         int16x8_t *tw1,int16x8_t *tw2,int16x8_t *tw3)__attribute__((always_inline));
+
+static inline void bfly4(int16x8_t *x0,int16x8_t *x1,int16x8_t *x2,int16x8_t *x3,
+                         int16x8_t *y0,int16x8_t *y1,int16x8_t *y2,int16x8_t *y3,
+                         int16x8_t *tw1,int16x8_t *tw2,int16x8_t *tw3)
+{
+
+  int32x4_t x1r_2,x1i_2,x2r_2,x2i_2,x3r_2,x3i_2,dy0r,dy0i,dy1r,dy1i,dy2r,dy2i,dy3r,dy3i;
+
+  //  cmult(*(x0),*(W0),&x0r_2,&x0i_2);
+  cmult(*(x1),*(tw1),&x1r_2,&x1i_2);
+  cmult(*(x2),*(tw2),&x2r_2,&x2i_2);
+  cmult(*(x3),*(tw3),&x3r_2,&x3i_2);
+  //  dy0r = simde_mm_add_epi32(x0r_2,simde_mm_add_epi32(x1r_2,simde_mm_add_epi32(x2r_2,x3r_2)));
+  //  dy0i = simde_mm_add_epi32(x0i_2,simde_mm_add_epi32(x1i_2,simde_mm_add_epi32(x2i_2,x3i_2)));
+  //  *(y0)  = cpack(dy0r,dy0i);
+  dy0r = vqaddq_s32(x1r_2,vqaddq_s32(x2r_2,x3r_2));
+  dy0i = vqaddq_s32(x1i_2,vqaddq_s32(x2i_2,x3i_2));
+  *(y0)  = vqaddq_s16(*(x0),cpack(dy0r,dy0i));
+  //  dy1r = simde_mm_add_epi32(x0r_2,simde_mm_sub_epi32(x1i_2,simde_mm_add_epi32(x2r_2,x3i_2)));
+  //  dy1i = simde_mm_sub_epi32(x0i_2,simde_mm_add_epi32(x1r_2,simde_mm_sub_epi32(x2i_2,x3r_2)));
+  //  *(y1)  = cpack(dy1r,dy1i);
+  dy1r = vqsubq_s32(x1i_2,vqaddq_s32(x2r_2,x3i_2));
+  dy1i = vqsubq_s32(vqsubq_s32(x3r_2,x2i_2),x1r_2);
+  *(y1)  = vqaddq_s16(*(x0),cpack(dy1r,dy1i));
+  //  dy2r = simde_mm_sub_epi32(x0r_2,simde_mm_sub_epi32(x1r_2,simde_mm_sub_epi32(x2r_2,x3r_2)));
+  //  dy2i = simde_mm_sub_epi32(x0i_2,simde_mm_sub_epi32(x1i_2,simde_mm_sub_epi32(x2i_2,x3i_2)));
+  //  *(y2)  = cpack(dy2r,dy2i);
+  dy2r = vqsubq_s32(vqsubq_s32(x2r_2,x3r_2),x1r_2);
+  dy2i = vqsubq_s32(vqsubq_s32(x2i_2,x3i_2),x1i_2);
+  *(y2)  = vqaddq_s16(*(x0),cpack(dy2r,dy2i));
+  //  dy3r = simde_mm_sub_epi32(x0r_2,simde_mm_add_epi32(x1i_2,simde_mm_sub_epi32(x2r_2,x3i_2)));
+  //  dy3i = simde_mm_add_epi32(x0i_2,simde_mm_sub_epi32(x1r_2,simde_mm_add_epi32(x2i_2,x3r_2)));
+  //  *(y3) = cpack(dy3r,dy3i);
+  dy3r = vqsubq_s32(vqsubq_s32(x3i_2,x2r_2),x1i_2);
+  dy3i = vqsubq_s32(x1r_2,vqaddq_s32(x2i_2,x3r_2));
+  *(y3) = vqaddq_s16(*(x0),cpack(dy3r,dy3i));
+}
+
+static inline void ibfly4(int16x8_t *x0,int16x8_t *x1,int16x8_t *x2,int16x8_t *x3,
+                          int16x8_t *y0,int16x8_t *y1,int16x8_t *y2,int16x8_t *y3,
+                          int16x8_t *tw1,int16x8_t *tw2,int16x8_t *tw3)__attribute__((always_inline));
+
+static inline void ibfly4(int16x8_t *x0,int16x8_t *x1,int16x8_t *x2,int16x8_t *x3,
+                          int16x8_t *y0,int16x8_t *y1,int16x8_t *y2,int16x8_t *y3,
+                          int16x8_t *tw1,int16x8_t *tw2,int16x8_t *tw3)
+{
+
+  int32x4_t x1r_2,x1i_2,x2r_2,x2i_2,x3r_2,x3i_2,dy0r,dy0i,dy1r,dy1i,dy2r,dy2i,dy3r,dy3i;
+
+
+  cmultc(*(x1),*(tw1),&x1r_2,&x1i_2);
+  cmultc(*(x2),*(tw2),&x2r_2,&x2i_2);
+  cmultc(*(x3),*(tw3),&x3r_2,&x3i_2);
+
+  dy0r  = vqaddq_s32(x1r_2,vqaddq_s32(x2r_2,x3r_2));
+  dy0i  = vqaddq_s32(x1i_2,vqaddq_s32(x2i_2,x3i_2));
+  *(y0) = vqaddq_s16(*(x0),cpack(dy0r,dy0i));
+  dy3r  = vqsubq_s32(x1i_2,vqaddq_s32(x2r_2,x3i_2));
+  dy3i  = vqsubq_s32(vqsubq_s32(x3r_2,x2i_2),x1r_2);
+  *(y3) = vqaddq_s16(*(x0),cpack(dy3r,dy3i));
+  dy2r  = vqsubq_s32(vqsubq_s32(x2r_2,x3r_2),x1r_2);
+  dy2i  = vqsubq_s32(vqsubq_s32(x2i_2,x3i_2),x1i_2);
+  *(y2) = vqaddq_s16(*(x0),cpack(dy2r,dy2i));
+  dy1r  = vqsubq_s32(vqsubq_s32(x3i_2,x2r_2),x1i_2);
+  dy1i  = vqsubq_s32(x1r_2,vqaddq_s32(x2i_2,x3r_2));
+  *(y1) = vqaddq_s16(*(x0),cpack(dy1r,dy1i));
+}
+
+static inline void bfly4_tw1(int16x8_t *x0,int16x8_t *x1,int16x8_t *x2,int16x8_t *x3,
+                             int16x8_t *y0,int16x8_t *y1,int16x8_t *y2,int16x8_t *y3)__attribute__((always_inline));
+
+static inline void bfly4_tw1(int16x8_t *x0,int16x8_t *x1,int16x8_t *x2,int16x8_t *x3,
+                             int16x8_t *y0,int16x8_t *y1,int16x8_t *y2,int16x8_t *y3)
+{
+
+  register int16x8_t x1_flip,x3_flip;
+
+  *(y0) = vqaddq_s16(*(x0),vqaddq_s16(*(x1),vqaddq_s16(*(x2),*(x3))));
+  x1_flip = vrev32q_s16(vmulq_s16(*(x1),*(int16x8_t*)conjugatedft));
+  x3_flip = vrev32q_s16(vmulq_s16(*(x3),*(int16x8_t*)conjugatedft));
+  *(y1)   = vqaddq_s16(*(x0),vqsubq_s16(x1_flip,vqaddq_s16(*(x2),x3_flip)));
+  *(y2)   = vqsubq_s16(*(x0),vqsubq_s16(*(x1),vqsubq_s16(*(x2),*(x3))));
+  *(y3)   = vqsubq_s16(*(x0),vqaddq_s16(x1_flip,vqsubq_s16(*(x2),x3_flip)));
+}
+
+static inline void ibfly4_tw1(int16x8_t *x0,int16x8_t *x1,int16x8_t *x2,int16x8_t *x3,
+			      int16x8_t *y0,int16x8_t *y1,int16x8_t *y2,int16x8_t *y3)__attribute__((always_inline));
+
+static inline void ibfly4_tw1(int16x8_t *x0,int16x8_t *x1,int16x8_t *x2,int16x8_t *x3,
+			      int16x8_t *y0,int16x8_t *y1,int16x8_t *y2,int16x8_t *y3)
+{
+
+  register int16x8_t x1_flip,x3_flip;
+
+  *(y0) = vqaddq_s16(*(x0),vqaddq_s16(*(x1),vqaddq_s16(*(x2),*(x3))));
+  x1_flip = vrev32q_s16(vmulq_s16(*(x1),*(int16x8_t*)conjugatedft));
+  x3_flip = vrev32q_s16(vmulq_s16(*(x3),*(int16x8_t*)conjugatedft));
+  *(y1)   = vqsubq_s16(*(x0),vqaddq_s16(x1_flip,vqsubq_s16(*(x2),x3_flip)));
+  *(y2)   = vqsubq_s16(*(x0),vqsubq_s16(*(x1),vqsubq_s16(*(x2),*(x3))));
+  *(y3)   = vqaddq_s16(*(x0),vqsubq_s16(x1_flip,vqaddq_s16(*(x2),x3_flip)));
+}
+
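+// 16-bit variant of the radix-4 butterfly: the (tw,twb) pairs feed
+// packed_cmult2, and the +/-j rotations reuse the conjugatedft sign flip
+// plus the vrev32q_s16 re/im swap, keeping all adds/subs in saturated int16.
+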
+static inline void bfly4_16(int16x8_t *x0,int16x8_t *x1,int16x8_t *x2,int16x8_t *x3,
+                            int16x8_t *y0,int16x8_t *y1,int16x8_t *y2,int16x8_t *y3,
+                            int16x8_t *tw1,int16x8_t *tw2,int16x8_t *tw3,
+                            int16x8_t *tw1b,int16x8_t *tw2b,int16x8_t *tw3b)__attribute__((always_inline));
+
+static inline void bfly4_16(int16x8_t *x0,int16x8_t *x1,int16x8_t *x2,int16x8_t *x3,
+                            int16x8_t *y0,int16x8_t *y1,int16x8_t *y2,int16x8_t *y3,
+                            int16x8_t *tw1,int16x8_t *tw2,int16x8_t *tw3,
+                            int16x8_t *tw1b,int16x8_t *tw2b,int16x8_t *tw3b)
+{
+
+  register int16x8_t x1t,x2t,x3t,x02t,x13t;
+  register int16x8_t x1_flip,x3_flip;
+
+  x1t = packed_cmult2(*(x1),*(tw1),*(tw1b));
+  x2t = packed_cmult2(*(x2),*(tw2),*(tw2b));
+  x3t = packed_cmult2(*(x3),*(tw3),*(tw3b));
+
+
+
+  x02t  = vqaddq_s16(*(x0),x2t);
+  x13t  = vqaddq_s16(x1t,x3t);
+  *(y0)   = vqaddq_s16(x02t,x13t);
+  *(y2)   = vqsubq_s16(x02t,x13t);
+  x1_flip = vrev32q_s16(vmulq_s16(x1t,*(int16x8_t*)conjugatedft));
+  x3_flip = vrev32q_s16(vmulq_s16(x3t,*(int16x8_t*)conjugatedft));
+  x02t  = vqsubq_s16(*(x0),x2t);
+  x13t  = vqsubq_s16(x1_flip,x3_flip);
+  *(y1)   = vqaddq_s16(x02t,x13t);  // x0 + x1f - x2 - x3f
+  *(y3)   = vqsubq_s16(x02t,x13t);  // x0 - x1f - x2 + x3f
+}
+
+static inline void ibfly4_16(int16x8_t *x0,int16x8_t *x1,int16x8_t *x2,int16x8_t *x3,
+			     int16x8_t *y0,int16x8_t *y1,int16x8_t *y2,int16x8_t *y3,
+			     int16x8_t *tw1,int16x8_t *tw2,int16x8_t *tw3,
+			     int16x8_t *tw1b,int16x8_t *tw2b,int16x8_t *tw3b)__attribute__((always_inline));
+
+static inline void ibfly4_16(int16x8_t *x0,int16x8_t *x1,int16x8_t *x2,int16x8_t *x3,
+			     int16x8_t *y0,int16x8_t *y1,int16x8_t *y2,int16x8_t *y3,
+			     int16x8_t *tw1,int16x8_t *tw2,int16x8_t *tw3,
+			     int16x8_t *tw1b,int16x8_t *tw2b,int16x8_t *tw3b)
+{
+
+  register int16x8_t x1t,x2t,x3t,x02t,x13t;
+  register int16x8_t x1_flip,x3_flip;
+
+  x1t = packed_cmult2(*(x1),*(tw1),*(tw1b));
+  x2t = packed_cmult2(*(x2),*(tw2),*(tw2b));
+  x3t = packed_cmult2(*(x3),*(tw3),*(tw3b));
+
+  x02t    = vqaddq_s16(*(x0),x2t);
+  x13t    = vqaddq_s16(x1t,x3t);
+  *(y0)   = vqaddq_s16(x02t,x13t);
+  *(y2)   = vqsubq_s16(x02t,x13t);
+  x1_flip = vrev32q_s16(vmulq_s16(x1t,*(int16x8_t*)conjugatedft));
+  x3_flip = vrev32q_s16(vmulq_s16(x3t,*(int16x8_t*)conjugatedft));
+  x02t    = vqsubq_s16(*(x0),x2t);
+  x13t    = vqsubq_s16(x1_flip,x3_flip);
+  *(y3)   = vqaddq_s16(x02t,x13t);  // x0 - x1f - x2 + x3f
+  *(y1)   = vqsubq_s16(x02t,x13t);  // x0 + x1f - x2 - x3f
+}
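+
+// Radix-5 DFT butterfly: W15..W45 hold exp(-2*pi*j*k/5), k = 1..4, in Q15;
+// each output y1..y4 is x0 plus a cmult/cmac dot product of the twiddled
+// inputs with one row of the 5-point DFT matrix.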
+static inline void bfly5(int16x8_t *x0, int16x8_t *x1, int16x8_t *x2, int16x8_t *x3,int16x8_t *x4,
+                         int16x8_t *y0, int16x8_t *y1, int16x8_t *y2, int16x8_t *y3,int16x8_t *y4,
+                         int16x8_t *tw1,int16x8_t *tw2,int16x8_t *tw3,int16x8_t *tw4)__attribute__((always_inline));
+
+static inline void bfly5(int16x8_t *x0, int16x8_t *x1, int16x8_t *x2, int16x8_t *x3,int16x8_t *x4,
+                         int16x8_t *y0, int16x8_t *y1, int16x8_t *y2, int16x8_t *y3,int16x8_t *y4,
+                         int16x8_t *tw1,int16x8_t *tw2,int16x8_t *tw3,int16x8_t *tw4)
+{
+
+
+
+  int16x8_t x1_2,x2_2,x3_2,x4_2;
+  int32x4_t tmpre,tmpim;
+
+  packed_cmult(*(x1),*(tw1),&x1_2);
+  packed_cmult(*(x2),*(tw2),&x2_2);
+  packed_cmult(*(x3),*(tw3),&x3_2);
+  packed_cmult(*(x4),*(tw4),&x4_2);
+
+  *(y0)  = vqaddq_s16(*(x0),vqaddq_s16(x1_2,vqaddq_s16(x2_2,vqaddq_s16(x3_2,x4_2))));
+  cmult(x1_2,*(W15),&tmpre,&tmpim);
+  cmac(x2_2,*(W25),&tmpre,&tmpim);
+  cmac(x3_2,*(W35),&tmpre,&tmpim);
+  cmac(x4_2,*(W45),&tmpre,&tmpim);
+  *(y1) = cpack(tmpre,tmpim);
+  *(y1) = vqaddq_s16(*(x0),*(y1));
+
+  cmult(x1_2,*(W25),&tmpre,&tmpim);
+  cmac(x2_2,*(W45),&tmpre,&tmpim);
+  cmac(x3_2,*(W15),&tmpre,&tmpim);
+  cmac(x4_2,*(W35),&tmpre,&tmpim);
+  *(y2) = cpack(tmpre,tmpim);
+  *(y2) = vqaddq_s16(*(x0),*(y2));
+
+  cmult(x1_2,*(W35),&tmpre,&tmpim);
+  cmac(x2_2,*(W15),&tmpre,&tmpim);
+  cmac(x3_2,*(W45),&tmpre,&tmpim);
+  cmac(x4_2,*(W25),&tmpre,&tmpim);
+  *(y3) = cpack(tmpre,tmpim);
+  *(y3) = vqaddq_s16(*(x0),*(y3));
+
+  cmult(x1_2,*(W45),&tmpre,&tmpim);
+  cmac(x2_2,*(W35),&tmpre,&tmpim);
+  cmac(x3_2,*(W25),&tmpre,&tmpim);
+  cmac(x4_2,*(W15),&tmpre,&tmpim);
+  *(y4) = cpack(tmpre,tmpim);
+  *(y4) = vqaddq_s16(*(x0),*(y4));
+
+
+}
+
+
+static inline void bfly5_tw1(int16x8_t *x0, int16x8_t *x1, int16x8_t *x2, int16x8_t *x3,int16x8_t *x4,
+                             int16x8_t *y0, int16x8_t *y1, int16x8_t *y2, int16x8_t *y3,int16x8_t *y4) __attribute__((always_inline));
+
+static inline void bfly5_tw1(int16x8_t *x0, int16x8_t *x1, int16x8_t *x2, int16x8_t *x3,int16x8_t *x4,
+                             int16x8_t *y0, int16x8_t *y1, int16x8_t *y2, int16x8_t *y3,int16x8_t *y4)
+{
+
+  int32x4_t tmpre,tmpim;
+
+  *(y0) = vqaddq_s16(*(x0),vqaddq_s16(*(x1),vqaddq_s16(*(x2),vqaddq_s16(*(x3),*(x4)))));
+  cmult(*(x1),*(W15),&tmpre,&tmpim);
+  cmac(*(x2),*(W25),&tmpre,&tmpim);
+  cmac(*(x3),*(W35),&tmpre,&tmpim);
+  cmac(*(x4),*(W45),&tmpre,&tmpim);
+  *(y1) = cpack(tmpre,tmpim);
+  *(y1) = vqaddq_s16(*(x0),*(y1));
+  cmult(*(x1),*(W25),&tmpre,&tmpim);
+  cmac(*(x2),*(W45),&tmpre,&tmpim);
+  cmac(*(x3),*(W15),&tmpre,&tmpim);
+  cmac(*(x4),*(W35),&tmpre,&tmpim);
+  *(y2) = cpack(tmpre,tmpim);
+  *(y2) = vqaddq_s16(*(x0),*(y2));
+  cmult(*(x1),*(W35),&tmpre,&tmpim);
+  cmac(*(x2),*(W15),&tmpre,&tmpim);
+  cmac(*(x3),*(W45),&tmpre,&tmpim);
+  cmac(*(x4),*(W25),&tmpre,&tmpim);
+  *(y3) = cpack(tmpre,tmpim);
+  *(y3) = vqaddq_s16(*(x0),*(y3));
+  cmult(*(x1),*(W45),&tmpre,&tmpim);
+  cmac(*(x2),*(W35),&tmpre,&tmpim);
+  cmac(*(x3),*(W25),&tmpre,&tmpim);
+  cmac(*(x4),*(W15),&tmpre,&tmpim);
+  *(y4) = cpack(tmpre,tmpim);
+  *(y4) = vqaddq_s16(*(x0),*(y4));
+}
+
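+// 4x4 transpose of packed complex int16 samples: each 32-bit lane holds one
+// (re,im) pair, so vtrnq_u32 plus the low/high recombination permutes whole
+// complex samples across the four vectors.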
+static inline void transpose16(int16x8_t *x,int16x8_t *y) __attribute__((always_inline));
+static inline void transpose16(int16x8_t *x,int16x8_t *y)
+{
+  register uint32x4x2_t ytmp0,ytmp1;
+
+  ytmp0 = vtrnq_u32((uint32x4_t)(x[0]),(uint32x4_t)(x[1]));
+  ytmp1 = vtrnq_u32((uint32x4_t)(x[2]),(uint32x4_t)(x[3]));
+
+  y[0]  = vcombine_s16(vget_low_s16((int16x8_t)ytmp0.val[0]),vget_low_s16((int16x8_t)ytmp1.val[0]));
+  y[1]  = vcombine_s16(vget_high_s16((int16x8_t)ytmp0.val[0]),vget_high_s16((int16x8_t)ytmp1.val[0]));
+  y[2]  = vcombine_s16(vget_low_s16((int16x8_t)ytmp0.val[1]),vget_low_s16((int16x8_t)ytmp1.val[1]));
+  y[3]  = vcombine_s16(vget_high_s16((int16x8_t)ytmp0.val[1]),vget_high_s16((int16x8_t)ytmp1.val[1]));
+}
+
+static inline void transpose16_ooff(int16x8_t *x,int16x8_t *y,int off) __attribute__((always_inline));
+
+static inline void transpose16_ooff(int16x8_t *x,int16x8_t *y,int off)
+{
+  int16x8_t *y2=y;
+  register uint32x4x2_t ytmp0,ytmp1;
+
+  ytmp0 = vtrnq_u32((uint32x4_t)(x[0]),(uint32x4_t)(x[1]));
+  ytmp1 = vtrnq_u32((uint32x4_t)(x[2]),(uint32x4_t)(x[3]));
+
+  *y2   = (int16x8_t)vcombine_s16(vget_low_s16((int16x8_t)ytmp0.val[0]),vget_low_s16((int16x8_t)ytmp1.val[0])); y2+=off;
+  *y2   = (int16x8_t)vcombine_s16(vget_low_s16((int16x8_t)ytmp0.val[1]),vget_low_s16((int16x8_t)ytmp1.val[1])); y2+=off;
+  *y2   = (int16x8_t)vcombine_s16(vget_high_s16((int16x8_t)ytmp0.val[0]),vget_high_s16((int16x8_t)ytmp1.val[0])); y2+=off;
+  *y2   = (int16x8_t)vcombine_s16(vget_high_s16((int16x8_t)ytmp0.val[1]),vget_high_s16((int16x8_t)ytmp1.val[1]));
+}
+
+static inline void transpose4_ooff(int16x4_t *x,int16x4_t *y,int off) __attribute__((always_inline));
+static inline void transpose4_ooff(int16x4_t *x,int16x4_t *y,int off)
+{
+  uint32x2x2_t ytmp = vtrn_u32((uint32x2_t)x[0],(uint32x2_t)x[1]);
+
+  y[0]   = (int16x4_t)ytmp.val[0];
+  y[off] = (int16x4_t)ytmp.val[1];
+}
+
+// 16-point optimized DFT kernel
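+//
+// tw16 holds rows k=1..3 of the 16-point twiddles W16^(n*k) (n=0..3) as Q15
+// (re,im) pairs, i.e. re ~ round(32767*cos(2*pi*n*k/16)). tw16a is the same
+// table with the sign of the imaginary part flipped, and tw16b/tw16c are
+// tw16a/tw16 with each pair rearranged to (-im,re) -- our reading is that
+// packed_cmult2 needs the twiddle in both layouts to form the real and
+// imaginary products with plain vector multiplies.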
+
+static const int16_t tw16[24] __attribute__((aligned(32))) = { 32767,0,30272,-12540,23169 ,-23170,12539 ,-30273,
+                                                  32767,0,23169,-23170,0     ,-32767,-23170,-23170,
+                                                  32767,0,12539,-30273,-23170,-23170,-30273,12539
+                                                };
+
+static const int16_t tw16a[24] __attribute__((aligned(32))) = {32767,0,30272,12540,23169 ,23170,12539 ,30273,
+                                                  32767,0,23169,23170,0     ,32767,-23170,23170,
+                                                  32767,0,12539,30273,-23170,23170,-30273,-12539
+                                                 };
+
+static const int16_t tw16b[24] __attribute__((aligned(32))) = { 0,32767,-12540,30272,-23170,23169 ,-30273,12539,
+                                                   0,32767,-23170,23169,-32767,0     ,-23170,-23170,
+                                                   0,32767,-30273,12539,-23170,-23170,12539 ,-30273
+                                                 };
+
+static const int16_t tw16c[24] __attribute__((aligned(32))) = { 0,32767,12540,30272,23170,23169 ,30273 ,12539,
+                                                   0,32767,23170,23169,32767,0     ,23170 ,-23170,
+                                                   0,32767,30273,12539,23170,-23170,-12539,-30273
+                                                 };
+
+static inline void dft16(int16_t *x,int16_t *y) __attribute__((always_inline));
+
+static inline void dft16(int16_t *x,int16_t *y)
+{
+
+  int16x8_t *tw16a_128=(int16x8_t *)tw16a,*tw16b_128=(int16x8_t *)tw16b,*x128=(int16x8_t *)x,*y128=(int16x8_t *)y;
+
+  /*  This is the original version before unrolling
+
+  bfly4_tw1(x128,x128+1,x128+2,x128+3,
+      y128,y128+1,y128+2,y128+3);
+
+  transpose16(y128,ytmp);
+
+  bfly4_16(ytmp,ytmp+1,ytmp+2,ytmp+3,
+     y128,y128+1,y128+2,y128+3,
+     tw16_128,tw16_128+1,tw16_128+2);
+  */
+
+  register int16x8_t x1_flip,x3_flip,x02t,x13t;
+  register int16x8_t xtmp0,xtmp1,xtmp2,xtmp3;
+  register uint32x4x2_t ytmp0,ytmp1;
+  register int16x8_t ytmp0b,ytmp1b,ytmp2b,ytmp3b;
+
+  // First stage : 4 Radix-4 butterflies without input twiddles
+  
+  x02t    = vqaddq_s16(x128[0],x128[2]);
+  x13t    = vqaddq_s16(x128[1],x128[3]);
+  xtmp0   = vqaddq_s16(x02t,x13t);
+  xtmp2   = vqsubq_s16(x02t,x13t);
+  x1_flip = vrev32q_s16(vmulq_s16(x128[1],*(int16x8_t*)conjugatedft));
+  x3_flip = vrev32q_s16(vmulq_s16(x128[3],*(int16x8_t*)conjugatedft));
+  x02t    = vqsubq_s16(x128[0],x128[2]);
+  x13t    = vqsubq_s16(x1_flip,x3_flip);
+  xtmp1   = vqaddq_s16(x02t,x13t);  // x0 + x1f - x2 - x3f
+  xtmp3   = vqsubq_s16(x02t,x13t);  // x0 - x1f - x2 + x3f
+
+  ytmp0  = vtrnq_u32((uint32x4_t)(xtmp0),(uint32x4_t)(xtmp1));
+  // ytmp0.val[0] = [x00 x10 x02 x12], ytmp0.val[1] = [x01 x11 x03 x13]
+  ytmp1  = vtrnq_u32((uint32x4_t)(xtmp2),(uint32x4_t)(xtmp3));
+  // ytmp1.val[0] = [x20 x30 x22 x32], ytmp1.val[1] = [x21 x31 x23 x33]
+
+  ytmp0b = vcombine_s16(vget_low_s16((int16x8_t)ytmp0.val[0]),vget_low_s16((int16x8_t)ytmp1.val[0]));
+  // ytmp0b = [x00 x10 x20 x30]
+  ytmp1b = vcombine_s16(vget_low_s16((int16x8_t)ytmp0.val[1]),vget_low_s16((int16x8_t)ytmp1.val[1]));
+  // ytmp1b = [x01 x11 x21 x31]
+  ytmp2b = vcombine_s16(vget_high_s16((int16x8_t)ytmp0.val[0]),vget_high_s16((int16x8_t)ytmp1.val[0]));
+  // ytmp2b = [x02 x12 x22 x32]
+  ytmp3b = vcombine_s16(vget_high_s16((int16x8_t)ytmp0.val[1]),vget_high_s16((int16x8_t)ytmp1.val[1]));
+  // ytmp3b = [x03 x13 x23 x33]
+
+
+  // Second stage : 4 Radix-4 butterflies with input twiddles
+  xtmp1 = packed_cmult2(ytmp1b,tw16a_128[0],tw16b_128[0]);
+  xtmp2 = packed_cmult2(ytmp2b,tw16a_128[1],tw16b_128[1]);
+  xtmp3 = packed_cmult2(ytmp3b,tw16a_128[2],tw16b_128[2]);
+
+  x02t    = vqaddq_s16(ytmp0b,xtmp2);
+  x13t    = vqaddq_s16(xtmp1,xtmp3);
+  y128[0] = vqaddq_s16(x02t,x13t);
+  y128[2] = vqsubq_s16(x02t,x13t);
+  x1_flip = vrev32q_s16(vmulq_s16(xtmp1,*(int16x8_t*)conjugatedft));
+  x3_flip = vrev32q_s16(vmulq_s16(xtmp3,*(int16x8_t*)conjugatedft));
+  x02t    = vqsubq_s16(ytmp0b,xtmp2);
+  x13t    = vqsubq_s16(x1_flip,x3_flip);
+  y128[1] = vqaddq_s16(x02t,x13t);  // x0 + x1f - x2 - x3f
+  y128[3] = vqsubq_s16(x02t,x13t);  // x0 - x1f - x2 + x3f
+
+
+}
+
+static inline void idft16(int16_t *x,int16_t *y) __attribute__((always_inline));
+
+static inline void idft16(int16_t *x,int16_t *y)
+{
+
+  int16x8_t *tw16a_128=(int16x8_t *)tw16,*tw16b_128=(int16x8_t *)tw16c,*x128=(int16x8_t *)x,*y128=(int16x8_t *)y;
+
+  /*  This is the original version before unrolling
+
+  bfly4_tw1(x128,x128+1,x128+2,x128+3,
+      y128,y128+1,y128+2,y128+3);
+
+  transpose16(y128,ytmp);
+
+  bfly4_16(ytmp,ytmp+1,ytmp+2,ytmp+3,
+     y128,y128+1,y128+2,y128+3,
+     tw16_128,tw16_128+1,tw16_128+2);
+  */
+
+  register int16x8_t x1_flip,x3_flip,x02t,x13t;
+  register int16x8_t xtmp0,xtmp1,xtmp2,xtmp3;
+  register uint32x4x2_t ytmp0,ytmp1;
+  register int16x8_t ytmp0b,ytmp1b,ytmp2b,ytmp3b;
+
+  // First stage : 4 Radix-4 butterflies without input twiddles
+
+  x02t    = vqaddq_s16(x128[0],x128[2]);
+  x13t    = vqaddq_s16(x128[1],x128[3]);
+  xtmp0   = vqaddq_s16(x02t,x13t);
+  xtmp2   = vqsubq_s16(x02t,x13t);
+  x1_flip = vrev32q_s16(vmulq_s16(x128[1],*(int16x8_t*)conjugatedft));
+  x3_flip = vrev32q_s16(vmulq_s16(x128[3],*(int16x8_t*)conjugatedft));
+  x02t    = vqsubq_s16(x128[0],x128[2]);
+  x13t    = vqsubq_s16(x1_flip,x3_flip);
+  xtmp3   = vqaddq_s16(x02t,x13t);  // x0 + x1f - x2 - x3f
+  xtmp1   = vqsubq_s16(x02t,x13t);  // x0 - x1f - x2 + x3f
+
+  ytmp0  = vtrnq_u32((uint32x4_t)(xtmp0),(uint32x4_t)(xtmp1));
+  // ytmp0.val[0] = [x00 x10 x02 x12], ytmp0.val[1] = [x01 x11 x03 x13]
+  ytmp1  = vtrnq_u32((uint32x4_t)(xtmp2),(uint32x4_t)(xtmp3));
+  // ytmp1.val[0] = [x20 x30 x22 x32], ytmp1.val[1] = [x21 x31 x23 x33]
+
+  ytmp0b = vcombine_s16(vget_low_s16((int16x8_t)ytmp0.val[0]),vget_low_s16((int16x8_t)ytmp1.val[0]));
+  // ytmp0b = [x00 x10 x20 x30]
+  ytmp1b = vcombine_s16(vget_low_s16((int16x8_t)ytmp0.val[1]),vget_low_s16((int16x8_t)ytmp1.val[1]));
+  // ytmp1b = [x01 x11 x21 x31]
+  ytmp2b = vcombine_s16(vget_high_s16((int16x8_t)ytmp0.val[0]),vget_high_s16((int16x8_t)ytmp1.val[0]));
+  // ytmp2b = [x02 x12 x22 x32]
+  ytmp3b = vcombine_s16(vget_high_s16((int16x8_t)ytmp0.val[1]),vget_high_s16((int16x8_t)ytmp1.val[1]));
+  // ytmp3b = [x03 x13 x23 x33]
+
+  // Second stage : 4 Radix-4 butterflies with input twiddles
+  xtmp1 = packed_cmult2(ytmp1b,tw16a_128[0],tw16b_128[0]);
+  xtmp2 = packed_cmult2(ytmp2b,tw16a_128[1],tw16b_128[1]);
+  xtmp3 = packed_cmult2(ytmp3b,tw16a_128[2],tw16b_128[2]);
+
+  x02t    = vqaddq_s16(ytmp0b,xtmp2);
+  x13t    = vqaddq_s16(xtmp1,xtmp3);
+  y128[0] = vqaddq_s16(x02t,x13t);
+  y128[2] = vqsubq_s16(x02t,x13t);
+  x1_flip = vrev32q_s16(vmulq_s16(xtmp1,*(int16x8_t*)conjugatedft));
+  x3_flip = vrev32q_s16(vmulq_s16(xtmp3,*(int16x8_t*)conjugatedft));
+  x02t    = vqsubq_s16(ytmp0b,xtmp2);
+  x13t    = vqsubq_s16(x1_flip,x3_flip);
+  y128[3] = vqaddq_s16(x02t,x13t);  // x0 + x1f - x2 - x3f
+  y128[1] = vqsubq_s16(x02t,x13t);  // x0 - x1f - x2 + x3f
+
+}
+
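+// Non-inline entry point: idft16 above is always_inline, so this wrapper is
+// presumably what other translation units link against.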
+void idft16f(int16_t *x,int16_t *y) {
+  idft16(x,y);
+}
+
+// 64-point optimized DFT
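+//
+// 64 = 4 x 16: transpose16_ooff (off=4) gathers four 16-sample columns, each
+// column goes through dft16/idft16, and sixteen radix-4 butterflies
+// (bfly4_16/ibfly4_16) recombine them. tw64 holds rows k=1..3 of W64^(n*k)
+// (n=0..15) in Q15, with tw64a/b/c the same pre-rotated layouts as in the
+// 16-point kernel.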
+
+static const int16_t tw64[96] __attribute__((aligned(32))) = {
+32767,0,32609,-3212,32137,-6393,31356,-9512,
+30272,-12540,28897,-15447,27244,-18205,25329,-20788,
+23169,-23170,20787,-25330,18204,-27245,15446,-28898,
+12539,-30273,9511,-31357,6392,-32138,3211,-32610,
+32767,0,32137,-6393,30272,-12540,27244,-18205,
+23169,-23170,18204,-27245,12539,-30273,6392,-32138,
+0,-32767,-6393,-32138,-12540,-30273,-18205,-27245,
+-23170,-23170,-27245,-18205,-30273,-12540,-32138,-6393,
+32767,0,31356,-9512,27244,-18205,20787,-25330,
+12539,-30273,3211,-32610,-6393,-32138,-15447,-28898,
+-23170,-23170,-28898,-15447,-32138,-6393,-32610,3211,
+-30273,12539,-25330,20787,-18205,27244,-9512,31356
+                                                };
+static const int16_t tw64a[96] __attribute__((aligned(32))) = {
+32767,0,32609,3212,32137,6393,31356,9512,
+30272,12540,28897,15447,27244,18205,25329,20788,
+23169,23170,20787,25330,18204,27245,15446,28898,
+12539,30273,9511,31357,6392,32138,3211,32610,
+32767,0,32137,6393,30272,12540,27244,18205,
+23169,23170,18204,27245,12539,30273,6392,32138,
+0,32767,-6393,32138,-12540,30273,-18205,27245,
+-23170,23170,-27245,18205,-30273,12540,-32138,6393,
+32767,0,31356,9512,27244,18205,20787,25330,
+12539,30273,3211,32610,-6393,32138,-15447,28898,
+-23170,23170,-28898,15447,-32138,6393,-32610,-3211,
+-30273,-12539,-25330,-20787,-18205,-27244,-9512,-31356
+                                                 };
+static const int16_t tw64b[96] __attribute__((aligned(32))) = {
+0,32767,-3212,32609,-6393,32137,-9512,31356,
+-12540,30272,-15447,28897,-18205,27244,-20788,25329,
+-23170,23169,-25330,20787,-27245,18204,-28898,15446,
+-30273,12539,-31357,9511,-32138,6392,-32610,3211,
+0,32767,-6393,32137,-12540,30272,-18205,27244,
+-23170,23169,-27245,18204,-30273,12539,-32138,6392,
+-32767,0,-32138,-6393,-30273,-12540,-27245,-18205,
+-23170,-23170,-18205,-27245,-12540,-30273,-6393,-32138,
+0,32767,-9512,31356,-18205,27244,-25330,20787,
+-30273,12539,-32610,3211,-32138,-6393,-28898,-15447,
+-23170,-23170,-15447,-28898,-6393,-32138,3211,-32610,
+12539,-30273,20787,-25330,27244,-18205,31356,-9512
+                                                 };
+static const int16_t tw64c[96] __attribute__((aligned(32))) = {
+0,32767,3212,32609,6393,32137,9512,31356,
+12540,30272,15447,28897,18205,27244,20788,25329,
+23170,23169,25330,20787,27245,18204,28898,15446,
+30273,12539,31357,9511,32138,6392,32610,3211,
+0,32767,6393,32137,12540,30272,18205,27244,
+23170,23169,27245,18204,30273,12539,32138,6392,
+32767,0,32138,-6393,30273,-12540,27245,-18205,
+23170,-23170,18205,-27245,12540,-30273,6393,-32138,
+0,32767,9512,31356,18205,27244,25330,20787,
+30273,12539,32610,3211,32138,-6393,28898,-15447,
+23170,-23170,15447,-28898,6393,-32138,-3211,-32610,
+-12539,-30273,-20787,-25330,-27244,-18205,-31356,-9512
+                                                 };
+#ifdef simd_q15_t
+#undef simd_q15_t
+#endif
+#ifdef simdshort_q15_t 
+#undef simdshort_q15_t
+#endif
+#ifdef shiftright_int16
+#undef shiftright_int16
+#endif
+#ifdef set1_int16
+#undef set1_int16
+#endif
+#ifdef mulhi_int16
+#undef mulhi_int16
+#endif
+
+#define simd_q15_t int16x8_t
+#define simdshort_q15_t int16x4_t
+#define shiftright_int16(a,shift) vshrq_n_s16(a,shift)
+#define set1_int16(a) vdupq_n_s16(a)
+#define mulhi_int16(a,b) vqdmulhq_s16(a,b)
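+
+// Q15 helper semantics on NEON (a reading of the mappings above):
+//  - shiftright_int16(a,s): per-lane arithmetic shift right, i.e. divide by 2^s
+//  - set1_int16(a):         broadcast a to all eight lanes
+//  - mulhi_int16(a,b):      vqdmulhq_s16 returns sat((2*a*b) >> 16) per lane,
+//    which is the Q15 product sat((a*b) >> 15) -- the doubling restores the
+//    Q15 binary point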
+
+void dft64(int16_t *x,int16_t *y,unsigned char scale)
+{
+
+  simd_q15_t xtmp[16],ytmp[16],*tw64a_128=(simd_q15_t *)tw64a,*tw64b_128=(simd_q15_t *)tw64b,*x128=(simd_q15_t *)x,*y128=(simd_q15_t *)y;
+
+
+#ifdef D64STATS
+  time_stats_t ts_t,ts_d,ts_b;
+
+  reset_meas(&ts_t);
+  reset_meas(&ts_d);
+  reset_meas(&ts_b);
+  start_meas(&ts_t);
+#endif
+
+
+  transpose16_ooff(x128,xtmp,4);
+  // xtmp0  = x00 x10 x20 x30
+  // xtmp4  = x01 x11 x21 x31
+  // xtmp8  = x02 x12 x22 x32
+  // xtmp12 = x03 x13 x23 x33
+  transpose16_ooff(x128+4,xtmp+1,4);
+  // xtmp1  = x40 x50 x60 x70
+  // xtmp5  = x41 x51 x61 x71
+  // xtmp9  = x42 x52 x62 x72
+  // xtmp13 = x43 x53 x63 x73
+  transpose16_ooff(x128+8,xtmp+2,4);
+  // xtmp2  = x80 x90 xa0 xb0
+  // xtmp6  = x81 x91 xa1 xb1
+  // xtmp10 = x82 x92 xa2 xb2
+  // xtmp14 = x83 x93 xa3 xb3
+  transpose16_ooff(x128+12,xtmp+3,4);
+  // xtmp3  = xc0 xd0 xe0 xf0
+  // xtmp7  = xc1 xd1 xe1 xf1
+  // xtmp11 = xc2 xd2 xe2 xf2
+  // xtmp15 = xc3 xd3 xe3 xf3
+
+#ifdef D64STATS
+  stop_meas(&ts_t);
+  start_meas(&ts_d);
+#endif
+
+  // xtmp0  = x00 x10 x20 x30
+  // xtmp1  = x40 x50 x60 x70
+  // xtmp2  = x80 x90 xa0 xb0
+  // xtmp3  = xc0 xd0 xe0 xf0
+  dft16((int16_t*)(xtmp),(int16_t*)ytmp);
+
+  // xtmp4  = x01 x11 x21 x31
+  // xtmp5  = x41 x51 x61 x71
+  // xtmp6  = x81 x91 xa1 xb1
+  // xtmp7  = xc1 xd1 xe1 xf1
+  dft16((int16_t*)(xtmp+4),(int16_t*)(ytmp+4));
+  dft16((int16_t*)(xtmp+8),(int16_t*)(ytmp+8));
+  dft16((int16_t*)(xtmp+12),(int16_t*)(ytmp+12));
+
+
+#ifdef D64STATS
+  stop_meas(&ts_d);
+  start_meas(&ts_b);
+#endif
+
+
+  bfly4_16(ytmp,ytmp+4,ytmp+8,ytmp+12,
+           y128,y128+4,y128+8,y128+12,
+           tw64a_128,tw64a_128+4,tw64a_128+8,
+           tw64b_128,tw64b_128+4,tw64b_128+8);
+
+  bfly4_16(ytmp+1,ytmp+5,ytmp+9,ytmp+13,
+           y128+1,y128+5,y128+9,y128+13,
+           tw64a_128+1,tw64a_128+5,tw64a_128+9,
+           tw64b_128+1,tw64b_128+5,tw64b_128+9);
+
+  bfly4_16(ytmp+2,ytmp+6,ytmp+10,ytmp+14,
+           y128+2,y128+6,y128+10,y128+14,
+           tw64a_128+2,tw64a_128+6,tw64a_128+10,
+           tw64b_128+2,tw64b_128+6,tw64b_128+10);
+
+  bfly4_16(ytmp+3,ytmp+7,ytmp+11,ytmp+15,
+           y128+3,y128+7,y128+11,y128+15,
+           tw64a_128+3,tw64a_128+7,tw64a_128+11,
+           tw64b_128+3,tw64b_128+7,tw64b_128+11);
+
+#ifdef D64STATS
+  stop_meas(&ts_b);
+  printf("t: %llu cycles, d: %llu cycles, b: %llu cycles\n",ts_t.diff,ts_d.diff,ts_b.diff);
+#endif
+
+
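+  // Optional output scaling (same convention in idft64 below): >>3 divides by
+  // 8 = sqrt(64), so a scaled forward+inverse round trip comes back at unit
+  // gain since the transform pair itself contributes a factor of 64.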
+  if (scale>0) {
+    y128[0]  = shiftright_int16(y128[0],3);
+    y128[1]  = shiftright_int16(y128[1],3);
+    y128[2]  = shiftright_int16(y128[2],3);
+    y128[3]  = shiftright_int16(y128[3],3);
+    y128[4]  = shiftright_int16(y128[4],3);
+    y128[5]  = shiftright_int16(y128[5],3);
+    y128[6]  = shiftright_int16(y128[6],3);
+    y128[7]  = shiftright_int16(y128[7],3);
+    y128[8]  = shiftright_int16(y128[8],3);
+    y128[9]  = shiftright_int16(y128[9],3);
+    y128[10] = shiftright_int16(y128[10],3);
+    y128[11] = shiftright_int16(y128[11],3);
+    y128[12] = shiftright_int16(y128[12],3);
+    y128[13] = shiftright_int16(y128[13],3);
+    y128[14] = shiftright_int16(y128[14],3);
+    y128[15] = shiftright_int16(y128[15],3);
+  }
+
+}
+
+void idft64(int16_t *x,int16_t *y,unsigned char scale)
+{
+
+  simd_q15_t xtmp[16],ytmp[16],*tw64a_128=(simd_q15_t *)tw64,*tw64b_128=(simd_q15_t *)tw64c,*x128=(simd_q15_t *)x,*y128=(simd_q15_t *)y;
+
+
+#ifdef D64STATS
+  time_stats_t ts_t,ts_d,ts_b;
+
+  reset_meas(&ts_t);
+  reset_meas(&ts_d);
+  reset_meas(&ts_b);
+  start_meas(&ts_t);
+#endif
+
+
+  transpose16_ooff(x128,xtmp,4);
+  transpose16_ooff(x128+4,xtmp+1,4);
+  transpose16_ooff(x128+8,xtmp+2,4);
+  transpose16_ooff(x128+12,xtmp+3,4);
+
+
+#ifdef D64STATS
+  stop_meas(&ts_t);
+  start_meas(&ts_d);
+#endif
+
+
+  idft16((int16_t*)(xtmp),(int16_t*)ytmp);
+  idft16((int16_t*)(xtmp+4),(int16_t*)(ytmp+4));
+  idft16((int16_t*)(xtmp+8),(int16_t*)(ytmp+8));
+  idft16((int16_t*)(xtmp+12),(int16_t*)(ytmp+12));
+
+
+#ifdef D64STATS
+  stop_meas(&ts_d);
+  start_meas(&ts_b);
+#endif
+
+
+  ibfly4_16(ytmp,ytmp+4,ytmp+8,ytmp+12,
+            y128,y128+4,y128+8,y128+12,
+            tw64a_128,tw64a_128+4,tw64a_128+8,
+            tw64b_128,tw64b_128+4,tw64b_128+8);
+  ibfly4_16(ytmp+1,ytmp+5,ytmp+9,ytmp+13,
+            y128+1,y128+5,y128+9,y128+13,
+            tw64a_128+1,tw64a_128+5,tw64a_128+9,
+            tw64b_128+1,tw64b_128+5,tw64b_128+9);
+
+  ibfly4_16(ytmp+2,ytmp+6,ytmp+10,ytmp+14,
+            y128+2,y128+6,y128+10,y128+14,
+            tw64a_128+2,tw64a_128+6,tw64a_128+10,
+            tw64b_128+2,tw64b_128+6,tw64b_128+10);
+
+  ibfly4_16(ytmp+3,ytmp+7,ytmp+11,ytmp+15,
+            y128+3,y128+7,y128+11,y128+15,
+            tw64a_128+3,tw64a_128+7,tw64a_128+11,
+            tw64b_128+3,tw64b_128+7,tw64b_128+11);
+
+#ifdef D64STATS
+  stop_meas(&ts_b);
+  printf("t: %llu cycles, d: %llu cycles, b: %llu cycles\n",ts_t.diff,ts_d.diff,ts_b.diff);
+#endif
+
+
+  if (scale>0) {
+
+    y128[0]  = shiftright_int16(y128[0],3);
+    y128[1]  = shiftright_int16(y128[1],3);
+    y128[2]  = shiftright_int16(y128[2],3);
+    y128[3]  = shiftright_int16(y128[3],3);
+    y128[4]  = shiftright_int16(y128[4],3);
+    y128[5]  = shiftright_int16(y128[5],3);
+    y128[6]  = shiftright_int16(y128[6],3);
+    y128[7]  = shiftright_int16(y128[7],3);
+    y128[8]  = shiftright_int16(y128[8],3);
+    y128[9]  = shiftright_int16(y128[9],3);
+    y128[10] = shiftright_int16(y128[10],3);
+    y128[11] = shiftright_int16(y128[11],3);
+    y128[12] = shiftright_int16(y128[12],3);
+    y128[13] = shiftright_int16(y128[13],3);
+    y128[14] = shiftright_int16(y128[14],3);
+    y128[15] = shiftright_int16(y128[15],3);
+
+  }
+
+}
+
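+// 128 = 2 x 64: the transpose4_ooff runs below de-interleave even/odd samples
+// into two 64-point inputs, each dft64/idft64 runs with its internal 1/8
+// scaling, and sixteen radix-2 butterflies (bfly2_16/ibfly2) with the tw128*
+// twiddles (W128^n, n=0..63) recombine them. The final 1/sqrt(2) multiply
+// makes the overall scaling 1/(8*sqrt(2)) = 1/sqrt(128) when scale>0.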
+int16_t tw128[128] __attribute__((aligned(32))) = {  32767,0,32727,-1608,32609,-3212,32412,-4808,32137,-6393,31785,-7962,31356,-9512,30851,-11039,30272,-12540,29621,-14010,28897,-15447,28105,-16846,27244,-18205,26318,-19520,25329,-20788,24278,-22005,23169,-23170,22004,-24279,20787,-25330,19519,-26319,18204,-27245,16845,-28106,15446,-28898,14009,-29622,12539,-30273,11038,-30852,9511,-31357,7961,-31786,6392,-32138,4807,-32413,3211,-32610,1607,-32728,0,-32767,-1608,-32728,-3212,-32610,-4808,-32413,-6393,-32138,-7962,-31786,-9512,-31357,-11039,-30852,-12540,-30273,-14010,-29622,-15447,-28898,-16846,-28106,-18205,-27245,-19520,-26319,-20788,-25330,-22005,-24279,-23170,-23170,-24279,-22005,-25330,-20788,-26319,-19520,-27245,-18205,-28106,-16846,-28898,-15447,-29622,-14010,-30273,-12540,-30852,-11039,-31357,-9512,-31786,-7962,-32138,-6393,-32413,-4808,-32610,-3212,-32728,-1608};
+
+int16_t tw128a[128] __attribute__((aligned(32))) = { 32767,0,32727,1608,32609,3212,32412,4808,32137,6393,31785,7962,31356,9512,30851,11039,30272,12540,29621,14010,28897,15447,28105,16846,27244,18205,26318,19520,25329,20788,24278,22005,23169,23170,22004,24279,20787,25330,19519,26319,18204,27245,16845,28106,15446,28898,14009,29622,12539,30273,11038,30852,9511,31357,7961,31786,6392,32138,4807,32413,3211,32610,1607,32728,0,32767,-1608,32728,-3212,32610,-4808,32413,-6393,32138,-7962,31786,-9512,31357,-11039,30852,-12540,30273,-14010,29622,-15447,28898,-16846,28106,-18205,27245,-19520,26319,-20788,25330,-22005,24279,-23170,23170,-24279,22005,-25330,20788,-26319,19520,-27245,18205,-28106,16846,-28898,15447,-29622,14010,-30273,12540,-30852,11039,-31357,9512,-31786,7962,-32138,6393,-32413,4808,-32610,3212,-32728,1608};
+
+int16_t tw128b[128] __attribute__((aligned(32))) = {0,32767,-1608,32727,-3212,32609,-4808,32412,-6393,32137,-7962,31785,-9512,31356,-11039,30851,-12540,30272,-14010,29621,-15447,28897,-16846,28105,-18205,27244,-19520,26318,-20788,25329,-22005,24278,-23170,23169,-24279,22004,-25330,20787,-26319,19519,-27245,18204,-28106,16845,-28898,15446,-29622,14009,-30273,12539,-30852,11038,-31357,9511,-31786,7961,-32138,6392,-32413,4807,-32610,3211,-32728,1607,-32767,0,-32728,-1608,-32610,-3212,-32413,-4808,-32138,-6393,-31786,-7962,-31357,-9512,-30852,-11039,-30273,-12540,-29622,-14010,-28898,-15447,-28106,-16846,-27245,-18205,-26319,-19520,-25330,-20788,-24279,-22005,-23170,-23170,-22005,-24279,-20788,-25330,-19520,-26319,-18205,-27245,-16846,-28106,-15447,-28898,-14010,-29622,-12540,-30273,-11039,-30852,-9512,-31357,-7962,-31786,-6393,-32138,-4808,-32413,-3212,-32610,-1608,-32728};
+
+int16_t tw128c[128] __attribute__((aligned(32))) = {0,32767,1608,32727,3212,32609,4808,32412,6393,32137,7962,31785,9512,31356,11039,30851,12540,30272,14010,29621,15447,28897,16846,28105,18205,27244,19520,26318,20788,25329,22005,24278,23170,23169,24279,22004,25330,20787,26319,19519,27245,18204,28106,16845,28898,15446,29622,14009,30273,12539,30852,11038,31357,9511,31786,7961,32138,6392,32413,4807,32610,3211,32728,1607,32767,0,32728,-1608,32610,-3212,32413,-4808,32138,-6393,31786,-7962,31357,-9512,30852,-11039,30273,-12540,29622,-14010,28898,-15447,28106,-16846,27245,-18205,26319,-19520,25330,-20788,24279,-22005,23170,-23170,22005,-24279,20788,-25330,19520,-26319,18205,-27245,16846,-28106,15447,-28898,14010,-29622,12540,-30273,11039,-30852,9512,-31357,7962,-31786,6393,-32138,4808,-32413,3212,-32610,1608,-32728};
+
+void dft128(int16_t *x,int16_t *y,unsigned char scale)
+{
+
+  simdshort_q15_t xtmp[64],*x64 = (simdshort_q15_t *)x;
+  simd_q15_t ytmp[32],*tw128a_128p=(simd_q15_t *)tw128a,*tw128b_128p=(simd_q15_t *)tw128b,*y128=(simd_q15_t *)y,*y128p=(simd_q15_t *)y;
+  simd_q15_t *ytmpp = &ytmp[0];
+  int i;
+  simd_q15_t ONE_OVER_SQRT2_Q15_128 = set1_int16(ONE_OVER_SQRT2_Q15);
+
+
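+  // Unrolled even/odd de-interleave; equivalent to
+  //   for (i=0; i<32; i++) transpose4_ooff(x64+2*i, xtmp+i, 32);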
+  transpose4_ooff(x64  ,xtmp,32);
+  transpose4_ooff(x64+2,xtmp+1,32);
+  transpose4_ooff(x64+4,xtmp+2,32);
+  transpose4_ooff(x64+6,xtmp+3,32);
+  transpose4_ooff(x64+8,xtmp+4,32);
+  transpose4_ooff(x64+10,xtmp+5,32);
+  transpose4_ooff(x64+12,xtmp+6,32);
+  transpose4_ooff(x64+14,xtmp+7,32);
+  transpose4_ooff(x64+16,xtmp+8,32);
+  transpose4_ooff(x64+18,xtmp+9,32);
+  transpose4_ooff(x64+20,xtmp+10,32);
+  transpose4_ooff(x64+22,xtmp+11,32);
+  transpose4_ooff(x64+24,xtmp+12,32);
+  transpose4_ooff(x64+26,xtmp+13,32);
+  transpose4_ooff(x64+28,xtmp+14,32);
+  transpose4_ooff(x64+30,xtmp+15,32);
+  transpose4_ooff(x64+32,xtmp+16,32);
+  transpose4_ooff(x64+34,xtmp+17,32);
+  transpose4_ooff(x64+36,xtmp+18,32);
+  transpose4_ooff(x64+38,xtmp+19,32);
+  transpose4_ooff(x64+40,xtmp+20,32);
+  transpose4_ooff(x64+42,xtmp+21,32);
+  transpose4_ooff(x64+44,xtmp+22,32);
+  transpose4_ooff(x64+46,xtmp+23,32);
+  transpose4_ooff(x64+48,xtmp+24,32);
+  transpose4_ooff(x64+50,xtmp+25,32);
+  transpose4_ooff(x64+52,xtmp+26,32);
+  transpose4_ooff(x64+54,xtmp+27,32);
+  transpose4_ooff(x64+56,xtmp+28,32);
+  transpose4_ooff(x64+58,xtmp+29,32);
+  transpose4_ooff(x64+60,xtmp+30,32);
+  transpose4_ooff(x64+62,xtmp+31,32);
+
+  dft64((int16_t*)(xtmp),(int16_t*)ytmp,1);
+  dft64((int16_t*)(xtmp+32),(int16_t*)(ytmp+16),1);
+#ifndef MR_MAIN
+  if (LOG_DUMPFLAG(DEBUG_DFT)) {
+    LOG_M("dft128a.m","dfta",ytmp,64,1,1);
+    LOG_M("dft128b.m","dftb",ytmp+16,64,1,1);
+  }
+#endif
+  for (i=0; i<16; i++) {
+    bfly2_16(ytmpp,ytmpp+16,
+             y128p,y128p+16,
+             tw128a_128p,
+             tw128b_128p);
+    tw128a_128p++;
+    tw128b_128p++;
+    y128p++;
+    ytmpp++;
+  }
+
+  if (scale>0) {
+
+    y128[0] = mulhi_int16(y128[0],ONE_OVER_SQRT2_Q15_128);
+    y128[1] = mulhi_int16(y128[1],ONE_OVER_SQRT2_Q15_128);
+    y128[2] = mulhi_int16(y128[2],ONE_OVER_SQRT2_Q15_128);
+    y128[3] = mulhi_int16(y128[3],ONE_OVER_SQRT2_Q15_128);
+    y128[4] = mulhi_int16(y128[4],ONE_OVER_SQRT2_Q15_128);
+    y128[5] = mulhi_int16(y128[5],ONE_OVER_SQRT2_Q15_128);
+    y128[6] = mulhi_int16(y128[6],ONE_OVER_SQRT2_Q15_128);
+    y128[7] = mulhi_int16(y128[7],ONE_OVER_SQRT2_Q15_128);
+    y128[8] = mulhi_int16(y128[8],ONE_OVER_SQRT2_Q15_128);
+    y128[9] = mulhi_int16(y128[9],ONE_OVER_SQRT2_Q15_128);
+    y128[10] = mulhi_int16(y128[10],ONE_OVER_SQRT2_Q15_128);
+    y128[11] = mulhi_int16(y128[11],ONE_OVER_SQRT2_Q15_128);
+    y128[12] = mulhi_int16(y128[12],ONE_OVER_SQRT2_Q15_128);
+    y128[13] = mulhi_int16(y128[13],ONE_OVER_SQRT2_Q15_128);
+    y128[14] = mulhi_int16(y128[14],ONE_OVER_SQRT2_Q15_128);
+    y128[15] = mulhi_int16(y128[15],ONE_OVER_SQRT2_Q15_128);
+    y128[16] = mulhi_int16(y128[16],ONE_OVER_SQRT2_Q15_128);
+    y128[17] = mulhi_int16(y128[17],ONE_OVER_SQRT2_Q15_128);
+    y128[18] = mulhi_int16(y128[18],ONE_OVER_SQRT2_Q15_128);
+    y128[19] = mulhi_int16(y128[19],ONE_OVER_SQRT2_Q15_128);
+    y128[20] = mulhi_int16(y128[20],ONE_OVER_SQRT2_Q15_128);
+    y128[21] = mulhi_int16(y128[21],ONE_OVER_SQRT2_Q15_128);
+    y128[22] = mulhi_int16(y128[22],ONE_OVER_SQRT2_Q15_128);
+    y128[23] = mulhi_int16(y128[23],ONE_OVER_SQRT2_Q15_128);
+    y128[24] = mulhi_int16(y128[24],ONE_OVER_SQRT2_Q15_128);
+    y128[25] = mulhi_int16(y128[25],ONE_OVER_SQRT2_Q15_128);
+    y128[26] = mulhi_int16(y128[26],ONE_OVER_SQRT2_Q15_128);
+    y128[27] = mulhi_int16(y128[27],ONE_OVER_SQRT2_Q15_128);
+    y128[28] = mulhi_int16(y128[28],ONE_OVER_SQRT2_Q15_128);
+    y128[29] = mulhi_int16(y128[29],ONE_OVER_SQRT2_Q15_128);
+    y128[30] = mulhi_int16(y128[30],ONE_OVER_SQRT2_Q15_128);
+    y128[31] = mulhi_int16(y128[31],ONE_OVER_SQRT2_Q15_128);
+
+
+  }
+#ifndef MR_MAIN
+  if (LOG_DUMPFLAG(DEBUG_DFT)) {
+     LOG_M("dft128out.m","dft128",y,128,1,1);
+     exit(-1);
+  }
+#endif
+}
+
+void idft128(int16_t *x,int16_t *y,unsigned char scale)
+{
+
+  simdshort_q15_t xtmp[64],*x64 = (simdshort_q15_t *)x;
+  simd_q15_t ytmp[32],*tw128_128p=(simd_q15_t *)tw128,*y128=(simd_q15_t *)y,*y128p=(simd_q15_t *)y;
+  simd_q15_t *ytmpp = &ytmp[0];
+  int i;
+  simd_q15_t ONE_OVER_SQRT2_Q15_128 = set1_int16(ONE_OVER_SQRT2_Q15);
+
+
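+  // Unrolled even/odd de-interleave; equivalent to
+  //   for (i=0; i<32; i++) transpose4_ooff(x64+2*i, xtmp+i, 32);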
+  transpose4_ooff(x64  ,xtmp,32);
+  transpose4_ooff(x64+2,xtmp+1,32);
+  transpose4_ooff(x64+4,xtmp+2,32);
+  transpose4_ooff(x64+6,xtmp+3,32);
+  transpose4_ooff(x64+8,xtmp+4,32);
+  transpose4_ooff(x64+10,xtmp+5,32);
+  transpose4_ooff(x64+12,xtmp+6,32);
+  transpose4_ooff(x64+14,xtmp+7,32);
+  transpose4_ooff(x64+16,xtmp+8,32);
+  transpose4_ooff(x64+18,xtmp+9,32);
+  transpose4_ooff(x64+20,xtmp+10,32);
+  transpose4_ooff(x64+22,xtmp+11,32);
+  transpose4_ooff(x64+24,xtmp+12,32);
+  transpose4_ooff(x64+26,xtmp+13,32);
+  transpose4_ooff(x64+28,xtmp+14,32);
+  transpose4_ooff(x64+30,xtmp+15,32);
+  transpose4_ooff(x64+32,xtmp+16,32);
+  transpose4_ooff(x64+34,xtmp+17,32);
+  transpose4_ooff(x64+36,xtmp+18,32);
+  transpose4_ooff(x64+38,xtmp+19,32);
+  transpose4_ooff(x64+40,xtmp+20,32);
+  transpose4_ooff(x64+42,xtmp+21,32);
+  transpose4_ooff(x64+44,xtmp+22,32);
+  transpose4_ooff(x64+46,xtmp+23,32);
+  transpose4_ooff(x64+48,xtmp+24,32);
+  transpose4_ooff(x64+50,xtmp+25,32);
+  transpose4_ooff(x64+52,xtmp+26,32);
+  transpose4_ooff(x64+54,xtmp+27,32);
+  transpose4_ooff(x64+56,xtmp+28,32);
+  transpose4_ooff(x64+58,xtmp+29,32);
+  transpose4_ooff(x64+60,xtmp+30,32);
+  transpose4_ooff(x64+62,xtmp+31,32);
+
+  idft64((int16_t*)(xtmp),(int16_t*)ytmp,1);
+  idft64((int16_t*)(xtmp+32),(int16_t*)(ytmp+16),1);
+
+
+  for (i=0; i<16; i++) {
+    ibfly2(ytmpp,ytmpp+16,
+           y128p,y128p+16,
+           tw128_128p);
+    tw128_128p++;
+    y128p++;
+    ytmpp++;
+  }
+
+  if (scale>0) {
+
+    y128[0]  = mulhi_int16(y128[0],ONE_OVER_SQRT2_Q15_128);
+    y128[1]  = mulhi_int16(y128[1],ONE_OVER_SQRT2_Q15_128);
+    y128[2]  = mulhi_int16(y128[2],ONE_OVER_SQRT2_Q15_128);
+    y128[3]  = mulhi_int16(y128[3],ONE_OVER_SQRT2_Q15_128);
+    y128[4]  = mulhi_int16(y128[4],ONE_OVER_SQRT2_Q15_128);
+    y128[5]  = mulhi_int16(y128[5],ONE_OVER_SQRT2_Q15_128);
+    y128[6]  = mulhi_int16(y128[6],ONE_OVER_SQRT2_Q15_128);
+    y128[7]  = mulhi_int16(y128[7],ONE_OVER_SQRT2_Q15_128);
+    y128[8]  = mulhi_int16(y128[8],ONE_OVER_SQRT2_Q15_128);
+    y128[9]  = mulhi_int16(y128[9],ONE_OVER_SQRT2_Q15_128);
+    y128[10] = mulhi_int16(y128[10],ONE_OVER_SQRT2_Q15_128);
+    y128[11] = mulhi_int16(y128[11],ONE_OVER_SQRT2_Q15_128);
+    y128[12] = mulhi_int16(y128[12],ONE_OVER_SQRT2_Q15_128);
+    y128[13] = mulhi_int16(y128[13],ONE_OVER_SQRT2_Q15_128);
+    y128[14] = mulhi_int16(y128[14],ONE_OVER_SQRT2_Q15_128);
+    y128[15] = mulhi_int16(y128[15],ONE_OVER_SQRT2_Q15_128);
+    y128[16] = mulhi_int16(y128[16],ONE_OVER_SQRT2_Q15_128);
+    y128[17] = mulhi_int16(y128[17],ONE_OVER_SQRT2_Q15_128);
+    y128[18] = mulhi_int16(y128[18],ONE_OVER_SQRT2_Q15_128);
+    y128[19] = mulhi_int16(y128[19],ONE_OVER_SQRT2_Q15_128);
+    y128[20] = mulhi_int16(y128[20],ONE_OVER_SQRT2_Q15_128);
+    y128[21] = mulhi_int16(y128[21],ONE_OVER_SQRT2_Q15_128);
+    y128[22] = mulhi_int16(y128[22],ONE_OVER_SQRT2_Q15_128);
+    y128[23] = mulhi_int16(y128[23],ONE_OVER_SQRT2_Q15_128);
+    y128[24] = mulhi_int16(y128[24],ONE_OVER_SQRT2_Q15_128);
+    y128[25] = mulhi_int16(y128[25],ONE_OVER_SQRT2_Q15_128);
+    y128[26] = mulhi_int16(y128[26],ONE_OVER_SQRT2_Q15_128);
+    y128[27] = mulhi_int16(y128[27],ONE_OVER_SQRT2_Q15_128);
+    y128[28] = mulhi_int16(y128[28],ONE_OVER_SQRT2_Q15_128);
+    y128[29] = mulhi_int16(y128[29],ONE_OVER_SQRT2_Q15_128);
+    y128[30] = mulhi_int16(y128[30],ONE_OVER_SQRT2_Q15_128);
+    y128[31] = mulhi_int16(y128[31],ONE_OVER_SQRT2_Q15_128);
+
+  }
+
+}
+
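+// 256 = 4 x 64, the dft64 recipe one level up: sixteen transpose16_ooff calls
+// gather four 64-sample columns, four dft64/idft64 runs follow (each with
+// internal 1/8 scaling), and radix-4 butterflies with the tw256* tables (rows
+// k=1..3 of W256^(n*k)) recombine them; the optional final >>1 brings the
+// overall scaling to 1/16 = 1/sqrt(256).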
+int16_t tw256[384] __attribute__((aligned(32))) = {  32767,0,32757,-805,32727,-1608,32678,-2411,32609,-3212,32520,-4012,32412,-4808,32284,-5602,32137,-6393,31970,-7180,31785,-7962,31580,-8740,31356,-9512,31113,-10279,30851,-11039,30571,-11793,30272,-12540,29955,-13279,29621,-14010,29268,-14733,28897,-15447,28510,-16151,28105,-16846,27683,-17531,27244,-18205,26789,-18868,26318,-19520,25831,-20160,25329,-20788,24811,-21403,24278,-22005,23731,-22595,23169,-23170,22594,-23732,22004,-24279,21402,-24812,20787,-25330,20159,-25832,19519,-26319,18867,-26790,18204,-27245,17530,-27684,16845,-28106,16150,-28511,15446,-28898,14732,-29269,14009,-29622,13278,-29956,12539,-30273,11792,-30572,11038,-30852,10278,-31114,9511,-31357,8739,-31581,7961,-31786,7179,-31971,6392,-32138,5601,-32285,4807,-32413,4011,-32521,3211,-32610,2410,-32679,1607,-32728,804,-32758,
+                                                     32767,0,32727,-1608,32609,-3212,32412,-4808,32137,-6393,31785,-7962,31356,-9512,30851,-11039,30272,-12540,29621,-14010,28897,-15447,28105,-16846,27244,-18205,26318,-19520,25329,-20788,24278,-22005,23169,-23170,22004,-24279,20787,-25330,19519,-26319,18204,-27245,16845,-28106,15446,-28898,14009,-29622,12539,-30273,11038,-30852,9511,-31357,7961,-31786,6392,-32138,4807,-32413,3211,-32610,1607,-32728,0,-32767,-1608,-32728,-3212,-32610,-4808,-32413,-6393,-32138,-7962,-31786,-9512,-31357,-11039,-30852,-12540,-30273,-14010,-29622,-15447,-28898,-16846,-28106,-18205,-27245,-19520,-26319,-20788,-25330,-22005,-24279,-23170,-23170,-24279,-22005,-25330,-20788,-26319,-19520,-27245,-18205,-28106,-16846,-28898,-15447,-29622,-14010,-30273,-12540,-30852,-11039,-31357,-9512,-31786,-7962,-32138,-6393,-32413,-4808,-32610,-3212,-32728,-1608,
+                                                     32767,0,32678,-2411,32412,-4808,31970,-7180,31356,-9512,30571,-11793,29621,-14010,28510,-16151,27244,-18205,25831,-20160,24278,-22005,22594,-23732,20787,-25330,18867,-26790,16845,-28106,14732,-29269,12539,-30273,10278,-31114,7961,-31786,5601,-32285,3211,-32610,804,-32758,-1608,-32728,-4012,-32521,-6393,-32138,-8740,-31581,-11039,-30852,-13279,-29956,-15447,-28898,-17531,-27684,-19520,-26319,-21403,-24812,-23170,-23170,-24812,-21403,-26319,-19520,-27684,-17531,-28898,-15447,-29956,-13279,-30852,-11039,-31581,-8740,-32138,-6393,-32521,-4012,-32728,-1608,-32758,804,-32610,3211,-32285,5601,-31786,7961,-31114,10278,-30273,12539,-29269,14732,-28106,16845,-26790,18867,-25330,20787,-23732,22594,-22005,24278,-20160,25831,-18205,27244,-16151,28510,-14010,29621,-11793,30571,-9512,31356,-7180,31970,-4808,32412,-2411,32678
+                                                  };
+
+int16_t tw256a[384] __attribute__((aligned(32))) = { 32767,0,32757,804,32727,1607,32678,2410,32609,3211,32520,4011,32412,4807,32284,5601,32137,6392,31970,7179,31785,7961,31580,8739,31356,9511,31113,10278,30851,11038,30571,11792,30272,12539,29955,13278,29621,14009,29268,14732,28897,15446,28510,16150,28105,16845,27683,17530,27244,18204,26789,18867,26318,19519,25831,20159,25329,20787,24811,21402,24278,22004,23731,22594,23169,23169,22594,23731,22004,24278,21402,24811,20787,25329,20159,25831,19519,26318,18867,26789,18204,27244,17530,27683,16845,28105,16150,28510,15446,28897,14732,29268,14009,29621,13278,29955,12539,30272,11792,30571,11038,30851,10278,31113,9511,31356,8739,31580,7961,31785,7179,31970,6392,32137,5601,32284,4807,32412,4011,32520,3211,32609,2410,32678,1607,32727,804,32757,
+                                                     32767,0,32727,1607,32609,3211,32412,4807,32137,6392,31785,7961,31356,9511,30851,11038,30272,12539,29621,14009,28897,15446,28105,16845,27244,18204,26318,19519,25329,20787,24278,22004,23169,23169,22004,24278,20787,25329,19519,26318,18204,27244,16845,28105,15446,28897,14009,29621,12539,30272,11038,30851,9511,31356,7961,31785,6392,32137,4807,32412,3211,32609,1607,32727,0,32767,-1608,32727,-3212,32609,-4808,32412,-6393,32137,-7962,31785,-9512,31356,-11039,30851,-12540,30272,-14010,29621,-15447,28897,-16846,28105,-18205,27244,-19520,26318,-20788,25329,-22005,24278,-23170,23169,-24279,22004,-25330,20787,-26319,19519,-27245,18204,-28106,16845,-28898,15446,-29622,14009,-30273,12539,-30852,11038,-31357,9511,-31786,7961,-32138,6392,-32413,4807,-32610,3211,-32728,1607,
+                                                     32767,0,32678,2410,32412,4807,31970,7179,31356,9511,30571,11792,29621,14009,28510,16150,27244,18204,25831,20159,24278,22004,22594,23731,20787,25329,18867,26789,16845,28105,14732,29268,12539,30272,10278,31113,7961,31785,5601,32284,3211,32609,804,32757,-1608,32727,-4012,32520,-6393,32137,-8740,31580,-11039,30851,-13279,29955,-15447,28897,-17531,27683,-19520,26318,-21403,24811,-23170,23169,-24812,21402,-26319,19519,-27684,17530,-28898,15446,-29956,13278,-30852,11038,-31581,8739,-32138,6392,-32521,4011,-32728,1607,-32758,-805,-32610,-3212,-32285,-5602,-31786,-7962,-31114,-10279,-30273,-12540,-29269,-14733,-28106,-16846,-26790,-18868,-25330,-20788,-23732,-22595,-22005,-24279,-20160,-25832,-18205,-27245,-16151,-28511,-14010,-29622,-11793,-30572,-9512,-31357,-7180,-31971,-4808,-32413,-2411,-32679
+                                                   };
+
+int16_t tw256b[384] __attribute__((aligned(32))) = {0,32767,-805,32757,-1608,32727,-2411,32678,-3212,32609,-4012,32520,-4808,32412,-5602,32284,-6393,32137,-7180,31970,-7962,31785,-8740,31580,-9512,31356,-10279,31113,-11039,30851,-11793,30571,-12540,30272,-13279,29955,-14010,29621,-14733,29268,-15447,28897,-16151,28510,-16846,28105,-17531,27683,-18205,27244,-18868,26789,-19520,26318,-20160,25831,-20788,25329,-21403,24811,-22005,24278,-22595,23731,-23170,23169,-23732,22594,-24279,22004,-24812,21402,-25330,20787,-25832,20159,-26319,19519,-26790,18867,-27245,18204,-27684,17530,-28106,16845,-28511,16150,-28898,15446,-29269,14732,-29622,14009,-29956,13278,-30273,12539,-30572,11792,-30852,11038,-31114,10278,-31357,9511,-31581,8739,-31786,7961,-31971,7179,-32138,6392,-32285,5601,-32413,4807,-32521,4011,-32610,3211,-32679,2410,-32728,1607,-32758,804,
+                                                    0,32767,-1608,32727,-3212,32609,-4808,32412,-6393,32137,-7962,31785,-9512,31356,-11039,30851,-12540,30272,-14010,29621,-15447,28897,-16846,28105,-18205,27244,-19520,26318,-20788,25329,-22005,24278,-23170,23169,-24279,22004,-25330,20787,-26319,19519,-27245,18204,-28106,16845,-28898,15446,-29622,14009,-30273,12539,-30852,11038,-31357,9511,-31786,7961,-32138,6392,-32413,4807,-32610,3211,-32728,1607,-32767,0,-32728,-1608,-32610,-3212,-32413,-4808,-32138,-6393,-31786,-7962,-31357,-9512,-30852,-11039,-30273,-12540,-29622,-14010,-28898,-15447,-28106,-16846,-27245,-18205,-26319,-19520,-25330,-20788,-24279,-22005,-23170,-23170,-22005,-24279,-20788,-25330,-19520,-26319,-18205,-27245,-16846,-28106,-15447,-28898,-14010,-29622,-12540,-30273,-11039,-30852,-9512,-31357,-7962,-31786,-6393,-32138,-4808,-32413,-3212,-32610,-1608,-32728,
+                                                    0,32767,-2411,32678,-4808,32412,-7180,31970,-9512,31356,-11793,30571,-14010,29621,-16151,28510,-18205,27244,-20160,25831,-22005,24278,-23732,22594,-25330,20787,-26790,18867,-28106,16845,-29269,14732,-30273,12539,-31114,10278,-31786,7961,-32285,5601,-32610,3211,-32758,804,-32728,-1608,-32521,-4012,-32138,-6393,-31581,-8740,-30852,-11039,-29956,-13279,-28898,-15447,-27684,-17531,-26319,-19520,-24812,-21403,-23170,-23170,-21403,-24812,-19520,-26319,-17531,-27684,-15447,-28898,-13279,-29956,-11039,-30852,-8740,-31581,-6393,-32138,-4012,-32521,-1608,-32728,804,-32758,3211,-32610,5601,-32285,7961,-31786,10278,-31114,12539,-30273,14732,-29269,16845,-28106,18867,-26790,20787,-25330,22594,-23732,24278,-22005,25831,-20160,27244,-18205,28510,-16151,29621,-14010,30571,-11793,31356,-9512,31970,-7180,32412,-4808,32678,-2411
+                                                   };
+
+void dft256(int16_t *x,int16_t *y,unsigned char scale)
+{
+
+  simd_q15_t xtmp[64],ytmp[64],*tw256a_128p=(simd_q15_t *)tw256a,*tw256b_128p=(simd_q15_t *)tw256b,*x128=(simd_q15_t *)x,*y128=(simd_q15_t *)y,*y128p=(simd_q15_t *)y;
+  simd_q15_t *ytmpp = &ytmp[0];
+  int i;
+
+#ifdef D256STATS
+  time_stats_t ts_t,ts_d,ts_b;
+
+  reset_meas(&ts_t);
+  reset_meas(&ts_d);
+  reset_meas(&ts_b);
+  start_meas(&ts_t);
+#endif
+  /*
+  for (i=0,j=0;i<64;i+=4,j++) {
+    transpose16_ooff(x128+i,xtmp+j,16);
+  }
+  */
+  transpose16_ooff(x128+0,xtmp+0,16);
+  transpose16_ooff(x128+4,xtmp+1,16);
+  transpose16_ooff(x128+8,xtmp+2,16);
+  transpose16_ooff(x128+12,xtmp+3,16);
+  transpose16_ooff(x128+16,xtmp+4,16);
+  transpose16_ooff(x128+20,xtmp+5,16);
+  transpose16_ooff(x128+24,xtmp+6,16);
+  transpose16_ooff(x128+28,xtmp+7,16);
+  transpose16_ooff(x128+32,xtmp+8,16);
+  transpose16_ooff(x128+36,xtmp+9,16);
+  transpose16_ooff(x128+40,xtmp+10,16);
+  transpose16_ooff(x128+44,xtmp+11,16);
+  transpose16_ooff(x128+48,xtmp+12,16);
+  transpose16_ooff(x128+52,xtmp+13,16);
+  transpose16_ooff(x128+56,xtmp+14,16);
+  transpose16_ooff(x128+60,xtmp+15,16);
+
+#ifdef D256STATS
+  stop_meas(&ts_t);
+  start_meas(&ts_d);
+#endif
+
+  dft64((int16_t*)(xtmp),(int16_t*)(ytmp),1);
+  dft64((int16_t*)(xtmp+16),(int16_t*)(ytmp+16),1);
+  dft64((int16_t*)(xtmp+32),(int16_t*)(ytmp+32),1);
+  dft64((int16_t*)(xtmp+48),(int16_t*)(ytmp+48),1);
+
+#ifdef D256STATS
+  stop_meas(&ts_d);
+  start_meas(&ts_b);
+#endif
+
+  for (i=0; i<16; i+=4) {
+    bfly4_16(ytmpp,ytmpp+16,ytmpp+32,ytmpp+48,
+             y128p,y128p+16,y128p+32,y128p+48,
+             tw256a_128p,tw256a_128p+16,tw256a_128p+32,
+             tw256b_128p,tw256b_128p+16,tw256b_128p+32);
+    bfly4_16(ytmpp+1,ytmpp+17,ytmpp+33,ytmpp+49,
+             y128p+1,y128p+17,y128p+33,y128p+49,
+             tw256a_128p+1,tw256a_128p+17,tw256a_128p+33,
+             tw256b_128p+1,tw256b_128p+17,tw256b_128p+33);
+    bfly4_16(ytmpp+2,ytmpp+18,ytmpp+34,ytmpp+50,
+             y128p+2,y128p+18,y128p+34,y128p+50,
+             tw256a_128p+2,tw256a_128p+18,tw256a_128p+34,
+             tw256b_128p+2,tw256b_128p+18,tw256b_128p+34);
+    bfly4_16(ytmpp+3,ytmpp+19,ytmpp+35,ytmpp+51,
+             y128p+3,y128p+19,y128p+35,y128p+51,
+             tw256a_128p+3,tw256a_128p+19,tw256a_128p+35,
+             tw256b_128p+3,tw256b_128p+19,tw256b_128p+35);
+    tw256a_128p+=4;
+    tw256b_128p+=4;
+    y128p+=4;
+    ytmpp+=4;
+  }
+
+#ifdef D256STATS
+  stop_meas(&ts_b);
+  printf("t: %llu cycles, d: %llu cycles, b: %llu cycles\n",ts_t.diff,ts_d.diff,ts_b.diff);
+#endif
+
+  if (scale>0) {
+
+    for (i=0; i<4; i++) {
+      y128[0]  = shiftright_int16(y128[0],1);
+      y128[1]  = shiftright_int16(y128[1],1);
+      y128[2]  = shiftright_int16(y128[2],1);
+      y128[3]  = shiftright_int16(y128[3],1);
+      y128[4]  = shiftright_int16(y128[4],1);
+      y128[5]  = shiftright_int16(y128[5],1);
+      y128[6]  = shiftright_int16(y128[6],1);
+      y128[7]  = shiftright_int16(y128[7],1);
+      y128[8]  = shiftright_int16(y128[8],1);
+      y128[9]  = shiftright_int16(y128[9],1);
+      y128[10] = shiftright_int16(y128[10],1);
+      y128[11] = shiftright_int16(y128[11],1);
+      y128[12] = shiftright_int16(y128[12],1);
+      y128[13] = shiftright_int16(y128[13],1);
+      y128[14] = shiftright_int16(y128[14],1);
+      y128[15] = shiftright_int16(y128[15],1);
+
+      y128+=16;
+    }
+
+  }
+}
+
+
+
+void idft256(int16_t *x,int16_t *y,unsigned char scale)
+{
+
+  simd_q15_t xtmp[64],ytmp[64],*tw256_128p=(simd_q15_t *)tw256,*x128=(simd_q15_t *)x,*y128=(simd_q15_t *)y,*y128p=(simd_q15_t *)y;
+  simd_q15_t *ytmpp = &ytmp[0];
+  int i,j;
+
+  for (i=0,j=0; i<64; i+=4,j++) {
+    transpose16_ooff(x128+i,xtmp+j,16);
+  }
+
+
+  idft64((int16_t*)(xtmp),(int16_t*)(ytmp),1);
+  idft64((int16_t*)(xtmp+16),(int16_t*)(ytmp+16),1);
+  idft64((int16_t*)(xtmp+32),(int16_t*)(ytmp+32),1);
+  idft64((int16_t*)(xtmp+48),(int16_t*)(ytmp+48),1);
+
+  for (i=0; i<16; i++) {
+    ibfly4(ytmpp,ytmpp+16,ytmpp+32,ytmpp+48,
+           y128p,y128p+16,y128p+32,y128p+48,
+           tw256_128p,tw256_128p+16,tw256_128p+32);
+    tw256_128p++;
+    y128p++;
+    ytmpp++;
+  }
+
+  if (scale>0) {
+
+    for (i=0; i<4; i++) {
+      y128[0]  = shiftright_int16(y128[0],1);
+      y128[1]  = shiftright_int16(y128[1],1);
+      y128[2]  = shiftright_int16(y128[2],1);
+      y128[3]  = shiftright_int16(y128[3],1);
+      y128[4]  = shiftright_int16(y128[4],1);
+      y128[5]  = shiftright_int16(y128[5],1);
+      y128[6]  = shiftright_int16(y128[6],1);
+      y128[7]  = shiftright_int16(y128[7],1);
+      y128[8]  = shiftright_int16(y128[8],1);
+      y128[9]  = shiftright_int16(y128[9],1);
+      y128[10] = shiftright_int16(y128[10],1);
+      y128[11] = shiftright_int16(y128[11],1);
+      y128[12] = shiftright_int16(y128[12],1);
+      y128[13] = shiftright_int16(y128[13],1);
+      y128[14] = shiftright_int16(y128[14],1);
+      y128[15] = shiftright_int16(y128[15],1);
+
+      y128+=16;
+    }
+  }
+}
+
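+// 512 = 2 x 256, radix-2 again: de-interleave into two 256-point halves, run
+// the 256-point kernel on each (internal 1/16 scaling), recombine with 64
+// radix-2 butterflies using the tw512* tables (W512^n, n=0..255), then
+// multiply by 1/sqrt(2) for an overall 1/sqrt(512) when scale>0.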
+int16_t tw512[512] __attribute__((aligned(32))) = {
+  32767,0,32764,-403,32757,-805,32744,-1207,32727,-1608,32705,-2010,32678,-2411,32646,-2812,32609,-3212,32567,-3612,32520,-4012,32468,-4410,32412,-4808,32350,-5206,32284,-5602,32213,-5998,32137,-6393,32056,-6787,31970,-7180,31880,-7572,31785,-7962,31684,-8352,31580,-8740,31470,-9127,31356,-9512,31236,-9896,31113,-10279,30984,-10660,30851,-11039,30713,-11417,30571,-11793,30424,-12167,30272,-12540,30116,-12910,29955,-13279,29790,-13646,29621,-14010,29446,-14373,29268,-14733,29085,-15091,28897,-15447,28706,-15800,28510,-16151,28309,-16500,28105,-16846,27896,-17190,27683,-17531,27466,-17869,27244,-18205,27019,-18538,26789,-18868,26556,-19195,26318,-19520,26077,-19841,25831,-20160,25582,-20475,25329,-20788,25072,-21097,24811,-21403,24546,-21706,24278,-22005,24006,-22302,23731,-22595,23452,-22884,23169,-23170,22883,-23453,22594,-23732,22301,-24007,22004,-24279,21705,-24547,21402,-24812,21096,-25073,20787,-25330,20474,-25583,20159,-25832,19840,-26078,19519,-26319,19194,-26557,18867,-26790,18537,-27020,18204,-27245,17868,-27467,17530,-27684,17189,-27897,16845,-28106,16499,-28310,16150,-28511,15799,-28707,15446,-28898,15090,-29086,14732,-29269,14372,-29447,14009,-29622,13645,-29791,13278,-29956,12909,-30117,12539,-30273,12166,-30425,11792,-30572,11416,-30714,11038,-30852,10659,-30985,10278,-31114,9895,-31237,9511,-31357,9126,-31471,8739,-31581,8351,-31685,7961,-31786,7571,-31881,7179,-31971,6786,-32057,6392,-32138,5997,-32214,5601,-32285,5205,-32351,4807,-32413,4409,-32469,4011,-32521,3611,-32568,3211,-32610,2811,-32647,2410,-32679,2009,-32706,1607,-32728,1206,-32745,804,-32758,402,-32765,0,-32767,-403,-32765,-805,-32758,-1207,-32745,-1608,-32728,-2010,-32706,-2411,-32679,-2812,-32647,-3212,-32610,-3612,-32568,-4012,-32521,-4410,-32469,-4808,-32413,-5206,-32351,-5602,-32285,-5998,-32214,-6393,-32138,-6787,-32057,-7180,-31971,-7572,-31881,-7962,-31786,-8352,-31685,-8740,-31581,-9127,-31471,-9512,-31357,-9896,-31237,-10279,-31114,-10660,-30985,-11039,-30852,-11417,-30714,-11793,-30572,-12167,-30425,-12540,-30273,-12910,-30117,-13279,-29956,-13646,-29791,-14010,-29622,-14373,-29447,-14733,-29269,-15091,-29086,-15447,-28898,-15800,-28707,-16151,-28511,-16500,-28310,-16846,-28106,-17190,-27897,-17531,-27684,-17869,-27467,-18205,-27245,-18538,-27020,-18868,-26790,-19195,-26557,-19520,-26319,-19841,-26078,-20160,-25832,-20475,-25583,-20788,-25330,-21097,-25073,-21403,-24812,-21706,-24547,-22005,-24279,-22302,-24007,-22595,-23732,-22884,-23453,-23170,-23170,-23453,-22884,-23732,-22595,-24007,-22302,-24279,-22005,-24547,-21706,-24812,-21403,-25073,-21097,-25330,-20788,-25583,-20475,-25832,-20160,-26078,-19841,-26319,-19520,-26557,-19195,-26790,-18868,-27020,-18538,-27245,-18205,-27467,-17869,-27684,-17531,-27897,-17190,-28106,-16846,-28310,-16500,-28511,-16151,-28707,-15800,-28898,-15447,-29086,-15091,-29269,-14733,-29447,-14373,-29622,-14010,-29791,-13646,-29956,-13279,-30117,-12910,-30273,-12540,-30425,-12167,-30572,-11793,-30714,-11417,-30852,-11039,-30985,-10660,-31114,-10279,-31237,-9896,-31357,-9512,-31471,-9127,-31581,-8740,-31685,-8352,-31786,-7962,-31881,-7572,-31971,-7180,-32057,-6787,-32138,-6393,-32214,-5998,-32285,-5602,-32351,-5206,-32413,-4808,-32469,-4410,-32521,-4012,-32568,-3612,-32610,-3212,-32647,-2812,-32679,-2411,-32706,-2010,-32728,-1608,-32745,-1207,-32758,-805,-32765,-403
+};
+
+int16_t tw512a[512] __attribute__((aligned(32))) = {
+  32767,0,32764,403,32757,805,32744,1207,32727,1608,32705,2010,32678,2411,32646,2812,32609,3212,32567,3612,32520,4012,32468,4410,32412,4808,32350,5206,32284,5602,32213,5998,32137,6393,32056,6787,31970,7180,31880,7572,31785,7962,31684,8352,31580,8740,31470,9127,31356,9512,31236,9896,31113,10279,30984,10660,30851,11039,30713,11417,30571,11793,30424,12167,30272,12540,30116,12910,29955,13279,29790,13646,29621,14010,29446,14373,29268,14733,29085,15091,28897,15447,28706,15800,28510,16151,28309,16500,28105,16846,27896,17190,27683,17531,27466,17869,27244,18205,27019,18538,26789,18868,26556,19195,26318,19520,26077,19841,25831,20160,25582,20475,25329,20788,25072,21097,24811,21403,24546,21706,24278,22005,24006,22302,23731,22595,23452,22884,23169,23170,22883,23453,22594,23732,22301,24007,22004,24279,21705,24547,21402,24812,21096,25073,20787,25330,20474,25583,20159,25832,19840,26078,19519,26319,19194,26557,18867,26790,18537,27020,18204,27245,17868,27467,17530,27684,17189,27897,16845,28106,16499,28310,16150,28511,15799,28707,15446,28898,15090,29086,14732,29269,14372,29447,14009,29622,13645,29791,13278,29956,12909,30117,12539,30273,12166,30425,11792,30572,11416,30714,11038,30852,10659,30985,10278,31114,9895,31237,9511,31357,9126,31471,8739,31581,8351,31685,7961,31786,7571,31881,7179,31971,6786,32057,6392,32138,5997,32214,5601,32285,5205,32351,4807,32413,4409,32469,4011,32521,3611,32568,3211,32610,2811,32647,2410,32679,2009,32706,1607,32728,1206,32745,804,32758,402,32765,0,32767,-403,32765,-805,32758,-1207,32745,-1608,32728,-2010,32706,-2411,32679,-2812,32647,-3212,32610,-3612,32568,-4012,32521,-4410,32469,-4808,32413,-5206,32351,-5602,32285,-5998,32214,-6393,32138,-6787,32057,-7180,31971,-7572,31881,-7962,31786,-8352,31685,-8740,31581,-9127,31471,-9512,31357,-9896,31237,-10279,31114,-10660,30985,-11039,30852,-11417,30714,-11793,30572,-12167,30425,-12540,30273,-12910,30117,-13279,29956,-13646,29791,-14010,29622,-14373,29447,-14733,29269,-15091,29086,-15447,28898,-15800,28707,-16151,28511,-16500,28310,-16846,28106,-17190,27897,-17531,27684,-17869,27467,-18205,27245,-18538,27020,-18868,26790,-19195,26557,-19520,26319,-19841,26078,-20160,25832,-20475,25583,-20788,25330,-21097,25073,-21403,24812,-21706,24547,-22005,24279,-22302,24007,-22595,23732,-22884,23453,-23170,23170,-23453,22884,-23732,22595,-24007,22302,-24279,22005,-24547,21706,-24812,21403,-25073,21097,-25330,20788,-25583,20475,-25832,20160,-26078,19841,-26319,19520,-26557,19195,-26790,18868,-27020,18538,-27245,18205,-27467,17869,-27684,17531,-27897,17190,-28106,16846,-28310,16500,-28511,16151,-28707,15800,-28898,15447,-29086,15091,-29269,14733,-29447,14373,-29622,14010,-29791,13646,-29956,13279,-30117,12910,-30273,12540,-30425,12167,-30572,11793,-30714,11417,-30852,11039,-30985,10660,-31114,10279,-31237,9896,-31357,9512,-31471,9127,-31581,8740,-31685,8352,-31786,7962,-31881,7572,-31971,7180,-32057,6787,-32138,6393,-32214,5998,-32285,5602,-32351,5206,-32413,4808,-32469,4410,-32521,4012,-32568,3612,-32610,3212,-32647,2812,-32679,2411,-32706,2010,-32728,1608,-32745,1207,-32758,805,-32765,403
+};
+
+
+
+int16_t tw512b[512] __attribute__((aligned(32))) = {
+  0,32767,-403,32764,-805,32757,-1207,32744,-1608,32727,-2010,32705,-2411,32678,-2812,32646,-3212,32609,-3612,32567,-4012,32520,-4410,32468,-4808,32412,-5206,32350,-5602,32284,-5998,32213,-6393,32137,-6787,32056,-7180,31970,-7572,31880,-7962,31785,-8352,31684,-8740,31580,-9127,31470,-9512,31356,-9896,31236,-10279,31113,-10660,30984,-11039,30851,-11417,30713,-11793,30571,-12167,30424,-12540,30272,-12910,30116,-13279,29955,-13646,29790,-14010,29621,-14373,29446,-14733,29268,-15091,29085,-15447,28897,-15800,28706,-16151,28510,-16500,28309,-16846,28105,-17190,27896,-17531,27683,-17869,27466,-18205,27244,-18538,27019,-18868,26789,-19195,26556,-19520,26318,-19841,26077,-20160,25831,-20475,25582,-20788,25329,-21097,25072,-21403,24811,-21706,24546,-22005,24278,-22302,24006,-22595,23731,-22884,23452,-23170,23169,-23453,22883,-23732,22594,-24007,22301,-24279,22004,-24547,21705,-24812,21402,-25073,21096,-25330,20787,-25583,20474,-25832,20159,-26078,19840,-26319,19519,-26557,19194,-26790,18867,-27020,18537,-27245,18204,-27467,17868,-27684,17530,-27897,17189,-28106,16845,-28310,16499,-28511,16150,-28707,15799,-28898,15446,-29086,15090,-29269,14732,-29447,14372,-29622,14009,-29791,13645,-29956,13278,-30117,12909,-30273,12539,-30425,12166,-30572,11792,-30714,11416,-30852,11038,-30985,10659,-31114,10278,-31237,9895,-31357,9511,-31471,9126,-31581,8739,-31685,8351,-31786,7961,-31881,7571,-31971,7179,-32057,6786,-32138,6392,-32214,5997,-32285,5601,-32351,5205,-32413,4807,-32469,4409,-32521,4011,-32568,3611,-32610,3211,-32647,2811,-32679,2410,-32706,2009,-32728,1607,-32745,1206,-32758,804,-32765,402,-32767,0,-32765,-403,-32758,-805,-32745,-1207,-32728,-1608,-32706,-2010,-32679,-2411,-32647,-2812,-32610,-3212,-32568,-3612,-32521,-4012,-32469,-4410,-32413,-4808,-32351,-5206,-32285,-5602,-32214,-5998,-32138,-6393,-32057,-6787,-31971,-7180,-31881,-7572,-31786,-7962,-31685,-8352,-31581,-8740,-31471,-9127,-31357,-9512,-31237,-9896,-31114,-10279,-30985,-10660,-30852,-11039,-30714,-11417,-30572,-11793,-30425,-12167,-30273,-12540,-30117,-12910,-29956,-13279,-29791,-13646,-29622,-14010,-29447,-14373,-29269,-14733,-29086,-15091,-28898,-15447,-28707,-15800,-28511,-16151,-28310,-16500,-28106,-16846,-27897,-17190,-27684,-17531,-27467,-17869,-27245,-18205,-27020,-18538,-26790,-18868,-26557,-19195,-26319,-19520,-26078,-19841,-25832,-20160,-25583,-20475,-25330,-20788,-25073,-21097,-24812,-21403,-24547,-21706,-24279,-22005,-24007,-22302,-23732,-22595,-23453,-22884,-23170,-23170,-22884,-23453,-22595,-23732,-22302,-24007,-22005,-24279,-21706,-24547,-21403,-24812,-21097,-25073,-20788,-25330,-20475,-25583,-20160,-25832,-19841,-26078,-19520,-26319,-19195,-26557,-18868,-26790,-18538,-27020,-18205,-27245,-17869,-27467,-17531,-27684,-17190,-27897,-16846,-28106,-16500,-28310,-16151,-28511,-15800,-28707,-15447,-28898,-15091,-29086,-14733,-29269,-14373,-29447,-14010,-29622,-13646,-29791,-13279,-29956,-12910,-30117,-12540,-30273,-12167,-30425,-11793,-30572,-11417,-30714,-11039,-30852,-10660,-30985,-10279,-31114,-9896,-31237,-9512,-31357,-9127,-31471,-8740,-31581,-8352,-31685,-7962,-31786,-7572,-31881,-7180,-31971,-6787,-32057,-6393,-32138,-5998,-32214,-5602,-32285,-5206,-32351,-4808,-32413,-4410,-32469,-4012,-32521,-3612,-32568,-3212,-32610,-2812,-32647,-2411,-32679,-2010,-32706,-1608,-32728,-1207,-32745,-805,-32758,-403,-32765
+};
+
+int16_t tw512c[512] __attribute__((aligned(32))) = {
+  0,32767,403,32764,805,32757,1207,32744,1608,32727,2010,32705,2411,32678,2812,32646,3212,32609,3612,32567,4012,32520,4410,32468,4808,32412,5206,32350,5602,32284,5998,32213,6393,32137,6787,32056,7180,31970,7572,31880,7962,31785,8352,31684,8740,31580,9127,31470,9512,31356,9896,31236,10279,31113,10660,30984,11039,30851,11417,30713,11793,30571,12167,30424,12540,30272,12910,30116,13279,29955,13646,29790,14010,29621,14373,29446,14733,29268,15091,29085,15447,28897,15800,28706,16151,28510,16500,28309,16846,28105,17190,27896,17531,27683,17869,27466,18205,27244,18538,27019,18868,26789,19195,26556,19520,26318,19841,26077,20160,25831,20475,25582,20788,25329,21097,25072,21403,24811,21706,24546,22005,24278,22302,24006,22595,23731,22884,23452,23170,23169,23453,22883,23732,22594,24007,22301,24279,22004,24547,21705,24812,21402,25073,21096,25330,20787,25583,20474,25832,20159,26078,19840,26319,19519,26557,19194,26790,18867,27020,18537,27245,18204,27467,17868,27684,17530,27897,17189,28106,16845,28310,16499,28511,16150,28707,15799,28898,15446,29086,15090,29269,14732,29447,14372,29622,14009,29791,13645,29956,13278,30117,12909,30273,12539,30425,12166,30572,11792,30714,11416,30852,11038,30985,10659,31114,10278,31237,9895,31357,9511,31471,9126,31581,8739,31685,8351,31786,7961,31881,7571,31971,7179,32057,6786,32138,6392,32214,5997,32285,5601,32351,5205,32413,4807,32469,4409,32521,4011,32568,3611,32610,3211,32647,2811,32679,2410,32706,2009,32728,1607,32745,1206,32758,804,32765,402,32767,0,32765,-403,32758,-805,32745,-1207,32728,-1608,32706,-2010,32679,-2411,32647,-2812,32610,-3212,32568,-3612,32521,-4012,32469,-4410,32413,-4808,32351,-5206,32285,-5602,32214,-5998,32138,-6393,32057,-6787,31971,-7180,31881,-7572,31786,-7962,31685,-8352,31581,-8740,31471,-9127,31357,-9512,31237,-9896,31114,-10279,30985,-10660,30852,-11039,30714,-11417,30572,-11793,30425,-12167,30273,-12540,30117,-12910,29956,-13279,29791,-13646,29622,-14010,29447,-14373,29269,-14733,29086,-15091,28898,-15447,28707,-15800,28511,-16151,28310,-16500,28106,-16846,27897,-17190,27684,-17531,27467,-17869,27245,-18205,27020,-18538,26790,-18868,26557,-19195,26319,-19520,26078,-19841,25832,-20160,25583,-20475,25330,-20788,25073,-21097,24812,-21403,24547,-21706,24279,-22005,24007,-22302,23732,-22595,23453,-22884,23170,-23170,22884,-23453,22595,-23732,22302,-24007,22005,-24279,21706,-24547,21403,-24812,21097,-25073,20788,-25330,20475,-25583,20160,-25832,19841,-26078,19520,-26319,19195,-26557,18868,-26790,18538,-27020,18205,-27245,17869,-27467,17531,-27684,17190,-27897,16846,-28106,16500,-28310,16151,-28511,15800,-28707,15447,-28898,15091,-29086,14733,-29269,14373,-29447,14010,-29622,13646,-29791,13279,-29956,12910,-30117,12540,-30273,12167,-30425,11793,-30572,11417,-30714,11039,-30852,10660,-30985,10279,-31114,9896,-31237,9512,-31357,9127,-31471,8740,-31581,8352,-31685,7962,-31786,7572,-31881,7180,-31971,6787,-32057,6393,-32138,5998,-32214,5602,-32285,5206,-32351,4808,-32413,4410,-32469,4012,-32521,3612,-32568,3212,-32610,2812,-32647,2411,-32679,2010,-32706,1608,-32728,1207,-32745,805,-32758,403,-32765
+};
+
+void dft512(int16_t *x,int16_t *y,unsigned char scale)
+{
+
+  simdshort_q15_t xtmp[256],*xtmpp,*x64 = (simdshort_q15_t *)x;
+  simd_q15_t ytmp[128],*tw512a_128p=(simd_q15_t *)tw512a,*tw512b_128p=(simd_q15_t *)tw512b,*y128=(simd_q15_t *)y,*y128p=(simd_q15_t *)y;
+  simd_q15_t *ytmpp = &ytmp[0];
+  int i;
+  simd_q15_t ONE_OVER_SQRT2_Q15_128 = set1_int16(ONE_OVER_SQRT2_Q15);
+
+  xtmpp = xtmp;
+
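+  // Each pass de-interleaves one quarter of the input; equivalent to
+  //   for (j=0; j<32; j++) transpose4_ooff(x64+2*j, xtmpp+j, 128);
+  // per iteration.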
+  for (i=0; i<4; i++) {
+    transpose4_ooff(x64  ,xtmpp,128);
+    transpose4_ooff(x64+2,xtmpp+1,128);
+    transpose4_ooff(x64+4,xtmpp+2,128);
+    transpose4_ooff(x64+6,xtmpp+3,128);
+    transpose4_ooff(x64+8,xtmpp+4,128);
+    transpose4_ooff(x64+10,xtmpp+5,128);
+    transpose4_ooff(x64+12,xtmpp+6,128);
+    transpose4_ooff(x64+14,xtmpp+7,128);
+    transpose4_ooff(x64+16,xtmpp+8,128);
+    transpose4_ooff(x64+18,xtmpp+9,128);
+    transpose4_ooff(x64+20,xtmpp+10,128);
+    transpose4_ooff(x64+22,xtmpp+11,128);
+    transpose4_ooff(x64+24,xtmpp+12,128);
+    transpose4_ooff(x64+26,xtmpp+13,128);
+    transpose4_ooff(x64+28,xtmpp+14,128);
+    transpose4_ooff(x64+30,xtmpp+15,128);
+    transpose4_ooff(x64+32,xtmpp+16,128);
+    transpose4_ooff(x64+34,xtmpp+17,128);
+    transpose4_ooff(x64+36,xtmpp+18,128);
+    transpose4_ooff(x64+38,xtmpp+19,128);
+    transpose4_ooff(x64+40,xtmpp+20,128);
+    transpose4_ooff(x64+42,xtmpp+21,128);
+    transpose4_ooff(x64+44,xtmpp+22,128);
+    transpose4_ooff(x64+46,xtmpp+23,128);
+    transpose4_ooff(x64+48,xtmpp+24,128);
+    transpose4_ooff(x64+50,xtmpp+25,128);
+    transpose4_ooff(x64+52,xtmpp+26,128);
+    transpose4_ooff(x64+54,xtmpp+27,128);
+    transpose4_ooff(x64+56,xtmpp+28,128);
+    transpose4_ooff(x64+58,xtmpp+29,128);
+    transpose4_ooff(x64+60,xtmpp+30,128);
+    transpose4_ooff(x64+62,xtmpp+31,128);
+    x64+=64;
+    xtmpp+=32;
+  }
+
+  dft256((int16_t*)(xtmp),(int16_t*)ytmp,1);
+  dft256((int16_t*)(xtmp+128),(int16_t*)(ytmp+64),1);
+
+
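+  // 8x-unrolled radix-2 recombination; equivalent to
+  //   for (i=0; i<64; i++)
+  //     bfly2_16(ytmpp+i,ytmpp+64+i,y128p+i,y128p+64+i,tw512a_128p+i,tw512b_128p+i);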
+  for (i=0; i<64; i+=8) {
+    bfly2_16(ytmpp,ytmpp+64,
+             y128p,y128p+64,
+             tw512a_128p,
+             tw512b_128p);
+    bfly2_16(ytmpp+1,ytmpp+65,
+             y128p+1,y128p+65,
+             tw512a_128p+1,
+             tw512b_128p+1);
+    bfly2_16(ytmpp+2,ytmpp+66,
+             y128p+2,y128p+66,
+             tw512a_128p+2,
+             tw512b_128p+2);
+    bfly2_16(ytmpp+3,ytmpp+67,
+             y128p+3,y128p+67,
+             tw512a_128p+3,
+             tw512b_128p+3);
+    bfly2_16(ytmpp+4,ytmpp+68,
+             y128p+4,y128p+68,
+             tw512a_128p+4,
+             tw512b_128p+4);
+    bfly2_16(ytmpp+5,ytmpp+69,
+             y128p+5,y128p+69,
+             tw512a_128p+5,
+             tw512b_128p+5);
+    bfly2_16(ytmpp+6,ytmpp+70,
+             y128p+6,y128p+70,
+             tw512a_128p+6,
+             tw512b_128p+6);
+    bfly2_16(ytmpp+7,ytmpp+71,
+             y128p+7,y128p+71,
+             tw512a_128p+7,
+             tw512b_128p+7);
+    tw512a_128p+=8;
+    tw512b_128p+=8;
+    y128p+=8;
+    ytmpp+=8;
+  }
+
+  if (scale>0) {
+    y128p = y128;
+
+    for (i=0; i<8; i++) {
+      y128p[0]  = mulhi_int16(y128p[0],ONE_OVER_SQRT2_Q15_128);
+      y128p[1]  = mulhi_int16(y128p[1],ONE_OVER_SQRT2_Q15_128);
+      y128p[2]  = mulhi_int16(y128p[2],ONE_OVER_SQRT2_Q15_128);
+      y128p[3]  = mulhi_int16(y128p[3],ONE_OVER_SQRT2_Q15_128);
+      y128p[4]  = mulhi_int16(y128p[4],ONE_OVER_SQRT2_Q15_128);
+      y128p[5]  = mulhi_int16(y128p[5],ONE_OVER_SQRT2_Q15_128);
+      y128p[6]  = mulhi_int16(y128p[6],ONE_OVER_SQRT2_Q15_128);
+      y128p[7]  = mulhi_int16(y128p[7],ONE_OVER_SQRT2_Q15_128);
+      y128p[8]  = mulhi_int16(y128p[8],ONE_OVER_SQRT2_Q15_128);
+      y128p[9]  = mulhi_int16(y128p[9],ONE_OVER_SQRT2_Q15_128);
+      y128p[10] = mulhi_int16(y128p[10],ONE_OVER_SQRT2_Q15_128);
+      y128p[11] = mulhi_int16(y128p[11],ONE_OVER_SQRT2_Q15_128);
+      y128p[12] = mulhi_int16(y128p[12],ONE_OVER_SQRT2_Q15_128);
+      y128p[13] = mulhi_int16(y128p[13],ONE_OVER_SQRT2_Q15_128);
+      y128p[14] = mulhi_int16(y128p[14],ONE_OVER_SQRT2_Q15_128);
+      y128p[15] = mulhi_int16(y128p[15],ONE_OVER_SQRT2_Q15_128);
+      y128p+=16;
+    }
+  }
+}
+
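+/* idft512: inverse counterpart of dft512 -- same even/odd reordering, two
+   idft256 sub-transforms, recombination with the inverse butterflies
+   ibfly2() and the tw512 table, then the same ONE_OVER_SQRT2_Q15 scaling. */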
+void idft512(int16_t *x,int16_t *y,unsigned char scale)
+{
+
+  simdshort_q15_t xtmp[256],*xtmpp,*x64 = (simdshort_q15_t *)x;
+  simd_q15_t ytmp[128],*tw512_128p=(simd_q15_t *)tw512,*y128=(simd_q15_t *)y,*y128p=(simd_q15_t *)y;
+  simd_q15_t *ytmpp = &ytmp[0];
+  int i;
+  simd_q15_t ONE_OVER_SQRT2_Q15_128 = set1_int16(ONE_OVER_SQRT2_Q15);
+
+  xtmpp = xtmp;
+
+  for (i=0; i<4; i++) {
+    transpose4_ooff(x64  ,xtmpp,128);
+    transpose4_ooff(x64+2,xtmpp+1,128);
+    transpose4_ooff(x64+4,xtmpp+2,128);
+    transpose4_ooff(x64+6,xtmpp+3,128);
+    transpose4_ooff(x64+8,xtmpp+4,128);
+    transpose4_ooff(x64+10,xtmpp+5,128);
+    transpose4_ooff(x64+12,xtmpp+6,128);
+    transpose4_ooff(x64+14,xtmpp+7,128);
+    transpose4_ooff(x64+16,xtmpp+8,128);
+    transpose4_ooff(x64+18,xtmpp+9,128);
+    transpose4_ooff(x64+20,xtmpp+10,128);
+    transpose4_ooff(x64+22,xtmpp+11,128);
+    transpose4_ooff(x64+24,xtmpp+12,128);
+    transpose4_ooff(x64+26,xtmpp+13,128);
+    transpose4_ooff(x64+28,xtmpp+14,128);
+    transpose4_ooff(x64+30,xtmpp+15,128);
+    transpose4_ooff(x64+32,xtmpp+16,128);
+    transpose4_ooff(x64+34,xtmpp+17,128);
+    transpose4_ooff(x64+36,xtmpp+18,128);
+    transpose4_ooff(x64+38,xtmpp+19,128);
+    transpose4_ooff(x64+40,xtmpp+20,128);
+    transpose4_ooff(x64+42,xtmpp+21,128);
+    transpose4_ooff(x64+44,xtmpp+22,128);
+    transpose4_ooff(x64+46,xtmpp+23,128);
+    transpose4_ooff(x64+48,xtmpp+24,128);
+    transpose4_ooff(x64+50,xtmpp+25,128);
+    transpose4_ooff(x64+52,xtmpp+26,128);
+    transpose4_ooff(x64+54,xtmpp+27,128);
+    transpose4_ooff(x64+56,xtmpp+28,128);
+    transpose4_ooff(x64+58,xtmpp+29,128);
+    transpose4_ooff(x64+60,xtmpp+30,128);
+    transpose4_ooff(x64+62,xtmpp+31,128);
+    x64+=64;
+    xtmpp+=32;
+  }
+
+  idft256((int16_t*)(xtmp),(int16_t*)ytmp,1);
+  idft256((int16_t*)(xtmp+128),(int16_t*)(ytmp+64),1);
+
+
+  for (i=0; i<64; i++) {
+    ibfly2(ytmpp,ytmpp+64,
+           y128p,y128p+64,
+           tw512_128p);
+    tw512_128p++;
+    y128p++;
+    ytmpp++;
+  }
+
+  if (scale>0) {
+    y128p = y128;
+
+    for (i=0; i<8; i++) {
+      y128p[0]  = mulhi_int16(y128p[0],ONE_OVER_SQRT2_Q15_128);
+      y128p[1]  = mulhi_int16(y128p[1],ONE_OVER_SQRT2_Q15_128);
+      y128p[2]  = mulhi_int16(y128p[2],ONE_OVER_SQRT2_Q15_128);
+      y128p[3]  = mulhi_int16(y128p[3],ONE_OVER_SQRT2_Q15_128);
+      y128p[4]  = mulhi_int16(y128p[4],ONE_OVER_SQRT2_Q15_128);
+      y128p[5]  = mulhi_int16(y128p[5],ONE_OVER_SQRT2_Q15_128);
+      y128p[6]  = mulhi_int16(y128p[6],ONE_OVER_SQRT2_Q15_128);
+      y128p[7]  = mulhi_int16(y128p[7],ONE_OVER_SQRT2_Q15_128);
+      y128p[8]  = mulhi_int16(y128p[8],ONE_OVER_SQRT2_Q15_128);
+      y128p[9]  = mulhi_int16(y128p[9],ONE_OVER_SQRT2_Q15_128);
+      y128p[10] = mulhi_int16(y128p[10],ONE_OVER_SQRT2_Q15_128);
+      y128p[11] = mulhi_int16(y128p[11],ONE_OVER_SQRT2_Q15_128);
+      y128p[12] = mulhi_int16(y128p[12],ONE_OVER_SQRT2_Q15_128);
+      y128p[13] = mulhi_int16(y128p[13],ONE_OVER_SQRT2_Q15_128);
+      y128p[14] = mulhi_int16(y128p[14],ONE_OVER_SQRT2_Q15_128);
+      y128p[15] = mulhi_int16(y128p[15],ONE_OVER_SQRT2_Q15_128);
+      y128p+=16;
+    }
+  }
+}
+
+int16_t tw1024[1536] __attribute__((aligned(32)));
+
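+/* dft1024: radix-4 step over dft256 (1024 = 4 x 256).  transpose16_ooff()
+   performs the stride-4 reordering of the input into the four quarters of
+   xtmp, four 256-point DFTs run on the quarters, and 64 bfly4()
+   butterflies recombine them with three twiddle vectors each.  Scaling
+   here is a plain >>1 arithmetic shift rather than a Q15 multiply. */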
+void dft1024(int16_t *x,int16_t *y,unsigned char scale)
+{
+
+  simd_q15_t xtmp[256],ytmp[256],*tw1024_128p=(simd_q15_t *)tw1024,*x128=(simd_q15_t *)x,*y128=(simd_q15_t *)y,*y128p=(simd_q15_t *)y;
+  simd_q15_t *ytmpp = &ytmp[0];
+  int i,j;
+
+  for (i=0,j=0; i<256; i+=4,j++) {
+    transpose16_ooff(x128+i,xtmp+j,64);
+  }
+
+
+  dft256((int16_t*)(xtmp),(int16_t*)(ytmp),1);
+  dft256((int16_t*)(xtmp+64),(int16_t*)(ytmp+64),1);
+  dft256((int16_t*)(xtmp+128),(int16_t*)(ytmp+128),1);
+  dft256((int16_t*)(xtmp+192),(int16_t*)(ytmp+192),1);
+
+  for (i=0; i<64; i++) {
+    bfly4(ytmpp,ytmpp+64,ytmpp+128,ytmpp+192,
+          y128p,y128p+64,y128p+128,y128p+192,
+          tw1024_128p,tw1024_128p+64,tw1024_128p+128);
+    tw1024_128p++;
+    y128p++;
+    ytmpp++;
+  }
+
+  if (scale>0) {
+
+    for (i=0; i<16; i++) {
+      y128[0]  = shiftright_int16(y128[0],1);
+      y128[1]  = shiftright_int16(y128[1],1);
+      y128[2]  = shiftright_int16(y128[2],1);
+      y128[3]  = shiftright_int16(y128[3],1);
+      y128[4]  = shiftright_int16(y128[4],1);
+      y128[5]  = shiftright_int16(y128[5],1);
+      y128[6]  = shiftright_int16(y128[6],1);
+      y128[7]  = shiftright_int16(y128[7],1);
+      y128[8]  = shiftright_int16(y128[8],1);
+      y128[9]  = shiftright_int16(y128[9],1);
+      y128[10] = shiftright_int16(y128[10],1);
+      y128[11] = shiftright_int16(y128[11],1);
+      y128[12] = shiftright_int16(y128[12],1);
+      y128[13] = shiftright_int16(y128[13],1);
+      y128[14] = shiftright_int16(y128[14],1);
+      y128[15] = shiftright_int16(y128[15],1);
+
+      y128+=16;
+    }
+  }
+}
+
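+/* idft1024: same structure as dft1024 with idft256/ibfly4 substituted. */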
+void idft1024(int16_t *x,int16_t *y,unsigned char scale)
+{
+
+  simd_q15_t xtmp[256],ytmp[256],*tw1024_128p=(simd_q15_t *)tw1024,*x128=(simd_q15_t *)x,*y128=(simd_q15_t *)y,*y128p=(simd_q15_t *)y;
+  simd_q15_t *ytmpp = &ytmp[0];
+  int i,j;
+
+  for (i=0,j=0; i<256; i+=4,j++) {
+    transpose16_ooff(x128+i,xtmp+j,64);
+  }
+
+
+  idft256((int16_t*)(xtmp),(int16_t*)(ytmp),1);
+  idft256((int16_t*)(xtmp+64),(int16_t*)(ytmp+64),1);
+  idft256((int16_t*)(xtmp+128),(int16_t*)(ytmp+128),1);
+  idft256((int16_t*)(xtmp+192),(int16_t*)(ytmp+192),1);
+
+  for (i=0; i<64; i++) {
+    ibfly4(ytmpp,ytmpp+64,ytmpp+128,ytmpp+192,
+           y128p,y128p+64,y128p+128,y128p+192,
+           tw1024_128p,tw1024_128p+64,tw1024_128p+128);
+    tw1024_128p++;
+    y128p++;
+    ytmpp++;
+  }
+
+  if (scale>0) {
+
+    for (i=0; i<16; i++) {
+      y128[0]  = shiftright_int16(y128[0],1);
+      y128[1]  = shiftright_int16(y128[1],1);
+      y128[2]  = shiftright_int16(y128[2],1);
+      y128[3]  = shiftright_int16(y128[3],1);
+      y128[4]  = shiftright_int16(y128[4],1);
+      y128[5]  = shiftright_int16(y128[5],1);
+      y128[6]  = shiftright_int16(y128[6],1);
+      y128[7]  = shiftright_int16(y128[7],1);
+      y128[8]  = shiftright_int16(y128[8],1);
+      y128[9]  = shiftright_int16(y128[9],1);
+      y128[10] = shiftright_int16(y128[10],1);
+      y128[11] = shiftright_int16(y128[11],1);
+      y128[12] = shiftright_int16(y128[12],1);
+      y128[13] = shiftright_int16(y128[13],1);
+      y128[14] = shiftright_int16(y128[14],1);
+      y128[15] = shiftright_int16(y128[15],1);
+
+      y128+=16;
+    }
+  }
+}
+
+int16_t tw2048[2048] __attribute__((aligned(32)));
+
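+/* dft2048: radix-2 step over dft1024 (2048 = 2 x 1024) -- the same
+   even/odd split as dft512 but with offset 512, 256 bfly2() butterflies,
+   and ONE_OVER_SQRT2_Q15 output scaling. */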
+void dft2048(int16_t *x,int16_t *y,unsigned char scale)
+{
+
+  simdshort_q15_t xtmp[1024],*xtmpp,*x64 = (simdshort_q15_t *)x;
+  simd_q15_t ytmp[512],*tw2048_128p=(simd_q15_t *)tw2048,*y128=(simd_q15_t *)y,*y128p=(simd_q15_t *)y;
+  simd_q15_t *ytmpp = &ytmp[0];
+  int i;
+  simd_q15_t ONE_OVER_SQRT2_Q15_128 = set1_int16(ONE_OVER_SQRT2_Q15);
+
+  xtmpp = xtmp;
+
+  for (i=0; i<16; i++) {
+    transpose4_ooff(x64  ,xtmpp,512);
+    transpose4_ooff(x64+2,xtmpp+1,512);
+    transpose4_ooff(x64+4,xtmpp+2,512);
+    transpose4_ooff(x64+6,xtmpp+3,512);
+    transpose4_ooff(x64+8,xtmpp+4,512);
+    transpose4_ooff(x64+10,xtmpp+5,512);
+    transpose4_ooff(x64+12,xtmpp+6,512);
+    transpose4_ooff(x64+14,xtmpp+7,512);
+    transpose4_ooff(x64+16,xtmpp+8,512);
+    transpose4_ooff(x64+18,xtmpp+9,512);
+    transpose4_ooff(x64+20,xtmpp+10,512);
+    transpose4_ooff(x64+22,xtmpp+11,512);
+    transpose4_ooff(x64+24,xtmpp+12,512);
+    transpose4_ooff(x64+26,xtmpp+13,512);
+    transpose4_ooff(x64+28,xtmpp+14,512);
+    transpose4_ooff(x64+30,xtmpp+15,512);
+    transpose4_ooff(x64+32,xtmpp+16,512);
+    transpose4_ooff(x64+34,xtmpp+17,512);
+    transpose4_ooff(x64+36,xtmpp+18,512);
+    transpose4_ooff(x64+38,xtmpp+19,512);
+    transpose4_ooff(x64+40,xtmpp+20,512);
+    transpose4_ooff(x64+42,xtmpp+21,512);
+    transpose4_ooff(x64+44,xtmpp+22,512);
+    transpose4_ooff(x64+46,xtmpp+23,512);
+    transpose4_ooff(x64+48,xtmpp+24,512);
+    transpose4_ooff(x64+50,xtmpp+25,512);
+    transpose4_ooff(x64+52,xtmpp+26,512);
+    transpose4_ooff(x64+54,xtmpp+27,512);
+    transpose4_ooff(x64+56,xtmpp+28,512);
+    transpose4_ooff(x64+58,xtmpp+29,512);
+    transpose4_ooff(x64+60,xtmpp+30,512);
+    transpose4_ooff(x64+62,xtmpp+31,512);
+    x64+=64;
+    xtmpp+=32;
+  }
+
+  dft1024((int16_t*)(xtmp),(int16_t*)ytmp,1);
+  dft1024((int16_t*)(xtmp+512),(int16_t*)(ytmp+256),1);
+
+
+  for (i=0; i<256; i++) {
+    bfly2(ytmpp,ytmpp+256,
+          y128p,y128p+256,
+          tw2048_128p);
+    tw2048_128p++;
+    y128p++;
+    ytmpp++;
+  }
+
+  if (scale>0) {
+    y128p = y128;
+
+    for (i=0; i<32; i++) {
+      y128p[0]  = mulhi_int16(y128p[0],ONE_OVER_SQRT2_Q15_128);
+      y128p[1]  = mulhi_int16(y128p[1],ONE_OVER_SQRT2_Q15_128);
+      y128p[2]  = mulhi_int16(y128p[2],ONE_OVER_SQRT2_Q15_128);
+      y128p[3]  = mulhi_int16(y128p[3],ONE_OVER_SQRT2_Q15_128);
+      y128p[4]  = mulhi_int16(y128p[4],ONE_OVER_SQRT2_Q15_128);
+      y128p[5]  = mulhi_int16(y128p[5],ONE_OVER_SQRT2_Q15_128);
+      y128p[6]  = mulhi_int16(y128p[6],ONE_OVER_SQRT2_Q15_128);
+      y128p[7]  = mulhi_int16(y128p[7],ONE_OVER_SQRT2_Q15_128);
+      y128p[8]  = mulhi_int16(y128p[8],ONE_OVER_SQRT2_Q15_128);
+      y128p[9]  = mulhi_int16(y128p[9],ONE_OVER_SQRT2_Q15_128);
+      y128p[10] = mulhi_int16(y128p[10],ONE_OVER_SQRT2_Q15_128);
+      y128p[11] = mulhi_int16(y128p[11],ONE_OVER_SQRT2_Q15_128);
+      y128p[12] = mulhi_int16(y128p[12],ONE_OVER_SQRT2_Q15_128);
+      y128p[13] = mulhi_int16(y128p[13],ONE_OVER_SQRT2_Q15_128);
+      y128p[14] = mulhi_int16(y128p[14],ONE_OVER_SQRT2_Q15_128);
+      y128p[15] = mulhi_int16(y128p[15],ONE_OVER_SQRT2_Q15_128);
+      y128p+=16;
+    }
+  }
+}
+
+void idft2048(int16_t *x,int16_t *y,unsigned char scale)
+{
+
+  simdshort_q15_t xtmp[1024],*xtmpp,*x64 = (simdshort_q15_t *)x;
+  simd_q15_t ytmp[512],*tw2048_128p=(simd_q15_t *)tw2048,*y128=(simd_q15_t *)y,*y128p=(simd_q15_t *)y;
+  simd_q15_t *ytmpp = &ytmp[0];
+  int i;
+  simd_q15_t ONE_OVER_SQRT2_Q15_128 = set1_int16(ONE_OVER_SQRT2_Q15);
+
+  xtmpp = xtmp;
+
+  for (i=0; i<16; i++) {
+    transpose4_ooff(x64  ,xtmpp,512);
+    transpose4_ooff(x64+2,xtmpp+1,512);
+    transpose4_ooff(x64+4,xtmpp+2,512);
+    transpose4_ooff(x64+6,xtmpp+3,512);
+    transpose4_ooff(x64+8,xtmpp+4,512);
+    transpose4_ooff(x64+10,xtmpp+5,512);
+    transpose4_ooff(x64+12,xtmpp+6,512);
+    transpose4_ooff(x64+14,xtmpp+7,512);
+    transpose4_ooff(x64+16,xtmpp+8,512);
+    transpose4_ooff(x64+18,xtmpp+9,512);
+    transpose4_ooff(x64+20,xtmpp+10,512);
+    transpose4_ooff(x64+22,xtmpp+11,512);
+    transpose4_ooff(x64+24,xtmpp+12,512);
+    transpose4_ooff(x64+26,xtmpp+13,512);
+    transpose4_ooff(x64+28,xtmpp+14,512);
+    transpose4_ooff(x64+30,xtmpp+15,512);
+    transpose4_ooff(x64+32,xtmpp+16,512);
+    transpose4_ooff(x64+34,xtmpp+17,512);
+    transpose4_ooff(x64+36,xtmpp+18,512);
+    transpose4_ooff(x64+38,xtmpp+19,512);
+    transpose4_ooff(x64+40,xtmpp+20,512);
+    transpose4_ooff(x64+42,xtmpp+21,512);
+    transpose4_ooff(x64+44,xtmpp+22,512);
+    transpose4_ooff(x64+46,xtmpp+23,512);
+    transpose4_ooff(x64+48,xtmpp+24,512);
+    transpose4_ooff(x64+50,xtmpp+25,512);
+    transpose4_ooff(x64+52,xtmpp+26,512);
+    transpose4_ooff(x64+54,xtmpp+27,512);
+    transpose4_ooff(x64+56,xtmpp+28,512);
+    transpose4_ooff(x64+58,xtmpp+29,512);
+    transpose4_ooff(x64+60,xtmpp+30,512);
+    transpose4_ooff(x64+62,xtmpp+31,512);
+    x64+=64;
+    xtmpp+=32;
+  }
+
+  idft1024((int16_t*)(xtmp),(int16_t*)ytmp,1);
+  idft1024((int16_t*)(xtmp+512),(int16_t*)(ytmp+256),1);
+
+
+  for (i=0; i<256; i++) {
+    ibfly2(ytmpp,ytmpp+256,
+           y128p,y128p+256,
+           tw2048_128p);
+    tw2048_128p++;
+    y128p++;
+    ytmpp++;
+  }
+
+  if (scale>0) {
+    y128p = y128;
+
+    for (i=0; i<32; i++) {
+      y128p[0]  = mulhi_int16(y128p[0],ONE_OVER_SQRT2_Q15_128);
+      y128p[1]  = mulhi_int16(y128p[1],ONE_OVER_SQRT2_Q15_128);
+      y128p[2]  = mulhi_int16(y128p[2],ONE_OVER_SQRT2_Q15_128);
+      y128p[3]  = mulhi_int16(y128p[3],ONE_OVER_SQRT2_Q15_128);
+      y128p[4]  = mulhi_int16(y128p[4],ONE_OVER_SQRT2_Q15_128);
+      y128p[5]  = mulhi_int16(y128p[5],ONE_OVER_SQRT2_Q15_128);
+      y128p[6]  = mulhi_int16(y128p[6],ONE_OVER_SQRT2_Q15_128);
+      y128p[7]  = mulhi_int16(y128p[7],ONE_OVER_SQRT2_Q15_128);
+      y128p[8]  = mulhi_int16(y128p[8],ONE_OVER_SQRT2_Q15_128);
+      y128p[9]  = mulhi_int16(y128p[9],ONE_OVER_SQRT2_Q15_128);
+      y128p[10] = mulhi_int16(y128p[10],ONE_OVER_SQRT2_Q15_128);
+      y128p[11] = mulhi_int16(y128p[11],ONE_OVER_SQRT2_Q15_128);
+      y128p[12] = mulhi_int16(y128p[12],ONE_OVER_SQRT2_Q15_128);
+      y128p[13] = mulhi_int16(y128p[13],ONE_OVER_SQRT2_Q15_128);
+      y128p[14] = mulhi_int16(y128p[14],ONE_OVER_SQRT2_Q15_128);
+      y128p[15] = mulhi_int16(y128p[15],ONE_OVER_SQRT2_Q15_128);
+      y128p+=16;
+    }
+  }
+}
+
+int16_t tw4096[3*2*1024] __attribute__((aligned(32)));
+
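+/* dft4096: radix-4 step over dft1024 (4096 = 4 x 1024).  Note the
+   asymmetry with idft4096 below: the forward transform shifts right by a
+   fixed 1 when scale > 0, while the inverse shifts right by 'scale'. */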
+void dft4096(int16_t *x,int16_t *y,unsigned char scale)
+{
+
+  simd_q15_t xtmp[1024],ytmp[1024],*tw4096_128p=(simd_q15_t *)tw4096,*x128=(simd_q15_t *)x,*y128=(simd_q15_t *)y,*y128p=(simd_q15_t *)y;
+  simd_q15_t *ytmpp = &ytmp[0];
+  int i,j;
+
+  for (i=0,j=0; i<1024; i+=4,j++) {
+    transpose16_ooff(x128+i,xtmp+j,256);
+  }
+
+
+  dft1024((int16_t*)(xtmp),(int16_t*)(ytmp),1);
+  dft1024((int16_t*)(xtmp+256),(int16_t*)(ytmp+256),1);
+  dft1024((int16_t*)(xtmp+512),(int16_t*)(ytmp+512),1);
+  dft1024((int16_t*)(xtmp+768),(int16_t*)(ytmp+768),1);
+
+  for (i=0; i<256; i++) {
+    bfly4(ytmpp,ytmpp+256,ytmpp+512,ytmpp+768,
+          y128p,y128p+256,y128p+512,y128p+768,
+          tw4096_128p,tw4096_128p+256,tw4096_128p+512);
+    tw4096_128p++;
+    y128p++;
+    ytmpp++;
+  }
+
+  if (scale>0) {
+
+    for (i=0; i<64; i++) {
+      y128[0]  = shiftright_int16(y128[0],1);
+      y128[1]  = shiftright_int16(y128[1],1);
+      y128[2]  = shiftright_int16(y128[2],1);
+      y128[3]  = shiftright_int16(y128[3],1);
+      y128[4]  = shiftright_int16(y128[4],1);
+      y128[5]  = shiftright_int16(y128[5],1);
+      y128[6]  = shiftright_int16(y128[6],1);
+      y128[7]  = shiftright_int16(y128[7],1);
+      y128[8]  = shiftright_int16(y128[8],1);
+      y128[9]  = shiftright_int16(y128[9],1);
+      y128[10] = shiftright_int16(y128[10],1);
+      y128[11] = shiftright_int16(y128[11],1);
+      y128[12] = shiftright_int16(y128[12],1);
+      y128[13] = shiftright_int16(y128[13],1);
+      y128[14] = shiftright_int16(y128[14],1);
+      y128[15] = shiftright_int16(y128[15],1);
+
+      y128+=16;
+    }
+  }
+}
+
+void idft4096(int16_t *x,int16_t *y,unsigned char scale)
+{
+
+  simd_q15_t xtmp[1024],ytmp[1024],*tw4096_128p=(simd_q15_t *)tw4096,*x128=(simd_q15_t *)x,*y128=(simd_q15_t *)y,*y128p=(simd_q15_t *)y;
+  simd_q15_t *ytmpp = &ytmp[0];
+  int i,j;
+
+  for (i=0,j=0; i<1024; i+=4,j++) {
+    transpose16_ooff(x128+i,xtmp+j,256);
+  }
+
+
+  idft1024((int16_t*)(xtmp),(int16_t*)(ytmp),1);
+  idft1024((int16_t*)(xtmp+256),(int16_t*)(ytmp+256),1);
+  idft1024((int16_t*)(xtmp+512),(int16_t*)(ytmp+512),1);
+  idft1024((int16_t*)(xtmp+768),(int16_t*)(ytmp+768),1);
+
+  for (i=0; i<256; i++) {
+    ibfly4(ytmpp,ytmpp+256,ytmpp+512,ytmpp+768,
+           y128p,y128p+256,y128p+512,y128p+768,
+           tw4096_128p,tw4096_128p+256,tw4096_128p+512);
+    tw4096_128p++;
+    y128p++;
+    ytmpp++;
+  }
+
+  if (scale>0) {
+
+    for (i=0; i<64; i++) {
+      y128[0]  = shiftright_int16(y128[0],scale);
+      y128[1]  = shiftright_int16(y128[1],scale);
+      y128[2]  = shiftright_int16(y128[2],scale);
+      y128[3]  = shiftright_int16(y128[3],scale);
+      y128[4]  = shiftright_int16(y128[4],scale);
+      y128[5]  = shiftright_int16(y128[5],scale);
+      y128[6]  = shiftright_int16(y128[6],scale);
+      y128[7]  = shiftright_int16(y128[7],scale);
+      y128[8]  = shiftright_int16(y128[8],scale);
+      y128[9]  = shiftright_int16(y128[9],scale);
+      y128[10] = shiftright_int16(y128[10],scale);
+      y128[11] = shiftright_int16(y128[11],scale);
+      y128[12] = shiftright_int16(y128[12],scale);
+      y128[13] = shiftright_int16(y128[13],scale);
+      y128[14] = shiftright_int16(y128[14],scale);
+      y128[15] = shiftright_int16(y128[15],scale);
+
+      y128+=16;
+    }
+  }
+}
+
+int16_t tw8192[2*4096] __attribute__((aligned(32)));
+
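+/* dft8192: radix-2 step over dft4096 (8192 = 2 x 4096).  The two
+   4096-point sub-transforms each write 1024 SIMD vectors into ytmp, and
+   the bfly2() loop reads up to ytmpp+1024, so ytmp must hold 2048 vectors
+   (as in idft8192 below). */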
+void dft8192(int16_t *x,int16_t *y,unsigned char scale)
+{
+
+  simdshort_q15_t xtmp[4096],*xtmpp,*x64 = (simdshort_q15_t *)x;
+  simd_q15_t ytmp[2048],*tw8192_128p=(simd_q15_t *)tw8192,*y128=(simd_q15_t *)y,*y128p=(simd_q15_t *)y;
+  simd_q15_t *ytmpp = &ytmp[0];
+  int i;
+  simd_q15_t ONE_OVER_SQRT2_Q15_128 = set1_int16(ONE_OVER_SQRT2_Q15);
+
+  xtmpp = xtmp;
+
+  for (i=0; i<64; i++) {
+    transpose4_ooff(x64  ,xtmpp,2048);
+    transpose4_ooff(x64+2,xtmpp+1,2048);
+    transpose4_ooff(x64+4,xtmpp+2,2048);
+    transpose4_ooff(x64+6,xtmpp+3,2048);
+    transpose4_ooff(x64+8,xtmpp+4,2048);
+    transpose4_ooff(x64+10,xtmpp+5,2048);
+    transpose4_ooff(x64+12,xtmpp+6,2048);
+    transpose4_ooff(x64+14,xtmpp+7,2048);
+    transpose4_ooff(x64+16,xtmpp+8,2048);
+    transpose4_ooff(x64+18,xtmpp+9,2048);
+    transpose4_ooff(x64+20,xtmpp+10,2048);
+    transpose4_ooff(x64+22,xtmpp+11,2048);
+    transpose4_ooff(x64+24,xtmpp+12,2048);
+    transpose4_ooff(x64+26,xtmpp+13,2048);
+    transpose4_ooff(x64+28,xtmpp+14,2048);
+    transpose4_ooff(x64+30,xtmpp+15,2048);
+    transpose4_ooff(x64+32,xtmpp+16,2048);
+    transpose4_ooff(x64+34,xtmpp+17,2048);
+    transpose4_ooff(x64+36,xtmpp+18,2048);
+    transpose4_ooff(x64+38,xtmpp+19,2048);
+    transpose4_ooff(x64+40,xtmpp+20,2048);
+    transpose4_ooff(x64+42,xtmpp+21,2048);
+    transpose4_ooff(x64+44,xtmpp+22,2048);
+    transpose4_ooff(x64+46,xtmpp+23,2048);
+    transpose4_ooff(x64+48,xtmpp+24,2048);
+    transpose4_ooff(x64+50,xtmpp+25,2048);
+    transpose4_ooff(x64+52,xtmpp+26,2048);
+    transpose4_ooff(x64+54,xtmpp+27,2048);
+    transpose4_ooff(x64+56,xtmpp+28,2048);
+    transpose4_ooff(x64+58,xtmpp+29,2048);
+    transpose4_ooff(x64+60,xtmpp+30,2048);
+    transpose4_ooff(x64+62,xtmpp+31,2048);
+    x64+=64;
+    xtmpp+=32;
+  }
+
+  dft4096((int16_t*)(xtmp),(int16_t*)ytmp,1);
+  dft4096((int16_t*)(xtmp+2048),(int16_t*)(ytmp+1024),1);
+
+
+  for (i=0; i<1024; i++) {
+    bfly2(ytmpp,ytmpp+1024,
+          y128p,y128p+1024,
+          tw8192_128p);
+    tw8192_128p++;
+    y128p++;
+    ytmpp++;
+  }
+
+  if (scale>0) {
+    y128p = y128;
+
+    for (i=0; i<128; i++) {
+      y128p[0]  = mulhi_int16(y128p[0],ONE_OVER_SQRT2_Q15_128);
+      y128p[1]  = mulhi_int16(y128p[1],ONE_OVER_SQRT2_Q15_128);
+      y128p[2]  = mulhi_int16(y128p[2],ONE_OVER_SQRT2_Q15_128);
+      y128p[3]  = mulhi_int16(y128p[3],ONE_OVER_SQRT2_Q15_128);
+      y128p[4]  = mulhi_int16(y128p[4],ONE_OVER_SQRT2_Q15_128);
+      y128p[5]  = mulhi_int16(y128p[5],ONE_OVER_SQRT2_Q15_128);
+      y128p[6]  = mulhi_int16(y128p[6],ONE_OVER_SQRT2_Q15_128);
+      y128p[7]  = mulhi_int16(y128p[7],ONE_OVER_SQRT2_Q15_128);
+      y128p[8]  = mulhi_int16(y128p[8],ONE_OVER_SQRT2_Q15_128);
+      y128p[9]  = mulhi_int16(y128p[9],ONE_OVER_SQRT2_Q15_128);
+      y128p[10] = mulhi_int16(y128p[10],ONE_OVER_SQRT2_Q15_128);
+      y128p[11] = mulhi_int16(y128p[11],ONE_OVER_SQRT2_Q15_128);
+      y128p[12] = mulhi_int16(y128p[12],ONE_OVER_SQRT2_Q15_128);
+      y128p[13] = mulhi_int16(y128p[13],ONE_OVER_SQRT2_Q15_128);
+      y128p[14] = mulhi_int16(y128p[14],ONE_OVER_SQRT2_Q15_128);
+      y128p[15] = mulhi_int16(y128p[15],ONE_OVER_SQRT2_Q15_128);
+      y128p+=16;
+    }
+  }
+}
+
+void idft8192(int16_t *x,int16_t *y,unsigned char scale)
+{
+
+  simdshort_q15_t xtmp[4096],*xtmpp,*x64 = (simdshort_q15_t *)x;
+  simd_q15_t ytmp[2048],*tw8192_128p=(simd_q15_t *)tw8192,*y128=(simd_q15_t *)y,*y128p=(simd_q15_t *)y;
+  simd_q15_t *ytmpp = &ytmp[0];
+  int i;
+  simd_q15_t ONE_OVER_SQRT2_Q15_128 = set1_int16(ONE_OVER_SQRT2_Q15);
+
+  xtmpp = xtmp;
+
+  for (i=0; i<64; i++) {
+    transpose4_ooff(x64  ,xtmpp,2048);
+    transpose4_ooff(x64+2,xtmpp+1,2048);
+    transpose4_ooff(x64+4,xtmpp+2,2048);
+    transpose4_ooff(x64+6,xtmpp+3,2048);
+    transpose4_ooff(x64+8,xtmpp+4,2048);
+    transpose4_ooff(x64+10,xtmpp+5,2048);
+    transpose4_ooff(x64+12,xtmpp+6,2048);
+    transpose4_ooff(x64+14,xtmpp+7,2048);
+    transpose4_ooff(x64+16,xtmpp+8,2048);
+    transpose4_ooff(x64+18,xtmpp+9,2048);
+    transpose4_ooff(x64+20,xtmpp+10,2048);
+    transpose4_ooff(x64+22,xtmpp+11,2048);
+    transpose4_ooff(x64+24,xtmpp+12,2048);
+    transpose4_ooff(x64+26,xtmpp+13,2048);
+    transpose4_ooff(x64+28,xtmpp+14,2048);
+    transpose4_ooff(x64+30,xtmpp+15,2048);
+    transpose4_ooff(x64+32,xtmpp+16,2048);
+    transpose4_ooff(x64+34,xtmpp+17,2048);
+    transpose4_ooff(x64+36,xtmpp+18,2048);
+    transpose4_ooff(x64+38,xtmpp+19,2048);
+    transpose4_ooff(x64+40,xtmpp+20,2048);
+    transpose4_ooff(x64+42,xtmpp+21,2048);
+    transpose4_ooff(x64+44,xtmpp+22,2048);
+    transpose4_ooff(x64+46,xtmpp+23,2048);
+    transpose4_ooff(x64+48,xtmpp+24,2048);
+    transpose4_ooff(x64+50,xtmpp+25,2048);
+    transpose4_ooff(x64+52,xtmpp+26,2048);
+    transpose4_ooff(x64+54,xtmpp+27,2048);
+    transpose4_ooff(x64+56,xtmpp+28,2048);
+    transpose4_ooff(x64+58,xtmpp+29,2048);
+    transpose4_ooff(x64+60,xtmpp+30,2048);
+    transpose4_ooff(x64+62,xtmpp+31,2048);
+    x64+=64;
+    xtmpp+=32;
+  }
+
+  idft4096((int16_t*)(xtmp),(int16_t*)ytmp,1);
+  idft4096((int16_t*)(xtmp+2048),(int16_t*)(ytmp+1024),1);
+
+
+  for (i=0; i<1024; i++) {
+    ibfly2(ytmpp,ytmpp+1024,
+           y128p,y128p+1024,
+           tw8192_128p);
+    tw8192_128p++;
+    y128p++;
+    ytmpp++;
+  }
+
+  if (scale>0) {
+    y128p = y128;
+
+    for (i=0; i<128; i++) {
+      y128p[0]  = mulhi_int16(y128p[0],ONE_OVER_SQRT2_Q15_128);
+      y128p[1]  = mulhi_int16(y128p[1],ONE_OVER_SQRT2_Q15_128);
+      y128p[2]  = mulhi_int16(y128p[2],ONE_OVER_SQRT2_Q15_128);
+      y128p[3]  = mulhi_int16(y128p[3],ONE_OVER_SQRT2_Q15_128);
+      y128p[4]  = mulhi_int16(y128p[4],ONE_OVER_SQRT2_Q15_128);
+      y128p[5]  = mulhi_int16(y128p[5],ONE_OVER_SQRT2_Q15_128);
+      y128p[6]  = mulhi_int16(y128p[6],ONE_OVER_SQRT2_Q15_128);
+      y128p[7]  = mulhi_int16(y128p[7],ONE_OVER_SQRT2_Q15_128);
+      y128p[8]  = mulhi_int16(y128p[8],ONE_OVER_SQRT2_Q15_128);
+      y128p[9]  = mulhi_int16(y128p[9],ONE_OVER_SQRT2_Q15_128);
+      y128p[10] = mulhi_int16(y128p[10],ONE_OVER_SQRT2_Q15_128);
+      y128p[11] = mulhi_int16(y128p[11],ONE_OVER_SQRT2_Q15_128);
+      y128p[12] = mulhi_int16(y128p[12],ONE_OVER_SQRT2_Q15_128);
+      y128p[13] = mulhi_int16(y128p[13],ONE_OVER_SQRT2_Q15_128);
+      y128p[14] = mulhi_int16(y128p[14],ONE_OVER_SQRT2_Q15_128);
+      y128p[15] = mulhi_int16(y128p[15],ONE_OVER_SQRT2_Q15_128);
+      y128p+=16;
+    }
+  }
+}
+
+int16_t tw16384[3*2*4096] __attribute__((aligned(32)));
+
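+/* dft16384: radix-4 step over dft4096 (16384 = 4 x 4096), mirroring the
+   dft4096-over-dft1024 pattern one level up; the forward/inverse scaling
+   asymmetry of the 4096 pair applies here too. */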
+void dft16384(int16_t *x,int16_t *y,unsigned char scale)
+{
+
+  simd_q15_t xtmp[4096],ytmp[4096],*tw16384_128p=(simd_q15_t *)tw16384,*x128=(simd_q15_t *)x,*y128=(simd_q15_t *)y,*y128p=(simd_q15_t *)y;
+  simd_q15_t *ytmpp = &ytmp[0];
+  int i,j;
+
+  for (i=0,j=0; i<4096; i+=4,j++) {
+    transpose16_ooff(x128+i,xtmp+j,1024);
+  }
+
+
+  dft4096((int16_t*)(xtmp),(int16_t*)(ytmp),1);
+  dft4096((int16_t*)(xtmp+1024),(int16_t*)(ytmp+1024),1);
+  dft4096((int16_t*)(xtmp+2048),(int16_t*)(ytmp+2048),1);
+  dft4096((int16_t*)(xtmp+3072),(int16_t*)(ytmp+3072),1);
+
+  for (i=0; i<1024; i++) {
+    bfly4(ytmpp,ytmpp+1024,ytmpp+2048,ytmpp+3072,
+          y128p,y128p+1024,y128p+2048,y128p+3072,
+          tw16384_128p,tw16384_128p+1024,tw16384_128p+2048);
+    tw16384_128p++;
+    y128p++;
+    ytmpp++;
+  }
+
+  if (scale>0) {
+
+    for (i=0; i<256; i++) {
+      y128[0]  = shiftright_int16(y128[0],1);
+      y128[1]  = shiftright_int16(y128[1],1);
+      y128[2]  = shiftright_int16(y128[2],1);
+      y128[3]  = shiftright_int16(y128[3],1);
+      y128[4]  = shiftright_int16(y128[4],1);
+      y128[5]  = shiftright_int16(y128[5],1);
+      y128[6]  = shiftright_int16(y128[6],1);
+      y128[7]  = shiftright_int16(y128[7],1);
+      y128[8]  = shiftright_int16(y128[8],1);
+      y128[9]  = shiftright_int16(y128[9],1);
+      y128[10] = shiftright_int16(y128[10],1);
+      y128[11] = shiftright_int16(y128[11],1);
+      y128[12] = shiftright_int16(y128[12],1);
+      y128[13] = shiftright_int16(y128[13],1);
+      y128[14] = shiftright_int16(y128[14],1);
+      y128[15] = shiftright_int16(y128[15],1);
+
+      y128+=16;
+    }
+
+  }
+}
+
+void idft16384(int16_t *x,int16_t *y,unsigned char scale)
+{
+
+  simd_q15_t xtmp[4096],ytmp[4096],*tw16384_128p=(simd_q15_t *)tw16384,*x128=(simd_q15_t *)x,*y128=(simd_q15_t *)y,*y128p=(simd_q15_t *)y;
+  simd_q15_t *ytmpp = &ytmp[0];
+  int i,j;
+
+  for (i=0,j=0; i<4096; i+=4,j++) {
+    transpose16_ooff(x128+i,xtmp+j,1024);
+  }
+
+
+  idft4096((int16_t*)(xtmp),(int16_t*)(ytmp),1);
+  idft4096((int16_t*)(xtmp+1024),(int16_t*)(ytmp+1024),1);
+  idft4096((int16_t*)(xtmp+2048),(int16_t*)(ytmp+2048),1);
+  idft4096((int16_t*)(xtmp+3072),(int16_t*)(ytmp+3072),1);
+
+  for (i=0; i<1024; i++) {
+    ibfly4(ytmpp,ytmpp+1024,ytmpp+2048,ytmpp+3072,
+           y128p,y128p+1024,y128p+2048,y128p+3072,
+           tw16384_128p,tw16384_128p+1024,tw16384_128p+2048);
+    tw16384_128p++;
+    y128p++;
+    ytmpp++;
+  }
+
+  if (scale>0) {
+
+    for (i=0; i<256; i++) {
+      y128[0]  = shiftright_int16(y128[0],scale);
+      y128[1]  = shiftright_int16(y128[1],scale);
+      y128[2]  = shiftright_int16(y128[2],scale);
+      y128[3]  = shiftright_int16(y128[3],scale);
+      y128[4]  = shiftright_int16(y128[4],scale);
+      y128[5]  = shiftright_int16(y128[5],scale);
+      y128[6]  = shiftright_int16(y128[6],scale);
+      y128[7]  = shiftright_int16(y128[7],scale);
+      y128[8]  = shiftright_int16(y128[8],scale);
+      y128[9]  = shiftright_int16(y128[9],scale);
+      y128[10] = shiftright_int16(y128[10],scale);
+      y128[11] = shiftright_int16(y128[11],scale);
+      y128[12] = shiftright_int16(y128[12],scale);
+      y128[13] = shiftright_int16(y128[13],scale);
+      y128[14] = shiftright_int16(y128[14],scale);
+      y128[15] = shiftright_int16(y128[15],scale);
+
+      y128+=16;
+    }
+  }
+}
+
+int16_t tw32768[2*16384] __attribute__((aligned(32)));
+
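+/* dft32768: radix-2 step over dft16384 (32768 = 2 x 16384), with the same
+   even/odd split, bfly2() recombination and ONE_OVER_SQRT2_Q15 scaling as
+   the smaller radix-2 sizes. */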
+void dft32768(int16_t *x,int16_t *y,unsigned char scale)
+{
+
+  simdshort_q15_t xtmp[16384],*xtmpp,*x64 = (simdshort_q15_t *)x;
+  simd_q15_t ytmp[8192],*tw32768_128p=(simd_q15_t *)tw32768,*y128=(simd_q15_t *)y,*y128p=(simd_q15_t *)y;
+  simd_q15_t *ytmpp = &ytmp[0];
+  int i;
+  simd_q15_t ONE_OVER_SQRT2_Q15_128 = set1_int16(ONE_OVER_SQRT2_Q15);
+
+  xtmpp = xtmp;
+
+  for (i=0; i<256; i++) {
+    transpose4_ooff(x64  ,xtmpp,8192);
+    transpose4_ooff(x64+2,xtmpp+1,8192);
+    transpose4_ooff(x64+4,xtmpp+2,8192);
+    transpose4_ooff(x64+6,xtmpp+3,8192);
+    transpose4_ooff(x64+8,xtmpp+4,8192);
+    transpose4_ooff(x64+10,xtmpp+5,8192);
+    transpose4_ooff(x64+12,xtmpp+6,8192);
+    transpose4_ooff(x64+14,xtmpp+7,8192);
+    transpose4_ooff(x64+16,xtmpp+8,8192);
+    transpose4_ooff(x64+18,xtmpp+9,8192);
+    transpose4_ooff(x64+20,xtmpp+10,8192);
+    transpose4_ooff(x64+22,xtmpp+11,8192);
+    transpose4_ooff(x64+24,xtmpp+12,8192);
+    transpose4_ooff(x64+26,xtmpp+13,8192);
+    transpose4_ooff(x64+28,xtmpp+14,8192);
+    transpose4_ooff(x64+30,xtmpp+15,8192);
+    transpose4_ooff(x64+32,xtmpp+16,8192);
+    transpose4_ooff(x64+34,xtmpp+17,8192);
+    transpose4_ooff(x64+36,xtmpp+18,8192);
+    transpose4_ooff(x64+38,xtmpp+19,8192);
+    transpose4_ooff(x64+40,xtmpp+20,8192);
+    transpose4_ooff(x64+42,xtmpp+21,8192);
+    transpose4_ooff(x64+44,xtmpp+22,8192);
+    transpose4_ooff(x64+46,xtmpp+23,8192);
+    transpose4_ooff(x64+48,xtmpp+24,8192);
+    transpose4_ooff(x64+50,xtmpp+25,8192);
+    transpose4_ooff(x64+52,xtmpp+26,8192);
+    transpose4_ooff(x64+54,xtmpp+27,8192);
+    transpose4_ooff(x64+56,xtmpp+28,8192);
+    transpose4_ooff(x64+58,xtmpp+29,8192);
+    transpose4_ooff(x64+60,xtmpp+30,8192);
+    transpose4_ooff(x64+62,xtmpp+31,8192);
+    x64+=64;
+    xtmpp+=32;
+  }
+
+  dft16384((int16_t*)(xtmp),(int16_t*)ytmp,1);
+  dft16384((int16_t*)(xtmp+8192),(int16_t*)(ytmp+4096),1);
+
+
+  for (i=0; i<4096; i++) {
+    bfly2(ytmpp,ytmpp+4096,
+          y128p,y128p+4096,
+          tw32768_128p);
+    tw32768_128p++;
+    y128p++;
+    ytmpp++;
+  }
+
+  if (scale>0) {
+    y128p = y128;
+
+    for (i=0; i<512; i++) {
+      y128p[0]  = mulhi_int16(y128p[0],ONE_OVER_SQRT2_Q15_128);
+      y128p[1]  = mulhi_int16(y128p[1],ONE_OVER_SQRT2_Q15_128);
+      y128p[2]  = mulhi_int16(y128p[2],ONE_OVER_SQRT2_Q15_128);
+      y128p[3]  = mulhi_int16(y128p[3],ONE_OVER_SQRT2_Q15_128);
+      y128p[4]  = mulhi_int16(y128p[4],ONE_OVER_SQRT2_Q15_128);
+      y128p[5]  = mulhi_int16(y128p[5],ONE_OVER_SQRT2_Q15_128);
+      y128p[6]  = mulhi_int16(y128p[6],ONE_OVER_SQRT2_Q15_128);
+      y128p[7]  = mulhi_int16(y128p[7],ONE_OVER_SQRT2_Q15_128);
+      y128p[8]  = mulhi_int16(y128p[8],ONE_OVER_SQRT2_Q15_128);
+      y128p[9]  = mulhi_int16(y128p[9],ONE_OVER_SQRT2_Q15_128);
+      y128p[10] = mulhi_int16(y128p[10],ONE_OVER_SQRT2_Q15_128);
+      y128p[11] = mulhi_int16(y128p[11],ONE_OVER_SQRT2_Q15_128);
+      y128p[12] = mulhi_int16(y128p[12],ONE_OVER_SQRT2_Q15_128);
+      y128p[13] = mulhi_int16(y128p[13],ONE_OVER_SQRT2_Q15_128);
+      y128p[14] = mulhi_int16(y128p[14],ONE_OVER_SQRT2_Q15_128);
+      y128p[15] = mulhi_int16(y128p[15],ONE_OVER_SQRT2_Q15_128);
+      y128p+=16;
+    }
+  }
+}
+
+void idft32768(int16_t *x,int16_t *y,unsigned char scale)
+{
+
+  simdshort_q15_t xtmp[16384],*xtmpp,*x64 = (simdshort_q15_t *)x;
+  simd_q15_t ytmp[8192],*tw32768_128p=(simd_q15_t *)tw32768,*y128=(simd_q15_t *)y,*y128p=(simd_q15_t *)y;
+  simd_q15_t *ytmpp = &ytmp[0];
+  int i;
+  simd_q15_t ONE_OVER_SQRT2_Q15_128 = set1_int16(ONE_OVER_SQRT2_Q15);
+
+  xtmpp = xtmp;
+
+  for (i=0; i<256; i++) {
+    transpose4_ooff(x64  ,xtmpp,8192);
+    transpose4_ooff(x64+2,xtmpp+1,8192);
+    transpose4_ooff(x64+4,xtmpp+2,8192);
+    transpose4_ooff(x64+6,xtmpp+3,8192);
+    transpose4_ooff(x64+8,xtmpp+4,8192);
+    transpose4_ooff(x64+10,xtmpp+5,8192);
+    transpose4_ooff(x64+12,xtmpp+6,8192);
+    transpose4_ooff(x64+14,xtmpp+7,8192);
+    transpose4_ooff(x64+16,xtmpp+8,8192);
+    transpose4_ooff(x64+18,xtmpp+9,8192);
+    transpose4_ooff(x64+20,xtmpp+10,8192);
+    transpose4_ooff(x64+22,xtmpp+11,8192);
+    transpose4_ooff(x64+24,xtmpp+12,8192);
+    transpose4_ooff(x64+26,xtmpp+13,8192);
+    transpose4_ooff(x64+28,xtmpp+14,8192);
+    transpose4_ooff(x64+30,xtmpp+15,8192);
+    transpose4_ooff(x64+32,xtmpp+16,8192);
+    transpose4_ooff(x64+34,xtmpp+17,8192);
+    transpose4_ooff(x64+36,xtmpp+18,8192);
+    transpose4_ooff(x64+38,xtmpp+19,8192);
+    transpose4_ooff(x64+40,xtmpp+20,8192);
+    transpose4_ooff(x64+42,xtmpp+21,8192);
+    transpose4_ooff(x64+44,xtmpp+22,8192);
+    transpose4_ooff(x64+46,xtmpp+23,8192);
+    transpose4_ooff(x64+48,xtmpp+24,8192);
+    transpose4_ooff(x64+50,xtmpp+25,8192);
+    transpose4_ooff(x64+52,xtmpp+26,8192);
+    transpose4_ooff(x64+54,xtmpp+27,8192);
+    transpose4_ooff(x64+56,xtmpp+28,8192);
+    transpose4_ooff(x64+58,xtmpp+29,8192);
+    transpose4_ooff(x64+60,xtmpp+30,8192);
+    transpose4_ooff(x64+62,xtmpp+31,8192);
+    x64+=64;
+    xtmpp+=32;
+  }
+
+  idft16384((int16_t*)(xtmp),(int16_t*)ytmp,1);
+  idft16384((int16_t*)(xtmp+8192),(int16_t*)(ytmp+4096),1);
+
+
+  for (i=0; i<4096; i++) {
+    ibfly2(ytmpp,ytmpp+4096,
+           y128p,y128p+4096,
+           tw32768_128p);
+    tw32768_128p++;
+    y128p++;
+    ytmpp++;
+  }
+
+  if (scale>0) {
+    y128p = y128;
+
+    for (i=0; i<512; i++) {
+      y128p[0]  = mulhi_int16(y128p[0],ONE_OVER_SQRT2_Q15_128);
+      y128p[1]  = mulhi_int16(y128p[1],ONE_OVER_SQRT2_Q15_128);
+      y128p[2]  = mulhi_int16(y128p[2],ONE_OVER_SQRT2_Q15_128);
+      y128p[3]  = mulhi_int16(y128p[3],ONE_OVER_SQRT2_Q15_128);
+      y128p[4]  = mulhi_int16(y128p[4],ONE_OVER_SQRT2_Q15_128);
+      y128p[5]  = mulhi_int16(y128p[5],ONE_OVER_SQRT2_Q15_128);
+      y128p[6]  = mulhi_int16(y128p[6],ONE_OVER_SQRT2_Q15_128);
+      y128p[7]  = mulhi_int16(y128p[7],ONE_OVER_SQRT2_Q15_128);
+      y128p[8]  = mulhi_int16(y128p[8],ONE_OVER_SQRT2_Q15_128);
+      y128p[9]  = mulhi_int16(y128p[9],ONE_OVER_SQRT2_Q15_128);
+      y128p[10] = mulhi_int16(y128p[10],ONE_OVER_SQRT2_Q15_128);
+      y128p[11] = mulhi_int16(y128p[11],ONE_OVER_SQRT2_Q15_128);
+      y128p[12] = mulhi_int16(y128p[12],ONE_OVER_SQRT2_Q15_128);
+      y128p[13] = mulhi_int16(y128p[13],ONE_OVER_SQRT2_Q15_128);
+      y128p[14] = mulhi_int16(y128p[14],ONE_OVER_SQRT2_Q15_128);
+      y128p[15] = mulhi_int16(y128p[15],ONE_OVER_SQRT2_Q15_128);
+      y128p+=16;
+    }
+  }
+}
+
+int16_t twa768[512] __attribute__((aligned(32)));
+int16_t twb768[512] __attribute__((aligned(32)));
+
+// 256 x 3
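+/* idft768/dft768: 768 = 3 x 256.  The input is de-interleaved into three
+   256-sample streams (one 32-bit word per complex sample), each stream
+   goes through a 256-point (i)DFT, and (i)bfly3() radix-3 butterflies
+   recombine them with the twa768/twb768 twiddles.  With scale == 1 the
+   output is scaled by ONE_OVER_SQRT3_Q15. */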
+void idft768(int16_t *input, int16_t *output, unsigned char scale)
+{
+  int i,i2,j;
+  uint32_t tmp[3][256] __attribute__((aligned(32)));
+  uint32_t tmpo[3][256] __attribute__((aligned(32)));
+  simd_q15_t *y128p=(simd_q15_t*)output;
+  simd_q15_t ONE_OVER_SQRT3_Q15_128 = set1_int16(ONE_OVER_SQRT3_Q15);
+
+  for (i=0,j=0; i<256; i++) {
+    tmp[0][i] = ((uint32_t *)input)[j++];
+    tmp[1][i] = ((uint32_t *)input)[j++];
+    tmp[2][i] = ((uint32_t *)input)[j++];
+  }
+
+  idft256((int16_t*)(tmp[0]),(int16_t*)(tmpo[0]),1);
+  idft256((int16_t*)(tmp[1]),(int16_t*)(tmpo[1]),1);
+  idft256((int16_t*)(tmp[2]),(int16_t*)(tmpo[2]),1);
+
+  for (i=0,i2=0; i<512; i+=8,i2+=4)  {
+    ibfly3((simd_q15_t*)(&tmpo[0][i2]),(simd_q15_t*)(&tmpo[1][i2]),((simd_q15_t*)&tmpo[2][i2]),
+          (simd_q15_t*)(output+i),(simd_q15_t*)(output+512+i),(simd_q15_t*)(output+1024+i),
+          (simd_q15_t*)(twa768+i),(simd_q15_t*)(twb768+i));
+  }
+
+
+  if (scale==1) {
+    for (i=0; i<12; i++) {
+      y128p[0]  = mulhi_int16(y128p[0],ONE_OVER_SQRT3_Q15_128);
+      y128p[1]  = mulhi_int16(y128p[1],ONE_OVER_SQRT3_Q15_128);
+      y128p[2]  = mulhi_int16(y128p[2],ONE_OVER_SQRT3_Q15_128);
+      y128p[3]  = mulhi_int16(y128p[3],ONE_OVER_SQRT3_Q15_128);
+      y128p[4]  = mulhi_int16(y128p[4],ONE_OVER_SQRT3_Q15_128);
+      y128p[5]  = mulhi_int16(y128p[5],ONE_OVER_SQRT3_Q15_128);
+      y128p[6]  = mulhi_int16(y128p[6],ONE_OVER_SQRT3_Q15_128);
+      y128p[7]  = mulhi_int16(y128p[7],ONE_OVER_SQRT3_Q15_128);
+      y128p[8]  = mulhi_int16(y128p[8],ONE_OVER_SQRT3_Q15_128);
+      y128p[9]  = mulhi_int16(y128p[9],ONE_OVER_SQRT3_Q15_128);
+      y128p[10] = mulhi_int16(y128p[10],ONE_OVER_SQRT3_Q15_128);
+      y128p[11] = mulhi_int16(y128p[11],ONE_OVER_SQRT3_Q15_128);
+      y128p[12] = mulhi_int16(y128p[12],ONE_OVER_SQRT3_Q15_128);
+      y128p[13] = mulhi_int16(y128p[13],ONE_OVER_SQRT3_Q15_128);
+      y128p[14] = mulhi_int16(y128p[14],ONE_OVER_SQRT3_Q15_128);
+      y128p[15] = mulhi_int16(y128p[15],ONE_OVER_SQRT3_Q15_128);
+      y128p+=16;
+    }
+  }
+}
+
+void dft768(int16_t *input, int16_t *output, unsigned char scale)
+{
+  int i,i2,j;
+  uint32_t tmp[3][256] __attribute__((aligned(32)));
+  uint32_t tmpo[3][256] __attribute__((aligned(32)));
+  simd_q15_t *y128p=(simd_q15_t*)output;
+  simd_q15_t ONE_OVER_SQRT3_Q15_128 = set1_int16(ONE_OVER_SQRT3_Q15);
+
+  for (i=0,j=0; i<256; i++) {
+    tmp[0][i] = ((uint32_t *)input)[j++];
+    tmp[1][i] = ((uint32_t *)input)[j++];
+    tmp[2][i] = ((uint32_t *)input)[j++];
+  }
+
+  dft256((int16_t*)(tmp[0]),(int16_t*)(tmpo[0]),1);
+  dft256((int16_t*)(tmp[1]),(int16_t*)(tmpo[1]),1);
+  dft256((int16_t*)(tmp[2]),(int16_t*)(tmpo[2]),1);
+
+#ifndef MR_MAIN
+  if (LOG_DUMPFLAG(DEBUG_DFT)) {
+    LOG_M("dft768out0.m","o0",tmpo[0],1024,1,1);
+    LOG_M("dft768out1.m","o1",tmpo[1],1024,1,1);
+    LOG_M("dft768out2.m","o2",tmpo[2],1024,1,1);
+  }
+#endif
+  for (i=0,i2=0; i<512; i+=8,i2+=4)  {
+    bfly3((simd_q15_t*)(&tmpo[0][i2]),(simd_q15_t*)(&tmpo[1][i2]),(simd_q15_t*)(&tmpo[2][i2]),
+          (simd_q15_t*)(output+i),(simd_q15_t*)(output+512+i),(simd_q15_t*)(output+1024+i),
+          (simd_q15_t*)(twa768+i),(simd_q15_t*)(twb768+i));
+  }
+
+  if (scale==1) {
+    for (i=0; i<12; i++) {
+      y128p[0]  = mulhi_int16(y128p[0],ONE_OVER_SQRT3_Q15_128);
+      y128p[1]  = mulhi_int16(y128p[1],ONE_OVER_SQRT3_Q15_128);
+      y128p[2]  = mulhi_int16(y128p[2],ONE_OVER_SQRT3_Q15_128);
+      y128p[3]  = mulhi_int16(y128p[3],ONE_OVER_SQRT3_Q15_128);
+      y128p[4]  = mulhi_int16(y128p[4],ONE_OVER_SQRT3_Q15_128);
+      y128p[5]  = mulhi_int16(y128p[5],ONE_OVER_SQRT3_Q15_128);
+      y128p[6]  = mulhi_int16(y128p[6],ONE_OVER_SQRT3_Q15_128);
+      y128p[7]  = mulhi_int16(y128p[7],ONE_OVER_SQRT3_Q15_128);
+      y128p[8]  = mulhi_int16(y128p[8],ONE_OVER_SQRT3_Q15_128);
+      y128p[9]  = mulhi_int16(y128p[9],ONE_OVER_SQRT3_Q15_128);
+      y128p[10] = mulhi_int16(y128p[10],ONE_OVER_SQRT3_Q15_128);
+      y128p[11] = mulhi_int16(y128p[11],ONE_OVER_SQRT3_Q15_128);
+      y128p[12] = mulhi_int16(y128p[12],ONE_OVER_SQRT3_Q15_128);
+      y128p[13] = mulhi_int16(y128p[13],ONE_OVER_SQRT3_Q15_128);
+      y128p[14] = mulhi_int16(y128p[14],ONE_OVER_SQRT3_Q15_128);
+      y128p[15] = mulhi_int16(y128p[15],ONE_OVER_SQRT3_Q15_128);
+      y128p+=16;
+    }
+  }
+}
+
+int16_t twa1536[1024] __attribute__((aligned(32)));
+int16_t twb1536[1024] __attribute__((aligned(32)));
+
+// 512 x 3
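+/* 1536 = 3 x 512: same radix-3 scheme as the 768-point pair, built on
+   idft512/dft512. */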
+void idft1536(int16_t *input, int16_t *output, unsigned char scale)
+{
+  int i,i2,j;
+  uint32_t tmp[3][512] __attribute__((aligned(32)));
+  uint32_t tmpo[3][512] __attribute__((aligned(32)));
+  simd_q15_t *y128p=(simd_q15_t*)output;
+  simd_q15_t ONE_OVER_SQRT3_Q15_128 = set1_int16(ONE_OVER_SQRT3_Q15);
+
+  for (i=0,j=0; i<512; i++) {
+    tmp[0][i] = ((uint32_t *)input)[j++];
+    tmp[1][i] = ((uint32_t *)input)[j++];
+    tmp[2][i] = ((uint32_t *)input)[j++];
+  }
+
+  idft512((int16_t*)(tmp[0]),(int16_t*)(tmpo[0]),1);
+  idft512((int16_t*)(tmp[1]),(int16_t*)(tmpo[1]),1);
+  idft512((int16_t*)(tmp[2]),(int16_t*)(tmpo[2]),1);
+
+  for (i=0,i2=0; i<1024; i+=8,i2+=4)  {
+    ibfly3((simd_q15_t*)(&tmpo[0][i2]),(simd_q15_t*)(&tmpo[1][i2]),((simd_q15_t*)&tmpo[2][i2]),
+          (simd_q15_t*)(output+i),(simd_q15_t*)(output+1024+i),(simd_q15_t*)(output+2048+i),
+          (simd_q15_t*)(twa1536+i),(simd_q15_t*)(twb1536+i));
+  }
+
+
+  if (scale==1) {
+    for (i=0; i<24; i++) {
+      y128p[0]  = mulhi_int16(y128p[0],ONE_OVER_SQRT3_Q15_128);
+      y128p[1]  = mulhi_int16(y128p[1],ONE_OVER_SQRT3_Q15_128);
+      y128p[2]  = mulhi_int16(y128p[2],ONE_OVER_SQRT3_Q15_128);
+      y128p[3]  = mulhi_int16(y128p[3],ONE_OVER_SQRT3_Q15_128);
+      y128p[4]  = mulhi_int16(y128p[4],ONE_OVER_SQRT3_Q15_128);
+      y128p[5]  = mulhi_int16(y128p[5],ONE_OVER_SQRT3_Q15_128);
+      y128p[6]  = mulhi_int16(y128p[6],ONE_OVER_SQRT3_Q15_128);
+      y128p[7]  = mulhi_int16(y128p[7],ONE_OVER_SQRT3_Q15_128);
+      y128p[8]  = mulhi_int16(y128p[8],ONE_OVER_SQRT3_Q15_128);
+      y128p[9]  = mulhi_int16(y128p[9],ONE_OVER_SQRT3_Q15_128);
+      y128p[10] = mulhi_int16(y128p[10],ONE_OVER_SQRT3_Q15_128);
+      y128p[11] = mulhi_int16(y128p[11],ONE_OVER_SQRT3_Q15_128);
+      y128p[12] = mulhi_int16(y128p[12],ONE_OVER_SQRT3_Q15_128);
+      y128p[13] = mulhi_int16(y128p[13],ONE_OVER_SQRT3_Q15_128);
+      y128p[14] = mulhi_int16(y128p[14],ONE_OVER_SQRT3_Q15_128);
+      y128p[15] = mulhi_int16(y128p[15],ONE_OVER_SQRT3_Q15_128);
+      y128p+=16;
+    }
+  }
+}
+
+void dft1536(int16_t *input, int16_t *output, unsigned char scale)
+{
+  int i,i2,j;
+  uint32_t tmp[3][512] __attribute__((aligned(32)));
+  uint32_t tmpo[3][512] __attribute__((aligned(32)));
+  simd_q15_t *y128p=(simd_q15_t*)output;
+  simd_q15_t ONE_OVER_SQRT3_Q15_128 = set1_int16(ONE_OVER_SQRT3_Q15);
+
+  for (i=0,j=0; i<512; i++) {
+    tmp[0][i] = ((uint32_t *)input)[j++];
+    tmp[1][i] = ((uint32_t *)input)[j++];
+    tmp[2][i] = ((uint32_t *)input)[j++];
+  }
+
+  dft512((int16_t*)(tmp[0]),(int16_t*)(tmpo[0]),1);
+  dft512((int16_t*)(tmp[1]),(int16_t*)(tmpo[1]),1);
+  dft512((int16_t*)(tmp[2]),(int16_t*)(tmpo[2]),1);
+
+#ifndef MR_MAIN
+  if (LOG_DUMPFLAG(DEBUG_DFT)) {
+    LOG_M("dft1536out0.m","o0",tmpo[0],2048,1,1);
+    LOG_M("dft1536out1.m","o1",tmpo[1],2048,1,1);
+    LOG_M("dft1536out2.m","o2",tmpo[2],2048,1,1);
+  }
+#endif
+  for (i=0,i2=0; i<1024; i+=8,i2+=4)  {
+    bfly3((simd_q15_t*)(&tmpo[0][i2]),(simd_q15_t*)(&tmpo[1][i2]),(simd_q15_t*)(&tmpo[2][i2]),
+          (simd_q15_t*)(output+i),(simd_q15_t*)(output+1024+i),(simd_q15_t*)(output+2048+i),
+          (simd_q15_t*)(twa1536+i),(simd_q15_t*)(twb1536+i));
+  }
+
+  if (scale==1) {
+    for (i=0; i<24; i++) {
+      y128p[0]  = mulhi_int16(y128p[0],ONE_OVER_SQRT3_Q15_128);
+      y128p[1]  = mulhi_int16(y128p[1],ONE_OVER_SQRT3_Q15_128);
+      y128p[2]  = mulhi_int16(y128p[2],ONE_OVER_SQRT3_Q15_128);
+      y128p[3]  = mulhi_int16(y128p[3],ONE_OVER_SQRT3_Q15_128);
+      y128p[4]  = mulhi_int16(y128p[4],ONE_OVER_SQRT3_Q15_128);
+      y128p[5]  = mulhi_int16(y128p[5],ONE_OVER_SQRT3_Q15_128);
+      y128p[6]  = mulhi_int16(y128p[6],ONE_OVER_SQRT3_Q15_128);
+      y128p[7]  = mulhi_int16(y128p[7],ONE_OVER_SQRT3_Q15_128);
+      y128p[8]  = mulhi_int16(y128p[8],ONE_OVER_SQRT3_Q15_128);
+      y128p[9]  = mulhi_int16(y128p[9],ONE_OVER_SQRT3_Q15_128);
+      y128p[10] = mulhi_int16(y128p[10],ONE_OVER_SQRT3_Q15_128);
+      y128p[11] = mulhi_int16(y128p[11],ONE_OVER_SQRT3_Q15_128);
+      y128p[12] = mulhi_int16(y128p[12],ONE_OVER_SQRT3_Q15_128);
+      y128p[13] = mulhi_int16(y128p[13],ONE_OVER_SQRT3_Q15_128);
+      y128p[14] = mulhi_int16(y128p[14],ONE_OVER_SQRT3_Q15_128);
+      y128p[15] = mulhi_int16(y128p[15],ONE_OVER_SQRT3_Q15_128);
+      y128p+=16;
+    }
+  }
+}
+
+int16_t twa3072[2048] __attribute__((aligned(32)));
+int16_t twb3072[2048] __attribute__((aligned(32)));
+// 1024 x 3
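+/* 3072 = 3 x 1024: radix-3 over the 1024-point transforms. */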
+void dft3072(int16_t *input, int16_t *output,unsigned char scale)
+{
+  int i,i2,j;
+  uint32_t tmp[3][1024] __attribute__((aligned(32)));
+  uint32_t tmpo[3][1024] __attribute__((aligned(32)));
+  simd_q15_t *y128p=(simd_q15_t*)output;
+  simd_q15_t ONE_OVER_SQRT3_Q15_128 = set1_int16(ONE_OVER_SQRT3_Q15);
+
+  for (i=0,j=0; i<1024; i++) {
+    tmp[0][i] = ((uint32_t *)input)[j++];
+    tmp[1][i] = ((uint32_t *)input)[j++];
+    tmp[2][i] = ((uint32_t *)input)[j++];
+  }
+
+  dft1024((int16_t*)(tmp[0]),(int16_t*)(tmpo[0]),1);
+  dft1024((int16_t*)(tmp[1]),(int16_t*)(tmpo[1]),1);
+  dft1024((int16_t*)(tmp[2]),(int16_t*)(tmpo[2]),1);
+
+  for (i=0,i2=0; i<2048; i+=8,i2+=4)  {
+    bfly3((simd_q15_t*)(&tmpo[0][i2]),(simd_q15_t*)(&tmpo[1][i2]),(simd_q15_t*)(&tmpo[2][i2]),
+          (simd_q15_t*)(output+i),(simd_q15_t*)(output+2048+i),(simd_q15_t*)(output+4096+i),
+          (simd_q15_t*)(twa3072+i),(simd_q15_t*)(twb3072+i));
+  }
+
+  if (scale==1) {
+    for (i=0; i<48; i++) {
+      y128p[0]  = mulhi_int16(y128p[0],ONE_OVER_SQRT3_Q15_128);
+      y128p[1]  = mulhi_int16(y128p[1],ONE_OVER_SQRT3_Q15_128);
+      y128p[2]  = mulhi_int16(y128p[2],ONE_OVER_SQRT3_Q15_128);
+      y128p[3]  = mulhi_int16(y128p[3],ONE_OVER_SQRT3_Q15_128);
+      y128p[4]  = mulhi_int16(y128p[4],ONE_OVER_SQRT3_Q15_128);
+      y128p[5]  = mulhi_int16(y128p[5],ONE_OVER_SQRT3_Q15_128);
+      y128p[6]  = mulhi_int16(y128p[6],ONE_OVER_SQRT3_Q15_128);
+      y128p[7]  = mulhi_int16(y128p[7],ONE_OVER_SQRT3_Q15_128);
+      y128p[8]  = mulhi_int16(y128p[8],ONE_OVER_SQRT3_Q15_128);
+      y128p[9]  = mulhi_int16(y128p[9],ONE_OVER_SQRT3_Q15_128);
+      y128p[10] = mulhi_int16(y128p[10],ONE_OVER_SQRT3_Q15_128);
+      y128p[11] = mulhi_int16(y128p[11],ONE_OVER_SQRT3_Q15_128);
+      y128p[12] = mulhi_int16(y128p[12],ONE_OVER_SQRT3_Q15_128);
+      y128p[13] = mulhi_int16(y128p[13],ONE_OVER_SQRT3_Q15_128);
+      y128p[14] = mulhi_int16(y128p[14],ONE_OVER_SQRT3_Q15_128);
+      y128p[15] = mulhi_int16(y128p[15],ONE_OVER_SQRT3_Q15_128);
+      y128p+=16;
+    }
+  }
+}
+
+void idft3072(int16_t *input, int16_t *output,unsigned char scale)
+{
+  int i,i2,j;
+  uint32_t tmp[3][1024] __attribute__((aligned(32)));
+  uint32_t tmpo[3][1024] __attribute__((aligned(32)));
+  simd_q15_t *y128p=(simd_q15_t*)output;
+  simd_q15_t ONE_OVER_SQRT3_Q15_128 = set1_int16(ONE_OVER_SQRT3_Q15);
+
+  for (i=0,j=0; i<1024; i++) {
+    tmp[0][i] = ((uint32_t *)input)[j++];
+    tmp[1][i] = ((uint32_t *)input)[j++];
+    tmp[2][i] = ((uint32_t *)input)[j++];
+  }
+  idft1024((int16_t*)(tmp[0]),(int16_t*)(tmpo[0]),1);
+  idft1024((int16_t*)(tmp[1]),(int16_t*)(tmpo[1]),1);
+  idft1024((int16_t*)(tmp[2]),(int16_t*)(tmpo[2]),1);
+
+  for (i=0,i2=0; i<2048; i+=8,i2+=4)  {
+    ibfly3((simd_q15_t*)(&tmpo[0][i2]),(simd_q15_t*)(&tmpo[1][i2]),((simd_q15_t*)&tmpo[2][i2]),
+          (simd_q15_t*)(output+i),(simd_q15_t*)(output+2048+i),(simd_q15_t*)(output+4096+i),
+          (simd_q15_t*)(twa3072+i),(simd_q15_t*)(twb3072+i));
+  }
+
+
+  if (scale==1) {
+    for (i=0; i<48; i++) {
+      y128p[0]  = mulhi_int16(y128p[0],ONE_OVER_SQRT3_Q15_128);
+      y128p[1]  = mulhi_int16(y128p[1],ONE_OVER_SQRT3_Q15_128);
+      y128p[2]  = mulhi_int16(y128p[2],ONE_OVER_SQRT3_Q15_128);
+      y128p[3]  = mulhi_int16(y128p[3],ONE_OVER_SQRT3_Q15_128);
+      y128p[4]  = mulhi_int16(y128p[4],ONE_OVER_SQRT3_Q15_128);
+      y128p[5]  = mulhi_int16(y128p[5],ONE_OVER_SQRT3_Q15_128);
+      y128p[6]  = mulhi_int16(y128p[6],ONE_OVER_SQRT3_Q15_128);
+      y128p[7]  = mulhi_int16(y128p[7],ONE_OVER_SQRT3_Q15_128);
+      y128p[8]  = mulhi_int16(y128p[8],ONE_OVER_SQRT3_Q15_128);
+      y128p[9]  = mulhi_int16(y128p[9],ONE_OVER_SQRT3_Q15_128);
+      y128p[10] = mulhi_int16(y128p[10],ONE_OVER_SQRT3_Q15_128);
+      y128p[11] = mulhi_int16(y128p[11],ONE_OVER_SQRT3_Q15_128);
+      y128p[12] = mulhi_int16(y128p[12],ONE_OVER_SQRT3_Q15_128);
+      y128p[13] = mulhi_int16(y128p[13],ONE_OVER_SQRT3_Q15_128);
+      y128p[14] = mulhi_int16(y128p[14],ONE_OVER_SQRT3_Q15_128);
+      y128p[15] = mulhi_int16(y128p[15],ONE_OVER_SQRT3_Q15_128);
+      y128p+=16;
+    }
+  }
+}
+
+
+int16_t twa6144[4096] __attribute__((aligned(32)));
+int16_t twb6144[4096] __attribute__((aligned(32)));
+
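+/* 6144 = 3 x 2048: radix-3 over the 2048-point transforms. */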
+void idft6144(int16_t *input, int16_t *output,unsigned char scale)
+{
+  int i,i2,j;
+  uint32_t tmp[3][2048] __attribute__((aligned(32)));
+  uint32_t tmpo[3][2048] __attribute__((aligned(32)));
+  simd_q15_t *y128p=(simd_q15_t*)output;
+  simd_q15_t ONE_OVER_SQRT3_Q15_128 = set1_int16(ONE_OVER_SQRT3_Q15);
+
+  for (i=0,j=0; i<2048; i++) {
+    tmp[0][i] = ((uint32_t *)input)[j++];
+    tmp[1][i] = ((uint32_t *)input)[j++];
+    tmp[2][i] = ((uint32_t *)input)[j++];
+  }
+
+  idft2048((int16_t*)(tmp[0]),(int16_t*)(tmpo[0]),1);
+  idft2048((int16_t*)(tmp[1]),(int16_t*)(tmpo[1]),1);
+  idft2048((int16_t*)(tmp[2]),(int16_t*)(tmpo[2]),1);
+#ifndef MR_MAIN
+  if (LOG_DUMPFLAG(DEBUG_DFT)) {
+    LOG_M("idft6144in.m","in",input,6144,1,1);
+    LOG_M("idft6144out0.m","o0",tmpo[0],2048,1,1);
+    LOG_M("idft6144out1.m","o1",tmpo[1],2048,1,1);
+    LOG_M("idft6144out2.m","o2",tmpo[2],2048,1,1);
+  }
+#endif
+  for (i=0,i2=0; i<4096; i+=8,i2+=4)  {
+    ibfly3((simd_q15_t*)(&tmpo[0][i2]),(simd_q15_t*)(&tmpo[1][i2]),((simd_q15_t*)&tmpo[2][i2]),
+	   (simd_q15_t*)(output+i),(simd_q15_t*)(output+4096+i),(simd_q15_t*)(output+8192+i),
+	   (simd_q15_t*)(twa6144+i),(simd_q15_t*)(twb6144+i));
+  }
+
+
+  if (scale==1) {
+    for (i=0; i<96; i++) {
+      y128p[0]  = mulhi_int16(y128p[0],ONE_OVER_SQRT3_Q15_128);
+      y128p[1]  = mulhi_int16(y128p[1],ONE_OVER_SQRT3_Q15_128);
+      y128p[2]  = mulhi_int16(y128p[2],ONE_OVER_SQRT3_Q15_128);
+      y128p[3]  = mulhi_int16(y128p[3],ONE_OVER_SQRT3_Q15_128);
+      y128p[4]  = mulhi_int16(y128p[4],ONE_OVER_SQRT3_Q15_128);
+      y128p[5]  = mulhi_int16(y128p[5],ONE_OVER_SQRT3_Q15_128);
+      y128p[6]  = mulhi_int16(y128p[6],ONE_OVER_SQRT3_Q15_128);
+      y128p[7]  = mulhi_int16(y128p[7],ONE_OVER_SQRT3_Q15_128);
+      y128p[8]  = mulhi_int16(y128p[8],ONE_OVER_SQRT3_Q15_128);
+      y128p[9]  = mulhi_int16(y128p[9],ONE_OVER_SQRT3_Q15_128);
+      y128p[10] = mulhi_int16(y128p[10],ONE_OVER_SQRT3_Q15_128);
+      y128p[11] = mulhi_int16(y128p[11],ONE_OVER_SQRT3_Q15_128);
+      y128p[12] = mulhi_int16(y128p[12],ONE_OVER_SQRT3_Q15_128);
+      y128p[13] = mulhi_int16(y128p[13],ONE_OVER_SQRT3_Q15_128);
+      y128p[14] = mulhi_int16(y128p[14],ONE_OVER_SQRT3_Q15_128);
+      y128p[15] = mulhi_int16(y128p[15],ONE_OVER_SQRT3_Q15_128);
+      y128p+=16;
+    }
+  }
+}
+
+
+void dft6144(int16_t *input, int16_t *output,unsigned char scale)
+{
+  int i,i2,j;
+  uint32_t tmp[3][2048] __attribute__((aligned(32)));
+  uint32_t tmpo[3][2048] __attribute__((aligned(32)));
+  simd_q15_t *y128p=(simd_q15_t*)output;
+  simd_q15_t ONE_OVER_SQRT3_Q15_128 = set1_int16(ONE_OVER_SQRT3_Q15);
+
+  for (i=0,j=0; i<2048; i++) {
+    tmp[0][i] = ((uint32_t *)input)[j++];
+    tmp[1][i] = ((uint32_t *)input)[j++];
+    tmp[2][i] = ((uint32_t *)input)[j++];
+  }
+
+  dft2048((int16_t*)(tmp[0]),(int16_t*)(tmpo[0]),1);
+  dft2048((int16_t*)(tmp[1]),(int16_t*)(tmpo[1]),1);
+  dft2048((int16_t*)(tmp[2]),(int16_t*)(tmpo[2]),1);
+
+#ifndef MR_MAIN
+  if (LOG_DUMPFLAG(DEBUG_DFT)) {
+    LOG_M("ft6144out0.m","o0",tmpo[0],2048,1,1);
+    LOG_M("ft6144out1.m","o1",tmpo[1],2048,1,1);
+    LOG_M("ft6144out2.m","o2",tmpo[2],2048,1,1);
+  }
+#endif
+  for (i=0,i2=0; i<4096; i+=8,i2+=4)  {
+    bfly3((simd_q15_t*)(&tmpo[0][i2]),(simd_q15_t*)(&tmpo[1][i2]),(simd_q15_t*)(&tmpo[2][i2]),
+          (simd_q15_t*)(output+i),(simd_q15_t*)(output+4096+i),(simd_q15_t*)(output+8192+i),
+          (simd_q15_t*)(twa6144+i),(simd_q15_t*)(twb6144+i));
+  }
+
+  if (scale==1) {
+    for (i=0; i<96; i++) {
+      y128p[0]  = mulhi_int16(y128p[0],ONE_OVER_SQRT3_Q15_128);
+      y128p[1]  = mulhi_int16(y128p[1],ONE_OVER_SQRT3_Q15_128);
+      y128p[2]  = mulhi_int16(y128p[2],ONE_OVER_SQRT3_Q15_128);
+      y128p[3]  = mulhi_int16(y128p[3],ONE_OVER_SQRT3_Q15_128);
+      y128p[4]  = mulhi_int16(y128p[4],ONE_OVER_SQRT3_Q15_128);
+      y128p[5]  = mulhi_int16(y128p[5],ONE_OVER_SQRT3_Q15_128);
+      y128p[6]  = mulhi_int16(y128p[6],ONE_OVER_SQRT3_Q15_128);
+      y128p[7]  = mulhi_int16(y128p[7],ONE_OVER_SQRT3_Q15_128);
+      y128p[8]  = mulhi_int16(y128p[8],ONE_OVER_SQRT3_Q15_128);
+      y128p[9]  = mulhi_int16(y128p[9],ONE_OVER_SQRT3_Q15_128);
+      y128p[10] = mulhi_int16(y128p[10],ONE_OVER_SQRT3_Q15_128);
+      y128p[11] = mulhi_int16(y128p[11],ONE_OVER_SQRT3_Q15_128);
+      y128p[12] = mulhi_int16(y128p[12],ONE_OVER_SQRT3_Q15_128);
+      y128p[13] = mulhi_int16(y128p[13],ONE_OVER_SQRT3_Q15_128);
+      y128p[14] = mulhi_int16(y128p[14],ONE_OVER_SQRT3_Q15_128);
+      y128p[15] = mulhi_int16(y128p[15],ONE_OVER_SQRT3_Q15_128);
+      y128p+=16;
+    }
+  }
+}
+
+int16_t twa9216[6144] __attribute__((aligned(32)));
+int16_t twb9216[6144] __attribute__((aligned(32)));
+// 3072 x 3
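+/* 9216 = 3 x 3072: stubs only -- both functions abort via AssertFatal()
+   until an implementation is added. */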
+void dft9216(int16_t *input, int16_t *output,uint8_t scale) {
+
+  AssertFatal(1==0,"dft9216 not implemented yet\n");
+}
+
+void idft9216(int16_t *input, int16_t *output,uint8_t scale) {
+
+  AssertFatal(1==0,"idft9216 not implemented yet\n");
+}
+
+int16_t twa12288[8192] __attribute__((aligned(32)));
+int16_t twb12288[8192] __attribute__((aligned(32)));
+// 4096 x 3
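+/* 12288 = 3 x 4096: radix-3 over the 4096-point transforms.  Unlike the
+   smaller radix-3 sizes, 'scale' is forwarded to the dft4096/idft4096
+   sub-transforms instead of a fixed 1. */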
+void dft12288(int16_t *input, int16_t *output,unsigned char scale)
+{
+  int i,i2,j;
+  uint32_t tmp[3][4096] __attribute__((aligned(32)));
+  uint32_t tmpo[3][4096] __attribute__((aligned(32)));
+  simd_q15_t *y128p=(simd_q15_t*)output;
+  simd_q15_t ONE_OVER_SQRT3_Q15_128 = set1_int16(ONE_OVER_SQRT3_Q15);
+
+  for (i=0,j=0; i<4096; i++) {
+    tmp[0][i] = ((uint32_t *)input)[j++];
+    tmp[1][i] = ((uint32_t *)input)[j++];
+    tmp[2][i] = ((uint32_t *)input)[j++];
+  }
+
+  dft4096((int16_t*)(tmp[0]),(int16_t*)(tmpo[0]),scale);
+  dft4096((int16_t*)(tmp[1]),(int16_t*)(tmpo[1]),scale);
+  dft4096((int16_t*)(tmp[2]),(int16_t*)(tmpo[2]),scale);
+#ifndef MR_MAIN
+  if (LOG_DUMPFLAG(DEBUG_DFT)) {
+    LOG_M("dft12288out0.m","o0",tmpo[0],4096,1,1);
+    LOG_M("dft12288out1.m","o1",tmpo[1],4096,1,1);
+    LOG_M("dft12288out2.m","o2",tmpo[2],4096,1,1);
+  }
+#endif
+  for (i=0,i2=0; i<8192; i+=8,i2+=4)  {
+    bfly3((simd_q15_t*)(&tmpo[0][i2]),(simd_q15_t*)(&tmpo[1][i2]),(simd_q15_t*)(&tmpo[2][i2]),
+          (simd_q15_t*)(output+i),(simd_q15_t*)(output+8192+i),(simd_q15_t*)(output+16384+i),
+          (simd_q15_t*)(twa12288+i),(simd_q15_t*)(twb12288+i));
+  }
+
+  if (scale==1) {
+    for (i=0; i<192; i++) {
+      y128p[0]  = mulhi_int16(y128p[0],ONE_OVER_SQRT3_Q15_128);
+      y128p[1]  = mulhi_int16(y128p[1],ONE_OVER_SQRT3_Q15_128);
+      y128p[2]  = mulhi_int16(y128p[2],ONE_OVER_SQRT3_Q15_128);
+      y128p[3]  = mulhi_int16(y128p[3],ONE_OVER_SQRT3_Q15_128);
+      y128p[4]  = mulhi_int16(y128p[4],ONE_OVER_SQRT3_Q15_128);
+      y128p[5]  = mulhi_int16(y128p[5],ONE_OVER_SQRT3_Q15_128);
+      y128p[6]  = mulhi_int16(y128p[6],ONE_OVER_SQRT3_Q15_128);
+      y128p[7]  = mulhi_int16(y128p[7],ONE_OVER_SQRT3_Q15_128);
+      y128p[8]  = mulhi_int16(y128p[8],ONE_OVER_SQRT3_Q15_128);
+      y128p[9]  = mulhi_int16(y128p[9],ONE_OVER_SQRT3_Q15_128);
+      y128p[10] = mulhi_int16(y128p[10],ONE_OVER_SQRT3_Q15_128);
+      y128p[11] = mulhi_int16(y128p[11],ONE_OVER_SQRT3_Q15_128);
+      y128p[12] = mulhi_int16(y128p[12],ONE_OVER_SQRT3_Q15_128);
+      y128p[13] = mulhi_int16(y128p[13],ONE_OVER_SQRT3_Q15_128);
+      y128p[14] = mulhi_int16(y128p[14],ONE_OVER_SQRT3_Q15_128);
+      y128p[15] = mulhi_int16(y128p[15],ONE_OVER_SQRT3_Q15_128);
+      y128p+=16;
+    }
+  }
+}
+
+void idft12288(int16_t *input, int16_t *output,unsigned char scale)
+{
+  int i,i2,j;
+  uint32_t tmp[3][4096] __attribute__((aligned(32)));
+  uint32_t tmpo[3][4096] __attribute__((aligned(32)));
+  simd_q15_t *y128p=(simd_q15_t*)output;
+  simd_q15_t ONE_OVER_SQRT3_Q15_128 = set1_int16(ONE_OVER_SQRT3_Q15);
+
+  for (i=0,j=0; i<4096; i++) {
+    tmp[0][i] = ((uint32_t *)input)[j++];
+    tmp[1][i] = ((uint32_t *)input)[j++];
+    tmp[2][i] = ((uint32_t *)input)[j++];
+  }
+
+  idft4096((int16_t*)(tmp[0]),(int16_t*)(tmpo[0]),scale);
+  idft4096((int16_t*)(tmp[1]),(int16_t*)(tmpo[1]),scale);
+  idft4096((int16_t*)(tmp[2]),(int16_t*)(tmpo[2]),scale);
+#ifndef MR_MAIN
+  if (LOG_DUMPFLAG(DEBUG_DFT)) {
+    LOG_M("idft12288in.m","in",input,12288,1,1);
+    LOG_M("idft12288out0.m","o0",tmpo[0],4096,1,1);
+    LOG_M("idft12288out1.m","o1",tmpo[1],4096,1,1);
+    LOG_M("idft12288out2.m","o2",tmpo[2],4096,1,1);
+  }
+#endif
+  for (i=0,i2=0; i<8192; i+=8,i2+=4)  {
+    ibfly3((simd_q15_t*)(&tmpo[0][i2]),(simd_q15_t*)(&tmpo[1][i2]),((simd_q15_t*)&tmpo[2][i2]),
+          (simd_q15_t*)(output+i),(simd_q15_t*)(output+8192+i),(simd_q15_t*)(output+16384+i),
+          (simd_q15_t*)(twa12288+i),(simd_q15_t*)(twb12288+i));
+  }
+
+  if (scale==1) {
+    for (i=0; i<192; i++) {
+      y128p[0]  = mulhi_int16(y128p[0],ONE_OVER_SQRT3_Q15_128);
+      y128p[1]  = mulhi_int16(y128p[1],ONE_OVER_SQRT3_Q15_128);
+      y128p[2]  = mulhi_int16(y128p[2],ONE_OVER_SQRT3_Q15_128);
+      y128p[3]  = mulhi_int16(y128p[3],ONE_OVER_SQRT3_Q15_128);
+      y128p[4]  = mulhi_int16(y128p[4],ONE_OVER_SQRT3_Q15_128);
+      y128p[5]  = mulhi_int16(y128p[5],ONE_OVER_SQRT3_Q15_128);
+      y128p[6]  = mulhi_int16(y128p[6],ONE_OVER_SQRT3_Q15_128);
+      y128p[7]  = mulhi_int16(y128p[7],ONE_OVER_SQRT3_Q15_128);
+      y128p[8]  = mulhi_int16(y128p[8],ONE_OVER_SQRT3_Q15_128);
+      y128p[9]  = mulhi_int16(y128p[9],ONE_OVER_SQRT3_Q15_128);
+      y128p[10] = mulhi_int16(y128p[10],ONE_OVER_SQRT3_Q15_128);
+      y128p[11] = mulhi_int16(y128p[11],ONE_OVER_SQRT3_Q15_128);
+      y128p[12] = mulhi_int16(y128p[12],ONE_OVER_SQRT3_Q15_128);
+      y128p[13] = mulhi_int16(y128p[13],ONE_OVER_SQRT3_Q15_128);
+      y128p[14] = mulhi_int16(y128p[14],ONE_OVER_SQRT3_Q15_128);
+      y128p[15] = mulhi_int16(y128p[15],ONE_OVER_SQRT3_Q15_128);
+      y128p+=16;
+    }
+  }
+
+#ifndef MR_MAIN
+  if (LOG_DUMPFLAG(DEBUG_DFT)) {
+     LOG_M("idft12288out.m","out",output,6144,1,1);
+  }
+#endif
+}
+
+int16_t twa18432[12288] __attribute__((aligned(32)));
+int16_t twb18432[12288] __attribute__((aligned(32)));
+// 6144 x 3
+void dft18432(int16_t *input, int16_t *output,unsigned char scale) {
+
+  int i,i2,j;
+  uint32_t tmp[3][6144] __attribute__((aligned(32)));
+  uint32_t tmpo[3][6144] __attribute__((aligned(32)));
+  simd_q15_t *y128p=(simd_q15_t*)output;
+  simd_q15_t ONE_OVER_SQRT3_Q15_128 = set1_int16(ONE_OVER_SQRT3_Q15);
+
+  for (i=0,j=0; i<6144; i++) {
+    tmp[0][i] = ((uint32_t *)input)[j++];
+    tmp[1][i] = ((uint32_t *)input)[j++];
+    tmp[2][i] = ((uint32_t *)input)[j++];
+  }
+
+  dft6144((int16_t*)(tmp[0]),(int16_t*)(tmpo[0]),scale);
+  dft6144((int16_t*)(tmp[1]),(int16_t*)(tmpo[1]),scale);
+  dft6144((int16_t*)(tmp[2]),(int16_t*)(tmpo[2]),scale);
+
+  for (i=0,i2=0; i<12288; i+=8,i2+=4)  {
+    bfly3((simd_q15_t*)(&tmpo[0][i2]),(simd_q15_t*)(&tmpo[1][i2]),(simd_q15_t*)(&tmpo[2][i2]),
+          (simd_q15_t*)(output+i),(simd_q15_t*)(output+12288+i),(simd_q15_t*)(output+24576+i),
+          (simd_q15_t*)(twa18432+i),(simd_q15_t*)(twb18432+i));
+  }
+  if (scale==1) {
+    for (i=0; i<288; i++) {
+      y128p[0]  = mulhi_int16(y128p[0],ONE_OVER_SQRT3_Q15_128);
+      y128p[1]  = mulhi_int16(y128p[1],ONE_OVER_SQRT3_Q15_128);
+      y128p[2]  = mulhi_int16(y128p[2],ONE_OVER_SQRT3_Q15_128);
+      y128p[3]  = mulhi_int16(y128p[3],ONE_OVER_SQRT3_Q15_128);
+      y128p[4]  = mulhi_int16(y128p[4],ONE_OVER_SQRT3_Q15_128);
+      y128p[5]  = mulhi_int16(y128p[5],ONE_OVER_SQRT3_Q15_128);
+      y128p[6]  = mulhi_int16(y128p[6],ONE_OVER_SQRT3_Q15_128);
+      y128p[7]  = mulhi_int16(y128p[7],ONE_OVER_SQRT3_Q15_128);
+      y128p[8]  = mulhi_int16(y128p[8],ONE_OVER_SQRT3_Q15_128);
+      y128p[9]  = mulhi_int16(y128p[9],ONE_OVER_SQRT3_Q15_128);
+      y128p[10] = mulhi_int16(y128p[10],ONE_OVER_SQRT3_Q15_128);
+      y128p[11] = mulhi_int16(y128p[11],ONE_OVER_SQRT3_Q15_128);
+      y128p[12] = mulhi_int16(y128p[12],ONE_OVER_SQRT3_Q15_128);
+      y128p[13] = mulhi_int16(y128p[13],ONE_OVER_SQRT3_Q15_128);
+      y128p[14] = mulhi_int16(y128p[14],ONE_OVER_SQRT3_Q15_128);
+      y128p[15] = mulhi_int16(y128p[15],ONE_OVER_SQRT3_Q15_128);
+      y128p+=16;
+    }
+  }
+}
+
+void idft18432(int16_t *input, int16_t *output,unsigned char scale) {
+
+  int i,i2,j;
+  uint32_t tmp[3][6144] __attribute__((aligned(32)));
+  uint32_t tmpo[3][6144] __attribute__((aligned(32)));
+  simd_q15_t *y128p=(simd_q15_t*)output;
+  simd_q15_t ONE_OVER_SQRT3_Q15_128 = set1_int16(ONE_OVER_SQRT3_Q15);
+
+  for (i=0,j=0; i<6144; i++) {
+    tmp[0][i] = ((uint32_t *)input)[j++];
+    tmp[1][i] = ((uint32_t *)input)[j++];
+    tmp[2][i] = ((uint32_t *)input)[j++];
+  }
+
+  idft6144((int16_t*)(tmp[0]),(int16_t*)(tmpo[0]),scale);
+  idft6144((int16_t*)(tmp[1]),(int16_t*)(tmpo[1]),scale);
+  idft6144((int16_t*)(tmp[2]),(int16_t*)(tmpo[2]),scale);
+
+  for (i=0,i2=0; i<12288; i+=8,i2+=4)  {
+    ibfly3((simd_q15_t*)(&tmpo[0][i2]),(simd_q15_t*)(&tmpo[1][i2]),(simd_q15_t*)(&tmpo[2][i2]),
+	   (simd_q15_t*)(output+i),(simd_q15_t*)(output+12288+i),(simd_q15_t*)(output+24576+i),
+	   (simd_q15_t*)(twa18432+i),(simd_q15_t*)(twb18432+i));
+  }
+  if (scale==1) {
+    for (i=0; i<288; i++) {
+      y128p[0]  = mulhi_int16(y128p[0],ONE_OVER_SQRT3_Q15_128);
+      y128p[1]  = mulhi_int16(y128p[1],ONE_OVER_SQRT3_Q15_128);
+      y128p[2]  = mulhi_int16(y128p[2],ONE_OVER_SQRT3_Q15_128);
+      y128p[3]  = mulhi_int16(y128p[3],ONE_OVER_SQRT3_Q15_128);
+      y128p[4]  = mulhi_int16(y128p[4],ONE_OVER_SQRT3_Q15_128);
+      y128p[5]  = mulhi_int16(y128p[5],ONE_OVER_SQRT3_Q15_128);
+      y128p[6]  = mulhi_int16(y128p[6],ONE_OVER_SQRT3_Q15_128);
+      y128p[7]  = mulhi_int16(y128p[7],ONE_OVER_SQRT3_Q15_128);
+      y128p[8]  = mulhi_int16(y128p[8],ONE_OVER_SQRT3_Q15_128);
+      y128p[9]  = mulhi_int16(y128p[9],ONE_OVER_SQRT3_Q15_128);
+      y128p[10] = mulhi_int16(y128p[10],ONE_OVER_SQRT3_Q15_128);
+      y128p[11] = mulhi_int16(y128p[11],ONE_OVER_SQRT3_Q15_128);
+      y128p[12] = mulhi_int16(y128p[12],ONE_OVER_SQRT3_Q15_128);
+      y128p[13] = mulhi_int16(y128p[13],ONE_OVER_SQRT3_Q15_128);
+      y128p[14] = mulhi_int16(y128p[14],ONE_OVER_SQRT3_Q15_128);
+      y128p[15] = mulhi_int16(y128p[15],ONE_OVER_SQRT3_Q15_128);
+      y128p+=16;
+    }
+  }
+}
+
+
+int16_t twa24576[16384] __attribute__((aligned(32)));
+int16_t twb24576[16384] __attribute__((aligned(32)));
+// 8192 x 3
+void dft24576(int16_t *input, int16_t *output,unsigned char scale)
+{
+  int i,i2,j;
+  uint32_t tmp[3][8192] __attribute__((aligned(32)));
+  uint32_t tmpo[3][8192] __attribute__((aligned(32)));
+  simd_q15_t *y128p=(simd_q15_t*)output;
+  simd_q15_t ONE_OVER_SQRT3_Q15_128 = set1_int16(ONE_OVER_SQRT3_Q15);
+
+  for (i=0,j=0; i<8192; i++) {
+    tmp[0][i] = ((uint32_t *)input)[j++];
+    tmp[1][i] = ((uint32_t *)input)[j++];
+    tmp[2][i] = ((uint32_t *)input)[j++];
+  }
+
+  dft8192((int16_t*)(tmp[0]),(int16_t*)(tmpo[0]),1);
+  dft8192((int16_t*)(tmp[1]),(int16_t*)(tmpo[1]),1);
+  dft8192((int16_t*)(tmp[2]),(int16_t*)(tmpo[2]),1);
+  /*
+  for (i=1; i<8192; i++) {
+    tmpo[0][i] = tmpo[0][i<<1];
+    tmpo[1][i] = tmpo[1][i<<1];
+    tmpo[2][i] = tmpo[2][i<<1];
+    }*/
+#ifndef MR_MAIN
+  if (LOG_DUMPFLAG(DEBUG_DFT)) {
+    LOG_M("dft24576out0.m","o0",tmpo[0],8192,1,1);
+    LOG_M("dft24576out1.m","o1",tmpo[1],8192,1,1);
+    LOG_M("dft24576out2.m","o2",tmpo[2],8192,1,1);
+  }
+#endif
+  for (i=0,i2=0; i<16384; i+=8,i2+=4)  {
+    bfly3((simd_q15_t*)(&tmpo[0][i2]),(simd_q15_t*)(&tmpo[1][i2]),(simd_q15_t*)(&tmpo[2][i2]),
+          (simd_q15_t*)(output+i),(simd_q15_t*)(output+16384+i),(simd_q15_t*)(output+32768+i),
+          (simd_q15_t*)(twa24576+i),(simd_q15_t*)(twb24576+i));
+  }
+
+
+  if (scale==1) {
+    for (i=0; i<384; i++) {
+      y128p[0]  = mulhi_int16(y128p[0],ONE_OVER_SQRT3_Q15_128);
+      y128p[1]  = mulhi_int16(y128p[1],ONE_OVER_SQRT3_Q15_128);
+      y128p[2]  = mulhi_int16(y128p[2],ONE_OVER_SQRT3_Q15_128);
+      y128p[3]  = mulhi_int16(y128p[3],ONE_OVER_SQRT3_Q15_128);
+      y128p[4]  = mulhi_int16(y128p[4],ONE_OVER_SQRT3_Q15_128);
+      y128p[5]  = mulhi_int16(y128p[5],ONE_OVER_SQRT3_Q15_128);
+      y128p[6]  = mulhi_int16(y128p[6],ONE_OVER_SQRT3_Q15_128);
+      y128p[7]  = mulhi_int16(y128p[7],ONE_OVER_SQRT3_Q15_128);
+      y128p[8]  = mulhi_int16(y128p[8],ONE_OVER_SQRT3_Q15_128);
+      y128p[9]  = mulhi_int16(y128p[9],ONE_OVER_SQRT3_Q15_128);
+      y128p[10] = mulhi_int16(y128p[10],ONE_OVER_SQRT3_Q15_128);
+      y128p[11] = mulhi_int16(y128p[11],ONE_OVER_SQRT3_Q15_128);
+      y128p[12] = mulhi_int16(y128p[12],ONE_OVER_SQRT3_Q15_128);
+      y128p[13] = mulhi_int16(y128p[13],ONE_OVER_SQRT3_Q15_128);
+      y128p[14] = mulhi_int16(y128p[14],ONE_OVER_SQRT3_Q15_128);
+      y128p[15] = mulhi_int16(y128p[15],ONE_OVER_SQRT3_Q15_128);
+      y128p+=16;
+    }
+  }
+#ifndef MR_MAIN
+  if (LOG_DUMPFLAG(DEBUG_DFT)) {
+     LOG_M("out.m","out",output,24576,1,1);
+  }
+#endif
+}
+
+void idft24576(int16_t *input, int16_t *output,unsigned char scale)
+{
+  int i,i2,j;
+  uint32_t tmp[3][8192] __attribute__((aligned(32)));
+  uint32_t tmpo[3][8192] __attribute__((aligned(32)));
+  simd_q15_t *y128p=(simd_q15_t*)output;
+  simd_q15_t ONE_OVER_SQRT3_Q15_128 = set1_int16(ONE_OVER_SQRT3_Q15);
+
+  for (i=0,j=0; i<8192; i++) {
+    tmp[0][i] = ((uint32_t *)input)[j++];
+    tmp[1][i] = ((uint32_t *)input)[j++];
+    tmp[2][i] = ((uint32_t *)input)[j++];
+  }
+
+  idft8192((int16_t*)(tmp[0]),(int16_t*)(tmpo[0]),1);
+  idft8192((int16_t*)(tmp[1]),(int16_t*)(tmpo[1]),1);
+  idft8192((int16_t*)(tmp[2]),(int16_t*)(tmpo[2]),1);
+#ifndef MR_MAIN
+  if (LOG_DUMPFLAG(DEBUG_DFT)) {
+    LOG_M("idft24576in.m","in",input,24576,1,1);
+    LOG_M("idft24576out0.m","o0",tmpo[0],8192,1,1);
+    LOG_M("idft24576out1.m","o1",tmpo[1],8192,1,1);
+    LOG_M("idft24576out2.m","o2",tmpo[2],8192,1,1);
+  }
+#endif
+  for (i=0,i2=0; i<16384; i+=8,i2+=4)  {
+    ibfly3((simd_q15_t*)(&tmpo[0][i2]),(simd_q15_t*)(&tmpo[1][i2]),((simd_q15_t*)&tmpo[2][i2]),
+          (simd_q15_t*)(output+i),(simd_q15_t*)(output+16384+i),(simd_q15_t*)(output+32768+i),
+          (simd_q15_t*)(twa24576+i),(simd_q15_t*)(twb24576+i));
+  }
+  if (scale==1) {
+    for (i=0; i<384; i++) {
+      y128p[0]  = mulhi_int16(y128p[0],ONE_OVER_SQRT3_Q15_128);
+      y128p[1]  = mulhi_int16(y128p[1],ONE_OVER_SQRT3_Q15_128);
+      y128p[2]  = mulhi_int16(y128p[2],ONE_OVER_SQRT3_Q15_128);
+      y128p[3]  = mulhi_int16(y128p[3],ONE_OVER_SQRT3_Q15_128);
+      y128p[4]  = mulhi_int16(y128p[4],ONE_OVER_SQRT3_Q15_128);
+      y128p[5]  = mulhi_int16(y128p[5],ONE_OVER_SQRT3_Q15_128);
+      y128p[6]  = mulhi_int16(y128p[6],ONE_OVER_SQRT3_Q15_128);
+      y128p[7]  = mulhi_int16(y128p[7],ONE_OVER_SQRT3_Q15_128);
+      y128p[8]  = mulhi_int16(y128p[8],ONE_OVER_SQRT3_Q15_128);
+      y128p[9]  = mulhi_int16(y128p[9],ONE_OVER_SQRT3_Q15_128);
+      y128p[10] = mulhi_int16(y128p[10],ONE_OVER_SQRT3_Q15_128);
+      y128p[11] = mulhi_int16(y128p[11],ONE_OVER_SQRT3_Q15_128);
+      y128p[12] = mulhi_int16(y128p[12],ONE_OVER_SQRT3_Q15_128);
+      y128p[13] = mulhi_int16(y128p[13],ONE_OVER_SQRT3_Q15_128);
+      y128p[14] = mulhi_int16(y128p[14],ONE_OVER_SQRT3_Q15_128);
+      y128p[15] = mulhi_int16(y128p[15],ONE_OVER_SQRT3_Q15_128);
+      y128p+=16;
+    }
+  }
+#ifndef MR_MAIN
+  if (LOG_DUMPFLAG(DEBUG_DFT)) {
+    LOG_M("idft24576out.m","out",output,24576,1,1);
+  }
+#endif
+}
+
+int16_t twa36864[24576] __attribute__((aligned(32)));
+int16_t twb36864[24576] __attribute__((aligned(32)));
+
+// 12288 x 3
+void dft36864(int16_t *input, int16_t *output,uint8_t scale) {
+
+  int i,i2,j;
+  uint32_t tmp[3][12288] __attribute__((aligned(32)));
+  uint32_t tmpo[3][12288] __attribute__((aligned(32)));
+  simd_q15_t *y128p=(simd_q15_t*)output;
+  simd_q15_t ONE_OVER_SQRT3_Q15_128 = set1_int16(ONE_OVER_SQRT3_Q15);
+
+  for (i=0,j=0; i<12288; i++) {
+    tmp[0][i] = ((uint32_t *)input)[j++];
+    tmp[1][i] = ((uint32_t *)input)[j++];
+    tmp[2][i] = ((uint32_t *)input)[j++];
+  }
+
+  dft12288((int16_t*)(tmp[0]),(int16_t*)(tmpo[0]),1);
+  dft12288((int16_t*)(tmp[1]),(int16_t*)(tmpo[1]),1);
+  dft12288((int16_t*)(tmp[2]),(int16_t*)(tmpo[2]),1);
+#ifndef MR_MAIN
+  if (LOG_DUMPFLAG(DEBUG_DFT)) {
+    LOG_M("dft36864out0.m","o0",tmpo[0],12288,1,1);
+    LOG_M("dft36864out1.m","o1",tmpo[1],12288,1,1);
+    LOG_M("dft36864out2.m","o2",tmpo[2],12288,1,1);
+  }
+#endif
+  for (i=0,i2=0; i<24576; i+=8,i2+=4)  {
+    bfly3((simd_q15_t*)(&tmpo[0][i2]),(simd_q15_t*)(&tmpo[1][i2]),(simd_q15_t*)(&tmpo[2][i2]),
+          (simd_q15_t*)(output+i),(simd_q15_t*)(output+24576+i),(simd_q15_t*)(output+49152+i),
+          (simd_q15_t*)(twa36864+i),(simd_q15_t*)(twb36864+i));
+  }
+
+  if (scale==1) {
+    for (i=0; i<576; i++) {
+      y128p[0]  = mulhi_int16(y128p[0],ONE_OVER_SQRT3_Q15_128);
+      y128p[1]  = mulhi_int16(y128p[1],ONE_OVER_SQRT3_Q15_128);
+      y128p[2]  = mulhi_int16(y128p[2],ONE_OVER_SQRT3_Q15_128);
+      y128p[3]  = mulhi_int16(y128p[3],ONE_OVER_SQRT3_Q15_128);
+      y128p[4]  = mulhi_int16(y128p[4],ONE_OVER_SQRT3_Q15_128);
+      y128p[5]  = mulhi_int16(y128p[5],ONE_OVER_SQRT3_Q15_128);
+      y128p[6]  = mulhi_int16(y128p[6],ONE_OVER_SQRT3_Q15_128);
+      y128p[7]  = mulhi_int16(y128p[7],ONE_OVER_SQRT3_Q15_128);
+      y128p[8]  = mulhi_int16(y128p[8],ONE_OVER_SQRT3_Q15_128);
+      y128p[9]  = mulhi_int16(y128p[9],ONE_OVER_SQRT3_Q15_128);
+      y128p[10] = mulhi_int16(y128p[10],ONE_OVER_SQRT3_Q15_128);
+      y128p[11] = mulhi_int16(y128p[11],ONE_OVER_SQRT3_Q15_128);
+      y128p[12] = mulhi_int16(y128p[12],ONE_OVER_SQRT3_Q15_128);
+      y128p[13] = mulhi_int16(y128p[13],ONE_OVER_SQRT3_Q15_128);
+      y128p[14] = mulhi_int16(y128p[14],ONE_OVER_SQRT3_Q15_128);
+      y128p[15] = mulhi_int16(y128p[15],ONE_OVER_SQRT3_Q15_128);
+      y128p+=16;
+    }
+  }
+#ifndef MR_MAIN
+  if (LOG_DUMPFLAG(DEBUG_DFT)) {
+     LOG_M("out.m","out",output,36864,1,1);
+  }
+#endif
+}
+
+void idft36864(int16_t *input, int16_t *output,uint8_t scale) {
+
+  int i,i2,j;
+  uint32_t tmp[3][12288] __attribute__((aligned(32)));
+  uint32_t tmpo[3][12288] __attribute__((aligned(32)));
+  simd_q15_t *y128p=(simd_q15_t*)output;
+  simd_q15_t ONE_OVER_SQRT3_Q15_128 = set1_int16(ONE_OVER_SQRT3_Q15);
+
+  for (i=0,j=0; i<12288; i++) {
+    tmp[0][i] = ((uint32_t *)input)[j++];
+    tmp[1][i] = ((uint32_t *)input)[j++];
+    tmp[2][i] = ((uint32_t *)input)[j++];
+  }
+
+  idft12288((int16_t*)(tmp[0]),(int16_t*)(tmpo[0]),1);
+  idft12288((int16_t*)(tmp[1]),(int16_t*)(tmpo[1]),1);
+  idft12288((int16_t*)(tmp[2]),(int16_t*)(tmpo[2]),1);
+
+  for (i=0,i2=0; i<24576; i+=8,i2+=4)  {
+    ibfly3((simd_q15_t*)(&tmpo[0][i2]),(simd_q15_t*)(&tmpo[1][i2]),((simd_q15_t*)&tmpo[2][i2]),
+          (simd_q15_t*)(output+i),(simd_q15_t*)(output+24576+i),(simd_q15_t*)(output+49152+i),
+          (simd_q15_t*)(twa36864+i),(simd_q15_t*)(twb36864+i));
+  }
+  if (scale==1) {
+    for (i=0; i<576; i++) {
+      y128p[0]  = mulhi_int16(y128p[0],ONE_OVER_SQRT3_Q15_128);
+      y128p[1]  = mulhi_int16(y128p[1],ONE_OVER_SQRT3_Q15_128);
+      y128p[2]  = mulhi_int16(y128p[2],ONE_OVER_SQRT3_Q15_128);
+      y128p[3]  = mulhi_int16(y128p[3],ONE_OVER_SQRT3_Q15_128);
+      y128p[4]  = mulhi_int16(y128p[4],ONE_OVER_SQRT3_Q15_128);
+      y128p[5]  = mulhi_int16(y128p[5],ONE_OVER_SQRT3_Q15_128);
+      y128p[6]  = mulhi_int16(y128p[6],ONE_OVER_SQRT3_Q15_128);
+      y128p[7]  = mulhi_int16(y128p[7],ONE_OVER_SQRT3_Q15_128);
+      y128p[8]  = mulhi_int16(y128p[8],ONE_OVER_SQRT3_Q15_128);
+      y128p[9]  = mulhi_int16(y128p[9],ONE_OVER_SQRT3_Q15_128);
+      y128p[10] = mulhi_int16(y128p[10],ONE_OVER_SQRT3_Q15_128);
+      y128p[11] = mulhi_int16(y128p[11],ONE_OVER_SQRT3_Q15_128);
+      y128p[12] = mulhi_int16(y128p[12],ONE_OVER_SQRT3_Q15_128);
+      y128p[13] = mulhi_int16(y128p[13],ONE_OVER_SQRT3_Q15_128);
+      y128p[14] = mulhi_int16(y128p[14],ONE_OVER_SQRT3_Q15_128);
+      y128p[15] = mulhi_int16(y128p[15],ONE_OVER_SQRT3_Q15_128);
+      y128p+=16;
+    }
+  }
+}
+
+int16_t twa49152[32768] __attribute__((aligned(32)));
+int16_t twb49152[32768] __attribute__((aligned(32)));
+
+// 16384 x 3
+void dft49152(int16_t *input, int16_t *output,uint8_t scale) {
+
+  int i,i2,j;
+  uint32_t tmp[3][16384] __attribute__((aligned(32)));
+  uint32_t tmpo[3][16384] __attribute__((aligned(32)));
+  simd_q15_t *y128p=(simd_q15_t*)output;
+  simd_q15_t ONE_OVER_SQRT3_Q15_128 = set1_int16(ONE_OVER_SQRT3_Q15);
+
+  for (i=0,j=0; i<16384; i++) {
+    tmp[0][i] = ((uint32_t *)input)[j++];
+    tmp[1][i] = ((uint32_t *)input)[j++];
+    tmp[2][i] = ((uint32_t *)input)[j++];
+  }
+
+  dft16384((int16_t*)(tmp[0]),(int16_t*)(tmpo[0]),1);
+  dft16384((int16_t*)(tmp[1]),(int16_t*)(tmpo[1]),1);
+  dft16384((int16_t*)(tmp[2]),(int16_t*)(tmpo[2]),1);
+
+  for (i=0,i2=0; i<32768; i+=8,i2+=4)  {
+    bfly3((simd_q15_t*)(&tmpo[0][i2]),(simd_q15_t*)(&tmpo[1][i2]),((simd_q15_t*)&tmpo[2][i2]),
+          (simd_q15_t*)(output+i),(simd_q15_t*)(output+32768+i),(simd_q15_t*)(output+65536+i),
+          (simd_q15_t*)(twa49152+i),(simd_q15_t*)(twb49152+i));
+  }
+  if (scale==1) {
+    for (i=0; i<768; i++) {
+      y128p[0]  = mulhi_int16(y128p[0],ONE_OVER_SQRT3_Q15_128);
+      y128p[1]  = mulhi_int16(y128p[1],ONE_OVER_SQRT3_Q15_128);
+      y128p[2]  = mulhi_int16(y128p[2],ONE_OVER_SQRT3_Q15_128);
+      y128p[3]  = mulhi_int16(y128p[3],ONE_OVER_SQRT3_Q15_128);
+      y128p[4]  = mulhi_int16(y128p[4],ONE_OVER_SQRT3_Q15_128);
+      y128p[5]  = mulhi_int16(y128p[5],ONE_OVER_SQRT3_Q15_128);
+      y128p[6]  = mulhi_int16(y128p[6],ONE_OVER_SQRT3_Q15_128);
+      y128p[7]  = mulhi_int16(y128p[7],ONE_OVER_SQRT3_Q15_128);
+      y128p[8]  = mulhi_int16(y128p[8],ONE_OVER_SQRT3_Q15_128);
+      y128p[9]  = mulhi_int16(y128p[9],ONE_OVER_SQRT3_Q15_128);
+      y128p[10] = mulhi_int16(y128p[10],ONE_OVER_SQRT3_Q15_128);
+      y128p[11] = mulhi_int16(y128p[11],ONE_OVER_SQRT3_Q15_128);
+      y128p[12] = mulhi_int16(y128p[12],ONE_OVER_SQRT3_Q15_128);
+      y128p[13] = mulhi_int16(y128p[13],ONE_OVER_SQRT3_Q15_128);
+      y128p[14] = mulhi_int16(y128p[14],ONE_OVER_SQRT3_Q15_128);
+      y128p[15] = mulhi_int16(y128p[15],ONE_OVER_SQRT3_Q15_128);
+      y128p+=16;
+    }
+  }
+}
+
+void idft49152(int16_t *input, int16_t *output,uint8_t scale) {
+
+  int i,i2,j;
+  uint32_t tmp[3][16384] __attribute__((aligned(32)));
+  uint32_t tmpo[3][16384] __attribute__((aligned(32)));
+  simd_q15_t *y128p=(simd_q15_t*)output;
+  simd_q15_t ONE_OVER_SQRT3_Q15_128 = set1_int16(ONE_OVER_SQRT3_Q15);
+
+  for (i=0,j=0; i<16384; i++) {
+    tmp[0][i] = ((uint32_t *)input)[j++];
+    tmp[1][i] = ((uint32_t *)input)[j++];
+    tmp[2][i] = ((uint32_t *)input)[j++];
+  }
+
+  idft16384((int16_t*)(tmp[0]),(int16_t*)(tmpo[0]),1);
+  idft16384((int16_t*)(tmp[1]),(int16_t*)(tmpo[1]),1);
+  idft16384((int16_t*)(tmp[2]),(int16_t*)(tmpo[2]),1);
+
+  for (i=0,i2=0; i<32768; i+=8,i2+=4)  {
+    ibfly3((simd_q15_t*)(&tmpo[0][i2]),(simd_q15_t*)(&tmpo[1][i2]),((simd_q15_t*)&tmpo[2][i2]),
+	   (simd_q15_t*)(output+i),(simd_q15_t*)(output+32768+i),(simd_q15_t*)(output+65536+i),
+	   (simd_q15_t*)(twa49152+i),(simd_q15_t*)(twb49152+i));
+  }
+  if (scale==1) {
+    for (i=0; i<768; i++) {
+      y128p[0]  = mulhi_int16(y128p[0],ONE_OVER_SQRT3_Q15_128);
+      y128p[1]  = mulhi_int16(y128p[1],ONE_OVER_SQRT3_Q15_128);
+      y128p[2]  = mulhi_int16(y128p[2],ONE_OVER_SQRT3_Q15_128);
+      y128p[3]  = mulhi_int16(y128p[3],ONE_OVER_SQRT3_Q15_128);
+      y128p[4]  = mulhi_int16(y128p[4],ONE_OVER_SQRT3_Q15_128);
+      y128p[5]  = mulhi_int16(y128p[5],ONE_OVER_SQRT3_Q15_128);
+      y128p[6]  = mulhi_int16(y128p[6],ONE_OVER_SQRT3_Q15_128);
+      y128p[7]  = mulhi_int16(y128p[7],ONE_OVER_SQRT3_Q15_128);
+      y128p[8]  = mulhi_int16(y128p[8],ONE_OVER_SQRT3_Q15_128);
+      y128p[9]  = mulhi_int16(y128p[9],ONE_OVER_SQRT3_Q15_128);
+      y128p[10] = mulhi_int16(y128p[10],ONE_OVER_SQRT3_Q15_128);
+      y128p[11] = mulhi_int16(y128p[11],ONE_OVER_SQRT3_Q15_128);
+      y128p[12] = mulhi_int16(y128p[12],ONE_OVER_SQRT3_Q15_128);
+      y128p[13] = mulhi_int16(y128p[13],ONE_OVER_SQRT3_Q15_128);
+      y128p[14] = mulhi_int16(y128p[14],ONE_OVER_SQRT3_Q15_128);
+      y128p[15] = mulhi_int16(y128p[15],ONE_OVER_SQRT3_Q15_128);
+      y128p+=16;
+    }
+  }
+}
+
+int16_t tw65536[3*2*16384] __attribute__((aligned(32)));
+
+void idft65536(int16_t *x,int16_t *y,unsigned char scale)
+{
+
+  simd_q15_t xtmp[16384], ytmp[16384];
+  simd_q15_t *tw65536_128p = (simd_q15_t *)tw65536;
+  simd_q15_t *x128 = (simd_q15_t *)x, *y128p = (simd_q15_t *)y;
+  simd_q15_t *ytmpp = &ytmp[0];
+  int i,j;
+
+  for (i=0,j=0; i<16384; i+=4,j++) {
+    transpose16_ooff(x128+i,xtmp+j,4096);
+  }
+
+  idft16384((int16_t*)(xtmp),(int16_t*)(ytmp),1);
+  idft16384((int16_t*)(xtmp+4096),(int16_t*)(ytmp+4096),1);
+  idft16384((int16_t*)(xtmp+8192),(int16_t*)(ytmp+8192),1);
+  idft16384((int16_t*)(xtmp+12288),(int16_t*)(ytmp+12288),1);
+
+  for (i=0; i<4096; i++) {
+    ibfly4(ytmpp,ytmpp+4096,ytmpp+8192,ytmpp+12288,
+           y128p,y128p+4096,y128p+8192,y128p+12288,
+           tw65536_128p,tw65536_128p+8192,tw65536_128p+16384);
+    tw65536_128p++;
+    y128p++;
+    ytmpp++;
+  }
+
+  if (scale>0) {
+    // rewind y128p: the ibfly4() loop above advanced it by 4096 vectors
+    y128p = (simd_q15_t *)y;
+
+    for (i=0; i<1024; i++) {
+      y128p[0]  = shiftright_int16(y128p[0],scale);
+      y128p[1]  = shiftright_int16(y128p[1],scale);
+      y128p[2]  = shiftright_int16(y128p[2],scale);
+      y128p[3]  = shiftright_int16(y128p[3],scale);
+      y128p[4]  = shiftright_int16(y128p[4],scale);
+      y128p[5]  = shiftright_int16(y128p[5],scale);
+      y128p[6]  = shiftright_int16(y128p[6],scale);
+      y128p[7]  = shiftright_int16(y128p[7],scale);
+      y128p[8]  = shiftright_int16(y128p[8],scale);
+      y128p[9]  = shiftright_int16(y128p[9],scale);
+      y128p[10] = shiftright_int16(y128p[10],scale);
+      y128p[11] = shiftright_int16(y128p[11],scale);
+      y128p[12] = shiftright_int16(y128p[12],scale);
+      y128p[13] = shiftright_int16(y128p[13],scale);
+      y128p[14] = shiftright_int16(y128p[14],scale);
+      y128p[15] = shiftright_int16(y128p[15],scale);
+
+      y128p+=16;
+    }
+
+  }
+
+  simde_mm_empty();
+  simde_m_empty();
+}
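+
+/* Sketch of the radix-4 recombination done by ibfly4() above: with a = Y0[k]
+ * and b, c, d the twiddle-rotated outputs of the other three 16384-point
+ * sub-IDFTs, each pass writes (up to Q15 rounding)
+ *   y[k]          = a + b + c + d
+ *   y[k +   N/4]  = a + j*b - c - j*d
+ *   y[k +   N/2]  = a - b + c - d
+ *   y[k + 3*N/4]  = a - j*b - c + j*d
+ * i.e. the conjugate of the forward radix-4 butterfly used by bfly4(). */
+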
+int16_t twa73728[49152] __attribute__((aligned(32)));
+int16_t twb73728[49152] __attribute__((aligned(32)));
+// 24576 x 3
+void dft73728(int16_t *input, int16_t *output,uint8_t scale) {
+
+  AssertFatal(1==0,"Need to do this ..\n");
+}
+
+void idft73728(int16_t *input, int16_t *output,uint8_t scale) {
+
+  AssertFatal(1==0,"Need to do this ..\n");
+}
+
+
+int16_t twa98304[65536] __attribute__((aligned(32)));
+int16_t twb98304[65536] __attribute__((aligned(32)));
+// 32768 x 3
+void dft98304(int16_t *input, int16_t *output,uint8_t scale) {
+
+  int i,i2,j;
+  uint32_t tmp[3][32768] __attribute__((aligned(32)));
+  uint32_t tmpo[3][32768] __attribute__((aligned(32)));
+  simd_q15_t *y128p=(simd_q15_t*)output;
+  simd_q15_t ONE_OVER_SQRT3_Q15_128 = set1_int16(ONE_OVER_SQRT3_Q15);
+
+  for (i=0,j=0; i<32768; i++) {
+    tmp[0][i] = ((uint32_t *)input)[j++];
+    tmp[1][i] = ((uint32_t *)input)[j++];
+    tmp[2][i] = ((uint32_t *)input)[j++];
+  }
+
+  dft32768((int16_t*)(tmp[0]),(int16_t*)(tmpo[0]),1);
+  dft32768((int16_t*)(tmp[1]),(int16_t*)(tmpo[1]),1);
+  dft32768((int16_t*)(tmp[2]),(int16_t*)(tmpo[2]),1);
+
+  for (i=0,i2=0; i<65536; i+=8,i2+=4)  {
+    bfly3((simd_q15_t*)(&tmpo[0][i2]),(simd_q15_t*)(&tmpo[1][i2]),((simd_q15_t*)&tmpo[2][i2]),
+          (simd_q15_t*)(output+i),(simd_q15_t*)(output+65536+i),(simd_q15_t*)(output+131072+i),
+          (simd_q15_t*)(twa98304+i),(simd_q15_t*)(twb98304+i));
+  }
+  if (scale==1) {
+    for (i=0; i<1536; i++) {
+      y128p[0]  = mulhi_int16(y128p[0],ONE_OVER_SQRT3_Q15_128);
+      y128p[1]  = mulhi_int16(y128p[1],ONE_OVER_SQRT3_Q15_128);
+      y128p[2]  = mulhi_int16(y128p[2],ONE_OVER_SQRT3_Q15_128);
+      y128p[3]  = mulhi_int16(y128p[3],ONE_OVER_SQRT3_Q15_128);
+      y128p[4]  = mulhi_int16(y128p[4],ONE_OVER_SQRT3_Q15_128);
+      y128p[5]  = mulhi_int16(y128p[5],ONE_OVER_SQRT3_Q15_128);
+      y128p[6]  = mulhi_int16(y128p[6],ONE_OVER_SQRT3_Q15_128);
+      y128p[7]  = mulhi_int16(y128p[7],ONE_OVER_SQRT3_Q15_128);
+      y128p[8]  = mulhi_int16(y128p[8],ONE_OVER_SQRT3_Q15_128);
+      y128p[9]  = mulhi_int16(y128p[9],ONE_OVER_SQRT3_Q15_128);
+      y128p[10] = mulhi_int16(y128p[10],ONE_OVER_SQRT3_Q15_128);
+      y128p[11] = mulhi_int16(y128p[11],ONE_OVER_SQRT3_Q15_128);
+      y128p[12] = mulhi_int16(y128p[12],ONE_OVER_SQRT3_Q15_128);
+      y128p[13] = mulhi_int16(y128p[13],ONE_OVER_SQRT3_Q15_128);
+      y128p[14] = mulhi_int16(y128p[14],ONE_OVER_SQRT3_Q15_128);
+      y128p[15] = mulhi_int16(y128p[15],ONE_OVER_SQRT3_Q15_128);
+      y128p+=16;
+    }
+  }
+}
+
+void idft98304(int16_t *input, int16_t *output,uint8_t scale) {
+
+  int i,i2,j;
+  uint32_t tmp[3][32768] __attribute__((aligned(32)));
+  uint32_t tmpo[3][32768] __attribute__((aligned(32)));
+  simd_q15_t *y128p=(simd_q15_t*)output;
+  simd_q15_t ONE_OVER_SQRT3_Q15_128 = set1_int16(ONE_OVER_SQRT3_Q15);
+
+  for (i=0,j=0; i<32768; i++) {
+    tmp[0][i] = ((uint32_t *)input)[j++];
+    tmp[1][i] = ((uint32_t *)input)[j++];
+    tmp[2][i] = ((uint32_t *)input)[j++];
+  }
+
+  idft32768((int16_t*)(tmp[0]),(int16_t*)(tmpo[0]),1);
+  idft32768((int16_t*)(tmp[1]),(int16_t*)(tmpo[1]),1);
+  idft32768((int16_t*)(tmp[2]),(int16_t*)(tmpo[2]),1);
+
+  for (i=0,i2=0; i<65536; i+=8,i2+=4)  {
+    ibfly3((simd_q15_t*)(&tmpo[0][i2]),(simd_q15_t*)(&tmpo[1][i2]),((simd_q15_t*)&tmpo[2][i2]),
+	   (simd_q15_t*)(output+i),(simd_q15_t*)(output+65536+i),(simd_q15_t*)(output+131072+i),
+	   (simd_q15_t*)(twa98304+i),(simd_q15_t*)(twb98304+i));
+  }
+  if (scale==1) {
+    for (i=0; i<1536; i++) {
+      y128p[0]  = mulhi_int16(y128p[0],ONE_OVER_SQRT3_Q15_128);
+      y128p[1]  = mulhi_int16(y128p[1],ONE_OVER_SQRT3_Q15_128);
+      y128p[2]  = mulhi_int16(y128p[2],ONE_OVER_SQRT3_Q15_128);
+      y128p[3]  = mulhi_int16(y128p[3],ONE_OVER_SQRT3_Q15_128);
+      y128p[4]  = mulhi_int16(y128p[4],ONE_OVER_SQRT3_Q15_128);
+      y128p[5]  = mulhi_int16(y128p[5],ONE_OVER_SQRT3_Q15_128);
+      y128p[6]  = mulhi_int16(y128p[6],ONE_OVER_SQRT3_Q15_128);
+      y128p[7]  = mulhi_int16(y128p[7],ONE_OVER_SQRT3_Q15_128);
+      y128p[8]  = mulhi_int16(y128p[8],ONE_OVER_SQRT3_Q15_128);
+      y128p[9]  = mulhi_int16(y128p[9],ONE_OVER_SQRT3_Q15_128);
+      y128p[10] = mulhi_int16(y128p[10],ONE_OVER_SQRT3_Q15_128);
+      y128p[11] = mulhi_int16(y128p[11],ONE_OVER_SQRT3_Q15_128);
+      y128p[12] = mulhi_int16(y128p[12],ONE_OVER_SQRT3_Q15_128);
+      y128p[13] = mulhi_int16(y128p[13],ONE_OVER_SQRT3_Q15_128);
+      y128p[14] = mulhi_int16(y128p[14],ONE_OVER_SQRT3_Q15_128);
+      y128p[15] = mulhi_int16(y128p[15],ONE_OVER_SQRT3_Q15_128);
+      y128p+=16;
+    }
+  }
+}
+
+
+///  THIS SECTION IS FOR ALL PUSCH DFTS (i.e. radix 2^a * 3^b * 4^c * 5^d)
+///  They use twiddles for 4-way parallel DFTS (i.e. 4 DFTS with interleaved input/output)
+
+static int16_t W1_12s[8]__attribute__((aligned(32))) = {28377,-16383,28377,-16383,28377,-16383,28377,-16383};
+static int16_t W2_12s[8]__attribute__((aligned(32))) = {16383,-28377,16383,-28377,16383,-28377,16383,-28377};
+static int16_t W3_12s[8]__attribute__((aligned(32))) = {0,-32767,0,-32767,0,-32767,0,-32767};
+static int16_t W4_12s[8]__attribute__((aligned(32))) = {-16383,-28377,-16383,-28377,-16383,-28377,-16383,-28377};
+static int16_t W6_12s[8]__attribute__((aligned(32))) = {-32767,0,-32767,0,-32767,0,-32767,0};
+
+simd_q15_t *W1_12=(simd_q15_t *)W1_12s;
+simd_q15_t *W2_12=(simd_q15_t *)W2_12s;
+simd_q15_t *W3_12=(simd_q15_t *)W3_12s;
+simd_q15_t *W4_12=(simd_q15_t *)W4_12s;
+simd_q15_t *W6_12=(simd_q15_t *)W6_12s;
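+
+/* The W*_12 tables hold the 12-point twiddles W_12^n = exp(-2*pi*i*n/12) in
+ * Q15, each (re,im) pair replicated four times for the 4-way parallel layout:
+ *   W1_12 = cos(pi/6) - i*sin(pi/6) ~ ( 28377, -16383)
+ *   W3_12 = -i                      = (     0, -32767)
+ *   W6_12 = -1                      = (-32767,      0)
+ */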
+
+
+static simd_q15_t norm128; // shared scratch: Q15 normalisation factor, set before use in each dftN
+
+static inline void dft12f(simd_q15_t *x0,
+                          simd_q15_t *x1,
+                          simd_q15_t *x2,
+                          simd_q15_t *x3,
+                          simd_q15_t *x4,
+                          simd_q15_t *x5,
+                          simd_q15_t *x6,
+                          simd_q15_t *x7,
+                          simd_q15_t *x8,
+                          simd_q15_t *x9,
+                          simd_q15_t *x10,
+                          simd_q15_t *x11,
+                          simd_q15_t *y0,
+                          simd_q15_t *y1,
+                          simd_q15_t *y2,
+                          simd_q15_t *y3,
+                          simd_q15_t *y4,
+                          simd_q15_t *y5,
+                          simd_q15_t *y6,
+                          simd_q15_t *y7,
+                          simd_q15_t *y8,
+                          simd_q15_t *y9,
+                          simd_q15_t *y10,
+                          simd_q15_t *y11) __attribute__((always_inline));
+
+static inline void dft12f(simd_q15_t *x0,
+                          simd_q15_t *x1,
+                          simd_q15_t *x2,
+                          simd_q15_t *x3,
+                          simd_q15_t *x4,
+                          simd_q15_t *x5,
+                          simd_q15_t *x6,
+                          simd_q15_t *x7,
+                          simd_q15_t *x8,
+                          simd_q15_t *x9,
+                          simd_q15_t *x10,
+                          simd_q15_t *x11,
+                          simd_q15_t *y0,
+                          simd_q15_t *y1,
+                          simd_q15_t *y2,
+                          simd_q15_t *y3,
+                          simd_q15_t *y4,
+                          simd_q15_t *y5,
+                          simd_q15_t *y6,
+                          simd_q15_t *y7,
+                          simd_q15_t *y8,
+                          simd_q15_t *y9,
+                          simd_q15_t *y10,
+                          simd_q15_t *y11)
+{
+
+
+  simd_q15_t tmp_dft12[12];
+
+  // msg("dft12\n");
+
+  bfly4_tw1(x0,
+            x3,
+            x6,
+            x9,
+            tmp_dft12,
+            tmp_dft12+3,
+            tmp_dft12+6,
+            tmp_dft12+9);
+
+  bfly4_tw1(x1,
+            x4,
+            x7,
+            x10,
+            tmp_dft12+1,
+            tmp_dft12+4,
+            tmp_dft12+7,
+            tmp_dft12+10);
+
+
+  bfly4_tw1(x2,
+            x5,
+            x8,
+            x11,
+            tmp_dft12+2,
+            tmp_dft12+5,
+            tmp_dft12+8,
+            tmp_dft12+11);
+
+  //  k2=0;
+  bfly3_tw1(tmp_dft12,
+            tmp_dft12+1,
+            tmp_dft12+2,
+            y0,
+            y4,
+            y8);
+
+
+
+  //  k2=1;
+  bfly3(tmp_dft12+3,
+        tmp_dft12+4,
+        tmp_dft12+5,
+        y1,
+        y5,
+        y9,
+        W1_12,
+        W2_12);
+
+
+
+  //  k2=2;
+  bfly3(tmp_dft12+6,
+        tmp_dft12+7,
+        tmp_dft12+8,
+        y2,
+        y6,
+        y10,
+        W2_12,
+        W4_12);
+
+  //  k2=3;
+  bfly3(tmp_dft12+9,
+        tmp_dft12+10,
+        tmp_dft12+11,
+        y3,
+        y7,
+        y11,
+        W3_12,
+        W6_12);
+
+}
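+
+/* dft12f() factors N=12 as 4 x 3: three trivial-twiddle radix-4 stages
+ * (bfly4_tw1) on the stride-3 input groups, then a radix-3 pass whose
+ * twiddles W_12^k and W_12^(2k) for k=1,2,3 come from the W*_12 tables. */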
+
+void dft12(int16_t *x,int16_t *y ,unsigned char scale_flag)
+{
+
+  simd_q15_t *x128 = (simd_q15_t *)x,*y128 = (simd_q15_t *)y;
+  dft12f(&x128[0],
+         &x128[1],
+         &x128[2],
+         &x128[3],
+         &x128[4],
+         &x128[5],
+         &x128[6],
+         &x128[7],
+         &x128[8],
+         &x128[9],
+         &x128[10],
+         &x128[11],
+         &y128[0],
+         &y128[1],
+         &y128[2],
+         &y128[3],
+         &y128[4],
+         &y128[5],
+         &y128[6],
+         &y128[7],
+         &y128[8],
+         &y128[9],
+         &y128[10],
+         &y128[11]);
+
+}
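+
+/* Note that dft12() computes four independent 12-point DFTs in one call:
+ * each simd_q15_t holds one complex Q15 sample from each of the four
+ * interleaved transforms, so x and y span 12 vectors = 48 complex samples. */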
+
+static int16_t tw24[88]__attribute__((aligned(32)));
+
+void dft24(int16_t *x,int16_t *y,unsigned char scale_flag)
+{
+
+  simd_q15_t *x128=(simd_q15_t *)x;
+  simd_q15_t *y128=(simd_q15_t *)y;
+  simd_q15_t *tw128=(simd_q15_t *)&tw24[0];
+  simd_q15_t ytmp128[24];//=&ytmp128array[0];
+  int i,j,k;
+
+  //  msg("dft24\n");
+  dft12f(x128,
+         x128+2,
+         x128+4,
+         x128+6,
+         x128+8,
+         x128+10,
+         x128+12,
+         x128+14,
+         x128+16,
+         x128+18,
+         x128+20,
+         x128+22,
+         ytmp128,
+         ytmp128+2,
+         ytmp128+4,
+         ytmp128+6,
+         ytmp128+8,
+         ytmp128+10,
+         ytmp128+12,
+         ytmp128+14,
+         ytmp128+16,
+         ytmp128+18,
+         ytmp128+20,
+         ytmp128+22);
+  //  msg("dft24b\n");
+
+  dft12f(x128+1,
+         x128+3,
+         x128+5,
+         x128+7,
+         x128+9,
+         x128+11,
+         x128+13,
+         x128+15,
+         x128+17,
+         x128+19,
+         x128+21,
+         x128+23,
+         ytmp128+1,
+         ytmp128+3,
+         ytmp128+5,
+         ytmp128+7,
+         ytmp128+9,
+         ytmp128+11,
+         ytmp128+13,
+         ytmp128+15,
+         ytmp128+17,
+         ytmp128+19,
+         ytmp128+21,
+         ytmp128+23);
+
+  //  msg("dft24c\n");
+
+  bfly2_tw1(ytmp128,
+            ytmp128+1,
+            y128,
+            y128+12);
+
+  //  msg("dft24d\n");
+
+  for (i=2,j=1,k=0; i<24; i+=2,j++,k++) {
+
+    bfly2(ytmp128+i,
+          ytmp128+i+1,
+          y128+j,
+          y128+j+12,
+          tw128+k);
+    //    msg("dft24e\n");
+  }
+
+  if (scale_flag==1) {
+    norm128 = set1_int16(dft_norm_table[1]);
+
+    for (i=0; i<24; i++) {
+      y128[i] = mulhi_int16(y128[i],norm128);
+    }
+  }
+}
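+
+/* dft24() is a 2 x 12 decimation in time: the two dft12f() calls transform
+ * the even- and odd-indexed inputs and bfly2() merges them with the tw24
+ * twiddles (11 nontrivial W_24^k values, each stored as a 4-way replicated
+ * (re,im) Q15 pair, hence 11*4*2 = 88 int16_t). The larger PUSCH sizes below
+ * repeat this pattern with radix-2/3/4/5 merges. */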
+
+static int16_t twa36[88]__attribute__((aligned(32)));
+static int16_t twb36[88]__attribute__((aligned(32)));
+
+void dft36(int16_t *x,int16_t *y,unsigned char scale_flag)
+{
+
+  simd_q15_t *x128=(simd_q15_t *)x;
+  simd_q15_t *y128=(simd_q15_t *)y;
+  simd_q15_t *twa128=(simd_q15_t *)&twa36[0];
+  simd_q15_t *twb128=(simd_q15_t *)&twb36[0];
+  simd_q15_t ytmp128[36];//&ytmp128array[0];
+
+
+  int i,j,k;
+
+  dft12f(x128,
+         x128+3,
+         x128+6,
+         x128+9,
+         x128+12,
+         x128+15,
+         x128+18,
+         x128+21,
+         x128+24,
+         x128+27,
+         x128+30,
+         x128+33,
+         ytmp128,
+         ytmp128+3,
+         ytmp128+6,
+         ytmp128+9,
+         ytmp128+12,
+         ytmp128+15,
+         ytmp128+18,
+         ytmp128+21,
+         ytmp128+24,
+         ytmp128+27,
+         ytmp128+30,
+         ytmp128+33);
+
+  dft12f(x128+1,
+         x128+4,
+         x128+7,
+         x128+10,
+         x128+13,
+         x128+16,
+         x128+19,
+         x128+22,
+         x128+25,
+         x128+28,
+         x128+31,
+         x128+34,
+         ytmp128+1,
+         ytmp128+4,
+         ytmp128+7,
+         ytmp128+10,
+         ytmp128+13,
+         ytmp128+16,
+         ytmp128+19,
+         ytmp128+22,
+         ytmp128+25,
+         ytmp128+28,
+         ytmp128+31,
+         ytmp128+34);
+
+  dft12f(x128+2,
+         x128+5,
+         x128+8,
+         x128+11,
+         x128+14,
+         x128+17,
+         x128+20,
+         x128+23,
+         x128+26,
+         x128+29,
+         x128+32,
+         x128+35,
+         ytmp128+2,
+         ytmp128+5,
+         ytmp128+8,
+         ytmp128+11,
+         ytmp128+14,
+         ytmp128+17,
+         ytmp128+20,
+         ytmp128+23,
+         ytmp128+26,
+         ytmp128+29,
+         ytmp128+32,
+         ytmp128+35);
+
+
+  bfly3_tw1(ytmp128,
+            ytmp128+1,
+            ytmp128+2,
+            y128,
+            y128+12,
+            y128+24);
+
+  for (i=3,j=1,k=0; i<36; i+=3,j++,k++) {
+
+    bfly3(ytmp128+i,
+          ytmp128+i+1,
+          ytmp128+i+2,
+          y128+j,
+          y128+j+12,
+          y128+j+24,
+          twa128+k,
+          twb128+k);
+  }
+
+  if (scale_flag==1) {
+    norm128 = set1_int16(dft_norm_table[2]);
+
+    for (i=0; i<36; i++) {
+      y128[i] = mulhi_int16(y128[i],norm128);
+    }
+  }
+}
+
+static int16_t twa48[88]__attribute__((aligned(32)));
+static int16_t twb48[88]__attribute__((aligned(32)));
+static int16_t twc48[88]__attribute__((aligned(32)));
+
+void dft48(int16_t *x, int16_t *y,unsigned char scale_flag)
+{
+
+  simd_q15_t *x128=(simd_q15_t *)x;
+  simd_q15_t *y128=(simd_q15_t *)y;
+  simd_q15_t *twa128=(simd_q15_t *)&twa48[0];
+  simd_q15_t *twb128=(simd_q15_t *)&twb48[0];
+  simd_q15_t *twc128=(simd_q15_t *)&twc48[0];
+  simd_q15_t ytmp128[48];//=&ytmp128array[0];
+  int i,j,k;
+
+
+  dft12f(x128,
+         x128+4,
+         x128+8,
+         x128+12,
+         x128+16,
+         x128+20,
+         x128+24,
+         x128+28,
+         x128+32,
+         x128+36,
+         x128+40,
+         x128+44,
+         ytmp128,
+         ytmp128+4,
+         ytmp128+8,
+         ytmp128+12,
+         ytmp128+16,
+         ytmp128+20,
+         ytmp128+24,
+         ytmp128+28,
+         ytmp128+32,
+         ytmp128+36,
+         ytmp128+40,
+         ytmp128+44);
+
+
+  dft12f(x128+1,
+         x128+5,
+         x128+9,
+         x128+13,
+         x128+17,
+         x128+21,
+         x128+25,
+         x128+29,
+         x128+33,
+         x128+37,
+         x128+41,
+         x128+45,
+         ytmp128+1,
+         ytmp128+5,
+         ytmp128+9,
+         ytmp128+13,
+         ytmp128+17,
+         ytmp128+21,
+         ytmp128+25,
+         ytmp128+29,
+         ytmp128+33,
+         ytmp128+37,
+         ytmp128+41,
+         ytmp128+45);
+
+
+  dft12f(x128+2,
+         x128+6,
+         x128+10,
+         x128+14,
+         x128+18,
+         x128+22,
+         x128+26,
+         x128+30,
+         x128+34,
+         x128+38,
+         x128+42,
+         x128+46,
+         ytmp128+2,
+         ytmp128+6,
+         ytmp128+10,
+         ytmp128+14,
+         ytmp128+18,
+         ytmp128+22,
+         ytmp128+26,
+         ytmp128+30,
+         ytmp128+34,
+         ytmp128+38,
+         ytmp128+42,
+         ytmp128+46);
+
+
+  dft12f(x128+3,
+         x128+7,
+         x128+11,
+         x128+15,
+         x128+19,
+         x128+23,
+         x128+27,
+         x128+31,
+         x128+35,
+         x128+39,
+         x128+43,
+         x128+47,
+         ytmp128+3,
+         ytmp128+7,
+         ytmp128+11,
+         ytmp128+15,
+         ytmp128+19,
+         ytmp128+23,
+         ytmp128+27,
+         ytmp128+31,
+         ytmp128+35,
+         ytmp128+39,
+         ytmp128+43,
+         ytmp128+47);
+
+
+
+  bfly4_tw1(ytmp128,
+            ytmp128+1,
+            ytmp128+2,
+            ytmp128+3,
+            y128,
+            y128+12,
+            y128+24,
+            y128+36);
+
+
+
+  for (i=4,j=1,k=0; i<48; i+=4,j++,k++) {
+
+    bfly4(ytmp128+i,
+          ytmp128+i+1,
+          ytmp128+i+2,
+          ytmp128+i+3,
+          y128+j,
+          y128+j+12,
+          y128+j+24,
+          y128+j+36,
+          twa128+k,
+          twb128+k,
+          twc128+k);
+
+  }
+
+  if (scale_flag == 1) {
+    norm128 = set1_int16(dft_norm_table[3]);
+
+    for (i=0; i<48; i++) {
+      y128[i] = mulhi_int16(y128[i],norm128);
+    }
+  }
+}
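+
+/* The radix-4 merge in dft48() consumes three twiddle tables per output
+ * index, twa48 = W_48^k, twb48 = W_48^(2k), twc48 = W_48^(3k) for k=1..11,
+ * passed together to each bfly4() call. */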
+
+static int16_t twa60[88]__attribute__((aligned(32)));
+static int16_t twb60[88]__attribute__((aligned(32)));
+static int16_t twc60[88]__attribute__((aligned(32)));
+static int16_t twd60[88]__attribute__((aligned(32)));
+
+void dft60(int16_t *x,int16_t *y,unsigned char scale)
+{
+
+  simd_q15_t *x128=(simd_q15_t *)x;
+  simd_q15_t *y128=(simd_q15_t *)y;
+  simd_q15_t *twa128=(simd_q15_t *)&twa60[0];
+  simd_q15_t *twb128=(simd_q15_t *)&twb60[0];
+  simd_q15_t *twc128=(simd_q15_t *)&twc60[0];
+  simd_q15_t *twd128=(simd_q15_t *)&twd60[0];
+  simd_q15_t ytmp128[60];//=&ytmp128array[0];
+  int i,j,k;
+
+  dft12f(x128,
+         x128+5,
+         x128+10,
+         x128+15,
+         x128+20,
+         x128+25,
+         x128+30,
+         x128+35,
+         x128+40,
+         x128+45,
+         x128+50,
+         x128+55,
+         ytmp128,
+         ytmp128+5,
+         ytmp128+10,
+         ytmp128+15,
+         ytmp128+20,
+         ytmp128+25,
+         ytmp128+30,
+         ytmp128+35,
+         ytmp128+40,
+         ytmp128+45,
+         ytmp128+50,
+         ytmp128+55);
+
+  dft12f(x128+1,
+         x128+6,
+         x128+11,
+         x128+16,
+         x128+21,
+         x128+26,
+         x128+31,
+         x128+36,
+         x128+41,
+         x128+46,
+         x128+51,
+         x128+56,
+         ytmp128+1,
+         ytmp128+6,
+         ytmp128+11,
+         ytmp128+16,
+         ytmp128+21,
+         ytmp128+26,
+         ytmp128+31,
+         ytmp128+36,
+         ytmp128+41,
+         ytmp128+46,
+         ytmp128+51,
+         ytmp128+56);
+
+  dft12f(x128+2,
+         x128+7,
+         x128+12,
+         x128+17,
+         x128+22,
+         x128+27,
+         x128+32,
+         x128+37,
+         x128+42,
+         x128+47,
+         x128+52,
+         x128+57,
+         ytmp128+2,
+         ytmp128+7,
+         ytmp128+12,
+         ytmp128+17,
+         ytmp128+22,
+         ytmp128+27,
+         ytmp128+32,
+         ytmp128+37,
+         ytmp128+42,
+         ytmp128+47,
+         ytmp128+52,
+         ytmp128+57);
+
+  dft12f(x128+3,
+         x128+8,
+         x128+13,
+         x128+18,
+         x128+23,
+         x128+28,
+         x128+33,
+         x128+38,
+         x128+43,
+         x128+48,
+         x128+53,
+         x128+58,
+         ytmp128+3,
+         ytmp128+8,
+         ytmp128+13,
+         ytmp128+18,
+         ytmp128+23,
+         ytmp128+28,
+         ytmp128+33,
+         ytmp128+38,
+         ytmp128+43,
+         ytmp128+48,
+         ytmp128+53,
+         ytmp128+58);
+
+  dft12f(x128+4,
+         x128+9,
+         x128+14,
+         x128+19,
+         x128+24,
+         x128+29,
+         x128+34,
+         x128+39,
+         x128+44,
+         x128+49,
+         x128+54,
+         x128+59,
+         ytmp128+4,
+         ytmp128+9,
+         ytmp128+14,
+         ytmp128+19,
+         ytmp128+24,
+         ytmp128+29,
+         ytmp128+34,
+         ytmp128+39,
+         ytmp128+44,
+         ytmp128+49,
+         ytmp128+54,
+         ytmp128+59);
+
+  bfly5_tw1(ytmp128,
+            ytmp128+1,
+            ytmp128+2,
+            ytmp128+3,
+            ytmp128+4,
+            y128,
+            y128+12,
+            y128+24,
+            y128+36,
+            y128+48);
+
+  for (i=5,j=1,k=0; i<60; i+=5,j++,k++) {
+
+    bfly5(ytmp128+i,
+          ytmp128+i+1,
+          ytmp128+i+2,
+          ytmp128+i+3,
+          ytmp128+i+4,
+          y128+j,
+          y128+j+12,
+          y128+j+24,
+          y128+j+36,
+          y128+j+48,
+          twa128+k,
+          twb128+k,
+          twc128+k,
+          twd128+k);
+  }
+
+  if (scale == 1) {
+    norm128 = set1_int16(dft_norm_table[4]);
+
+    for (i=0; i<60; i++) {
+      y128[i] = mulhi_int16(y128[i],norm128);
+//      printf("y[%d] = (%d,%d)\n",i,((int16_t*)&y128[i])[0],((int16_t*)&y128[i])[1]);
+    }
+  }
+}
+
+static int16_t tw72[280]__attribute__((aligned(32)));
+
+void dft72(int16_t *x,int16_t *y,unsigned char scale_flag)
+{
+
+  int i,j;
+  simd_q15_t *x128=(simd_q15_t *)x;
+  simd_q15_t *y128=(simd_q15_t *)y;
+  simd_q15_t *tw128=(simd_q15_t *)&tw72[0];
+  simd_q15_t x2128[72];// = (simd_q15_t *)&x2128array[0];
+
+  simd_q15_t ytmp128[72];//=&ytmp128array2[0];
+
+  for (i=0,j=0; i<36; i++,j+=2) {
+    x2128[i]    = x128[j];    // even inputs
+    x2128[i+36] = x128[j+1];  // odd inputs
+  }
+
+  dft36((int16_t *)x2128,(int16_t *)ytmp128,1);
+  dft36((int16_t *)(x2128+36),(int16_t *)(ytmp128+36),1);
+
+  bfly2_tw1(ytmp128,ytmp128+36,y128,y128+36);
+
+  for (i=1,j=0; i<36; i++,j++) {
+    bfly2(ytmp128+i,
+          ytmp128+36+i,
+          y128+i,
+          y128+36+i,
+          tw128+j);
+  }
+
+  if (scale_flag==1) {
+    norm128 = set1_int16(dft_norm_table[5]);
+
+    for (i=0; i<72; i++) {
+      y128[i] = mulhi_int16(y128[i],norm128);
+    }
+  }
+}
+
+static int16_t tw96[376]__attribute__((aligned(32)));
+
+void dft96(int16_t *x,int16_t *y,unsigned char scale_flag)
+{
+
+
+  int i,j;
+  simd_q15_t *x128=(simd_q15_t *)x;
+  simd_q15_t *y128=(simd_q15_t *)y;
+  simd_q15_t *tw128=(simd_q15_t *)&tw96[0];
+  simd_q15_t x2128[96];// = (simd_q15_t *)&x2128array[0];
+  simd_q15_t ytmp128[96];//=&ytmp128array2[0];
+
+
+  for (i=0,j=0; i<48; i++,j+=2) {
+    x2128[i]    = x128[j];
+    x2128[i+48] = x128[j+1];
+  }
+
+  dft48((int16_t *)x2128,(int16_t *)ytmp128,0);
+  dft48((int16_t *)(x2128+48),(int16_t *)(ytmp128+48),0);
+
+
+  bfly2_tw1(ytmp128,ytmp128+48,y128,y128+48);
+
+  for (i=1,j=0; i<48; i++,j++) {
+    bfly2(ytmp128+i,
+          ytmp128+48+i,
+          y128+i,
+          y128+48+i,
+          tw128+j);
+  }
+
+  if (scale_flag==1) {
+    norm128 = set1_int16(dft_norm_table[6]);
+
+    for (i=0; i<96; i++) {
+      y128[i] = mulhi_int16(y128[i],norm128);
+    }
+  }
+}
+
+static int16_t twa108[280]__attribute__((aligned(32)));
+static int16_t twb108[280]__attribute__((aligned(32)));
+
+void dft108(int16_t *x,int16_t *y,unsigned char scale_flag)
+{
+  int i,j;
+  simd_q15_t *x128=(simd_q15_t *)x;
+  simd_q15_t *y128=(simd_q15_t *)y;
+  simd_q15_t *twa128=(simd_q15_t *)&twa108[0];
+  simd_q15_t *twb128=(simd_q15_t *)&twb108[0];
+  simd_q15_t x2128[108];// = (simd_q15_t *)&x2128array[0];
+  simd_q15_t ytmp128[108];//=&ytmp128array2[0];
+
+
+  for (i=0,j=0; i<36; i++,j+=3) {
+    x2128[i]    = x128[j];
+    x2128[i+36] = x128[j+1];
+    x2128[i+72] = x128[j+2];
+  }
+
+  dft36((int16_t *)x2128,(int16_t *)ytmp128,0);
+  dft36((int16_t *)(x2128+36),(int16_t *)(ytmp128+36),0);
+  dft36((int16_t *)(x2128+72),(int16_t *)(ytmp128+72),0);
+
+  bfly3_tw1(ytmp128,ytmp128+36,ytmp128+72,y128,y128+36,y128+72);
+
+  for (i=1,j=0; i<36; i++,j++) {
+    bfly3(ytmp128+i,
+          ytmp128+36+i,
+          ytmp128+72+i,
+          y128+i,
+          y128+36+i,
+          y128+72+i,
+          twa128+j,
+          twb128+j);
+
+  }
+
+  if (scale_flag==1) {
+    norm128 = set1_int16(dft_norm_table[7]);
+
+    for (i=0; i<108; i++) {
+      y128[i] = mulhi_int16(y128[i],norm128);
+    }
+  }
+}
+
+static int16_t tw120[472]__attribute__((aligned(32)));
+void dft120(int16_t *x,int16_t *y, unsigned char scale_flag)
+{
+  int i,j;
+  simd_q15_t *x128=(simd_q15_t *)x;
+  simd_q15_t *y128=(simd_q15_t *)y;
+  simd_q15_t *tw128=(simd_q15_t *)&tw120[0];
+  simd_q15_t x2128[120];// = (simd_q15_t *)&x2128array[0];
+  simd_q15_t ytmp128[120];//=&ytmp128array2[0];
+
+  for (i=0,j=0; i<60; i++,j+=2) {
+    x2128[i]    = x128[j];
+    x2128[i+60] = x128[j+1];
+  }
+
+  dft60((int16_t *)x2128,(int16_t *)ytmp128,0);
+  dft60((int16_t *)(x2128+60),(int16_t *)(ytmp128+60),0);
+
+
+  bfly2_tw1(ytmp128,ytmp128+60,y128,y128+60);
+
+  for (i=1,j=0; i<60; i++,j++) {
+    bfly2(ytmp128+i,
+          ytmp128+60+i,
+          y128+i,
+          y128+60+i,
+          tw128+j);
+  }
+
+  if (scale_flag==1) {
+    norm128 = set1_int16(dft_norm_table[8]);
+
+    for (i=0; i<120; i++) {
+      y128[i] = mulhi_int16(y128[i],norm128);
+    }
+  }
+}
+
+static int16_t twa144[376]__attribute__((aligned(32)));
+static int16_t twb144[376]__attribute__((aligned(32)));
+
+void dft144(int16_t *x,int16_t *y,unsigned char scale_flag)
+{
+  int i,j;
+  simd_q15_t *x128=(simd_q15_t *)x;
+  simd_q15_t *y128=(simd_q15_t *)y;
+  simd_q15_t *twa128=(simd_q15_t *)&twa144[0];
+  simd_q15_t *twb128=(simd_q15_t *)&twb144[0];
+  simd_q15_t x2128[144];// = (simd_q15_t *)&x2128array[0];
+  simd_q15_t ytmp128[144];//=&ytmp128array2[0];
+
+
+
+  for (i=0,j=0; i<48; i++,j+=3) {
+    x2128[i]    = x128[j];
+    x2128[i+48] = x128[j+1];
+    x2128[i+96] = x128[j+2];
+  }
+
+  dft48((int16_t *)x2128,(int16_t *)ytmp128,1);
+  dft48((int16_t *)(x2128+48),(int16_t *)(ytmp128+48),1);
+  dft48((int16_t *)(x2128+96),(int16_t *)(ytmp128+96),1);
+
+  bfly3_tw1(ytmp128,ytmp128+48,ytmp128+96,y128,y128+48,y128+96);
+
+  for (i=1,j=0; i<48; i++,j++) {
+    bfly3(ytmp128+i,
+          ytmp128+48+i,
+          ytmp128+96+i,
+          y128+i,
+          y128+48+i,
+          y128+96+i,
+          twa128+j,
+          twb128+j);
+  }
+
+  if (scale_flag==1) {
+    norm128 = set1_int16(dft_norm_table[9]);
+
+    for (i=0; i<144; i++) {
+      y128[i] = mulhi_int16(y128[i],norm128);
+    }
+  }
+}
+
+static int16_t twa180[472]__attribute__((aligned(32)));
+static int16_t twb180[472]__attribute__((aligned(32)));
+
+void dft180(int16_t *x,int16_t *y,unsigned char scale_flag)
+{
+
+  int i,j;
+  simd_q15_t *x128=(simd_q15_t *)x;
+  simd_q15_t *y128=(simd_q15_t *)y;
+  simd_q15_t *twa128=(simd_q15_t *)&twa180[0];
+  simd_q15_t *twb128=(simd_q15_t *)&twb180[0];
+  simd_q15_t x2128[180];// = (simd_q15_t *)&x2128array[0];
+  simd_q15_t ytmp128[180];//=&ytmp128array2[0];
+
+
+
+  for (i=0,j=0; i<60; i++,j+=3) {
+    x2128[i]    = x128[j];
+    x2128[i+60] = x128[j+1];
+    x2128[i+120] = x128[j+2];
+  }
+
+  dft60((int16_t *)x2128,(int16_t *)ytmp128,1);
+  dft60((int16_t *)(x2128+60),(int16_t *)(ytmp128+60),1);
+  dft60((int16_t *)(x2128+120),(int16_t *)(ytmp128+120),1);
+
+  bfly3_tw1(ytmp128,ytmp128+60,ytmp128+120,y128,y128+60,y128+120);
+
+  for (i=1,j=0; i<60; i++,j++) {
+    bfly3(ytmp128+i,
+          ytmp128+60+i,
+          ytmp128+120+i,
+          y128+i,
+          y128+60+i,
+          y128+120+i,
+          twa128+j,
+          twb128+j);
+  }
+
+  if (scale_flag==1) {
+    norm128 = set1_int16(dft_norm_table[10]);
+
+    for (i=0; i<180; i++) {
+      y128[i] = mulhi_int16(y128[i],norm128);
+    }
+  }
+}
+
+static int16_t twa192[376]__attribute__((aligned(32)));
+static int16_t twb192[376]__attribute__((aligned(32)));
+static int16_t twc192[376]__attribute__((aligned(32)));
+
+void dft192(int16_t *x,int16_t *y,unsigned char scale_flag)
+{
+
+  int i,j;
+  simd_q15_t *x128=(simd_q15_t *)x;
+  simd_q15_t *y128=(simd_q15_t *)y;
+  simd_q15_t *twa128=(simd_q15_t *)&twa192[0];
+  simd_q15_t *twb128=(simd_q15_t *)&twb192[0];
+  simd_q15_t *twc128=(simd_q15_t *)&twc192[0];
+  simd_q15_t x2128[192];// = (simd_q15_t *)&x2128array[0];
+  simd_q15_t ytmp128[192];//=&ytmp128array2[0];
+
+
+
+  for (i=0,j=0; i<48; i++,j+=4) {
+    x2128[i]    = x128[j];
+    x2128[i+48] = x128[j+1];
+    x2128[i+96] = x128[j+2];
+    x2128[i+144] = x128[j+3];
+  }
+
+  dft48((int16_t *)x2128,(int16_t *)ytmp128,1);
+  dft48((int16_t *)(x2128+48),(int16_t *)(ytmp128+48),1);
+  dft48((int16_t *)(x2128+96),(int16_t *)(ytmp128+96),1);
+  dft48((int16_t *)(x2128+144),(int16_t *)(ytmp128+144),1);
+
+  bfly4_tw1(ytmp128,ytmp128+48,ytmp128+96,ytmp128+144,y128,y128+48,y128+96,y128+144);
+
+  for (i=1,j=0; i<48; i++,j++) {
+    bfly4(ytmp128+i,
+          ytmp128+48+i,
+          ytmp128+96+i,
+          ytmp128+144+i,
+          y128+i,
+          y128+48+i,
+          y128+96+i,
+          y128+144+i,
+          twa128+j,
+          twb128+j,
+          twc128+j);
+  }
+
+  if (scale_flag==1) {
+    norm128 = set1_int16(dft_norm_table[11]);
+
+    for (i=0; i<192; i++) {
+      y128[i] = mulhi_int16(y128[i],norm128);
+    }
+  }
+}
+
+static int16_t twa216[568]__attribute__((aligned(32)));
+static int16_t twb216[568]__attribute__((aligned(32)));
+
+void dft216(int16_t *x,int16_t *y,unsigned char scale_flag)
+{
+
+  int i,j;
+  simd_q15_t *x128=(simd_q15_t *)x;
+  simd_q15_t *y128=(simd_q15_t *)y;
+  simd_q15_t *twa128=(simd_q15_t *)&twa216[0];
+  simd_q15_t *twb128=(simd_q15_t *)&twb216[0];
+  simd_q15_t x2128[216];// = (simd_q15_t *)&x2128array[0];
+  simd_q15_t ytmp128[216];//=&ytmp128array3[0];
+
+
+
+  for (i=0,j=0; i<72; i++,j+=3) {
+    x2128[i]    = x128[j];
+    x2128[i+72] = x128[j+1];
+    x2128[i+144] = x128[j+2];
+  }
+
+  dft72((int16_t *)x2128,(int16_t *)ytmp128,1);
+  dft72((int16_t *)(x2128+72),(int16_t *)(ytmp128+72),1);
+  dft72((int16_t *)(x2128+144),(int16_t *)(ytmp128+144),1);
+
+  bfly3_tw1(ytmp128,ytmp128+72,ytmp128+144,y128,y128+72,y128+144);
+
+  for (i=1,j=0; i<72; i++,j++) {
+    bfly3(ytmp128+i,
+          ytmp128+72+i,
+          ytmp128+144+i,
+          y128+i,
+          y128+72+i,
+          y128+144+i,
+          twa128+j,
+          twb128+j);
+  }
+
+  if (scale_flag==1) {
+    norm128 = set1_int16(dft_norm_table[12]);
+
+    for (i=0; i<216; i++) {
+      y128[i] = mulhi_int16(y128[i],norm128);
+    }
+  }
+}
+
+static int16_t twa240[472]__attribute__((aligned(32)));
+static int16_t twb240[472]__attribute__((aligned(32)));
+static int16_t twc240[472]__attribute__((aligned(32)));
+
+void dft240(int16_t *x,int16_t *y,unsigned char scale_flag)
+{
+
+  int i,j;
+  simd_q15_t *x128=(simd_q15_t *)x;
+  simd_q15_t *y128=(simd_q15_t *)y;
+  simd_q15_t *twa128=(simd_q15_t *)&twa240[0];
+  simd_q15_t *twb128=(simd_q15_t *)&twb240[0];
+  simd_q15_t *twc128=(simd_q15_t *)&twc240[0];
+  simd_q15_t x2128[240];// = (simd_q15_t *)&x2128array[0];
+  simd_q15_t ytmp128[240];//=&ytmp128array2[0];
+
+
+
+  for (i=0,j=0; i<60; i++,j+=4) {
+    x2128[i]    = x128[j];
+    x2128[i+60] = x128[j+1];
+    x2128[i+120] = x128[j+2];
+    x2128[i+180] = x128[j+3];
+  }
+
+  dft60((int16_t *)x2128,(int16_t *)ytmp128,1);
+  dft60((int16_t *)(x2128+60),(int16_t *)(ytmp128+60),1);
+  dft60((int16_t *)(x2128+120),(int16_t *)(ytmp128+120),1);
+  dft60((int16_t *)(x2128+180),(int16_t *)(ytmp128+180),1);
+
+  bfly4_tw1(ytmp128,ytmp128+60,ytmp128+120,ytmp128+180,y128,y128+60,y128+120,y128+180);
+
+  for (i=1,j=0; i<60; i++,j++) {
+    bfly4(ytmp128+i,
+          ytmp128+60+i,
+          ytmp128+120+i,
+          ytmp128+180+i,
+          y128+i,
+          y128+60+i,
+          y128+120+i,
+          y128+180+i,
+          twa128+j,
+          twb128+j,
+          twc128+j);
+  }
+
+  if (scale_flag==1) {
+    norm128 = set1_int16(dft_norm_table[13]);
+
+    for (i=0; i<240; i++) {
+      y128[i] = mulhi_int16(y128[i],norm128);
+    }
+  }
+}
+
+static int16_t twa288[760]__attribute__((aligned(32)));
+static int16_t twb288[760]__attribute__((aligned(32)));
+
+void dft288(int16_t *x,int16_t *y,unsigned char scale_flag)
+{
+
+  int i,j;
+  simd_q15_t *x128=(simd_q15_t *)x;
+  simd_q15_t *y128=(simd_q15_t *)y;
+  simd_q15_t *twa128=(simd_q15_t *)&twa288[0];
+  simd_q15_t *twb128=(simd_q15_t *)&twb288[0];
+  simd_q15_t x2128[288];// = (simd_q15_t *)&x2128array[0];
+  simd_q15_t ytmp128[288];//=&ytmp128array3[0];
+
+
+
+  for (i=0,j=0; i<96; i++,j+=3) {
+    x2128[i]    = x128[j];
+    x2128[i+96] = x128[j+1];
+    x2128[i+192] = x128[j+2];
+  }
+
+  dft96((int16_t *)x2128,(int16_t *)ytmp128,1);
+  dft96((int16_t *)(x2128+96),(int16_t *)(ytmp128+96),1);
+  dft96((int16_t *)(x2128+192),(int16_t *)(ytmp128+192),1);
+
+  bfly3_tw1(ytmp128,ytmp128+96,ytmp128+192,y128,y128+96,y128+192);
+
+  for (i=1,j=0; i<96; i++,j++) {
+    bfly3(ytmp128+i,
+          ytmp128+96+i,
+          ytmp128+192+i,
+          y128+i,
+          y128+96+i,
+          y128+192+i,
+          twa128+j,
+          twb128+j);
+  }
+
+  if (scale_flag==1) {
+    norm128 = set1_int16(dft_norm_table[14]);
+
+    for (i=0; i<288; i++) {
+      y128[i] = mulhi_int16(y128[i],norm128);
+    }
+  }
+}
+
+static int16_t twa300[472]__attribute__((aligned(32)));
+static int16_t twb300[472]__attribute__((aligned(32)));
+static int16_t twc300[472]__attribute__((aligned(32)));
+static int16_t twd300[472]__attribute__((aligned(32)));
+
+void dft300(int16_t *x,int16_t *y,unsigned char scale_flag)
+{
+
+  int i,j;
+  simd_q15_t *x128=(simd_q15_t *)x;
+  simd_q15_t *y128=(simd_q15_t *)y;
+  simd_q15_t *twa128=(simd_q15_t *)&twa300[0];
+  simd_q15_t *twb128=(simd_q15_t *)&twb300[0];
+  simd_q15_t *twc128=(simd_q15_t *)&twc300[0];
+  simd_q15_t *twd128=(simd_q15_t *)&twd300[0];
+  simd_q15_t x2128[300];// = (simd_q15_t *)&x2128array[0];
+  simd_q15_t ytmp128[300];//=&ytmp128array2[0];
+
+
+
+  for (i=0,j=0; i<60; i++,j+=5) {
+    x2128[i]    = x128[j];
+    x2128[i+60] = x128[j+1];
+    x2128[i+120] = x128[j+2];
+    x2128[i+180] = x128[j+3];
+    x2128[i+240] = x128[j+4];
+  }
+
+  dft60((int16_t *)x2128,(int16_t *)ytmp128,1);
+  dft60((int16_t *)(x2128+60),(int16_t *)(ytmp128+60),1);
+  dft60((int16_t *)(x2128+120),(int16_t *)(ytmp128+120),1);
+  dft60((int16_t *)(x2128+180),(int16_t *)(ytmp128+180),1);
+  dft60((int16_t *)(x2128+240),(int16_t *)(ytmp128+240),1);
+
+  bfly5_tw1(ytmp128,ytmp128+60,ytmp128+120,ytmp128+180,ytmp128+240,y128,y128+60,y128+120,y128+180,y128+240);
+
+  for (i=1,j=0; i<60; i++,j++) {
+    bfly5(ytmp128+i,
+          ytmp128+60+i,
+          ytmp128+120+i,
+          ytmp128+180+i,
+          ytmp128+240+i,
+          y128+i,
+          y128+60+i,
+          y128+120+i,
+          y128+180+i,
+          y128+240+i,
+          twa128+j,
+          twb128+j,
+          twc128+j,
+          twd128+j);
+  }
+
+  if (scale_flag==1) {
+    norm128 = set1_int16(dft_norm_table[15]);
+
+    for (i=0; i<300; i++) {
+      y128[i] = mulhi_int16(y128[i],norm128);
+    }
+  }
+}
+
+static int16_t twa324[107*2*4] __attribute__((aligned(32)));
+static int16_t twb324[107*2*4] __attribute__((aligned(32)));
+
+void dft324(int16_t *x,int16_t *y,unsigned char scale_flag)  // 108 x 3
+{
+  int i,j;
+  simd_q15_t *x128=(simd_q15_t *)x;
+  simd_q15_t *y128=(simd_q15_t *)y;
+  simd_q15_t *twa128=(simd_q15_t *)&twa324[0];
+  simd_q15_t *twb128=(simd_q15_t *)&twb324[0];
+  simd_q15_t x2128[324];// = (simd_q15_t *)&x2128array[0];
+  simd_q15_t ytmp128[324];//=&ytmp128array3[0];
+
+
+
+  for (i=0,j=0; i<108; i++,j+=3) {
+    x2128[i]    = x128[j];
+    x2128[i+108] = x128[j+1];
+    x2128[i+216] = x128[j+2];
+  }
+
+  dft108((int16_t *)x2128,(int16_t *)ytmp128,1);
+  dft108((int16_t *)(x2128+108),(int16_t *)(ytmp128+108),1);
+  dft108((int16_t *)(x2128+216),(int16_t *)(ytmp128+216),1);
+
+  bfly3_tw1(ytmp128,ytmp128+108,ytmp128+216,y128,y128+108,y128+216);
+
+  for (i=1,j=0; i<108; i++,j++) {
+    bfly3(ytmp128+i,
+          ytmp128+108+i,
+          ytmp128+216+i,
+          y128+i,
+          y128+108+i,
+          y128+216+i,
+          twa128+j,
+          twb128+j);
+  }
+
+  if (scale_flag==1) {
+    norm128 = set1_int16(dft_norm_table[14]);
+
+    for (i=0; i<324; i++) {
+      y128[i] = mulhi_int16(y128[i],norm128);
+    }
+  }
+}
+
+static int16_t twa360[119*2*4];
+static int16_t twb360[119*2*4];
+
+void dft360(int16_t *x,int16_t *y,unsigned char scale_flag)  // 120 x 3
+{
+  int i,j;
+  simd_q15_t *x128=(simd_q15_t *)x;
+  simd_q15_t *y128=(simd_q15_t *)y;
+  simd_q15_t *twa128=(simd_q15_t *)&twa360[0];
+  simd_q15_t *twb128=(simd_q15_t *)&twb360[0];
+  simd_q15_t x2128[360];// = (simd_q15_t *)&x2128array[0];
+  simd_q15_t ytmp128[360];//=&ytmp128array3[0];
+
+
+
+  for (i=0,j=0; i<120; i++,j+=3) {
+    x2128[i]    = x128[j];
+    x2128[i+120] = x128[j+1];
+    x2128[i+240] = x128[j+2];
+  }
+
+  dft120((int16_t *)x2128,(int16_t *)ytmp128,1);
+  dft120((int16_t *)(x2128+120),(int16_t *)(ytmp128+120),1);
+  dft120((int16_t *)(x2128+240),(int16_t *)(ytmp128+240),1);
+
+  bfly3_tw1(ytmp128,ytmp128+120,ytmp128+240,y128,y128+120,y128+240);
+
+  for (i=1,j=0; i<120; i++,j++) {
+    bfly3(ytmp128+i,
+          ytmp128+120+i,
+          ytmp128+240+i,
+          y128+i,
+          y128+120+i,
+          y128+240+i,
+          twa128+j,
+          twb128+j);
+  }
+
+  if (scale_flag==1) {
+    norm128 = set1_int16(dft_norm_table[14]);
+
+    for (i=0; i<360; i++) {
+      y128[i] = mulhi_int16(y128[i],norm128);
+    }
+  }
+}
+
+static int16_t twa384[95*2*4];
+static int16_t twb384[95*2*4];
+static int16_t twc384[95*2*4];
+
+void dft384(int16_t *x,int16_t *y,unsigned char scale_flag)  // 96 x 4
+{
+  int i,j;
+  simd_q15_t *x128=(simd_q15_t *)x;
+  simd_q15_t *y128=(simd_q15_t *)y;
+  simd_q15_t *twa128=(simd_q15_t *)&twa384[0];
+  simd_q15_t *twb128=(simd_q15_t *)&twb384[0];
+  simd_q15_t *twc128=(simd_q15_t *)&twc384[0];
+  simd_q15_t x2128[384];// = (simd_q15_t *)&x2128array[0];
+  simd_q15_t ytmp128[384];//=&ytmp128array2[0];
+
+
+
+  for (i=0,j=0; i<96; i++,j+=4) {
+    x2128[i]    = x128[j];
+    x2128[i+96] = x128[j+1];
+    x2128[i+192] = x128[j+2];
+    x2128[i+288] = x128[j+3];
+  }
+
+  dft96((int16_t *)x2128,(int16_t *)ytmp128,1);
+  dft96((int16_t *)(x2128+96),(int16_t *)(ytmp128+96),1);
+  dft96((int16_t *)(x2128+192),(int16_t *)(ytmp128+192),1);
+  dft96((int16_t *)(x2128+288),(int16_t *)(ytmp128+288),1);
+
+  bfly4_tw1(ytmp128,ytmp128+96,ytmp128+192,ytmp128+288,y128,y128+96,y128+192,y128+288);
+
+  for (i=1,j=0; i<96; i++,j++) {
+    bfly4(ytmp128+i,
+          ytmp128+96+i,
+          ytmp128+192+i,
+          ytmp128+288+i,
+          y128+i,
+          y128+96+i,
+          y128+192+i,
+          y128+288+i,
+          twa128+j,
+          twb128+j,
+          twc128+j);
+  }
+
+  if (scale_flag==1) {
+    norm128 = set1_int16(16384);//dft_norm_table[13]);
+
+    for (i=0; i<384; i++) {
+      y128[i] = mulhi_int16(y128[i],norm128);
+    }
+  }
+}
+
+static int16_t twa432[107*2*4];
+static int16_t twb432[107*2*4];
+static int16_t twc432[107*2*4];
+
+void dft432(int16_t *x,int16_t *y,unsigned char scale_flag)  // 108 x 4
+{
+  int i,j;
+  simd_q15_t *x128=(simd_q15_t *)x;
+  simd_q15_t *y128=(simd_q15_t *)y;
+  simd_q15_t *twa128=(simd_q15_t *)&twa432[0];
+  simd_q15_t *twb128=(simd_q15_t *)&twb432[0];
+  simd_q15_t *twc128=(simd_q15_t *)&twc432[0];
+  simd_q15_t x2128[432];// = (simd_q15_t *)&x2128array[0];
+  simd_q15_t ytmp128[432];//=&ytmp128array2[0];
+
+
+  for (i=0,j=0; i<108; i++,j+=4) {
+    x2128[i]    = x128[j];
+    x2128[i+108] = x128[j+1];
+    x2128[i+216] = x128[j+2];
+    x2128[i+324] = x128[j+3];
+  }
+
+  dft108((int16_t *)x2128,(int16_t *)ytmp128,1);
+  dft108((int16_t *)(x2128+108),(int16_t *)(ytmp128+108),1);
+  dft108((int16_t *)(x2128+216),(int16_t *)(ytmp128+216),1);
+  dft108((int16_t *)(x2128+324),(int16_t *)(ytmp128+324),1);
+
+  bfly4_tw1(ytmp128,ytmp128+108,ytmp128+216,ytmp128+324,y128,y128+108,y128+216,y128+324);
+
+  for (i=1,j=0; i<108; i++,j++) {
+    bfly4(ytmp128+i,
+          ytmp128+108+i,
+          ytmp128+216+i,
+          ytmp128+324+i,
+          y128+i,
+          y128+108+i,
+          y128+216+i,
+          y128+324+i,
+          twa128+j,
+          twb128+j,
+          twc128+j);
+  }
+
+  if (scale_flag==1) {
+    norm128 = set1_int16(16384);//dft_norm_table[13]);
+
+    for (i=0; i<432; i++) {
+      y128[i] = mulhi_int16(y128[i],norm128);
+    }
+  }
+}
+static int16_t twa480[119*2*4];
+static int16_t twb480[119*2*4];
+static int16_t twc480[119*2*4];
+
+void dft480(int16_t *x,int16_t *y,unsigned char scale_flag)  // 120 x 4
+{
+  int i,j;
+  simd_q15_t *x128=(simd_q15_t *)x;
+  simd_q15_t *y128=(simd_q15_t *)y;
+  simd_q15_t *twa128=(simd_q15_t *)&twa480[0];
+  simd_q15_t *twb128=(simd_q15_t *)&twb480[0];
+  simd_q15_t *twc128=(simd_q15_t *)&twc480[0];
+  simd_q15_t x2128[480];// = (simd_q15_t *)&x2128array[0];
+  simd_q15_t ytmp128[480];//=&ytmp128array2[0];
+
+
+
+  for (i=0,j=0; i<120; i++,j+=4) {
+    x2128[i]    = x128[j];
+    x2128[i+120] = x128[j+1];
+    x2128[i+240] = x128[j+2];
+    x2128[i+360] = x128[j+3];
+  }
+
+  dft120((int16_t *)x2128,(int16_t *)ytmp128,1);
+  dft120((int16_t *)(x2128+120),(int16_t *)(ytmp128+120),1);
+  dft120((int16_t *)(x2128+240),(int16_t *)(ytmp128+240),1);
+  dft120((int16_t *)(x2128+360),(int16_t *)(ytmp128+360),1);
+
+  bfly4_tw1(ytmp128,ytmp128+120,ytmp128+240,ytmp128+360,y128,y128+120,y128+240,y128+360);
+
+  for (i=1,j=0; i<120; i++,j++) {
+    bfly4(ytmp128+i,
+          ytmp128+120+i,
+          ytmp128+240+i,
+          ytmp128+360+i,
+          y128+i,
+          y128+120+i,
+          y128+240+i,
+          y128+360+i,
+          twa128+j,
+          twb128+j,
+          twc128+j);
+  }
+
+  if (scale_flag==1) {
+    norm128 = set1_int16(16384);//dft_norm_table[13]);
+
+    for (i=0; i<480; i++) {
+      y128[i] = mulhi_int16(y128[i],norm128);
+    }
+  }
+}
+
+
+static int16_t twa540[179*2*4];
+static int16_t twb540[179*2*4];
+
+void dft540(int16_t *x,int16_t *y,unsigned char scale_flag)  // 180 x 3
+{
+  int i,j;
+  simd_q15_t *x128=(simd_q15_t *)x;
+  simd_q15_t *y128=(simd_q15_t *)y;
+  simd_q15_t *twa128=(simd_q15_t *)&twa540[0];
+  simd_q15_t *twb128=(simd_q15_t *)&twb540[0];
+  simd_q15_t x2128[540];// = (simd_q15_t *)&x2128array[0];
+  simd_q15_t ytmp128[540];//=&ytmp128array3[0];
+
+
+
+  for (i=0,j=0; i<180; i++,j+=3) {
+    x2128[i]    = x128[j];
+    x2128[i+180] = x128[j+1];
+    x2128[i+360] = x128[j+2];
+  }
+
+  dft180((int16_t *)x2128,(int16_t *)ytmp128,1);
+  dft180((int16_t *)(x2128+180),(int16_t *)(ytmp128+180),1);
+  dft180((int16_t *)(x2128+360),(int16_t *)(ytmp128+360),1);
+
+  bfly3_tw1(ytmp128,ytmp128+180,ytmp128+360,y128,y128+180,y128+360);
+
+  for (i=1,j=0; i<180; i++,j++) {
+    bfly3(ytmp128+i,
+          ytmp128+180+i,
+          ytmp128+360+i,
+          y128+i,
+          y128+180+i,
+          y128+360+i,
+          twa128+j,
+          twb128+j);
+  }
+
+  if (scale_flag==1) {
+    norm128 = set1_int16(dft_norm_table[14]);
+
+    for (i=0; i<540; i++) {
+      y128[i] = mulhi_int16(y128[i],norm128);
+    }
+  }
+}
+
+static int16_t twa576[191*2*4];
+static int16_t twb576[191*2*4];
+
+void dft576(int16_t *x,int16_t *y,unsigned char scale_flag)  // 192 x 3
+{
+  int i,j;
+  simd_q15_t *x128=(simd_q15_t *)x;
+  simd_q15_t *y128=(simd_q15_t *)y;
+  simd_q15_t *twa128=(simd_q15_t *)&twa576[0];
+  simd_q15_t *twb128=(simd_q15_t *)&twb576[0];
+  simd_q15_t x2128[576];// = (simd_q15_t *)&x2128array[0];
+  simd_q15_t ytmp128[576];//=&ytmp128array3[0];
+
+
+
+  for (i=0,j=0; i<192; i++,j+=3) {
+    x2128[i]    = x128[j];
+    x2128[i+192] = x128[j+1];
+    x2128[i+384] = x128[j+2];
+  }
+
+
+  dft192((int16_t *)x2128,(int16_t *)ytmp128,1);
+  dft192((int16_t *)(x2128+192),(int16_t *)(ytmp128+192),1);
+  dft192((int16_t *)(x2128+384),(int16_t *)(ytmp128+384),1);
+
+  bfly3_tw1(ytmp128,ytmp128+192,ytmp128+384,y128,y128+192,y128+384);
+
+  for (i=1,j=0; i<192; i++,j++) {
+    bfly3(ytmp128+i,
+          ytmp128+192+i,
+          ytmp128+384+i,
+          y128+i,
+          y128+192+i,
+          y128+384+i,
+          twa128+j,
+          twb128+j);
+  }
+
+  if (scale_flag==1) {
+    norm128 = set1_int16(dft_norm_table[14]);
+
+    for (i=0; i<576; i++) {
+      y128[i] = mulhi_int16(y128[i],norm128);
+    }
+  }
+}
+
+
+static int16_t twa600[299*2*4];
+
+void dft600(int16_t *x,int16_t *y,unsigned char scale_flag)  // 300 x 2
+{
+  int i,j;
+  simd_q15_t *x128=(simd_q15_t *)x;
+  simd_q15_t *y128=(simd_q15_t *)y;
+  simd_q15_t *tw128=(simd_q15_t *)&twa600[0];
+  simd_q15_t x2128[600];// = (simd_q15_t *)&x2128array[0];
+  simd_q15_t ytmp128[600];//=&ytmp128array2[0];
+
+
+  for (i=0,j=0; i<300; i++,j+=2) {
+    x2128[i]    = x128[j];
+    x2128[i+300] = x128[j+1];
+  }
+
+  dft300((int16_t *)x2128,(int16_t *)ytmp128,1);
+  dft300((int16_t *)(x2128+300),(int16_t *)(ytmp128+300),1);
+
+
+  bfly2_tw1(ytmp128,ytmp128+300,y128,y128+300);
+
+  for (i=1,j=0; i<300; i++,j++) {
+    bfly2(ytmp128+i,
+          ytmp128+300+i,
+          y128+i,
+          y128+300+i,
+          tw128+j);
+  }
+
+  if (scale_flag==1) {
+    norm128 = set1_int16(ONE_OVER_SQRT2_Q15);
+
+    for (i=0; i<600; i++) {
+      y128[i] = mulhi_int16(y128[i],norm128);
+    }
+  }
+}
+
+
+static int16_t twa648[215*2*4];
+static int16_t twb648[215*2*4];
+
+void dft648(int16_t *x,int16_t *y,unsigned char scale_flag)  // 216 x 3
+{
+  int i,j;
+  simd_q15_t *x128=(simd_q15_t *)x;
+  simd_q15_t *y128=(simd_q15_t *)y;
+  simd_q15_t *twa128=(simd_q15_t *)&twa648[0];
+  simd_q15_t *twb128=(simd_q15_t *)&twb648[0];
+  simd_q15_t x2128[648];// = (simd_q15_t *)&x2128array[0];
+  simd_q15_t ytmp128[648];//=&ytmp128array3[0];
+
+
+
+  for (i=0,j=0; i<216; i++,j+=3) {
+    x2128[i]    = x128[j];
+    x2128[i+216] = x128[j+1];
+    x2128[i+432] = x128[j+2];
+  }
+
+  dft216((int16_t *)x2128,(int16_t *)ytmp128,1);
+  dft216((int16_t *)(x2128+216),(int16_t *)(ytmp128+216),1);
+  dft216((int16_t *)(x2128+432),(int16_t *)(ytmp128+432),1);
+
+  bfly3_tw1(ytmp128,ytmp128+216,ytmp128+432,y128,y128+216,y128+432);
+
+  for (i=1,j=0; i<216; i++,j++) {
+    bfly3(ytmp128+i,
+          ytmp128+216+i,
+          ytmp128+432+i,
+          y128+i,
+          y128+216+i,
+          y128+432+i,
+          twa128+j,
+          twb128+j);
+  }
+
+  if (scale_flag==1) {
+    norm128 = set1_int16(dft_norm_table[14]);
+
+    for (i=0; i<648; i++) {
+      y128[i] = mulhi_int16(y128[i],norm128);
+    }
+  }
+}
+
+
+static int16_t twa720[179*2*4];
+static int16_t twb720[179*2*4];
+static int16_t twc720[179*2*4];
+
+
+void dft720(int16_t *x,int16_t *y,unsigned char scale_flag)  // 180 x 4
+{
+  int i,j;
+  simd_q15_t *x128=(simd_q15_t *)x;
+  simd_q15_t *y128=(simd_q15_t *)y;
+  simd_q15_t *twa128=(simd_q15_t *)&twa720[0];
+  simd_q15_t *twb128=(simd_q15_t *)&twb720[0];
+  simd_q15_t *twc128=(simd_q15_t *)&twc720[0];
+  simd_q15_t x2128[720];// = (simd_q15_t *)&x2128array[0];
+  simd_q15_t ytmp128[720];//=&ytmp128array2[0];
+
+
+
+  for (i=0,j=0; i<180; i++,j+=4) {
+    x2128[i]    = x128[j];
+    x2128[i+180] = x128[j+1];
+    x2128[i+360] = x128[j+2];
+    x2128[i+540] = x128[j+3];
+  }
+
+  dft180((int16_t *)x2128,(int16_t *)ytmp128,1);
+  dft180((int16_t *)(x2128+180),(int16_t *)(ytmp128+180),1);
+  dft180((int16_t *)(x2128+360),(int16_t *)(ytmp128+360),1);
+  dft180((int16_t *)(x2128+540),(int16_t *)(ytmp128+540),1);
+
+  bfly4_tw1(ytmp128,ytmp128+180,ytmp128+360,ytmp128+540,y128,y128+180,y128+360,y128+540);
+
+  for (i=1,j=0; i<180; i++,j++) {
+    bfly4(ytmp128+i,
+          ytmp128+180+i,
+          ytmp128+360+i,
+          ytmp128+540+i,
+          y128+i,
+          y128+180+i,
+          y128+360+i,
+          y128+540+i,
+          twa128+j,
+          twb128+j,
+          twc128+j);
+  }
+
+  if (scale_flag==1) {
+    norm128 = set1_int16(16384);//dft_norm_table[13]);
+
+    for (i=0; i<720; i++) {
+      y128[i] = mulhi_int16(y128[i],norm128);
+    }
+  }
+}
+
+static int16_t twa768p[191*2*4];
+static int16_t twb768p[191*2*4];
+static int16_t twc768p[191*2*4];
+
+void dft768p(int16_t *x,int16_t *y,unsigned char scale_flag)  // 192 x 4
+{
+
+  int i,j;
+  simd_q15_t *x128=(simd_q15_t *)x;
+  simd_q15_t *y128=(simd_q15_t *)y;
+  simd_q15_t *twa128=(simd_q15_t *)&twa768p[0];
+  simd_q15_t *twb128=(simd_q15_t *)&twb768p[0];
+  simd_q15_t *twc128=(simd_q15_t *)&twc768p[0];
+  simd_q15_t x2128[768];// = (simd_q15_t *)&x2128array[0];
+  simd_q15_t ytmp128[768];//=&ytmp128array2[0];
+
+
+
+  for (i=0,j=0; i<192; i++,j+=4) {
+    x2128[i]     = x128[j];
+    x2128[i+192] = x128[j+1];
+    x2128[i+384] = x128[j+2];
+    x2128[i+576] = x128[j+3];
+  }
+
+  dft192((int16_t *)x2128,(int16_t *)ytmp128,1);
+  dft192((int16_t *)(x2128+192),(int16_t *)(ytmp128+192),1);
+  dft192((int16_t *)(x2128+384),(int16_t *)(ytmp128+384),1);
+  dft192((int16_t *)(x2128+576),(int16_t *)(ytmp128+576),1);
+
+  bfly4_tw1(ytmp128,ytmp128+192,ytmp128+384,ytmp128+576,y128,y128+192,y128+384,y128+576);
+
+  for (i=1,j=0; i<192; i++,j++) {
+    bfly4(ytmp128+i,
+          ytmp128+192+i,
+          ytmp128+384+i,
+          ytmp128+576+i,
+          y128+i,
+          y128+192+i,
+          y128+384+i,
+          y128+576+i,
+          twa128+j,
+          twb128+j,
+          twc128+j);
+  }
+
+  if (scale_flag==1) {
+    norm128 = set1_int16(16384);//dft_norm_table[13]);
+
+    for (i=0; i<768; i++) {
+      y128[i] = mulhi_int16(y128[i],norm128);
+    }
+  }
+}
+
+
+static int16_t twa384i[256];
+static int16_t twb384i[256];
+// 128 x 3
+void idft384(int16_t *input, int16_t *output, unsigned char scale)
+{
+  int i,i2,j;
+  uint32_t tmp[3][128]__attribute__((aligned(32)));
+  uint32_t tmpo[3][128] __attribute__((aligned(32)));
+  simd_q15_t *y128p=(simd_q15_t*)output;
+  simd_q15_t ONE_OVER_SQRT3_Q15_128 = set1_int16(ONE_OVER_SQRT3_Q15);
+
+  for (i=0,j=0; i<128; i++) {
+    tmp[0][i] = ((uint32_t *)input)[j++];
+    tmp[1][i] = ((uint32_t *)input)[j++];
+    tmp[2][i] = ((uint32_t *)input)[j++];
+  }
+
+  idft128((int16_t*)(tmp[0]),(int16_t*)(tmpo[0]),1);
+  idft128((int16_t*)(tmp[1]),(int16_t*)(tmpo[1]),1);
+  idft128((int16_t*)(tmp[2]),(int16_t*)(tmpo[2]),1);
+
+  // recombine with the sequential tables filled by init_rad3(384,twa384i,twb384i);
+  // the replicated twa384/twb384 tables belong to the radix-4 dft384 above and
+  // have the wrong layout for this vector-at-a-time walk
+  for (i=0,i2=0; i<256; i+=8,i2+=4)  {
+    ibfly3((simd_q15_t*)(&tmpo[0][i2]),(simd_q15_t*)(&tmpo[1][i2]),(simd_q15_t*)(&tmpo[2][i2]),
+           (simd_q15_t*)(output+i),(simd_q15_t*)(output+256+i),(simd_q15_t*)(output+512+i),
+           (simd_q15_t*)(twa384i+i),(simd_q15_t*)(twb384i+i));
+  }
+
+
+  if (scale==1) {
+    for (i=0; i<6; i++) {
+      y128p[0]  = mulhi_int16(y128p[0],ONE_OVER_SQRT3_Q15_128);
+      y128p[1]  = mulhi_int16(y128p[1],ONE_OVER_SQRT3_Q15_128);
+      y128p[2]  = mulhi_int16(y128p[2],ONE_OVER_SQRT3_Q15_128);
+      y128p[3]  = mulhi_int16(y128p[3],ONE_OVER_SQRT3_Q15_128);
+      y128p[4]  = mulhi_int16(y128p[4],ONE_OVER_SQRT3_Q15_128);
+      y128p[5]  = mulhi_int16(y128p[5],ONE_OVER_SQRT3_Q15_128);
+      y128p[6]  = mulhi_int16(y128p[6],ONE_OVER_SQRT3_Q15_128);
+      y128p[7]  = mulhi_int16(y128p[7],ONE_OVER_SQRT3_Q15_128);
+      y128p[8]  = mulhi_int16(y128p[8],ONE_OVER_SQRT3_Q15_128);
+      y128p[9]  = mulhi_int16(y128p[9],ONE_OVER_SQRT3_Q15_128);
+      y128p[10] = mulhi_int16(y128p[10],ONE_OVER_SQRT3_Q15_128);
+      y128p[11] = mulhi_int16(y128p[11],ONE_OVER_SQRT3_Q15_128);
+      y128p[12] = mulhi_int16(y128p[12],ONE_OVER_SQRT3_Q15_128);
+      y128p[13] = mulhi_int16(y128p[13],ONE_OVER_SQRT3_Q15_128);
+      y128p[14] = mulhi_int16(y128p[14],ONE_OVER_SQRT3_Q15_128);
+      y128p[15] = mulhi_int16(y128p[15],ONE_OVER_SQRT3_Q15_128);
+      y128p+=16;
+    }
+  }
+}
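+
+/* idft384 is the lone inverse kernel in this group: it splits the 384-point
+   IDFT into three 128-point IDFTs (the power-of-2 fast path), recombines
+   them with the inverse butterfly ibfly3, and scales by 1/sqrt(3).  Note the
+   offsets into output are in int16 units here (256 = 128 complex samples),
+   unlike the vector-granular indexing of the forward kernels above. */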
+static int16_t twa864[287*2*4];
+static int16_t twb864[287*2*4];
+
+void dft864(int16_t *x,int16_t *y,unsigned char scale_flag)  // 288 x 3
+{
+  int i,j;
+  simd_q15_t *x128=(simd_q15_t *)x;
+  simd_q15_t *y128=(simd_q15_t *)y;
+  simd_q15_t *twa128=(simd_q15_t *)&twa864[0];
+  simd_q15_t *twb128=(simd_q15_t *)&twb864[0];
+  simd_q15_t x2128[864];// = (simd_q15_t *)&x2128array[0];
+  simd_q15_t ytmp128[864];//=&ytmp128array3[0];
+
+
+
+  for (i=0,j=0; i<288; i++,j+=3) {
+    x2128[i]    = x128[j];
+    x2128[i+288] = x128[j+1];
+    x2128[i+576] = x128[j+2];
+  }
+
+  dft288((int16_t *)x2128,(int16_t *)ytmp128,1);
+  dft288((int16_t *)(x2128+288),(int16_t *)(ytmp128+288),1);
+  dft288((int16_t *)(x2128+576),(int16_t *)(ytmp128+576),1);
+
+  bfly3_tw1(ytmp128,ytmp128+288,ytmp128+576,y128,y128+288,y128+576);
+
+  for (i=1,j=0; i<288; i++,j++) {
+    bfly3(ytmp128+i,
+          ytmp128+288+i,
+          ytmp128+576+i,
+          y128+i,
+          y128+288+i,
+          y128+576+i,
+          twa128+j,
+          twb128+j);
+  }
+
+  if (scale_flag==1) {
+    norm128 = set1_int16(dft_norm_table[14]);
+
+    for (i=0; i<864; i++) {
+      y128[i] = mulhi_int16(y128[i],norm128);
+    }
+  }
+}
+
+static int16_t twa900[299*2*4];
+static int16_t twb900[299*2*4];
+
+void dft900(int16_t *x,int16_t *y,unsigned char scale_flag)  // 300 x 3
+{
+  int i,j;
+  simd_q15_t *x128=(simd_q15_t *)x;
+  simd_q15_t *y128=(simd_q15_t *)y;
+  simd_q15_t *twa128=(simd_q15_t *)&twa900[0];
+  simd_q15_t *twb128=(simd_q15_t *)&twb900[0];
+  simd_q15_t x2128[900];// = (simd_q15_t *)&x2128array[0];
+  simd_q15_t ytmp128[900];//=&ytmp128array3[0];
+
+
+
+  for (i=0,j=0; i<300; i++,j+=3) {
+    x2128[i]    = x128[j];
+    x2128[i+300] = x128[j+1];
+    x2128[i+600] = x128[j+2];
+  }
+
+  dft300((int16_t *)x2128,(int16_t *)ytmp128,1);
+  dft300((int16_t *)(x2128+300),(int16_t *)(ytmp128+300),1);
+  dft300((int16_t *)(x2128+600),(int16_t *)(ytmp128+600),1);
+
+  bfly3_tw1(ytmp128,ytmp128+300,ytmp128+600,y128,y128+300,y128+600);
+
+  for (i=1,j=0; i<300; i++,j++) {
+    bfly3(ytmp128+i,
+          ytmp128+300+i,
+          ytmp128+600+i,
+          y128+i,
+          y128+300+i,
+          y128+600+i,
+          twa128+j,
+          twb128+j);
+  }
+
+  if (scale_flag==1) {
+    norm128 = set1_int16(dft_norm_table[14]);
+
+    for (i=0; i<900; i++) {
+      y128[i] = mulhi_int16(y128[i],norm128);
+    }
+  }
+}
+
+
+static int16_t twa960[239*2*4];
+static int16_t twb960[239*2*4];
+static int16_t twc960[239*2*4];
+
+
+void dft960(int16_t *x,int16_t *y,unsigned char scale_flag)  // 240 x 4
+{
+  int i,j;
+  simd_q15_t *x128=(simd_q15_t *)x;
+  simd_q15_t *y128=(simd_q15_t *)y;
+  simd_q15_t *twa128=(simd_q15_t *)&twa960[0];
+  simd_q15_t *twb128=(simd_q15_t *)&twb960[0];
+  simd_q15_t *twc128=(simd_q15_t *)&twc960[0];
+  simd_q15_t x2128[960];// = (simd_q15_t *)&x2128array[0];
+  simd_q15_t ytmp128[960];//=&ytmp128array2[0];
+
+
+
+  for (i=0,j=0; i<240; i++,j+=4) {
+    x2128[i]    = x128[j];
+    x2128[i+240] = x128[j+1];
+    x2128[i+480] = x128[j+2];
+    x2128[i+720] = x128[j+3];
+  }
+
+  dft240((int16_t *)x2128,(int16_t *)ytmp128,1);
+  dft240((int16_t *)(x2128+240),(int16_t *)(ytmp128+240),1);
+  dft240((int16_t *)(x2128+480),(int16_t *)(ytmp128+480),1);
+  dft240((int16_t *)(x2128+720),(int16_t *)(ytmp128+720),1);
+
+  bfly4_tw1(ytmp128,ytmp128+240,ytmp128+480,ytmp128+720,y128,y128+240,y128+480,y128+720);
+
+  for (i=1,j=0; i<240; i++,j++) {
+    bfly4(ytmp128+i,
+          ytmp128+240+i,
+          ytmp128+480+i,
+          ytmp128+720+i,
+          y128+i,
+          y128+240+i,
+          y128+480+i,
+          y128+720+i,
+          twa128+j,
+          twb128+j,
+          twc128+j);
+  }
+
+  if (scale_flag==1) {
+    norm128 = set1_int16(16384);//dft_norm_table[13]);
+
+    for (i=0; i<960; i++) {
+      y128[i] = mulhi_int16(y128[i],norm128);
+    }
+  }
+}
+
+
+static int16_t twa972[323*2*4];
+static int16_t twb972[323*2*4];
+
+void dft972(int16_t *x,int16_t *y,unsigned char scale_flag)  // 324 x 3
+{
+  int i,j;
+  simd_q15_t *x128=(simd_q15_t *)x;
+  simd_q15_t *y128=(simd_q15_t *)y;
+  simd_q15_t *twa128=(simd_q15_t *)&twa972[0];
+  simd_q15_t *twb128=(simd_q15_t *)&twb972[0];
+  simd_q15_t x2128[972];// = (simd_q15_t *)&x2128array[0];
+  simd_q15_t ytmp128[972];//=&ytmp128array3[0];
+
+
+
+  for (i=0,j=0; i<324; i++,j+=3) {
+    x2128[i]    = x128[j];
+    x2128[i+324] = x128[j+1];
+    x2128[i+648] = x128[j+2];
+  }
+
+  dft324((int16_t *)x2128,(int16_t *)ytmp128,1);
+  dft324((int16_t *)(x2128+324),(int16_t *)(ytmp128+324),1);
+  dft324((int16_t *)(x2128+648),(int16_t *)(ytmp128+648),1);
+
+  bfly3_tw1(ytmp128,ytmp128+324,ytmp128+648,y128,y128+324,y128+648);
+
+  for (i=1,j=0; i<324; i++,j++) {
+    bfly3(ytmp128+i,
+          ytmp128+324+i,
+          ytmp128+648+i,
+          y128+i,
+          y128+324+i,
+          y128+648+i,
+          twa128+j,
+          twb128+j);
+  }
+
+  if (scale_flag==1) {
+    norm128 = set1_int16(dft_norm_table[14]);
+
+    for (i=0; i<972; i++) {
+      y128[i] = mulhi_int16(y128[i],norm128);
+    }
+  }
+}
+
+static int16_t twa1080[359*2*4];
+static int16_t twb1080[359*2*4];
+
+void dft1080(int16_t *x,int16_t *y,unsigned char scale_flag)  // 360 x 3
+{
+  int i,j;
+  simd_q15_t *x128=(simd_q15_t *)x;
+  simd_q15_t *y128=(simd_q15_t *)y;
+  simd_q15_t *twa128=(simd_q15_t *)&twa1080[0];
+  simd_q15_t *twb128=(simd_q15_t *)&twb1080[0];
+  simd_q15_t x2128[1080];// = (simd_q15_t *)&x2128array[0];
+  simd_q15_t ytmp128[1080];//=&ytmp128array3[0];
+
+
+
+  for (i=0,j=0; i<360; i++,j+=3) {
+    x2128[i]    = x128[j];
+    x2128[i+360] = x128[j+1];
+    x2128[i+720] = x128[j+2];
+  }
+
+  dft360((int16_t *)x2128,(int16_t *)ytmp128,1);
+  dft360((int16_t *)(x2128+360),(int16_t *)(ytmp128+360),1);
+  dft360((int16_t *)(x2128+720),(int16_t *)(ytmp128+720),1);
+
+  bfly3_tw1(ytmp128,ytmp128+360,ytmp128+720,y128,y128+360,y128+720);
+
+  for (i=1,j=0; i<360; i++,j++) {
+    bfly3(ytmp128+i,
+          ytmp128+360+i,
+          ytmp128+720+i,
+          y128+i,
+          y128+360+i,
+          y128+720+i,
+          twa128+j,
+          twb128+j);
+  }
+
+  if (scale_flag==1) {
+    norm128 = set1_int16(dft_norm_table[14]);
+
+    for (i=0; i<1080; i++) {
+      y128[i] = mulhi_int16(y128[i],norm128);
+    }
+  }
+}
+
+static int16_t twa1152[287*2*4];
+static int16_t twb1152[287*2*4];
+static int16_t twc1152[287*2*4];
+
+void dft1152(int16_t *x,int16_t *y,unsigned char scale_flag)  // 288 x 4
+{
+
+  int i,j;
+  simd_q15_t *x128=(simd_q15_t *)x;
+  simd_q15_t *y128=(simd_q15_t *)y;
+  simd_q15_t *twa128=(simd_q15_t *)&twa1152[0];
+  simd_q15_t *twb128=(simd_q15_t *)&twb1152[0];
+  simd_q15_t *twc128=(simd_q15_t *)&twc1152[0];
+  simd_q15_t x2128[1152];// = (simd_q15_t *)&x2128array[0];
+  simd_q15_t ytmp128[1152];//=&ytmp128array2[0];
+
+
+
+  for (i=0,j=0; i<288; i++,j+=4) {
+    x2128[i]    = x128[j];
+    x2128[i+288] = x128[j+1];
+    x2128[i+576] = x128[j+2];
+    x2128[i+864] = x128[j+3];
+  }
+
+  dft288((int16_t *)x2128,(int16_t *)ytmp128,1);
+  dft288((int16_t *)(x2128+288),(int16_t *)(ytmp128+288),1);
+  dft288((int16_t *)(x2128+576),(int16_t *)(ytmp128+576),1);
+  dft288((int16_t *)(x2128+864),(int16_t *)(ytmp128+864),1);
+
+  bfly4_tw1(ytmp128,ytmp128+288,ytmp128+576,ytmp128+864,y128,y128+288,y128+576,y128+864);
+
+  for (i=1,j=0; i<288; i++,j++) {
+    bfly4(ytmp128+i,
+          ytmp128+288+i,
+          ytmp128+576+i,
+          ytmp128+864+i,
+          y128+i,
+          y128+288+i,
+          y128+576+i,
+          y128+864+i,
+          twa128+j,
+          twb128+j,
+          twc128+j);
+  }
+
+  if (scale_flag==1) {
+    norm128 = set1_int16(16384);//dft_norm_table[13]);
+
+    for (i=0; i<1152; i++) {
+      y128[i] = mulhi_int16(y128[i],norm128);
+    }
+  }
+}
+
+int16_t twa1200[4784];
+int16_t twb1200[4784];
+int16_t twc1200[4784];
+
+void dft1200(int16_t *x,int16_t *y,unsigned char scale_flag)  // 300 x 4
+{
+
+  int i,j;
+  simd_q15_t *x128=(simd_q15_t *)x;
+  simd_q15_t *y128=(simd_q15_t *)y;
+  simd_q15_t *twa128=(simd_q15_t *)&twa1200[0];
+  simd_q15_t *twb128=(simd_q15_t *)&twb1200[0];
+  simd_q15_t *twc128=(simd_q15_t *)&twc1200[0];
+  simd_q15_t x2128[1200];// = (simd_q15_t *)&x2128array[0];
+  simd_q15_t ytmp128[1200];//=&ytmp128array2[0];
+
+
+
+  for (i=0,j=0; i<300; i++,j+=4) {
+    x2128[i]    = x128[j];
+    x2128[i+300] = x128[j+1];
+    x2128[i+600] = x128[j+2];
+    x2128[i+900] = x128[j+3];
+  }
+
+  dft300((int16_t *)x2128,(int16_t *)ytmp128,1);
+  dft300((int16_t *)(x2128+300),(int16_t *)(ytmp128+300),1);
+  dft300((int16_t *)(x2128+600),(int16_t *)(ytmp128+600),1);
+  dft300((int16_t *)(x2128+900),(int16_t *)(ytmp128+900),1);
+
+  bfly4_tw1(ytmp128,ytmp128+300,ytmp128+600,ytmp128+900,y128,y128+300,y128+600,y128+900);
+
+  for (i=1,j=0; i<300; i++,j++) {
+    bfly4(ytmp128+i,
+          ytmp128+300+i,
+          ytmp128+600+i,
+          ytmp128+900+i,
+          y128+i,
+          y128+300+i,
+          y128+600+i,
+          y128+900+i,
+          twa128+j,
+          twb128+j,
+          twc128+j);
+  }
+
+  if (scale_flag==1) {
+    norm128 = set1_int16(16384);//dft_norm_table[13]);
+    for (i=0; i<1200; i++) {
+      y128[i] = mulhi_int16(y128[i],norm128);
+    }
+  }
+}
+
+
+static int16_t twa1296[431*2*4];
+static int16_t twb1296[431*2*4];
+
+void dft1296(int16_t *x,int16_t *y,unsigned char scale_flag)  // 432 x 3
+{
+
+  int i,j;
+  simd_q15_t *x128=(simd_q15_t *)x;
+  simd_q15_t *y128=(simd_q15_t *)y;
+  simd_q15_t *twa128=(simd_q15_t *)&twa1296[0];
+  simd_q15_t *twb128=(simd_q15_t *)&twb1296[0];
+  simd_q15_t x2128[1296];// = (simd_q15_t *)&x2128array[0];
+  simd_q15_t ytmp128[1296];//=&ytmp128array3[0];
+
+
+
+  for (i=0,j=0; i<432; i++,j+=3) {
+    x2128[i]    = x128[j];
+    x2128[i+432] = x128[j+1];
+    x2128[i+864] = x128[j+2];
+  }
+
+  dft432((int16_t *)x2128,(int16_t *)ytmp128,1);
+  dft432((int16_t *)(x2128+432),(int16_t *)(ytmp128+432),1);
+  dft432((int16_t *)(x2128+864),(int16_t *)(ytmp128+864),1);
+
+  bfly3_tw1(ytmp128,ytmp128+432,ytmp128+864,y128,y128+432,y128+864);
+
+  for (i=1,j=0; i<432; i++,j++) {
+    bfly3(ytmp128+i,
+          ytmp128+432+i,
+          ytmp128+864+i,
+          y128+i,
+          y128+432+i,
+          y128+864+i,
+          twa128+j,
+          twb128+j);
+  }
+
+  if (scale_flag==1) {
+    norm128 = set1_int16(dft_norm_table[14]);
+
+    for (i=0; i<1296; i++) {
+      y128[i] = mulhi_int16(y128[i],norm128);
+    }
+  }
+}
+
+
+static int16_t twa1440[479*2*4];
+static int16_t twb1440[479*2*4];
+
+void dft1440(int16_t *x,int16_t *y,unsigned char scale_flag)  // 480 x 3
+{
+  int i,j;
+  simd_q15_t *x128=(simd_q15_t *)x;
+  simd_q15_t *y128=(simd_q15_t *)y;
+  simd_q15_t *twa128=(simd_q15_t *)&twa1440[0];
+  simd_q15_t *twb128=(simd_q15_t *)&twb1440[0];
+  simd_q15_t x2128[1440];// = (simd_q15_t *)&x2128array[0];
+  simd_q15_t ytmp128[1440];//=&ytmp128array3[0];
+
+
+
+  for (i=0,j=0; i<480; i++,j+=3) {
+    x2128[i]    = x128[j];
+    x2128[i+480] = x128[j+1];
+    x2128[i+960] = x128[j+2];
+  }
+
+  dft480((int16_t *)x2128,(int16_t *)ytmp128,1);
+  dft480((int16_t *)(x2128+480),(int16_t *)(ytmp128+480),1);
+  dft480((int16_t *)(x2128+960),(int16_t *)(ytmp128+960),1);
+
+  bfly3_tw1(ytmp128,ytmp128+480,ytmp128+960,y128,y128+480,y128+960);
+
+  for (i=1,j=0; i<480; i++,j++) {
+    bfly3(ytmp128+i,
+          ytmp128+480+i,
+          ytmp128+960+i,
+          y128+i,
+          y128+480+i,
+          y128+960+i,
+          twa128+j,
+          twb128+j);
+  }
+
+  if (scale_flag==1) {
+    norm128 = set1_int16(dft_norm_table[14]);
+
+    for (i=0; i<1440; i++) {
+      y128[i] = mulhi_int16(y128[i],norm128);
+    }
+  }
+}
+
+static int16_t twa1500[2392]__attribute__((aligned(32)));
+static int16_t twb1500[2392]__attribute__((aligned(32)));
+static int16_t twc1500[2392]__attribute__((aligned(32)));
+static int16_t twd1500[2392]__attribute__((aligned(32)));
+
+void dft1500(int16_t *x,int16_t *y,unsigned char scale_flag)  // 300 x 5
+{
+
+  int i,j;
+  simd_q15_t *x128=(simd_q15_t *)x;
+  simd_q15_t *y128=(simd_q15_t *)y;
+  simd_q15_t *twa128=(simd_q15_t *)&twa1500[0];
+  simd_q15_t *twb128=(simd_q15_t *)&twb1500[0];
+  simd_q15_t *twc128=(simd_q15_t *)&twc1500[0];
+  simd_q15_t *twd128=(simd_q15_t *)&twd1500[0];
+  simd_q15_t x2128[1500];// = (simd_q15_t *)&x2128array[0];
+  simd_q15_t ytmp128[1500];//=&ytmp128array2[0];
+
+
+
+  for (i=0,j=0; i<300; i++,j+=5) {
+    x2128[i]    = x128[j];
+    x2128[i+300] = x128[j+1];
+    x2128[i+600] = x128[j+2];
+    x2128[i+900] = x128[j+3];
+    x2128[i+1200] = x128[j+4];
+  }
+
+  dft300((int16_t *)x2128,(int16_t *)ytmp128,1);
+  dft300((int16_t *)(x2128+300),(int16_t *)(ytmp128+300),1);
+  dft300((int16_t *)(x2128+600),(int16_t *)(ytmp128+600),1);
+  dft300((int16_t *)(x2128+900),(int16_t *)(ytmp128+900),1);
+  dft300((int16_t *)(x2128+1200),(int16_t *)(ytmp128+1200),1);
+
+  bfly5_tw1(ytmp128,ytmp128+300,ytmp128+600,ytmp128+900,ytmp128+1200,y128,y128+300,y128+600,y128+900,y128+1200);
+
+  for (i=1,j=0; i<300; i++,j++) {
+    bfly5(ytmp128+i,
+          ytmp128+300+i,
+          ytmp128+600+i,
+          ytmp128+900+i,
+          ytmp128+1200+i,
+          y128+i,
+          y128+300+i,
+          y128+600+i,
+          y128+900+i,
+          y128+1200+i,
+          twa128+j,
+          twb128+j,
+          twc128+j,
+          twd128+j);
+  }
+
+  if (scale_flag==1) {
+    norm128 = set1_int16(dft_norm_table[15]);
+
+    for (i=0; i<1500; i++) {
+      y128[i] = mulhi_int16(y128[i],norm128);
+    }
+  }
+}
+
+static int16_t twa1620[539*2*4];
+static int16_t twb1620[539*2*4];
+
+void dft1620(int16_t *x,int16_t *y,unsigned char scale_flag)  // 540 x 3
+{
+  int i,j;
+  simd_q15_t *x128=(simd_q15_t *)x;
+  simd_q15_t *y128=(simd_q15_t *)y;
+  simd_q15_t *twa128=(simd_q15_t *)&twa1620[0];
+  simd_q15_t *twb128=(simd_q15_t *)&twb1620[0];
+  simd_q15_t x2128[1620];// = (simd_q15_t *)&x2128array[0];
+  simd_q15_t ytmp128[1620];//=&ytmp128array3[0];
+
+
+
+  for (i=0,j=0; i<540; i++,j+=3) {
+    x2128[i]    = x128[j];
+    x2128[i+540] = x128[j+1];
+    x2128[i+1080] = x128[j+2];
+  }
+
+  dft540((int16_t *)x2128,(int16_t *)ytmp128,1);
+  dft540((int16_t *)(x2128+540),(int16_t *)(ytmp128+540),1);
+  dft540((int16_t *)(x2128+1080),(int16_t *)(ytmp128+1080),1);
+
+  bfly3_tw1(ytmp128,ytmp128+540,ytmp128+1080,y128,y128+540,y128+1080);
+
+  for (i=1,j=0; i<540; i++,j++) {
+    bfly3(ytmp128+i,
+          ytmp128+540+i,
+          ytmp128+1080+i,
+          y128+i,
+          y128+540+i,
+          y128+1080+i,
+          twa128+j,
+          twb128+j);
+  }
+
+  if (scale_flag==1) {
+    norm128 = set1_int16(dft_norm_table[14]);
+
+    for (i=0; i<1620; i++) {
+      y128[i] = mulhi_int16(y128[i],norm128);
+    }
+  }
+}
+
+static int16_t twa1728[575*2*4];
+static int16_t twb1728[575*2*4];
+
+void dft1728(int16_t *x,int16_t *y,unsigned char scale_flag)  // 576 x 3
+{
+  int i,j;
+  simd_q15_t *x128=(simd_q15_t *)x;
+  simd_q15_t *y128=(simd_q15_t *)y;
+  simd_q15_t *twa128=(simd_q15_t *)&twa1728[0];
+  simd_q15_t *twb128=(simd_q15_t *)&twb1728[0];
+  simd_q15_t x2128[1728];// = (simd_q15_t *)&x2128array[0];
+  simd_q15_t ytmp128[1728];//=&ytmp128array3[0];
+
+
+
+  for (i=0,j=0; i<576; i++,j+=3) {
+    x2128[i]    = x128[j];
+    x2128[i+576] = x128[j+1];
+    x2128[i+1152] = x128[j+2];
+  }
+
+  dft576((int16_t *)x2128,(int16_t *)ytmp128,1);
+  dft576((int16_t *)(x2128+576),(int16_t *)(ytmp128+576),1);
+  dft576((int16_t *)(x2128+1152),(int16_t *)(ytmp128+1152),1);
+
+  bfly3_tw1(ytmp128,ytmp128+576,ytmp128+1152,y128,y128+576,y128+1152);
+
+  for (i=1,j=0; i<576; i++,j++) {
+    bfly3(ytmp128+i,
+          ytmp128+576+i,
+          ytmp128+1152+i,
+          y128+i,
+          y128+576+i,
+          y128+1152+i,
+          twa128+j,
+          twb128+j);
+  }
+
+  if (scale_flag==1) {
+    norm128 = set1_int16(dft_norm_table[14]);
+
+    for (i=0; i<1728; i++) {
+      y128[i] = mulhi_int16(y128[i],norm128);
+    }
+  }
+}
+
+static int16_t twa1800[599*2*4];
+static int16_t twb1800[599*2*4];
+
+void dft1800(int16_t *x,int16_t *y,unsigned char scale_flag)  // 600 x 3
+{
+  int i,j;
+  simd_q15_t *x128=(simd_q15_t *)x;
+  simd_q15_t *y128=(simd_q15_t *)y;
+  simd_q15_t *twa128=(simd_q15_t *)&twa1800[0];
+  simd_q15_t *twb128=(simd_q15_t *)&twb1800[0];
+  simd_q15_t x2128[1800];// = (simd_q15_t *)&x2128array[0];
+  simd_q15_t ytmp128[1800];//=&ytmp128array3[0];
+
+
+
+  for (i=0,j=0; i<600; i++,j+=3) {
+    x2128[i]    = x128[j];
+    x2128[i+600] = x128[j+1];
+    x2128[i+1200] = x128[j+2];
+  }
+
+  dft600((int16_t *)x2128,(int16_t *)ytmp128,1);
+  dft600((int16_t *)(x2128+600),(int16_t *)(ytmp128+600),1);
+  dft600((int16_t *)(x2128+1200),(int16_t *)(ytmp128+1200),1);
+
+  bfly3_tw1(ytmp128,ytmp128+600,ytmp128+1200,y128,y128+600,y128+1200);
+
+  for (i=1,j=0; i<600; i++,j++) {
+    bfly3(ytmp128+i,
+          ytmp128+600+i,
+          ytmp128+1200+i,
+          y128+i,
+          y128+600+i,
+          y128+1200+i,
+          twa128+j,
+          twb128+j);
+  }
+
+  if (scale_flag==1) {
+    norm128 = set1_int16(dft_norm_table[14]);
+
+    for (i=0; i<1800; i++) {
+      y128[i] = mulhi_int16(y128[i],norm128);
+    }
+  }
+}
+
+static int16_t twa1920[479*2*4];
+static int16_t twb1920[479*2*4];
+static int16_t twc1920[479*2*4];
+
+void dft1920(int16_t *x,int16_t *y,unsigned char scale_flag)  // 480 x 4
+{
+  int i,j;
+  simd_q15_t *x128=(simd_q15_t *)x;
+  simd_q15_t *y128=(simd_q15_t *)y;
+  simd_q15_t *twa128=(simd_q15_t *)&twa1920[0];
+  simd_q15_t *twb128=(simd_q15_t *)&twb1920[0];
+  simd_q15_t *twc128=(simd_q15_t *)&twc1920[0];
+  simd_q15_t x2128[1920];// = (simd_q15_t *)&x2128array[0];
+  simd_q15_t ytmp128[1920];//=&ytmp128array2[0];
+
+
+
+  for (i=0,j=0; i<480; i++,j+=4) {
+    x2128[i]    = x128[j];
+    x2128[i+480] = x128[j+1];
+    x2128[i+960] = x128[j+2];
+    x2128[i+1440] = x128[j+3];
+  }
+
+  dft480((int16_t *)x2128,(int16_t *)ytmp128,1);
+  dft480((int16_t *)(x2128+480),(int16_t *)(ytmp128+480),1);
+  dft480((int16_t *)(x2128+960),(int16_t *)(ytmp128+960),1);
+  dft480((int16_t *)(x2128+1440),(int16_t *)(ytmp128+1440),1);
+
+  bfly4_tw1(ytmp128,ytmp128+480,ytmp128+960,ytmp128+1440,y128,y128+480,y128+960,y128+1440);
+
+  for (i=1,j=0; i<480; i++,j++) {
+    bfly4(ytmp128+i,
+          ytmp128+480+i,
+          ytmp128+960+i,
+          ytmp128+1440+i,
+          y128+i,
+          y128+480+i,
+          y128+960+i,
+          y128+1440+i,
+          twa128+j,
+          twb128+j,
+          twc128+j);
+  }
+
+  if (scale_flag==1) {
+    norm128 = set1_int16(dft_norm_table[13]);
+    for (i=0; i<1920; i++) {
+      y128[i] = mulhi_int16(y128[i],norm128);
+    }
+  }
+}
+
+static int16_t twa1944[647*2*4];
+static int16_t twb1944[647*2*4];
+
+void dft1944(int16_t *x,int16_t *y,unsigned char scale_flag)  // 648 x 3
+{
+  int i,j;
+  simd_q15_t *x128=(simd_q15_t *)x;
+  simd_q15_t *y128=(simd_q15_t *)y;
+  simd_q15_t *twa128=(simd_q15_t *)&twa1944[0];
+  simd_q15_t *twb128=(simd_q15_t *)&twb1944[0];
+  simd_q15_t x2128[1944];// = (simd_q15_t *)&x2128array[0];
+  simd_q15_t ytmp128[1944];//=&ytmp128array3[0];
+
+
+
+  for (i=0,j=0; i<648; i++,j+=3) {
+    x2128[i]    = x128[j];
+    x2128[i+648] = x128[j+1];
+    x2128[i+1296] = x128[j+2];
+  }
+
+  dft648((int16_t *)x2128,(int16_t *)ytmp128,1);
+  dft648((int16_t *)(x2128+648),(int16_t *)(ytmp128+648),1);
+  dft648((int16_t *)(x2128+1296),(int16_t *)(ytmp128+1296),1);
+
+  bfly3_tw1(ytmp128,ytmp128+648,ytmp128+1296,y128,y128+648,y128+1296);
+
+  for (i=1,j=0; i<648; i++,j++) {
+    bfly3(ytmp128+i,
+          ytmp128+648+i,
+          ytmp128+1296+i,
+          y128+i,
+          y128+648+i,
+          y128+1296+i,
+          twa128+j,
+          twb128+j);
+  }
+
+  if (scale_flag==1) {
+    norm128 = set1_int16(dft_norm_table[14]);
+
+    for (i=0; i<1944; i++) {
+      y128[i] = mulhi_int16(y128[i],norm128);
+    }
+  }
+}
+
+static int16_t twa2160[719*2*4];
+static int16_t twb2160[719*2*4];
+
+void dft2160(int16_t *x,int16_t *y,unsigned char scale_flag)  // 720 x 3
+{
+  int i,j;
+  simd_q15_t *x128=(simd_q15_t *)x;
+  simd_q15_t *y128=(simd_q15_t *)y;
+  simd_q15_t *twa128=(simd_q15_t *)&twa2160[0];
+  simd_q15_t *twb128=(simd_q15_t *)&twb2160[0];
+  simd_q15_t x2128[2160];// = (simd_q15_t *)&x2128array[0];
+  simd_q15_t ytmp128[2160];//=&ytmp128array3[0];
+
+
+
+  for (i=0,j=0; i<720; i++,j+=3) {
+    x2128[i]    = x128[j];
+    x2128[i+720] = x128[j+1];
+    x2128[i+1440] = x128[j+2];
+  }
+
+  dft720((int16_t *)x2128,(int16_t *)ytmp128,1);
+  dft720((int16_t *)(x2128+720),(int16_t *)(ytmp128+720),1);
+  dft720((int16_t *)(x2128+1440),(int16_t *)(ytmp128+1440),1);
+
+  bfly3_tw1(ytmp128,ytmp128+720,ytmp128+1440,y128,y128+720,y128+1440);
+
+  for (i=1,j=0; i<720; i++,j++) {
+    bfly3(ytmp128+i,
+          ytmp128+720+i,
+          ytmp128+1440+i,
+          y128+i,
+          y128+720+i,
+          y128+1440+i,
+          twa128+j,
+          twb128+j);
+  }
+
+  if (scale_flag==1) {
+    norm128 = set1_int16(dft_norm_table[14]);
+
+    for (i=0; i<2160; i++) {
+      y128[i] = mulhi_int16(y128[i],norm128);
+    }
+  }
+}
+
+static int16_t twa2304[767*2*4];
+static int16_t twb2304[767*2*4];
+
+void dft2304(int16_t *x,int16_t *y,unsigned char scale_flag)  // 768 x 3
+{
+  int i,j;
+  simd_q15_t *x128=(simd_q15_t *)x;
+  simd_q15_t *y128=(simd_q15_t *)y;
+  simd_q15_t *twa128=(simd_q15_t *)&twa2304[0];
+  simd_q15_t *twb128=(simd_q15_t *)&twb2304[0];
+  simd_q15_t x2128[2304];// = (simd_q15_t *)&x2128array[0];
+  simd_q15_t ytmp128[2304];//=&ytmp128array3[0];
+
+
+
+  for (i=0,j=0; i<768; i++,j+=3) {
+    x2128[i]    = x128[j];
+    x2128[i+768] = x128[j+1];
+    x2128[i+1536] = x128[j+2];
+  }
+
+  dft768((int16_t *)x2128,(int16_t *)ytmp128,1);
+  dft768((int16_t *)(x2128+768),(int16_t *)(ytmp128+768),1);
+  dft768((int16_t *)(x2128+1536),(int16_t *)(ytmp128+1536),1);
+
+  bfly3_tw1(ytmp128,ytmp128+768,ytmp128+1536,y128,y128+768,y128+1536);
+
+  for (i=1,j=0; i<768; i++,j++) {
+    bfly3(ytmp128+i,
+          ytmp128+768+i,
+          ytmp128+1536+i,
+          y128+i,
+          y128+768+i,
+          y128+1536+i,
+          twa128+j,
+          twb128+j);
+  }
+
+  if (scale_flag==1) {
+    norm128 = set1_int16(dft_norm_table[14]);
+
+    for (i=0; i<2304; i++) {
+      y128[i] = mulhi_int16(y128[i],norm128);
+    }
+  }
+}
+
+static int16_t twa2400[599*2*4];
+static int16_t twb2400[599*2*4];
+static int16_t twc2400[599*2*4];
+
+void dft2400(int16_t *x,int16_t *y,unsigned char scale_flag)  // 600 x 4
+{
+
+  int i,j;
+  simd_q15_t *x128=(simd_q15_t *)x;
+  simd_q15_t *y128=(simd_q15_t *)y;
+  simd_q15_t *twa128=(simd_q15_t *)&twa2400[0];
+  simd_q15_t *twb128=(simd_q15_t *)&twb2400[0];
+  simd_q15_t *twc128=(simd_q15_t *)&twc2400[0];
+  simd_q15_t x2128[2400];// = (simd_q15_t *)&x2128array[0];
+  simd_q15_t ytmp128[2400];//=&ytmp128array2[0];
+
+
+
+  for (i=0,j=0; i<600; i++,j+=4) {
+    x2128[i]    = x128[j];
+    x2128[i+600] = x128[j+1];
+    x2128[i+1200] = x128[j+2];
+    x2128[i+1800] = x128[j+3];
+  }
+
+  dft600((int16_t *)x2128,(int16_t *)ytmp128,1);
+  dft600((int16_t *)(x2128+600),(int16_t *)(ytmp128+600),1);
+  dft600((int16_t *)(x2128+1200),(int16_t *)(ytmp128+1200),1);
+  dft600((int16_t *)(x2128+1800),(int16_t *)(ytmp128+1800),1);
+
+  bfly4_tw1(ytmp128,ytmp128+600,ytmp128+1200,ytmp128+1800,y128,y128+600,y128+1200,y128+1800);
+
+  for (i=1,j=0; i<600; i++,j++) {
+    bfly4(ytmp128+i,
+          ytmp128+600+i,
+          ytmp128+1200+i,
+          ytmp128+1800+i,
+          y128+i,
+          y128+600+i,
+          y128+1200+i,
+          y128+1800+i,
+          twa128+j,
+          twb128+j,
+          twc128+j);
+  }
+
+  if (scale_flag==1) {
+    norm128 = set1_int16(dft_norm_table[13]);
+    for (i=0; i<2400; i++) {
+      y128[i] = mulhi_int16(y128[i],norm128);
+    }
+  }
+}
+
+static int16_t twa2592[863*2*4];
+static int16_t twb2592[863*2*4];
+
+void dft2592(int16_t *x,int16_t *y,unsigned char scale_flag)  // 864 x 3
+{
+  int i,j;
+  simd_q15_t *x128=(simd_q15_t *)x;
+  simd_q15_t *y128=(simd_q15_t *)y;
+  simd_q15_t *twa128=(simd_q15_t *)&twa2592[0];
+  simd_q15_t *twb128=(simd_q15_t *)&twb2592[0];
+  simd_q15_t x2128[2592];// = (simd_q15_t *)&x2128array[0];
+  simd_q15_t ytmp128[2592];//=&ytmp128array3[0];
+
+
+
+  for (i=0,j=0; i<864; i++,j+=3) {
+    x2128[i]    = x128[j];
+    x2128[i+864] = x128[j+1];
+    x2128[i+1728] = x128[j+2];
+  }
+
+  dft864((int16_t *)x2128,(int16_t *)ytmp128,1);
+  dft864((int16_t *)(x2128+864),(int16_t *)(ytmp128+864),1);
+  dft864((int16_t *)(x2128+1728),(int16_t *)(ytmp128+1728),1);
+
+  bfly3_tw1(ytmp128,ytmp128+864,ytmp128+1728,y128,y128+864,y128+1728);
+
+  for (i=1,j=0; i<864; i++,j++) {
+    bfly3(ytmp128+i,
+          ytmp128+864+i,
+          ytmp128+1728+i,
+          y128+i,
+          y128+864+i,
+          y128+1728+i,
+          twa128+j,
+          twb128+j);
+  }
+
+  if (scale_flag==1) {
+    norm128 = set1_int16(dft_norm_table[14]);
+
+    for (i=0; i<2592; i++) {
+      y128[i] = mulhi_int16(y128[i],norm128);
+    }
+  }
+}
+
+static int16_t twa2700[899*2*4];
+static int16_t twb2700[899*2*4];
+
+void dft2700(int16_t *x,int16_t *y,unsigned char scale_flag)  // 900 x 3
+{
+  int i,j;
+  simd_q15_t *x128=(simd_q15_t *)x;
+  simd_q15_t *y128=(simd_q15_t *)y;
+  simd_q15_t *twa128=(simd_q15_t *)&twa2700[0];
+  simd_q15_t *twb128=(simd_q15_t *)&twb2700[0];
+  simd_q15_t x2128[2700];// = (simd_q15_t *)&x2128array[0];
+  simd_q15_t ytmp128[2700];//=&ytmp128array3[0];
+
+
+
+  for (i=0,j=0; i<900; i++,j+=3) {
+    x2128[i]    = x128[j];
+    x2128[i+900] = x128[j+1];
+    x2128[i+1800] = x128[j+2];
+  }
+
+  dft900((int16_t *)x2128,(int16_t *)ytmp128,1);
+  dft900((int16_t *)(x2128+900),(int16_t *)(ytmp128+900),1);
+  dft900((int16_t *)(x2128+1800),(int16_t *)(ytmp128+1800),1);
+
+  bfly3_tw1(ytmp128,ytmp128+900,ytmp128+1800,y128,y128+900,y128+1800);
+
+  for (i=1,j=0; i<900; i++,j++) {
+    bfly3(ytmp128+i,
+          ytmp128+900+i,
+          ytmp128+1800+i,
+          y128+i,
+          y128+900+i,
+          y128+1800+i,
+          twa128+j,
+          twb128+j);
+  }
+
+  if (scale_flag==1) {
+    norm128 = set1_int16(dft_norm_table[14]);
+
+    for (i=0; i<2700; i++) {
+      y128[i] = mulhi_int16(y128[i],norm128);
+    }
+  }
+}
+
+static int16_t twa2880[959*2*4];
+static int16_t twb2880[959*2*4];
+
+void dft2880(int16_t *x,int16_t *y,unsigned char scale_flag)  // 960 x 3
+{
+  int i,j;
+  simd_q15_t *x128=(simd_q15_t *)x;
+  simd_q15_t *y128=(simd_q15_t *)y;
+  simd_q15_t *twa128=(simd_q15_t *)&twa2880[0];
+  simd_q15_t *twb128=(simd_q15_t *)&twb2880[0];
+  simd_q15_t x2128[2880];// = (simd_q15_t *)&x2128array[0];
+  simd_q15_t ytmp128[2880];//=&ytmp128array3[0];
+
+
+
+  for (i=0,j=0; i<960; i++,j+=3) {
+    x2128[i]    = x128[j];
+    x2128[i+960] = x128[j+1];
+    x2128[i+1920] = x128[j+2];
+  }
+
+  dft960((int16_t *)x2128,(int16_t *)ytmp128,1);
+  dft960((int16_t *)(x2128+960),(int16_t *)(ytmp128+960),1);
+  dft960((int16_t *)(x2128+1920),(int16_t *)(ytmp128+1920),1);
+
+  bfly3_tw1(ytmp128,ytmp128+960,ytmp128+1920,y128,y128+960,y128+1920);
+
+  for (i=1,j=0; i<960; i++,j++) {
+    bfly3(ytmp128+i,
+          ytmp128+960+i,
+          ytmp128+1920+i,
+          y128+i,
+          y128+960+i,
+          y128+1920+i,
+          twa128+j,
+          twb128+j);
+  }
+
+  if (scale_flag==1) {
+    norm128 = set1_int16(dft_norm_table[14]);
+
+    for (i=0; i<2880; i++) {
+      y128[i] = mulhi_int16(y128[i],norm128);
+    }
+  }
+}
+
+static int16_t twa2916[971*2*4];
+static int16_t twb2916[971*2*4];
+
+void dft2916(int16_t *x,int16_t *y,unsigned char scale_flag)  // 972 x 3
+{
+  int i,j;
+  simd_q15_t *x128=(simd_q15_t *)x;
+  simd_q15_t *y128=(simd_q15_t *)y;
+  simd_q15_t *twa128=(simd_q15_t *)&twa2916[0];
+  simd_q15_t *twb128=(simd_q15_t *)&twb2916[0];
+  simd_q15_t x2128[2916];// = (simd_q15_t *)&x2128array[0];
+  simd_q15_t ytmp128[2916];//=&ytmp128array3[0];
+
+
+
+  for (i=0,j=0; i<972; i++,j+=3) {
+    x2128[i]    = x128[j];
+    x2128[i+972] = x128[j+1];
+    x2128[i+1944] = x128[j+2];
+  }
+
+  dft972((int16_t *)x2128,(int16_t *)ytmp128,1);
+  dft972((int16_t *)(x2128+972),(int16_t *)(ytmp128+972),1);
+  dft972((int16_t *)(x2128+1944),(int16_t *)(ytmp128+1944),1);
+
+  bfly3_tw1(ytmp128,ytmp128+972,ytmp128+1944,y128,y128+972,y128+1944);
+
+  for (i=1,j=0; i<972; i++,j++) {
+    bfly3(ytmp128+i,
+          ytmp128+972+i,
+          ytmp128+1944+i,
+          y128+i,
+          y128+972+i,
+          y128+1944+i,
+          twa128+j,
+          twb128+j);
+  }
+
+  if (scale_flag==1) {
+    norm128 = set1_int16(dft_norm_table[14]);
+
+    for (i=0; i<2916; i++) {
+      y128[i] = mulhi_int16(y128[i],norm128);
+    }
+  }
+}
+
+static int16_t twa3000[599*8]__attribute__((aligned(32)));
+static int16_t twb3000[599*8]__attribute__((aligned(32)));
+static int16_t twc3000[599*8]__attribute__((aligned(32)));
+static int16_t twd3000[599*8]__attribute__((aligned(32)));
+
+void dft3000(int16_t *x,int16_t *y,unsigned char scale_flag)  // 600 x 5
+{
+
+  int i,j;
+  simd_q15_t *x128=(simd_q15_t *)x;
+  simd_q15_t *y128=(simd_q15_t *)y;
+  simd_q15_t *twa128=(simd_q15_t *)&twa3000[0];
+  simd_q15_t *twb128=(simd_q15_t *)&twb3000[0];
+  simd_q15_t *twc128=(simd_q15_t *)&twc3000[0];
+  simd_q15_t *twd128=(simd_q15_t *)&twd3000[0];
+  simd_q15_t x2128[3000];// = (simd_q15_t *)&x2128array[0];
+  simd_q15_t ytmp128[3000];//=&ytmp128array2[0];
+
+
+
+  for (i=0,j=0; i<600; i++,j+=5) {
+    x2128[i]    = x128[j];
+    x2128[i+600] = x128[j+1];
+    x2128[i+1200] = x128[j+2];
+    x2128[i+1800] = x128[j+3];
+    x2128[i+2400] = x128[j+4];
+  }
+
+  dft600((int16_t *)x2128,(int16_t *)ytmp128,1);
+  dft600((int16_t *)(x2128+600),(int16_t *)(ytmp128+600),1);
+  dft600((int16_t *)(x2128+1200),(int16_t *)(ytmp128+1200),1);
+  dft600((int16_t *)(x2128+1800),(int16_t *)(ytmp128+1800),1);
+  dft600((int16_t *)(x2128+2400),(int16_t *)(ytmp128+2400),1);
+
+  bfly5_tw1(ytmp128,ytmp128+600,ytmp128+1200,ytmp128+1800,ytmp128+2400,y128,y128+600,y128+1200,y128+1800,y128+2400);
+
+  for (i=1,j=0; i<600; i++,j++) {
+    bfly5(ytmp128+i,
+          ytmp128+600+i,
+          ytmp128+1200+i,
+          ytmp128+1800+i,
+          ytmp128+2400+i,
+          y128+i,
+          y128+600+i,
+          y128+1200+i,
+          y128+1800+i,
+          y128+2400+i,
+          twa128+j,
+          twb128+j,
+          twc128+j,
+          twd128+j);
+  }
+
+  if (scale_flag==1) {
+    norm128 = set1_int16(dft_norm_table[15]);
+
+    for (i=0; i<3000; i++) {
+      y128[i] = mulhi_int16(y128[i],norm128);
+    }
+  }
+}
+
+static int16_t twa3240[1079*2*4];
+static int16_t twb3240[1079*2*4];
+
+void dft3240(int16_t *x,int16_t *y,unsigned char scale_flag)  // 1080 x 3
+{
+  int i,j;
+  simd_q15_t *x128=(simd_q15_t *)x;
+  simd_q15_t *y128=(simd_q15_t *)y;
+  simd_q15_t *twa128=(simd_q15_t *)&twa3240[0];
+  simd_q15_t *twb128=(simd_q15_t *)&twb3240[0];
+  simd_q15_t x2128[3240];// = (simd_q15_t *)&x2128array[0];
+  simd_q15_t ytmp128[3240];//=&ytmp128array3[0];
+
+
+
+  for (i=0,j=0; i<1080; i++,j+=3) {
+    x2128[i]    = x128[j];
+    x2128[i+1080] = x128[j+1];
+    x2128[i+2160] = x128[j+2];
+  }
+
+  dft1080((int16_t *)x2128,(int16_t *)ytmp128,1);
+  dft1080((int16_t *)(x2128+1080),(int16_t *)(ytmp128+1080),1);
+  dft1080((int16_t *)(x2128+2160),(int16_t *)(ytmp128+2160),1);
+
+  bfly3_tw1(ytmp128,ytmp128+1080,ytmp128+2160,y128,y128+1080,y128+2160);
+
+  for (i=1,j=0; i<1080; i++,j++) {
+    bfly3(ytmp128+i,
+          ytmp128+1080+i,
+          ytmp128+2160+i,
+          y128+i,
+          y128+1080+i,
+          y128+2160+i,
+          twa128+j,
+          twb128+j);
+  }
+
+  if (scale_flag==1) {
+    norm128 = set1_int16(dft_norm_table[14]);
+
+    for (i=0; i<3240; i++) {
+      y128[i] = mulhi_int16(y128[i],norm128);
+    }
+  }
+}
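+
+/*----------------------------------------------------------------*/
+/* twiddle-factor initialization                                  */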
+
+void init_rad4(int N,int16_t *tw) {
+
+  int16_t *twa = tw;
+  int16_t *twb = twa+(N/2);
+  int16_t *twc = twb+(N/2);
+  int i;
+
+  for (i=0;i<(N/4);i++) {
+    *twa = (int16_t)round(32767.0*cos(2*M_PI*i/N)); twa++;
+    *twa = -(int16_t)round(32767.0*sin(2*M_PI*i/N)); twa++;
+    *twb = (int16_t)round(32767.0*cos(2*M_PI*2*i/N)); twb++;
+    *twb = -(int16_t)round(32767.0*sin(2*M_PI*2*i/N)); twb++;
+    *twc = (int16_t)round(32767.0*cos(2*M_PI*3*i/N)); twc++;
+    *twc = -(int16_t)round(32767.0*sin(2*M_PI*3*i/N)); twc++;
+  }
+}
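+
+/* init_radR() fills sequential twiddle tables: entry i of table k holds the
+   Q15 pair round(32767*cos(2*pi*k*i/N)), -round(32767*sin(2*pi*k*i/N)),
+   i.e. W_N^(k*i) = e^(-j*2*pi*k*i/N) for k = 1..R-1, so a single vector
+   load picks up four consecutive twiddles. */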
+void init_rad4_rep(int N,int16_t *twa,int16_t *twb,int16_t *twc) {
+
+  int i,j;
+
+  for (i=1;i<(N/4);i++) {
+    twa[0] = (int16_t)round(32767.0*cos(2*M_PI*i/N));
+    twa[1] = -(int16_t)round(32767.0*sin(2*M_PI*i/N));
+    twb[0] = (int16_t)round(32767.0*cos(2*M_PI*2*i/N));
+    twb[1] = -(int16_t)round(32767.0*sin(2*M_PI*2*i/N));
+    twc[0] = (int16_t)round(32767.0*cos(2*M_PI*3*i/N));
+    twc[1] = -(int16_t)round(32767.0*sin(2*M_PI*3*i/N));
+    for (j=1;j<4;j++) {
+      ((int32_t*)twa)[j]=((int32_t*)twa)[0];
+      ((int32_t*)twb)[j]=((int32_t*)twb)[0];
+      ((int32_t*)twc)[j]=((int32_t*)twc)[0];
+    }
+    twa+=8;
+    twb+=8;
+    twc+=8;
+  }
+}
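+
+/* The _rep variants instead replicate each twiddle across all four 32-bit
+   lanes of a vector (twa[0..1] copied into lanes 1..3 as int32), so one
+   simd_q15_t load broadcasts W_N^(k*i) to every lane; the mixed-radix
+   kernels above consume exactly one such vector per output index.  The
+   loops start at i = 1 because index 0 (unit twiddle) is covered by the
+   bflyR_tw1 calls. */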
+
+void init_rad2(int N,int16_t *tw) {
+
+  int16_t *twa = tw;
+  int i;
+
+  for (i=0;i<(N>>1);i++) {
+    *twa = (int16_t)round(32767.0*cos(2*M_PI*i/N)); twa++;
+    *twa = -(int16_t)round(32767.0*sin(2*M_PI*i/N)); twa++;
+  }
+}
+
+void init_rad2_rep(int N,int16_t *twa) {
+
+  int i,j;
+
+  for (i=1;i<(N/2);i++) {
+    twa[0] = (int16_t)round(32767.0*cos(2*M_PI*i/N));
+    twa[1] = -(int16_t)round(32767.0*sin(2*M_PI*i/N));
+    for (j=1;j<4;j++) {
+      ((int32_t*)twa)[j]=((int32_t*)twa)[0];
+    }
+    twa+=8;
+  }
+}
+
+void init_rad3(int N,int16_t *twa,int16_t *twb) {
+
+  int i;
+
+  for (i=0;i<(N/3);i++) {
+    *twa = (int16_t)round(32767.0*cos(2*M_PI*i/N)); twa++;
+    *twa = -(int16_t)round(32767.0*sin(2*M_PI*i/N)); twa++;
+    *twb = (int16_t)round(32767.0*cos(2*M_PI*2*i/N)); twb++;
+    *twb = -(int16_t)round(32767.0*sin(2*M_PI*2*i/N)); twb++;
+  }
+}
+
+void init_rad3_rep(int N,int16_t *twa,int16_t *twb) {
+
+  int i,j;
+
+  for (i=1;i<(N/3);i++) {
+    twa[0] = (int16_t)round(32767.0*cos(2*M_PI*i/N));
+    twa[1] = -(int16_t)round(32767.0*sin(2*M_PI*i/N));
+    twb[0] = (int16_t)round(32767.0*cos(2*M_PI*2*i/N));
+    twb[1] = -(int16_t)round(32767.0*sin(2*M_PI*2*i/N));
+    for (j=1;j<4;j++) {
+      ((int32_t*)twa)[j]=((int32_t*)twa)[0];
+      ((int32_t*)twb)[j]=((int32_t*)twb)[0];
+    }
+    twa+=8;
+    twb+=8;
+  }
+}
+
+void init_rad5_rep(int N,int16_t *twa,int16_t *twb,int16_t *twc,int16_t *twd) {
+
+  int i,j;
+
+  for (i=1;i<(N/5);i++) {
+    twa[0] = (int16_t)round(32767.0*cos(2*M_PI*i/N));
+    twa[1] = -(int16_t)round(32767.0*sin(2*M_PI*i/N));
+    twb[0] = (int16_t)round(32767.0*cos(2*M_PI*2*i/N));
+    twb[1] = -(int16_t)round(32767.0*sin(2*M_PI*2*i/N));
+    twc[0] = (int16_t)round(32767.0*cos(2*M_PI*3*i/N));
+    twc[1] = -(int16_t)round(32767.0*sin(2*M_PI*3*i/N));
+    twd[0] = (int16_t)round(32767.0*cos(2*M_PI*4*i/N));
+    twd[1] = -(int16_t)round(32767.0*sin(2*M_PI*4*i/N));
+    for (j=1;j<4;j++) {
+      ((int32_t*)twa)[j]=((int32_t*)twa)[0];
+      ((int32_t*)twb)[j]=((int32_t*)twb)[0];
+      ((int32_t*)twc)[j]=((int32_t*)twc)[0];
+      ((int32_t*)twd)[j]=((int32_t*)twd)[0];
+    }
+    twa+=8;
+    twb+=8;
+    twc+=8;
+    twd+=8;
+  }
+}
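+
+/* Adding a new mixed-radix size is mechanical.  A sketch for a hypothetical
+   dft3888 (1296 x 3) -- not part of this patch, shown only to document the
+   recipe; table sizes, norm index and butterfly choice follow the pattern
+   of the radix-3 kernels above: */
+#if 0
+static int16_t twa3888[1295*2*4];
+static int16_t twb3888[1295*2*4];
+
+void dft3888(int16_t *x, int16_t *y, unsigned char scale_flag) // 1296 x 3
+{
+  simd_q15_t *x128 = (simd_q15_t *)x, *y128 = (simd_q15_t *)y;
+  simd_q15_t *twa128 = (simd_q15_t *)twa3888, *twb128 = (simd_q15_t *)twb3888;
+  simd_q15_t x2128[3888], ytmp128[3888];
+
+  for (int i = 0, j = 0; i < 1296; i++, j += 3) { // deinterleave by 3
+    x2128[i]        = x128[j];
+    x2128[i + 1296] = x128[j + 1];
+    x2128[i + 2592] = x128[j + 2];
+  }
+
+  dft1296((int16_t *)x2128,          (int16_t *)ytmp128,          1);
+  dft1296((int16_t *)(x2128 + 1296), (int16_t *)(ytmp128 + 1296), 1);
+  dft1296((int16_t *)(x2128 + 2592), (int16_t *)(ytmp128 + 2592), 1);
+
+  bfly3_tw1(ytmp128, ytmp128 + 1296, ytmp128 + 2592,
+            y128, y128 + 1296, y128 + 2592);
+
+  for (int i = 1, j = 0; i < 1296; i++, j++)
+    bfly3(ytmp128 + i, ytmp128 + 1296 + i, ytmp128 + 2592 + i,
+          y128 + i, y128 + 1296 + i, y128 + 2592 + i,
+          twa128 + j, twb128 + j);
+
+  if (scale_flag == 1) { // 1/sqrt(3) in Q15, as for the other radix-3 sizes
+    simd_q15_t norm = set1_int16(dft_norm_table[14]);
+    for (int i = 0; i < 3888; i++)
+      y128[i] = mulhi_int16(y128[i], norm);
+  }
+}
+/* ...and register its tables in dfts_autoinit():
+   init_rad3_rep(3888, twa3888, twb3888); */
+#endif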
+/*----------------------------------------------------------------*/
+/* dft library entry points:                                      */
+
+int dfts_autoinit(void)
+{
+  init_rad4(1024,tw1024);
+  init_rad2(2048,tw2048);
+  init_rad4(4096,tw4096);
+  init_rad2(8192,tw8192);
+  init_rad4(16384,tw16384);
+  init_rad2(32768,tw32768);
+
+  init_rad3(384,twa384i,twb384i);
+  init_rad3(768,twa768,twb768);
+  init_rad3(1536,twa1536,twb1536);
+  init_rad3(3072,twa3072,twb3072);
+  init_rad3(6144,twa6144,twb6144);
+  init_rad3(12288,twa12288,twb12288);
+  init_rad3(18432,twa18432,twb18432);
+  init_rad3(24576,twa24576,twb24576);
+  init_rad3(36864,twa36864,twb36864);
+  init_rad3(49152,twa49152,twb49152);
+  init_rad3(98304,twa98304,twb98304);
+
+
+  init_rad2_rep(24,tw24);
+  init_rad3_rep(36,twa36,twb36);
+  init_rad4_rep(48,twa48,twb48,twc48);
+  init_rad5_rep(60,twa60,twb60,twc60,twd60);
+  init_rad2_rep(72,tw72);
+  init_rad2_rep(96,tw96);
+  init_rad3_rep(108,twa108,twb108);
+  init_rad2_rep(120,tw120);
+  init_rad3_rep(144,twa144,twb144);
+  init_rad3_rep(180,twa180,twb180);
+  init_rad4_rep(192,twa192,twb192,twc192);
+  init_rad3_rep(216,twa216,twb216);
+  init_rad4_rep(240,twa240,twb240,twc240);
+  init_rad3_rep(288,twa288,twb288);
+  init_rad5_rep(300,twa300,twb300,twc300,twd300);
+  init_rad3_rep(324,twa324,twb324);
+  init_rad3_rep(360,twa360,twb360);
+  init_rad4_rep(384,twa384,twb384,twc384);
+  init_rad4_rep(432,twa432,twb432,twc432);
+  init_rad4_rep(480,twa480,twb480,twc480);
+  init_rad3_rep(540,twa540,twb540);
+  init_rad3_rep(576,twa576,twb576);
+  init_rad2_rep(600,twa600);
+  init_rad3_rep(648,twa648,twb648);
+  init_rad4_rep(720,twa720,twb720,twc720);
+  init_rad4_rep(768,twa768p,twb768p,twc768p);
+  init_rad3_rep(864,twa864,twb864);
+  init_rad3_rep(900,twa900,twb900);
+  init_rad4_rep(960,twa960,twb960,twc960);
+  init_rad3_rep(972,twa972,twb972);
+  init_rad3_rep(1080,twa1080,twb1080);
+  init_rad4_rep(1152,twa1152,twb1152,twc1152);
+  init_rad4_rep(1200,twa1200,twb1200,twc1200);
+  init_rad3_rep(1296,twa1296,twb1296);
+  init_rad3_rep(1440,twa1440,twb1440);
+  init_rad5_rep(1500,twa1500,twb1500,twc1500,twd1500);
+  init_rad3_rep(1620,twa1620,twb1620);
+  init_rad3_rep(1728,twa1728,twb1728);
+  init_rad3_rep(1800,twa1800,twb1800);
+  init_rad4_rep(1920,twa1920,twb1920, twc1920);
+  init_rad3_rep(1944,twa1944,twb1944);
+  init_rad3_rep(2160,twa2160,twb2160);
+  init_rad3_rep(2304,twa2304,twb2304);
+  init_rad4_rep(2400,twa2400,twb2400,twc2400);
+  init_rad3_rep(2592,twa2592,twb2592);
+  init_rad3_rep(2700,twa2700,twb2700);
+  init_rad3_rep(2880,twa2880,twb2880);
+  init_rad3_rep(2916,twa2916,twb2916);
+  init_rad5_rep(3000,twa3000,twb3000,twc3000,twd3000);
+  init_rad3_rep(3240,twa3240,twb3240);
+
+  return 0;
+}
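+
+/* A scalar reference DFT (hypothetical helper, not used by this patch) for
+   cross-checking the tables and kernels on small sizes; x and y are
+   interleaved re/im int16 pairs, accumulation is in double, and the result
+   is narrowed without normalization, so keep the inputs small. */
+#if 0
+static void dft_ref(const int16_t *x, int16_t *y, int N)
+{
+  for (int k = 0; k < N; k++) {
+    double re = 0.0, im = 0.0;
+    for (int n = 0; n < N; n++) {
+      const double c = cos(2 * M_PI * (double)k * n / N);
+      const double s = sin(2 * M_PI * (double)k * n / N);
+      re += x[2 * n] * c + x[2 * n + 1] * s; // Re{x[n] * e^(-j*2*pi*k*n/N)}
+      im += x[2 * n + 1] * c - x[2 * n] * s; // Im{x[n] * e^(-j*2*pi*k*n/N)}
+    }
+    y[2 * k]     = (int16_t)lround(re);
+    y[2 * k + 1] = (int16_t)lround(im);
+  }
+}
+#endif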
+
+
+
+#ifndef MR_MAIN
+
+void dft(uint8_t sizeidx, int16_t *input, int16_t *output, unsigned char scale_flag)
+{
+  // sizeidx is unsigned, so only the upper bound needs checking
+  AssertFatal(sizeidx < DFT_SIZE_IDXTABLESIZE, "Invalid dft size index %i\n", sizeidx);
+  const int algn = 0xF;
+  AssertFatal(((intptr_t)output & algn) == 0, "Buffers should be 16 bytes aligned %p", output);
+  if (((intptr_t)input) & algn) {
+    LOG_D(PHY, "DFT called with input not aligned, add a memcpy, size %d\n", sizeidx);
+    int sz = dft_ftab[sizeidx].size;
+    if (sizeidx == DFT_12) // this entry processes 8 DFTs in parallel
+      sz *= 8;
+    int16_t tmp[sz * 2] __attribute__((aligned(32))); // input and output are int16_t rather than c16_t
+    memcpy(tmp, input, sizeof tmp);
+    dft_ftab[sizeidx].func(tmp, output, scale_flag);
+  } else {
+    dft_ftab[sizeidx].func(input, output, scale_flag);
+  }
+}
+
+void idft(uint8_t sizeidx, int16_t *input, int16_t *output, unsigned char scale_flag)
+{
+  AssertFatal(sizeidx < DFT_SIZE_IDXTABLESIZE, "Invalid idft size index %i\n", sizeidx);
+  const int algn = 0xF;
+  AssertFatal(((intptr_t)output & algn) == 0, "Buffers should be 16 bytes aligned %p", output);
+  if (((intptr_t)input) & algn) {
+    LOG_D(PHY, "IDFT called with input not aligned, add a memcpy, size %d\n", sizeidx);
+    int sz = idft_ftab[sizeidx].size;
+    int16_t tmp[sz * 2] __attribute__((aligned(32))); // input and output are int16_t rather than c16_t
+    memcpy(tmp, input, sizeof tmp);
+    idft_ftab[sizeidx].func(tmp, output, scale_flag); // was dft_ftab: the unaligned path must dispatch the inverse table too
+  } else {
+    idft_ftab[sizeidx].func(input, output, scale_flag);
+  }
+}
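+
+/* Usage note: output must be 16-byte aligned; unaligned input merely costs
+   one memcpy.  A typical call, with illustrative buffer names and a DFT_*
+   size index from the table headers:
+     dft(DFT_2048, (int16_t *)rxdata, (int16_t *)rxdataF, 1);  */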
+
+#endif
+
+/*---------------------------------------------------------------------------------------*/
+
+#ifdef MR_MAIN
+#include <string.h>
+#include <stdio.h>
+
+#define LOG_M write_output
+int write_output(const char *fname,const char *vname,void *data,int length,int dec,char format)
+{
+  FILE *fp = NULL;
+  int i;
+
+  printf("Writing %d elements of type %d to %s\n",length,format,fname);
+
+  if (format >= 10 && format <= 14) // raw list formats are appended across calls
+    fp = fopen(fname,"a+");
+  else
+    fp = fopen(fname,"w+");
+
+  if (fp== NULL) {
+    printf("[OPENAIR][FILE OUTPUT] Cannot open file %s\n",fname);
+    return(-1);
+  }
+
+  if (format < 10 || format > 14) // bracketed Matlab formats open a variable assignment
+    fprintf(fp,"%s = [",vname);
+
+  switch (format) {
+  case 0:   // real 16-bit
+
+    for (i=0; i<length; i+=dec) {
+      fprintf(fp,"%d\n",((short *)data)[i]);
+    }
+
+    break;
+
+  case 1:  // complex 16-bit
+  case 13:
+  case 14:
+  case 15:
+
+    for (i=0; i<length<<1; i+=(2*dec)) {
+      fprintf(fp,"%d + j*(%d)\n",((short *)data)[i],((short *)data)[i+1]);
+
+    }
+
+
+    break;
+
+  case 2:  // real 32-bit
+    for (i=0; i<length; i+=dec) {
+      fprintf(fp,"%d\n",((int *)data)[i]);
+    }
+
+    break;
+
+  case 3: // complex 32-bit
+    for (i=0; i<length<<1; i+=(2*dec)) {
+      fprintf(fp,"%d + j*(%d)\n",((int *)data)[i],((int *)data)[i+1]);
+    }
+
+    break;
+
+  case 4: // real 8-bit
+    for (i=0; i<length; i+=dec) {
+      fprintf(fp,"%d\n",((char *)data)[i]);
+    }
+
+    break;
+
+  case 5: // complex 8-bit
+    for (i=0; i<length<<1; i+=(2*dec)) {
+      fprintf(fp,"%d + j*(%d)\n",((char *)data)[i],((char *)data)[i+1]);
+    }
+
+    break;
+
+  case 6:  // real 64-bit
+    for (i=0; i<length; i+=dec) {
+      fprintf(fp,"%lld\n",((long long*)data)[i]);
+    }
+
+    break;
+
+  case 7: // real double
+    for (i=0; i<length; i+=dec) {
+      fprintf(fp,"%g\n",((double *)data)[i]);
+    }
+
+    break;
+
+  case 8: // complex double
+    for (i=0; i<length<<1; i+=2*dec) {
+      fprintf(fp,"%g + j*(%g)\n",((double *)data)[i], ((double *)data)[i+1]);
+    }
+
+    break;
+
+  case 9: // real unsigned 8-bit
+    for (i=0; i<length; i+=dec) {
+      fprintf(fp,"%d\n",((unsigned char *)data)[i]);
+    }
+
+    break;
+
+
+  case 10 : // 16-bit complex, appended as one Matlab row per call
+
+    for (i=0; i<length<<1; i+=(2*dec)) {
+
+      if((i < 2*(length-1)) && (i > 0))
+        fprintf(fp,"%d + j*(%d),",((short *)data)[i],((short *)data)[i+1]);
+      else if (i == 2*(length-1))
+        fprintf(fp,"%d + j*(%d);",((short *)data)[i],((short *)data)[i+1]);
+      else if (i == 0)
+        fprintf(fp,"\n%d + j*(%d),",((short *)data)[i],((short *)data)[i+1]);
+
+
+
+    }
+
+    break;
+
+  case 11 : // 16-bit real (channel magnitudes), appended as one Matlab row per call
+    for (i=0; i<length; i+=dec) {
+
+      if((i <(length-1))&& (i > 0))
+        fprintf(fp,"%d,",((short *)data)[i]);
+      else if (i == (length-1))
+        fprintf(fp,"%d;",((short *)data)[i]);
+      else if (i == 0)
+        fprintf(fp,"\n%d,",((short *)data)[i]);
+    }
+
+    printf("\n erennnnnnnnnnnnnnn: length :%d",length);
+    break;
+
+  case 12 : // single unsigned 8-bit value (log2_maxh); the value is passed in the pointer itself
+    fprintf(fp,"%d \n",((unsigned char *)&data)[0]);
+    break;
+
+  }
+
+  if (format != 10 && format !=11 && format !=12 && format != 13 && format != 15) {
+    fprintf(fp,"];\n");
+    fclose(fp);
+    return(0);
+  } else if (format == 10 || format ==11 || format == 12 || format == 13 || format == 15) {
+    fclose(fp);
+    return(0);
+  }
+
+  return 0;
+}
+
+int main(int argc, char**argv)
+{
+
+
+  time_stats_t ts;
+  simd_q15_t x[32768],y[32768],tw0,tw1,tw2,tw3;
+  simd_q15_t x2[1024]; // holds the forward-DFT output of the 4096-point round-trip check (4096 c16 samples = 1024 vectors)
+  int i;
+  simd_q15_t *x128=(simd_q15_t*)x,*y128=(simd_q15_t*)y;
+
+  dfts_autoinit();
+
+  set_taus_seed(0);
+  opp_enabled = 1;
+ /* 
+    ((int16_t *)&tw0)[0] = 32767;
+    ((int16_t *)&tw0)[1] = 0;
+    ((int16_t *)&tw0)[2] = 32767;
+    ((int16_t *)&tw0)[3] = 0;
+    ((int16_t *)&tw0)[4] = 32767;
+    ((int16_t *)&tw0)[5] = 0;
+    ((int16_t *)&tw0)[6] = 32767;
+    ((int16_t *)&tw0)[7] = 0;
+
+    ((int16_t *)&tw1)[0] = 32767;
+    ((int16_t *)&tw1)[1] = 0;
+    ((int16_t *)&tw1)[2] = 32767;
+    ((int16_t *)&tw1)[3] = 0;
+    ((int16_t *)&tw1)[4] = 32767;
+    ((int16_t *)&tw1)[5] = 0;
+    ((int16_t *)&tw1)[6] = 32767;
+    ((int16_t *)&tw1)[7] = 0;
+
+    ((int16_t *)&tw2)[0] = 32767;
+    ((int16_t *)&tw2)[1] = 0;
+    ((int16_t *)&tw2)[2] = 32767;
+    ((int16_t *)&tw2)[3] = 0;
+    ((int16_t *)&tw2)[4] = 32767;
+    ((int16_t *)&tw2)[5] = 0;
+    ((int16_t *)&tw2)[6] = 32767;
+    ((int16_t *)&tw2)[7] = 0;
+
+    ((int16_t *)&tw3)[0] = 32767;
+    ((int16_t *)&tw3)[1] = 0;
+    ((int16_t *)&tw3)[2] = 32767;
+    ((int16_t *)&tw3)[3] = 0;
+    ((int16_t *)&tw3)[4] = 32767;
+    ((int16_t *)&tw3)[5] = 0;
+    ((int16_t *)&tw3)[6] = 32767;
+    ((int16_t *)&tw3)[7] = 0;
+ */
+    for (i=0;i<300;i++) {
+      x[i] = vreinterpretq_s16_s32(vdupq_n_s32(taus()));
+      x[i] = vshrq_n_s16(x[i],4);
+    }
+      /*
+    bfly2_tw1(x,x+1,y,y+1);
+    printf("(%d,%d) (%d,%d) => (%d,%d) (%d,%d)\n",((int16_t*)&x[0])[0],((int16_t*)&x[0])[1],((int16_t*)&x[1])[0],((int16_t*)&x[1])[1],((int16_t*)&y[0])[0],((int16_t*)&y[0])[1],((int16_t*)&y[1])[0],((int16_t*)&y[1])[1]);
+    printf("(%d,%d) (%d,%d) => (%d,%d) (%d,%d)\n",((int16_t*)&x[0])[0],((int16_t*)&x[0])[1],((int16_t*)&x[1])[0],((int16_t*)&x[1])[1],((int16_t*)&y[0])[2],((int16_t*)&y[0])[3],((int16_t*)&y[1])[2],((int16_t*)&y[1])[3]);
+    printf("(%d,%d) (%d,%d) => (%d,%d) (%d,%d)\n",((int16_t*)&x[0])[0],((int16_t*)&x[0])[1],((int16_t*)&x[1])[0],((int16_t*)&x[1])[1],((int16_t*)&y[0])[4],((int16_t*)&y[0])[5],((int16_t*)&y[1])[4],((int16_t*)&y[1])[5]);
+    printf("(%d,%d) (%d,%d) => (%d,%d) (%d,%d)\n",((int16_t*)&x[0])[0],((int16_t*)&x[0])[1],((int16_t*)&x[1])[0],((int16_t*)&x[1])[1],((int16_t*)&y[0])[6],((int16_t*)&y[0])[7],((int16_t*)&y[1])[6],((int16_t*)&y[1])[7]);
+    bfly2(x,x+1,y,y+1, &tw0);
+    printf("0(%d,%d) (%d,%d) => (%d,%d) (%d,%d)\n",((int16_t*)&x[0])[0],((int16_t*)&x[0])[1],((int16_t*)&x[1])[0],((int16_t*)&x[1])[1],((int16_t*)&y[0])[0],((int16_t*)&y[0])[1],((int16_t*)&y[1])[0],((int16_t*)&y[1])[1]);
+    printf("1(%d,%d) (%d,%d) => (%d,%d) (%d,%d)\n",((int16_t*)&x[0])[0],((int16_t*)&x[0])[1],((int16_t*)&x[1])[0],((int16_t*)&x[1])[1],((int16_t*)&y[0])[2],((int16_t*)&y[0])[3],((int16_t*)&y[1])[2],((int16_t*)&y[1])[3]);
+    printf("2(%d,%d) (%d,%d) => (%d,%d) (%d,%d)\n",((int16_t*)&x[0])[0],((int16_t*)&x[0])[1],((int16_t*)&x[1])[0],((int16_t*)&x[1])[1],((int16_t*)&y[0])[4],((int16_t*)&y[0])[5],((int16_t*)&y[1])[4],((int16_t*)&y[1])[5]);
+    printf("3(%d,%d) (%d,%d) => (%d,%d) (%d,%d)\n",((int16_t*)&x[0])[0],((int16_t*)&x[0])[1],((int16_t*)&x[1])[0],((int16_t*)&x[1])[1],((int16_t*)&y[0])[6],((int16_t*)&y[0])[7],((int16_t*)&y[1])[6],((int16_t*)&y[1])[7]);
+    bfly2(x,x+1,y,y+1, &tw0);
+
+    bfly3_tw1(x,x+1,x+2,y, y+1,y+2);
+    printf("0(%d,%d) (%d,%d) (%d %d) => (%d,%d) (%d,%d) (%d %d)\n",((int16_t*)&x[0])[0],((int16_t*)&x[0])[1],((int16_t*)&x[1])[0],((int16_t*)&x[1])[1],((int16_t*)&x[2])[0],((int16_t*)&x[2])[1],((int16_t*)&y[0])[0],((int16_t*)&y[0])[1],((int16_t*)&y[1])[0],((int16_t*)&y[1])[1],((int16_t*)&y[2])[0],((int16_t*)&y[2])[1]);
+    printf("1(%d,%d) (%d,%d) (%d %d) => (%d,%d) (%d,%d) (%d %d)\n",((int16_t*)&x[0])[0],((int16_t*)&x[0])[1],((int16_t*)&x[1])[0],((int16_t*)&x[1])[1],((int16_t*)&x[2])[0],((int16_t*)&x[2])[1],((int16_t*)&y[0])[2],((int16_t*)&y[0])[3],((int16_t*)&y[1])[2],((int16_t*)&y[1])[3],((int16_t*)&y[2])[2],((int16_t*)&y[2])[3]);
+    printf("2(%d,%d) (%d,%d) (%d %d) => (%d,%d) (%d,%d) (%d %d)\n",((int16_t*)&x[0])[0],((int16_t*)&x[0])[1],((int16_t*)&x[1])[0],((int16_t*)&x[1])[1],((int16_t*)&x[2])[0],((int16_t*)&x[2])[1],((int16_t*)&y[0])[4],((int16_t*)&y[0])[5],((int16_t*)&y[1])[4],((int16_t*)&y[1])[5],((int16_t*)&y[2])[4],((int16_t*)&y[2])[5]);
+    printf("3(%d,%d) (%d,%d) (%d %d) => (%d,%d) (%d,%d) (%d %d)\n",((int16_t*)&x[0])[0],((int16_t*)&x[0])[1],((int16_t*)&x[1])[0],((int16_t*)&x[1])[1],((int16_t*)&x[2])[0],((int16_t*)&x[2])[1],((int16_t*)&y[0])[6],((int16_t*)&y[0])[7],((int16_t*)&y[1])[6],((int16_t*)&y[1])[7],((int16_t*)&y[2])[6],((int16_t*)&y[2])[7]);
+    bfly3(x,x+1,x+2,y, y+1,y+2,&tw0,&tw1);
+
+    printf("0(%d,%d) (%d,%d) (%d %d) => (%d,%d) (%d,%d) (%d %d)\n",((int16_t*)&x[0])[0],((int16_t*)&x[0])[1],((int16_t*)&x[1])[0],((int16_t*)&x[1])[1],((int16_t*)&x[2])[0],((int16_t*)&x[2])[1],((int16_t*)&y[0])[0],((int16_t*)&y[0])[1],((int16_t*)&y[1])[0],((int16_t*)&y[1])[1],((int16_t*)&y[2])[0],((int16_t*)&y[2])[1]);
+    printf("1(%d,%d) (%d,%d) (%d %d) => (%d,%d) (%d,%d) (%d %d)\n",((int16_t*)&x[0])[0],((int16_t*)&x[0])[1],((int16_t*)&x[1])[0],((int16_t*)&x[1])[1],((int16_t*)&x[2])[0],((int16_t*)&x[2])[1],((int16_t*)&y[0])[2],((int16_t*)&y[0])[3],((int16_t*)&y[1])[2],((int16_t*)&y[1])[3],((int16_t*)&y[2])[2],((int16_t*)&y[2])[3]);
+    printf("2(%d,%d) (%d,%d) (%d %d) => (%d,%d) (%d,%d) (%d %d)\n",((int16_t*)&x[0])[0],((int16_t*)&x[0])[1],((int16_t*)&x[1])[0],((int16_t*)&x[1])[1],((int16_t*)&x[2])[0],((int16_t*)&x[2])[1],((int16_t*)&y[0])[4],((int16_t*)&y[0])[5],((int16_t*)&y[1])[4],((int16_t*)&y[1])[5],((int16_t*)&y[2])[4],((int16_t*)&y[2])[5]);
+    printf("3(%d,%d) (%d,%d) (%d %d) => (%d,%d) (%d,%d) (%d %d)\n",((int16_t*)&x[0])[0],((int16_t*)&x[0])[1],((int16_t*)&x[1])[0],((int16_t*)&x[1])[1],((int16_t*)&x[2])[0],((int16_t*)&x[2])[1],((int16_t*)&y[0])[6],((int16_t*)&y[0])[7],((int16_t*)&y[1])[6],((int16_t*)&y[1])[7],((int16_t*)&y[2])[6],((int16_t*)&y[2])[7]);
+
+
+    bfly4_tw1(x,x+1,x+2,x+3,y, y+1,y+2,y+3);
+    printf("(%d,%d) (%d,%d) (%d %d) (%d,%d) => (%d,%d) (%d,%d) (%d %d) (%d,%d)\n",
+     ((int16_t*)&x[0])[0],((int16_t*)&x[0])[1],((int16_t*)&x[1])[0],((int16_t*)&x[1])[1],
+     ((int16_t*)&x[2])[0],((int16_t*)&x[2])[1],((int16_t*)&x[3])[0],((int16_t*)&x[3])[1],
+     ((int16_t*)&y[0])[0],((int16_t*)&y[0])[1],((int16_t*)&y[1])[0],((int16_t*)&y[1])[1],
+     ((int16_t*)&y[2])[0],((int16_t*)&y[2])[1],((int16_t*)&y[3])[0],((int16_t*)&y[3])[1]);
+
+    bfly4(x,x+1,x+2,x+3,y, y+1,y+2,y+3,&tw0,&tw1,&tw2);
+    printf("0(%d,%d) (%d,%d) (%d %d) (%d,%d) => (%d,%d) (%d,%d) (%d %d) (%d,%d)\n",
+     ((int16_t*)&x[0])[0],((int16_t*)&x[0])[1],((int16_t*)&x[1])[0],((int16_t*)&x[1])[1],
+     ((int16_t*)&x[2])[0],((int16_t*)&x[2])[1],((int16_t*)&x[3])[0],((int16_t*)&x[3])[1],
+     ((int16_t*)&y[0])[0],((int16_t*)&y[0])[1],((int16_t*)&y[1])[0],((int16_t*)&y[1])[1],
+     ((int16_t*)&y[2])[0],((int16_t*)&y[2])[1],((int16_t*)&y[3])[0],((int16_t*)&y[3])[1]);
+    printf("1(%d,%d) (%d,%d) (%d %d) (%d,%d) => (%d,%d) (%d,%d) (%d %d) (%d,%d)\n",
+     ((int16_t*)&x[0])[0],((int16_t*)&x[0])[1],((int16_t*)&x[1])[0],((int16_t*)&x[1])[1],
+     ((int16_t*)&x[2])[0],((int16_t*)&x[2])[1],((int16_t*)&x[3])[0],((int16_t*)&x[3])[1],
+     ((int16_t*)&y[0])[2],((int16_t*)&y[0])[3],((int16_t*)&y[1])[2],((int16_t*)&y[1])[3],
+     ((int16_t*)&y[2])[2],((int16_t*)&y[2])[3],((int16_t*)&y[3])[2],((int16_t*)&y[3])[3]);
+    printf("2(%d,%d) (%d,%d) (%d %d) (%d,%d) => (%d,%d) (%d,%d) (%d %d) (%d,%d)\n",
+     ((int16_t*)&x[0])[0],((int16_t*)&x[0])[1],((int16_t*)&x[1])[0],((int16_t*)&x[1])[1],
+     ((int16_t*)&x[2])[0],((int16_t*)&x[2])[1],((int16_t*)&x[3])[0],((int16_t*)&x[3])[1],
+     ((int16_t*)&y[0])[4],((int16_t*)&y[0])[5],((int16_t*)&y[1])[4],((int16_t*)&y[1])[5],
+     ((int16_t*)&y[2])[4],((int16_t*)&y[2])[5],((int16_t*)&y[3])[4],((int16_t*)&y[3])[5]);
+    printf("3(%d,%d) (%d,%d) (%d %d) (%d,%d) => (%d,%d) (%d,%d) (%d %d) (%d,%d)\n",
+     ((int16_t*)&x[0])[0],((int16_t*)&x[0])[1],((int16_t*)&x[1])[0],((int16_t*)&x[1])[1],
+     ((int16_t*)&x[2])[6],((int16_t*)&x[2])[7],((int16_t*)&x[3])[6],((int16_t*)&x[3])[7],
+     ((int16_t*)&y[0])[6],((int16_t*)&y[0])[7],((int16_t*)&y[1])[6],((int16_t*)&y[1])[7],
+     ((int16_t*)&y[2])[0],((int16_t*)&y[2])[1],((int16_t*)&y[3])[0],((int16_t*)&y[3])[1]);
+
+    bfly5_tw1(x,x+1,x+2,x+3,x+4,y,y+1,y+2,y+3,y+4);
+
+    for (i=0;i<5;i++)
+      printf("%d,%d,",
+       ((int16_t*)&x[i])[0],((int16_t*)&x[i])[1]);
+    printf("\n");
+    for (i=0;i<5;i++)
+      printf("%d,%d,",
+       ((int16_t*)&y[i])[0],((int16_t*)&y[i])[1]);
+    printf("\n");
+
+    bfly5(x,x+1,x+2,x+3,x+4,y, y+1,y+2,y+3,y+4,&tw0,&tw1,&tw2,&tw3);
+    for (i=0;i<5;i++)
+      printf("%d,%d,",
+       ((int16_t*)&x[i])[0],((int16_t*)&x[i])[1]);
+    printf("\n");
+    for (i=0;i<5;i++)
+      printf("%d,%d,",
+       ((int16_t*)&y[i])[0],((int16_t*)&y[i])[1]);
+    printf("\n");
+
+
+    printf("\n\n12-point\n");
+    dft12f(x,
+     x+1,
+     x+2,
+     x+3,
+     x+4,
+     x+5,
+     x+6,
+     x+7,
+     x+8,
+     x+9,
+     x+10,
+     x+11,
+     y,
+     y+1,
+     y+2,
+     y+3,
+     y+4,
+     y+5,
+     y+6,
+     y+7,
+     y+8,
+     y+9,
+     y+10,
+     y+11);
+
+
+    printf("X: ");
+    for (i=0;i<12;i++)
+      printf("%d,%d,",((int16_t*)(&x[i]))[0],((int16_t *)(&x[i]))[1]);
+    printf("\nY:");
+    for (i=0;i<12;i++)
+      printf("%d,%d,",((int16_t*)(&y[i]))[0],((int16_t *)(&y[i]))[1]);
+    printf("\n");
+
+ */
+
+    for (i=0;i<32;i++) {
+      ((int16_t*)x)[i] = (int16_t)((taus()&0xffff))>>5;
+    }
+    memset((void*)&y[0],0,16*4);
+    idft16((int16_t *)x,(int16_t *)y);
+    printf("\n\n16-point\n");
+    printf("X: ");
+    for (i=0;i<4;i++)
+      printf("%d,%d,%d,%d,%d,%d,%d,%d,",((int16_t*)&x[i])[0],((int16_t *)&x[i])[1],((int16_t*)&x[i])[2],((int16_t *)&x[i])[3],((int16_t*)&x[i])[4],((int16_t*)&x[i])[5],((int16_t*)&x[i])[6],((int16_t*)&x[i])[7]);
+    printf("\nY:");
+
+    for (i=0;i<4;i++)
+      printf("%d,%d,%d,%d,%d,%d,%d,%d,",((int16_t*)&y[i])[0],((int16_t *)&y[i])[1],((int16_t*)&y[i])[2],((int16_t *)&y[i])[3],((int16_t*)&y[i])[4],((int16_t *)&y[i])[5],((int16_t*)&y[i])[6],((int16_t *)&y[i])[7]);
+    printf("\n");
+ 
+  memset((void*)&x[0],0,2048*4);
+      
+  for (i=0; i<2048; i+=4) {
+     ((int16_t*)x)[i<<1] = 1024;
+     ((int16_t*)x)[1+(i<<1)] = 0;
+     ((int16_t*)x)[2+(i<<1)] = 0;
+     ((int16_t*)x)[3+(i<<1)] = 1024;
+     ((int16_t*)x)[4+(i<<1)] = -1024;
+     ((int16_t*)x)[5+(i<<1)] = 0;
+     ((int16_t*)x)[6+(i<<1)] = 0;
+     ((int16_t*)x)[7+(i<<1)] = -1024;
+     }
+  /*
+  for (i=0; i<2048; i+=2) {
+     ((int16_t*)x)[i<<1] = 1024;
+     ((int16_t*)x)[1+(i<<1)] = 0;
+     ((int16_t*)x)[2+(i<<1)] = -1024;
+     ((int16_t*)x)[3+(i<<1)] = 0;
+     }
+       
+  for (i=0;i<2048*2;i++) {
+    ((int16_t*)x)[i] = i/2;//(int16_t)((taus()&0xffff))>>5;
+  }
+     */
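+  // Each benchmark below builds an OFDM-like test spectrum: DC and the guard band
+  // stay zero while the occupied subcarriers, split between the positive- and
+  // negative-frequency halves, get random +/-364 values.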
+  memset((void*)&x[0],0,64*sizeof(int32_t));
+  for (i=2;i<36;i++) {
+    if ((taus() & 1)==0)
+      ((int16_t*)x)[i] = 364;
+    else
+      ((int16_t*)x)[i] = -364;
+  }
+  for (i=(128-36);i<128;i++) {
+    if ((taus() & 1)==0)
+      ((int16_t*)x)[i] = 364;
+    else
+      ((int16_t*)x)[i] = -364;
+  }
+  idft64((int16_t *)x,(int16_t *)y,1);
+  
+
+  printf("64-point\n");
+  printf("X: ");
+  for (i=0;i<8;i++)
+    print_shorts256("",((int16_t *)x)+(i*16));
+
+  printf("\nY:");
+
+  for (i=0;i<8;i++)
+    print_shorts256("",((int16_t *)y)+(i*16));
+  printf("\n");
+
+  
+
+
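+  // a few untimed runs first, presumably to warm the caches before measuring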
+  idft64((int16_t *)x,(int16_t *)y,1);
+  idft64((int16_t *)x,(int16_t *)y,1);
+  idft64((int16_t *)x,(int16_t *)y,1);
+  reset_meas(&ts);
+
+  for (i=0; i<10000000; i++) {
+    start_meas(&ts);
+    idft64((int16_t *)x,(int16_t *)y,1);
+    stop_meas(&ts);
+
+  }
+  printf("\n\n64-point (%f cycles, #trials %d)\n",(double)ts.diff/(double)ts.trials,ts.trials);
+  /*
+  //  LOG_M("x64.m","x64",x,64,1,1);
+  LOG_M("y64.m","y64",y,64,1,1);
+  LOG_M("x64.m","x64",x,64,1,1);
+  */
+/*
+  printf("X: ");
+  for (i=0;i<16;i++)
+    printf("%d,%d,%d,%d,%d,%d,%d,%d,",((int16_t*)&x[i])[0],((int16_t *)&x[i])[1],((int16_t*)&x[i])[2],((int16_t *)&x[i])[3],((int16_t*)&x[i])[4],((int16_t*)&x[i])[5],((int16_t*)&x[i])[6],((int16_t*)&x[i])[7]);
+  printf("\nY:");
+
+  for (i=0;i<16;i++)
+    printf("%d,%d,%d,%d,%d,%d,%d,%d,",((int16_t*)&y[i])[0],((int16_t *)&y[i])[1],((int16_t*)&y[i])[2],((int16_t *)&y[i])[3],((int16_t*)&y[i])[4],((int16_t *)&y[i])[5],((int16_t*)&y[i])[6],((int16_t *)&y[i])[7]);
+  printf("\n");
+
+  idft64((int16_t*)y,(int16_t*)x,1);
+  printf("X: ");
+  for (i=0;i<16;i++)
+    printf("%d,%d,%d,%d,%d,%d,%d,%d,",((int16_t*)&x[i])[0],((int16_t *)&x[i])[1],((int16_t*)&x[i])[2],((int16_t *)&x[i])[3],((int16_t*)&x[i])[4],((int16_t*)&x[i])[5],((int16_t*)&x[i])[6],((int16_t*)&x[i])[7]);
+ 
+  for (i=0; i<256; i++) {
+    ((int16_t*)x)[i] = (int16_t)((taus()&0xffff))>>5;
+  }
+*/
+  
+  memset((void*)&x[0],0,128*4);
+  for (i=2;i<72;i++) {
+    if ((taus() & 1)==0)
+      ((int16_t*)x)[i] = 364;
+    else
+      ((int16_t*)x)[i] = -364;
+  }
+  for (i=(256-72);i<256;i++) {
+    if ((taus() & 1)==0)
+      ((int16_t*)x)[i] = 364;
+    else
+      ((int16_t*)x)[i] = -364;
+  }
+  reset_meas(&ts);
+
+  for (i=0; i<10000; i++) {
+    start_meas(&ts);
+    idft128((int16_t *)x,(int16_t *)y,1);
+    stop_meas(&ts);
+  }
+
+  printf("\n\n128-point(%f cycles)\n",(double)ts.diff/(double)ts.trials);
+  LOG_M("y128.m","y128",y,128,1,1);
+  LOG_M("x128.m","x128",x,128,1,1);
+/*
+  printf("X: ");
+   for (i=0;i<32;i++)
+     printf("%d,%d,%d,%d,%d,%d,%d,%d,",((int16_t*)&x[i])[0],((int16_t *)&x[i])[1],((int16_t*)&x[i])[2],((int16_t *)&x[i])[3],((int16_t*)&x[i])[4],((int16_t*)&x[i])[5],((int16_t*)&x[i])[6],((int16_t*)&x[i])[7]);
+   printf("\nY:");
+
+   for (i=0;i<32;i++)
+     printf("%d,%d,%d,%d,%d,%d,%d,%d,",((int16_t*)&y[i])[0],((int16_t *)&y[i])[1],((int16_t*)&y[i])[2],((int16_t *)&y[i])[3],((int16_t*)&y[i])[4],((int16_t *)&y[i])[5],((int16_t*)&y[i])[6],((int16_t *)&y[i])[7]);
+   printf("\n");
+*/
+
+  /*
+  for (i=0; i<512; i++) {
+    ((int16_t*)x)[i] = (int16_t)((taus()&0xffff))>>5;
+  }
+  
+  memset((void*)&y[0],0,256*4);
+  */
+  memset((void*)&x[0],0,256*sizeof(int32_t));
+  for (i=2;i<144;i++) {
+    if ((taus() & 1)==0)
+      ((int16_t*)x)[i] = 364;
+    else
+      ((int16_t*)x)[i] = -364;
+  }
+  for (i=(512-144);i<512;i++) {
+    if ((taus() & 1)==0)
+      ((int16_t*)x)[i] = 364;
+    else
+      ((int16_t*)x)[i] = -364;
+  }
+  reset_meas(&ts);
+
+  for (i=0; i<10000; i++) {
+    start_meas(&ts);
+    idft256((int16_t *)x,(int16_t *)y,1);
+    stop_meas(&ts);
+  }
+
+  printf("\n\n256-point(%f cycles)\n",(double)ts.diff/(double)ts.trials);
+  LOG_M("y256.m","y256",y,256,1,1);
+  LOG_M("x256.m","x256",x,256,1,1);
+
+  memset((void*)&x[0],0,512*sizeof(int32_t));
+  for (i=2;i<302;i++) {
+    if ((taus() & 1)==0)
+      ((int16_t*)x)[i] = 364;
+    else
+      ((int16_t*)x)[i] = -364;
+  }
+  for (i=(1024-300);i<1024;i++) {
+    if ((taus() & 1)==0)
+      ((int16_t*)x)[i] = 364;
+    else
+      ((int16_t*)x)[i] = -364;
+  }
+
+  reset_meas(&ts);
+  for (i=0; i<10000; i++) {
+    start_meas(&ts);
+    idft512((int16_t *)x,(int16_t *)y,1);
+    stop_meas(&ts);
+  }
+
+  printf("\n\n512-point(%f cycles)\n",(double)ts.diff/(double)ts.trials);
+  LOG_M("y512.m","y512",y,512,1,1);
+  LOG_M("x512.m","x512",x,512,1,1);
+  /*
+  printf("X: ");
+  for (i=0;i<64;i++)
+    printf("%d,%d,%d,%d,%d,%d,%d,%d,",((int16_t*)&x[i])[0],((int16_t *)&x[i])[1],((int16_t*)&x[i])[2],((int16_t *)&x[i])[3],((int16_t*)&x[i])[4],((int16_t*)&x[i])[5],((int16_t*)&x[i])[6],((int16_t*)&x[i])[7]);
+  printf("\nY:");
+
+  for (i=0;i<64;i++)
+    printf("%d,%d,%d,%d,%d,%d,%d,%d,",((int16_t*)&y[i])[0],((int16_t *)&y[i])[1],((int16_t*)&y[i])[2],((int16_t *)&y[i])[3],((int16_t*)&y[i])[4],((int16_t *)&y[i])[5],((int16_t*)&y[i])[6],((int16_t *)&y[i])[7]);
+  printf("\n");
+  */
+
+  memset((void*)x,0,1024*sizeof(int32_t));
+  for (i=2;i<602;i++) {
+    if ((taus() & 1)==0)
+      ((int16_t*)x)[i] = 364;
+    else
+      ((int16_t*)x)[i] = -364;
+  }
+  for (i=2*724;i<2048;i++) {
+    if ((taus() & 1)==0)
+      ((int16_t*)x)[i] = 364;
+    else
+      ((int16_t*)x)[i] = -364;
+  }
+  reset_meas(&ts);
+
+  for (i=0; i<10000; i++) {
+    start_meas(&ts);
+    idft1024((int16_t *)x,(int16_t *)y,1);
+    stop_meas(&ts);
+  }
+
+  printf("\n\n1024-point(%f cycles)\n",(double)ts.diff/(double)ts.trials);
+  LOG_M("y1024.m","y1024",y,1024,1,1);
+  LOG_M("x1024.m","x1024",x,1024,1,1);
+
+
+  memset((void*)x,0,1536*sizeof(int32_t));
+  for (i=2;i<1202;i++) {
+    if ((taus() & 1)==0)
+      ((int16_t*)x)[i] = 364;
+    else
+      ((int16_t*)x)[i] = -364;
+  }
+  for (i=2*(1536-600);i<3072;i++) {
+    if ((taus() & 1)==0)
+      ((int16_t*)x)[i] = 364;
+    else
+      ((int16_t*)x)[i] = -364;
+  }
+  reset_meas(&ts);
+
+  for (i=0; i<10000; i++) {
+    start_meas(&ts);
+    idft1536((int16_t *)x,(int16_t *)y,1);
+    stop_meas(&ts);
+  }
+
+  printf("\n\n1536-point(%f cycles)\n",(double)ts.diff/(double)ts.trials);
+  write_output("y1536.m","y1536",y,1536,1,1);
+  write_output("x1536.m","x1536",x,1536,1,1);
+
+
+  memset((void*)x,0,2048*sizeof(int32_t));
+  for (i=2;i<1202;i++) {
+    if ((taus() & 1)==0)
+      ((int16_t*)x)[i] = 364;
+    else
+      ((int16_t*)x)[i] = -364;
+  }
+  for (i=2*(2048-600);i<4096;i++) {
+    if ((taus() & 1)==0)
+      ((int16_t*)x)[i] = 364;
+    else
+      ((int16_t*)x)[i] = -364;
+  }
+  reset_meas(&ts);
+
+  for (i=0; i<10000; i++) {
+    start_meas(&ts);
+    dft2048((int16_t *)x,(int16_t *)y,1);
+    stop_meas(&ts);
+  }
+
+  printf("\n\n2048-point(%f cycles)\n",(double)ts.diff/(double)ts.trials);
+  LOG_M("y2048.m","y2048",y,2048,1,1);
+  LOG_M("x2048.m","x2048",x,2048,1,1);
+
+// NR 80 MHz, 217 PRB, 3/4 sampling
+  memset((void*)x, 0, 3072*sizeof(int32_t));
+  for (i=2;i<2506;i++) {
+    if ((taus() & 1)==0)
+      ((int16_t*)x)[i] = 364;
+    else
+      ((int16_t*)x)[i] = -364;
+  }
+  for (i=2*(3072-1252);i<6144;i++) {
+    if ((taus() & 1)==0)
+      ((int16_t*)x)[i] = 364;
+    else
+      ((int16_t*)x)[i] = -364;
+  }
+
+  reset_meas(&ts);
+
+  for (i=0; i<10000; i++) {
+    start_meas(&ts);
+    idft3072((int16_t *)x,(int16_t *)y,1);
+    stop_meas(&ts);
+  }
+
+  printf("\n\n3072-point(%f cycles)\n",(double)ts.diff/(double)ts.trials);
+  write_output("y3072.m","y3072",y,3072,1,1);
+  write_output("x3072.m","x3072",x,3072,1,1);
+
+
+  memset((void*)x,0,4096*sizeof(int32_t));
+  for (i=0;i<2400;i++) {
+    if ((taus() & 1)==0)
+      ((int16_t*)x)[i] = 364;
+    else
+      ((int16_t*)x)[i] = -364;
+  }
+  for (i=2*(4096-1200);i<8192;i++) {
+    if ((taus() & 1)==0)
+      ((int16_t*)x)[i] = 364;
+    else
+      ((int16_t*)x)[i] = -364;
+  }
+  reset_meas(&ts);
+
+  for (i=0; i<10000; i++) {
+    start_meas(&ts);
+    idft4096((int16_t *)x,(int16_t *)y,1);
+    stop_meas(&ts);
+  }
+
+  printf("\n\n4096-point(%f cycles)\n",(double)ts.diff/(double)ts.trials);
+  LOG_M("y4096.m","y4096",y,4096,1,1);
+  LOG_M("x4096.m","x4096",x,4096,1,1);
+
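+  // round-trip check: forward-transform the IDFT output into x2 so that
+  // x4096_2.m can be compared against the original x4096.m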
+  dft4096((int16_t *)y,(int16_t *)x2,1);
+  LOG_M("x4096_2.m","x4096_2",x2,4096,1,1);
+
+// NR 160 MHz, 434 PRB, 3/4 sampling
+  memset((void*)x, 0, 6144*sizeof(int32_t));
+  for (i=2;i<5010;i++) {
+    if ((taus() & 1)==0)
+      ((int16_t*)x)[i] = 364;
+    else
+      ((int16_t*)x)[i] = -364;
+  }
+  for (i=2*(6144-2504);i<12288;i++) {
+    if ((taus() & 1)==0)
+      ((int16_t*)x)[i] = 364;
+    else
+      ((int16_t*)x)[i] = -364;
+  }
+
+  reset_meas(&ts);
+
+  for (i=0; i<10000; i++) {
+    start_meas(&ts);
+    idft6144((int16_t *)x,(int16_t *)y,1);
+    stop_meas(&ts);
+  }
+
+  printf("\n\n6144-point(%f cycles)\n",(double)ts.diff/(double)ts.trials);
+  write_output("y6144.m","y6144",y,6144,1,1);
+  write_output("x6144.m","x6144",x,6144,1,1);
+
+  memset((void*)x,0,8192*sizeof(int32_t));
+  for (i=2;i<4802;i++) {
+    if ((taus() & 1)==0)
+      ((int16_t*)x)[i] = 364;
+    else
+      ((int16_t*)x)[i] = -364;
+  }
+  for (i=2*(8192-2400);i<16384;i++) {
+    if ((taus() & 1)==0)
+      ((int16_t*)x)[i] = 364;
+    else
+      ((int16_t*)x)[i] = -364;
+  }
+  reset_meas(&ts);
+  for (i=0; i<10000; i++) {
+    start_meas(&ts);
+    idft8192((int16_t *)x,(int16_t *)y,1);
+    stop_meas(&ts);
+  }
+
+  printf("\n\n8192-point(%f cycles)\n",(double)ts.diff/(double)ts.trials);
+  LOG_M("y8192.m","y8192",y,8192,1,1);
+  LOG_M("x8192.m","x8192",x,8192,1,1);
+
+  memset((void*)x,0,16384*sizeof(int32_t));
+  for (i=2;i<9602;i++) {
+    if ((taus() & 1)==0)
+      ((int16_t*)x)[i] = 364;
+    else
+      ((int16_t*)x)[i] = -364;
+  }
+  for (i=2*(16384-4800);i<32768;i++) {
+    if ((taus() & 1)==0)
+      ((int16_t*)x)[i] = 364;
+    else
+      ((int16_t*)x)[i] = -364;
+  }
+  reset_meas(&ts);
+  for (i=0; i<10000; i++) {
+    start_meas(&ts);
+    dft16384((int16_t *)x,(int16_t *)y,1);
+    stop_meas(&ts);
+  }
+
+  printf("\n\n16384-point(%f cycles)\n",(double)ts.diff/(double)ts.trials);
+  LOG_M("y16384.m","y16384",y,16384,1,1);
+  LOG_M("x16384.m","x16384",x,16384,1,1);
+
+  memset((void*)x,0,1536*sizeof(int32_t));
+  for (i=2;i<1202;i++) {
+    if ((taus() & 1)==0)
+      ((int16_t*)x)[i] = 364;
+    else
+      ((int16_t*)x)[i] = -364;
+  }
+  for (i=2*(1536-600);i<3072;i++) {
+    if ((taus() & 1)==0)
+      ((int16_t*)x)[i] = 364;
+    else
+      ((int16_t*)x)[i] = -364;
+  }
+  reset_meas(&ts);
+  for (i=0; i<10000; i++) {
+    start_meas(&ts);
+    idft1536((int16_t *)x,(int16_t *)y,1);
+    stop_meas(&ts);
+  }
+
+  printf("\n\n1536-point(%f cycles)\n",(double)ts.diff/(double)ts.trials);
+  LOG_M("y1536.m","y1536",y,1536,1,1);
+  LOG_M("x1536.m","x1536",x,1536,1,1);
+
+  memset((void*)x,0,3072*sizeof(int32_t));
+  for (i=2;i<1202;i++) {
+    if ((taus() & 1)==0)
+      ((int16_t*)x)[i] = 364;
+    else
+      ((int16_t*)x)[i] = -364;
+  }
+  for (i=2*(3072-600);i<3072;i++) {
+    if ((taus() & 1)==0)
+      ((int16_t*)x)[i] = 364;
+    else
+      ((int16_t*)x)[i] = -364;
+  }
+  reset_meas(&ts);
+  for (i=0; i<10000; i++) {
+    start_meas(&ts);
+    idft3072((int16_t *)x,(int16_t *)y,1);
+    stop_meas(&ts);
+  }
+
+  printf("\n\n3072-point(%f cycles)\n",(double)ts.diff/(double)ts.trials);
+  LOG_M("y3072.m","y3072",y,3072,1,1);
+  LOG_M("x3072.m","x3072",x,3072,1,1);
+
+  memset((void*)x,0,6144*sizeof(int32_t));
+  for (i=2;i<4802;i++) {
+    if ((taus() & 1)==0)
+      ((int16_t*)x)[i] = 364;
+    else
+      ((int16_t*)x)[i] = -364;
+  }
+  for (i=2*(6144-2400);i<12288;i++) {
+    if ((taus() & 1)==0)
+      ((int16_t*)x)[i] = 364;
+    else
+      ((int16_t*)x)[i] = -364;
+  }
+  reset_meas(&ts);
+  for (i=0; i<10000; i++) {
+    start_meas(&ts);
+    idft6144((int16_t *)x,(int16_t *)y,1);
+    stop_meas(&ts);
+  }
+
+  printf("\n\n6144-point(%f cycles)\n",(double)ts.diff/(double)ts.trials);
+  LOG_M("y6144.m","y6144",y,6144,1,1);
+  LOG_M("x6144.m","x6144",x,6144,1,1);
+
+  memset((void*)x,0,12288*sizeof(int32_t));
+  for (i=2;i<9602;i++) {
+    if ((taus() & 1)==0)
+      ((int16_t*)x)[i] = 364;
+    else
+      ((int16_t*)x)[i] = -364;
+  }
+  for (i=2*(12288-4800);i<24576;i++) {
+    if ((taus() & 1)==0)
+      ((int16_t*)x)[i] = 364;
+    else
+      ((int16_t*)x)[i] = -364;
+  }
+  reset_meas(&ts);
+  for (i=0; i<10000; i++) {
+    start_meas(&ts);
+    idft12288((int16_t *)x,(int16_t *)y,1);
+    stop_meas(&ts);
+  }
+
+  printf("\n\n12288-point(%f cycles)\n",(double)ts.diff/(double)ts.trials);
+  LOG_M("y12288.m","y12288",y,12288,1,1);
+  LOG_M("x12288.m","x12288",x,12288,1,1);
+
+  memset((void*)x,0,18432*sizeof(int32_t));
+  for (i=2;i<14402;i++) {
+    if ((taus() & 1)==0)
+      ((int16_t*)x)[i] = 364;
+    else
+      ((int16_t*)x)[i] = -364;
+  }
+  for (i=2*(18432-7200);i<36864;i++) {
+    if ((taus() & 1)==0)
+      ((int16_t*)x)[i] = 364;
+    else
+      ((int16_t*)x)[i] = -364;
+  }
+  reset_meas(&ts);
+  for (i=0; i<10000; i++) {
+    start_meas(&ts);
+    idft18432((int16_t *)x,(int16_t *)y,1);
+    stop_meas(&ts);
+  }
+
+  printf("\n\n18432-point(%f cycles)\n",(double)ts.diff/(double)ts.trials);
+  LOG_M("y18432.m","y18432",y,18432,1,1);
+  LOG_M("x18432.m","x18432",x,18432,1,1);
+
+  memset((void*)x,0,24576*sizeof(int32_t));
+  for (i=2;i<19202;i++) {
+    if ((taus() & 1)==0)
+      ((int16_t*)x)[i] = 364;
+    else
+      ((int16_t*)x)[i] = -364;
+  }
+  for (i=2*(24576-19200);i<49152;i++) {
+    if ((taus() & 1)==0)
+      ((int16_t*)x)[i] = 364;
+    else
+      ((int16_t*)x)[i] = -364;
+  }
+  reset_meas(&ts);
+  for (i=0; i<10000; i++) {
+    start_meas(&ts);
+    idft24576((int16_t *)x,(int16_t *)y,1);
+    stop_meas(&ts);
+  }
+
+  printf("\n\n24576-point(%f cycles)\n",(double)ts.diff/(double)ts.trials);
+  LOG_M("y24576.m","y24576",y,24576,1,1);
+  LOG_M("x24576.m","x24576",x,24576,1,1);
+
+
+  memset((void*)x,0,2*18432*sizeof(int32_t));
+  for (i=2;i<(2*14402);i++) {
+    if ((taus() & 1)==0)
+      ((int16_t*)x)[i] = 364;
+    else
+      ((int16_t*)x)[i] = -364;
+  }
+  for (i=2*(36864-14400);i<(36864*2);i++) {
+    if ((taus() & 1)==0)
+      ((int16_t*)x)[i] = 364;
+    else
+      ((int16_t*)x)[i] = -364;
+  }
+  reset_meas(&ts);
+  for (i=0; i<10000; i++) {
+    start_meas(&ts);
+    dft36864((int16_t *)x,(int16_t *)y,1);
+    stop_meas(&ts);
+  }
+
+  printf("\n\n36864-point(%f cycles)\n",(double)ts.diff/(double)ts.trials);
+  LOG_M("y36864.m","y36864",y,36864,1,1);
+  LOG_M("x36864.m","x36864",x,36864,1,1);
+
+
+  memset((void*)x,0,49152*sizeof(int32_t));
+  for (i=2;i<28402;i++) {
+    if ((taus() & 1)==0)
+      ((int16_t*)x)[i] = 364;
+    else
+      ((int16_t*)x)[i] = -364;
+  } 
+  for (i=2*(49152-14400);i<98304;i++) {
+    if ((taus() & 1)==0)
+      ((int16_t*)x)[i] = 364;
+    else
+      ((int16_t*)x)[i] = -364;
+  }
+  reset_meas(&ts);
+  for (i=0; i<10000; i++) {
+    start_meas(&ts);
+    idft49152((int16_t *)x,(int16_t *)y,1);
+    stop_meas(&ts);
+  }
+
+  printf("\n\n49152-point(%f cycles)\n",(double)ts.diff/(double)ts.trials);
+  LOG_M("y49152.m","y49152",y,49152,1,1);
+  LOG_M("x49152.m","x49152",x,49152,1,1);
+  /*
+  int dftsizes[33]={24,36,48,60,72,96,108,120,144,180,192,216,240,288,300,324,360,384,432,480,540,576,600,648,720,768,864,900,960,972,1080,1152,1200};
+  void (*dftfunc[33])(int16_t *x,int16_t *y,uint8_t scale) = {dft24,dft36,dft48,dft60,dft72,dft96,dft108,dft120,dft144,dft180,dft192,dft216,dft240,dft288,dft300,dft324,dft360,dft384,dft432,dft480,dft540,dft576,dft600,dft648,dft720,dft768,dft864,dft900,dft960,dft972,dft1080,dft1152,dft1200};
+  for (int n=0;n<33;n++) {
+    // 4xN-point DFT
+    memset((void*)x,0,dftsizes[n]*8*sizeof(int16_t));
+    for (i=0;i<dftsizes[n]*8;i+=8) {
+      if ((taus() & 1)==0)
+	((int16_t*)x)[i]   = 364;
+      else
+	((int16_t*)x)[i]   = -364;
+      if ((taus() & 1)==0)
+	((int16_t*)x)[i+1] = 364;
+      else
+	((int16_t*)x)[i+1] = -364;
+    }
+    
+    reset_meas(&ts);
+    for (i=0; i<10000; i++) {
+      start_meas(&ts);
+      (dftfunc[n])((int16_t *)x,(int16_t *)y,1);
+      stop_meas(&ts);
+    }
+    
+    printf("\n\n4x%d-point(%f cycles)\n",dftsizes[n],(double)ts.diff/(double)ts.trials);
+    char ystr[16],xstr[16],ystr2[16],xstr2[16]; // room for names like "y1200.m"
+    sprintf(ystr,"y%d.m",dftsizes[n]);
+    sprintf(xstr,"x%d.m",dftsizes[n]);
+    sprintf(ystr2,"y%d",dftsizes[n]);
+    sprintf(xstr2,"x%d",dftsizes[n]);
+    LOG_M(ystr,ystr2,y,dftsizes[n]*4,1,1);
+    LOG_M(xstr,xstr2,x,dftsizes[n]*4,1,1);
+  }
+  */
+
+  return(0);
+}
+
+
+#endif
+#endif
diff --git a/openair1/PHY/TOOLS/signal_energy.c b/openair1/PHY/TOOLS/signal_energy.c
index 9eeb3d92a0424d1c932096129756a585699d2182..3f2b9a456d85e918485f7e53c7bc9f7400d199bc 100644
--- a/openair1/PHY/TOOLS/signal_energy.c
+++ b/openair1/PHY/TOOLS/signal_energy.c
@@ -31,43 +31,6 @@
 //#define shift_DC 0
 //#define SHRT_MIN -32768
 
-#if defined(__x86_64__) || defined(__i386__)
-#ifdef LOCALIZATION
-int32_t subcarrier_energy(int32_t *input,uint32_t length, int32_t *subcarrier_energy, uint16_t rx_power_correction)
-{
-
-  int32_t i, subcarrier_pwr;
-  register __m64 mm0,mm1, subcarrier;
-  subcarrier = _mm_setzero_si64();//_m_pxor(subcarrier,subcarrier);
-  __m64 *in = (__m64 *)input;
-
-#ifdef MAIN
-  int16_t *printb;
-#endif
-
-  mm0 = _mm_setzero_si64();//pxor(mm0,mm0);
-
-  for (i=0; i<length>>1; i++) {
-
-    mm1 = in[i];
-    mm1 = _m_pmaddwd(mm1,mm1);
-    mm1 = _m_psradi(mm1,shift);// shift any 32 bits blocs of the word by the value shift
-    subcarrier = mm1;
-    subcarrier = _m_psrlqi(subcarrier,32);
-    subcarrier = _m_paddd(subcarrier,mm1);
-    subcarrier_pwr = _m_to_int(subcarrier);
-    subcarrier_pwr<<=shift;
-    subcarrier_pwr = (unsigned short) dB_fixed(subcarrier_pwr);
-    subcarrier_energy[i] = subcarrier_pwr*rx_power_correction;
-  }
-
-  _mm_empty();
-  _m_empty();
-
-  return i;
-}
-#endif
-
 //-----------------------------------------------------------------
 // Average Power calculation with DC removing
 //-----------------------------------------------------------------
@@ -75,30 +38,30 @@ int32_t signal_energy(int32_t *input,uint32_t length)
 {
   uint32_t i;
   int32_t temp;
-  __m128i in, in_clp, i16_min, coe1;
-  __m128 num0, num1, num2, num3, recp1;
+  simde__m128i in, in_clp, i16_min, coe1;
+  simde__m128 num0, num1, num2, num3, recp1;
 
   //init
-  num0 = _mm_setzero_ps();
-  num1 = _mm_setzero_ps();
-  i16_min = _mm_set1_epi16(SHRT_MIN);
-  coe1 = _mm_set1_epi16(1);
-  recp1 = _mm_rcp_ps(_mm_cvtepi32_ps(_mm_set1_epi32(length)));
+  num0 = simde_mm_setzero_ps();
+  num1 = simde_mm_setzero_ps();
+  i16_min = simde_mm_set1_epi16(SHRT_MIN);
+  coe1 = simde_mm_set1_epi16(1);
+  recp1 = simde_mm_rcp_ps(simde_mm_cvtepi32_ps(simde_mm_set1_epi32(length)));
 
   //Acc
   for (i = 0; i < (length >> 2); i++) {
-    in = _mm_loadu_si128((__m128i *)input);
-    in_clp = _mm_subs_epi16(in, _mm_cmpeq_epi16(in, i16_min));//if in=SHRT_MIN in+1, else in
-    num0 = _mm_add_ps(num0, _mm_cvtepi32_ps(_mm_madd_epi16(in_clp, in_clp)));
-    num1 = _mm_add_ps(num1, _mm_cvtepi32_ps(_mm_madd_epi16(in, coe1)));//DC
+    in = simde_mm_loadu_si128((simde__m128i *)input);
+    in_clp = simde_mm_subs_epi16(in, simde_mm_cmpeq_epi16(in, i16_min));//if in=SHRT_MIN in+1, else in
+    num0 = simde_mm_add_ps(num0, simde_mm_cvtepi32_ps(simde_mm_madd_epi16(in_clp, in_clp)));
+    num1 = simde_mm_add_ps(num1, simde_mm_cvtepi32_ps(simde_mm_madd_epi16(in, coe1)));//DC
     input += 4;
   }
   //Ave
-  num2 = _mm_dp_ps(num0, recp1, 0xFF);//AC power
-  num3 = _mm_dp_ps(num1, recp1, 0xFF);//DC
-  num3 = _mm_mul_ps(num3, num3);      //DC power
+  num2 = simde_mm_dp_ps(num0, recp1, 0xFF);//AC power
+  num3 = simde_mm_dp_ps(num1, recp1, 0xFF);//DC
+  num3 = simde_mm_mul_ps(num3, num3);      //DC power
   //remove DC
-  temp = _mm_cvtsi128_si32(_mm_cvttps_epi32(_mm_sub_ps(num2, num3)));
+  temp = simde_mm_cvtsi128_si32(simde_mm_cvttps_epi32(simde_mm_sub_ps(num2, num3)));
 
   return temp;
 }
@@ -108,43 +71,43 @@ int32_t signal_energy_amp_shift(int32_t *input,uint32_t length)
 
   int32_t i;
   int32_t temp,temp2;
-  register __m64 mm0,mm1,mm2,mm3;
-  __m64 *in = (__m64 *)input;
+  register simde__m64 mm0,mm1,mm2,mm3;
+  simde__m64 *in = (simde__m64 *)input;
 
-  mm0 = _mm_setzero_si64();
-  mm3 = _mm_setzero_si64();
+  mm0 = simde_mm_setzero_si64();
+  mm3 = simde_mm_setzero_si64();
 
   for (i=0; i<length>>1; i++) {
 
     mm1 = in[i];
     mm2 = mm1;
-    mm1 = _m_pmaddwd(mm1,mm1);
-    mm1 = _m_psradi(mm1,AMP_SHIFT);// shift any 32 bits blocs of the word by the value shift_p9
-    mm0 = _m_paddd(mm0,mm1);// add the two 64 bits words 4 bytes by 4 bytes
-    mm3 = _m_paddw(mm3,mm2);// add the two 64 bits words 2 bytes by 2 bytes
+    mm1 = simde_m_pmaddwd(mm1,mm1);
+    mm1 = simde_m_psradi(mm1,AMP_SHIFT);// shift any 32 bits blocs of the word by the value shift_p9
+    mm0 = simde_m_paddd(mm0,mm1);// add the two 64 bits words 4 bytes by 4 bytes
+    mm3 = simde_m_paddw(mm3,mm2);// add the two 64 bits words 2 bytes by 2 bytes
   }
 
   mm1 = mm0;
-  mm0 = _m_psrlqi(mm0,32);
-  mm0 = _m_paddd(mm0,mm1);
-  temp = _m_to_int(mm0);
+  mm0 = simde_m_psrlqi(mm0,32);
+  mm0 = simde_m_paddd(mm0,mm1);
+  temp = simde_m_to_int(mm0);
   temp/=length; // this is the average of x^2
 
 
   // now remove the DC component
 
 
-  mm2 = _m_psrlqi(mm3,32);
-  mm2 = _m_paddw(mm2,mm3);
-  mm2 = _m_pmaddwd(mm2,mm2);
-  mm2 = _m_psradi(mm2,AMP_SHIFT); // fixed point representation of elements
-  temp2 = _m_to_int(mm2);
+  mm2 = simde_m_psrlqi(mm3,32);
+  mm2 = simde_m_paddw(mm2,mm3);
+  mm2 = simde_m_pmaddwd(mm2,mm2);
+  mm2 = simde_m_psradi(mm2,AMP_SHIFT); // fixed point representation of elements
+  temp2 = simde_m_to_int(mm2);
   temp2/=(length*length);
 
   temp -= temp2;
 
-  _mm_empty();
-  _m_empty();
+  simde_mm_empty();
+  simde_m_empty();
 
   return((temp>0)?temp:1);
 }
@@ -154,15 +117,15 @@ int32_t signal_energy_nodc(int32_t *input,uint32_t length)
   int32_t i;
   int32_t temp;
 
-  __m128i in;
-  __m128  mm0;
+  simde__m128i in;
+  simde__m128  mm0;
 
 //init
-  mm0 = _mm_setzero_ps();
+  mm0 = simde_mm_setzero_ps();
 //Acc
   for (i=0; i<(length>>2); i++) {
-    in = _mm_loadu_si128((__m128i *)input);
-    mm0 = _mm_add_ps(mm0,_mm_cvtepi32_ps(_mm_madd_epi16(in,in)));
+    in = simde_mm_loadu_si128((simde__m128i *)input);
+    mm0 = simde_mm_add_ps(mm0,simde_mm_cvtepi32_ps(simde_mm_madd_epi16(in,in)));
     input += 4;
   }
   //Ave
@@ -173,82 +136,6 @@ int32_t signal_energy_nodc(int32_t *input,uint32_t length)
 
   return temp;
 }
-
-#elif defined(__arm__) || defined(__aarch64__)
-
-int32_t signal_energy(int32_t *input,uint32_t length)
-{
-
-  int32_t i;
-  int32_t temp,temp2;
-  register int32x4_t tmpE,tmpDC;
-  int32x2_t tmpE2,tmpDC2;
-  int16x4_t *in = (int16x4_t *)input;
-
-  tmpE  = vdupq_n_s32(0);
-  tmpDC = vdupq_n_s32(0);
-
-  for (i=0; i<length>>1; i++) {
-
-    tmpE = vqaddq_s32(tmpE,vshrq_n_s32(vmull_s16(*in,*in),shift));
-    //tmpDC = vaddw_s16(tmpDC,vshr_n_s16(*in++,shift_DC));
-
-  }
-
-  tmpE2 = vpadd_s32(vget_low_s32(tmpE),vget_high_s32(tmpE));
-
-  temp=(vget_lane_s32(tmpE2,0)+vget_lane_s32(tmpE2,1))/length;
-  temp<<=shift;   // this is the average of x^2
-
-  // now remove the DC component
-
-
-  tmpDC2 = vpadd_s32(vget_low_s32(tmpDC),vget_high_s32(tmpDC));
-
-  temp2=(vget_lane_s32(tmpDC2,0)+vget_lane_s32(tmpDC2,1))/(length*length);
-
-  //  temp2<<=(2*shift_DC);
-#ifdef MAIN
-  printf("E x^2 = %d\n",temp);
-#endif
-  temp -= temp2;
-#ifdef MAIN
-  printf("(E x)^2=%d\n",temp2);
-#endif
-
-  return((temp>0)?temp:1);
-}
-
-int32_t signal_energy_nodc(int32_t *input,uint32_t length)
-{
-
-  int32_t i;
-  int32_t temp;
-  register int32x4_t tmpE;
-  int32x2_t tmpE2;
-  int16x4_t *in = (int16x4_t *)input;
-
-  tmpE = vdupq_n_s32(0);
-
-  for (i=0; i<length>>1; i++) {
-
-    tmpE = vqaddq_s32(tmpE,vshrq_n_s32(vmull_s16(*in,*in),shift));
-
-  }
-
-  tmpE2 = vpadd_s32(vget_low_s32(tmpE),vget_high_s32(tmpE));
-
-  temp=(vget_lane_s32(tmpE2,0)+vget_lane_s32(tmpE2,1))/length;
-  temp<<=shift;   // this is the average of x^2
-
-#ifdef MAIN
-  printf("E x^2 = %d\n",temp);
-#endif
-
-  return((temp>0)?temp:1);
-}
-
-#endif
 double signal_energy_fp(double *s_re[2],double *s_im[2],uint32_t nb_antennas,uint32_t length,uint32_t offset)
 {
 
@@ -257,7 +144,7 @@ double signal_energy_fp(double *s_re[2],double *s_im[2],uint32_t nb_antennas,uin
 
   for (i=0; i<length; i++) {
     for (aa=0; aa<nb_antennas; aa++) {
       V= V + (s_re[aa][i+offset]*s_re[aa][i+offset]) + (s_im[aa][i+offset]*s_im[aa][i+offset]);
     }
   }
 
@@ -271,14 +158,13 @@ double signal_energy_fp2(struct complexd *s,uint32_t length)
   double V=0.0;
 
   for (i=0; i<length; i++) {
-    //    printf("signal_energy_fp2 : %f,%f => %f\n",s[i].x,s[i].y,V);
-    //      V= V + (s[i].y*s[i].x) + (s[i].y*s[i].x);
+		          //    printf("signal_energy_fp2 : %f,%f => %f\n",s[i].x,s[i].y,V);
+		  //        //      V= V + (s[i].y*s[i].x) + (s[i].y*s[i].x);
     V= V + (s[i].r*s[i].r) + (s[i].i*s[i].i);
   }
-
   return(V/length);
 }
 
 #ifdef MAIN
 #define LENGTH 256
 #include <math.h>
@@ -324,24 +210,24 @@ int32_t signal_power(int32_t *input, uint32_t length)
   uint32_t i;
   int32_t temp;
 
-  __m128i in, in_clp, i16_min;
-  __m128  num0, num1;
-  __m128  recp1;
+  simde__m128i in, in_clp, i16_min;
+  simde__m128  num0, num1;
+  simde__m128  recp1;
 
   //init
-  num0 = _mm_setzero_ps();
-  i16_min = _mm_set1_epi16(SHRT_MIN);
-  recp1 = _mm_rcp_ps(_mm_cvtepi32_ps(_mm_set1_epi32(length)));
+  num0 = simde_mm_setzero_ps();
+  i16_min = simde_mm_set1_epi16(SHRT_MIN);
+  recp1 = simde_mm_rcp_ps(simde_mm_cvtepi32_ps(simde_mm_set1_epi32(length)));
   //Acc
   for (i = 0; i < (length >> 2); i++) {
-    in = _mm_loadu_si128((__m128i *)input);
-    in_clp = _mm_subs_epi16(in, _mm_cmpeq_epi16(in, i16_min));//if in=SHRT_MIN in+1, else in
-    num0 = _mm_add_ps(num0, _mm_cvtepi32_ps(_mm_madd_epi16(in_clp, in_clp)));
+    in = simde_mm_loadu_si128((simde__m128i *)input);
+    in_clp = simde_mm_subs_epi16(in, simde_mm_cmpeq_epi16(in, i16_min));//if in=SHRT_MIN in+1, else in
+    num0 = simde_mm_add_ps(num0, simde_mm_cvtepi32_ps(simde_mm_madd_epi16(in_clp, in_clp)));
     input += 4;
   }
   //Ave
-  num1 = _mm_dp_ps(num0, recp1, 0xFF);
-  temp = _mm_cvtsi128_si32(_mm_cvttps_epi32(num1));
+  num1 = simde_mm_dp_ps(num0, recp1, 0xFF);
+  temp = simde_mm_cvtsi128_si32(simde_mm_cvttps_epi32(num1));
 
   return temp;
 }
@@ -352,30 +238,30 @@ int32_t interference_power(int32_t *input, uint32_t length)
   uint32_t i;
   int32_t temp;
 
-  __m128i in, in_clp, i16_min;
-  __m128i num0, num1, num2, num3;
-  __m128  num4, num5, num6;
-  __m128  recp1;
+  simde__m128i in, in_clp, i16_min;
+  simde__m128i num0, num1, num2, num3;
+  simde__m128  num4, num5, num6;
+  simde__m128  recp1;
 
   //init
-  i16_min = _mm_set1_epi16(SHRT_MIN);
-  num5 = _mm_setzero_ps();
-  recp1 = _mm_rcp_ps(_mm_cvtepi32_ps(_mm_set1_epi32(length>>2)));// 1/n, n= length/4
+  i16_min = simde_mm_set1_epi16(SHRT_MIN);
+  num5 = simde_mm_setzero_ps();
+  recp1 = simde_mm_rcp_ps(simde_mm_cvtepi32_ps(simde_mm_set1_epi32(length>>2)));// 1/n, n= length/4
   //Acc
   for (i = 0; i < (length >> 2); i++) {
-    in = _mm_loadu_si128((__m128i *)input);
-    in_clp = _mm_subs_epi16(in, _mm_cmpeq_epi16(in, i16_min));           //if in=SHRT_MIN, in+1, else in
-    num0 = _mm_cvtepi16_epi32(in_clp);                                   //lower 2 complex [0], [1]
-    num1 = _mm_cvtepi16_epi32(_mm_shuffle_epi32(in_clp, 0x4E));          //upper 2 complex [2], [3]
-    num2 = _mm_srai_epi32(_mm_add_epi32(num0, num1), 0x01);              //average A=complex( [0] + [2] ) / 2, B=complex( [1] + [3] ) / 2 
-    num3 = _mm_sub_epi32(num2, _mm_shuffle_epi32(num2, 0x4E));           //complexA-complexB, B-A
-    num4 = _mm_dp_ps(_mm_cvtepi32_ps(num3), _mm_cvtepi32_ps(num3), 0x3F);//C = num3 lower complex power, C, C, C
-    num5 = _mm_add_ps(num5, num4);                                       //Acc Cn, Cn, Cn, Cn, 
+    in = simde_mm_loadu_si128((simde__m128i *)input);
+    in_clp = simde_mm_subs_epi16(in, simde_mm_cmpeq_epi16(in, i16_min));           //if in=SHRT_MIN, in+1, else in
+    num0 = simde_mm_cvtepi16_epi32(in_clp);                                   //lower 2 complex [0], [1]
+    num1 = simde_mm_cvtepi16_epi32(simde_mm_shuffle_epi32(in_clp, 0x4E));          //upper 2 complex [2], [3]
+    num2 = simde_mm_srai_epi32(simde_mm_add_epi32(num0, num1), 0x01);              //average A=complex( [0] + [2] ) / 2, B=complex( [1] + [3] ) / 2 
+    num3 = simde_mm_sub_epi32(num2, simde_mm_shuffle_epi32(num2, 0x4E));           //complexA-complexB, B-A
+    num4 = simde_mm_dp_ps(simde_mm_cvtepi32_ps(num3), simde_mm_cvtepi32_ps(num3), 0x3F);//C = num3 lower complex power, C, C, C
+    num5 = simde_mm_add_ps(num5, num4);                                       //Acc Cn, Cn, Cn, Cn, 
     input += 4;
   }
   //Interference ve
-  num6 = _mm_mul_ps(num5, recp1); //Cn / n
-  temp = _mm_cvtsi128_si32(_mm_cvttps_epi32(num6));
+  num6 = simde_mm_mul_ps(num5, recp1); //Cn / n
+  temp = simde_mm_cvtsi128_si32(simde_mm_cvttps_epi32(num6));
 
   return temp;
 }
diff --git a/openair1/PHY/TOOLS/simde_operations.c b/openair1/PHY/TOOLS/simde_operations.c
index a61956d87b2ca03d5bb0fcf06217d68e0c6ef895..03a611e08dd69f212e0fe34e9a5f3385be83f028 100644
--- a/openair1/PHY/TOOLS/simde_operations.c
+++ b/openair1/PHY/TOOLS/simde_operations.c
@@ -21,7 +21,7 @@
 
 #include <simde/x86/avx2.h>
 
-void simde_mm128_separate_real_imag_parts(__m128i *out_re, __m128i *out_im, __m128i in0, __m128i in1)
+void simde_mm128_separate_real_imag_parts(simde__m128i *out_re, simde__m128i *out_im, simde__m128i in0, simde__m128i in1)
 {
   // Put in0 = [Re(0,1) Re(2,3) Im(0,1) Im(2,3)]
   in0 = simde_mm_shufflelo_epi16(in0, 0xd8); //_MM_SHUFFLE(0,2,1,3));
@@ -37,7 +37,7 @@ void simde_mm128_separate_real_imag_parts(__m128i *out_re, __m128i *out_im, __m1
   *out_im = simde_mm_unpackhi_epi64(in0, in1);
 }
 
-void simde_mm256_separate_real_imag_parts(__m256i *out_re, __m256i *out_im, __m256i in0, __m256i in1)
+void simde_mm256_separate_real_imag_parts(simde__m256i *out_re, simde__m256i *out_im, simde__m256i in0, simde__m256i in1)
 {
   // Put in0 = [Re(0,1,2,3)   Im(0,1,2,3)   Re(4,5,6,7)     Im(4,5,6,7)]
   in0 = simde_mm256_shufflelo_epi16(in0, 0xd8); //_MM_SHUFFLE(0,2,1,3));
@@ -50,10 +50,10 @@ void simde_mm256_separate_real_imag_parts(__m256i *out_re, __m256i *out_im, __m2
   in1 = simde_mm256_shuffle_epi32(in1, 0xd8);   //_MM_SHUFFLE(0,2,1,3));
 
   // Put tmp0 =[Re(0,1,2,3) Re(8,9,10,11) Re(4,5,6,7) Re(12,13,14,15)]
-  __m256i tmp0 = simde_mm256_unpacklo_epi64(in0, in1);
+  simde__m256i tmp0 = simde_mm256_unpacklo_epi64(in0, in1);
 
   // Put tmp1 = [Im(0,1,2,3) Im(8,9,10,11) Im(4,5,6,7) Im(12,13,14,15)]
-  __m256i tmp1 = simde_mm256_unpackhi_epi64(in0, in1);
+  simde__m256i tmp1 = simde_mm256_unpackhi_epi64(in0, in1);
 
   *out_re = simde_mm256_permute4x64_epi64(tmp0, 0xd8);
   *out_im = simde_mm256_permute4x64_epi64(tmp1, 0xd8);
diff --git a/openair1/PHY/TOOLS/tools_defs.h b/openair1/PHY/TOOLS/tools_defs.h
index 5300ebd622fe1ef0413b5a7b5474bcc576f3eb50..4f4af5f7cb4efab69850e9a04b217061c2b86e4b 100644
--- a/openair1/PHY/TOOLS/tools_defs.h
+++ b/openair1/PHY/TOOLS/tools_defs.h
@@ -35,28 +35,18 @@
 #include "PHY/sse_intrin.h"
 #include "common/utils/assertions.h"
 #include "common/utils/utils.h"
-
-#if defined(__x86_64__) || defined(__i386__)
-#define simd_q15_t __m128i
-#define simdshort_q15_t __m64
-#define shiftright_int16(a,shift) _mm_srai_epi16(a,shift)
-#define set1_int16(a) _mm_set1_epi16(a)
-#define mulhi_int16(a,b) _mm_mulhrs_epi16 (a,b)
-#define mulhi_s1_int16(a,b) _mm_slli_epi16(_mm_mulhi_epi16(a,b),2)
-#define adds_int16(a,b) _mm_adds_epi16(a,b)
-#define mullo_int16(a,b) _mm_mullo_epi16(a,b)
-#elif defined(__arm__) || defined(__aarch64__)
-#define simd_q15_t int16x8_t
-#define simdshort_q15_t int16x4_t
-#define shiftright_int16(a,shift) vshrq_n_s16(a,shift)
-#define set1_int16(a) vdupq_n_s16(a)
-#define mulhi_int16(a,b) vqdmulhq_s16(a,b)
-#define mulhi_s1_int16(a,b) vshlq_n_s16(vqdmulhq_s16(a,b),1)
-#define adds_int16(a,b) vqaddq_s16(a,b)
-#define mullo_int16(a,b) vmulq_s16(a,b)
-#define _mm_empty()
-#define _m_empty()
-#endif
+#include <simde/simde-common.h>
+#include <simde/x86/sse.h>
+#include <simde/x86/avx2.h>
+
+#define simd_q15_t simde__m128i
+#define simdshort_q15_t simde__m64
+#define shiftright_int16(a,shift) simde_mm_srai_epi16(a,shift)
+#define set1_int16(a) simde_mm_set1_epi16(a)
+#define mulhi_int16(a,b) simde_mm_mulhrs_epi16 (a,b)
+#define mulhi_s1_int16(a,b) simde_mm_slli_epi16(simde_mm_mulhi_epi16(a,b),2)
+#define adds_int16(a,b) simde_mm_adds_epi16(a,b)
+#define mullo_int16(a,b) simde_mm_mullo_epi16(a,b)
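+// Single SIMDE-based set of q15 helpers: compiles to native SSE on x86 and is
+// translated to NEON by SIMDE on Arm, so no per-architecture variants are needed.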
 
 #ifdef __cplusplus
 extern "C" {
@@ -251,10 +241,10 @@ extern "C" {
   //   y.i += (x * alpha.i) >> 14
   // See regular C implementation at the end
   static __attribute__((always_inline)) inline void c16multaddVectRealComplex(const int16_t *x,
-                                                                       const c16_t *alpha,
-                                                                       c16_t *y,
-                                                                       const int N) {
-#if defined(__x86_64__) || defined(__i386__)
+                                                                              const c16_t *alpha,
+                                                                              c16_t *y,
+                                                                              const int N)
+  {
     // Default implementation for x86
     const int8_t makePairs[32] __attribute__((aligned(32)))={
       0,1,0+16,1+16,
@@ -266,66 +256,22 @@ extern "C" {
       12,13,12+16,13+16,
       14,15,14+16,15+16};
     
-    __m256i alpha256= simde_mm256_set1_epi32(*(int32_t *)alpha);
-    __m128i *x128=(__m128i *)x;
-    __m128i *y128=(__m128i *)y;
+    simde__m256i alpha256= simde_mm256_set1_epi32(*(int32_t *)alpha);
+    simde__m128i *x128=(simde__m128i *)x;
+    simde__m128i *y128=(simde__m128i *)y;
     AssertFatal(N%8==0,"Not implemented\n");
     for (int i=0; i<N/8; i++) {
-      const __m256i xduplicate=simde_mm256_broadcastsi128_si256(*x128);
-      const __m256i x_duplicate_ordered=simde_mm256_shuffle_epi8(xduplicate,*(__m256i*)makePairs);
-      const __m256i x_mul_alpha_shift15 =simde_mm256_mulhrs_epi16(alpha256, x_duplicate_ordered);
+      const simde__m256i xduplicate=simde_mm256_broadcastsi128_si256(*x128);
+      const simde__m256i x_duplicate_ordered=simde_mm256_shuffle_epi8(xduplicate,*(simde__m256i*)makePairs);
+      const simde__m256i x_mul_alpha_shift15 =simde_mm256_mulhrs_epi16(alpha256, x_duplicate_ordered);
       // Existing multiplication normalization is weird, constant table in alpha need to be doubled
-      const __m256i x_mul_alpha_x2= simde_mm256_adds_epi16(x_mul_alpha_shift15,x_mul_alpha_shift15);
-      *y128= _mm_adds_epi16(simde_mm256_extracti128_si256(x_mul_alpha_x2,0),*y128);
+      const simde__m256i x_mul_alpha_x2= simde_mm256_adds_epi16(x_mul_alpha_shift15,x_mul_alpha_shift15);
+      *y128= simde_mm_adds_epi16(simde_mm256_extracti128_si256(x_mul_alpha_x2,0),*y128);
       y128++;
-      *y128= _mm_adds_epi16(simde_mm256_extracti128_si256(x_mul_alpha_x2,1),*y128);
+      *y128= simde_mm_adds_epi16(simde_mm256_extracti128_si256(x_mul_alpha_x2,1),*y128);
       y128++;
       x128++;
-    } 
-    
-#elif defined(__arm__) || defined(__aarch64__)
-    // Default implementation for ARM
-    uint32_t i;
-
-    // do 8 multiplications at a time
-    simd_q15_t alpha_r_128,alpha_i_128,yr,yi,*x_128=(simd_q15_t*)x,*y_128=(simd_q15_t*)y;
-    int j;
-
-    //  printf("alpha = %d,%d\n",alpha[0],alpha[1]);
-    alpha_r_128 = set1_int16(alpha->r);
-    alpha_i_128 = set1_int16(alpha->i);
-
-    j=0;
-
-    for (i=0; i<N>>3; i++) {
-
-      yr     = mulhi_s1_int16(alpha_r_128,x_128[i]);
-      yi     = mulhi_s1_int16(alpha_i_128,x_128[i]);
-      int16x8x2_t yint;
-      yint = vzipq_s16(yr,yi);
-      y_128[j]   = adds_int16(y_128[j],yint.val[0]);
-      j++;
-      y_128[j]   = adds_int16(y_128[j],yint.val[1]);
-
-      j++;
     }
-#else
-    // Almost dead code (BMC)
-    for (int i=0; i<N; i++) {
-      int tmpr=y[i].r+((x[i]*alpha->r)>>14);
-      if (tmpr>INT16_MAX)
-        tmpr=INT16_MAX;
-      if (tmpr<INT16_MIN)
-        tmpr=INT16_MIN;
-      int tmpi=y[i].i+((x[i]*alpha->i)>>14);
-      if (tmpi>INT16_MAX)
-        tmpi=INT16_MAX;
-      if (tmpi<INT16_MIN)
-        tmpi=INT16_MIN;
-      y[i].r=(int16_t)tmpr;
-      y[i].i=(int16_t)tmpi;
-    }
-#endif
   }
 //cmult_sv.h
 
@@ -340,10 +286,10 @@ The function implemented is : \f$\mathbf{y} = y + \alpha\mathbf{x}\f$
 */
   void multadd_real_vector_complex_scalar(const int16_t *x, const int16_t *alpha, int16_t *y, uint32_t N);
 
-  __attribute__((always_inline)) inline void multadd_real_four_symbols_vector_complex_scalar(const int16_t *x,
-                                                                                             c16_t *alpha,
-                                                                                             c16_t *y)
-  {
+static __attribute__((always_inline)) inline void multadd_real_four_symbols_vector_complex_scalar(const int16_t *x,
+                                                                                           c16_t *alpha,
+                                                                                           c16_t *y)
+{
     // do 8 multiplications at a time
     const simd_q15_t alpha_r_128 = set1_int16(alpha->r);
     const simd_q15_t alpha_i_128 = set1_int16(alpha->i);
@@ -352,12 +298,12 @@ The function implemented is : \f$\mathbf{y} = y + \alpha\mathbf{x}\f$
     const simd_q15_t yr = mulhi_s1_int16(alpha_r_128, *x_128);
     const simd_q15_t yi = mulhi_s1_int16(alpha_i_128, *x_128);
 
-    simd_q15_t y_128 = _mm_loadu_si128((simd_q15_t *)y);
-    y_128 = _mm_adds_epi16(y_128, _mm_unpacklo_epi16(yr, yi));
-    y_128 = _mm_adds_epi16(y_128, _mm_unpackhi_epi16(yr, yi));
+    simd_q15_t y_128 = simde_mm_loadu_si128((simd_q15_t *)y);
+    y_128 = simde_mm_adds_epi16(y_128, simde_mm_unpacklo_epi16(yr, yi));
+    y_128 = simde_mm_adds_epi16(y_128, simde_mm_unpackhi_epi16(yr, yi));
 
-    _mm_storeu_si128((simd_q15_t *)y, y_128);
-  }
+    simde_mm_storeu_si128((simd_q15_t *)y, y_128);
+}
 
 /*!\fn void multadd_complex_vector_real_scalar(int16_t *x,int16_t alpha,int16_t *y,uint8_t zero_flag,uint32_t N)
 This function performs componentwise multiplication and accumulation of a real scalar and a complex vector.
@@ -842,8 +788,8 @@ c32_t dot_product(const c16_t *x,
 
 double interp(double x, double *xs, double *ys, int count);
 
-void simde_mm128_separate_real_imag_parts(__m128i *out_re, __m128i *out_im, __m128i in0, __m128i in1);
-void simde_mm256_separate_real_imag_parts(__m256i *out_re, __m256i *out_im, __m256i in0, __m256i in1);
+void simde_mm128_separate_real_imag_parts(simde__m128i *out_re, simde__m128i *out_im, simde__m128i in0, simde__m128i in1);
+void simde_mm256_separate_real_imag_parts(simde__m256i *out_re, simde__m256i *out_im, simde__m256i in0, simde__m256i in1);
 
 #ifdef __cplusplus
 }
diff --git a/openair1/PHY/sse_intrin.h b/openair1/PHY/sse_intrin.h
index fc6836fd30faeaa985cb025722b41cf1ce4f0f36..655c38982e69067b50a61a4e91eebd84b59fa774 100644
--- a/openair1/PHY/sse_intrin.h
+++ b/openair1/PHY/sse_intrin.h
@@ -49,10 +49,6 @@
 #define SSE_INTRIN_H
 
 
-#if defined(__x86_64) || defined(__i386__)
-
-/* x86 processors */
-
 #include <simde/x86/mmx.h>
 #include <simde/x86/sse.h>
 #include <simde/x86/sse2.h>
@@ -62,63 +58,54 @@
 #include <simde/x86/sse4.2.h>
 #include <simde/x86/avx2.h>
 #include <simde/x86/fma.h>
+#if defined(__x86_64) || defined(__i386__)
+
+/* x86 processors */
 
 #if defined(__AVX512BW__) || defined(__AVX512F__)
 #include <immintrin.h>
 #endif
-
 #elif defined(__arm__) || defined(__aarch64__)
 
 /* ARM processors */
+// note: including this header fails to compile on some x86 machines, with an error like:
+// /usr/lib/gcc/x86_64-redhat-linux/8/include/gfniintrin.h:57:1: error: inlining failed in call to always_inline ‘_mm_gf2p8affine_epi64_epi8’: target specific option mismatch
+#include <simde/x86/clmul.h>
 
 #include <simde/arm/neon.h>
-
 #endif // x86_64 || i386
+#include <stdbool.h>
+#include "assertions.h"
 
 /*
  * OAI specific
  */
 
-#if defined(__x86_64__) || defined(__i386__)
-  #define vect128 __m128i
-#elif defined(__arm__) || defined(__aarch64__)
-  #define vect128 int16x8_t
-#endif
-
 static const short minusConjug128[8]__attribute__((aligned(16))) = {-1,1,-1,1,-1,1,-1,1};
-static inline vect128 mulByConjugate128(vect128 *a, vect128 *b, int8_t output_shift) {
-
-#if defined(__x86_64__) || defined(__i386__)
-  vect128 realPart = _mm_madd_epi16(*a,*b);
-  realPart = _mm_srai_epi32(realPart,output_shift);
-  vect128 imagPart = _mm_shufflelo_epi16(*b,_MM_SHUFFLE(2,3,0,1));
-  imagPart = _mm_shufflehi_epi16(imagPart,_MM_SHUFFLE(2,3,0,1));
-  imagPart = _mm_sign_epi16(imagPart,*(vect128 *)minusConjug128);
-  imagPart = _mm_madd_epi16(imagPart,*a);
-  imagPart = _mm_srai_epi32(imagPart,output_shift);
-  vect128 lowPart = _mm_unpacklo_epi32(realPart,imagPart);
-  vect128 highPart = _mm_unpackhi_epi32(realPart,imagPart);
-  return ( _mm_packs_epi32(lowPart,highPart));
-#elif defined(__arm__) || defined(__aarch64__)
-  AssertFatal(false, "not developped\n");
-#endif
+static inline simde__m128i mulByConjugate128(simde__m128i *a, simde__m128i *b, int8_t output_shift) {
+
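+  // per 16-bit complex lane this computes (a * conj(b)) >> output_shift:
+  // madd(a,b) yields the real part; the shuffles plus the sign mask build
+  // (-b.im, b.re) so the second madd yields the imaginary part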
+  simde__m128i realPart = simde_mm_madd_epi16(*a,*b);
+  realPart = simde_mm_srai_epi32(realPart,output_shift);
+  simde__m128i imagPart = simde_mm_shufflelo_epi16(*b, SIMDE_MM_SHUFFLE(2,3,0,1));
+  imagPart = simde_mm_shufflehi_epi16(imagPart, SIMDE_MM_SHUFFLE(2,3,0,1));
+  imagPart = simde_mm_sign_epi16(imagPart,*(simde__m128i *)minusConjug128);
+  imagPart = simde_mm_madd_epi16(imagPart,*a);
+  imagPart = simde_mm_srai_epi32(imagPart,output_shift);
+  simde__m128i lowPart = simde_mm_unpacklo_epi32(realPart,imagPart);
+  simde__m128i highPart = simde_mm_unpackhi_epi32(realPart,imagPart);
+  return ( simde_mm_packs_epi32(lowPart,highPart));
 }
 
-#if defined(__x86_64__) || defined(__i386__)
 #define displaySamples128(vect)  {\
-    __m128i x=vect;                                       \
+    simde__m128i x=vect;                                       \
     printf("vector: %s = (%hd,%hd) (%hd,%hd) (%hd,%hd) (%hd,%hd)\n", #vect, \
-           _mm_extract_epi16(x,0),                                  \
-           _mm_extract_epi16(x,1),\
-           _mm_extract_epi16(x,2),\
-           _mm_extract_epi16(x,3),\
-           _mm_extract_epi16(x,4),\
-           _mm_extract_epi16(x,5),\
-           _mm_extract_epi16(x,6),\
-           _mm_extract_epi16(x,7));\
+           simde_mm_extract_epi16(x,0),                                  \
+           simde_mm_extract_epi16(x,1),\
+           simde_mm_extract_epi16(x,2),\
+           simde_mm_extract_epi16(x,3),\
+           simde_mm_extract_epi16(x,4),\
+           simde_mm_extract_epi16(x,5),\
+           simde_mm_extract_epi16(x,6),\
+           simde_mm_extract_epi16(x,7));\
   }
-#elif defined(__arm__) || defined(__aarch64__)
-  displaySamples128(vect) {}
-//TBD
-#endif
 #endif // SSE_INTRIN_H
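
For readers checking the SIMDE port above: mulByConjugate128 computes a * conj(b) per complex sample, with four int16 I/Q pairs per 128-bit vector. A minimal scalar sketch of the same arithmetic (illustrative names, not part of the patch):

```c
#include <stdint.h>
#include <limits.h>

static int16_t sat16(int32_t x) {
  if (x > INT16_MAX) return INT16_MAX;
  if (x < INT16_MIN) return INT16_MIN;
  return (int16_t)x;
}

/* out = (a * conj(b)) >> output_shift for four interleaved (re,im) int16
 * samples; mirrors the madd/shuffle/sign/pack sequence above */
static void mul_by_conjugate_ref(const int16_t a[8], const int16_t b[8],
                                 int16_t out[8], int8_t output_shift) {
  for (int k = 0; k < 4; k++) {
    int32_t re = (int32_t)a[2 * k] * b[2 * k] + (int32_t)a[2 * k + 1] * b[2 * k + 1];
    int32_t im = (int32_t)a[2 * k + 1] * b[2 * k] - (int32_t)a[2 * k] * b[2 * k + 1];
    out[2 * k]     = sat16(re >> output_shift);
    out[2 * k + 1] = sat16(im >> output_shift);
  }
}
```
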
diff --git a/openair1/SIMULATION/LTE_PHY/dlsim.c b/openair1/SIMULATION/LTE_PHY/dlsim.c
index 592565bb160b03df4dd1180536e344c50fc73ffd..c5f9659d7c513280bf23c4ab26cf8978cac9d25b 100644
--- a/openair1/SIMULATION/LTE_PHY/dlsim.c
+++ b/openair1/SIMULATION/LTE_PHY/dlsim.c
@@ -588,24 +588,7 @@ int main(int argc, char **argv) {
   nfapi_tx_request_t TX_req;
   Sched_Rsp_t sched_resp;
   int pa=dB0;
-#if defined(__arm__) || defined(__aarch64__)
-  FILE    *proc_fd = NULL;
-  char buf[64];
-  memset(buf,0,sizeof(buf));
-  proc_fd = fopen("/sys/devices/system/cpu/cpu4/cpufreq/cpuinfo_cur_freq", "r");
-
-  if(!proc_fd)
-    printf("cannot open /sys/devices/system/cpu/cpu4/cpufreq/cpuinfo_cur_freq");
-  else {
-    while(fgets(buf, 63, proc_fd))
-      printf("%s", buf);
-  }
-
-  fclose(proc_fd);
-  cpu_freq_GHz = ((double)atof(buf))/1e6;
-#else
   cpu_freq_GHz = get_cpu_freq_GHz();
-#endif
   printf("Detected cpu_freq %f GHz\n",cpu_freq_GHz);
   memset((void *)&sched_resp,0,sizeof(sched_resp));
   sched_resp.DL_req = &DL_req;
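
Dropping the ARM-only sysfs branch also removes a latent bug: the deleted code called fclose(proc_fd) even when fopen() had returned NULL. Should a sysfs fallback ever be wanted again, a safer sketch (same path and kHz scaling as the deleted code, not part of this patch) would be:

```c
#include <stdio.h>
#include <stdlib.h>

/* returns 0.0 on failure so the caller can fall back to get_cpu_freq_GHz() */
static double sysfs_cpu_freq_GHz(void) {
  FILE *f = fopen("/sys/devices/system/cpu/cpu4/cpufreq/cpuinfo_cur_freq", "r");
  if (!f)
    return 0.0;
  char buf[64] = {0};
  double ghz = 0.0;
  if (fgets(buf, sizeof(buf), f))
    ghz = atof(buf) / 1e6; /* cpuinfo_cur_freq is reported in kHz */
  fclose(f); /* only reached when fopen() succeeded */
  return ghz;
}
```
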
diff --git a/openair1/SIMULATION/NR_PHY/dlsim.c b/openair1/SIMULATION/NR_PHY/dlsim.c
index c436fc5a27d5788606f983258ee32cbd329bf50e..47ac6a4c228fde116f66daf1461e59cffbc66a2c 100644
--- a/openair1/SIMULATION/NR_PHY/dlsim.c
+++ b/openair1/SIMULATION/NR_PHY/dlsim.c
@@ -276,7 +276,7 @@ int NB_UE_INST = 1;
 int main(int argc, char **argv)
 {
   setbuf(stdout, NULL);
-  char c;
+  int c;
   int i,aa;//,l;
   double sigma2, sigma2_dB=10, SNR, snr0=-2.0, snr1=2.0;
   uint8_t snr1set=0;
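
The `char c` to `int c` change is not cosmetic: getopt() returns int and signals the end of options with -1. On ABIs where plain char is unsigned (e.g. Linux on ARM, which this patch targets), storing that -1 in a char yields 255 and the `!= -1` test never fails, so option parsing loops forever. The same fix appears in ulsim.c below. A minimal illustration:

```c
#include <stdio.h>
#include <unistd.h>

int main(int argc, char *argv[]) {
  int c; /* must be int: getopt() returns -1 when options are exhausted */
  while ((c = getopt(argc, argv, "s:n:")) != -1) {
    switch (c) {
      case 's': printf("snr = %s\n", optarg); break;
      case 'n': printf("n = %s\n", optarg); break;
      default: return 1;
    }
  }
  /* with `char c` on an unsigned-char ABI, (char)-1 == 255 and the
   * comparison above would never terminate the loop */
  return 0;
}
```
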
diff --git a/openair1/SIMULATION/NR_PHY/ulsim.c b/openair1/SIMULATION/NR_PHY/ulsim.c
index bd30fa1267924e602729fe10a7fd02c7c9ec2c88..f307884d452bc9713b8625cbdd128d5e1cb284e2 100644
--- a/openair1/SIMULATION/NR_PHY/ulsim.c
+++ b/openair1/SIMULATION/NR_PHY/ulsim.c
@@ -151,10 +151,10 @@ openair0_config_t openair0_cfg[MAX_CARDS];
 channel_desc_t *UE2gNB[NUMBER_OF_UE_MAX][NUMBER_OF_gNB_MAX];
 int NB_UE_INST = 1;
 
-int main(int argc, char **argv)
+int main(int argc, char *argv[])
 {
 
-  char c;
+  int c;
   int i;
   double SNR, snr0 = -2.0, snr1 = 2.0;
   double sigma, sigma_dB;
@@ -233,7 +233,7 @@ int main(int argc, char **argv)
   randominit(0);
 
   /* initialize the sin-cos table */
-   InitSinLUT();
+  InitSinLUT();
 
   while ((c = getopt(argc, argv, "a:b:c:d:ef:g:h:i:kl:m:n:op:q:r:s:t:u:v:w:y:z:C:F:G:H:I:M:N:PR:S:T:U:L:ZW:E:")) != -1) {
     printf("handling optarg %c\n",c);
diff --git a/openair1/SIMULATION/TOOLS/multipath_channel.c b/openair1/SIMULATION/TOOLS/multipath_channel.c
index 3f92dbbf045b587a3e5f5e8d4d0be863da2bdf7a..55bc8d70b8a9cd49831f7c89deab06e312bf01f1 100644
--- a/openair1/SIMULATION/TOOLS/multipath_channel.c
+++ b/openair1/SIMULATION/TOOLS/multipath_channel.c
@@ -51,12 +51,13 @@ void __attribute__ ((no_sanitize_address)) multipath_channel(channel_desc_t *des
 
   int i,ii,j,l;
   int length1, length2, tail;
-  __m128d rx_tmp128_re_f,rx_tmp128_im_f,rx_tmp128_re,rx_tmp128_im, rx_tmp128_1,rx_tmp128_2,rx_tmp128_3,rx_tmp128_4,tx128_re,tx128_im,ch128_x,ch128_y,pathloss128;
+  simde__m128d rx_tmp128_re_f, rx_tmp128_im_f, rx_tmp128_re, rx_tmp128_im, rx_tmp128_1, rx_tmp128_2, rx_tmp128_3, rx_tmp128_4,
+      tx128_re, tx128_im, ch128_x, ch128_y, pathloss128;
 
   double path_loss = pow(10,desc->path_loss_dB/20);
   int dd = abs(desc->channel_offset);
 
-  pathloss128 = _mm_set1_pd(path_loss);
+  pathloss128 = simde_mm_set1_pd(path_loss);
 
 #ifdef DEBUG_CH
   printf("[CHANNEL] keep = %d : path_loss = %g (%f), nb_rx %d, nb_tx %d, dd %d, len %d \n",keep_channel,path_loss,desc->path_loss_dB,desc->nb_rx,desc->nb_tx,dd,desc->channel_length);
@@ -92,44 +93,44 @@ void __attribute__ ((no_sanitize_address)) multipath_channel(channel_desc_t *des
     for (ii=0; ii<desc->nb_rx; ii++) {
       // rx_tmp.x = 0;
       // rx_tmp.y = 0;
-      rx_tmp128_re_f = _mm_setzero_pd();
-      rx_tmp128_im_f = _mm_setzero_pd();
+      rx_tmp128_re_f = simde_mm_setzero_pd();
+      rx_tmp128_im_f = simde_mm_setzero_pd();
 
       for (j=0; j<desc->nb_tx; j++) {
         for (l = 0; l<(int)desc->channel_length; l++) {
           if ((i>=0) && (i-l)>=0) { //SIMD correct only if length1 > 2*channel_length...which is almost always satisfied
             // tx.x = tx_sig_re[j][i-l];
             // tx.y = tx_sig_im[j][i-l];
-            tx128_re = _mm_loadu_pd(&tx_sig_re[j][2*i-l]); // tx_sig_re[j][i-l+1], tx_sig_re[j][i-l]
-            tx128_im = _mm_loadu_pd(&tx_sig_im[j][2*i-l]);
+            tx128_re = simde_mm_loadu_pd(&tx_sig_re[j][2 * i - l]); // tx_sig_re[j][i-l+1], tx_sig_re[j][i-l]
+            tx128_im = simde_mm_loadu_pd(&tx_sig_im[j][2 * i - l]);
           } else {
             //tx.x =0;
             //tx.y =0;
-            tx128_re = _mm_setzero_pd();
-            tx128_im = _mm_setzero_pd();
+            tx128_re = simde_mm_setzero_pd();
+            tx128_im = simde_mm_setzero_pd();
           }
 
-          ch128_x = _mm_set1_pd(desc->ch[ii+(j*desc->nb_rx)][l].x);
-          ch128_y = _mm_set1_pd(desc->ch[ii+(j*desc->nb_rx)][l].y);
+          ch128_x = simde_mm_set1_pd(desc->ch[ii + (j * desc->nb_rx)][l].x);
+          ch128_y = simde_mm_set1_pd(desc->ch[ii + (j * desc->nb_rx)][l].y);
           //  rx_tmp.x += (tx.x * desc->ch[ii+(j*desc->nb_rx)][l].x) - (tx.y * desc->ch[ii+(j*desc->nb_rx)][l].y);
           //  rx_tmp.y += (tx.y * desc->ch[ii+(j*desc->nb_rx)][l].x) + (tx.x * desc->ch[ii+(j*desc->nb_rx)][l].y);
-          rx_tmp128_1 = _mm_mul_pd(tx128_re,ch128_x);
-          rx_tmp128_2 = _mm_mul_pd(tx128_re,ch128_y);
-          rx_tmp128_3 = _mm_mul_pd(tx128_im,ch128_x);
-          rx_tmp128_4 = _mm_mul_pd(tx128_im,ch128_y);
-          rx_tmp128_re = _mm_sub_pd(rx_tmp128_1,rx_tmp128_4);
-          rx_tmp128_im = _mm_add_pd(rx_tmp128_2,rx_tmp128_3);
-          rx_tmp128_re_f = _mm_add_pd(rx_tmp128_re_f,rx_tmp128_re);
-          rx_tmp128_im_f = _mm_add_pd(rx_tmp128_im_f,rx_tmp128_im);
+          rx_tmp128_1 = simde_mm_mul_pd(tx128_re, ch128_x);
+          rx_tmp128_2 = simde_mm_mul_pd(tx128_re, ch128_y);
+          rx_tmp128_3 = simde_mm_mul_pd(tx128_im, ch128_x);
+          rx_tmp128_4 = simde_mm_mul_pd(tx128_im, ch128_y);
+          rx_tmp128_re = simde_mm_sub_pd(rx_tmp128_1, rx_tmp128_4);
+          rx_tmp128_im = simde_mm_add_pd(rx_tmp128_2, rx_tmp128_3);
+          rx_tmp128_re_f = simde_mm_add_pd(rx_tmp128_re_f, rx_tmp128_re);
+          rx_tmp128_im_f = simde_mm_add_pd(rx_tmp128_im_f, rx_tmp128_im);
         } //l
       }  // j
 
       //rx_sig_re[ii][i+dd] = rx_tmp.x*path_loss;
       //rx_sig_im[ii][i+dd] = rx_tmp.y*path_loss;
-      rx_tmp128_re_f = _mm_mul_pd(rx_tmp128_re_f,pathloss128);
-      rx_tmp128_im_f = _mm_mul_pd(rx_tmp128_im_f,pathloss128);
-      _mm_storeu_pd(&rx_sig_re[ii][2*i+dd],rx_tmp128_re_f); // max index: length-dd -1 + dd = length -1
-      _mm_storeu_pd(&rx_sig_im[ii][2*i+dd],rx_tmp128_im_f);
+      rx_tmp128_re_f = simde_mm_mul_pd(rx_tmp128_re_f, pathloss128);
+      rx_tmp128_im_f = simde_mm_mul_pd(rx_tmp128_im_f, pathloss128);
+      simde_mm_storeu_pd(&rx_sig_re[ii][2 * i + dd], rx_tmp128_re_f); // max index: length-dd -1 + dd = length -1
+      simde_mm_storeu_pd(&rx_sig_im[ii][2 * i + dd], rx_tmp128_im_f);
       /*
       if ((ii==0)&&((i%32)==0)) {
       printf("%p %p %f,%f => %e,%e\n",rx_sig_re[ii],rx_sig_im[ii],rx_tmp.x,rx_tmp.y,rx_sig_re[ii][i-dd],rx_sig_im[ii][i-dd]);
diff --git a/openair1/SIMULATION/TOOLS/random_channel.c b/openair1/SIMULATION/TOOLS/random_channel.c
index f360adad32dc822c12292eda32799bfced1fbfc0..75dcd5707d16555bc611aeedae187955e36b052d 100644
--- a/openair1/SIMULATION/TOOLS/random_channel.c
+++ b/openair1/SIMULATION/TOOLS/random_channel.c
@@ -42,7 +42,7 @@
 
 #include "assertions.h"
 
-extern void print_shorts(char *s,__m128i *x);
+extern void print_shorts(char *s,simde__m128i *x);
 static mapping channelmod_names[] = {
   CHANNELMOD_MAP_INIT
 };
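
For reference, a print_shorts() matching the new prototype just dumps the eight int16 lanes of a simde__m128i; a hedged sketch (the actual definition lives elsewhere in OAI):

```c
#include <stdio.h>
#include <stdint.h>
#include <simde/x86/sse2.h>

void print_shorts(char *s, simde__m128i *x) {
  int16_t *v = (int16_t *)x;
  printf("%s : %d,%d,%d,%d,%d,%d,%d,%d\n",
         s, v[0], v[1], v[2], v[3], v[4], v[5], v[6], v[7]);
}
```
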
diff --git a/radio/ETHERNET/eth_udp.c b/radio/ETHERNET/eth_udp.c
index fcf34661f5d49bad00714f856867aa03454cadec..82772e89056bd8debf11ba3bfc8de5a1aa7f77de 100644
--- a/radio/ETHERNET/eth_udp.c
+++ b/radio/ETHERNET/eth_udp.c
@@ -321,18 +321,10 @@ void *trx_eth_write_udp_cmd(udpTXelem_t *udpTXelem) {
   if (TS_advance < (nsamps/2)) {
      LOG_W(PHY,"Starting TX FH for TS %llu absslot %llu(%llu) last_rxTS %llu TS_advance %llu samples\n",(unsigned long long)timestamp,(unsigned long long)timestamp/nsamps,((unsigned long long)timestamp/nsamps)%20,(unsigned long long)last_rxTS,(unsigned long long)TS_advance);
   }
-     void *buff2;
-#if defined(__x86_64) || defined(__i386__)
+  void *buff2;
   int nsamps2 = 256>>3;
-  __m256i buff_tx[nsamps2+1];
-  buff2=(void*)&buff_tx[1] - APP_HEADER_SIZE_BYTES;
-#elif defined(__arm__) || defined(__aarch64__)
-  int nsamps2 = 256>>2;
-  int16x8_t buff_tx[nsamps2+2];
-  buff2=(void*)&buff_tx[2] - APP_HEADER_SIZE_BYTES;
-#else
-#error Unsupported CPU architecture, ethernet device cannot be built
-#endif
+  simde__m256i buff_tx[nsamps2 + 1];
+  buff2 = (void *)&buff_tx[1] - APP_HEADER_SIZE_BYTES;
 
   /* construct application header */
   // ECPRI Protocol revision + reserved bits (1 byte)
@@ -360,9 +352,8 @@ void *trx_eth_write_udp_cmd(udpTXelem_t *udpTXelem) {
       LOG_D(PHY,"TS %llu (TS0 %llu) aa %d : offset %d, len %d\n",(unsigned long long)TS,(unsigned long long)fhstate->TS0,aid,offset,len);
       // ECPRI PC_ID (2 bytes)
       *(uint16_t *)(buff2 + 4) = aid;
-      // bring TX data into 12 MSBs 
-#if defined(__x86_64__) || defined(__i386__)
-      __m256i *buff256 = (__m256i *)&(((int32_t*)buff[aid])[offset]);
+      // bring TX data into 12 MSBs
+      simde__m256i *buff256 = (simde__m256i *)&(((int32_t *)buff[aid])[offset]);
       for (int j=0; j<32; j+=8) {
         buff_tx[1+j] = simde_mm256_slli_epi16(buff256[j],4);
         buff_tx[2+j] = simde_mm256_slli_epi16(buff256[j+1],4);
@@ -373,11 +364,7 @@ void *trx_eth_write_udp_cmd(udpTXelem_t *udpTXelem) {
         buff_tx[7+j] = simde_mm256_slli_epi16(buff256[j+6],4);
         buff_tx[8+j] = simde_mm256_slli_epi16(buff256[j+7],4);
       }
-#elif defined(__arm__)
-      int16x8_t *buff128 = (__int16x8_t*)&buff[aid][offset];
-      for (int j=0; j<64; j++) buff_tx[2+j] = vshlq_n_s16(((int16x8_t *)buff128)[j],4);
-#endif
-    
+
        /* Send packet */
       bytes_sent = sendto(eth->sockfdd[0],
                           buff2, 
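
The slli-by-4 loop implements the "12 MSBs" comment: the softmodem keeps 12-bit samples in the low bits of each int16, and the fronthaul format expects them in the high bits. Per int16 lane (16 lanes per simde__m256i), the operation is simply:

```c
#include <stdint.h>

/* move a 12-bit sample from the 12 LSBs to the 12 MSBs of an int16;
 * one simde_mm256_slli_epi16 does this for 16 lanes at once */
static inline int16_t to_12_msbs(int16_t s) {
  return (int16_t)(s << 4);
}
```
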
diff --git a/radio/IRIS/iris_lib.cpp b/radio/IRIS/iris_lib.cpp
index 32a2f51db0211572f873065f7a136786830d6d47..13cdfb6a4469868704996c956f9630c0bd86cd64 100644
--- a/radio/IRIS/iris_lib.cpp
+++ b/radio/IRIS/iris_lib.cpp
@@ -139,20 +139,14 @@ trx_iris_write(openair0_device *device, openair0_timestamp timestamp, void **buf
     int flag = 0;
 
     iris_state_t *s = (iris_state_t *) device->priv;
-    int nsamps2;  // aligned to upper 32 or 16 byte boundary
-#if defined(__x86_64) || defined(__i386__)
+    int nsamps2; // aligned to upper 32 or 16 byte boundary
     nsamps2 = (nsamps+7)>>3;
-    __m256i buff_tx[2][nsamps2];
-#else
-  #error unsupported CPU architecture, iris device cannot be built
-#endif
+    simde__m256i buff_tx[2][nsamps2];
 
     // bring RX data into 12 LSBs for softmodem RX
     for (int i=0; i<cc; i++) {
-      for (int j=0; j<nsamps2; j++) {
-#if defined(__x86_64__) || defined(__i386__)
-        buff_tx[i][j] = simde_mm256_slli_epi16(((__m256i *)buff[i])[j],4);
-#endif
+      for (int j = 0; j < nsamps2; j++) {
+        buff_tx[i][j] = simde_mm256_slli_epi16(((simde__m256i *)buff[i])[j], 4);
       }
     }
 
@@ -228,11 +222,9 @@ static int trx_iris_read(openair0_device *device, openair0_timestamp *ptimestamp
 
     int r;
     int m = s->rx_num_channels;
-    int nsamps2;  // aligned to upper 32 or 16 byte boundary
-#if defined(__x86_64) || defined(__i386__)
+    int nsamps2; // aligned to upper 32 or 16 byte boundary
     nsamps2 = (nsamps+7)>>3;
-    __m256i buff_tmp[2][nsamps2];
-#endif
+    simde__m256i buff_tmp[2][nsamps2];
 
     for (r = 0; r < s->device_num; r++) {
         flags = 0;
@@ -301,10 +293,8 @@ static int trx_iris_read(openair0_device *device, openair0_timestamp *ptimestamp
 
         // bring RX data into 12 LSBs for softmodem RX
         for (int i=0; i<cc; i++) {
-          for (int j=0; j<nsamps2; j++) {
-#if defined(__x86_64__) || defined(__i386__)
-            ((__m256i *)buff[i])[j] = simde_mm256_srai_epi16(buff_tmp[i][j],4);
-#endif
+          for (int j = 0; j < nsamps2; j++) {
+            ((simde__m256i *)buff[i])[j] = simde_mm256_srai_epi16(buff_tmp[i][j], 4);
           }
         }
     }
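
A note on the sizing that survives the cleanup: nsamps2 = (nsamps + 7) >> 3 is a ceiling division, because one simde__m256i holds 8 complex samples (8 pairs of int16 I/Q). Sketch:

```c
/* number of 256-bit vectors needed for nsamps complex int16 samples */
static int vec256_chunks(int nsamps) {
  return (nsamps + 7) >> 3; /* == ceil(nsamps / 8) */
}
/* e.g. vec256_chunks(30720) == 3840, vec256_chunks(9) == 2 (last vector padded) */
```
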
diff --git a/radio/USRP/usrp_lib.cpp b/radio/USRP/usrp_lib.cpp
index 03058869e8561f3ae182d775c77dd4234008823f..b071d3fe8e409918210a19ee802cc8c759e83ea2 100644
--- a/radio/USRP/usrp_lib.cpp
+++ b/radio/USRP/usrp_lib.cpp
@@ -470,33 +470,20 @@ static int trx_usrp_write(openair0_device *device,
      last_packet_state  = true;
     }
 
-  if(usrp_tx_thread == 0){
-#if defined(__x86_64) || defined(__i386__)
+    if (usrp_tx_thread == 0) {
       nsamps2 = (nsamps+7)>>3;
-      __m256i buff_tx[cc<2?2:cc][nsamps2];
-#elif defined(__arm__) || defined(__aarch64__)
-    nsamps2 = (nsamps+3)>>2;
-    int16x8_t buff_tx[cc<2?2:cc][nsamps2];
-#else
-#error Unsupported CPU architecture, USRP device cannot be built
-#endif
-
-    // bring RX data into 12 LSBs for softmodem RX
-    for (int i=0; i<cc; i++) {
-      for (int j=0; j<nsamps2; j++) {
-#if defined(__x86_64__) || defined(__i386__)
-        if ((((uintptr_t) buff[i])&0x1F)==0) {
-          buff_tx[i][j] = simde_mm256_slli_epi16(((__m256i *)buff[i])[j],4);
+      simde__m256i buff_tx[cc < 2 ? 2 : cc][nsamps2];
+
+      // bring TX data into 12 MSBs
+      for (int i = 0; i < cc; i++) {
+        for (int j = 0; j < nsamps2; j++) {
+          if ((((uintptr_t)buff[i]) & 0x1F) == 0) {
+            buff_tx[i][j] = simde_mm256_slli_epi16(((simde__m256i *)buff[i])[j], 4);
+          } else {
+            simde__m256i tmp = simde_mm256_loadu_si256(((simde__m256i *)buff[i]) + j);
+            buff_tx[i][j] = simde_mm256_slli_epi16(tmp, 4);
+          }
         }
-        else 
-        {
-          __m256i tmp = simde_mm256_loadu_si256(((__m256i *)buff[i])+j);
-          buff_tx[i][j] = simde_mm256_slli_epi16(tmp,4);
-        }
-#elif defined(__arm__) || defined(__aarch64__)
-        buff_tx[i][j] = vshlq_n_s16(((int16x8_t *)buff[i])[j],4);
-#endif
-      }
     }
 
     s->tx_md.has_time_spec  = true;
@@ -529,33 +516,35 @@ VCD_SIGNAL_DUMPER_DUMP_FUNCTION_BY_NAME(VCD_SIGNAL_DUMPER_FUNCTIONS_BEAM_SWITCHI
 
     if (ret != nsamps) LOG_E(HW,"[xmit] tx samples %d != %d\n",ret,nsamps);
     return ret;
-  }
-  else{
-    pthread_mutex_lock(&write_thread->mutex_write);
+    } else {
+      pthread_mutex_lock(&write_thread->mutex_write);
+
+      if (write_thread->count_write >= MAX_WRITE_THREAD_PACKAGE) {
+        LOG_W(HW,
+              "Buffer overflow, count_write = %d, start = %d end = %d, resetting write package\n",
+              write_thread->count_write,
+              write_thread->start,
+              write_thread->end);
+        write_thread->end = write_thread->start;
+        write_thread->count_write = 0;
+      }
 
-    if(write_thread->count_write >= MAX_WRITE_THREAD_PACKAGE){
-      LOG_W(HW,"Buffer overflow, count_write = %d, start = %d end = %d, resetting write package\n", write_thread->count_write, write_thread->start, write_thread->end);
-      write_thread->end = write_thread->start;
-      write_thread->count_write = 0;
+      end = write_thread->end;
+      write_package[end].timestamp = timestamp;
+      write_package[end].nsamps = nsamps;
+      write_package[end].cc = cc;
+      write_package[end].first_packet = first_packet_state;
+      write_package[end].last_packet = last_packet_state;
+      write_package[end].flags_gpio = flags_gpio;
+      for (int i = 0; i < cc; i++)
+        write_package[end].buff[i] = buff[i];
+      write_thread->count_write++;
+      write_thread->end = (write_thread->end + 1) % MAX_WRITE_THREAD_PACKAGE;
+      LOG_D(HW, "Signaling TX TS %llu\n", (unsigned long long)timestamp);
+      pthread_cond_signal(&write_thread->cond_write);
+      pthread_mutex_unlock(&write_thread->mutex_write);
+      return 0;
     }
-
-    end = write_thread->end;
-    write_package[end].timestamp    = timestamp;
-    write_package[end].nsamps       = nsamps;
-    write_package[end].cc           = cc;
-    write_package[end].first_packet = first_packet_state;
-    write_package[end].last_packet  = last_packet_state;
-    write_package[end].flags_gpio    = flags_gpio;
-    for (int i = 0; i < cc; i++)
-      write_package[end].buff[i]    = buff[i];
-    write_thread->count_write++;
-    write_thread->end = (write_thread->end + 1)% MAX_WRITE_THREAD_PACKAGE;
-    LOG_D(HW,"Signaling TX TS %llu\n",(unsigned long long)timestamp);
-    pthread_cond_signal(&write_thread->cond_write);
-    pthread_mutex_unlock(&write_thread->mutex_write);
-    return 0;
-  }
-
 }
 
 //-----------------------start--------------------------
@@ -609,34 +598,22 @@ void *trx_usrp_write_thread(void * arg){
       LOG_W(HW,"count write = %d, start = %d, end = %d\n", write_thread->count_write, write_thread->start, write_thread->end);
     }*/
 
-    #if defined(__x86_64) || defined(__i386__)
         nsamps2 = (nsamps+7)>>3;
-        __m256i buff_tx[cc<2?2:cc][nsamps2];
-    #elif defined(__arm__) || defined(__aarch64__)
-      nsamps2 = (nsamps+3)>>2;
-      int16x8_t buff_tx[cc<2?2:cc][nsamps2];
-    #else
-    #error Unsupported CPU architecture, USRP device cannot be built
-    #endif
-
-    // bring RX data into 12 LSBs for softmodem RX
-    for (int i=0; i<cc; i++) {
-      for (int j=0; j<nsamps2; j++) {
-        #if defined(__x86_64__) || defined(__i386__)
+        simde__m256i buff_tx[cc < 2 ? 2 : cc][nsamps2];
+
+        // bring TX data into 12 MSBs
+        for (int i = 0; i < cc; i++) {
+          for (int j = 0; j < nsamps2; j++) {
             if ((((uintptr_t) buff[i])&0x1F)==0) {
-              buff_tx[i][j] = simde_mm256_slli_epi16(((__m256i *)buff[i])[j],4);
+              buff_tx[i][j] = simde_mm256_slli_epi16(((simde__m256i *)buff[i])[j], 4);
             }
             else
             {
-              __m256i tmp = simde_mm256_loadu_si256(((__m256i *)buff[i])+j);
+              simde__m256i tmp = simde_mm256_loadu_si256(((simde__m256i *)buff[i]) + j);
               buff_tx[i][j] = simde_mm256_slli_epi16(tmp,4);
             }
-        #elif defined(__arm__) || defined(__aarch64__)
-          buff_tx[i][j] = vshlq_n_s16(((int16x8_t *)buff[i])[j],4);
-        #endif
-      }
-    }
-
+          }
+        }
 
     s->tx_md.has_time_spec  = true;
     s->tx_md.start_of_burst = (s->tx_count==0) ? true : first_packet;
@@ -723,14 +700,9 @@ static void trx_usrp_write_reset(openair0_thread_t *wt) {
 static int trx_usrp_read(openair0_device *device, openair0_timestamp *ptimestamp, void **buff, int nsamps, int cc) {
   usrp_state_t *s = (usrp_state_t *)device->priv;
   int samples_received=0;
-  int nsamps2;  // aligned to upper 32 or 16 byte boundary
-#if defined(__x86_64) || defined(__i386__)
+  int nsamps2; // aligned to upper 32 or 16 byte boundary
   nsamps2 = (nsamps+7)>>3;
-  __m256i buff_tmp[cc<2 ? 2 : cc][nsamps2];
-#elif defined(__arm__) || defined(__aarch64__)
-  nsamps2 = (nsamps+3)>>2;
-  int16x8_t buff_tmp[cc<2 ? 2 : cc][nsamps2];
-#endif
+  simde__m256i buff_tmp[cc < 2 ? 2 : cc][nsamps2];
   static int read_count = 0;
   int rxshift;
   switch (device->type) {
@@ -772,21 +744,16 @@ static int trx_usrp_read(openair0_device *device, openair0_timestamp *ptimestamp
 
   // bring RX data into 12 LSBs for softmodem RX
   for (int i=0; i<cc; i++) {
-    for (int j=0; j<nsamps2; j++) {
-#if defined(__x86_64__) || defined(__i386__)
+    for (int j = 0; j < nsamps2; j++) {
       // FK: in some cases the buffer might not be 32 byte aligned, so we cannot use avx2
 
       if ((((uintptr_t) buff[i])&0x1F)==0) {
-        ((__m256i *)buff[i])[j] = simde_mm256_srai_epi16(buff_tmp[i][j],rxshift);
+        ((simde__m256i *)buff[i])[j] = simde_mm256_srai_epi16(buff_tmp[i][j], rxshift);
       } else {
-        __m256i tmp = simde_mm256_srai_epi16(buff_tmp[i][j],rxshift);
-        simde_mm256_storeu_si256(((__m256i *)buff[i])+j, tmp);
+        simde__m256i tmp = simde_mm256_srai_epi16(buff_tmp[i][j], rxshift);
+        simde_mm256_storeu_si256(((simde__m256i *)buff[i]) + j, tmp);
       }
     }
-#elif defined(__arm__) || defined(__aarch64__)
-      for (int j=0; j<nsamps2; j++) 
-        ((int16x8_t *)buff[i])[j] = vshrq_n_s16(buff_tmp[i][j],rxshift);
-#endif
   }
 
   if (samples_received < nsamps) {
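
The `& 0x1F` test that now guards both paths checks 32-byte alignment: dereferencing a simde__m256i pointer directly assumes it, while unaligned buffers must go through loadu/storeu. A condensed sketch of the pattern (assumes SIMDE headers; not the literal OAI code):

```c
#include <stdint.h>
#include <simde/x86/avx2.h>

/* load the j-th 256-bit chunk of buf and shift its int16 lanes left by 4 */
static simde__m256i load_shl4(const void *buf, int j) {
  if (((uintptr_t)buf & 0x1F) == 0) /* 32-byte aligned: direct access is fine */
    return simde_mm256_slli_epi16(((const simde__m256i *)buf)[j], 4);
  simde__m256i tmp = simde_mm256_loadu_si256((const simde__m256i *)buf + j);
  return simde_mm256_slli_epi16(tmp, 4);
}
```
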
diff --git a/radio/rfsimulator/stored_node.c b/radio/rfsimulator/stored_node.c
index 17ab3cb734391e417dda4793993f1fddfac353c4..28f236ab37be095b75f7fe6f9a40ae4025f18798 100644
--- a/radio/rfsimulator/stored_node.c
+++ b/radio/rfsimulator/stored_node.c
@@ -48,10 +48,10 @@ int fullread(int fd, void *_buf, int count) {
 int32_t signal_energy(int32_t *input,uint32_t length) {
   int32_t i;
   int32_t temp,temp2;
-  register __m64 mm0,mm1,mm2,mm3;
-  __m64 *in = (__m64 *)input;
-  mm0 = _mm_setzero_si64();//pxor(mm0,mm0);
-  mm3 = _mm_setzero_si64();//pxor(mm3,mm3);
+  simde__m64 mm0, mm1, mm2, mm3;
+  simde__m64 *in = (simde__m64 *)input;
+  mm0 = simde_mm_setzero_si64(); // pxor(mm0,mm0);
+  mm3 = simde_mm_setzero_si64(); // pxor(mm3,mm3);
 
   for (i=0; i<length>>1; i++) {
     mm1 = in[i];
@@ -77,8 +77,6 @@ int32_t signal_energy(int32_t *input,uint32_t length) {
   temp2/=(length*length);
   //  temp2<<=(2*shift_DC);
   temp -= temp2;
-  _mm_empty();
-  _m_empty();
   return((temp>0)?temp:1);
 }
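
Two closing notes on stored_node.c. First, the deleted `_mm_empty()`/`_m_empty()` pair was the x86 EMMS barrier that resets the FPU after MMX code; under SIMDE the __m64 operations are emulated (SSE or plain C), so there is typically no MMX state left to clear, and simde_mm_empty() exists should the barrier ever be needed again on native x86. Second, for orientation, a simplified scalar sketch of what signal_energy() estimates (ignoring the fixed-point shifts of the vector version):

```c
#include <stdint.h>

/* mean per-sample energy of interleaved int16 I/Q minus the squared DC term */
static int32_t signal_energy_ref(const int16_t *iq, uint32_t nsamps) {
  if (nsamps == 0)
    return 1;
  int64_t energy = 0, dc_i = 0, dc_q = 0;
  for (uint32_t n = 0; n < nsamps; n++) {
    energy += (int64_t)iq[2 * n] * iq[2 * n] + (int64_t)iq[2 * n + 1] * iq[2 * n + 1];
    dc_i += iq[2 * n];
    dc_q += iq[2 * n + 1];
  }
  int32_t e = (int32_t)(energy / nsamps);
  e -= (int32_t)((dc_i * dc_i + dc_q * dc_q) / ((int64_t)nsamps * nsamps));
  return e > 0 ? e : 1; /* clamp to 1, as the vector version does */
}
```
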